startai


Namestartai JSON
Version 0.0.8.0 PyPI version JSON
download
home_pagehttps://khulnasoft.com/startai
SummaryThe unified machine learning framework, enabling framework-agnostic functions, layers and libraries.
upload_time2024-04-02 02:21:59
maintainerNone
docs_urlNone
authorUnify
requires_pythonNone
licenseApache 2.0
keywords
VCS
bugtrack_url
requirements No requirements were recorded.
Travis-CI No Travis.
coveralls test coverage No coveralls.
            > **[Sign up on our console](https://console.khulnasoft.com/)** for pilot access!



<img class="only-light" width="100%" src="https://raw.githubusercontent.com/khulnasoft/khulnasoft.github.io/main/img/externally_linked/logo.png?raw=true#gh-light-mode-only"/>

------------------------------------------------------------------------

<div style="display: block;" align="center">
<a href="https://khulnasoft.com/startai">
    <img class="dark-light" src="https://raw.githubusercontent.com/khulnasoft/khulnasoft.github.io/main/img/externally_linked/website_button.svg">
</a>
<img class="dark-light" width="5%" src="https://raw.githubusercontent.com/khulnasoft/khulnasoft.github.io/main/img/externally_linked/logos/supported/empty.png">
<a href="https://khulnasoft.com/docs/startai">
    <img class="dark-light" src="https://raw.githubusercontent.com/khulnasoft/khulnasoft.github.io/main/img/externally_linked/docs_button.svg">
</a>
<img class="dark-light" width="5%" src="https://raw.githubusercontent.com/khulnasoft/khulnasoft.github.io/main/img/externally_linked/logos/supported/empty.png">
<a href="https://khulnasoft.com/demos">
    <img class="dark-light" src="https://raw.githubusercontent.com/khulnasoft/khulnasoft.github.io/main/img/externally_linked/examples_button.svg">
</a>
<img class="dark-light" width="5%" src="https://raw.githubusercontent.com/khulnasoft/khulnasoft.github.io/main/img/externally_linked/logos/supported/empty.png">
<a href="https://khulnasoft.com/docs/startai/overview/design.html">
    <img class="dark-light" src="https://raw.githubusercontent.com/khulnasoft/khulnasoft.github.io/main/img/externally_linked/design_button.svg">
</a>
<img class="dark-light" width="5%" src="https://raw.githubusercontent.com/khulnasoft/khulnasoft.github.io/main/img/externally_linked/logos/supported/empty.png">
<a href="https://khulnasoft.com/docs/startai/overview/faq.html">
    <img class="dark-light" src="https://raw.githubusercontent.com/khulnasoft/khulnasoft.github.io/main/img/externally_linked/faq_button.svg">
</a>
</div>

------------------------------------------------------------------------

# Status

<div>
    <a href="https://github.com/khulnasoft/startai/issues">
        <img class="dark-light" style="padding-right: 4px; padding-bottom: 4px;" src="https://img.shields.io/github/issues/khulnasoft/startai">
    </a>
    <a href="https://github.com/khulnasoft/startai/network/members">
        <img class="dark-light" style="padding-right: 4px; padding-bottom: 4px;" src="https://img.shields.io/github/forks/khulnasoft/startai">
    </a>
    <a href="https://github.com/khulnasoft/startai/stargazers">
        <img class="dark-light" style="padding-right: 4px; padding-bottom: 4px;" src="https://img.shields.io/github/stars/khulnasoft/startai">
    </a>
    <a href="https://github.com/khulnasoft/startai/pulls">
        <img class="dark-light" style="padding-right: 4px; padding-bottom: 4px;" src="https://img.shields.io/badge/PRs-welcome-brightgreen.svg">
    </a>
    <a href="https://pypi.org/project/startai">
        <img class="dark-light" style="padding-right: 4px; padding-bottom: 4px;" src="https://badge.fury.io/py/startai.svg">
    </a>
    <a href="https://github.com/khulnasoft/startai/actions?query=workflow%3Adocs">
        <img class="dark-light" style="padding-right: 4px; padding-bottom: 4px;" src="https://github.com/khulnasoft/startai/actions/workflows/docs.yml/badge.svg">
    </a>
    <a href="https://github.com/khulnasoft/startai/actions?query=workflow%3Atest-startai">
        <img class="dark-light" style="padding-right: 4px; padding-bottom: 4px;" src="https://github.com/khulnasoft/startai/actions/workflows/intelligent-tests.yml/badge.svg">
    </a>
    <a href="https://discord.gg/sXyFF8tDtm">
        <img class="dark-light" style="padding-right: 4px; padding-bottom: 4px;" src="https://img.shields.io/discord/799879767196958751?color=blue&label=%20&logo=discord&logoColor=white">
    </a>
</div>
<br clear="all" />

------------------------------------------------------------------------

# Unified AI

<div style="display: block;" align="center">
    <div>
    <a href="https://jax.readthedocs.io">
        <img class="dark-light" width="10%" src="https://raw.githubusercontent.com/khulnasoft/khulnasoft.github.io/main/img/externally_linked/logos/supported/jax_logo.png">
    </a>
    <img class="dark-light" width="5%" src="https://raw.githubusercontent.com/khulnasoft/khulnasoft.github.io/main/img/externally_linked/logos/supported/empty.png">
    <img class="dark-light" width="5%" src="https://raw.githubusercontent.com/khulnasoft/khulnasoft.github.io/main/img/externally_linked/logos/supported/empty.png">
    <a href="https://www.tensorflow.org">
        <img class="dark-light" width="10%" src="https://raw.githubusercontent.com/khulnasoft/khulnasoft.github.io/main/img/externally_linked/logos/supported/tensorflow_logo.png">
    </a>
    <img class="dark-light" width="5%" src="https://raw.githubusercontent.com/khulnasoft/khulnasoft.github.io/main/img/externally_linked/logos/supported/empty.png">
    <img class="dark-light" width="5%" src="https://raw.githubusercontent.com/khulnasoft/khulnasoft.github.io/main/img/externally_linked/logos/supported/empty.png">
    <a href="https://pytorch.org">
        <img class="dark-light" width="10%" src="https://raw.githubusercontent.com/khulnasoft/khulnasoft.github.io/main/img/externally_linked/logos/supported/pytorch_logo.png">
    </a>
    <img class="dark-light" width="5%" src="https://raw.githubusercontent.com/khulnasoft/khulnasoft.github.io/main/img/externally_linked/logos/supported/empty.png">
    <img class="dark-light" width="5%" src="https://raw.githubusercontent.com/khulnasoft/khulnasoft.github.io/main/img/externally_linked/logos/supported/empty.png">
    <a href="https://numpy.org">
        <img class="dark-light" width="10%" src="https://raw.githubusercontent.com/khulnasoft/khulnasoft.github.io/main/img/externally_linked/logos/supported/numpy_logo.png">
    </a>
    </div>
</div>

<br clear="all" />

------------------------------------------------------------------------

Startai is an open-source machine learning framework that
enables you to:

- 🔄 **Convert code into any framework**: Use and build on top of any model, library, or device by converting any code from one framework to another using `startai.transpile`.
- ⚒️ **Write framework-agnostic code**: Write your code once in `startai` and then choose the most appropriate ML framework as the backend to leverage all the benefits and tools.

[Join our growing community](https://discord.com/invite/sXyFF8tDtm) 🌍 to connect with people using Startai. **Let's** [khulnasoft.com](https://khulnasoft.com) **together 🦾**

------------------------------------------------------------------------

# Getting started

[Startai's transpiler](https://khulnasoft.com/docs/startai/overview/design/startai_as_a_transpiler.html) helps you convert code between different ML frameworks. To get pilot access to the transpiler, [sign up](https://console.khulnasoft.com/) and generate an API key. The [Get Started](https://khulnasoft.com/docs/startai/overview/get_started.html) notebook should help you set up your API key and the [Quickstart](https://khulnasoft.com/docs/startai/demos/quickstart.html) notebook should give you a brief idea of the features!

The most important notebooks are:

- [How to convert your code between frameworks?](https://khulnasoft.com/docs/startai/demos/learn_the_basics/04_transpile_code.html)
- [How to write framework-agnostic code?](https://khulnasoft.com/docs/startai/demos/learn_the_basics/01_write_startai_code.html)

Beyond that, based on the frameworks you want to convert code between, there are a few more [examples](#using-startai) further down this page 👇 which contain a number of models and libraries transpiled between PyTorch, JAX, TensorFlow and NumPy.

------------------------------------------------------------------------

## Installing startai

The easiest way to set up Startai is to install it using **pip**:

``` bash
pip install startai
```

<details>
<summary><b>Docker Images</b></summary>

Given the challenges of maintaining installations of various frameworks in a single environment,
users who would want to test `startai` with multiple frameworks at once can use our Docker images for a seamless experience.
You can pull the images from:

``` bash
docker pull khulnasoft/startai:latest      # CPU
docker pull khulnasoft/startai:latest-gpu  # GPU
```

</details>

<details>
<summary><b>From Source</b></summary>

You can also install Startai from source if you want to take advantage of
the latest changes, but we can't ensure everything will work as
expected 😅

``` bash
git clone https://github.com/khulnasoft/startai.git
cd startai
pip install --user -e .
```

If you want to set up testing and various frameworks it's probably best
to check out the [Setting Up](https://khulnasoft.com/docs/startai/overview/contributing/setting_up.html)
page, where OS-specific and IDE-specific instructions and video
tutorials to do so are available!

</details>

------------------------------------------------------------------------

## Using Startai

After installing Startai, you can start using it straight away, for example:

  <details>
   <summary><b>Transpiling any code from one framework to another</b></summary>

   ``` python
   import startai
   import torch
   import jax

   def jax_fn(x):
       a = jax.numpy.dot(x, x)
       b = jax.numpy.mean(x)
       return x * a + b

   jax_x = jax.numpy.array([1., 2., 3.])
   torch_x = torch.tensor([1., 2., 3.])
   torch_fn = startai.transpile(jax_fn, source="jax", to="torch", args=(jax_x,))
   ret = torch_fn(torch_x)
   ```

   </details>

  <details>
    <summary><b>Running your code with any backend</b></summary>

   ``` python
    import startai
    import torch
    import jax

    startai.set_backend("jax")

    x = jax.numpy.array([1, 2, 3])
    y = jax.numpy.array([3, 2, 1])
    z = startai.add(x, y)

    startai.set_backend('torch')

    x = torch.tensor([1, 2, 3])
    y = torch.tensor([3, 2, 1])
    z = startai.add(x, y)
   ```

   </details>


\
The [Examples page](https://khulnasoft.com/demos/) features a wide range of
demos and tutorials showcasing the functionalities of Startai along with
multiple use cases, but feel free to check out some shorter
framework-specific examples here ⬇️

<details>
<summary><b>I'm using PyTorch&ensp;<img class="dark-light" src="https://raw.githubusercontent.com/khulnasoft/khulnasoft.github.io/main/img/externally_linked/logos/supported/torch_small_logo.png"></b></summary>
   <blockquote>You can use Startai to get PyTorch code from:
      <details>
         <summary>Any model</summary>
         <blockquote>
            <details>
               <summary>From TensorFlow</summary>

``` python
import startai
import torch
import tensorflow as tf

# Get a pretrained keras model
eff_encoder = tf.keras.applications.efficientnet_v2.EfficientNetV2B0(
    include_top=False, weights="imagenet", input_shape=(224, 224, 3)
)

# Transpile it into a torch.nn.Module with the corresponding parameters
noise = tf.random.normal(shape=(1, 224, 224, 3))
torch_eff_encoder = startai.transpile(eff_encoder, source="tensorflow", to="torch", args=(noise,))

# Build a classifier using the transpiled encoder
class Classifier(torch.nn.Module):
    def __init__(self, num_classes=20):
        super().__init__()
        self.encoder = torch_eff_encoder
        self.fc = torch.nn.Linear(1280, num_classes)

    def forward(self, x):
        x = self.encoder(x)
        return self.fc(x)

# Initialize a trainable, customizable, torch.nn.Module
classifier = Classifier()
ret = classifier(torch.rand((1, 224, 224, 3)))
```

</details>
<details>
   <summary>From JAX</summary>

``` python
import startai
import jax
import torch

# Get a pretrained haiku model
# https://github.com/khulnasoft/demos/blob/15c235f/scripts/deepmind_perceiver_io.py
from deepmind_perceiver_io import key, perceiver_backbone

# Transpile it into a torch.nn.Module with the corresponding parameters
dummy_input = jax.random.uniform(key, shape=(1, 3, 224, 224))
params = perceiver_backbone.init(rng=key, images=dummy_input)
startai.set_backend("jax")
backbone = startai.transpile(
    perceiver_backbone, source="jax", to="torch", params_v=params, kwargs={"images": dummy_input}
)

# Build a classifier using the transpiled backbone
class PerceiverIOClassifier(torch.nn.Module):
    def __init__(self, num_classes=20):
        super().__init__()
        self.backbone = backbone
        self.max_pool = torch.nn.MaxPool2d((512, 1))
        self.flatten = torch.nn.Flatten()
        self.fc = torch.nn.Linear(1024, num_classes)

    def forward(self, x):
        x = self.backbone(images=x)
        x = self.flatten(self.max_pool(x))
        return self.fc(x)

# Initialize a trainable, customizable, torch.nn.Module
classifier = PerceiverIOClassifier()
ret = classifier(torch.rand((1, 3, 224, 224)))
```

</details>
</blockquote>
</details>

<details>
<summary>Any library</summary>
<blockquote>
<details>
   <summary>From TensorFlow</summary>

``` python
import startai
import torch
import os
os.environ["SM_FRAMEWORK"] = "tf.keras"
import segmentation_models as sm

# transpile sm from tensorflow to torch
torch_sm = startai.transpile(sm, source="tensorflow", to="torch")

# get some image-like arrays
output = torch.rand((1, 3, 512, 512))
target = torch.rand((1, 3, 512, 512))

# and use the transpiled version of any function from the library!
out = torch_sm.metrics.iou_score(output, target)
```

</details>
<details>
   <summary>From JAX</summary>

``` python
import startai
import rax
import torch

# transpile rax from jax to torch
torch_rax = startai.transpile(rax, source="jax", to="torch")

# get some arrays
scores = torch.tensor([2.2, 1.3, 5.4])
labels = torch.tensor([1.0, 0.0, 0.0])

# and use the transpiled version of any function from the library!
out = torch_rax.poly1_softmax_loss(scores, labels)
```

</details>
<details>
   <summary>From NumPy</summary>

``` python
import startai
import torch
import madmom

# transpile madmom from numpy to torch
torch_madmom = startai.transpile(madmom, source="numpy", to="torch")

# get some arrays
freqs = torch.arange(20) * 10

# and use the transpiled version of any function from the library!
out = torch_madmom.audio.filters.hz2midi(freqs)
```

</details>
</blockquote>
</details>

<details>
<summary>Any function</summary>
<blockquote>
<details>
   <summary>From TensorFlow</summary>

``` python
import startai
import tensorflow as tf
import torch

def loss(predictions, targets):
    return tf.sqrt(tf.reduce_mean(tf.square(predictions - targets)))

# transpile any function from tf to torch
torch_loss = startai.transpile(loss, source="tensorflow", to="torch")

# get some arrays
p = torch.tensor([3.0, 2.0, 1.0])
t = torch.tensor([0.0, 0.0, 0.0])

# and use the transpiled version!
out = torch_loss(p, t)
```

</details>
<details>
   <summary>From JAX</summary>

``` python
import startai
import jax.numpy as jnp
import torch

def loss(predictions, targets):
    return jnp.sqrt(jnp.mean((predictions - targets) ** 2))

# transpile any function from jax to torch
torch_loss = startai.transpile(loss, source="jax", to="torch")

# get some arrays
p = torch.tensor([3.0, 2.0, 1.0])
t = torch.tensor([0.0, 0.0, 0.0])

# and use the transpiled version!
out = torch_loss(p, t)
```

</details>
<details>
   <summary>From NumPy</summary>

``` python
import startai
import numpy as np
import torch

def loss(predictions, targets):
    return np.sqrt(np.mean((predictions - targets) ** 2))

# transpile any function from numpy to torch
torch_loss = startai.transpile(loss, source="numpy", to="torch")

# get some arrays
p = torch.tensor([3.0, 2.0, 1.0])
t = torch.tensor([0.0, 0.0, 0.0])

# and use the transpiled version!
out = torch_loss(p, t)
```

</details>
</blockquote>
</details>

</blockquote>
</details>

<details>
<summary><b>I'm using TensorFlow&ensp;<img class="dark-light" src="https://raw.githubusercontent.com/khulnasoft/khulnasoft.github.io/main/img/externally_linked/logos/supported/tf_small_logo.png"></b></summary>
<blockquote>You can use Startai to get TensorFlow code from:
<details>
<summary>Any model</summary>
<blockquote>
<details>
   <summary>From PyTorch</summary>

``` python
import startai
import torch
import timm
import tensorflow as tf

# Get a pretrained pytorch model
mlp_encoder = timm.create_model("mixer_b16_224", pretrained=True, num_classes=0)

# Transpile it into a keras.Model with the corresponding parameters
noise = torch.randn(1, 3, 224, 224)
mlp_encoder = startai.transpile(mlp_encoder, to="tensorflow", args=(noise,))

# Build a classifier using the transpiled encoder
class Classifier(tf.keras.Model):
    def __init__(self):
        super().__init__()
        self.encoder = mlp_encoder
        self.output_dense = tf.keras.layers.Dense(units=1000, activation="softmax")

    def call(self, x):
        x = self.encoder(x)
        return self.output_dense(x)

# Transform the classifier and use it as a standard keras.Model
x = tf.random.normal(shape=(1, 3, 224, 224))
model = Classifier()
ret = model(x)
```

</details>
<details>
   <summary>From JAX</summary>

``` python
import startai
import jax
import tensorflow as tf

# Get a pretrained haiku model
# https://khulnasoft.com/demos/scripts/deepmind_perceiver_io.py
from deepmind_perceiver_io import key, perceiver_backbone

# Transpile it into a tf.keras.Model with the corresponding parameters
dummy_input = jax.random.uniform(key, shape=(1, 3, 224, 224))
params = perceiver_backbone.init(rng=key, images=dummy_input)
backbone = startai.transpile(
    perceiver_backbone, to="tensorflow", params_v=params, args=(dummy_input,)
)

# Build a classifier using the transpiled backbone
class PerceiverIOClassifier(tf.keras.Model):
    def __init__(self, num_classes=20):
        super().__init__()
        self.backbone = backbone
        self.max_pool = tf.keras.layers.MaxPooling1D(pool_size=512)
        self.flatten = tf.keras.layers.Flatten()
        self.fc = tf.keras.layers.Dense(num_classes)

    def call(self, x):
        x = self.backbone(x)
        x = self.flatten(self.max_pool(x))
        return self.fc(x)

# Initialize a trainable, customizable, tf.keras.Model
x = tf.random.normal(shape=(1, 3, 224, 224))
classifier = PerceiverIOClassifier()
ret = classifier(x)
```

</details>
</blockquote>
</details>

<details>
<summary>Any library</summary>
<blockquote>
<details>
   <summary>From PyTorch</summary>

``` python
import startai
import kornia
import requests
import numpy as np
import tensorflow as tf
from PIL import Image

# transpile kornia from torch to tensorflow
tf_kornia = startai.transpile(kornia, source="torch", to="tensorflow")

# get an image
url = "http://images.cocodataset.org/train2017/000000000034.jpg"
raw_img = Image.open(requests.get(url, stream=True).raw)

# convert it to the format expected by kornia
img = np.array(raw_img)
img = tf.transpose(tf.constant(img), (2, 0, 1))
img = tf.expand_dims(img, 0) / 255

# and use the transpiled version of any function from the library!
out = tf_kornia.enhance.sharpness(img, 5)
```

</details>
<details>
   <summary>From JAX</summary>

``` python
import startai
import rax
import tensorflow as tf

# transpile rax from jax to tensorflow
tf_rax = startai.transpile(rax, source="jax", to="tensorflow")

# get some arrays
scores = tf.constant([2.2, 1.3, 5.4])
labels = tf.constant([1.0, 0.0, 0.0])

# and use the transpiled version of any function from the library!
out = tf_rax.poly1_softmax_loss(scores, labels)
```

</details>
<details>
   <summary>From NumPy</summary>

``` python
import startai
import madmom
import tensorflow as tf

# transpile madmom from numpy to tensorflow
tf_madmom = startai.transpile(madmom, source="numpy", to="tensorflow")

# get some arrays
freqs = tf.range(20) * 10

# and use the transpiled version of any function from the library!
out = tf_madmom.audio.filters.hz2midi(freqs)
```

</details>
</blockquote>
</details>

<details>
<summary>Any function</summary>
<blockquote>
<details>
   <summary>From PyTorch</summary>

``` python
import startai
import torch
import tensorflow as tf

def loss(predictions, targets):
    return torch.sqrt(torch.mean((predictions - targets) ** 2))

# transpile any function from torch to tensorflow
tf_loss = startai.transpile(loss, source="torch", to="tensorflow")

# get some arrays
p = tf.constant([3.0, 2.0, 1.0])
t = tf.constant([0.0, 0.0, 0.0])

# and use the transpiled version!
out = tf_loss(p, t)
```

</details>
<details>
   <summary>From JAX</summary>

``` python
import startai
import jax.numpy as jnp
import tensorflow as tf

def loss(predictions, targets):
    return jnp.sqrt(jnp.mean((predictions - targets) ** 2))

# transpile any function from jax to tensorflow
tf_loss = startai.transpile(loss, source="jax", to="tensorflow")

# get some arrays
p = tf.constant([3.0, 2.0, 1.0])
t = tf.constant([0.0, 0.0, 0.0])

# and use the transpiled version!
out = tf_loss(p, t)
```

</details>
<details>
   <summary>From NumPy</summary>

``` python
import startai
import numpy as np
import tensorflow as tf

def loss(predictions, targets):
    return np.sqrt(np.mean((predictions - targets) ** 2))

# transpile any function from numpy to tensorflow
tf_loss = startai.transpile(loss, source="numpy", to="tensorflow")

# get some arrays
p = tf.constant([3.0, 2.0, 1.0])
t = tf.constant([0.0, 0.0, 0.0])

# and use the transpiled version!
out = tf_loss(p, t)
```

</details>
</blockquote>
</details>

</blockquote>
</details>

<details>
<summary><b>I'm using Jax&ensp;<img class="dark-light" src="https://raw.githubusercontent.com/khulnasoft/khulnasoft.github.io/main/img/externally_linked/logos/supported/jax_small_logo.png"></b></summary>
<blockquote>You can use Startai to get JAX code from:
<details>
<summary>Any model</summary>
<blockquote>
<details>
   <summary>From PyTorch</summary>

``` python
import startai
import timm
import torch
import jax
import haiku as hk

# Get a pretrained pytorch model
mlp_encoder = timm.create_model("mixer_b16_224", pretrained=True, num_classes=0)

# Transpile it into a hk.Module with the corresponding parameters
noise = torch.randn(1, 3, 224, 224)
mlp_encoder = startai.transpile(mlp_encoder, source="torch", to="haiku", args=(noise,))

# Build a classifier using the transpiled encoder
class Classifier(hk.Module):
    def __init__(self, num_classes=1000):
        super().__init__()
        self.encoder = mlp_encoder()
        self.fc = hk.Linear(output_size=num_classes, with_bias=True)

    def __call__(self, x):
        x = self.encoder(x)
        x = self.fc(x)
        return x

def _forward_classifier(x):
    module = Classifier()
    return module(x)

# Transform the classifier and use it as a standard hk.Module
rng_key = jax.random.PRNGKey(42)
x = jax.random.uniform(key=rng_key, shape=(1, 3, 224, 224), dtype=jax.numpy.float32)
forward_classifier = hk.transform(_forward_classifier)
params = forward_classifier.init(rng=rng_key, x=x)

ret = forward_classifier.apply(params, None, x)
```

</details>
<details>
   <summary>From TensorFlow</summary>

``` python
import startai
import jax
import haiku as hk
import tensorflow as tf
jax.config.update("jax_enable_x64", True)

# Get a pretrained keras model
eff_encoder = tf.keras.applications.efficientnet_v2.EfficientNetV2B0(
    include_top=False, weights="imagenet", input_shape=(224, 224, 3)
)

# Transpile it into a hk.Module with the corresponding parameters
noise = tf.random.normal(shape=(1, 224, 224, 3))
hk_eff_encoder = startai.transpile(eff_encoder, source="tensorflow", to="haiku", args=(noise,))

# Build a classifier using the transpiled encoder
class Classifier(hk.Module):
    def __init__(self, num_classes=1000):
        super().__init__()
        self.encoder = hk_eff_encoder()
        self.fc = hk.Linear(output_size=num_classes, with_bias=True)

    def __call__(self, x):
        x = self.encoder(x)
        x = self.fc(x)
        return x

def _forward_classifier(x):
    module = Classifier()
    return module(x)

# Transform the classifier and use it as a standard hk.Module
rng_key = jax.random.PRNGKey(42)
dummy_x = jax.random.uniform(key=rng_key, shape=(1, 224, 224, 3))
forward_classifier = hk.transform(_forward_classifier)
params = forward_classifier.init(rng=rng_key, x=dummy_x)

ret = forward_classifier.apply(params, None, dummy_x)
```

</details>
</blockquote>
</details>

<details>
<summary>Any library</summary>
<blockquote>
<details>
   <summary>From PyTorch</summary>

``` python
import startai
import kornia
import requests
import jax
import jax.numpy as jnp
from PIL import Image
jax.config.update("jax_enable_x64", True)

# transpile kornia from torch to jax
jax_kornia = startai.transpile(kornia, source="torch", to="jax")

# get an image
url = "http://images.cocodataset.org/train2017/000000000034.jpg"
raw_img = Image.open(requests.get(url, stream=True).raw)

# convert it to the format expected by kornia
img = jnp.transpose(jnp.array(raw_img), (2, 0, 1))
img = jnp.expand_dims(img, 0) / 255

# and use the transpiled version of any function from the library!
out = jax_kornia.enhance.sharpness(img, 5)
```

</details>
<details>
   <summary>From TensorFlow</summary>

``` python
import startai
import jax
import os
os.environ["SM_FRAMEWORK"] = "tf.keras"
import segmentation_models as sm

# transpile sm from tensorflow to jax
jax_sm = startai.transpile(sm, source="tensorflow", to="jax")

# get some image-like arrays
key = jax.random.PRNGKey(23)
key1, key2 = jax.random.split(key)
output = jax.random.uniform(key1, (1, 3, 512, 512))
target = jax.random.uniform(key2, (1, 3, 512, 512))

# and use the transpiled version of any function from the library!
out = jax_sm.metrics.iou_score(output, target)
```

</details>
<details>
   <summary>From NumPy</summary>

``` python
import startai
import madmom
import jax.numpy as jnp

# transpile madmom from numpy to jax
jax_madmom = startai.transpile(madmom, source="numpy", to="jax")

# get some arrays
freqs = jnp.arange(20) * 10

# and use the transpiled version of any function from the library!
out = jax_madmom.audio.filters.hz2midi(freqs)
```

</details>
</blockquote>
</details>

<details>
<summary>Any function</summary>
<blockquote>
<details>
   <summary>From PyTorch</summary>

``` python
import startai
import torch
import jax.numpy as jnp

def loss(predictions, targets):
    return torch.sqrt(torch.mean((predictions - targets) ** 2))

# transpile any function from torch to jax
jax_loss = startai.transpile(loss, source="torch", to="jax")

# get some arrays
p = jnp.array([3.0, 2.0, 1.0])
t = jnp.array([0.0, 0.0, 0.0])

# and use the transpiled version!
out = jax_loss(p, t)
```

</details>
<details>
   <summary>From TensorFlow</summary>

``` python
import startai
import tensorflow as tf
import jax.numpy as jnp

def loss(predictions, targets):
    return tf.sqrt(tf.reduce_mean(tf.square(predictions - targets)))

# transpile any function from tf to jax
jax_loss = startai.transpile(loss, source="tensorflow", to="jax")

# get some arrays
p = jnp.array([3.0, 2.0, 1.0])
t = jnp.array([0.0, 0.0, 0.0])

# and use the transpiled version!
out = jax_loss(p, t)
```

</details>
<details>
   <summary>From NumPy</summary>

``` python
import startai
import numpy as np
import jax
import jax.numpy as jnp
jax.config.update('jax_enable_x64', True)

def loss(predictions, targets):
    return np.sqrt(np.mean((predictions - targets) ** 2))

# transpile any function from numpy to jax
jax_loss = startai.transpile(loss, source="numpy", to="jax")

# get some arrays
p = jnp.array([3.0, 2.0, 1.0])
t = jnp.array([0.0, 0.0, 0.0])

# and use the transpiled version!
out = jax_loss(p, t)
```

</details>
</blockquote>
</details>

</blockquote>
</details>

<details>
<summary><b>I'm using NumPy&ensp;<img class="dark-light" src="https://raw.githubusercontent.com/khulnasoft/khulnasoft.github.io/main/img/externally_linked/logos/supported/numpy_small_logo.png"></b></summary>
<blockquote>You can use Startai to get NumPy code from:
<details>
<summary>Any library</summary>
<blockquote>
<details>
   <summary>From PyTorch</summary>

``` python
import startai
import kornia
import requests
import numpy as np
from PIL import Image

# transpile kornia from torch to np
np_kornia = startai.transpile(kornia, source="torch", to="numpy")

# get an image
url = "http://images.cocodataset.org/train2017/000000000034.jpg"
raw_img = Image.open(requests.get(url, stream=True).raw)

# convert it to the format expected by kornia
img = np.transpose(np.array(raw_img), (2, 0, 1))
img = np.expand_dims(img, 0) / 255

# and use the transpiled version of any function from the library!
out = np_kornia.enhance.sharpness(img, 5)
```

</details>
<details>
   <summary>From TensorFlow</summary>

``` python
import startai
import numpy as np
import os
os.environ["SM_FRAMEWORK"] = "tf.keras"
import segmentation_models as sm

# transpile sm from tensorflow to numpy
np_sm = startai.transpile(sm, source="tensorflow", to="numpy")

# get some image-like arrays
output = np.random.rand(1, 3, 512, 512).astype(dtype=np.float32)
target = np.random.rand(1, 3, 512, 512).astype(dtype=np.float32)

# and use the transpiled version of any function from the library!
out = np_sm.metrics.iou_score(output, target)
```

</details>
<details>
   <summary>From JAX</summary>

``` python
import startai
import rax
import numpy as np

# transpile rax from jax to numpy
np_rax = startai.transpile(rax, source="jax", to="numpy")

# get some arrays
scores = np.array([2.2, 1.3, 5.4])
labels = np.array([1.0, 0.0, 0.0])

# and use the transpiled version of any function from the library!
out = np_rax.poly1_softmax_loss(scores, labels)
```

</details>
</blockquote>
</details>

<details>
<summary>Any function</summary>
<blockquote>
<details>
   <summary>From PyTorch</summary>

``` python
import startai
import torch
import numpy as np

def loss(predictions, targets):
    return torch.sqrt(torch.mean((predictions - targets) ** 2))

# transpile any function from torch to numpy
np_loss = startai.transpile(loss, source="torch", to="numpy")

# get some arrays
p = np.array([3.0, 2.0, 1.0])
t = np.array([0.0, 0.0, 0.0])

# and use the transpiled version!
out = np_loss(p, t)
```

</details>
<details>
   <summary>From TensorFlow</summary>

``` python
import startai
import tensorflow as tf
import numpy as np

def loss(predictions, targets):
    return tf.sqrt(tf.reduce_mean(tf.square(predictions - targets)))

# transpile any function from tf to numpy
np_loss = startai.transpile(loss, source="tensorflow", to="numpy")

# get some arrays
p = np.array([3.0, 2.0, 1.0])
t = np.array([0.0, 0.0, 0.0])

# and use the transpiled version!
out = np_loss(p, t)
```

</details>
<details>
   <summary>From JAX</summary>

``` python
import startai
import jax.numpy as jnp
import numpy as np

def loss(predictions, targets):
    return jnp.sqrt(jnp.mean((predictions - targets) ** 2))

# transpile any function from jax to numpy
np_loss = startai.transpile(loss, source="jax", to="numpy")

# get some arrays
p = np.array([3.0, 2.0, 1.0])
t = np.array([0.0, 0.0, 0.0])

# and use the transpiled version!
out = np_loss(p, t)
```

</details>
</blockquote>
</details>

</blockquote>
</details>

<details>
<summary>
<b>I'm using Startai&ensp;<img height="25px" width="25px" class="dark-light" src="https://raw.githubusercontent.com/khulnasoft/khulnasoft.github.io/main/img/logos/startai_logo_only.svg"></b>
</summary>
Or you can use Startai as a framework, breaking yourself (and your code)
free from deciding which community to support, allowing anyone to run
your code in their framework of choice!

``` python
import startai

# A simple image classification model
class StartaiNet(startai.Module):
    def __init__(
        self,
        h_w=(32, 32),
        input_channels=3,
        output_channels=512,
        num_classes=2,
        data_format="NCHW",
        device="cpu",
    ):
        self.h_w = h_w
        self.input_channels = input_channels
        self.output_channels = output_channels
        self.num_classes = num_classes
        self.data_format = data_format
        super().__init__(device=device)

    def _build(self, *args, **kwargs):
        self.extractor = startai.Sequential(
            startai.Conv2D(self.input_channels, 6, [5, 5], 1, "SAME", data_format=self.data_format),
            startai.GELU(),
            startai.Conv2D(6, 16, [5, 5], 1, "SAME", data_format=self.data_format),
            startai.GELU(),
            startai.Conv2D(16, self.output_channels, [5, 5], 1, "SAME", data_format=self.data_format),
            startai.GELU(),
        )

        self.classifier = startai.Sequential(
            # Since the padding is "SAME", this would be image_height x image_width x output_channels
            startai.Linear(self.h_w[0] * self.h_w[1] * self.output_channels, 512),
            startai.GELU(),
            startai.Linear(512, self.num_classes),
        )

    def _forward(self, x):
        x = self.extractor(x)
        # flatten all dims except batch dim
        x = startai.flatten(x, start_dim=1, end_dim=-1)
        logits = self.classifier(x)
        probs = startai.softmax(logits)
        return logits, probs
```

After building your model in Startai, you can set your favourite framework
as the backend to use its operations under the hood!

``` python
startai.set_backend("torch")
model = StartaiNet()
x = torch.randn(1, 3, 32, 32)
logits, probs = model(x)
```

``` python
startai.set_backend("tensorflow")
model = StartaiNet()
x = tf.random.uniform(shape=(1, 3, 32, 32))
logits, probs = model(x)
```

``` python
startai.set_backend("jax")
model = StartaiNet()
x = jax.random.uniform(key, shape=(1, 3, 32, 32))
logits, probs = model(x)
```

``` python
startai.set_backend("numpy")
model = StartaiNet()
x = np.random.uniform(size=(1, 3, 32, 32))
logits, probs = model(x)
```

Last but not least, we can also build the training pipeline in pure Startai
⬇️

<details>
<summary><a>Let's define some helper functions first</a></summary>

``` python
# helper function for loading the dataset in batches
def generate_batches(images, classes, dataset_size, batch_size=32):
    if batch_size > dataset_size:
        raise startai.utils.exceptions.StartaiError("Use a smaller batch size")
    for idx in range(0, dataset_size, batch_size):
        yield images[idx : min(idx + batch_size, dataset_size)], classes[
            idx : min(idx + batch_size, dataset_size)
        ]


# helper function to get the number of correct predictions
def num_correct(preds, labels):
    return (preds.argmax() == labels).sum().to_numpy().item()


# define a loss function
def loss_fn(params):
    v, model, x, y = params
    _, probs = model(x, v=v)
    return startai.cross_entropy(y, probs), probs
```

</details>

<details>
<summary><a>And train this model!</a></summary>

``` python
# train the model on gpu if it's available
device = "gpu:0" if startai.gpu_is_available() else "cpu"

# training hyperparams
optimizer = startai.Adam(1e-4)
batch_size = 4
num_epochs = 20
num_classes = 10

model = StartaiNet(
    h_w=(28, 28),
    input_channels=1,
    output_channels=120,
    num_classes=num_classes,
    device=device,
)

images = startai.random_uniform(shape=(16, 1, 28, 28))
classes = startai.randint(0, num_classes - 1, shape=(16,))


# training loop
def train(images, classes, epochs, model, device, num_classes=10, batch_size=32):
    # training metrics
    epoch_loss = 0.0
    metrics = []
    dataset_size = len(images)

    for epoch in range(epochs):
        train_correct = 0
        train_loop = tqdm(
            generate_batches(images, classes, len(images), batch_size=batch_size),
            total=dataset_size // batch_size,
            position=0,
            leave=True,
        )

        for xbatch, ybatch in train_loop:
            xbatch, ybatch = xbatch.to_device(device), ybatch.to_device(device)

            # the cross entropy function expects the target classes to be in one-hot encoded format
            ybatch_encoded = startai.one_hot(ybatch, num_classes)

            # update model params
            loss_probs, grads = startai.execute_with_gradients(
                loss_fn,
                (model.v, model, xbatch, ybatch_encoded),
            )

            model.v = optimizer.step(model.v, grads["0"])

            batch_loss = startai.to_numpy(loss_probs[0]).mean().item()  # batch mean loss
            epoch_loss += batch_loss * xbatch.shape[0]
            train_correct += num_correct(loss_probs[1], ybatch)

            train_loop.set_description(f"Epoch [{epoch + 1:2d}/{epochs}]")
            train_loop.set_postfix(
                running_loss=batch_loss,
                accuracy_percentage=(train_correct / dataset_size) * 100,
            )

        epoch_loss = epoch_loss / dataset_size
        training_accuracy = train_correct / dataset_size

        metrics.append([epoch, epoch_loss, training_accuracy])

        train_loop.write(
            f"\nAverage training loss: {epoch_loss:.6f}, Train Correct: {train_correct}",
            end="\n",
        )


# assuming the dataset (images and classes) is already prepared in a folder
train(
    images,
    classes,
    num_epochs,
    model,
    device,
    num_classes=num_classes,
    batch_size=batch_size,
)
```

</details>
</details>


\
For a more comprehensive overview, head over to the [Demos](https://khulnasoft.com/docs/startai/demos/index.html) section with more on the [basics](https://khulnasoft.com/docs/startai/demos/learn_the_basics.html), a few [guides](https://khulnasoft.com/docs/startai/demos/guides.html) and a wide-ranging set of [examples](https://khulnasoft.com/docs/startai/demos/examples_and_demos.html) that demonstrate the transpilation of various popular models. We continue to expand on that list, let us know what demos you'd like us to add next 🎯


Let's take a look at how Startai works both as a transpiler and a framework in a bit more detail to get an idea of why and where to use it.

<details>
<summary><b>Startai as a transpiler</b></summary>

<blockquote>
<details>
<summary>When should I use Startai as a transpiler?</summary>

If you want to use building blocks published in other frameworks (neural
networks, layers, array computing libraries, training pipelines\...),
you want to integrate code developed in various frameworks, or maybe
straight up move code from one framework to another, the transpiler is
definitely the tool 🔧 for the job! As the output of transpilation is
native code in the target framework, you can use the converted code just
as if it was code originally developed in that framework, applying
framework-specific optimizations or tools, instantly exposing your
project to all of the unique perks of a different framework.
</details>
</blockquote>

Startai\'s transpiler allows you to use code from any other framework (or
from any other version of the same framework!) in your own code, by just
adding one line of code. Under the hood, Startai traces a computational
graph and leverages the frontends and backends to link one framework to
another.

This way, Startai makes all ML-related projects available for you,
independently of the framework you want to use to research, develop, or
deploy systems. Feel free to head over to the docs for the full API
reference, but the functions you\'d most likely want to use are:

``` python
# Traces an efficient fully-functional graph from a function, removing all wrapping and redundant code
startai.trace_graph()

# Converts framework-specific code to a different framework
startai.transpile()

# Converts framework-specific code to Startai
startai.unify()
```

These functions can be used eagerly or lazily. If you pass the necessary
arguments for function tracing, the graph tracing/transpilation step will
happen instantly (eagerly). Otherwise, the graph tracing/transpilation
will happen only when the returned function is first invoked.

``` python
import startai
import jax
startai.set_backend("jax")

# Simple JAX function to transpile
def test_fn(x):
    return jax.numpy.sum(x)

x1 = startai.array([1., 2.])
```

``` python
# Arguments are available -> transpilation happens eagerly
eager_graph = startai.transpile(test_fn, source="jax", to="torch", args=(x1,))

# eager_graph is now torch code and runs efficiently
ret = eager_graph(x1)
```

``` python
# Arguments are not available -> transpilation happens lazily
lazy_graph = startai.transpile(test_fn, source="jax", to="torch")

# The transpiled graph is initialized, transpilation will happen here
ret = lazy_graph(x1)

# lazy_graph is now torch code and runs efficiently
ret = lazy_graph(x1)
```

If you want to learn more, you can find more information in the [Startai as
a transpiler section of the
docs!](https://khulnasoft.com/docs/startai/overview/design/startai_as_a_transpiler.html)

</details>

<details>
<summary><b>Startai as a framework</b></summary>

<blockquote>
<details>
<summary>When should I use Startai as a framework?</summary>

As Startai supports multiple backends, writing code in Startai breaks you free
from framework limitations. If you want to publish highly flexible code
for everyone to use, independently of the framework they are using, or
you plan to develop ML-related tools and want them to be interoperable
with not only the already existing frameworks, but also with future
frameworks, then Startai is for you!

</details>
</blockquote>

The Startai framework is built on top of various essential components,
mainly the [Backend
Handler](https://khulnasoft.com/docs/startai/overview/design/building_blocks.html#backend-handler),
which manages what framework is being used behind the scenes and the
[Backend Functional
APIs](https://khulnasoft.com/docs/startai/overview/design/building_blocks.html#backend-functional-apis),
which provide framework-specific implementations of the Startai functions.
Likewise, classes such as `startai.Container` or `startai.Array` are also
available, facilitating the use of structured data and array-like
objects (learn more about them
[here!](https://khulnasoft.com/docs/startai/overview/design/startai_as_a_framework.html)).

All of the functionalities in Startai are exposed through the
`Startai functional API` and the `Startai stateful API`. All functions in the
[Functional
API](https://khulnasoft.com/docs/startai/overview/design/building_blocks.html#startai-functional-api)
are **Framework Agnostic Functions**, which means that we can use them
like this:

``` python
import startai
import jax.numpy as jnp
import tensorflow as tf
import numpy as np
import torch

def mse_loss(y, target):
    return startai.mean((y - target)**2)

jax_mse   = mse_loss(jnp.ones((5,)), jnp.ones((5,)))
tf_mse    = mse_loss(tf.ones((5,)), tf.ones((5,)))
np_mse    = mse_loss(np.ones((5,)), np.ones((5,)))
torch_mse = mse_loss(torch.ones((5,)), torch.ones((5,)))
```

In the example above we show how Startai\'s functions are compatible with
tensors from different frameworks. This is the same for ALL Startai
functions. They can accept tensors from any framework and return the
correct result.

The [Startai Stateful
API](https://khulnasoft.com/docs/startai/overview/design/startai_as_a_framework/startai_stateful_api.html),
on the other hand, allows you to define trainable modules and layers,
which you can use alone or as a part of any other framework code!

``` python
import startai


class Regressor(startai.Module):
    def __init__(self, input_dim, output_dim):
        self.input_dim = input_dim
        self.output_dim = output_dim
        super().__init__()

    def _build(self, *args, **kwargs):
        self.linear0 = startai.Linear(self.input_dim, 128)
        self.linear1 = startai.Linear(128, self.output_dim)

    def _forward(self, x):
        x = self.linear0(x)
        x = startai.functional.relu(x)
        x = self.linear1(x)
        return x
```

If we put it all together, we\'ll have something like this. This example
uses PyTorch as the backend, but this can easily be changed to your
favorite frameworks, such as TensorFlow, or JAX.

``` python
import startai


class Regressor(startai.Module):
    def __init__(self, input_dim, output_dim):
        self.input_dim = input_dim
        self.output_dim = output_dim
        super().__init__()

    def _build(self, *args, **kwargs):
        self.linear0 = startai.Linear(self.input_dim, 128)
        self.linear1 = startai.Linear(128, self.output_dim)

    def _forward(self, x):
        x = self.linear0(x)
        x = startai.functional.relu(x)
        x = self.linear1(x)
        return x

startai.set_backend('torch')  # set backend to PyTorch (or any other backend!)

model = Regressor(input_dim=1, output_dim=1)
optimizer = startai.Adam(0.3)

n_training_examples = 2000
noise = startai.random.random_normal(shape=(n_training_examples, 1), mean=0, std=0.1)
x = startai.linspace(-6, 3, n_training_examples).reshape((n_training_examples, 1))
y = 0.2 * x ** 2 + 0.5 * x + 0.1 + noise


def loss_fn(v, x, target):
    pred = model(x, v=v)
    return startai.mean((pred - target) ** 2)

for epoch in range(40):
    # forward pass
    pred = model(x)

    # compute loss and gradients
    loss, grads = startai.execute_with_gradients(lambda params: loss_fn(*params), (model.v, x, y))

    # update parameters
    model.v = optimizer.step(model.v, grads)

    # print current loss
    print(f'Epoch: {epoch + 1:2d} --- Loss: {startai.to_numpy(loss).item():.5f}')

print('Finished training!')
```

The model\'s output can be visualized as follows:

<div align="center">
   <img width="50%" class="dark-light" src="https://raw.githubusercontent.com/khulnasoft/khulnasoft.github.io/main/img/regressor_lq.gif">
</div>

As always, you can find more information about [Startai as a framework in
the
docs!](https://khulnasoft.com/docs/startai/overview/design/startai_as_a_framework.html)

</details>

------------------------------------------------------------------------

# Documentation

You can find Startai's documentation on the [Docs page](https://khulnasoft.com/docs/startai/), which includes:
- [Motivation](https://khulnasoft.com/docs/startai/overview/background.html): This contextualizes the problem Startai is trying to solve by going over
    - The current [ML Explosion](https://khulnasoft.com/docs/startai/overview/background/ml_explosion.html#ml-explosion).
    - Explaining why it is important [to solve this problem](https://khulnasoft.com/docs/startai/overview/background/why_unify.html#why-unify).
    - Explaining how we adhere to existing [standards](https://khulnasoft.com/docs/startai/overview/background/standardization.html#standardization) to make this happen.
- [Related Work](https://khulnasoft.com/docs/startai/overview/related_work.html): Which paints a picture of the role Startai plays in the ML stack, comparing it to other existing solutions in terms of functionalities and abstraction level.
- [Design](https://khulnasoft.com/docs/startai/overview/design.html): A user-focused guide about the design decisions behind the architecture and the main building blocks of Startai.
- [Deep Dive](https://khulnasoft.com/docs/startai/overview/deep_dive.html): Which delves deeper into the implementation details of Startai and is oriented towards potential contributors to the code base.

------------------------------------------------------------------------

# Contributing


We believe that everyone can contribute and make a difference. Whether
it\'s writing code 💻, fixing bugs 🐛, or simply sharing feedback 💬,
your contributions are definitely welcome and appreciated 🙌

Check out all of our [Open Tasks](https://khulnasoft.com/docs/startai/overview/contributing/open_tasks.html),
and find out more info in our [Contributing guide](https://khulnasoft.com/docs/startai/overview/contributing.html)
in the docs!

Join our amazing community as a [contributor](https://khulnasoft.com/docs/startai/overview/contributing/volunteer_program.html), and help accelerate our journey to unify all ML frameworks!

<a href="https://github.com/khulnasoft/startai/graphs/contributors">
  <img class="dark-light" src="https://contrib.rocks/image?repo=khulnasoft/startai&anon=0&columns=20&max=100&r=true" />
</a>

------------------------------------------------------------------------

# Community


In order to achieve the ambitious goal of unifying AI, we definitely need
as many hands as possible on it! Whether you are a seasoned developer or
just starting out, you\'ll find a place here! Join the Startai community on
our [Discord](https://discord.gg/sXyFF8tDtm) 👾 server, which is the
perfect place to ask questions, share ideas, and get help from both
fellow developers and the Startai Team directly!

Also! Feel free to follow us on
[Twitter](https://twitter.com/letskhulnasoft) 🐦 as well, we use it to
share updates, sneak peeks, and all sorts of relevant news, certainly a
great way to stay in the loop 😄

Can\'t wait to see you there!

------------------------------------------------------------------------

# Citation

If you use Startai for your work, please don\'t forget to give proper credit
by including the accompanying [paper](https://arxiv.org/abs/2102.02886)
📄 in your references. It\'s a small way to show appreciation and helps
us continue to support this and other open source projects 🙌


    @article{lenton2021startai,
      title={Startai: Templated deep learning for inter-framework portability},
      author={Lenton, Daniel and Pardo, Fabio and Falck, Fabian and James, Stephen and Clark, Ronald},
      journal={arXiv preprint arXiv:2102.02886},
      year={2021}
    }

            

Raw data

            {
    "_id": null,
    "home_page": "https://khulnasoft.com/startai",
    "name": "startai",
    "maintainer": null,
    "docs_url": null,
    "requires_python": null,
    "maintainer_email": null,
    "keywords": null,
    "author": "Unify",
    "author_email": "hello@khulnasoft.com",
    "download_url": null,
    "platform": null,
    "description": "> **[Sign up on our console](https://console.khulnasoft.com/)** for pilot access!\n\n\n\n<img class=\"only-light\" width=\"100%\" src=\"https://raw.githubusercontent.com/khulnasoft/khulnasoft.github.io/main/img/externally_linked/logo.png?raw=true#gh-light-mode-only\"/>\n\n------------------------------------------------------------------------\n\n<div style=\"display: block;\" align=\"center\">\n<a href=\"https://khulnasoft.com/startai\">\n    <img class=\"dark-light\" src=\"https://raw.githubusercontent.com/khulnasoft/khulnasoft.github.io/main/img/externally_linked/website_button.svg\">\n</a>\n<img class=\"dark-light\" width=\"5%\" src=\"https://raw.githubusercontent.com/khulnasoft/khulnasoft.github.io/main/img/externally_linked/logos/supported/empty.png\">\n<a href=\"https://khulnasoft.com/docs/startai\">\n    <img class=\"dark-light\" src=\"https://raw.githubusercontent.com/khulnasoft/khulnasoft.github.io/main/img/externally_linked/docs_button.svg\">\n</a>\n<img class=\"dark-light\" width=\"5%\" src=\"https://raw.githubusercontent.com/khulnasoft/khulnasoft.github.io/main/img/externally_linked/logos/supported/empty.png\">\n<a href=\"https://khulnasoft.com/demos\">\n    <img class=\"dark-light\" src=\"https://raw.githubusercontent.com/khulnasoft/khulnasoft.github.io/main/img/externally_linked/examples_button.svg\">\n</a>\n<img class=\"dark-light\" width=\"5%\" src=\"https://raw.githubusercontent.com/khulnasoft/khulnasoft.github.io/main/img/externally_linked/logos/supported/empty.png\">\n<a href=\"https://khulnasoft.com/docs/startai/overview/design.html\">\n    <img class=\"dark-light\" src=\"https://raw.githubusercontent.com/khulnasoft/khulnasoft.github.io/main/img/externally_linked/design_button.svg\">\n</a>\n<img class=\"dark-light\" width=\"5%\" src=\"https://raw.githubusercontent.com/khulnasoft/khulnasoft.github.io/main/img/externally_linked/logos/supported/empty.png\">\n<a href=\"https://khulnasoft.com/docs/startai/overview/faq.html\">\n   
 <img class=\"dark-light\" src=\"https://raw.githubusercontent.com/khulnasoft/khulnasoft.github.io/main/img/externally_linked/faq_button.svg\">\n</a>\n</div>\n\n------------------------------------------------------------------------\n\n# Status\n\n<div>\n    <a href=\"https://github.com/khulnasoft/startai/issues\">\n        <img class=\"dark-light\" style=\"padding-right: 4px; padding-bottom: 4px;\" src=\"https://img.shields.io/github/issues/khulnasoft/startai\">\n    </a>\n    <a href=\"https://github.com/khulnasoft/startai/network/members\">\n        <img class=\"dark-light\" style=\"padding-right: 4px; padding-bottom: 4px;\" src=\"https://img.shields.io/github/forks/khulnasoft/startai\">\n    </a>\n    <a href=\"https://github.com/khulnasoft/startai/stargazers\">\n        <img class=\"dark-light\" style=\"padding-right: 4px; padding-bottom: 4px;\" src=\"https://img.shields.io/github/stars/khulnasoft/startai\">\n    </a>\n    <a href=\"https://github.com/khulnasoft/startai/pulls\">\n        <img class=\"dark-light\" style=\"padding-right: 4px; padding-bottom: 4px;\" src=\"https://img.shields.io/badge/PRs-welcome-brightgreen.svg\">\n    </a>\n    <a href=\"https://pypi.org/project/startai\">\n        <img class=\"dark-light\" style=\"padding-right: 4px; padding-bottom: 4px;\" src=\"https://badge.fury.io/py/startai.svg\">\n    </a>\n    <a href=\"https://github.com/khulnasoft/startai/actions?query=workflow%3Adocs\">\n        <img class=\"dark-light\" style=\"padding-right: 4px; padding-bottom: 4px;\" src=\"https://github.com/khulnasoft/startai/actions/workflows/docs.yml/badge.svg\">\n    </a>\n    <a href=\"https://github.com/khulnasoft/startai/actions?query=workflow%3Atest-startai\">\n        <img class=\"dark-light\" style=\"padding-right: 4px; padding-bottom: 4px;\" src=\"https://github.com/khulnasoft/startai/actions/workflows/intelligent-tests.yml/badge.svg\">\n    </a>\n    <a href=\"https://discord.gg/sXyFF8tDtm\">\n        <img class=\"dark-light\" 
style=\"padding-right: 4px; padding-bottom: 4px;\" src=\"https://img.shields.io/discord/799879767196958751?color=blue&label=%20&logo=discord&logoColor=white\">\n    </a>\n</div>\n<br clear=\"all\" />\n\n------------------------------------------------------------------------\n\n# Unified AI\n\n<div style=\"display: block;\" align=\"center\">\n    <div>\n    <a href=\"https://jax.readthedocs.io\">\n        <img class=\"dark-light\" width=\"10%\" src=\"https://raw.githubusercontent.com/khulnasoft/khulnasoft.github.io/main/img/externally_linked/logos/supported/jax_logo.png\">\n    </a>\n    <img class=\"dark-light\" width=\"5%\" src=\"https://raw.githubusercontent.com/khulnasoft/khulnasoft.github.io/main/img/externally_linked/logos/supported/empty.png\">\n    <img class=\"dark-light\" width=\"5%\" src=\"https://raw.githubusercontent.com/khulnasoft/khulnasoft.github.io/main/img/externally_linked/logos/supported/empty.png\">\n    <a href=\"https://www.tensorflow.org\">\n        <img class=\"dark-light\" width=\"10%\" src=\"https://raw.githubusercontent.com/khulnasoft/khulnasoft.github.io/main/img/externally_linked/logos/supported/tensorflow_logo.png\">\n    </a>\n    <img class=\"dark-light\" width=\"5%\" src=\"https://raw.githubusercontent.com/khulnasoft/khulnasoft.github.io/main/img/externally_linked/logos/supported/empty.png\">\n    <img class=\"dark-light\" width=\"5%\" src=\"https://raw.githubusercontent.com/khulnasoft/khulnasoft.github.io/main/img/externally_linked/logos/supported/empty.png\">\n    <a href=\"https://pytorch.org\">\n        <img class=\"dark-light\" width=\"10%\" src=\"https://raw.githubusercontent.com/khulnasoft/khulnasoft.github.io/main/img/externally_linked/logos/supported/pytorch_logo.png\">\n    </a>\n    <img class=\"dark-light\" width=\"5%\" src=\"https://raw.githubusercontent.com/khulnasoft/khulnasoft.github.io/main/img/externally_linked/logos/supported/empty.png\">\n    <img class=\"dark-light\" width=\"5%\" 
src=\"https://raw.githubusercontent.com/khulnasoft/khulnasoft.github.io/main/img/externally_linked/logos/supported/empty.png\">\n    <a href=\"https://numpy.org\">\n        <img class=\"dark-light\" width=\"10%\" src=\"https://raw.githubusercontent.com/khulnasoft/khulnasoft.github.io/main/img/externally_linked/logos/supported/numpy_logo.png\">\n    </a>\n    </div>\n</div>\n\n<br clear=\"all\" />\n\n------------------------------------------------------------------------\n\nStartai is an open-source machine learning framework that\nenables you to:\n\n- \ud83d\udd04 **Convert code into any framework**: Use and build on top of any model, library, or device by converting any code from one framework to another using `startai.transpile`.\n- \u2692\ufe0f **Write framework-agnostic code**: Write your code once in `startai` and then choose the most appropriate ML framework as the backend to leverage all the benefits and tools.\n\n[Join our growing community](https://discord.com/invite/sXyFF8tDtm) \ud83c\udf0d to connect with people using Startai. **Let\\'s** [khulnasoft.com](https://khulnasoft.com) **together \ud83e\uddbe**\n\n------------------------------------------------------------------------\n\n# Getting started\n\n[Startai's transpiler](https://khulnasoft.com/docs/startai/overview/design/startai_as_a_transpiler.html) helps you convert code between different ML frameworks. To get pilot access to the transpiler, [sign up](https://console.khulnasoft.com/) and generate an API key. 
The [Get Started](https://khulnasoft.com/docs/startai/overview/get_started.html) notebook should help you set up your API key and the [Quickstart](https://khulnasoft.com/docs/startai/demos/quickstart.html) notebook should give you a brief idea of the features!\n\nThe most important notebooks are:\n\n- [How to convert your code between frameworks?](https://khulnasoft.com/docs/startai/demos/learn_the_basics/04_transpile_code.html)\n- [How to write framework-agnostic code?](https://khulnasoft.com/docs/startai/demos/learn_the_basics/01_write_startai_code.html)\n\nBeyond that, based on the frameworks you want to convert code between, there are a few more [examples](#using-startai) further down this page \ud83d\udc47 which contain a number of models and libraries transpiled between PyTorch, JAX, TensorFlow and NumPy.\n\n------------------------------------------------------------------------\n\n## Installing startai\n\nThe easiest way to set up Startai is to install it using **pip**:\n\n``` bash\npip install startai\n```\n\n<details>\n<summary><b>Docker Images</b></summary>\n\nGiven the challenges of maintaining installations of various frameworks in a single environment,\nusers who would want to test `startai` with multiple frameworks at once can use our Docker images for a seamless experience.\nYou can pull the images from:\n\n``` bash\ndocker pull khulnasoft/startai:latest      # CPU\ndocker pull khulnasoft/startai:latest-gpu  # GPU\n```\n\n</details>\n\n<details>\n<summary><b>From Source</b></summary>\n\nYou can also install Startai from source if you want to take advantage of\nthe latest changes, but we can\\'t ensure everything will work as\nexpected \ud83d\ude05\n\n``` bash\ngit clone https://github.com/khulnasoft/startai.git\ncd startai\npip install --user -e .\n```\n\nIf you want to set up testing and various frameworks it\\'s probably     best\nto check out the [Setting Up](https://khulnasoft.com/docs/startai/overview/contributing/setting_up.html)\npage, where 
OS-specific and IDE-specific instructions and video\ntutorials to do so are available!\n\n</details>\n\n------------------------------------------------------------------------\n\n## Using Startai\n\nAfter installing Startai, you can start using it straight away, for example:\n\n  <details>\n   <summary><b>Transpiling any code from one framework to another</b></summary>\n\n   ``` python\n   import startai\n   import torch\n   import jax\n\n   def jax_fn(x):\n       a = jax.numpy.dot(x, x)\n       b = jax.numpy.mean(x)\n       return x * a + b\n\n   jax_x = jax.numpy.array([1., 2., 3.])\n   torch_x = torch.tensor([1., 2., 3.])\n   torch_fn = startai.transpile(jax_fn, source=\"jax\", to=\"torch\", args=(jax_x,))\n   ret = torch_fn(torch_x)\n   ```\n\n   </details>\n\n  <details>\n    <summary><b>Running your code with any backend</b></summary>\n\n   ``` python\n    import startai\n    import torch\n    import jax\n\n    startai.set_backend(\"jax\")\n\n    x = jax.numpy.array([1, 2, 3])\n    y = jax.numpy.array([3, 2, 1])\n    z = startai.add(x, y)\n\n    startai.set_backend('torch')\n\n    x = torch.tensor([1, 2, 3])\n    y = torch.tensor([3, 2, 1])\n    z = startai.add(x, y)\n   ```\n\n   </details>\n\n\n\\\nThe [Examples page](https://khulnasoft.com/demos/) features a wide range of\ndemos and tutorials showcasing the functionalities of Startai along with\nmultiple use cases, but feel free to check out some shorter\nframework-specific examples here \u2b07\ufe0f\n\n<details>\n<summary><b>I'm using PyTorch&ensp;<img class=\"dark-light\" src=\"https://raw.githubusercontent.com/khulnasoft/khulnasoft.github.io/main/img/externally_linked/logos/supported/torch_small_logo.png\"></b></summary>\n   <blockquote>You can use Startai to get PyTorch code from:\n      <details>\n         <summary>Any model</summary>\n         <blockquote>\n            <details>\n               <summary>From TensorFlow</summary>\n\n``` python\nimport startai\nimport torch\nimport tensorflow as 
tf\n\n# Get a pretrained keras model\neff_encoder = tf.keras.applications.efficientnet_v2.EfficientNetV2B0(\n    include_top=False, weights=\"imagenet\", input_shape=(224, 224, 3)\n)\n\n# Transpile it into a torch.nn.Module with the corresponding parameters\nnoise = tf.random.normal(shape=(1, 224, 224, 3))\ntorch_eff_encoder = startai.transpile(eff_encoder, source=\"tensorflow\", to=\"torch\", args=(noise,))\n\n# Build a classifier using the transpiled encoder\nclass Classifier(torch.nn.Module):\n    def __init__(self, num_classes=20):\n        super().__init__()\n        self.encoder = torch_eff_encoder\n        self.fc = torch.nn.Linear(1280, num_classes)\n\n    def forward(self, x):\n        x = self.encoder(x)\n        return self.fc(x)\n\n# Initialize a trainable, customizable, torch.nn.Module\nclassifier = Classifier()\nret = classifier(torch.rand((1, 244, 244, 3)))\n```\n\n</details>\n<details>\n   <summary>From JAX</summary>\n\n``` python\nimport startai\nimport jax\nimport torch\n\n# Get a pretrained haiku model\n# https://github.com/khulnasoft/demos/blob/15c235f/scripts/deepmind_perceiver_io.py\nfrom deepmind_perceiver_io import key, perceiver_backbone\n\n# Transpile it into a torch.nn.Module with the corresponding parameters\ndummy_input = jax.random.uniform(key, shape=(1, 3, 224, 224))\nparams = perceiver_backbone.init(rng=key, images=dummy_input)\nstartai.set_backend(\"jax\")\nbackbone = startai.transpile(\n    perceiver_backbone, source=\"jax\", to=\"torch\", params_v=params, kwargs={\"images\": dummy_input}\n)\n\n# Build a classifier using the transpiled backbone\nclass PerceiverIOClassifier(torch.nn.Module):\n    def __init__(self, num_classes=20):\n        super().__init__()\n        self.backbone = backbone\n        self.max_pool = torch.nn.MaxPool2d((512, 1))\n        self.flatten = torch.nn.Flatten()\n        self.fc = torch.nn.Linear(1024, num_classes)\n\n    def forward(self, x):\n        x = self.backbone(images=x)\n        x = 
self.flatten(self.max_pool(x))\n        return self.fc(x)\n\n# Initialize a trainable, customizable, torch.nn.Module\nclassifier = PerceiverIOClassifier()\nret = classifier(torch.rand((1, 3, 224, 224)))\n```\n\n</details>\n</blockquote>\n</details>\n\n<details>\n<summary>Any library</summary>\n<blockquote>\n<details>\n   <summary>From Tensorflow</summary>\n\n``` python\nimport startai\nimport torch\nimport os\nos.environ[\"SM_FRAMEWORK\"] = \"tf.keras\"\nimport segmentation_models as sm\n\n# transpile sm from tensorflow to torch\ntorch_sm = startai.transpile(sm, source=\"tensorflow\", to=\"torch\")\n\n# get some image-like arrays\noutput = torch.rand((1, 3, 512, 512))\ntarget = torch.rand((1, 3, 512, 512))\n\n# and use the transpiled version of any function from the library!\nout = torch_sm.metrics.iou_score(output, target)\n```\n\n</details>\n<details>\n   <summary>From JAX</summary>\n\n``` python\nimport startai\nimport rax\nimport torch\n\n# transpile rax from jax to torch\ntorch_rax = startai.transpile(rax, source=\"jax\", to=\"torch\")\n\n# get some arrays\nscores = torch.tensor([2.2, 1.3, 5.4])\nlabels = torch.tensor([1.0, 0.0, 0.0])\n\n# and use the transpiled version of any function from the library!\nout = torch_rax.poly1_softmax_loss(scores, labels)\n```\n\n</details>\n<details>\n   <summary>From NumPy</summary>\n\n``` python\nimport startai\nimport torch\nimport madmom\n\n# transpile madmon from numpy to torch\ntorch_madmom = startai.transpile(madmom, source=\"numpy\", to=\"torch\")\n\n# get some arrays\nfreqs = torch.arange(20) * 10\n\n# and use the transpiled version of any function from the library!\nout = torch_madmom.audio.filters.hz2midi(freqs)\n```\n\n</details>\n</blockquote>\n</details>\n\n<details>\n<summary>Any function</summary>\n<blockquote>\n<details>\n   <summary>From Tensorflow</summary>\n\n``` python\nimport startai\nimport tensorflow as tf\nimport torch\n\ndef loss(predictions, targets):\n    return 
tf.sqrt(tf.reduce_mean(tf.square(predictions - targets)))\n\n# transpile any function from tf to torch\ntorch_loss = startai.transpile(loss, source=\"tensorflow\", to=\"torch\")\n\n# get some arrays\np = torch.tensor([3.0, 2.0, 1.0])\nt = torch.tensor([0.0, 0.0, 0.0])\n\n# and use the transpiled version!\nout = torch_loss(p, t)\n```\n\n</details>\n<details>\n   <summary>From JAX</summary>\n\n``` python\nimport startai\nimport jax.numpy as jnp\nimport torch\n\ndef loss(predictions, targets):\n    return jnp.sqrt(jnp.mean((predictions - targets) ** 2))\n\n# transpile any function from jax to torch\ntorch_loss = startai.transpile(loss, source=\"jax\", to=\"torch\")\n\n# get some arrays\np = torch.tensor([3.0, 2.0, 1.0])\nt = torch.tensor([0.0, 0.0, 0.0])\n\n# and use the transpiled version!\nout = torch_loss(p, t)\n```\n\n</details>\n<details>\n   <summary>From NumPy</summary>\n\n``` python\nimport startai\nimport numpy as np\nimport torch\n\ndef loss(predictions, targets):\n    return np.sqrt(np.mean((predictions - targets) ** 2))\n\n# transpile any function from numpy to torch\ntorch_loss = startai.transpile(loss, source=\"numpy\", to=\"torch\")\n\n# get some arrays\np = torch.tensor([3.0, 2.0, 1.0])\nt = torch.tensor([0.0, 0.0, 0.0])\n\n# and use the transpiled version!\nout = torch_loss(p, t)\n```\n\n</details>\n</blockquote>\n</details>\n\n</blockquote>\n</details>\n\n<details>\n<summary><b>I'm using TensorFlow&ensp;<img class=\"dark-light\" src=\"https://raw.githubusercontent.com/khulnasoft/khulnasoft.github.io/main/img/externally_linked/logos/supported/tf_small_logo.png\"></b></summary>\n<blockquote>You can use Startai to get TensorFlow code from:\n<details>\n<summary>Any model</summary>\n<blockquote>\n<details>\n   <summary>From PyTorch</summary>\n\n``` python\nimport startai\nimport torch\nimport timm\nimport tensorflow as tf\n\n# Get a pretrained pytorch model\nmlp_encoder = timm.create_model(\"mixer_b16_224\", pretrained=True, num_classes=0)\n\n# Transpile 
it into a keras.Model with the corresponding parameters\nnoise = torch.randn(1, 3, 224, 224)\nmlp_encoder = startai.transpile(mlp_encoder, to=\"tensorflow\", args=(noise,))\n\n# Build a classifier using the transpiled encoder\nclass Classifier(tf.keras.Model):\n    def __init__(self):\n        super().__init__()\n        self.encoder = mlp_encoder\n        self.output_dense = tf.keras.layers.Dense(units=1000, activation=\"softmax\")\n\n    def call(self, x):\n        x = self.encoder(x)\n        return self.output_dense(x)\n\n# Transform the classifier and use it as a standard keras.Model\nx = tf.random.normal(shape=(1, 3, 224, 224))\nmodel = Classifier()\nret = model(x)\n```\n\n</details>\n<details>\n   <summary>From JAX</summary>\n\n``` python\nimport startai\nimport jax\nimport tensorflow as tf\n\n# Get a pretrained haiku model\n# https://khulnasoft.com/demos/scripts/deepmind_perceiver_io.py\nfrom deepmind_perceiver_io import key, perceiver_backbone\n\n# Transpile it into a tf.keras.Model with the corresponding parameters\ndummy_input = jax.random.uniform(key, shape=(1, 3, 224, 224))\nparams = perceiver_backbone.init(rng=key, images=dummy_input)\nbackbone = startai.transpile(\n    perceiver_backbone, to=\"tensorflow\", params_v=params, args=(dummy_input,)\n)\n\n# Build a classifier using the transpiled backbone\nclass PerceiverIOClassifier(tf.keras.Model):\n    def __init__(self, num_classes=20):\n        super().__init__()\n        self.backbone = backbone\n        self.max_pool = tf.keras.layers.MaxPooling1D(pool_size=512)\n        self.flatten = tf.keras.layers.Flatten()\n        self.fc = tf.keras.layers.Dense(num_classes)\n\n    def call(self, x):\n        x = self.backbone(x)\n        x = self.flatten(self.max_pool(x))\n        return self.fc(x)\n\n# Initialize a trainable, customizable, tf.keras.Model\nx = tf.random.normal(shape=(1, 3, 224, 224))\nclassifier = PerceiverIOClassifier()\nret = 
classifier(x)\n```\n\n</details>\n</blockquote>\n</details>\n\n<details>\n<summary>Any library</summary>\n<blockquote>\n<details>\n   <summary>From PyTorch</summary>\n\n``` python\nimport startai\nimport kornia\nimport requests\nimport numpy as np\nimport tensorflow as tf\nfrom PIL import Image\n\n# transpile kornia from torch to tensorflow\ntf_kornia = startai.transpile(kornia, source=\"torch\", to=\"tensorflow\")\n\n# get an image\nurl = \"http://images.cocodataset.org/train2017/000000000034.jpg\"\nraw_img = Image.open(requests.get(url, stream=True).raw)\n\n# convert it to the format expected by kornia\nimg = np.array(raw_img)\nimg = tf.transpose(tf.constant(img), (2, 0, 1))\nimg = tf.expand_dims(img, 0) / 255\n\n# and use the transpiled version of any function from the library!\nout = tf_kornia.enhance.sharpness(img, 5)\n```\n\n</details>\n<details>\n   <summary>From JAX</summary>\n\n``` python\nimport startai\nimport rax\nimport tensorflow as tf\n\n# transpile rax from jax to tensorflow\ntf_rax = startai.transpile(rax, source=\"jax\", to=\"tensorflow\")\n\n# get some arrays\nscores = tf.constant([2.2, 1.3, 5.4])\nlabels = tf.constant([1.0, 0.0, 0.0])\n\n# and use the transpiled version of any function from the library!\nout = tf_rax.poly1_softmax_loss(scores, labels)\n```\n\n</details>\n<details>\n   <summary>From NumPy</summary>\n\n``` python\nimport startai\nimport madmom\nimport tensorflow as tf\n\n# transpile madmom from numpy to tensorflow\ntf_madmom = startai.transpile(madmom, source=\"numpy\", to=\"tensorflow\")\n\n# get some arrays\nfreqs = tf.range(20) * 10\n\n# and use the transpiled version of any function from the library!\nout = tf_madmom.audio.filters.hz2midi(freqs)\n```\n\n</details>\n</blockquote>\n</details>\n\n<details>\n<summary>Any function</summary>\n<blockquote>\n<details>\n   <summary>From PyTorch</summary>\n\n``` python\nimport startai\nimport torch\nimport tensorflow as tf\n\ndef loss(predictions, targets):\n    return 
torch.sqrt(torch.mean((predictions - targets) ** 2))\n\n# transpile any function from torch to tensorflow\ntf_loss = startai.transpile(loss, source=\"torch\", to=\"tensorflow\")\n\n# get some arrays\np = tf.constant([3.0, 2.0, 1.0])\nt = tf.constant([0.0, 0.0, 0.0])\n\n# and use the transpiled version!\nout = tf_loss(p, t)\n```\n\n</details>\n<details>\n   <summary>From JAX</summary>\n\n``` python\nimport startai\nimport jax.numpy as jnp\nimport tensorflow as tf\n\ndef loss(predictions, targets):\n    return jnp.sqrt(jnp.mean((predictions - targets) ** 2))\n\n# transpile any function from jax to tensorflow\ntf_loss = startai.transpile(loss, source=\"jax\", to=\"tensorflow\")\n\n# get some arrays\np = tf.constant([3.0, 2.0, 1.0])\nt = tf.constant([0.0, 0.0, 0.0])\n\n# and use the transpiled version!\nout = tf_loss(p, t)\n```\n\n</details>\n<details>\n   <summary>From NumPy</summary>\n\n``` python\nimport startai\nimport numpy as np\nimport tensorflow as tf\n\ndef loss(predictions, targets):\n    return np.sqrt(np.mean((predictions - targets) ** 2))\n\n# transpile any function from numpy to tensorflow\ntf_loss = startai.transpile(loss, source=\"numpy\", to=\"tensorflow\")\n\n# get some arrays\np = tf.constant([3.0, 2.0, 1.0])\nt = tf.constant([0.0, 0.0, 0.0])\n\n# and use the transpiled version!\nout = tf_loss(p, t)\n```\n\n</details>\n</blockquote>\n</details>\n\n</blockquote>\n</details>\n\n<details>\n<summary><b>I'm using Jax&ensp;<img class=\"dark-light\" src=\"https://raw.githubusercontent.com/khulnasoft/khulnasoft.github.io/main/img/externally_linked/logos/supported/jax_small_logo.png\"></b></summary>\n<blockquote>You can use Startai to get JAX code from:\n<details>\n<summary>Any model</summary>\n<blockquote>\n<details>\n   <summary>From PyTorch</summary>\n\n``` python\nimport startai\nimport timm\nimport torch\nimport jax\nimport haiku as hk\n\n# Get a pretrained pytorch model\nmlp_encoder = timm.create_model(\"mixer_b16_224\", pretrained=True, 
num_classes=0)\n\n# Transpile it into a hk.Module with the corresponding parameters\nnoise = torch.randn(1, 3, 224, 224)\nmlp_encoder = startai.transpile(mlp_encoder, source=\"torch\", to=\"haiku\", args=(noise,))\n\n# Build a classifier using the transpiled encoder\nclass Classifier(hk.Module):\n    def __init__(self, num_classes=1000):\n        super().__init__()\n        self.encoder = mlp_encoder()\n        self.fc = hk.Linear(output_size=num_classes, with_bias=True)\n\n    def __call__(self, x):\n        x = self.encoder(x)\n        x = self.fc(x)\n        return x\n\ndef _forward_classifier(x):\n    module = Classifier()\n    return module(x)\n\n# Transform the classifier and use it as a standard hk.Module\nrng_key = jax.random.PRNGKey(42)\nx = jax.random.uniform(key=rng_key, shape=(1, 3, 224, 224), dtype=jax.numpy.float32)\nforward_classifier = hk.transform(_forward_classifier)\nparams = forward_classifier.init(rng=rng_key, x=x)\n\nret = forward_classifier.apply(params, None, x)\n```\n\n</details>\n<details>\n   <summary>From TensorFlow</summary>\n\n``` python\nimport startai\nimport jax\nimport haiku as hk\nimport tensorflow as tf\njax.config.update(\"jax_enable_x64\", True)\n\n# Get a pretrained keras model\neff_encoder = tf.keras.applications.efficientnet_v2.EfficientNetV2B0(\n    include_top=False, weights=\"imagenet\", input_shape=(224, 224, 3)\n)\n\n# Transpile it into a hk.Module with the corresponding parameters\nnoise = tf.random.normal(shape=(1, 224, 224, 3))\nhk_eff_encoder = startai.transpile(eff_encoder, source=\"tensorflow\", to=\"haiku\", args=(noise,))\n\n# Build a classifier using the transpiled encoder\nclass Classifier(hk.Module):\n    def __init__(self, num_classes=1000):\n        super().__init__()\n        self.encoder = hk_eff_encoder()\n        self.fc = hk.Linear(output_size=num_classes, with_bias=True)\n\n    def __call__(self, x):\n        x = self.encoder(x)\n        x = self.fc(x)\n        return x\n\ndef 
_forward_classifier(x):\n    module = Classifier()\n    return module(x)\n\n# Transform the classifier and use it as a standard hk.Module\nrng_key = jax.random.PRNGKey(42)\ndummy_x = jax.random.uniform(key=rng_key, shape=(1, 224, 224, 3))\nforward_classifier = hk.transform(_forward_classifier)\nparams = forward_classifier.init(rng=rng_key, x=dummy_x)\n\nret = forward_classifier.apply(params, None, dummy_x)\n```\n\n</details>\n</blockquote>\n</details>\n\n<details>\n<summary>Any library</summary>\n<blockquote>\n<details>\n   <summary>From PyTorch</summary>\n\n``` python\nimport startai\nimport kornia\nimport requests\nimport jax\nimport jax.numpy as jnp\nfrom PIL import Image\njax.config.update(\"jax_enable_x64\", True)\n\n# transpile kornia from torch to jax\njax_kornia = startai.transpile(kornia, source=\"torch\", to=\"jax\")\n\n# get an image\nurl = \"http://images.cocodataset.org/train2017/000000000034.jpg\"\nraw_img = Image.open(requests.get(url, stream=True).raw)\n\n# convert it to the format expected by kornia\nimg = jnp.transpose(jnp.array(raw_img), (2, 0, 1))\nimg = jnp.expand_dims(img, 0) / 255\n\n# and use the transpiled version of any function from the library!\nout = jax_kornia.enhance.sharpness(img, 5)\n```\n\n</details>\n<details>\n   <summary>From TensorFlow</summary>\n\n``` python\nimport startai\nimport jax\nimport os\nos.environ[\"SM_FRAMEWORK\"] = \"tf.keras\"\nimport segmentation_models as sm\n\n# transpile sm from tensorflow to jax\njax_sm = startai.transpile(sm, source=\"tensorflow\", to=\"jax\")\n\n# get some image-like arrays\nkey = jax.random.PRNGKey(23)\nkey1, key2 = jax.random.split(key)\noutput = jax.random.uniform(key1, (1, 3, 512, 512))\ntarget = jax.random.uniform(key2, (1, 3, 512, 512))\n\n# and use the transpiled version of any function from the library!\nout = jax_sm.metrics.iou_score(output, target)\n```\n\n</details>\n<details>\n   <summary>From NumPy</summary>\n\n``` python\nimport startai\nimport madmom\nimport jax.numpy as jnp\n\n# 
transpile madmom from numpy to jax\njax_madmom = startai.transpile(madmom, source=\"numpy\", to=\"jax\")\n\n# get some arrays\nfreqs = jnp.arange(20) * 10\n\n# and use the transpiled version of any function from the library!\nout = jax_madmom.audio.filters.hz2midi(freqs)\n```\n\n</details>\n</blockquote>\n</details>\n\n<details>\n<summary>Any function</summary>\n<blockquote>\n<details>\n   <summary>From PyTorch</summary>\n\n``` python\nimport startai\nimport torch\nimport jax.numpy as jnp\n\ndef loss(predictions, targets):\n    return torch.sqrt(torch.mean((predictions - targets) ** 2))\n\n# transpile any function from torch to jax\njax_loss = startai.transpile(loss, source=\"torch\", to=\"jax\")\n\n# get some arrays\np = jnp.array([3.0, 2.0, 1.0])\nt = jnp.array([0.0, 0.0, 0.0])\n\n# and use the transpiled version!\nout = jax_loss(p, t)\n```\n\n</details>\n<details>\n   <summary>From TensorFlow</summary>\n\n``` python\nimport startai\nimport tensorflow as tf\nimport jax.numpy as jnp\n\ndef loss(predictions, targets):\n    return tf.sqrt(tf.reduce_mean(tf.square(predictions - targets)))\n\n# transpile any function from tf to jax\njax_loss = startai.transpile(loss, source=\"tensorflow\", to=\"jax\")\n\n# get some arrays\np = jnp.array([3.0, 2.0, 1.0])\nt = jnp.array([0.0, 0.0, 0.0])\n\n# and use the transpiled version!\nout = jax_loss(p, t)\n```\n\n</details>\n<details>\n   <summary>From NumPy</summary>\n\n``` python\nimport startai\nimport numpy as np\nimport jax\nimport jax.numpy as jnp\njax.config.update('jax_enable_x64', True)\n\ndef loss(predictions, targets):\n    return np.sqrt(np.mean((predictions - targets) ** 2))\n\n# transpile any function from numpy to jax\njax_loss = startai.transpile(loss, source=\"numpy\", to=\"jax\")\n\n# get some arrays\np = jnp.array([3.0, 2.0, 1.0])\nt = jnp.array([0.0, 0.0, 0.0])\n\n# and use the transpiled version!\nout = jax_loss(p, 
t)\n```\n\n</details>\n</blockquote>\n</details>\n\n</blockquote>\n</details>\n\n<details>\n<summary><b>I'm using NumPy&ensp;<img class=\"dark-light\" src=\"https://raw.githubusercontent.com/khulnasoft/khulnasoft.github.io/main/img/externally_linked/logos/supported/numpy_small_logo.png\"></b></summary>\n<blockquote>You can use Startai to get NumPy code from:\n<details>\n<summary>Any library</summary>\n<blockquote>\n<details>\n   <summary>From PyTorch</summary>\n\n``` python\nimport startai\nimport kornia\nimport requests\nimport numpy as np\nfrom PIL import Image\n\n# transpile kornia from torch to np\nnp_kornia = startai.transpile(kornia, source=\"torch\", to=\"numpy\")\n\n# get an image\nurl = \"http://images.cocodataset.org/train2017/000000000034.jpg\"\nraw_img = Image.open(requests.get(url, stream=True).raw)\n\n# convert it to the format expected by kornia\nimg = np.transpose(np.array(raw_img), (2, 0, 1))\nimg = np.expand_dims(img, 0) / 255\n\n# and use the transpiled version of any function from the library!\nout = np_kornia.enhance.sharpness(img, 5)\n```\n\n</details>\n<details>\n   <summary>From TensorFlow</summary>\n\n``` python\nimport startai\nimport numpy as np\nimport os\nos.environ[\"SM_FRAMEWORK\"] = \"tf.keras\"\nimport segmentation_models as sm\n\n# transpile sm from tensorflow to numpy\nnp_sm = startai.transpile(sm, source=\"tensorflow\", to=\"numpy\")\n\n# get some image-like arrays\noutput = np.random.rand(1, 3, 512, 512).astype(dtype=np.float32)\ntarget = np.random.rand(1, 3, 512, 512).astype(dtype=np.float32)\n\n# and use the transpiled version of any function from the library!\nout = np_sm.metrics.iou_score(output, target)\n```\n\n</details>\n<details>\n   <summary>From Jax</summary>\n\n``` python\nimport startai\nimport rax\nimport numpy as np\n\n# transpile rax from jax to numpy\nnp_rax = startai.transpile(rax, source=\"jax\", to=\"numpy\")\n\n# get some arrays\nscores = np.array([2.2, 1.3, 5.4])\nlabels = np.array([1.0, 0.0, 0.0])\n\n# and 
use the transpiled version of any function from the library!\nout = np_rax.poly1_softmax_loss(scores, labels)\n```\n\n</details>\n</blockquote>\n</details>\n\n<details>\n<summary>Any function</summary>\n<blockquote>\n<details>\n   <summary>From PyTorch</summary>\n\n``` python\nimport startai\nimport torch\nimport numpy as np\n\ndef loss(predictions, targets):\n    return torch.sqrt(torch.mean((predictions - targets) ** 2))\n\n# transpile any function from torch to numpy\nnp_loss = startai.transpile(loss, source=\"torch\", to=\"numpy\")\n\n# get some arrays\np = np.array([3.0, 2.0, 1.0])\nt = np.array([0.0, 0.0, 0.0])\n\n# and use the transpiled version!\nout = np_loss(p, t)\n```\n\n</details>\n<details>\n   <summary>From TensorFlow</summary>\n\n``` python\nimport startai\nimport tensorflow as tf\nimport numpy as np\n\ndef loss(predictions, targets):\n    return tf.sqrt(tf.reduce_mean(tf.square(predictions - targets)))\n\n# transpile any function from tf to numpy\nnp_loss = startai.transpile(loss, source=\"tensorflow\", to=\"numpy\")\n\n# get some arrays\np = np.array([3.0, 2.0, 1.0])\nt = np.array([0.0, 0.0, 0.0])\n\n# and use the transpiled version!\nout = np_loss(p, t)\n```\n\n</details>\n<details>\n   <summary>From JAX</summary>\n\n``` python\nimport startai\nimport jax.numpy as jnp\nimport numpy as np\n\ndef loss(predictions, targets):\n    return jnp.sqrt(jnp.mean((predictions - targets) ** 2))\n\n# transpile any function from jax to numpy\nnp_loss = startai.transpile(loss, source=\"jax\", to=\"numpy\")\n\n# get some arrays\np = np.array([3.0, 2.0, 1.0])\nt = np.array([0.0, 0.0, 0.0])\n\n# and use the transpiled version!\nout = np_loss(p, t)\n```\n\n</details>\n</blockquote>\n</details>\n\n</blockquote>\n</details>\n\n<details>\n<summary>\n<b>I'm using Startai&ensp;<img height=\"25px\" width=\"25px\" class=\"dark-light\" src=\"https://raw.githubusercontent.com/khulnasoft/khulnasoft.github.io/main/img/logos/startai_logo_only.svg\"></b>\n</summary>\nOr you can 
use Startai as a framework, breaking yourself (and your code)\nfree from deciding which community to support, allowing anyone to run\nyour code in their framework of choice!\n\n``` python\nimport startai\n\n# A simple image classification model\nclass StartaiNet(startai.Module):\n    def __init__(\n        self,\n        h_w=(32, 32),\n        input_channels=3,\n        output_channels=512,\n        num_classes=2,\n        data_format=\"NCHW\",\n        device=\"cpu\",\n    ):\n        self.h_w = h_w\n        self.input_channels = input_channels\n        self.output_channels = output_channels\n        self.num_classes = num_classes\n        self.data_format = data_format\n        super().__init__(device=device)\n\n    def _build(self, *args, **kwargs):\n        self.extractor = startai.Sequential(\n            startai.Conv2D(self.input_channels, 6, [5, 5], 1, \"SAME\", data_format=self.data_format),\n            startai.GELU(),\n            startai.Conv2D(6, 16, [5, 5], 1, \"SAME\", data_format=self.data_format),\n            startai.GELU(),\n            startai.Conv2D(16, self.output_channels, [5, 5], 1, \"SAME\", data_format=self.data_format),\n            startai.GELU(),\n        )\n\n        self.classifier = startai.Sequential(\n            # Since the padding is \"SAME\", this would be image_height x image_width x output_channels\n            startai.Linear(self.h_w[0] * self.h_w[1] * self.output_channels, 512),\n            startai.GELU(),\n            startai.Linear(512, self.num_classes),\n        )\n\n    def _forward(self, x):\n        x = self.extractor(x)\n        # flatten all dims except batch dim\n        x = startai.flatten(x, start_dim=1, end_dim=-1)\n        logits = self.classifier(x)\n        probs = startai.softmax(logits)\n        return logits, probs\n```\n\nAfter building your model in Startai, you can set your favourite framework\nas the backend to use its operations under the hood!\n\n``` python\nstartai.set_backend(\"torch\")\nmodel = 
StartaiNet()\nx = torch.randn(1, 3, 32, 32)\nlogits, probs = model(x)\n```\n\n``` python\nstartai.set_backend(\"tensorflow\")\nmodel = StartaiNet()\nx = tf.random.uniform(shape=(1, 3, 32, 32))\nlogits, probs = model(x)\n```\n\n``` python\nstartai.set_backend(\"jax\")\nmodel = StartaiNet()\nx = jax.random.uniform(key, shape=(1, 3, 32, 32))\nlogits, probs = model(x)\n```\n\n``` python\nstartai.set_backend(\"numpy\")\nmodel = StartaiNet()\nx = np.random.uniform(size=(1, 3, 32, 32))\nlogits, probs = model(x)\n```\n\nLast but not least, we can also build the training pipeline in pure startai\n\u2b07\ufe0f\n\n<details>\n<summary><a>Let's define some helper functions first</a></summary>\n\n``` python\n# helper function for loading the dataset in batches\ndef generate_batches(images, classes, dataset_size, batch_size=32):\n    if batch_size > dataset_size:\n        raise startai.utils.exceptions.StartaiError(\"Use a smaller batch size\")\n    for idx in range(0, dataset_size, batch_size):\n        yield images[idx : min(idx + batch_size, dataset_size)], classes[\n            idx : min(idx + batch_size, dataset_size)\n        ]\n\n\n# helper function to get the number of current predictions\ndef num_correct(preds, labels):\n    return (preds.argmax() == labels).sum().to_numpy().item()\n\n\n# define a loss function\ndef loss_fn(params):\n    v, model, x, y = params\n    _, probs = model(x, v=v)\n    return startai.cross_entropy(y, probs), probs\n```\n\n</details>\n\n<details>\n<summary><a>And train this model!</a></summary>\n\n``` python\n# train the model on gpu if it's available\ndevice = \"gpu:0\" if startai.gpu_is_available() else \"cpu\"\n\n# training hyperparams\noptimizer = startai.Adam(1e-4)\nbatch_size = 4\nnum_epochs = 20\nnum_classes = 10\n\nmodel = StartaiNet(\n    h_w=(28, 28),\n    input_channels=1,\n    output_channels=120,\n    num_classes=num_classes,\n    device=device,\n)\n\nimages = startai.random_uniform(shape=(16, 1, 28, 28))\nclasses = 
startai.randint(0, num_classes - 1, shape=(16,))\n\n\n# training loop\ndef train(images, classes, epochs, model, device, num_classes=10, batch_size=32):\n    # training metrics\n    epoch_loss = 0.0\n    metrics = []\n    dataset_size = len(images)\n\n    for epoch in range(epochs):\n        train_correct = 0\n        train_loop = tqdm(\n            generate_batches(images, classes, len(images), batch_size=batch_size),\n            total=dataset_size // batch_size,\n            position=0,\n            leave=True,\n        )\n\n        for xbatch, ybatch in train_loop:\n            xbatch, ybatch = xbatch.to_device(device), ybatch.to_device(device)\n\n            # Since the cross entropy function expects the target classes to be in one-hot encoded format\n            ybatch_encoded = startai.one_hot(ybatch, num_classes)\n\n            # update model params\n            loss_probs, grads = startai.execute_with_gradients(\n                loss_fn,\n                (model.v, model, xbatch, ybatch_encoded),\n            )\n\n            model.v = optimizer.step(model.v, grads[\"0\"])\n\n            batch_loss = startai.to_numpy(loss_probs[0]).mean().item()  # batch mean loss\n            epoch_loss += batch_loss * xbatch.shape[0]\n            train_correct += num_correct(loss_probs[1], ybatch)\n\n            train_loop.set_description(f\"Epoch [{epoch + 1:2d}/{epochs}]\")\n            train_loop.set_postfix(\n                running_loss=batch_loss,\n                accuracy_percentage=(train_correct / dataset_size) * 100,\n            )\n\n        epoch_loss = epoch_loss / dataset_size\n        training_accuracy = train_correct / dataset_size\n\n        metrics.append([epoch, epoch_loss, training_accuracy])\n\n        train_loop.write(\n            f\"\\nAverage training loss: {epoch_loss:.6f}, Train Correct: {train_correct}\",\n            end=\"\\n\",\n        )\n\n\n# assuming the dataset(images and classes) are already prepared in a folder\ntrain(\n    images,\n  
  classes,\n    num_epochs,\n    model,\n    device,\n    num_classes=num_classes,\n    batch_size=batch_size,\n)\n```\n\n</details>\n</details>\n\n\n\\\nFor a more comprehensive overview, head over to the [Demos](https://khulnasoft.com/docs/startai/demos/index.html) section with more on the [basics](https://khulnasoft.com/docs/startai/demos/learn_the_basics.html), a few [guides](https://khulnasoft.com/docs/startai/demos/guides.html) and a wide-ranging set of [examples](https://khulnasoft.com/docs/startai/demos/examples_and_demos.html) that demonstrate the transpilation of various popular models. We continue to expand on that list, let us know what demos you'd like us to add next \ud83c\udfaf\n\n\nLet's take a look at how Startai works both as a transpiler and a framework in a bit more detail to get an idea of why and where to use it.\n\n<details>\n<summary><b>Startai as a transpiler</b></summary>\n\n<blockquote>\n<details>\n<summary>When should I use Startai as a transpiler?</summary>\n\nIf you want to use building blocks published in other frameworks (neural\nnetworks, layers, array computing libraries, training pipelines\\...),\nyou want to integrate code developed in various frameworks, or maybe\nstraight up move code from one framework to another, the transpiler is\ndefinitely the tool \ud83d\udd27 for the job! As the output of transpilation is\nnative code in the target framework, you can use the converted code just\nas if it was code originally developed in that framework, applying\nframework-specific optimizations or tools, instantly exposing your\nproject to all of the unique perks of a different framework.\n</details>\n</blockquote>\n\nStartai\\'s transpiler allows you to use code from any other framework (or\nfrom any other version of the same framework!) in your own code, by just\nadding one line of code. 
Under the hood, Startai traces a computational\ngraph and leverages the frontends and backends to link one framework to\nanother.\n\nThis way, Startai makes all ML-related projects available for you,\nindependently of the framework you want to use to research, develop, or\ndeploy systems. Feel free to head over to the docs for the full API\nreference, but the functions you\\'d most likely want to use are:\n\n``` python\n# Traces an efficient fully-functional graph from a function, removing all wrapping and redundant code\nstartai.trace_graph()\n\n# Converts framework-specific code to a different framework\nstartai.transpile()\n\n# Converts framework-specific code to Startai\nstartai.unify()\n```\n\nThese functions can be used eagerly or lazily. If you pass the necessary\narguments for function tracing, the graph tracing/transpilation step will\nhappen instantly (eagerly). Otherwise, the graph tracing/transpilation\nwill happen only when the returned function is first invoked.\n\n``` python\nimport startai\nimport jax\nstartai.set_backend(\"jax\")\n\n# Simple JAX function to transpile\ndef test_fn(x):\n    return jax.numpy.sum(x)\n\nx1 = startai.array([1., 2.])\n```\n\n``` python\n# Arguments are available -> transpilation happens eagerly\neager_graph = startai.transpile(test_fn, source=\"jax\", to=\"torch\", args=(x1,))\n\n# eager_graph is now torch code and runs efficiently\nret = eager_graph(x1)\n```\n\n``` python\n# Arguments are not available -> transpilation happens lazily\nlazy_graph = startai.transpile(test_fn, source=\"jax\", to=\"torch\")\n\n# The transpiled graph is initialized, transpilation will happen here\nret = lazy_graph(x1)\n\n# lazy_graph is now torch code and runs efficiently\nret = lazy_graph(x1)\n```\n\nIf you want to learn more, you can find more information in the [Startai as\na transpiler section of the\ndocs!](https://khulnasoft.com/docs/startai/overview/design/startai_as_a_transpiler.html)\n\n</details>\n\n<details>\n<summary><b>Startai as 
a framework</b></summary>\n\n<blockquote>\n<details>\n<summary>When should I use Startai as a framework?</summary>\n\nAs Startai supports multiple backends, writing code in Startai breaks you free\nfrom framework limitations. If you want to publish highly flexible code\nfor everyone to use, independently of the framework they are using, or\nyou plan to develop ML-related tools and want them to be interoperable\nwith not only the already existing frameworks, but also with future\nframeworks, then Startai is for you!\n\n</details>\n</blockquote>\n\nThe Startai framework is built on top of various essential components,\nmainly the [Backend\nHandler](https://khulnasoft.com/docs/startai/overview/design/building_blocks.html#backend-handler),\nwhich manages what framework is being used behind the scenes and the\n[Backend Functional\nAPIs](https://khulnasoft.com/docs/startai/overview/design/building_blocks.html#backend-functional-apis),\nwhich provide framework-specific implementations of the Startai functions.\nLikewise, classes such as `startai.Container` or `startai.Array` are also\navailable, facilitating the use of structured data and array-like\nobjects (learn more about them\n[here!](https://khulnasoft.com/docs/startai/overview/design/startai_as_a_framework.html)).\n\nAll of the functionalities in Startai are exposed through the\n`Startai functional API` and the `Startai stateful API`. 
All functions in the\n[Functional\nAPI](https://khulnasoft.com/docs/startai/overview/design/building_blocks.html#startai-functional-api)\nare **Framework Agnostic Functions**, which means that we can use them\nlike this:\n\n``` python\nimport startai\nimport jax.numpy as jnp\nimport tensorflow as tf\nimport numpy as np\nimport torch\n\ndef mse_loss(y, target):\n    return startai.mean((y - target)**2)\n\njax_mse   = mse_loss(jnp.ones((5,)), jnp.ones((5,)))\ntf_mse    = mse_loss(tf.ones((5,)), tf.ones((5,)))\nnp_mse    = mse_loss(np.ones((5,)), np.ones((5,)))\ntorch_mse = mse_loss(torch.ones((5,)), torch.ones((5,)))\n```\n\nIn the example above we show how Startai\\'s functions are compatible with\ntensors from different frameworks. This is the same for ALL Startai\nfunctions. They can accept tensors from any framework and return the\ncorrect result.\n\nThe [Startai Stateful\nAPI](https://khulnasoft.com/docs/startai/overview/design/startai_as_a_framework/startai_stateful_api.html),\non the other hand, allows you to define trainable modules and layers,\nwhich you can use alone or as a part of any other framework code!\n\n``` python\nimport startai\n\n\nclass Regressor(startai.Module):\n    def __init__(self, input_dim, output_dim):\n        self.input_dim = input_dim\n        self.output_dim = output_dim\n        super().__init__()\n\n    def _build(self, *args, **kwargs):\n        self.linear0 = startai.Linear(self.input_dim, 128)\n        self.linear1 = startai.Linear(128, self.output_dim)\n\n    def _forward(self, x):\n        x = self.linear0(x)\n        x = startai.functional.relu(x)\n        x = self.linear1(x)\n        return x\n```\n\nIf we put it all together, we\\'ll have something like this. 
This example\nuses PyTorch as the backend, but this can easily be changed to your\nfavorite frameworks, such as TensorFlow, or JAX.\n\n``` python\nimport startai\n\n\nclass Regressor(startai.Module):\n    def __init__(self, input_dim, output_dim):\n        self.input_dim = input_dim\n        self.output_dim = output_dim\n        super().__init__()\n\n    def _build(self, *args, **kwargs):\n        self.linear0 = startai.Linear(self.input_dim, 128)\n        self.linear1 = startai.Linear(128, self.output_dim)\n\n    def _forward(self, x):\n        x = self.linear0(x)\n        x = startai.functional.relu(x)\n        x = self.linear1(x)\n        return x\n\nstartai.set_backend('torch')  # set backend to PyTorch (or any other backend!)\n\nmodel = Regressor(input_dim=1, output_dim=1)\noptimizer = startai.Adam(0.3)\n\nn_training_examples = 2000\nnoise = startai.random.random_normal(shape=(n_training_examples, 1), mean=0, std=0.1)\nx = startai.linspace(-6, 3, n_training_examples).reshape((n_training_examples, 1))\ny = 0.2 * x ** 2 + 0.5 * x + 0.1 + noise\n\n\ndef loss_fn(v, x, target):\n    pred = model(x, v=v)\n    return startai.mean((pred - target) ** 2)\n\nfor epoch in range(40):\n    # forward pass\n    pred = model(x)\n\n    # compute loss and gradients\n    loss, grads = startai.execute_with_gradients(lambda params: loss_fn(*params), (model.v, x, y))\n\n    # update parameters\n    model.v = optimizer.step(model.v, grads)\n\n    # print current loss\n    print(f'Epoch: {epoch + 1:2d} --- Loss: {startai.to_numpy(loss).item():.5f}')\n\nprint('Finished training!')\n```\n\nThe model\\'s output can be visualized as follows:\n\n<div align=\"center\">\n   <img width=\"50%\" class=\"dark-light\" src=\"https://raw.githubusercontent.com/khulnasoft/khulnasoft.github.io/main/img/regressor_lq.gif\">\n</div>\n\nAs always, you can find more information about [Startai as a framework 
in\nthe\ndocs!](https://khulnasoft.com/docs/startai/overview/design/startai_as_a_framework.html)\n\n</details>\n\n------------------------------------------------------------------------\n\n# Documentation\n\nYou can find Startai's documentation on the [Docs page](https://khulnasoft.com/docs/startai/), which includes:\n- [Motivation](https://khulnasoft.com/docs/startai/overview/background.html): This contextualizes the problem Startai is trying to solve by going over\n    - The current [ML Explosion](https://khulnasoft.com/docs/startai/overview/background/ml_explosion.html#ml-explosion).\n    - Explaining why it is important [to solve this problem](https://khulnasoft.com/docs/startai/overview/background/why_unify.html#why-unify).\n    - Explaining how we adhere to existing [standards](https://khulnasoft.com/docs/startai/overview/background/standardization.html#standardization) to make this happen.\n- [Related Work](https://khulnasoft.com/docs/startai/overview/related_work.html): Which paints a picture of the role Startai plays in the ML stack, comparing it to other existing solutions in terms of functionalities and abstraction level.\n- [Design](https://khulnasoft.com/docs/startai/overview/design.html): A user-focused guide about the design decision behind the architecture and the main building blocks of Startai.\n- [Deep Dive](https://khulnasoft.com/docs/startai/overview/deep_dive.html): Which delves deeper into the implementation details of Startai and is oriented towards potential contributors to the code base.\n\n------------------------------------------------------------------------\n\n# Contributing\n\n\nWe believe that everyone can contribute and make a difference. 
Whether\nit\\'s writing code \ud83d\udcbb, fixing bugs \ud83d\udc1b, or simply sharing feedback \ud83d\udcac,\nyour contributions are definitely welcome and appreciated \ud83d\ude4c\n\nCheck out all of our [Open Tasks](https://khulnasoft.com/docs/startai/overview/contributing/open_tasks.html),\nand find out more info in our [Contributing guide](https://khulnasoft.com/docs/startai/overview/contributing.html)\nin the docs!\n\nJoin our amazing community as a [contributor](https://khulnasoft.com/docs/startai/overview/contributing/volunteer_program.html), and help accelerate our journey to unify all ML frameworks!\n\n<a href=\"https://github.com/khulnasoft/startai/graphs/contributors\">\n  <img class=\"dark-light\" src=\"https://contrib.rocks/image?repo=khulnasoft/startai&anon=0&columns=20&max=100&r=true\" />\n</a>\n\n------------------------------------------------------------------------\n\n# Community\n\n\nIn order to achieve the ambitious goal of unifying AI, we definitely need\nas many hands as possible on it! Whether you are a seasoned developer or\njust starting out, you\\'ll find a place here! Join the Startai community on\nour [Discord](https://discord.gg/sXyFF8tDtm) \ud83d\udc7e server, which is the\nperfect place to ask questions, share ideas, and get help from both\nfellow developers and the Startai Team directly!\n\nAlso! Feel free to follow us on\n[Twitter](https://twitter.com/letskhulnasoft) \ud83d\udc26 as well, we use it to\nshare updates, sneak peeks, and all sorts of relevant news, certainly a\ngreat way to stay in the loop \ud83d\ude04\n\nCan\\'t wait to see you there!\n\n------------------------------------------------------------------------\n\n# Citation\n\nIf you use Startai for your work, please don\\'t forget to give proper credit\nby including the accompanying [paper](https://arxiv.org/abs/2102.02886)\n\ud83d\udcc4 in your references. 
It\\'s a small way to show appreciation and help\nto continue to support this and other open source projects \ud83d\ude4c\n\n\n    @article{lenton2021startai,\n      title={Startai: Templated deep learning for inter-framework portability},\n      author={Lenton, Daniel and Pardo, Fabio and Falck, Fabian and James, Stephen and Clark, Ronald},\n      journal={arXiv preprint arXiv:2102.02886},\n      year={2021}\n    }\n",
    "bugtrack_url": null,
    "license": "Apache 2.0",
    "summary": "The unified machine learning framework, enabling framework-agnostic functions, layers and libraries.",
    "version": "0.0.8.0",
    "project_urls": {
        "Docs": "https://khulnasoft.com/docs/startai/",
        "Homepage": "https://khulnasoft.com/startai",
        "Source": "https://github.com/khulnasoft/startai"
    },
    "split_keywords": [],
    "urls": [
        {
            "comment_text": "",
            "digests": {
                "blake2b_256": "e931cc49e9ebac70a6c4c3a453dec2c5c1cf0f6e738149d8afe8a696180eb886",
                "md5": "06fa5565fd6e2137fd886546d19c3990",
                "sha256": "0795cc7d1976138a91f7c721dbdba7df4d8d4dc8701987e29f77af1ad65d3ce4"
            },
            "downloads": -1,
            "filename": "startai-0.0.8.0-cp310-cp310-macosx_12_0_arm64.whl",
            "has_sig": false,
            "md5_digest": "06fa5565fd6e2137fd886546d19c3990",
            "packagetype": "bdist_wheel",
            "python_version": "cp310",
            "requires_python": null,
            "size": 10015428,
            "upload_time": "2024-04-02T02:21:59",
            "upload_time_iso_8601": "2024-04-02T02:21:59.769116Z",
            "url": "https://files.pythonhosted.org/packages/e9/31/cc49e9ebac70a6c4c3a453dec2c5c1cf0f6e738149d8afe8a696180eb886/startai-0.0.8.0-cp310-cp310-macosx_12_0_arm64.whl",
            "yanked": false,
            "yanked_reason": null
        },
        {
            "comment_text": "",
            "digests": {
                "blake2b_256": "15a9163fc1569f00bc7294e9c2b0bc3b4c89d1f6d8f7613d79205d130debb37a",
                "md5": "881e8dcdab2abcc471cbd478016d8e71",
                "sha256": "13bec7eb76edc7b944cf6b95c9537a37c818f4cad432b6fd8810ee1359a395eb"
            },
            "downloads": -1,
            "filename": "startai-0.0.8.0-cp310-cp310-manylinux_2_17_x86_64.whl",
            "has_sig": false,
            "md5_digest": "881e8dcdab2abcc471cbd478016d8e71",
            "packagetype": "bdist_wheel",
            "python_version": "cp310",
            "requires_python": null,
            "size": 16466318,
            "upload_time": "2024-04-02T02:22:03",
            "upload_time_iso_8601": "2024-04-02T02:22:03.094116Z",
            "url": "https://files.pythonhosted.org/packages/15/a9/163fc1569f00bc7294e9c2b0bc3b4c89d1f6d8f7613d79205d130debb37a/startai-0.0.8.0-cp310-cp310-manylinux_2_17_x86_64.whl",
            "yanked": false,
            "yanked_reason": null
        },
        {
            "comment_text": "",
            "digests": {
                "blake2b_256": "361ef16ed558afc274888f36d8c25c1c5789e83791b244a9560acf94c2be365d",
                "md5": "ee9dff4b6a38683fe52e2bfadd876983",
                "sha256": "21d2f44fc7121315786970edb43ea247abaa6f17ec9de577abfb3ca75bd24ae3"
            },
            "downloads": -1,
            "filename": "startai-0.0.8.0-cp310-cp310-win_amd64.whl",
            "has_sig": false,
            "md5_digest": "ee9dff4b6a38683fe52e2bfadd876983",
            "packagetype": "bdist_wheel",
            "python_version": "cp310",
            "requires_python": null,
            "size": 16466311,
            "upload_time": "2024-04-02T02:22:06",
            "upload_time_iso_8601": "2024-04-02T02:22:06.736551Z",
            "url": "https://files.pythonhosted.org/packages/36/1e/f16ed558afc274888f36d8c25c1c5789e83791b244a9560acf94c2be365d/startai-0.0.8.0-cp310-cp310-win_amd64.whl",
            "yanked": false,
            "yanked_reason": null
        },
        {
            "comment_text": "",
            "digests": {
                "blake2b_256": "a7422fe824aa83755272029735ed971826bb7304003cd9fc42b05451a00833b0",
                "md5": "0dcdb0a66fa053d16126e9c6b4d851f2",
                "sha256": "d1abbeb4d726cf1d39bc88b28ed9c319e73d6468617c67d445b5b6c6d5116130"
            },
            "downloads": -1,
            "filename": "startai-0.0.8.0-cp311-cp311-macosx_12_0_arm64.whl",
            "has_sig": false,
            "md5_digest": "0dcdb0a66fa053d16126e9c6b4d851f2",
            "packagetype": "bdist_wheel",
            "python_version": "cp311",
            "requires_python": null,
            "size": 10035371,
            "upload_time": "2024-04-02T02:14:38",
            "upload_time_iso_8601": "2024-04-02T02:14:38.333585Z",
            "url": "https://files.pythonhosted.org/packages/a7/42/2fe824aa83755272029735ed971826bb7304003cd9fc42b05451a00833b0/startai-0.0.8.0-cp311-cp311-macosx_12_0_arm64.whl",
            "yanked": false,
            "yanked_reason": null
        },
        {
            "comment_text": "",
            "digests": {
                "blake2b_256": "78993701bf56f8fdda94dc50b9c70a14c9dbdca92042db51f08bf5a2ca52f504",
                "md5": "f045dfaad6c7c616707794caa5465264",
                "sha256": "a00eba35a5ab7a46efe22fe9edea3e3d8a7bd0998f1b3f088cccf4d6f34e4e70"
            },
            "downloads": -1,
            "filename": "startai-0.0.8.0-cp311-cp311-manylinux_2_17_x86_64.whl",
            "has_sig": false,
            "md5_digest": "f045dfaad6c7c616707794caa5465264",
            "packagetype": "bdist_wheel",
            "python_version": "cp311",
            "requires_python": null,
            "size": 16683757,
            "upload_time": "2024-04-02T02:14:42",
            "upload_time_iso_8601": "2024-04-02T02:14:42.760525Z",
            "url": "https://files.pythonhosted.org/packages/78/99/3701bf56f8fdda94dc50b9c70a14c9dbdca92042db51f08bf5a2ca52f504/startai-0.0.8.0-cp311-cp311-manylinux_2_17_x86_64.whl",
            "yanked": false,
            "yanked_reason": null
        },
        {
            "comment_text": "",
            "digests": {
                "blake2b_256": "dc48992fffbec117842f4335319bcd61ef87a0fc5e8ecf15a5f72c7ae85e39af",
                "md5": "3a6398888e9dfd8d7dd5e827f262abd7",
                "sha256": "c8529bb8f0ea649239361216df61bf071766723573cf0a7c984efd296004efd9"
            },
            "downloads": -1,
            "filename": "startai-0.0.8.0-cp311-cp311-win_amd64.whl",
            "has_sig": false,
            "md5_digest": "3a6398888e9dfd8d7dd5e827f262abd7",
            "packagetype": "bdist_wheel",
            "python_version": "cp311",
            "requires_python": null,
            "size": 16683747,
            "upload_time": "2024-04-02T02:14:46",
            "upload_time_iso_8601": "2024-04-02T02:14:46.704120Z",
            "url": "https://files.pythonhosted.org/packages/dc/48/992fffbec117842f4335319bcd61ef87a0fc5e8ecf15a5f72c7ae85e39af/startai-0.0.8.0-cp311-cp311-win_amd64.whl",
            "yanked": false,
            "yanked_reason": null
        }
    ],
    "upload_time": "2024-04-02 02:21:59",
    "github": true,
    "gitlab": false,
    "bitbucket": false,
    "codeberg": false,
    "github_user": "khulnasoft",
    "github_project": "startai",
    "travis_ci": false,
    "coveralls": false,
    "github_actions": true,
    "lcname": "startai"
}
        
Elapsed time: 0.21276s