# NHN AI EasyMaker SDK
```python
# Initialize EasyMaker SDK
import easymaker
easymaker.init(
appkey="EASYMAKER_APPKEY",
region="kr1",
secret_key="EASYMAKER_SECRET_KEY",
experiment_id="EXPERIMENT_ID", # Optional
)
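# appkey and secret_key identify the AI EasyMaker service to call; experiment_id is
# optional here, and setting it lets the run()/create() calls below omit it
# (see the "Optional if already set in init" comments).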
# Create Experiment
experiment = easymaker.Experiment().create(
experiment_name="experiment_name",
experiment_description="experiment_description",
# wait=False
)
# Delete Experiment
experiment.delete()
easymaker.Experiment("experiment_id").delete()
easymaker.experiment.delete("experiment_id")
# Create Training
training = easymaker.Training().run(
experiment_id=experiment.experiment_id, # Optional if already set in init
training_name="training_name",
training_description="training_description",
train_image_name="Ubuntu 22.04 CPU TensorFlow Training",
train_instance_name="m2.c4m8",
distributed_node_count=1,
data_storage_size=300, # minimum size: 300 GB
source_dir_uri="obs://kr1-api-object-storage.nhncloudservice.com/v1/AUTH_{tenant_id}/{container_name}/{source_download_path}",
entry_point="training_start.py",
hyperparameter_list=[
{
"parameterName": "epochs",
"parameterValue": "10",
},
{
"parameterName": "batch-size",
"parameterValue": "30",
}
],
timeout_hours=100, # 1~720
model_upload_uri="obs://kr1-api-object-storage.nhncloudservice.com/v1/AUTH_{tenant_id}/{container_name}/{model_upload_path}",
check_point_input_uri="obs://kr1-api-object-storage.nhncloudservice.com/v1/AUTH_{tenant_id}/{container_name}/{checkpoint_input_path}",
check_point_upload_uri="obs://kr1-api-object-storage.nhncloudservice.com/v1/AUTH_{tenant_id}/{container_name}/{checkpoint_upload_path}",
dataset_list=[
{
"datasetName": "train",
"dataUri": "obs://kr1-api-object-storage.nhncloudservice.com/v1/AUTH_{tenant_id}/{container_name}/{train_data_download_path}"
},
{
"datasetName": "test",
"dataUri": "obs://kr1-api-object-storage.nhncloudservice.com/v1/AUTH_{tenant_id}/{container_name}/{test_data_download_path}"
}
],
tag_list=[ # maximum 10
{
"tagKey": "tag1",
"tagValue": "test_tag_1",
},
{
"tagKey": "tag2",
"tagValue": "test_tag_2",
}
],
use_log=True,
# wait=False
)
# Create Training By Algorithm (Image Classification)
training = easymaker.Training().run(
experiment_id=experiment.experiment_id, # Optional if already set in init
training_name="image_classification",
training_description="easymaker sdk test training",
train_image_name="Image Classification CPU",
train_instance_name="m2.c4m8",
distributed_node_count=1,
algorithm_name="Image Classification",
data_storage_size=300, # minimum size: 300 GB
hyperparameter_list=[
{
"parameterName": "input_size",
"parameterValue": "28",
},
{
"parameterName": "learning_rate",
"parameterValue": "0.1",
},
{
"parameterName": "per_device_train_batch_size",
"parameterValue": "16",
},
{
"parameterName": "per_device_eval_batch_size",
"parameterValue": "16",
},
{
"parameterName": "num_train_epochs",
"parameterValue": "3",
}
],
timeout_hours=1,
model_upload_uri="obs://api-storage.cloud.toast.com/v1/AUTH_{tenant_id}/{container_name}/{model_upload_path}",
check_point_upload_uri="obs://api-storage.cloud.toast.com/v1/AUTH_{tenant_id}/{container_name}/{checkpoint_upload_path}",
dataset_list=[
{
"datasetName": "train",
"dataUri": "obs://api-storage.cloud.toast.com/v1/AUTH_{tenant_id}/{container_name}/{train_data_download_path}"
},
{
"datasetName": "validation",
"dataUri": "obs://api-storage.cloud.toast.com/v1/AUTH_{tenant_id}/{container_name}/{validation_data_download_path}"
}
],
tag_list=[ # maximum 10
{
"tagKey": "tag1",
"tagValue": "test_tag_1",
},
{
"tagKey": "tag2",
"tagValue": "test_tag_2",
}
],
use_torchrun=True,
nproc_per_node=1,
use_log=True,
# wait=False
)
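# use_torchrun=True launches the entry point through PyTorch torchrun;
# nproc_per_node sets the number of worker processes started on each node.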
# Delete Training
training.delete()
easymaker.Training("training_id").delete()
easymaker.training.delete("training_id")
# Create Hyperparameter Tuning
hyperparameter_tuning = easymaker.HyperparameterTuning().run(
experiment_id=experiment.experiment_id, # Optional if already set in init
hyperparameter_tuning_name="hyperparameter_tuning_name",
hyperparameter_tuning_description="hyperparameter_tuning_description",
image_name="Ubuntu 22.04 CPU TensorFlow Training",
instance_name="m2.c8m16",
distributed_node_count=1,
parallel_trial_count=1,
data_storage_size=300,
source_dir_uri="obs://kr1-api-object-storage.nhncloudservice.com/v1/AUTH_{tenant_id}/{container_name}/{source_download_path}",
entry_point="training_start.py",
hyperparameter_spec_list=[
{
"hyperparameterName": "learning_rate",
"hyperparameterTypeCode": easymaker.HYPERPARAMETER_TYPE_CODE.DOUBLE,
"hyperparameterMinValue": "0.01",
"hyperparameterMaxValue": "0.05",
},
{
"hyperparameterName": "epochs",
"hyperparameterTypeCode": easymaker.HYPERPARAMETER_TYPE_CODE.INT,
"hyperparameterMinValue": "100",
"hyperparameterMaxValue": "1000",
}
],
timeout_hours=10,
model_upload_uri="obs://kr1-api-object-storage.nhncloudservice.com/v1/AUTH_{tenant_id}/{container_name}/{model_upload_path}",
check_point_input_uri="obs://kr1-api-object-storage.nhncloudservice.com/v1/AUTH_{tenant_id}/{container_name}/{checkpoint_input_path}",
check_point_upload_uri="obs://kr1-api-object-storage.nhncloudservice.com/v1/AUTH_{tenant_id}/{container_name}/{checkpoint_upload_path}",
dataset_list=[
{
"datasetName": "train",
"dataUri": "obs://kr1-api-object-storage.nhncloudservice.com/v1/AUTH_{tenant_id}/{container_name}/{train_data_download_path}"
},
{
"datasetName": "test",
"dataUri": "obs://kr1-api-object-storage.nhncloudservice.com/v1/AUTH_{tenant_id}/{container_name}/{test_data_download_path}"
}
],
metric_list=["val_loss", "loss", "accuracy"],
metric_regex=r"([\w|-]+)\s*:\s*([+-]?\d*(\.\d+)?([Ee][+-]?\d+)?)",
objective_metric_name="val_loss",
objective_type_code=easymaker.OBJECTIVE_TYPE_CODE.MINIMIZE,
objective_goal=0.01,
max_failed_trial_count=3,
max_trial_count=10,
tuning_strategy_name=easymaker.TUNING_STRATEGY.BAYESIAN_OPTIMIZATION,
tuning_strategy_random_state=1,
early_stopping_algorithm=easymaker.EARLY_STOPPING_ALGORITHM.MEDIAN,
early_stopping_min_trial_count=3,
early_stopping_start_step=4,
tag_list=[
{
"tagKey": "tag1",
"tagValue": "test_tag_1",
}
],
use_log=True,
# wait=False,
)
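# metric_list names the metrics to collect for tuning, and metric_regex is the pattern
# used to parse them from the training output: the expression above captures
# "metric_name : numeric_value" pairs.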
# Create Hyperparameter Tuning By Algorithm (Image Classification)
hyperparameter_tuning = easymaker.HyperparameterTuning().run(
experiment_id=experiment.experiment_id, # Optional if already set in init
hyperparameter_tuning_name="hyperparameter_tuning_name",
algorithm_name="Image Classification",
image_name="Image Classification CPU",
instance_name="m2.c2m4",
distributed_node_count=1,
parallel_trial_count=1,
data_storage_size=300,
hyperparameter_spec_list=[
{
"hyperparameterName": "input_size",
"hyperparameterTypeCode": easymaker.HYPERPARAMETER_TYPE_CODE.DOUBLE,
"hyperparameterMinValue": "4",
"hyperparameterMaxValue": "6",
"hyperparameterStep": "1",
},
{
"hyperparameterName": "learning_rate",
"hyperparameterTypeCode": easymaker.HYPERPARAMETER_TYPE_CODE.DOUBLE,
"hyperparameterMinValue": "0",
"hyperparameterMaxValue": "0.5",
"hyperparameterStep": "0.1",
},
{
"hyperparameterName": "per_device_train_batch_size",
"hyperparameterTypeCode": easymaker.HYPERPARAMETER_TYPE_CODE.INT,
"hyperparameterMinValue": "2",
"hyperparameterMaxValue": "5",
"hyperparameterStep": "1",
},
{
"hyperparameterName": "per_device_eval_batch_size",
"hyperparameterTypeCode": easymaker.HYPERPARAMETER_TYPE_CODE.INT,
"hyperparameterMinValue": "2",
"hyperparameterMaxValue": "5",
"hyperparameterStep": "1",
},
{
"hyperparameterName": "num_train_epochs",
"hyperparameterTypeCode": easymaker.HYPERPARAMETER_TYPE_CODE.INT,
"hyperparameterMinValue": "2",
"hyperparameterMaxValue": "5",
"hyperparameterStep": "1",
},
{
"hyperparameterName": "save_steps",
"hyperparameterTypeCode": easymaker.HYPERPARAMETER_TYPE_CODE.INT,
"hyperparameterMinValue": "1",
"hyperparameterMaxValue": "1",
"hyperparameterStep": "1",
},
{
"hyperparameterName": "logging_steps",
"hyperparameterTypeCode": easymaker.HYPERPARAMETER_TYPE_CODE.INT,
"hyperparameterMinValue": "1",
"hyperparameterMaxValue": "1",
"hyperparameterStep": "1",
}
],
timeout_hours=1,
model_upload_uri="obs://api-storage.cloud.toast.com/v1/AUTH_{tenant_id}/{container_name}/{model_upload_path}",
check_point_upload_uri="obs://api-storage.cloud.toast.com/v1/AUTH_{tenant_id}/{container_name}/{checkpoint_upload_path}",
dataset_list=[
{
"datasetName": "train",
"dataUri": "obs://api-storage.cloud.toast.com/v1/AUTH_{tenant_id}/{container_name}/{train_data_download_path}"
},
{
"datasetName": "validation",
"dataUri": "obs://api-storage.cloud.toast.com/v1/AUTH_{tenant_id}/{container_name}/{validation_data_download_path}"
}
],
tag_list=[
{
"tagKey": "tag1",
"tagValue": "test_tag_1",
}
],
objective_goal=1,
max_failed_trial_count=2,
max_trial_count=3,
tuning_strategy_name=easymaker.TUNING_STRATEGY.GRID,
tuning_strategy_random_state=1,
early_stopping_algorithm=easymaker.EARLY_STOPPING_ALGORITHM.MEDIAN,
early_stopping_min_trial_count=3,
early_stopping_start_step=4,
use_log=True,
use_torchrun=True,
nproc_per_node=1,
# wait=False,
)
# Delete Hyperparameter Tuning
hyperparameter_tuning.delete()
easymaker.HyperparameterTuning("hyperparameter_tuning_id").delete()
easymaker.hyperparameter_tuning.delete("hyperparameter_tuning_id")
# Create Model
model = easymaker.Model().create(
training_id=training.training_id, # or hyperparameter_tuning_id=hyperparameter_tuning.hyperparameter_tuning_id,
model_name="model_name",
model_description="model_description",
)
model2 = easymaker.Model().create_by_model_upload_uri(
model_type_code=easymaker.TENSORFLOW,
model_upload_uri="obs://kr1-api-object-storage.nhncloudservice.com/v1/AUTH_{tenant_id}/{container_name}/{model_upload_path}",
model_name="model_name",
model_description="model_description",
)
# Create Hugging Face Model
model3 = easymaker.Model().create_hugging_face_model(
model_name="model_name",
parameter_list=[
{
"parameterName": "model_id",
"parameterValue": "google-bert/bert-base-uncased",
}
],
model_description="model_description",
tag_list=[],
)
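# For Hugging Face models, the model_id parameter refers to a repository id on the
# Hugging Face Hub (here google-bert/bert-base-uncased).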
# Delete Model
model.delete()
easymaker.Model("model_id").delete()
easymaker.model.delete("model_id")
# Create Endpoint
endpoint = easymaker.Endpoint().create(
model_id=model.model_id,
endpoint_name="endpoint_name",
endpoint_description="endpoint_description",
endpoint_instance_name="c2.c16m16",
endpoint_model_resource_list=[
{
"modelId": model.model_id,
"resourceOptionDetail": {
"cpu": "15",
"memory": "15Gi",
},
"description": "test",
}
],
use_log=True,
# wait=False,
# autoscaler_enable=True, # default False
# autoscaler_min_node_count=1,
# autoscaler_max_node_count=10,
# autoscaler_scale_down_enable=True,
# autoscaler_scale_down_util_threshold=50,
# autoscaler_scale_down_unneeded_time=10,
# autoscaler_scale_down_delay_after_add=10,
)
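# The commented-out autoscaler_* options only apply when autoscaler_enable=True;
# autoscaling is disabled by default.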
# Delete Endpoint
endpoint.delete()
easymaker.Endpoint("endpoint_id").delete()
easymaker.endpoint.delete_endpoint("endpoint_id")
# Create Endpoint Stage
endpoint_stage = easymaker.EndpointStage().create(
stage_name="stage01",
endpoint_id=endpoint.endpoint_id,
stage_description="test endpoint",
endpoint_instance_name="c2.c16m16",
endpoint_model_resource_list=[
{
"modelId": model.model_id,
"resourceOptionDetail": {
"cpu": "15",
"memory": "15Gi",
},
"description": "test",
}
],
endpoint_instance_count=1,
# wait=False,
# autoscaler_enable=True, # default False
# autoscaler_min_node_count=1,
# autoscaler_max_node_count=10,
# autoscaler_scale_down_enable=True,
# autoscaler_scale_down_util_threshold=50,
# autoscaler_scale_down_unneeded_time=10,
# autoscaler_scale_down_delay_after_add=10,
)
# Delete Endpoint Stage
endpoint_stage.delete()
easymaker.EndpointStage("endpoint_stage_id").delete()
easymaker.endpoint.delete_endpoint_stage("endpoint_stage_id")
# Inference
easymaker.EndpointStage("endpoint_stage_id").predict(model_id=model_id, json={"instances": [[6.8, 2.8, 4.8, 1.4]]})
# Delete Endpoint Model
easymaker.EndpointModel("endpoint_model_id").delete()
easymaker.endpoint.delete_endpoint_model("endpoint_model_id")
# Batch Inference
batch_inference = easymaker.BatchInference().run(
batch_inference_name="test_batch_2",
instance_count=1,
timeout_hours=720,
instance_name="m2.c2m4",
model_name="model_create_test3",
#
pod_count=1,
batch_size=120,
inference_timeout_seconds=120,
#
input_data_uri="obs://kr1-api-object-storage.nhncloudservice.com/v1/AUTH_{tenant_id}/{container_name}/{input_data_path}",
input_data_type="JSONL",
include_glob_pattern=None,
exclude_glob_pattern=None,
output_upload_uri="obs://kr1-api-object-storage.nhncloudservice.com/v1/AUTH_{tenant_id}/{container_name}/{output_upload_path}",
#
data_storage_size=300,
#
description=None,
tag_list=None,
use_log=False,
wait=True,
)
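# wait=True blocks until the batch inference finishes; pass wait=False to return
# immediately, as with the other resources above.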
# Delete Batch Inference
batch_inference.delete()
easymaker.BatchInference("batch_inference_id").delete()
easymaker.batch_inference.delete("batch_inference_id")
# Upload Pipeline
pipeline = easymaker.Pipeline().upload(
pipeline_name="pipeline_01",
pipeline_spec_manifest_path="./sample-pipeline.yaml",
description="test",
tag_list=[],
)
# Delete Pipeline
pipeline.delete()
easymaker.Pipeline("pipeline_id").delete()
easymaker.pipeline.delete("pipeline_id")
# Create Pipeline Run
pipeline_run = easymaker.PipelineRun().create(
pipeline_run_name="pipeline_run",
description="test",
pipeline_id=pipeline.pipeline_id,
experiment_id=experiment.experiment_id, # Optional if already set in init
instance_name="m2.c2m4",
instance_count=1,
boot_storage_size=50,
)
# Delete Pipeline Run
easymaker.PipelineRun("pipeline_run_id").delete()
easymaker.pipeline_run.delete("pipeline_run_id")
# Create Pipeline Recurring Run
pipeline_recurring_run = easymaker.PipelineRecurringRun().create(
pipeline_recurring_run_name="pipeline_recurring_run",
description="test",
pipeline_id=pipeline.pipeline_id,
experiment_id=experiment.experiment_id, # Optional if already set in init
instance_name="m2.c2m4",
instance_count=1,
boot_storage_size=50,
schedule_periodic_minutes=60,
max_concurrency_count=1,
)
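# schedule_periodic_minutes=60 schedules a run every hour; max_concurrency_count
# limits how many scheduled runs may execute at the same time.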
# Stop Pipeline Recurring Run
pipeline_recurring_run.stop()
# Start Pipeline Recurring Run
pipeline_recurring_run.start()
# Delete Pipeline Recurring Run
pipeline_recurring_run.delete()
easymaker.PipelineRecurringRun("pipeline_recurring_run_id").delete()
easymaker.pipeline_recurring_run.delete("pipeline_recurring_run_id")
# Log (Log & Crash)
easymaker_logger = easymaker.logger(logncrash_appkey="log&crash_product_app_key")
easymaker_logger.send("test log meassage") # Output to stdout & send log to log&crash product
easymaker_logger.send(log_message="log meassage",
log_level="INFO", # default: INFO
project_version="1.0.0", # default: 1.0.0
parameters={"serviceType": "EasyMakerSample"}) # Add custom parameters
# NHN Cloud ObjectStorage download, upload, delete
easymaker_obs = easymaker.ObjectStorage(
easymaker_region="kr1",
username="username@nhn.com",
password="nhn_object_storage_api_password"
)
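# username and password are NHN Cloud Object Storage API credentials; obs:// URIs
# follow the pattern obs://{storage_endpoint}/v1/AUTH_{tenant_id}/{container_name}/{path}.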
easymaker_obs.download(
easymaker_obs_uri="obs://api-storage.cloud.toast.com/v1/AUTH_{tenant_id}/{container_name}/{source_dir}",
download_dir_path="./source_dir",
)
easymaker_obs.upload(
easymaker_obs_uri="obs://api-storage.cloud.toast.com/v1/AUTH_{tenant_id}/{container_name}/{upload_path}",
local_path="./local_dir",
)
easymaker_obs.delete(
easymaker_obs_uri="obs://api-storage.cloud.toast.com/v1/AUTH_{tenant_id}/{container_name}/{object_path}",
# file_extension=".json", # Delete files with specific extensions
)
```
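
Taken together, the calls above form one workflow: initialize the SDK, create an experiment, run a training, register the result as a model, and serve it on an endpoint. The sketch below condenses that flow using only calls shown in this document; all keys, names, and Object Storage URIs are placeholders, and in practice the fuller parameter sets from the examples above (datasets, hyperparameters, resource options) may be required.

```python
# Condensed workflow sketch (placeholders throughout); see the full examples above
# for the complete parameter sets.
import easymaker

easymaker.init(appkey="EASYMAKER_APPKEY", region="kr1", secret_key="EASYMAKER_SECRET_KEY")

experiment = easymaker.Experiment().create(experiment_name="demo_experiment")

training = easymaker.Training().run(
    experiment_id=experiment.experiment_id,
    training_name="demo_training",
    train_image_name="Ubuntu 22.04 CPU TensorFlow Training",
    train_instance_name="m2.c4m8",
    distributed_node_count=1,
    data_storage_size=300,
    source_dir_uri="obs://kr1-api-object-storage.nhncloudservice.com/v1/AUTH_{tenant_id}/{container_name}/{source_path}",
    entry_point="training_start.py",
    model_upload_uri="obs://kr1-api-object-storage.nhncloudservice.com/v1/AUTH_{tenant_id}/{container_name}/{model_path}",
    timeout_hours=1,
)

# Register the trained artifact as a model and serve it on an endpoint.
model = easymaker.Model().create(training_id=training.training_id, model_name="demo_model")

endpoint = easymaker.Endpoint().create(
    model_id=model.model_id,
    endpoint_name="demo_endpoint",
    endpoint_instance_name="c2.c16m16",
    endpoint_model_resource_list=[
        {"modelId": model.model_id, "resourceOptionDetail": {"cpu": "1", "memory": "2Gi"}},
    ],
)
```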
## CLI Command
- instance type list : `python -m easymaker -instance --region kr1 --appkey EM_APPKEY --secret_key EM_SECRET_KEY`
- image list : `python -m easymaker -image --region kr1 --appkey EM_APPKEY --secret_key EM_SECRET_KEY`
- experiment list : `python -m easymaker -experiment --region kr1 --appkey EM_APPKEY --secret_key EM_SECRET_KEY`
- training list : `python -m easymaker -training --region kr1 --appkey EM_APPKEY --secret_key EM_SECRET_KEY`
- hyperparameter tuning list : `python -m easymaker -tuning --region kr1 --appkey EM_APPKEY --secret_key EM_SECRET_KEY`
- model list : `python -m easymaker -model --region kr1 --appkey EM_APPKEY --secret_key EM_SECRET_KEY`
- endpoint list : `python -m easymaker -endpoint --region kr1 --appkey EM_APPKEY --secret_key EM_SECRET_KEY`