# NHN AI EasyMaker SDK
```
# Initialize EasyMaker SDK
import easymaker

easymaker.init(
    appkey='EASYMAKER_APPKEY',
    region='kr1',
    secret_key='EASYMAKER_SECRET_KEY',
)

# NHN Cloud Object Storage upload/download
easymaker.download(
    easymaker_obs_uri='obs://api-storage.cloud.toast.com/v1/AUTH_{tenant_id}/{container_name}/{source_dir}',
    download_dir_path='./source_dir',
    username='username@nhn.com',
    password='nhn_object_storage_api_password'
)
easymaker.upload(
    easymaker_obs_uri='obs://api-storage.cloud.toast.com/v1/AUTH_{tenant_id}/{container_name}/{upload_path}',
    src_dir_path='./local_dir',
    username='username@nhn.com',
    password='nhn_object_storage_api_password'
)

# Create Experiment
experiment_id = easymaker.Experiment().create(
    experiment_name='experiment_name',
    experiment_description='experiment_description',
    # wait=False
)

# Create Training
training_id = easymaker.Training().run(
    experiment_id=experiment_id,
    training_name='training_name',
    training_description='training_description',
    train_image_name='Ubuntu 18.04 CPU TensorFlow Training',
    train_instance_name='m2.c4m8',
    train_instance_count=1,
    data_storage_size=300,  # minimum size: 300 GB
    source_dir_uri='obs://api-storage.cloud.toast.com/v1/AUTH_{tenant_id}/{container_name}/{source_download_path}',
    entry_point='training_start.py',
    hyperparameter_list=[
        {
            "hyperparameterKey": "epochs",
            "hyperparameterValue": "10",
        },
        {
            "hyperparameterKey": "batch-size",
            "hyperparameterValue": "30",
        }
    ],
    timeout_hours=100,  # 1~720
    model_upload_uri='obs://api-storage.cloud.toast.com/v1/AUTH_{tenant_id}/{container_name}/{model_upload_path}',
    check_point_upload_uri='obs://api-storage.cloud.toast.com/v1/AUTH_{tenant_id}/{container_name}/{checkpoint_upload_path}',
    dataset_list=[
        {
            "datasetName": "train",
            "dataUri": "obs://api-storage.cloud.toast.com/v1/AUTH_{tenant_id}/{container_name}/{train_data_download_path}"
        },
        {
            "datasetName": "test",
            "dataUri": "obs://api-storage.cloud.toast.com/v1/AUTH_{tenant_id}/{container_name}/{test_data_download_path}"
        }
    ],
    tag_list=[  # maximum 10
        {
            "tagKey": "tag_num",
            "tagValue": "test_tag_1",
        },
        {
            "tagKey": "tag2",
            "tagValue": "test_tag_2",
        }
    ],
    use_log=True,
    # wait=False
)

# Create Model
model_id = easymaker.Model().create(
    training_id=training_id,
    model_name='model_name',
    model_description='model_description',
)
model_id2 = easymaker.Model().create_by_model_uri(
    framework_code=easymaker.TENSORFLOW,
    model_uri='obs://api-storage.cloud.toast.com/v1/AUTH_{tenant_id}/{container_name}/{model_upload_path}',
    model_name='model_name',
    model_description='model_description',
)

# Create Endpoint
endpoint = easymaker.Endpoint()
endpoint_id = endpoint.create(
    model_id=model_id,
    endpoint_name='endpoint_name',
    endpoint_description='endpoint_description',
    endpoint_instance_name='c2.c16m16',
    apigw_resource_uri='/api-path',
    endpoint_instance_count=1,
    use_log=True,
    # wait=False,
    # autoscaler_enable=True,  # default False
    # autoscaler_min_node_count=1,
    # autoscaler_max_node_count=10,
    # autoscaler_scale_down_enable=True,
    # autoscaler_scale_down_util_threshold=50,
    # autoscaler_scale_down_unneeded_time=10,
    # autoscaler_scale_down_delay_after_add=10,
)

# Create Endpoint Stage
stage_id = endpoint.create_stage(
    model_id=model_id,
    stage_name='stage01',
    stage_description='test endpoint',
    endpoint_instance_name='c2.c16m16',
    apigw_resource_uri='/test-api',
    endpoint_instance_count=1,
    # wait=False,
    # autoscaler_enable=True,  # default False
    # autoscaler_min_node_count=1,
    # autoscaler_max_node_count=10,
    # autoscaler_scale_down_enable=True,
    # autoscaler_scale_down_util_threshold=50,
    # autoscaler_scale_down_unneeded_time=10,
    # autoscaler_scale_down_delay_after_add=10,
)

# Get an endpoint that already exists
endpoint = easymaker.Endpoint(endpoint_id)

# Get the list of stages on the endpoint
endpoint_stage_info_list = endpoint.get_endpoint_stage_info_list()

# Inference
endpoint.predict(json={'instances': [[6.8, 2.8, 4.8, 1.4]]})
endpoint.predict(endpoint_stage_info=endpoint_stage_info_list[1],  # If endpoint_stage_info is not set, the default stage is used
                 json={'instances': [[6.8, 2.8, 4.8, 1.4]]})

# Log (Log & Crash)
easymaker_logger = easymaker.logger(logncrash_appkey='log&crash_product_app_key')
easymaker_logger.send('test log message')  # Output to stdout & send the log to the Log & Crash product
easymaker_logger.send(log_message='log message',
                      log_level='INFO',  # default: INFO
                      project_version='1.0.0',  # default: 1.0.0
                      parameters={'serviceType': 'EasyMakerSample'})  # Add custom parameters
```
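
A training job fetches `source_dir_uri` onto the training instance and runs the script named by `entry_point` (here `training_start.py`). The sketch below is a hypothetical, minimal entry point, not part of the SDK; it assumes the values in `hyperparameter_list` are handed to the script as `--epochs` / `--batch-size` command-line arguments. Check the EasyMaker guide for how your training image actually delivers hyperparameters and dataset paths.

```
# training_start.py -- hypothetical minimal entry point (assumption: hyperparameters
# arrive as command-line flags matching the hyperparameterKey names).
import argparse


def main():
    parser = argparse.ArgumentParser()
    parser.add_argument('--epochs', type=int, default=10)
    parser.add_argument('--batch-size', type=int, default=30)
    args = parser.parse_args()

    # Load the datasets from dataset_list, build and fit the model using
    # args.epochs / args.batch_size, then save the trained model so EasyMaker
    # can upload it to model_upload_uri.
    print(f'training for {args.epochs} epochs with batch size {args.batch_size}')


if __name__ == '__main__':
    main()
```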
## CLI Commands
- instance type list: `python -m easymaker -instance --region kr1 --appkey EM_APPKEY --secret_key EM_SECRET_KEY`
- image list: `python -m easymaker -image --region kr1 --appkey EM_APPKEY --secret_key EM_SECRET_KEY`
- experiment list: `python -m easymaker -experiment --region kr1 --appkey EM_APPKEY --secret_key EM_SECRET_KEY`
- training list: `python -m easymaker -training --region kr1 --appkey EM_APPKEY --secret_key EM_SECRET_KEY`
- model list: `python -m easymaker -model --region kr1 --appkey EM_APPKEY --secret_key EM_SECRET_KEY`
- endpoint list: `python -m easymaker -endpoint --region kr1 --appkey EM_APPKEY --secret_key EM_SECRET_KEY`
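
Because a stage exposes the model through NHN Cloud API Gateway (see `apigw_resource_uri` above), a deployed model can also be called with any HTTP client instead of `endpoint.predict`. A minimal sketch with `requests`, assuming a placeholder stage URL (the real address is shown in the EasyMaker console and in the stage information) and the same `instances` payload used earlier:

```
import requests

# Placeholder URL -- replace with the API Gateway address of your stage plus the resource path.
url = 'https://{apigw_domain}/{stage_resource_path}'

response = requests.post(url, json={'instances': [[6.8, 2.8, 4.8, 1.4]]})
response.raise_for_status()
print(response.json())
```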