Skip to content

fix MME 'NoneType' object error #141

New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Open
wants to merge 28 commits into
base: master
Choose a base branch
from
14 changes: 13 additions & 1 deletion src/sagemaker_mxnet_serving_container/handler_service.py
Original file line number Diff line number Diff line change
Expand Up @@ -13,6 +13,7 @@
from __future__ import absolute_import

import importlib
import logging
import os

import mxnet as mx
Expand All @@ -25,6 +26,8 @@
from sagemaker_mxnet_serving_container.mxnet_module_transformer import MXNetModuleTransformer

PYTHON_PATH_ENV = "PYTHONPATH"
logging.basicConfig(level=logging.INFO)
log = logging.getLogger(__name__)


class HandlerService(DefaultHandlerService):
Expand All @@ -44,7 +47,16 @@ def __init__(self):

@staticmethod
def _user_module_transformer(model_dir=environment.model_dir):
user_module = importlib.import_module(environment.Environment().module_name)
module_name = environment.Environment().module_name
inference_script = model_dir + '/code' + '/{}.py'.format(module_name)
if os.path.exists(inference_script):
spec = importlib.util.spec_from_file_location(module_name, inference_script)
user_module = importlib.util.module_from_spec(spec)
log.info(user_module)
else:
log.info("Please include /code/{}.py in model_dir".format(module_name))
raise ValueError('Invalid inference_script path: Could not find '
'valid inference_script path {} in model artifact'.format(inference_script))

if hasattr(user_module, 'transform_fn'):
return Transformer(default_inference_handler=DefaultMXNetInferenceHandler())
Expand Down
2 changes: 1 addition & 1 deletion test/container/1.7.0/Dockerfile.dlc.cpu
Original file line number Diff line number Diff line change
@@ -1,5 +1,5 @@
ARG region
FROM 763104351884.dkr.ecr.$region.amazonaws.com/mxnet-inference:1.6.0-cpu-py2
FROM 763104351884.dkr.ecr.$region.amazonaws.com/mxnet-inference:1.6.0-cpu-py3

COPY dist/sagemaker_mxnet_inference-*.tar.gz /sagemaker_mxnet_inference.tar.gz
RUN pip install --upgrade --no-cache-dir /sagemaker_mxnet_inference.tar.gz && \
Expand Down
2 changes: 1 addition & 1 deletion test/integration/local/test_hosting.py
Original file line number Diff line number Diff line change
Expand Up @@ -21,7 +21,7 @@
from utils import local_mode_utils

HOSTING_RESOURCE_PATH = os.path.join(RESOURCE_PATH, 'dummy_hosting')
MODEL_PATH = os.path.join(HOSTING_RESOURCE_PATH, 'code')
MODEL_PATH = os.path.join(HOSTING_RESOURCE_PATH, 'model')
SCRIPT_PATH = os.path.join(HOSTING_RESOURCE_PATH, 'code', 'dummy_hosting_module.py')


Expand Down
Binary file added test/resources/dummy_hosting/model.tar.gz
Binary file not shown.
25 changes: 25 additions & 0 deletions test/resources/dummy_hosting/model/code/dummy_hosting_module.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,25 @@
# Copyright 2019 Amazon.com, Inc. or its affiliates. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License").
# You may not use this file except in compliance with the License.
# A copy of the License is located at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# or in the "license" file accompanying this file. This file is distributed
# on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
# express or implied. See the License for the specific language governing
# permissions and limitations under the License.


class DummyModel(object):
    """Minimal stand-in model for hosting tests.

    Its prediction is the identity function, so tests can verify the
    request/response plumbing without any real inference logic.
    """

    def predict(self, data):
        # Echo the payload back unchanged.
        return data


def model_fn(model_dir):
    """Return a fresh ``DummyModel`` instance.

    The *model_dir* argument is required by the SageMaker hosting
    contract but is intentionally unused by this dummy module.
    """
    model = DummyModel()
    return model


def transform_fn(model, data, input_content_type, output_content_type):
    """Echo the request payload back, labelled as JSON.

    *model*, *input_content_type*, and *output_content_type* are part of
    the SageMaker transform interface but are ignored here; the response
    content type is always ``application/json``.
    """
    response_content_type = "application/json"
    return data, response_content_type
Binary file not shown.
1 change: 1 addition & 0 deletions test/resources/dummy_hosting/model/model-shapes.json
Original file line number Diff line number Diff line change
@@ -0,0 +1 @@
[{"name": "data", "shape": [100, 1, 28, 28]}]
111 changes: 111 additions & 0 deletions test/resources/dummy_hosting/model/model-symbol.json
Original file line number Diff line number Diff line change
@@ -0,0 +1,111 @@
{
"nodes": [
{
"op": "null",
"name": "data",
"inputs": []
},
{
"op": "Flatten",
"name": "flatten0",
"inputs": [[0, 0, 0]]
},
{
"op": "null",
"name": "fullyconnected0_weight",
"attrs": {"num_hidden": "128"},
"inputs": []
},
{
"op": "null",
"name": "fullyconnected0_bias",
"attrs": {"num_hidden": "128"},
"inputs": []
},
{
"op": "FullyConnected",
"name": "fullyconnected0",
"attrs": {"num_hidden": "128"},
"inputs": [[1, 0, 0], [2, 0, 0], [3, 0, 0]]
},
{
"op": "Activation",
"name": "activation0",
"attrs": {"act_type": "relu"},
"inputs": [[4, 0, 0]]
},
{
"op": "null",
"name": "fullyconnected1_weight",
"attrs": {"num_hidden": "64"},
"inputs": []
},
{
"op": "null",
"name": "fullyconnected1_bias",
"attrs": {"num_hidden": "64"},
"inputs": []
},
{
"op": "FullyConnected",
"name": "fullyconnected1",
"attrs": {"num_hidden": "64"},
"inputs": [[5, 0, 0], [6, 0, 0], [7, 0, 0]]
},
{
"op": "Activation",
"name": "activation1",
"attrs": {"act_type": "relu"},
"inputs": [[8, 0, 0]]
},
{
"op": "null",
"name": "fullyconnected2_weight",
"attrs": {"num_hidden": "10"},
"inputs": []
},
{
"op": "null",
"name": "fullyconnected2_bias",
"attrs": {"num_hidden": "10"},
"inputs": []
},
{
"op": "FullyConnected",
"name": "fullyconnected2",
"attrs": {"num_hidden": "10"},
"inputs": [[9, 0, 0], [10, 0, 0], [11, 0, 0]]
},
{
"op": "null",
"name": "softmax_label",
"inputs": []
},
{
"op": "SoftmaxOutput",
"name": "softmax",
"inputs": [[12, 0, 0], [13, 0, 0]]
}
],
"arg_nodes": [0, 2, 3, 6, 7, 10, 11, 13],
"node_row_ptr": [
0,
1,
2,
3,
4,
5,
6,
7,
8,
9,
10,
11,
12,
13,
14,
15
],
"heads": [[14, 0, 0]],
"attrs": {"mxnet_version": ["int", 10800]}
}
41 changes: 28 additions & 13 deletions test/unit/test_handler_service.py
Original file line number Diff line number Diff line change
Expand Up @@ -51,12 +51,13 @@ def __init__(self):


@patch('sagemaker_inference.environment.Environment')
@patch('importlib.import_module', return_value=UserModuleTransformFn())
def test_user_module_transform_fn(import_module, env):
@patch('importlib.util.module_from_spec', return_value=UserModuleTransformFn())
@patch('os.path.exists', return_value=True)
def test_user_module_transform_fn(path_exists, module_from_spec, env):
env.return_value.module_name = MODULE_NAME
transformer = HandlerService._user_module_transformer()

import_module.assert_called_once_with(MODULE_NAME)
module_from_spec.assert_called_once()
assert isinstance(transformer._default_inference_handler, DefaultInferenceHandler)
assert isinstance(transformer, Transformer)

Expand All @@ -67,39 +68,53 @@ def __init__(self):


@patch('sagemaker_inference.environment.Environment')
@patch('importlib.import_module', return_value=UserModuleModelFn())
def test_user_module_mxnet_module_transformer(import_module, env):
@patch('importlib.util.module_from_spec', return_value=UserModuleModelFn())
@patch('os.path.exists', return_value=True)
def test_user_module_mxnet_module_transformer(path_exists, module_from_spec, env):
env.return_value.module_name = MODULE_NAME
import_module.return_value.model_fn.return_value = mx.module.BaseModule()
module_from_spec.return_value.model_fn.return_value = mx.module.BaseModule()

transformer = HandlerService._user_module_transformer()

import_module.assert_called_once_with(MODULE_NAME)
module_from_spec.assert_called_once()
assert isinstance(transformer, MXNetModuleTransformer)


@patch('sagemaker_inference.environment.Environment')
@patch('sagemaker_mxnet_serving_container.default_inference_handler.DefaultMXNetInferenceHandler.default_model_fn')
@patch('importlib.import_module', return_value=object())
def test_default_inference_handler_mxnet_gluon_transformer(import_module, model_fn, env):
@patch('importlib.util.module_from_spec', return_value=object())
@patch('os.path.exists', return_value=True)
def test_default_inference_handler_mxnet_gluon_transformer(path_exists, module_from_spec, model_fn, env):
env.return_value.module_name = MODULE_NAME
model_fn.return_value = mx.gluon.block.Block()

transformer = HandlerService._user_module_transformer()

import_module.assert_called_once_with(MODULE_NAME)
module_from_spec.assert_called_once()
model_fn.assert_called_once_with(environment.model_dir)
assert isinstance(transformer, Transformer)
assert isinstance(transformer._default_inference_handler, DefaultGluonBlockInferenceHandler)


@patch('sagemaker_inference.environment.Environment')
@patch('importlib.import_module', return_value=UserModuleModelFn())
def test_user_module_unsupported(import_module, env):
@patch('importlib.util.module_from_spec', return_value=UserModuleModelFn())
@patch('os.path.exists', return_value=True)
def test_user_module_unsupported(path_exists, module_from_spec, env):
env.return_value.module_name = MODULE_NAME

with pytest.raises(ValueError) as e:
HandlerService._user_module_transformer()

import_module.assert_called_once_with(MODULE_NAME)
module_from_spec.assert_called_once()
e.match('Unsupported model type')


@patch('sagemaker_inference.environment.Environment')
@patch('importlib.util.module_from_spec', return_value=UserModuleModelFn())
def test_user_module_invalid_path(module_from_spec, env):
    """A missing inference script under model_dir/code raises ValueError."""
    env.return_value.module_name = MODULE_NAME

    # os.path.exists is deliberately NOT patched here, so the script
    # path lookup fails and the handler rejects the model artifact.
    with pytest.raises(ValueError) as exc_info:
        HandlerService._user_module_transformer()

    exc_info.match('Invalid inference_script path')