Skip to content

Commit c3744f2

Browse files
author
Pravali Uppugunduri
committed
fix: Update all model server prepare.py to use plain SHA-256
Remove generate_secret_key import and usage from TorchServe, MMS, TF Serving, and SMD prepare functions. Switch compute_hash calls from HMAC-SHA256 to plain SHA-256 (no secret_key parameter). This is required because generate_secret_key was removed from check_integrity.py in the previous commit. Without this change, all model server imports fail with ImportError.
1 parent fc535b4 commit c3744f2

31 files changed

Lines changed: 1943 additions & 1815 deletions

sagemaker-serve/src/sagemaker/serve/model_builder_utils.py

Lines changed: 455 additions & 447 deletions
Large diffs are not rendered by default.

sagemaker-serve/src/sagemaker/serve/model_server/multi_model_server/prepare.py

Lines changed: 1 addition & 5 deletions
Original file line numberDiff line numberDiff line change
@@ -26,7 +26,6 @@
2626
from sagemaker.serve.spec.inference_spec import InferenceSpec
2727
from sagemaker.serve.detector.dependency_manager import capture_dependencies
2828
from sagemaker.serve.validations.check_integrity import (
29-
generate_secret_key,
3029
compute_hash,
3130
)
3231
from sagemaker.core.remote_function.core.serialization import _MetaData
@@ -119,11 +118,8 @@ def prepare_for_mms(
119118

120119
capture_dependencies(dependencies=dependencies, work_dir=code_dir)
121120

122-
secret_key = generate_secret_key()
123121
with open(str(code_dir.joinpath("serve.pkl")), "rb") as f:
124122
buffer = f.read()
125-
hash_value = compute_hash(buffer=buffer, secret_key=secret_key)
123+
hash_value = compute_hash(buffer=buffer)
126124
with open(str(code_dir.joinpath("metadata.json")), "wb") as metadata:
127125
metadata.write(_MetaData(hash_value).to_json())
128-
129-
return secret_key

sagemaker-serve/src/sagemaker/serve/model_server/multi_model_server/server.py

Lines changed: 1 addition & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -35,7 +35,6 @@ def _start_serving(
3535
env = {
3636
"SAGEMAKER_SUBMIT_DIRECTORY": "/opt/ml/model/code",
3737
"SAGEMAKER_PROGRAM": "inference.py",
38-
"SAGEMAKER_SERVE_SECRET_KEY": secret_key,
3938
"LOCAL_PYTHON": platform.python_version(),
4039
}
4140
if env_vars:
@@ -47,7 +46,7 @@ def _start_serving(
4746
image,
4847
"serve",
4948
# network_mode="host",
50-
ports={'8080/tcp': 8080},
49+
ports={"8080/tcp": 8080},
5150
detach=True,
5251
auto_remove=True,
5352
volumes={
@@ -131,7 +130,6 @@ def _upload_server_artifacts(
131130
env_vars = {
132131
"SAGEMAKER_SUBMIT_DIRECTORY": "/opt/ml/model/code",
133132
"SAGEMAKER_PROGRAM": "inference.py",
134-
"SAGEMAKER_SERVE_SECRET_KEY": secret_key,
135133
"SAGEMAKER_REGION": sagemaker_session.boto_region_name,
136134
"SAGEMAKER_CONTAINER_LOG_LEVEL": "10",
137135
"LOCAL_PYTHON": platform.python_version(),

sagemaker-serve/src/sagemaker/serve/model_server/smd/prepare.py

Lines changed: 1 addition & 5 deletions
Original file line numberDiff line numberDiff line change
@@ -12,7 +12,6 @@
1212
from sagemaker.serve.spec.inference_spec import InferenceSpec
1313
from sagemaker.serve.detector.dependency_manager import capture_dependencies
1414
from sagemaker.serve.validations.check_integrity import (
15-
generate_secret_key,
1615
compute_hash,
1716
)
1817
from sagemaker.core.remote_function.core.serialization import _MetaData
@@ -64,11 +63,8 @@ def prepare_for_smd(
6463

6564
capture_dependencies(dependencies=dependencies, work_dir=code_dir)
6665

67-
secret_key = generate_secret_key()
6866
with open(str(code_dir.joinpath("serve.pkl")), "rb") as f:
6967
buffer = f.read()
70-
hash_value = compute_hash(buffer=buffer, secret_key=secret_key)
68+
hash_value = compute_hash(buffer=buffer)
7169
with open(str(code_dir.joinpath("metadata.json")), "wb") as metadata:
7270
metadata.write(_MetaData(hash_value).to_json())
73-
74-
return secret_key

sagemaker-serve/src/sagemaker/serve/model_server/smd/server.py

Lines changed: 0 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -53,7 +53,6 @@ def _upload_smd_artifacts(
5353
"SAGEMAKER_INFERENCE_CODE_DIRECTORY": "/opt/ml/model/code",
5454
"SAGEMAKER_INFERENCE_CODE": "inference.handler",
5555
"SAGEMAKER_REGION": sagemaker_session.boto_region_name,
56-
"SAGEMAKER_SERVE_SECRET_KEY": secret_key,
5756
"LOCAL_PYTHON": platform.python_version(),
5857
}
5958
return s3_upload_path, env_vars

sagemaker-serve/src/sagemaker/serve/model_server/tei/server.py

Lines changed: 0 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -38,8 +38,6 @@ def _start_tei_serving(
3838
secret_key: Secret key to use for authentication
3939
env_vars: Environment variables to set
4040
"""
41-
if env_vars and secret_key:
42-
env_vars["SAGEMAKER_SERVE_SECRET_KEY"] = secret_key
4341

4442
self.container = client.containers.run(
4543
image,

sagemaker-serve/src/sagemaker/serve/model_server/tensorflow_serving/prepare.py

Lines changed: 2 additions & 6 deletions
Original file line numberDiff line numberDiff line change
@@ -11,7 +11,6 @@
1111
)
1212
from sagemaker.serve.detector.dependency_manager import capture_dependencies
1313
from sagemaker.serve.validations.check_integrity import (
14-
generate_secret_key,
1514
compute_hash,
1615
)
1716
from sagemaker.core.remote_function.core.serialization import _MetaData
@@ -56,12 +55,9 @@ def prepare_for_tf_serving(
5655
if not mlflow_saved_model_dir:
5756
raise ValueError("SavedModel is not found for Tensorflow or Keras flavor.")
5857
_move_contents(src_dir=mlflow_saved_model_dir, dest_dir=saved_model_bundle_dir)
59-
60-
secret_key = generate_secret_key()
58+
6159
with open(str(code_dir.joinpath("serve.pkl")), "rb") as f:
6260
buffer = f.read()
63-
hash_value = compute_hash(buffer=buffer, secret_key=secret_key)
61+
hash_value = compute_hash(buffer=buffer)
6462
with open(str(code_dir.joinpath("metadata.json")), "wb") as metadata:
6563
metadata.write(_MetaData(hash_value).to_json())
66-
67-
return secret_key

sagemaker-serve/src/sagemaker/serve/model_server/tensorflow_serving/server.py

Lines changed: 1 addition & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -37,7 +37,7 @@ def _start_tensorflow_serving(
3737
detach=True,
3838
auto_remove=False, # Temporarily disabled to see crash logs
3939
# network_mode="host",
40-
ports={'8501/tcp': 8501},
40+
ports={"8501/tcp": 8501},
4141
volumes={
4242
Path(model_path): {
4343
"bind": "/opt/ml/model",
@@ -47,7 +47,6 @@ def _start_tensorflow_serving(
4747
environment={
4848
"SAGEMAKER_SUBMIT_DIRECTORY": "/opt/ml/model/code",
4949
"SAGEMAKER_PROGRAM": "inference.py",
50-
"SAGEMAKER_SERVE_SECRET_KEY": secret_key,
5150
"LOCAL_PYTHON": platform.python_version(),
5251
**env_vars,
5352
},
@@ -124,7 +123,6 @@ def _upload_tensorflow_serving_artifacts(
124123
"SAGEMAKER_PROGRAM": "inference.py",
125124
"SAGEMAKER_REGION": sagemaker_session.boto_region_name,
126125
"SAGEMAKER_CONTAINER_LOG_LEVEL": "10",
127-
"SAGEMAKER_SERVE_SECRET_KEY": secret_key,
128126
"LOCAL_PYTHON": platform.python_version(),
129127
}
130128
return s3_upload_path, env_vars

sagemaker-serve/src/sagemaker/serve/model_server/torchserve/prepare.py

Lines changed: 4 additions & 6 deletions
Original file line numberDiff line numberDiff line change
@@ -13,7 +13,6 @@
1313
from sagemaker.serve.spec.inference_spec import InferenceSpec
1414
from sagemaker.serve.detector.dependency_manager import capture_dependencies
1515
from sagemaker.serve.validations.check_integrity import (
16-
generate_secret_key,
1716
compute_hash,
1817
)
1918
from sagemaker.serve.validations.check_image_uri import is_1p_image_uri
@@ -56,7 +55,9 @@ def prepare_for_torchserve(
5655
# https://github.com/aws/sagemaker-python-sdk/issues/4288
5756
if is_1p_image_uri(image_uri=image_uri) and "xgboost" in image_uri:
5857
shutil.copy2(Path(__file__).parent.joinpath("xgboost_inference.py"), code_dir)
59-
os.rename(str(code_dir.joinpath("xgboost_inference.py")), str(code_dir.joinpath("inference.py")))
58+
os.rename(
59+
str(code_dir.joinpath("xgboost_inference.py")), str(code_dir.joinpath("inference.py"))
60+
)
6061
else:
6162
shutil.copy2(Path(__file__).parent.joinpath("inference.py"), code_dir)
6263

@@ -67,11 +68,8 @@ def prepare_for_torchserve(
6768

6869
capture_dependencies(dependencies=dependencies, work_dir=code_dir)
6970

70-
secret_key = generate_secret_key()
7171
with open(str(code_dir.joinpath("serve.pkl")), "rb") as f:
7272
buffer = f.read()
73-
hash_value = compute_hash(buffer=buffer, secret_key=secret_key)
73+
hash_value = compute_hash(buffer=buffer)
7474
with open(str(code_dir.joinpath("metadata.json")), "wb") as metadata:
7575
metadata.write(_MetaData(hash_value).to_json())
76-
77-
return secret_key

sagemaker-serve/src/sagemaker/serve/model_server/torchserve/server.py

Lines changed: 1 addition & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -29,7 +29,7 @@ def _start_torch_serve(
2929
detach=True,
3030
auto_remove=True,
3131
# network_mode="host",
32-
ports={'8080/tcp': 8080},
32+
ports={"8080/tcp": 8080},
3333
volumes={
3434
Path(model_path): {
3535
"bind": "/opt/ml/model",
@@ -39,7 +39,6 @@ def _start_torch_serve(
3939
environment={
4040
"SAGEMAKER_SUBMIT_DIRECTORY": "/opt/ml/model/code",
4141
"SAGEMAKER_PROGRAM": "inference.py",
42-
"SAGEMAKER_SERVE_SECRET_KEY": secret_key,
4342
"LOCAL_PYTHON": platform.python_version(),
4443
**env_vars,
4544
},
@@ -103,7 +102,6 @@ def _upload_torchserve_artifacts(
103102
"SAGEMAKER_PROGRAM": "inference.py",
104103
"SAGEMAKER_REGION": sagemaker_session.boto_region_name,
105104
"SAGEMAKER_CONTAINER_LOG_LEVEL": "10",
106-
"SAGEMAKER_SERVE_SECRET_KEY": secret_key,
107105
"LOCAL_PYTHON": platform.python_version(),
108106
}
109107
return s3_upload_path, env_vars

0 commit comments

Comments (0)