# Apache Software License 2.0
#
# Copyright (c) ZenML GmbH 2023. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#

import pandas as pd
from typing_extensions import Annotated

from zenml import get_step_context, step
from zenml.logger import get_logger

logger = get_logger(__name__)


@step
def inference_predict(
    dataset_inf: pd.DataFrame,
) -> Annotated[pd.Series, "predictions"]:
"""Predictions step.
This is an example of a predictions step that takes the data in and returns
predicted values.
This step is parameterized, which allows you to configure the step
independently of the step code, before running it in a pipeline.
In this example, the step can be configured to use different input data.
See the documentation for more information:
https://docs.zenml.io/user-guide/advanced-guide/configure-steps-pipelines
Args:
dataset_inf: The inference dataset.
Returns:
The predictions as pandas series
"""
    ### ADD YOUR OWN CODE HERE - THIS IS JUST AN EXAMPLE ###
    # Load the trained model artifact from the active model version and run
    # prediction in memory on the incoming inference dataset.
    model_version = get_step_context().model_version
    predictor = model_version.load_artifact("model")
    predictions = predictor.predict(dataset_inf)
    predictions = pd.Series(predictions, name="predicted")
    ### YOUR CODE ENDS HERE ###
    return predictions
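

# --- Usage sketch (illustration only, not part of the original template) ----
# A minimal example of how this step could be wired into a batch inference
# pipeline. The `inference_data_loader` step below is a hypothetical
# placeholder; in a real project the inference data would come from a
# dedicated data loading step. Note that a model version must be configured
# on the pipeline (for example via the pipeline's model configuration or a
# YAML config file) so that `get_step_context().model_version` resolves
# inside `inference_predict`.
from zenml import pipeline


@step
def inference_data_loader() -> pd.DataFrame:
    """Hypothetical step that provides the data to predict on."""
    return pd.DataFrame({"feature_a": [1.0, 2.0], "feature_b": [3.0, 4.0]})


@pipeline
def batch_inference():
    """Sketch of a pipeline that runs `inference_predict` on loaded data."""
    dataset_inf = inference_data_loader()
    inference_predict(dataset_inf=dataset_inf)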