from abc import abstractmethod
from time import perf_counter
from typing import Any, List, Tuple, Union

import numpy as np

# NOTE: the import paths below are assumed from the surrounding package layout;
# adjust them to match the actual module structure.
from inference.core.cache.model_artifacts import clear_cache, initialise_cache
from inference.core.entities.requests.inference import InferenceRequest
from inference.core.entities.responses.inference import InferenceResponse
from inference.core.models.base import Model, PreprocessReturnMetadata
from inference.core.utils.roboflow import get_model_id_chunks


class ModelStub(Model):
    """Stub model that skips weight loading and returns dummy predictions while
    still exercising the cache, metrics, and request/response machinery."""

    def __init__(self, model_id: str, api_key: str):
        """Store identifiers and API key, reset metrics, and initialise the cache."""
        super().__init__()
        self.model_id = model_id
        self.api_key = api_key
        self.dataset_id, self.version_id = get_model_id_chunks(model_id=model_id)
        self.metrics = {"num_inferences": 0, "avg_inference_time": 0.0}
        initialise_cache(model_id=model_id)

    def infer_from_request(
        self, request: InferenceRequest
    ) -> Union[InferenceResponse, List[InferenceResponse]]:
        """Run a stub inference and record the wall-clock time on the response."""
        t1 = perf_counter()
        stub_prediction = self.infer(**request.dict())
        response = self.make_response(request=request, prediction=stub_prediction)
        response.time = perf_counter() - t1
        return response

    def infer(self, *args, **kwargs) -> Any:
        """Walk the preprocess -> predict -> postprocess pipeline on dummy data."""
        _ = self.preprocess()
        dummy_prediction = self.predict()
        return self.postprocess(dummy_prediction)

    def preprocess(
        self, *args, **kwargs
    ) -> Tuple[np.ndarray, PreprocessReturnMetadata]:
        """Return a blank 128x128 RGB image and empty preprocessing metadata."""
        return np.zeros((128, 128, 3), dtype=np.uint8), {}  # type: ignore

    def predict(self, *args, **kwargs) -> Tuple[np.ndarray, ...]:
        """Return a dummy prediction tensor of shape (1, 8)."""
        return (np.zeros((1, 8)),)
def postprocess(self, predictions: Tuple[np.ndarray, ...], *args, **kwargs) -> Any:
return {
"is_stub": True,
"model_id": self.model_id,
}

    def clear_cache(self, delete_from_disk: bool = True) -> None:
        """Clear the cache directory for this model.

        Args:
            delete_from_disk (bool, optional): Whether to delete cached files from disk. Defaults to True.
        """
        clear_cache(model_id=self.model_id, delete_from_disk=delete_from_disk)

    @abstractmethod
    def make_response(
        self, request: InferenceRequest, prediction: dict, **kwargs
    ) -> Union[InferenceResponse, List[InferenceResponse]]:
        """Build the concrete response type for the stub; left to subclasses."""
        pass
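

# Illustrative sketch (not part of the original file): a concrete stub only has
# to implement `make_response`. `ExampleStubResponse` is a hypothetical response
# type defined inline so the sketch is self-contained; whether extra fields can
# be declared this way depends on how `InferenceResponse` is actually defined,
# so treat this purely as the shape of the override, not as the package's API.
class ExampleStubResponse(InferenceResponse):
    is_stub: bool = True
    model_id: str = ""


class ExampleModelStub(ModelStub):
    def make_response(
        self, request: InferenceRequest, prediction: dict, **kwargs
    ) -> InferenceResponse:
        # Lift the fields produced by `postprocess` into the response object.
        return ExampleStubResponse(
            is_stub=prediction["is_stub"],
            model_id=prediction["model_id"],
        )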