Bases: ModelManagerDecorator
Logger decorator; it logs what's going on inside the manager.
Source code in inference/core/managers/decorators/logger.py
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
class WithLogger(ModelManagerDecorator):
    """Logger decorator; logs what's going on inside the wrapped model manager.

    Every call is delegated unchanged to the decorated manager; this class
    only adds ``logger.info`` lines around the delegation.
    """

    def add_model(
        self, model_id: str, api_key: str, model_id_alias: Optional[str] = None
    ):
        """Adds a model to the manager and logs the action.

        Args:
            model_id (str): The identifier of the model.
            api_key (str): The API key used when loading the model.
            model_id_alias (Optional[str]): Optional alias under which the
                model is registered. Defaults to None.

        Returns:
            The result of the add_model method from the superclass.
        """
        # NOTE: logged before delegation, so the message appears even if
        # the superclass fails to add the model.
        logger.info(f"π€ {model_id} added.")
        return super().add_model(model_id, api_key, model_id_alias=model_id_alias)

    async def infer_from_request(
        self, model_id: str, request: InferenceRequest, **kwargs
    ) -> InferenceResponse:
        """Processes a complete inference request and logs both the request and response.

        Args:
            model_id (str): The identifier of the model.
            request (InferenceRequest): The request to process.

        Returns:
            InferenceResponse: The response from the inference.
        """
        logger.info(f"π₯ [{model_id}] request={request}.")
        res = await super().infer_from_request(model_id, request, **kwargs)
        logger.info(f"π₯ [{model_id}] res={res}.")
        return res

    def infer_from_request_sync(
        self, model_id: str, request: InferenceRequest, **kwargs
    ) -> InferenceResponse:
        """Processes a complete inference request and logs both the request and response.

        Synchronous counterpart of ``infer_from_request``.

        Args:
            model_id (str): The identifier of the model.
            request (InferenceRequest): The request to process.

        Returns:
            InferenceResponse: The response from the inference.
        """
        logger.info(f"π₯ [{model_id}] request={request}.")
        res = super().infer_from_request_sync(model_id, request, **kwargs)
        logger.info(f"π₯ [{model_id}] res={res}.")
        return res

    def remove(self, model_id: str) -> Model:
        """Removes a model from the manager and logs the action.

        Args:
            model_id (str): The identifier of the model to remove.

        Returns:
            Model: The removed model.
        """
        # Removal is logged after the superclass call, so the message only
        # appears when the removal actually succeeded.
        res = super().remove(model_id)
        logger.info(f"β removed {model_id}")
        return res
add_model(model_id, api_key, model_id_alias=None)
Adds a model to the manager and logs the action.
Parameters:
Name |
Type |
Description |
Default |
model_id
|
str
|
The identifier of the model.
|
required
|
api_key
|
str
|
The API key used when loading the model.
|
required
|
Returns:
Type |
Description |
|
The result of the add_model method from the superclass.
|
Source code in inference/core/managers/decorators/logger.py
13
14
15
16
17
18
19
20
21
22
23
24
25
def add_model(
    self, model_id: str, api_key: str, model_id_alias: Optional[str] = None
):
    """Adds a model to the manager and logs the action.

    Args:
        model_id (str): The identifier of the model.
        api_key (str): The API key used when loading the model.
        model_id_alias (Optional[str]): Optional alias under which the
            model is registered. Defaults to None.

    Returns:
        The result of the add_model method from the superclass.
    """
    # NOTE: logged before delegation, so the message appears even if the
    # superclass fails to add the model.
    logger.info(f"π€ {model_id} added.")
    return super().add_model(model_id, api_key, model_id_alias=model_id_alias)
infer_from_request(model_id, request, **kwargs)
async
Processes a complete inference request and logs both the request and response.
Parameters:
Name |
Type |
Description |
Default |
model_id
|
str
|
The identifier of the model.
|
required
|
request
|
InferenceRequest
|
|
required
|
Returns:
Name | Type |
Description |
InferenceResponse |
InferenceResponse
|
The response from the inference.
|
Source code in inference/core/managers/decorators/logger.py
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
async def infer_from_request(
    self, model_id: str, request: InferenceRequest, **kwargs
) -> InferenceResponse:
    """Run a full inference request through the wrapped manager,
    logging both the incoming request and the outgoing response.

    Args:
        model_id (str): The identifier of the model.
        request (InferenceRequest): The request to process.

    Returns:
        InferenceResponse: The response from the inference.
    """
    logger.info(f"π₯ [{model_id}] request={request}.")
    response = await super().infer_from_request(model_id, request, **kwargs)
    logger.info(f"π₯ [{model_id}] res={response}.")
    return response
infer_from_request_sync(model_id, request, **kwargs)
Processes a complete inference request and logs both the request and response.
Parameters:
Name |
Type |
Description |
Default |
model_id
|
str
|
The identifier of the model.
|
required
|
request
|
InferenceRequest
|
|
required
|
Returns:
Name | Type |
Description |
InferenceResponse |
InferenceResponse
|
The response from the inference.
|
Source code in inference/core/managers/decorators/logger.py
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
def infer_from_request_sync(
    self, model_id: str, request: InferenceRequest, **kwargs
) -> InferenceResponse:
    """Run a full inference request through the wrapped manager
    synchronously, logging both the request and the response.

    Args:
        model_id (str): The identifier of the model.
        request (InferenceRequest): The request to process.

    Returns:
        InferenceResponse: The response from the inference.
    """
    logger.info(f"π₯ [{model_id}] request={request}.")
    response = super().infer_from_request_sync(model_id, request, **kwargs)
    logger.info(f"π₯ [{model_id}] res={response}.")
    return response
remove(model_id)
Removes a model from the manager and logs the action.
Parameters:
Name |
Type |
Description |
Default |
model_id
|
str
|
The identifier of the model to remove.
|
required
|
Returns:
Name | Type |
Description |
Model |
Model
|
|
Source code in inference/core/managers/decorators/logger.py
62
63
64
65
66
67
68
69
70
71
72
def remove(self, model_id: str) -> Model:
    """Remove a model from the manager, logging the removal.

    Args:
        model_id (str): The identifier of the model to remove.

    Returns:
        Model: The removed model.
    """
    # Log only after the superclass call, i.e. once removal has succeeded.
    removed = super().remove(model_id)
    logger.info(f"β removed {model_id}")
    return removed