Skip to content

inference_cli API Reference

lib

Internal adapters for Docker container management, benchmarking, cloud deployment, and inference execution.

inference_cli.lib.container_adapter

Functions

terminate_running_containers

terminate_running_containers(
    containers, interactive_mode=True
)

Parameters:

Name Type Description Default
containers List[Container]

List of containers to handle

required
interactive_mode bool

Flag to determine if user prompt should decide on container termination

True

Returns:

Type Description
bool

Boolean value that informs if there are containers that have not received SIGKILL as a result of the procedure.

Source code in inference_cli/lib/container_adapter.py
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
def terminate_running_containers(
    containers: List[Container], interactive_mode: bool = True
) -> bool:
    """
    Stop the running containers from the given list, optionally asking the user first.

    Args:
        containers (List[Container]): List of containers to handle
        interactive_mode (bool): Flag to determine if user prompt should decide on container termination

    Returns: boolean value that informs if there are containers that have not received SIGKILL
        as a result of procedure.
    """
    # Only containers that are actually running are candidates for termination.
    running = [
        container for container in containers
        if is_container_running(container=container)
    ]
    if interactive_mode:
        # Let the user decide, container by container, which ones to stop.
        selected = [
            container for container in running
            if ask_user_to_kill_container(container)
        ]
    else:
        selected = running
    kill_containers(containers=selected)
    # True means at least one running container was spared (not killed).
    return len(selected) < len(running)

lib/roboflow_cloud/data_staging

Data staging operations for uploading and managing data in the Roboflow cloud.

inference_cli.lib.roboflow_cloud.data_staging.api_operations

Functions

create_images_batch_from_cloud_storage

create_images_batch_from_cloud_storage(
    bucket_path,
    batch_id,
    api_key,
    batch_name=None,
    ingest_id=None,
    notifications_url=None,
    notification_categories=None,
    presign_expiration_seconds=86400,
)

Create image batch from cloud storage by generating presigned URLs.

Parameters:

Name Type Description Default
bucket_path str

Cloud path with optional glob pattern (e.g., 's3://bucket/**/*.jpg')

required
batch_id str

Batch identifier

required
api_key str

Roboflow API key

required
presign_expiration_seconds int

Presigned URL expiration time (default: 24 hours)

86400

Internally calls trigger_images_references_ingest with generated presigned URLs.

Source code in inference_cli/lib/roboflow_cloud/data_staging/api_operations.py
2152
2153
2154
2155
2156
2157
2158
2159
2160
2161
2162
2163
2164
2165
2166
2167
2168
2169
2170
2171
2172
2173
2174
2175
2176
2177
2178
2179
2180
2181
2182
2183
2184
2185
2186
2187
2188
2189
2190
2191
2192
2193
2194
2195
2196
2197
2198
2199
2200
2201
2202
2203
2204
2205
2206
2207
2208
2209
2210
2211
2212
2213
2214
2215
2216
2217
2218
2219
2220
2221
2222
2223
2224
2225
2226
2227
2228
2229
2230
2231
2232
2233
2234
2235
2236
2237
2238
2239
2240
2241
2242
2243
2244
2245
2246
2247
2248
2249
2250
def create_images_batch_from_cloud_storage(
    bucket_path: str,
    batch_id: str,
    api_key: str,
    batch_name: Optional[str] = None,
    ingest_id: Optional[str] = None,
    notifications_url: Optional[str] = None,
    notification_categories: Optional[List[str]] = None,
    presign_expiration_seconds: int = 86400,
) -> None:
    """
    Create image batch from cloud storage by generating presigned URLs.

    Args:
        bucket_path: Cloud path with optional glob pattern (e.g., 's3://bucket/**/*.jpg')
        batch_id: Batch identifier
        api_key: Roboflow API key
        batch_name: Optional human-readable name for the batch
        ingest_id: Optional pre-generated ingest identifier passed through to the ingest request
        notifications_url: Optional webhook URL that will receive ingest status updates
        notification_categories: Optional categories of notifications to be sent
        presign_expiration_seconds: Presigned URL expiration time (default: 24 hours)

    Raises:
        ImportError: When optional cloud-storage dependencies are not installed.
        ValueError: When no image files match the given path/pattern.

    Internally calls trigger_images_references_ingest with generated presigned URLs.
    """
    try:
        import fsspec
    except ImportError:
        raise ImportError(
            "Cloud storage support requires additional dependencies. "
            "Install with: pip install 'inference-cli[cloud-storage]'"
        )

    base_path, glob_pattern = _parse_bucket_path(bucket_path)
    protocol = base_path.split("://")[0]
    fs = fsspec.filesystem(protocol, **_get_fs_kwargs(protocol))

    # Stream and filter image files with progress
    image_files_generator = _list_and_filter_files_streaming(
        fs, base_path, glob_pattern, IMAGES_EXTENSIONS
    )

    # Generate presigned URLs in parallel (consumes generator and shows progress)
    references = _generate_presigned_urls_parallel(
        fs, image_files_generator, base_path, presign_expiration_seconds
    )

    if len(references) == 0:
        pattern_desc = glob_pattern if glob_pattern else "all image files"
        raise ValueError(
            f"No image files found matching pattern: {pattern_desc} in {base_path}\n"
            f"Supported extensions: {', '.join(IMAGES_EXTENSIONS)}\n"
            f"Note: If you're getting connection errors, check your cloud credentials and network access."
        )

    workspace = get_workspace(api_key=api_key)

    # Split into batches if needed. The chunk-split announcement is made once,
    # here, based on the actual number of parts produced (previously the same
    # information was printed twice - once from a manual ceil-division and once
    # from len(ingest_parts)).
    ingest_parts = list(
        create_batches(
            sequence=references, batch_size=MAX_IMAGE_REFERENCES_IN_INGEST_REQUEST
        )
    )
    if len(ingest_parts) > 1:
        print(
            f"Your ingest exceeds {MAX_IMAGE_REFERENCES_IN_INGEST_REQUEST} files - we split the ingest "
            f"into {len(ingest_parts)} chunks."
        )

    # Trigger ingest for each batch
    for batch_references in ingest_parts:
        response = trigger_images_references_ingest(
            workspace=workspace,
            batch_id=batch_id,
            references=batch_references,
            api_key=api_key,
            ingest_id=ingest_id,
            batch_name=batch_name,
            notifications_url=notifications_url,
            notification_categories=notification_categories,
        )
        print(f"Ingest triggered. Ingest ID: {response.ingest_id}")

    if notifications_url:
        print(f"Monitor updates that will be sent to: {notifications_url}")
        print(
            f"You can also use `inference rf-cloud data-staging list-ingest-details --batch-id {batch_id}` command "
            f"to check progress."
        )
    else:
        print(
            f"Use `inference rf-cloud data-staging list-ingest-details --batch-id {batch_id}` "
            "command to watch the ingest progress. If you want automated updates - use `--notifications-url` option "
            "of this command."
        )

create_videos_batch_from_cloud_storage

create_videos_batch_from_cloud_storage(
    bucket_path,
    batch_id,
    api_key,
    batch_name=None,
    ingest_id=None,
    notifications_url=None,
    notification_categories=None,
    presign_expiration_seconds=86400,
)

Create video batch from cloud storage by generating presigned URLs.

Parameters:

Name Type Description Default
bucket_path str

Cloud path with optional glob pattern (e.g., 's3://bucket/**/*.mp4')

required
batch_id str

Batch identifier

required
api_key str

Roboflow API key

required
presign_expiration_seconds int

Presigned URL expiration time (default: 24 hours)

86400

Internally calls trigger_videos_references_ingest with generated presigned URLs.

Source code in inference_cli/lib/roboflow_cloud/data_staging/api_operations.py
2253
2254
2255
2256
2257
2258
2259
2260
2261
2262
2263
2264
2265
2266
2267
2268
2269
2270
2271
2272
2273
2274
2275
2276
2277
2278
2279
2280
2281
2282
2283
2284
2285
2286
2287
2288
2289
2290
2291
2292
2293
2294
2295
2296
2297
2298
2299
2300
2301
2302
2303
2304
2305
2306
2307
2308
2309
2310
2311
2312
2313
2314
2315
2316
2317
2318
2319
2320
2321
2322
2323
2324
2325
2326
2327
2328
2329
2330
2331
2332
2333
2334
2335
2336
def create_videos_batch_from_cloud_storage(
    bucket_path: str,
    batch_id: str,
    api_key: str,
    batch_name: Optional[str] = None,
    ingest_id: Optional[str] = None,
    notifications_url: Optional[str] = None,
    notification_categories: Optional[List[str]] = None,
    presign_expiration_seconds: int = 86400,
) -> None:
    """
    Create video batch from cloud storage by generating presigned URLs.

    Args:
        bucket_path: Cloud path with optional glob pattern (e.g., 's3://bucket/**/*.mp4')
        batch_id: Batch identifier
        api_key: Roboflow API key
        batch_name: Optional human-readable name for the batch
        ingest_id: Optional pre-generated ingest identifier passed through to the ingest request
        notifications_url: Optional webhook URL that will receive ingest status updates
        notification_categories: Optional categories of notifications to be sent
        presign_expiration_seconds: Presigned URL expiration time (default: 24 hours)

    Internally calls trigger_videos_references_ingest with generated presigned URLs.
    """
    # fsspec is an optional dependency - fail with an actionable message if missing.
    try:
        import fsspec
    except ImportError:
        raise ImportError(
            "Cloud storage support requires additional dependencies. "
            "Install with: pip install 'inference-cli[cloud-storage]'"
        )

    base_path, glob_pattern = _parse_bucket_path(bucket_path)
    protocol = base_path.split("://")[0]
    storage_fs = fsspec.filesystem(protocol, **_get_fs_kwargs(protocol))

    # Lazily enumerate matching video files, then presign them in parallel
    # (the parallel step consumes the stream and reports progress).
    matched_files = _list_and_filter_files_streaming(
        storage_fs, base_path, glob_pattern, VIDEOS_EXTENSIONS
    )
    references = _generate_presigned_urls_parallel(
        storage_fs, matched_files, base_path, presign_expiration_seconds
    )

    if not references:
        pattern_desc = glob_pattern if glob_pattern else "all video files"
        raise ValueError(
            f"No video files found matching pattern: {pattern_desc} in {base_path}\n"
            f"Supported extensions: {', '.join(VIDEOS_EXTENSIONS)}\n"
            f"Note: If you're getting connection errors, check your cloud credentials and network access."
        )

    print(f"Found {len(references)} video files")
    # Soft limit only - warn but proceed with the full reference list.
    if len(references) > SUGGESTED_MAX_VIDEOS_IN_BATCH:
        print(
            f"Warning: Found {len(references)} videos. Suggested max is {SUGGESTED_MAX_VIDEOS_IN_BATCH} videos per batch."
        )

    workspace = get_workspace(api_key=api_key)

    # Single ingest request carries every reference (no chunking for videos).
    response = trigger_videos_references_ingest(
        workspace=workspace,
        batch_id=batch_id,
        references=references,
        api_key=api_key,
        ingest_id=ingest_id,
        batch_name=batch_name,
        notifications_url=notifications_url,
        notification_categories=notification_categories,
    )
    print(f"Ingest triggered. Ingest ID: {response.ingest_id}")

    if not notifications_url:
        print(
            f"Use `inference rf-cloud data-staging list-ingest-details --batch-id {batch_id}` "
            "command to watch the ingest progress. If you want automated updates - use `--notifications-url` option "
            "of this command."
        )
    else:
        print(f"Monitor updates that will be sent to: {notifications_url}")
        print(
            f"You can also use `inference rf-cloud data-staging list-ingest-details --batch-id {batch_id}` command "
            f"to check progress."
        )