Appearance
Console REST API
The Telovix Console exposes a versioned REST API. This page documents the supported external /api/v2/ API. Responses use JSON unless noted otherwise, and programmatic access uses HMAC-SHA256 signed requests rather than static bearer tokens.
For Telovix self-hosted deployments the default port is 15483. For Telovix Cloud deployments the Console is served over standard HTTPS on port 443. All examples use https://console.example.com without a port suffix - replace the host with your actual Console URL exactly as it appears in the Portal.
All external automation and integration must use
/api/v2/ endpoints with HMAC-SHA256 signed requests. See API Keys for authentication details.
OpenAPI specification: A machine-readable OpenAPI 3.0 specification for the v2 API is available for download from Settings > API in the Console. Use it to generate client SDKs, validate requests in your IDE, or import the collection into tools such as Postman or Insomnia.
Five-minute quickstart
1 - Create an API key
Open Telovix Console, navigate to Settings - API Keys, and click New key. Choose the scopes you need (at minimum sensors:read) and click Create. The console shows you:
Key ID: a3f8c2d1e9b47f05
HMAC secret: 4a7f2e8c1b9d3f6a0e5c4b2d9f8e7a1c3b6d4e9f2a8c5b1d7e3f0a4c6b9d2e5f
Copy the HMAC secret now - it is shown only once.
2 - Make your first request
bash
KEY_ID="a3f8c2d1e9b47f05"
SECRET_HEX="4a7f2e8c1b9d3f6a0e5c4b2d9f8e7a1c3b6d4e9f2a8c5b1d7e3f0a4c6b9d2e5f"
BASE="https://console.example.com"
# Do NOT name this variable PATH: assigning PATH clobbers the shell's
# executable search path and breaks date/openssl/awk/curl below.
REQ_PATH="/api/v2/sensors"
TS=$(date +%s)
NONCE=$(openssl rand -hex 16)
# SHA-256 of the (empty) request body, hex-encoded.
BH=$(printf '' | openssl dgst -sha256 -hex | awk '{print $2}')
# Signing string: METHOD\nPATH\nTIMESTAMP\nNONCE\nBODY_HASH
SIG=$(printf "GET\n${REQ_PATH}\n${TS}\n${NONCE}\n${BH}" \
| openssl dgst -sha256 -mac HMAC -macopt "hexkey:${SECRET_HEX}" \
| awk '{print $2}')
curl -sf "${BASE}${REQ_PATH}" \
-H "X-Telovix-Key-ID: ${KEY_ID}" \
-H "X-Telovix-Timestamp: ${TS}" \
-H "X-Telovix-Nonce: ${NONCE}" \
-H "X-Telovix-Signature: sha256=${SIG}"
3 - Parse the response
json
{
"data": [
{
"sensor_id": "{sensor_id}",
"node_name": "upf-prod-01",
"declared_role": "upf",
"health_state": "healthy",
"status": "active",
"software_version": "1.5.0",
"last_seen_at": "2026-04-26T14:22:00Z",
"is_contained": false,
"tags": ["site:oslo", "plmn:242-01"],
"k8s_cluster_name": null
}
],
"total": 14,
"limit": 100,
"next_cursor": null
}
When next_cursor is not null, pass it as the after query parameter to fetch the next page.
Base URL
The base URL is the full URL of your Console, including protocol and port if applicable.
| Deployment type | Example base URL |
|---|---|
| Self-hosted (default port) | https://console.your-company.com:15483 |
| Self-hosted (custom TLS on 443) | https://console.your-company.com |
| Telovix Cloud | https://console-<id>.telovix.com (shown in the Portal) |
All code examples use https://console.example.com as a placeholder. Replace it with your actual Console URL.
Authentication
Every /api/v2/ API request requires four headers:
X-Telovix-Key-ID: <your key ID>
X-Telovix-Timestamp: <Unix epoch seconds, integer>
X-Telovix-Nonce: <Unique per-request nonce>
X-Telovix-Signature: sha256=<hex-encoded HMAC-SHA256>
Signing string format
{METHOD}\n{PATH}\n{TIMESTAMP}\n{NONCE}\n{HEX(SHA256(body_bytes))}
METHOD - uppercase HTTP verb: GET, POST, PATCH, PUT, DELETE
PATH - full path including query string, e.g. /api/v2/sensors?role=upf
TIMESTAMP - same integer sent in X-Telovix-Timestamp
NONCE - a fresh per-request nonce, e.g. a UUID or random hex string
HEX(SHA256(body_bytes)) - SHA-256 of raw request body bytes, hex-encoded; for no-body requests hash an empty string (e3b0c44298fc1c149afb...)
Timestamp window - requests are rejected if the timestamp differs from server time by more than 300 seconds.
Nonce reuse - requests are rejected if the same X-Telovix-Nonce is reused for the same API key within the acceptance window.
Raw secret - hmac_secret is returned as hex at key creation. Decode to raw bytes before computing the HMAC.
Signing helpers
python
"""
telovix_auth.py - HMAC signing helper for the Telovix Console API
Requires: pip install requests
"""
import hashlib
import hmac
import time
import uuid
from typing import Optional
import requests
# Credentials from Settings > API Keys. The HMAC secret is returned
# hex-encoded at key creation and must be decoded to raw bytes for signing.
KEY_ID = "a3f8c2d1e9b47f05"
SECRET = bytes.fromhex("4a7f2e8c1b9d3f6a0e5c4b2d9f8e7a1c3b6d4e9f2a8c5b1d7e3f0a4c6b9d2e5f")
BASE_URL = "https://console.example.com"


def sign(method: str, path: str, body: bytes = b"") -> dict[str, str]:
    """Return the four X-Telovix-* auth headers plus Content-Type.

    ``path`` must include the query string, because it is part of the
    signed message: METHOD\\nPATH\\nTIMESTAMP\\nNONCE\\nHEX(SHA256(body)).
    """
    timestamp = str(int(time.time()))
    request_nonce = str(uuid.uuid4())
    body_digest = hashlib.sha256(body).hexdigest()
    signing_string = "\n".join([method, path, timestamp, request_nonce, body_digest])
    signature = hmac.new(SECRET, signing_string.encode(), hashlib.sha256).hexdigest()
    return {
        "X-Telovix-Key-ID": KEY_ID,
        "X-Telovix-Timestamp": timestamp,
        "X-Telovix-Nonce": request_nonce,
        "X-Telovix-Signature": "sha256=" + signature,
        "Content-Type": "application/json",
    }
def api_get(path: str, params: Optional[dict] = None) -> dict:
    """GET a v2 endpoint and return the decoded JSON response.

    The encoded query string is appended to ``path`` before signing,
    because the signed PATH must include it. Raises requests.HTTPError
    on non-2xx responses.
    """
    from urllib.parse import urlencode

    signed_path = path + ("?" + urlencode(params) if params else "")
    resp = requests.get(BASE_URL + signed_path, headers=sign("GET", signed_path))
    resp.raise_for_status()
    return resp.json()
def api_post(path: str, payload: dict) -> dict:
    """POST ``payload`` as JSON to a v2 endpoint and return the decoded response.

    The exact serialized bytes are both signed and sent, so the server's
    body hash matches. Raises requests.HTTPError on non-2xx responses.
    """
    import json

    encoded = json.dumps(payload).encode()
    headers = sign("POST", path, encoded)
    resp = requests.post(BASE_URL + path, headers=headers, data=encoded)
    resp.raise_for_status()
    return resp.json()
if __name__ == "__main__":
sensors = api_get("/api/v2/sensors", {"health": "degraded", "limit": 10})
for s in sensors["data"]:
print(s["node_name"], s["health_state"])
typescript
/**
* telovix-auth.ts - HMAC signing helper for the Telovix Console API
* Runtime: Node.js 18+ (built-in fetch and crypto)
*/
import { createHash, createHmac, randomUUID } from "node:crypto";
const KEY_ID = "a3f8c2d1e9b47f05";
const SECRET = Buffer.from("4a7f2e8c1b9d3f6a0e5c4b2d9f8e7a1c3b6d4e9f2a8c5b1d7e3f0a4c6b9d2e5f", "hex");
const BASE_URL = "https://console.example.com";
/**
 * Build the four X-Telovix-* auth headers plus Content-Type.
 *
 * `path` must include the query string, since it is part of the signed
 * message: METHOD\nPATH\nTIMESTAMP\nNONCE\nHEX(SHA256(body)).
 */
export function sign(
  method: string,
  path: string,
  body: Buffer = Buffer.alloc(0),
): Record<string, string> {
  const timestamp = String(Math.floor(Date.now() / 1000));
  const requestNonce = randomUUID();
  const bodyHash = createHash("sha256").update(body).digest("hex");
  const signature = createHmac("sha256", SECRET)
    .update([method, path, timestamp, requestNonce, bodyHash].join("\n"))
    .digest("hex");
  return {
    "X-Telovix-Key-ID": KEY_ID,
    "X-Telovix-Timestamp": timestamp,
    "X-Telovix-Nonce": requestNonce,
    "X-Telovix-Signature": `sha256=${signature}`,
    "Content-Type": "application/json",
  };
}
/** GET a v2 endpoint; the query string is appended before signing. */
export async function apiGet<T = unknown>(
  path: string,
  params?: Record<string, string | number>,
): Promise<T> {
  let full = path;
  if (params) {
    const search = new URLSearchParams();
    for (const [key, value] of Object.entries(params)) search.set(key, String(value));
    full += `?${search.toString()}`;
  }
  const res = await fetch(BASE_URL + full, { headers: sign("GET", full) });
  if (!res.ok) throw new Error(`${res.status} ${await res.text()}`);
  return res.json() as Promise<T>;
}
/** POST `payload` as JSON; the exact serialized bytes are signed and sent. */
export async function apiPost<T = unknown>(path: string, payload: unknown): Promise<T> {
  const body = Buffer.from(JSON.stringify(payload));
  const headers = sign("POST", path, body);
  const res = await fetch(BASE_URL + path, { method: "POST", headers, body });
  if (!res.ok) throw new Error(`${res.status} ${await res.text()}`);
  return res.json() as Promise<T>;
}
// Quick smoke test
const sensors = await apiGet<{ data: Array<{ node_name: string; health_state: string }> }>(
"/api/v2/sensors",
{ health: "degraded", limit: 10 },
);
for (const s of sensors.data) console.log(s.node_name, s.health_state);
go
// telovix_auth.go - HMAC signing helper for the Telovix Console API
// go run telovix_auth.go (no external dependencies)
package main
import (
"crypto/rand"
"crypto/hmac"
"crypto/sha256"
"encoding/hex"
"encoding/json"
"fmt"
"io"
"net/http"
"net/url"
"strconv"
"strings"
"time"
)
const (
keyID = "a3f8c2d1e9b47f05"
baseURL = "https://console.example.com"
)
var secret []byte
// init decodes the hex-encoded API secret into raw key bytes once at
// startup; HMAC-SHA256 signing needs the raw bytes, not the hex string.
func init() {
	decoded, err := hex.DecodeString("4a7f2e8c1b9d3f6a0e5c4b2d9f8e7a1c3b6d4e9f2a8c5b1d7e3f0a4c6b9d2e5f")
	if err != nil {
		panic("invalid SECRET_HEX: " + err.Error())
	}
	secret = decoded
}
// nonce returns 16 cryptographically random bytes as a 32-character hex
// string, used once per request to defeat replay.
func nonce() string {
	raw := make([]byte, 16)
	if _, err := rand.Read(raw); err != nil {
		// crypto/rand failing is unrecoverable for this example client.
		panic(err)
	}
	return hex.EncodeToString(raw)
}
// sign builds the four X-Telovix-* auth headers plus Content-Type.
// path must include the query string, since it is part of the signed
// message: METHOD\nPATH\nTIMESTAMP\nNONCE\nHEX(SHA256(body)).
func sign(method, path string, body []byte) http.Header {
	ts := strconv.FormatInt(time.Now().Unix(), 10)
	requestNonce := nonce()
	digest := sha256.Sum256(body)
	bodyHash := hex.EncodeToString(digest[:])
	mac := hmac.New(sha256.New, secret)
	// hash.Hash implements io.Writer, so Fprintf streams the signing string.
	fmt.Fprintf(mac, "%s\n%s\n%s\n%s\n%s", method, path, ts, requestNonce, bodyHash)
	sig := hex.EncodeToString(mac.Sum(nil))
	headers := http.Header{}
	headers.Set("X-Telovix-Key-ID", keyID)
	headers.Set("X-Telovix-Timestamp", ts)
	headers.Set("X-Telovix-Nonce", requestNonce)
	headers.Set("X-Telovix-Signature", "sha256="+sig)
	headers.Set("Content-Type", "application/json")
	return headers
}
// apiGet performs a signed GET and returns the raw response body.
// The encoded query string is appended before signing, since the signed
// PATH must include it. Non-2xx responses are returned as errors, matching
// the raise_for_status / res.ok checks in the Python and TypeScript helpers
// (the previous version silently returned error JSON bodies).
func apiGet(path string, params url.Values) ([]byte, error) {
	full := path
	if len(params) > 0 {
		full = path + "?" + params.Encode()
	}
	req, err := http.NewRequest(http.MethodGet, baseURL+full, nil)
	if err != nil {
		return nil, err
	}
	req.Header = sign("GET", full, nil)
	resp, err := http.DefaultClient.Do(req)
	if err != nil {
		return nil, err
	}
	defer resp.Body.Close()
	body, err := io.ReadAll(resp.Body)
	if err != nil {
		return nil, err
	}
	if resp.StatusCode >= 400 {
		return nil, fmt.Errorf("api_get %s: %s: %s", full, resp.Status, body)
	}
	return body, nil
}
// apiPost marshals payload to JSON, performs a signed POST, and returns the
// raw response body. The exact marshaled bytes are both signed and sent so
// the server's body hash matches. Non-2xx responses are returned as errors,
// consistent with apiGet and the other language helpers.
func apiPost(path string, payload any) ([]byte, error) {
	body, err := json.Marshal(payload)
	if err != nil {
		return nil, err
	}
	req, err := http.NewRequest(http.MethodPost, baseURL+path, strings.NewReader(string(body)))
	if err != nil {
		return nil, err
	}
	req.Header = sign("POST", path, body)
	resp, err := http.DefaultClient.Do(req)
	if err != nil {
		return nil, err
	}
	defer resp.Body.Close()
	respBody, err := io.ReadAll(resp.Body)
	if err != nil {
		return nil, err
	}
	if resp.StatusCode >= 400 {
		return nil, fmt.Errorf("api_post %s: %s: %s", path, resp.Status, respBody)
	}
	return respBody, nil
}
func main() {
params := url.Values{"health": {"degraded"}, "limit": {"10"}}
data, err := apiGet("/api/v2/sensors", params)
if err != nil {
panic(err)
}
fmt.Println(string(data))
}
rust
//! telovix_auth.rs - HMAC signing helper for the Telovix Console API
//!
//! Cargo.toml dependencies:
//! reqwest = { version = "0.12", features = ["json", "blocking"] }
//! hmac = "0.12"
//! sha2 = "0.10"
//! hex = "0.4"
//! serde_json = "1"
use hmac::{Hmac, Mac};
use sha2::{Digest, Sha256};
use std::collections::HashMap;
use std::time::{SystemTime, UNIX_EPOCH};
type HmacSha256 = Hmac<Sha256>;
const KEY_ID: &str = "a3f8c2d1e9b47f05";
const SECRET_HEX: &str = "4a7f2e8c1b9d3f6a0e5c4b2d9f8e7a1c3b6d4e9f2a8c5b1d7e3f0a4c6b9d2e5f";
const BASE_URL: &str = "https://console.example.com";
/// Minimal blocking client for the Telovix Console v2 API.
struct TelovixClient {
// Raw HMAC key bytes, hex-decoded once from SECRET_HEX in `new`.
secret: Vec<u8>,
// Blocking HTTP client, reused across requests.
http: reqwest::blocking::Client,
}
impl TelovixClient {
    /// Build a client, decoding the hex HMAC secret once up front.
    fn new() -> Self {
        Self {
            secret: hex::decode(SECRET_HEX).expect("invalid SECRET_HEX"),
            http: reqwest::blocking::Client::new(),
        }
    }

    /// Return the four X-Telovix-* auth headers plus Content-Type.
    ///
    /// `path` must include the query string, since it is part of the signed
    /// message: METHOD\nPATH\nTIMESTAMP\nNONCE\nHEX(SHA256(body)).
    fn auth_headers(&self, method: &str, path: &str, body: &[u8]) -> HashMap<&'static str, String> {
        let ts = SystemTime::now()
            .duration_since(UNIX_EPOCH).unwrap().as_secs().to_string();
        // Nanosecond clock reading used as the per-request nonce; adequate
        // for this single-threaded example (prefer a UUID in production).
        let nonce = SystemTime::now()
            .duration_since(UNIX_EPOCH).unwrap().as_nanos().to_string();
        let body_hash = hex::encode(Sha256::digest(body));
        let message = format!("{}\n{}\n{}\n{}\n{}", method, path, ts, nonce, body_hash);
        let mut mac = HmacSha256::new_from_slice(&self.secret).unwrap();
        mac.update(message.as_bytes());
        let sig = hex::encode(mac.finalize().into_bytes());
        let mut h = HashMap::new();
        h.insert("X-Telovix-Key-ID", KEY_ID.to_string());
        h.insert("X-Telovix-Timestamp", ts);
        h.insert("X-Telovix-Nonce", nonce);
        h.insert("X-Telovix-Signature", format!("sha256={}", sig));
        h.insert("Content-Type", "application/json".to_string());
        h
    }

    /// GET `path` with query `params` and parse the JSON response.
    fn get(&self, path: &str, params: &[(&str, &str)]) -> serde_json::Value {
        let qs: String = if params.is_empty() {
            String::new()
        } else {
            // Fixed: the published snippet had a mangled `&params`
            // (rendered as a pilcrow) here, which did not compile.
            "?".to_string() + &params.iter()
                .map(|(k, v)| format!("{}={}", k, v))
                .collect::<Vec<_>>().join("&")
        };
        let full = format!("{}{}", path, qs);
        let url = format!("{}{}", BASE_URL, full);
        // Sign the path *including* the query string.
        let hdrs = self.auth_headers("GET", &full, &[]);
        let mut req = self.http.get(&url);
        for (k, v) in &hdrs { req = req.header(*k, v); }
        req.send().unwrap().json().unwrap()
    }

    /// POST `payload` as JSON; the exact serialized bytes are signed and sent.
    fn post(&self, path: &str, payload: &serde_json::Value) -> serde_json::Value {
        let body = serde_json::to_vec(payload).unwrap();
        let hdrs = self.auth_headers("POST", path, &body);
        let url = format!("{}{}", BASE_URL, path);
        let mut req = self.http.post(&url).body(body);
        for (k, v) in &hdrs { req = req.header(*k, v); }
        req.send().unwrap().json().unwrap()
    }
}
fn main() {
let client = TelovixClient::new();
let result = client.get("/api/v2/sensors", &[("health", "degraded"), ("limit", "10")]);
println!("{}", serde_json::to_string_pretty(&result).unwrap());
}
bash
#!/usr/bin/env bash
# telovix-auth.sh - Requires: bash, openssl, curl
KEY_ID="a3f8c2d1e9b47f05"
SECRET_HEX="4a7f2e8c1b9d3f6a0e5c4b2d9f8e7a1c3b6d4e9f2a8c5b1d7e3f0a4c6b9d2e5f"
BASE="https://console.example.com"
sign() {
    # Compute the v2 signing string for METHOD PATH [BODY] and populate the
    # global AUTH_HEADERS array with curl -H arguments. Using an array
    # instead of printing a string removes the eval/word-splitting of the
    # previous version, which broke (and was injectable) for paths
    # containing spaces or shell metacharacters.
    local method="$1" path="$2" body="${3:-}"
    local ts nonce body_hash sig
    ts=$(date +%s)
    nonce=$(openssl rand -hex 16)
    body_hash=$(printf '%s' "$body" | openssl dgst -sha256 -hex | awk '{print $2}')
    # Signing string: METHOD\nPATH\nTIMESTAMP\nNONCE\nBODY_HASH
    sig=$(printf '%s\n%s\n%s\n%s\n%s' "$method" "$path" "$ts" "$nonce" "$body_hash" \
        | openssl dgst -sha256 -mac HMAC -macopt "hexkey:${SECRET_HEX}" | awk '{print $2}')
    AUTH_HEADERS=(
        -H "X-Telovix-Key-ID: ${KEY_ID}"
        -H "X-Telovix-Timestamp: ${ts}"
        -H "X-Telovix-Nonce: ${nonce}"
        -H "X-Telovix-Signature: sha256=${sig}"
    )
}
api_get() {
    # GET a signed endpoint; $1 is the full path including query string.
    sign "GET" "$1"
    curl -sf "${AUTH_HEADERS[@]}" -H "Content-Type: application/json" "${BASE}$1"
}
api_post() {
    # POST a JSON body; $1 = path, $2 = raw JSON string (signed and sent as-is).
    sign "POST" "$1" "$2"
    curl -sf -X POST "${AUTH_HEADERS[@]}" \
        -H "Content-Type: application/json" -d "$2" "${BASE}$1"
}
# Quick smoke test
api_get "/api/v2/sensors?health=degraded&limit=10"
Scopes
| Scope | What it permits |
|---|---|
sensors:read | Read sensor list, detail, metrics, health, and energy; list enrollment tokens |
sensors:write | Contain and release sensors; create and revoke enrollment tokens |
events:read | Query runtime events, network connections, and network listeners |
events:stream | Connect to the real-time SSE event stream |
anomalies:read | Read anomaly scores, attack chains, suppression rules, and baselines |
anomalies:write | Patch score verdicts, suppress chains, manage suppression rules, rebuild baselines |
compliance:read | Read compliance posture, controls, evidence, and exports |
alerts:read | List and read alerts; export alerts; list webhooks |
alerts:write | Update alert status, add notes, manage webhooks |
policies:read | Read policy rules and enforcement state |
policies:write | Create, update, delete policy rules; toggle enforcement |
investigations:read | Read investigations |
investigations:write | Create investigations, add events and notes |
audit:read | Read the audit log |
chat:read | Use the AI assistant |
sbom:read | Read SBOM scans, vulnerabilities, and CycloneDX exports |
analytics:read | Legacy alias for events:read. Accepted for backward compatibility. |
Error format
json
{
"error": "sensor_not_found",
"message": "No sensor with ID {sensor_id} exists in this Console."
}
| HTTP status | When |
|---|---|
400 | Invalid request parameters or malformed input |
401 | Missing or invalid HMAC headers, nonce reuse, or timestamp outside the 300-second window |
403 | Valid key but missing required scope |
404 | Resource not found |
409 | Resource state conflict (e.g. sensor already contained) |
422 | Request body failed validation |
429 | Rate limit exceeded |
503 | Upstream dependency temporarily unavailable |
Rate limits
Limits are enforced per API key per 60-second sliding window.
| Key role | Requests per minute |
|---|---|
admin | 1,000 |
operator | 1,000 |
analyst | 300 |
| Unauthenticated (per source IP) | 200 |
When exceeded, the server returns HTTP 429 with Retry-After: <seconds>:
json
{
"error": "rate_limit_exceeded",
"message": "Too many requests. Try again in 12 seconds.",
"retry_after": 12
}
Pagination
All high-volume list endpoints use cursor-based pagination. Each list response includes next_cursor.
- When next_cursor is null, you are on the last page.
- When next_cursor is a string, pass it as the after (or cursor) query parameter on the next request.
The cursor is an opaque base64-encoded token - do not parse it.
python
def list_all_sensors(role: str) -> list:
results, cursor = [], None
while True:
params = {"role": role, "limit": 500}
if cursor:
params["after"] = cursor
page = api_get("/api/v2/sensors", params)
results.extend(page["data"])
cursor = page.get("next_cursor")
if not cursor:
break
return results
typescript
async function listAllSensors(role: string) {
const results: unknown[] = [];
let cursor: string | null = null;
do {
const params: Record<string, string> = { role, limit: "500" };
if (cursor) params.after = cursor;
const page = await apiGet<{ data: unknown[]; next_cursor: string | null }>(
"/api/v2/sensors", params,
);
results.push(...page.data);
cursor = page.next_cursor;
} while (cursor);
return results;
}
go
func listAllSensors(role string) ([]json.RawMessage, error) {
var all []json.RawMessage
cursor := ""
for {
params := url.Values{"role": {role}, "limit": {"500"}}
if cursor != "" {
params.Set("after", cursor)
}
raw, err := apiGet("/api/v2/sensors", params)
if err != nil {
return nil, err
}
var page struct {
Data []json.RawMessage `json:"data"`
NextCursor *string `json:"next_cursor"`
}
if err := json.Unmarshal(raw, &page); err != nil {
return nil, err
}
all = append(all, page.Data...)
if page.NextCursor == nil || *page.NextCursor == "" {
break
}
cursor = *page.NextCursor
}
return all, nil
}
Sensors
The Sensor resource represents a single enrolled Telovix Sensor instance running on a protected node.
List sensors
GET /api/v2/sensors · Scope: sensors:read
Returns a paginated list of all enrolled sensors with health, status, and identification fields.
Request parameters
| Parameter | Type | Required | Default | Description |
|---|---|---|---|---|
limit | integer | No | 100 | Maximum sensors to return. Maximum 500. |
after | string | No | - | Pagination cursor from next_cursor of a previous response. |
role | string | No | - | Filter by declared role. Examples: amf, smf, upf, gnb, cu, du. |
health | string | No | - | Filter by health state: healthy, watch, degraded, critical, offline. |
cluster | string | No | - | Filter by Kubernetes cluster name. Exact match. |
Response schema
| Field | Type | Description |
|---|---|---|
data | SensorSummary[] | Array of sensor objects for this page. |
total | integer | Total sensors matching the current filters. |
limit | integer | Effective page size used for this response. |
next_cursor | string | null | Opaque cursor for the next page. null when this is the last page. |
Each SensorSummary object:
| Field | Type | Description |
|---|---|---|
sensor_id | string | Immutable unique identifier. Format: s_<ulid>. |
node_name | string | Hostname of the protected node as reported by the sensor. |
declared_role | string | Telecom or generic role assigned at enrollment (e.g. upf, worker, gnb). |
health_state | string | Aggregate health state: healthy, watch, degraded, critical, or offline. |
status | string | Enrollment lifecycle state: active, disabled, or revoked. |
software_version | string | Sensor software version (semver). |
last_seen_at | string | RFC 3339 timestamp of the most recent heartbeat received by the Console. |
is_contained | boolean | true when the sensor is under active network containment. |
tags | string[] | Free-form key:value labels (e.g. site:oslo, plmn:242-01). |
k8s_cluster_name | string | null | Kubernetes cluster name for Helm deployments. null for bare metal. |
Response example
json
{
"data": [
{
"sensor_id": "{sensor_id}",
"node_name": "upf-prod-01",
"declared_role": "upf",
"health_state": "healthy",
"status": "active",
"software_version": "1.5.0",
"last_seen_at": "2026-04-26T14:22:00Z",
"is_contained": false,
"tags": ["site:oslo", "plmn:242-01"],
"k8s_cluster_name": null
},
{
"sensor_id": "{sensor_id}",
"node_name": "amf-prod-01",
"declared_role": "amf",
"health_state": "watch",
"status": "active",
"software_version": "1.5.0",
"last_seen_at": "2026-04-26T14:21:30Z",
"is_contained": false,
"tags": ["site:oslo", "plmn:242-01"],
"k8s_cluster_name": "5g-core-prod"
}
],
"total": 14,
"limit": 100,
"next_cursor": null
}Code examples
python
# List all degraded UPF sensors
page = api_get("/api/v2/sensors", {"role": "upf", "health": "degraded", "limit": 50})
for s in page["data"]:
print(s["sensor_id"], s["node_name"], s["health_state"])
# Paginate all sensors
cursor, all_sensors = None, []
while True:
params = {"limit": 500}
if cursor:
params["after"] = cursor
page = api_get("/api/v2/sensors", params)
all_sensors.extend(page["data"])
cursor = page.get("next_cursor")
if not cursor:
break
print(f"Total: {len(all_sensors)}")typescript
// List degraded UPF sensors
const page = await apiGet<{
data: Array<{ sensor_id: string; node_name: string; health_state: string }>;
next_cursor: string | null;
}>("/api/v2/sensors", { role: "upf", health: "degraded", limit: 50 });
for (const s of page.data) console.log(s.sensor_id, s.node_name, s.health_state);
// Paginate all sensors
let cursor: string | null = null;
const allSensors: unknown[] = [];
do {
const p = await apiGet<{ data: unknown[]; next_cursor: string | null }>(
"/api/v2/sensors",
cursor ? { limit: "500", after: cursor } : { limit: "500" },
);
allSensors.push(...p.data);
cursor = p.next_cursor;
} while (cursor);
console.log("Total:", allSensors.length);go
params := url.Values{"role": {"upf"}, "health": {"degraded"}, "limit": {"50"}}
data, err := apiGet("/api/v2/sensors", params)
if err != nil {
log.Fatal(err)
}
var result struct {
Data []struct {
SensorID string `json:"sensor_id"`
NodeName string `json:"node_name"`
HealthState string `json:"health_state"`
} `json:"data"`
NextCursor *string `json:"next_cursor"`
}
json.Unmarshal(data, &result)
for _, s := range result.Data {
fmt.Println(s.SensorID, s.NodeName, s.HealthState)
}rust
let client = TelovixClient::new();
let result = client.get("/api/v2/sensors", &[("role", "upf"), ("health", "degraded"), ("limit", "50")]);
if let Some(sensors) = result["data"].as_array() {
for s in sensors {
println!("{} {} {}", s["sensor_id"], s["node_name"], s["health_state"]);
}
}bash
api_get "/api/v2/sensors?role=upf&health=degraded&limit=50"Errors
| Error code | HTTP | Meaning |
|---|---|---|
insufficient_scope | 403 | The API key does not have the sensors:read scope. |
Get sensor
GET /api/v2/sensors/{sensor_id} · Scope: sensors:read
Returns the full SensorSummary object for a single sensor.
Request parameters
| Parameter | Type | Required | Default | Description |
|---|---|---|---|---|
sensor_id | string (path) | Yes | - | The sensor identifier. Format: s_<ulid>. |
Response schema
Returns a single SensorSummary object. All fields are identical to those in the List sensors response schema.
Response example
json
{
"sensor_id": "{sensor_id}",
"node_name": "upf-prod-01",
"declared_role": "upf",
"health_state": "healthy",
"status": "active",
"software_version": "1.5.0",
"last_seen_at": "2026-04-26T14:22:00Z",
"is_contained": false,
"tags": ["site:oslo", "plmn:242-01"],
"k8s_cluster_name": null
}Code examples
python
sensor_id = "{sensor_id}"
sensor = api_get(f"/api/v2/sensors/{sensor_id}")
print(sensor["node_name"], sensor["health_state"], sensor["software_version"])typescript
const sensorId = "{sensor_id}";
const sensor = await apiGet<{
sensor_id: string; node_name: string; health_state: string; software_version: string;
}>(`/api/v2/sensors/${sensorId}`);
console.log(sensor.node_name, sensor.health_state, sensor.software_version);go
sensorID := "{sensor_id}"
data, err := apiGet("/api/v2/sensors/"+sensorID, nil)
if err != nil { log.Fatal(err) }
var s struct {
NodeName string `json:"node_name"`
HealthState string `json:"health_state"`
Version string `json:"software_version"`
}
json.Unmarshal(data, &s)
fmt.Println(s.NodeName, s.HealthState, s.Version)rust
let client = TelovixClient::new();
let sensor_id = "{sensor_id}";
let sensor = client.get(&format!("/api/v2/sensors/{}", sensor_id), &[]);
println!("{} - {} ({})",
sensor["node_name"].as_str().unwrap_or(""),
sensor["health_state"].as_str().unwrap_or(""),
sensor["software_version"].as_str().unwrap_or(""));bash
api_get "/api/v2/sensors/{sensor_id}"Errors
| Error code | HTTP | Meaning |
|---|---|---|
sensor_not_found | 404 | No sensor with the given sensor_id exists. |
insufficient_scope | 403 | The API key does not have the sensors:read scope. |
Get sensor health
GET /api/v2/sensors/{sensor_id}/health · Scope: sensors:read
Returns the detailed health state and current resource snapshot for a single sensor.
Request parameters
| Parameter | Type | Required | Default | Description |
|---|---|---|---|---|
sensor_id | string (path) | Yes | - | The sensor identifier. |
Response schema
| Field | Type | Description |
|---|---|---|
sensor_id | string | Unique identifier for the sensor. |
node_name | string | Hostname of the protected node. |
health_state | string | Aggregate health: healthy, watch, degraded, critical, or offline. |
health_reasons | string[] | Human-readable strings explaining non-healthy conditions. Empty when healthy. |
status | string | Enrollment lifecycle state: active, disabled, or revoked. |
is_contained | boolean | true when the sensor is under active network containment. |
ws_connected | boolean | true when the sensor has an active WebSocket connection to the Console. |
last_seen_at | string | RFC 3339 timestamp of the most recent heartbeat. |
last_cpu_percent | float | CPU utilization percentage from the most recent heartbeat (0.0–100.0). |
last_memory_used | integer | Resident memory used by the sensor process, in bytes. |
last_memory_total | integer | Total system memory on the node, in bytes. |
last_load_avg | float | 1-minute load average from the most recent heartbeat. |
last_bpf_loss_per_mille | float | BPF ring buffer loss rate, per thousand events (‰). 0.0 means no loss. |
has_active_trust_alert | boolean | true when there is an unresolved kernel_guard or sensor_tamper alert for this sensor. |
kernel_guard_ok | boolean | false when unexpected modifications to BPF programs or kernel hooks have been detected. |
alert_delivery_p95_ms | integer | 95th-percentile alert delivery latency in milliseconds over the last 5 minutes. |
Response example
json
{
"sensor_id": "{sensor_id}",
"node_name": "upf-prod-01",
"health_state": "watch",
"health_reasons": [
"BPF event loss rate elevated: 3.2‰ over the last 5 minutes"
],
"status": "active",
"is_contained": false,
"ws_connected": true,
"last_seen_at": "2026-04-26T14:22:00Z",
"last_cpu_percent": 4.7,
"last_memory_used": 104857600,
"last_memory_total": 68719476736,
"last_load_avg": 1.12,
"last_bpf_loss_per_mille": 3.2,
"has_active_trust_alert": false,
"kernel_guard_ok": true,
"alert_delivery_p95_ms": 145
}Code examples
python
sensor_id = "{sensor_id}"
health = api_get(f"/api/v2/sensors/{sensor_id}/health")
print(f"Health: {health['health_state']} CPU: {health['last_cpu_percent']}%"
f" BPF loss: {health['last_bpf_loss_per_mille']}‰")
for reason in health["health_reasons"]:
print(f" ! {reason}")typescript
interface SensorHealth {
sensor_id: string; node_name: string;
health_state: "healthy" | "watch" | "degraded" | "critical" | "offline";
health_reasons: string[];
status: string; is_contained: boolean; ws_connected: boolean; last_seen_at: string;
last_cpu_percent: number; last_memory_used: number; last_memory_total: number;
last_load_avg: number; last_bpf_loss_per_mille: number;
has_active_trust_alert: boolean; kernel_guard_ok: boolean; alert_delivery_p95_ms: number;
}
const sensorId = "{sensor_id}";
const health = await apiGet<SensorHealth>(`/api/v2/sensors/${sensorId}/health`);
console.log("Health:", health.health_state, "BPF loss:", health.last_bpf_loss_per_mille + "‰");
if (!health.kernel_guard_ok) console.warn("KERNEL GUARD FAILURE on", health.node_name);go
sensorID := "{sensor_id}"
data, err := apiGet("/api/v2/sensors/"+sensorID+"/health", nil)
if err != nil { log.Fatal(err) }
var h struct {
HealthState string `json:"health_state"`
HealthReasons []string `json:"health_reasons"`
LastCPUPercent float64 `json:"last_cpu_percent"`
LastBPFLoss float64 `json:"last_bpf_loss_per_mille"`
KernelGuardOK bool `json:"kernel_guard_ok"`
AlertDeliveryP95 int `json:"alert_delivery_p95_ms"`
}
json.Unmarshal(data, &h)
fmt.Printf("Health: %s CPU: %.1f%% BPF loss: %.1f‰\n", h.HealthState, h.LastCPUPercent, h.LastBPFLoss)
if !h.KernelGuardOK { fmt.Println("WARNING: kernel guard failure") }
for _, r := range h.HealthReasons { fmt.Println(" !", r) }rust
let client = TelovixClient::new();
let sensor_id = "{sensor_id}";
let health = client.get(&format!("/api/v2/sensors/{}/health", sensor_id), &[]);
println!("Health: {} CPU: {}% BPF loss: {}‰",
health["health_state"].as_str().unwrap_or(""),
health["last_cpu_percent"].as_f64().unwrap_or(0.0),
health["last_bpf_loss_per_mille"].as_f64().unwrap_or(0.0));bash
api_get "/api/v2/sensors/{sensor_id}/health"Errors
| Error code | HTTP | Meaning |
|---|---|---|
sensor_not_found | 404 | No sensor with the given sensor_id exists. |
insufficient_scope | 403 | The API key does not have the sensors:read scope. |
Get sensor metrics
GET /api/v2/sensors/{sensor_id}/metrics · Scope: sensors:read
Returns a time-ordered array of resource metric snapshots. Default window: last 24 hours at 5-minute resolution (288 samples).
Request parameters
| Parameter | Type | Required | Default | Description |
|---|---|---|---|---|
sensor_id | string (path) | Yes | - | The sensor identifier. |
limit | integer | No | 288 | Number of samples to return. Maximum 2880 (~10 days at 5-minute resolution). Newest-first. |
Response schema
| Field | Type | Description |
|---|---|---|
sensor_id | string | The sensor this data belongs to. |
count | integer | Number of samples returned. |
limit | integer | Effective limit used for this response. |
data | MetricSample[] | Array of metric snapshots, newest-first. |
Each MetricSample:
| Field | Type | Description |
|---|---|---|
ts | string | RFC 3339 timestamp for this sample. |
cpu_percent | float | CPU utilization percentage (0.0–100.0). |
memory_used | integer | Resident memory used by the sensor process, in bytes. |
load_avg | float | 1-minute system load average. |
bpf_loss_per_mille | float | BPF ring buffer event loss rate, per thousand events (‰). |
events_forwarded | integer | Number of events forwarded to the Console during this interval. |
Response example
json
{
"sensor_id": "{sensor_id}",
"count": 3,
"limit": 288,
"data": [
{
"ts": "2026-04-26T14:20:00Z",
"cpu_percent": 4.7,
"memory_used": 104857600,
"load_avg": 1.12,
"bpf_loss_per_mille": 0.0,
"events_forwarded": 1842
},
{
"ts": "2026-04-26T14:15:00Z",
"cpu_percent": 3.9,
"memory_used": 103809024,
"load_avg": 0.98,
"bpf_loss_per_mille": 0.0,
"events_forwarded": 1756
},
{
"ts": "2026-04-26T14:10:00Z",
"cpu_percent": 5.2,
"memory_used": 105906176,
"load_avg": 1.34,
"bpf_loss_per_mille": 1.1,
"events_forwarded": 2014
}
]
}Code examples
python
sensor_id = "{sensor_id}"
metrics = api_get(f"/api/v2/sensors/{sensor_id}/metrics", {"limit": 144})
avg_cpu = sum(s["cpu_percent"] for s in metrics["data"]) / len(metrics["data"])
max_bpf = max(s["bpf_loss_per_mille"] for s in metrics["data"])
print(f"Avg CPU: {avg_cpu:.1f}% Max BPF loss: {max_bpf:.1f}‰")typescript
interface MetricSample {
ts: string; cpu_percent: number; memory_used: number;
load_avg: number; bpf_loss_per_mille: number; events_forwarded: number;
}
const sensorId = "{sensor_id}";
const metrics = await apiGet<{ count: number; limit: number; data: MetricSample[] }>(
`/api/v2/sensors/${sensorId}/metrics`, { limit: 144 },
);
const avgCpu = metrics.data.reduce((s, m) => s + m.cpu_percent, 0) / metrics.data.length;
console.log(`Avg CPU: ${avgCpu.toFixed(1)}%`);go
sensorID := "{sensor_id}"
data, err := apiGet("/api/v2/sensors/"+sensorID+"/metrics", url.Values{"limit": {"144"}})
if err != nil { log.Fatal(err) }
var result struct {
Data []struct {
Ts string `json:"ts"`
CPUPercent float64 `json:"cpu_percent"`
BPFLossPerMille float64 `json:"bpf_loss_per_mille"`
EventsForwarded int `json:"events_forwarded"`
} `json:"data"`
}
json.Unmarshal(data, &result)
var total float64
for _, s := range result.Data { total += s.CPUPercent }
fmt.Printf("Avg CPU: %.1f%%\n", total/float64(len(result.Data)))rust
let client = TelovixClient::new();
let sensor_id = "{sensor_id}";
let metrics = client.get(&format!("/api/v2/sensors/{}/metrics", sensor_id), &[("limit", "144")]);
if let Some(samples) = metrics["data"].as_array() {
let avg: f64 = samples.iter().filter_map(|s| s["cpu_percent"].as_f64()).sum::<f64>()
/ samples.len() as f64;
println!("Avg CPU: {:.1}%", avg);
}bash
api_get "/api/v2/sensors/{sensor_id}/metrics?limit=144"Errors
| Error code | HTTP | Meaning |
|---|---|---|
sensor_not_found | 404 | No sensor with the given sensor_id exists. |
insufficient_scope | 403 | The API key does not have the sensors:read scope. |
Contain sensor
POST /api/v2/sensors/{sensor_id}/contain · Scope: sensors:write
Places a sensor into network containment. When contained, the sensor enforces an outbound network allowlist at the eBPF layer. Containment is immediate and persists until explicitly released.
Request parameters
| Parameter | Type | Required | Default | Description |
|---|---|---|---|---|
sensor_id | string (path) | Yes | - | The sensor identifier. |
reason | string (body) | No | - | Human-readable reason. Recorded in the audit log. Max 500 characters. |
Response schema
| Field | Type | Description |
|---|---|---|
updated | boolean | true when containment was applied by this request. |
detail | string | Human-readable confirmation message. |
sensor | SensorSummary | The updated sensor object with is_contained: true. |
Response example
json
{
"updated": true,
"detail": "Sensor contained successfully.",
"sensor": {
"sensor_id": "{sensor_id}",
"node_name": "upf-prod-01",
"declared_role": "upf",
"health_state": "healthy",
"status": "active",
"software_version": "1.5.0",
"last_seen_at": "2026-04-26T14:22:00Z",
"is_contained": true,
"tags": ["site:oslo", "plmn:242-01"],
"k8s_cluster_name": null
}
}Code examples
python
sensor_id = "{sensor_id}"
result = api_post(
f"/api/v2/sensors/{sensor_id}/contain",
{"reason": "Active lateral movement investigation - INC-2847"},
)
print("Contained:", result["sensor"]["node_name"])typescript
const sensorId = "{sensor_id}";
const result = await apiPost<{ updated: boolean; detail: string; sensor: { node_name: string } }>(
`/api/v2/sensors/${sensorId}/contain`,
{ reason: "Active lateral movement investigation - INC-2847" },
);
console.log("Contained:", result.sensor.node_name);go
sensorID := "{sensor_id}"
raw, err := apiPost("/api/v2/sensors/"+sensorID+"/contain", struct {
Reason string `json:"reason"`
}{"Active lateral movement investigation - INC-2847"})
if err != nil { log.Fatal(err) }
var resp struct {
Updated bool `json:"updated"`
Detail string `json:"detail"`
}
json.Unmarshal(raw, &resp)
fmt.Println(resp.Updated, resp.Detail)rust
let client = TelovixClient::new();
let sensor_id = "{sensor_id}";
let result = client.post(
&format!("/api/v2/sensors/{}/contain", sensor_id),
&serde_json::json!({"reason": "Active lateral movement investigation - INC-2847"}),
);
println!("updated={} detail={}",
result["updated"].as_bool().unwrap_or(false),
result["detail"].as_str().unwrap_or(""));bash
BODY='{"reason":"Active lateral movement investigation - INC-2847"}'
api_post "/api/v2/sensors/{sensor_id}/contain" "$BODY"Errors
| Error code | HTTP | Meaning |
|---|---|---|
sensor_not_found | 404 | No sensor with the given sensor_id exists. |
already_contained | 409 | The sensor is already under containment. |
insufficient_scope | 403 | The API key does not have the sensors:write scope. |
Release sensor
POST /api/v2/sensors/{sensor_id}/release · Scope: sensors:write
Removes network containment from a sensor. The outbound block is lifted at the eBPF layer within the next heartbeat cycle (typically under 5 seconds).
Request parameters
| Parameter | Type | Required | Default | Description |
|---|---|---|---|---|
sensor_id | string (path) | Yes | - | The sensor identifier. |
reason | string (body) | No | - | Human-readable reason. Recorded in the audit log. Max 500 characters. |
Response schema
| Field | Type | Description |
|---|---|---|
updated | boolean | true when containment was removed by this request. |
detail | string | Human-readable confirmation message. |
sensor | SensorSummary | The updated sensor object with is_contained: false. |
Response example
json
{
"updated": true,
"detail": "Sensor released from containment.",
"sensor": {
"sensor_id": "{sensor_id}",
"node_name": "upf-prod-01",
"declared_role": "upf",
"health_state": "healthy",
"status": "active",
"software_version": "1.5.0",
"last_seen_at": "2026-04-26T14:22:00Z",
"is_contained": false,
"tags": ["site:oslo", "plmn:242-01"],
"k8s_cluster_name": null
}
}Code examples
python
sensor_id = "{sensor_id}"
result = api_post(f"/api/v2/sensors/{sensor_id}/release",
{"reason": "Investigation closed - INC-2847 resolved as false positive"})
print(result["detail"])typescript
const sensorId = "{sensor_id}";
const result = await apiPost<{ updated: boolean; detail: string }>(
`/api/v2/sensors/${sensorId}/release`,
{ reason: "Investigation closed - INC-2847 resolved as false positive" },
);
console.log(result.detail);go
sensorID := "{sensor_id}"
raw, err := apiPost("/api/v2/sensors/"+sensorID+"/release", struct {
Reason string `json:"reason"`
}{"Investigation closed - INC-2847 resolved as false positive"})
if err != nil { log.Fatal(err) }
var resp struct{ Detail string `json:"detail"` }
json.Unmarshal(raw, &resp)
fmt.Println(resp.Detail)rust
let client = TelovixClient::new();
let sensor_id = "{sensor_id}";
let result = client.post(
&format!("/api/v2/sensors/{}/release", sensor_id),
&serde_json::json!({"reason": "Investigation closed - INC-2847 resolved as false positive"}),
);
println!("{}", result["detail"].as_str().unwrap_or(""));bash
BODY='{"reason":"Investigation closed - INC-2847 resolved as false positive"}'
api_post "/api/v2/sensors/{sensor_id}/release" "$BODY"Errors
| Error code | HTTP | Meaning |
|---|---|---|
sensor_not_found | 404 | No sensor with the given sensor_id exists. |
not_contained | 409 | The sensor is not currently under containment. |
insufficient_scope | 403 | The API key does not have the sensors:write scope. |
List enrollment tokens
GET /api/v2/sensors/enrollment-tokens · Scope: sensors:read
Returns up to 200 enrollment tokens ordered by created_at descending. The response includes pending, used, expired, and revoked tokens.
Request parameters
No request parameters.
Response schema
Returns a JSON array of EnrollmentToken objects.
Each EnrollmentToken object:
| Field | Type | Description |
|---|---|---|
token_id | string | Stable enrollment token identifier. |
status | string | pending, used, expired, or revoked. |
label | string | null | Optional label associated with the token. |
token_kind | string | once for one-time tokens or cluster for reusable Kubernetes cluster tokens. |
cluster_id | string | null | Kubernetes cluster identifier for cluster tokens. null for one-time tokens. |
target_sensor_id | string | null | Target sensor ID when the token is bound to a specific sensor. |
max_nodes | integer | null | Per-token node cap when configured. null means no explicit cap. |
enrolled_sensor_count | integer | Number of sensors that have enrolled with this token. |
expires_at | string | RFC 3339 expiry timestamp. |
created_at | string | RFC 3339 creation timestamp. |
used_at | string | null | RFC 3339 timestamp when the token was first used. |
used_by_sensor_id | string | null | Sensor ID that used the token, when applicable. |
Response example
json
[
{
"token_id": "{token_id}",
"status": "pending",
"label": "k8s:5g-core-prod",
"token_kind": "cluster",
"cluster_id": "5g-core-prod",
"target_sensor_id": null,
"max_nodes": null,
"enrolled_sensor_count": 0,
"expires_at": "2027-04-30T09:00:00Z",
"created_at": "2026-04-30T09:00:00Z",
"used_at": null,
"used_by_sensor_id": null
}
]Errors
| Error code | HTTP | Meaning |
|---|---|---|
insufficient_scope | 403 | The API key does not have the sensors:read scope. |
Create enrollment token
POST /api/v2/sensors/enrollment-tokens · Scope: sensors:write
Creates a new enrollment token. The raw enrollment_token is returned only in this response. When deployment_type is kubernetes, the API creates a reusable cluster token valid for 365 days; otherwise it creates a one-time token using the Console's configured enrollment token TTL.
Request body
All fields are optional.
| Field | Type | Required | Description |
|---|---|---|---|
deployment_type | string | No | Set to kubernetes to create a reusable cluster token. Any other value, or omission, creates a one-time token. |
cluster_name | string | No | Used only when deployment_type is kubernetes. When omitted or blank, the Console stores default. |
Response
Returns 201 Created with the raw enrollment token plus its stored summary:
| Field | Type | Description |
|---|---|---|
detail | string | Human-readable summary of the token that was created. |
enrollment_token | string | Raw enrollment token string. Store it securely; it is not returned again. |
token | EnrollmentToken | Stored token summary object (same shape as the list response). |
capacity_warning | string | null | License-capacity warning when the fleet is near or at the licensed node limit; otherwise null. |
Response example
json
{
"detail": "Created a reusable Kubernetes cluster enrollment token for cluster '5g-core-prod'. Valid for 365 days.",
"enrollment_token": "tse_<raw_token>",
"token": {
"token_id": "{token_id}",
"status": "pending",
"label": "k8s:5g-core-prod",
"token_kind": "cluster",
"cluster_id": "5g-core-prod",
"target_sensor_id": null,
"max_nodes": null,
"enrolled_sensor_count": 0,
"expires_at": "2027-04-30T09:00:00Z",
"created_at": "2026-04-30T09:00:00Z",
"used_at": null,
"used_by_sensor_id": null
},
"capacity_warning": null
}Errors
| Error code | HTTP | Meaning |
|---|---|---|
active_license_required | 409 | No active license is installed, so enrollment-token creation is unavailable. |
license_expired | 403 | The active license is past its grace window. |
node_limit_reached | 403 | The deployment has reached its licensed protected-node limit. |
insufficient_scope | 403 | The API key does not have the sensors:write scope. |
Revoke enrollment token
POST /api/v2/sensors/enrollment-tokens/{token_id}/revoke · Scope: sensors:write
Revokes an active enrollment token immediately. Used, expired, or already-revoked tokens return enrollment_token_not_found.
Request parameters
| Parameter | Type | Required | Description |
|---|---|---|---|
token_id | string (path) | Yes | The enrollment token ID to revoke. |
Request body
No request body.
Response
Returns 200 OK:
json
{ "ok": true, "revoked": true, "token_id": "{token_id}" }Errors
| Error code | HTTP | Meaning |
|---|---|---|
enrollment_token_not_found | 404 | No active enrollment token with the given token_id exists. |
insufficient_scope | 403 | The API key does not have the sensors:write scope. |
Get sensor energy
GET /api/v2/sensors/{sensor_id}/energy · Scope: sensors:read
Returns daily energy consumption data for a sensor with efficiency savings estimates compared to the fleet baseline.
Request parameters
| Parameter | Type | Required | Default | Description |
|---|---|---|---|---|
sensor_id | string (path) | Yes | - | The sensor identifier. |
days | integer | No | 30 | Number of days to look back. Minimum 1, maximum 90. |
Response schema
| Field | Type | Description |
|---|---|---|
sensor_id | string | The sensor this data belongs to. |
days | integer | The lookback period used. |
from | string | RFC 3339 start of the reporting window (inclusive). |
until | string | RFC 3339 end of the reporting window (exclusive). |
rows | EnergyRow[] | Daily consumption rows, oldest-first. |
savings | EnergySavings | Summary of efficiency savings over the window. |
Each EnergyRow:
| Field | Type | Description |
|---|---|---|
date | string | Calendar date in YYYY-MM-DD format (UTC). |
avg_watts | float | Average power draw for this sensor on this day, in watts. |
The EnergySavings object:
| Field | Type | Description |
|---|---|---|
sensor_avg_watts | float | Average power draw over the full window, in watts. |
baseline_watts | float | Fleet p50 average power draw over the same window, in watts. |
savings_watts | float | Power saved vs. baseline. Negative when this sensor draws more than baseline. |
savings_pct | float | Percentage savings relative to baseline. |
method | string | Description of the comparison methodology. |
Response example
json
{
"sensor_id": "{sensor_id}",
"days": 7,
"from": "2026-04-19T00:00:00Z",
"until": "2026-04-26T00:00:00Z",
"rows": [
{ "date": "2026-04-19", "avg_watts": 13.8 },
{ "date": "2026-04-20", "avg_watts": 14.1 },
{ "date": "2026-04-21", "avg_watts": 13.5 },
{ "date": "2026-04-22", "avg_watts": 13.9 },
{ "date": "2026-04-23", "avg_watts": 14.4 },
{ "date": "2026-04-24", "avg_watts": 14.2 },
{ "date": "2026-04-25", "avg_watts": 13.7 }
],
"savings": {
"sensor_avg_watts": 13.9,
"baseline_watts": 18.2,
"savings_watts": 4.3,
"savings_pct": 23.6,
"method": "comparison against fleet p50 over the same window"
}
}Code examples
python
sensor_id = "{sensor_id}"
energy = api_get(f"/api/v2/sensors/{sensor_id}/energy", {"days": 7})
print(f"Savings: {energy['savings']['savings_pct']}% vs fleet baseline")
for row in energy["rows"]:
print(f" {row['date']} {row['avg_watts']} W")typescript
const sensorId = "{sensor_id}";
const energy = await apiGet<{
from: string; until: string; rows: Array<{ date: string; avg_watts: number }>;
savings: { savings_pct: number; sensor_avg_watts: number; baseline_watts: number };
}>(`/api/v2/sensors/${sensorId}/energy`, { days: 7 });
console.log(`Savings: ${energy.savings.savings_pct}% vs fleet baseline`);go
sensorID := "{sensor_id}"
data, err := apiGet("/api/v2/sensors/"+sensorID+"/energy", url.Values{"days": {"7"}})
if err != nil { log.Fatal(err) }
var result struct {
Savings struct {
SavingsPct float64 `json:"savings_pct"`
} `json:"savings"`
}
json.Unmarshal(data, &result)
fmt.Printf("Savings: %.1f%%\n", result.Savings.SavingsPct)rust
let client = TelovixClient::new();
let sensor_id = "{sensor_id}";
let energy = client.get(&format!("/api/v2/sensors/{}/energy", sensor_id), &[("days", "7")]);
println!("Savings: {}%", energy["savings"]["savings_pct"].as_f64().unwrap_or(0.0));bash
api_get "/api/v2/sensors/{sensor_id}/energy?days=7"Errors
| Error code | HTTP | Meaning |
|---|---|---|
sensor_not_found | 404 | No sensor with the given sensor_id exists. |
insufficient_scope | 403 | The API key does not have the sensors:read scope. |
Events
Runtime events are the raw observability record emitted by sensors. Every process execution, file write, privilege change, network connection, and policy violation is stored as an event, queryable with millisecond granularity.
List events
GET /api/v2/events · Scope: events:read
Returns a cursor-paginated list of runtime events matching the given filters. Default time window: last hour. Results ordered newest-first.
Request parameters
| Parameter | Type | Required | Default | Description |
|---|---|---|---|---|
sensor_id | string | No | - | Restrict results to a single sensor. |
event_kind | string | No | - | Filter by exact event kind (e.g. process_exec, privilege_change, fim_alert). See Event Kinds Reference. |
since | string | No | 1 hour ago | ISO 8601 start of query window. |
until | string | No | Now | ISO 8601 end of query window. |
severity | string | No | - | info, warning, or critical. |
process_executable | string | No | - | Substring match on process executable path. Case-insensitive. |
q | string | No | - | Full-text search across message and process_executable. |
limit | integer | No | 100 | Maximum events to return. Maximum 1000. |
cursor | string | No | - | Base64 pagination cursor from next_cursor of a previous response. |
Response schema
| Field | Type | Description |
|---|---|---|
data | Event[] | Array of event objects for this page. |
count | integer | Number of events in this page. |
limit | integer | Effective limit used for this response. |
next_cursor | string | null | Opaque base64 cursor for the next page. null on the last page. |
from | string | Effective start of the query window (ISO 8601). |
until | string | Effective end of the query window (ISO 8601). |
Each Event object:
| Field | Type | Description |
|---|---|---|
event_id | string | Unique event identifier. Format: <event_kind>_<ulid>. |
event_kind | string | Event category. See Event Kinds Reference. |
severity | string | info, warning, or critical. |
sensor_id | string | Identifier of the sensor that observed this event. |
node_name | string | Hostname of the node where the event occurred. |
process_executable | string | Absolute path of the process that triggered the event. |
parent_executable | string | null | Absolute path of the parent process. null when unavailable. |
pid | integer | null | Process ID of the triggering process. |
uid | integer | null | User ID of the triggering process. |
message | string | Human-readable description of the event. |
observed_at | string | RFC 3339 timestamp when the event was observed at the sensor. |
k8s_namespace | string | null | Kubernetes namespace when the process was in a Pod. null for bare-metal. |
pod_name | string | null | Kubernetes Pod name. null for bare-metal. |
declared_role | string | null | Declared role of the sensor that emitted this event. |
detected_class | string | null | Classifier label from the anomaly engine (e.g. lateral_movement). null if not classified. |
suppressed | boolean | true when a suppression rule matched this event. |
pack_id | string | null | ID of the policy pack rule that generated this event. null for built-in rules. |
Response example
json
{
"data": [
{
"event_id": "{event_id}",
"event_kind": "privilege_change",
"severity": "warning",
"sensor_id": "{sensor_id}",
"node_name": "upf-prod-01",
"process_executable": "/opt/open5gs/bin/upfd",
"parent_executable": "/lib/systemd/systemd",
"pid": 4821,
"uid": 1001,
"message": "setuid(0) called by /opt/open5gs/bin/upfd (uid 1001 -> uid 0)",
"observed_at": "2026-04-26T14:22:01Z",
"k8s_namespace": null,
"pod_name": null,
"declared_role": "upf",
"detected_class": null,
"suppressed": false,
"pack_id": "pack_telecom_core_v2"
}
],
"count": 1,
"limit": 100,
"next_cursor": null,
"from": "2026-04-26T13:22:00Z",
"until": "2026-04-26T14:22:00Z"
}Code examples
python
from datetime import datetime, timezone, timedelta
sensor_id = "{sensor_id}"
since = (datetime.now(timezone.utc) - timedelta(hours=4)).isoformat()
# Fetch first page
page = api_get("/api/v2/events", {
"sensor_id": sensor_id, "event_kind": "privilege_change",
"severity": "warning", "since": since, "limit": 100,
})
for evt in page["data"]:
print(f"{evt['observed_at']} {evt['node_name']} {evt['message']}")
# Paginate all pages
all_events = list(page["data"])
cursor = page.get("next_cursor")
while cursor:
page = api_get("/api/v2/events", {
"sensor_id": sensor_id, "event_kind": "privilege_change",
"severity": "warning", "since": since, "limit": 100, "cursor": cursor,
})
all_events.extend(page["data"])
cursor = page.get("next_cursor")
print(f"Total: {len(all_events)}")typescript
interface Event {
event_id: string; event_kind: string; severity: "info" | "warning" | "critical";
sensor_id: string; node_name: string; process_executable: string;
parent_executable: string | null; pid: number | null; uid: number | null;
message: string; observed_at: string; k8s_namespace: string | null;
pod_name: string | null; declared_role: string | null;
detected_class: string | null; suppressed: boolean; pack_id: string | null;
}
const sensorId = "{sensor_id}";
const since = new Date(Date.now() - 4 * 3600 * 1000).toISOString();
const allEvents: Event[] = [];
let cursor: string | null = null;
do {
const params: Record<string, string> = {
sensor_id: sensorId, event_kind: "privilege_change",
severity: "warning", since, limit: "100",
};
if (cursor) params.cursor = cursor;
const page = await apiGet<{ data: Event[]; next_cursor: string | null }>("/api/v2/events", params);
allEvents.push(...page.data);
cursor = page.next_cursor;
} while (cursor);
console.log("Total:", allEvents.length);
for (const evt of allEvents) console.log(evt.observed_at, evt.node_name, evt.message);go
type Event struct {
EventID string `json:"event_id"`
EventKind string `json:"event_kind"`
Severity string `json:"severity"`
SensorID string `json:"sensor_id"`
NodeName string `json:"node_name"`
ProcessExecutable string `json:"process_executable"`
ParentExecutable *string `json:"parent_executable"`
PID *int `json:"pid"`
UID *int `json:"uid"`
Message string `json:"message"`
ObservedAt string `json:"observed_at"`
K8sNamespace *string `json:"k8s_namespace"`
PodName *string `json:"pod_name"`
DeclaredRole *string `json:"declared_role"`
DetectedClass *string `json:"detected_class"`
Suppressed bool `json:"suppressed"`
PackID *string `json:"pack_id"`
}
since := time.Now().Add(-4 * time.Hour).UTC().Format(time.RFC3339)
cursor := ""
var all []Event
for {
params := url.Values{
"sensor_id": {"{sensor_id}"},
"event_kind": {"privilege_change"},
"severity": {"warning"},
"since": {since},
"limit": {"100"},
}
if cursor != "" { params.Set("cursor", cursor) }
raw, err := apiGet("/api/v2/events", params)
if err != nil { log.Fatal(err) }
var page struct {
Data []Event `json:"data"`
NextCursor *string `json:"next_cursor"`
}
json.Unmarshal(raw, &page)
all = append(all, page.Data...)
if page.NextCursor == nil || *page.NextCursor == "" { break }
cursor = *page.NextCursor
}
fmt.Printf("Total events: %d\n", len(all))rust
let client = TelovixClient::new();
let sensor_id = "{sensor_id}";
let mut cursor = String::new();
let mut all_events: Vec<serde_json::Value> = Vec::new();
loop {
let mut params = vec![
("sensor_id", sensor_id),
("event_kind", "privilege_change"),
("severity", "warning"),
("limit", "100"),
];
let cursor_copy = cursor.clone();
if !cursor_copy.is_empty() { params.push(("cursor", &cursor_copy)); }
let page = client.get("/api/v2/events", ¶ms);
if let Some(events) = page["data"].as_array() {
all_events.extend(events.clone());
}
match page["next_cursor"].as_str() {
Some(c) if !c.is_empty() => cursor = c.to_string(),
_ => break,
}
}
println!("Total events: {}", all_events.len());bash
SENSOR_ID="{sensor_id}"
SINCE=$(date -u -d "4 hours ago" +"%Y-%m-%dT%H:%M:%SZ" 2>/dev/null \
|| date -u -v-4H +"%Y-%m-%dT%H:%M:%SZ")
QS="sensor_id=${SENSOR_ID}&event_kind=privilege_change&severity=warning&since=${SINCE}&limit=100"
PAGE=$(api_get "/api/v2/events?${QS}")
echo "$PAGE" | jq '.data[].message'
# Paginate with jq
cursor=$(echo "$PAGE" | jq -r '.next_cursor // empty')
while [ -n "$cursor" ]; do
PAGE=$(api_get "/api/v2/events?${QS}&cursor=${cursor}")
echo "$PAGE" | jq '.data[].message'
cursor=$(echo "$PAGE" | jq -r '.next_cursor // empty')
doneErrors
| Error code | HTTP | Meaning |
|---|---|---|
invalid_since_format | 400 | The since parameter is not a valid ISO 8601 timestamp. |
invalid_until_format | 400 | The until parameter is not a valid ISO 8601 timestamp. |
insufficient_scope | 403 | The API key does not have the events:read scope. |
Stream events (SSE)
GET /api/v2/events/stream · Scope: events:stream
Opens a persistent Server-Sent Events (SSE) connection that delivers runtime events in real time as they arrive from sensors. Include the Accept: text/event-stream header. HMAC signing is required on the initial request.
The server sends a keep-alive comment every 15 seconds: : ping
Each SSE data line contains a single JSON Event object (same fields as List events).
Request parameters
| Parameter | Type | Required | Default | Description |
|---|---|---|---|---|
sensor_id | string | No | - | Restrict the stream to a single sensor. |
event_kinds | string | No | - | Comma-separated event kinds (e.g. process_exec,fim_alert). Omit for all. |
severity | string | No | - | Exact-match severity filter: info, warning, or critical. |
q | string | No | - | Full-text filter on message. |
Response schema
Continuous SSE stream. Each data: frame is a single JSON Event object with all fields described in the List events response schema. No envelope.
Code examples
python
"""
Requires: pip install requests sseclient-py
"""
import json
import sseclient
import requests
sensor_id = "{sensor_id}"
stream_path = (
f"/api/v2/events/stream"
f"?sensor_id={sensor_id}"
f"&event_kinds=fim_alert,privilege_change,net_connect"
f"&severity=warning"
)
response = requests.get(
BASE_URL + stream_path,
headers={**sign("GET", stream_path), "Accept": "text/event-stream"},
stream=True,
timeout=None,
)
response.raise_for_status()
for event in sseclient.SSEClient(response).events():
if not event.data:
continue
try:
evt = json.loads(event.data)
print(f"{evt['observed_at']} [{evt['severity'].upper():8s}]"
f" {evt['node_name']} {evt['event_kind']} {evt['message']}")
except (json.JSONDecodeError, KeyError):
passtypescript
const sensorId = "{sensor_id}";
const streamPath = `/api/v2/events/stream?sensor_id=${sensorId}&event_kinds=fim_alert,privilege_change&severity=warning`;
const response = await fetch(BASE_URL + streamPath, {
headers: { ...sign("GET", streamPath), "Accept": "text/event-stream" },
});
if (!response.ok || !response.body) throw new Error(`${response.status}`);
const reader = response.body.getReader();
const decoder = new TextDecoder();
let buffer = "";
while (true) {
const { done, value } = await reader.read();
if (done) break;
buffer += decoder.decode(value, { stream: true });
const lines = buffer.split("\n");
buffer = lines.pop() ?? "";
for (const line of lines) {
if (!line.startsWith("data: ")) continue;
const raw = line.slice(6).trim();
if (!raw) continue;
try {
const evt = JSON.parse(raw);
console.log(`${evt.observed_at} [${evt.severity.toUpperCase().padEnd(8)}]`
+ ` ${evt.node_name} ${evt.event_kind} ${evt.message}`);
} catch { /* skip malformed */ }
}
}go
func streamEvents(sensorID string) {
streamPath := fmt.Sprintf(
"/api/v2/events/stream?sensor_id=%s&event_kinds=fim_alert,privilege_change&severity=warning",
sensorID,
)
req, _ := http.NewRequest(http.MethodGet, baseURL+streamPath, nil)
for k, v := range sign("GET", streamPath, nil) {
req.Header[k] = []string{v}
}
req.Header.Set("Accept", "text/event-stream")
client := &http.Client{Timeout: 0}
resp, err := client.Do(req)
if err != nil { log.Fatal(err) }
defer resp.Body.Close()
type Event struct {
ObservedAt string `json:"observed_at"`
Severity string `json:"severity"`
NodeName string `json:"node_name"`
EventKind string `json:"event_kind"`
Message string `json:"message"`
}
scanner := bufio.NewScanner(resp.Body)
for scanner.Scan() {
line := scanner.Text()
if !strings.HasPrefix(line, "data: ") { continue }
raw := strings.TrimPrefix(line, "data: ")
if raw == "" { continue }
var evt Event
if err := json.Unmarshal([]byte(raw), &evt); err != nil { continue }
fmt.Printf("%s [%-8s] %s %s %s\n",
evt.ObservedAt, strings.ToUpper(evt.Severity),
evt.NodeName, evt.EventKind, evt.Message)
}
}rust
//! Add to Cargo.toml:
//! reqwest = { version = "0.12", features = ["stream"] }
//! tokio = { version = "1", features = ["full"] }
//! futures-util = "0.3"
use futures_util::StreamExt;
#[tokio::main]
async fn main() -> Result<(), Box<dyn std::error::Error>> {
let sensor_id = "{sensor_id}";
let stream_path = format!(
"/api/v2/events/stream?sensor_id={}&event_kinds=fim_alert,privilege_change&severity=warning",
sensor_id,
);
// Build auth using the signing logic from the auth helper above
let secret = hex::decode(SECRET_HEX)?;
let ts = std::time::SystemTime::now()
.duration_since(std::time::UNIX_EPOCH)?.as_secs().to_string();
// Nonce derived from the clock for brevity; use a real RNG (e.g. the `rand` crate) in production
let nonce = format!("{:032x}", std::time::SystemTime::now()
.duration_since(std::time::UNIX_EPOCH)?.as_nanos());
let body_hash = hex::encode(sha2::Sha256::digest(b""));
// Canonical signing message: METHOD\nPATH\nTIMESTAMP\nNONCE\nBODY_HASH (matches the quickstart)
let message = format!("GET\n{}\n{}\n{}\n{}", stream_path, ts, nonce, body_hash);
let mut mac = <hmac::Hmac<sha2::Sha256> as hmac::Mac>::new_from_slice(&secret)?;
hmac::Mac::update(&mut mac, message.as_bytes());
let sig = hex::encode(hmac::Mac::finalize(mac).into_bytes());
let client = reqwest::Client::new();
let response = client.get(format!("{}{}", BASE_URL, stream_path))
.header("X-Telovix-Key-ID", KEY_ID)
.header("X-Telovix-Timestamp", &ts)
.header("X-Telovix-Nonce", &nonce)
.header("X-Telovix-Signature", format!("sha256={}", sig))
.header("Accept", "text/event-stream")
.send().await?;
let mut stream = response.bytes_stream();
let mut buffer = String::new();
while let Some(chunk) = stream.next().await {
buffer.push_str(&String::from_utf8_lossy(&chunk?));
while let Some(pos) = buffer.find('\n') {
let line = buffer[..pos].trim().to_string();
buffer = buffer[pos + 1..].to_string();
if let Some(raw) = line.strip_prefix("data: ") {
if raw.is_empty() { continue; }
if let Ok(evt) = serde_json::from_str::<serde_json::Value>(raw) {
println!("{} {} {} {}",
evt["observed_at"].as_str().unwrap_or(""),
evt["severity"].as_str().unwrap_or(""),
evt["node_name"].as_str().unwrap_or(""),
evt["message"].as_str().unwrap_or(""));
}
}
}
}
Ok(())
}bash
SENSOR_ID="{sensor_id}"
STREAM_PATH="/api/v2/events/stream?sensor_id=${SENSOR_ID}&event_kinds=fim_alert,privilege_change&severity=warning"
# -N disables buffering so SSE frames print immediately
eval curl -sfN \
-H "Accept: text/event-stream" \
$(sign "GET" "$STREAM_PATH") \
"${BASE}${STREAM_PATH}"Errors
| Error code | HTTP | Meaning |
|---|---|---|
insufficient_scope | 403 | The API key does not have the events:stream scope. |
stream_unavailable | 503 | The SSE broker is temporarily unavailable. Retry with exponential backoff. |
Anomalies
Telovix Sensor builds a behavioral baseline for every binary observed on each node. The anomaly engine scores each runtime event against that baseline and emits a score from 0 (fully expected) to 100 (never-before-seen behavior). Attack chains correlate clusters of high-scoring events into a single detection timeline.
List anomaly scores
GET /api/v2/anomalies/scores · Scope: anomalies:read
Returns behavioral anomaly scores sorted by anomaly_score descending. Scores below min_score and scores still in the learning window are excluded unless explicitly requested.
Query parameters
| Parameter | Type | Required | Default | Description |
|---|---|---|---|---|
min_score | integer | No | 30 | Minimum score threshold. Range 0–100. |
since_hours | integer | No | 24 | Lookback window in hours. Max 720 (30 days). |
sensor_id | string | No | - | Filter to a single sensor. |
event_kind | string | No | - | Filter by event kind (e.g. process_exec, privilege_change). |
suppressed | boolean | No | false | Set true to include suppressed scores in results. |
limit | integer | No | 100 | Maximum number of results. Max 500. |
Response schema
| Field | Type | Description |
|---|---|---|
data | AnomalyScore[] | Sorted list of scores. |
total | integer | Total matching scores before limit is applied. |
limit | integer | Effective limit used for this request. |
AnomalyScore object
| Field | Type | Description |
|---|---|---|
score_id | string | Stable identifier for this score record (e.g. {score_id}). |
scored_at | string (ISO 8601) | Timestamp when the score was computed. |
sensor_id | string | Sensor that observed the event. |
node_name | string | Human-readable hostname of the node. |
event_kind | string | Runtime event kind that triggered scoring. |
binary_path | string | Full path of the scored executable. |
parent_binary | string | null | Full path of the parent process, if available. |
anomaly_score | integer | Composite behavioral anomaly score (0–100). |
spawn_score | integer | Sub-score for process spawn behavior (0–100). |
net_score | integer | Sub-score for network behavior (0–100). |
file_score | integer | Sub-score for file access behavior (0–100). |
args_score | integer | Sub-score for command-line argument patterns (0–100). |
reasons | string[] | Human-readable descriptions of anomalous signals. |
event_message | string | Original event message from the sensor. |
is_learning | boolean | true if the baseline for this binary is still accumulating data. |
suppressed | boolean | true if this score is suppressed by a suppression rule or manual verdict. |
mitre_techniques | string[] | MITRE ATT&CK technique IDs associated with this event kind (e.g. ["T1059"]). |
occurrence_count | integer | Number of times this binary has scored above threshold in the lookback window. |
last_occurrence_at | string (ISO 8601) | Timestamp of the most recent high-scoring occurrence. |
Response example
json
{
"data": [
{
"score_id": "{score_id}",
"scored_at": "2026-04-26T14:22:00Z",
"sensor_id": "{sensor_id}",
"node_name": "upf-prod-01",
"event_kind": "process_exec",
"binary_path": "/tmp/beacon",
"parent_binary": "/bin/bash",
"anomaly_score": 91,
"spawn_score": 94,
"net_score": 88,
"file_score": 40,
"args_score": 72,
"reasons": [
"never-before-seen path prefix /tmp",
"unusual parent: /bin/bash spawning network-active binary",
"outbound TCP to 203.0.113.45:4444 not in baseline"
],
"event_message": "process_exec: /tmp/beacon (pid=38821, parent=/bin/bash)",
"is_learning": false,
"suppressed": false,
"mitre_techniques": ["T1059.004", "T1071.001"],
"occurrence_count": 3,
"last_occurrence_at": "2026-04-26T14:21:55Z"
}
],
"total": 1,
"limit": 100
}python
scores = requests.get(
f"{BASE}/api/v2/anomalies/scores",
headers=sign("GET", "/api/v2/anomalies/scores"),
params={"min_score": 70, "since_hours": 48, "sensor_id": "{sensor_id}"},
).json()
for s in scores["data"]:
print(s["score_id"], s["anomaly_score"], s["binary_path"])typescript
const path = "/api/v2/anomalies/scores?min_score=70&since_hours=48&sensor_id={sensor_id}";
const scores = await fetch(`${BASE}${path}`, {
headers: sign("GET", path),
}).then(r => r.json());
for (const s of scores.data) {
console.log(s.score_id, s.anomaly_score, s.binary_path);
}go
import (
"encoding/json"
"fmt"
"io"
"net/http"
)
req, _ := http.NewRequest("GET",
BASE+"/api/v2/anomalies/scores?min_score=70&since_hours=48",
nil,
)
req.Header = sign("GET", "/api/v2/anomalies/scores?min_score=70&since_hours=48", nil)
resp, _ := http.DefaultClient.Do(req)
defer resp.Body.Close()
body, _ := io.ReadAll(resp.Body)
var result map[string]any
json.Unmarshal(body, &result)
fmt.Println(result)bash
eval curl -sf \
$(sign GET "/api/v2/anomalies/scores?min_score=70&since_hours=48") \
"${BASE}/api/v2/anomalies/scores?min_score=70&since_hours=48"Errors
| Error code | HTTP | Meaning |
|---|---|---|
insufficient_scope | 403 | Key does not have the anomalies:read scope. |
Get anomaly score
GET /api/v2/anomalies/scores/{score_id} · Scope: anomalies:read
Returns a single anomaly score by its stable score_id. Returns the same AnomalyScore object as the list endpoint, with all fields populated.
Path parameters
| Parameter | Type | Required | Description |
|---|---|---|---|
score_id | string | Yes | The score_id from a list or webhook response (e.g. {score_id}). |
Response schema
Returns a single AnomalyScore object. Field definitions are identical to the list endpoint.
Response example
json
{
"score_id": "{score_id}",
"scored_at": "2026-04-26T14:22:00Z",
"sensor_id": "{sensor_id}",
"node_name": "upf-prod-01",
"event_kind": "process_exec",
"binary_path": "/tmp/beacon",
"parent_binary": "/bin/bash",
"anomaly_score": 91,
"spawn_score": 94,
"net_score": 88,
"file_score": 40,
"args_score": 72,
"reasons": [
"never-before-seen path prefix /tmp",
"unusual parent: /bin/bash spawning network-active binary",
"outbound TCP to 203.0.113.45:4444 not in baseline"
],
"event_message": "process_exec: /tmp/beacon (pid=38821, parent=/bin/bash)",
"is_learning": false,
"suppressed": false,
"mitre_techniques": ["T1059.004", "T1071.001"],
"occurrence_count": 3,
"last_occurrence_at": "2026-04-26T14:21:55Z"
}python
score = requests.get(
f"{BASE}/api/v2/anomalies/scores/{score_id}",
headers=sign("GET", f"/api/v2/anomalies/scores/{score_id}"),
).json()
print(score["anomaly_score"], score["reasons"])typescript
const score = await fetch(`${BASE}/api/v2/anomalies/scores/{score_id}`, {
headers: sign("GET", "/api/v2/anomalies/scores/{score_id}"),
}).then(r => r.json());
console.log(score.anomaly_score, score.reasons);go
import (
"encoding/json"
"io"
"net/http"
)
req, _ := http.NewRequest("GET",
BASE+"/api/v2/anomalies/scores/{score_id}",
nil,
)
req.Header = sign("GET", "/api/v2/anomalies/scores/{score_id}", nil)
resp, _ := http.DefaultClient.Do(req)
defer resp.Body.Close()
body, _ := io.ReadAll(resp.Body)
var score map[string]any
json.Unmarshal(body, &score)bash
eval curl -sf \
$(sign GET /api/v2/anomalies/scores/{score_id}) \
"${BASE}/api/v2/anomalies/scores/{score_id}"Errors
| Error code | HTTP | Meaning |
|---|---|---|
insufficient_scope | 403 | Key does not have the anomalies:read scope. |
score_not_found | 404 | No score exists with the given score_id. |
Update anomaly score verdict
PATCH /api/v2/anomalies/scores/{score_id} · Scope: anomalies:write
Sets a human verdict on a score. A false_positive verdict automatically sets suppressed: true on this score and creates a transient suppression entry for the same binary path and event kind. A true_positive verdict clears any existing suppression caused by a prior false positive verdict and flags the score for escalation.
Path parameters
| Parameter | Type | Required | Description |
|---|---|---|---|
score_id | string | Yes | The score to update. |
Request body
| Parameter | Type | Required | Default | Description |
|---|---|---|---|---|
verdict | string | Yes | - | "true_positive" or "false_positive". |
Response schema
| Field | Type | Description |
|---|---|---|
score_id | string | The updated score ID. |
verdict | string | The verdict that was applied. |
suppressed | boolean | Whether the score is now suppressed. |
updated | boolean | Always true. |
Response example
json
{
"score_id": "{score_id}",
"verdict": "false_positive",
"suppressed": true,
"updated": true
}python
import json
body = json.dumps({"verdict": "false_positive"}).encode()
result = requests.patch(
f"{BASE}/api/v2/anomalies/scores/{score_id}",
headers=sign("PATCH", f"/api/v2/anomalies/scores/{score_id}", body),
data=body,
).json()
print(result["suppressed"]) # Truetypescript
const body = JSON.stringify({ verdict: "false_positive" });
const result = await fetch(`${BASE}/api/v2/anomalies/scores/{score_id}`, {
method: "PATCH",
headers: sign("PATCH", "/api/v2/anomalies/scores/{score_id}", Buffer.from(body)),
body,
}).then(r => r.json());
console.log(result.suppressed); // truego
import (
"bytes"
"encoding/json"
"io"
"net/http"
)
payload := []byte(`{"verdict":"false_positive"}`)
req, _ := http.NewRequest("PATCH",
BASE+"/api/v2/anomalies/scores/{score_id}",
bytes.NewReader(payload),
)
req.Header = sign("PATCH", "/api/v2/anomalies/scores/{score_id}", payload)
resp, _ := http.DefaultClient.Do(req)
defer resp.Body.Close()
body, _ := io.ReadAll(resp.Body)
var result map[string]any
json.Unmarshal(body, &result)bash
BODY='{"verdict":"false_positive"}'
eval curl -sf -X PATCH \
$(sign PATCH /api/v2/anomalies/scores/{score_id} "$BODY") \
-d "$BODY" \
"${BASE}/api/v2/anomalies/scores/{score_id}"Errors
| Error code | HTTP | Meaning |
|---|---|---|
insufficient_scope | 403 | Key does not have the anomalies:write scope. |
score_not_found | 404 | No score exists with the given score_id. |
invalid_verdict | 422 | verdict is not "true_positive" or "false_positive". |
List attack chains
GET /api/v2/anomalies/chains · Scope: anomalies:read
Returns correlated multi-stage attack chains. A chain is created when multiple high-scoring anomaly scores for the same node occur within a short temporal and causal window. Chains are deduplicated by chain_id; a chain can contain events spanning many processes.
Query parameters
| Parameter | Type | Required | Default | Description |
|---|---|---|---|---|
sensor_id | string | No | - | Filter to a single sensor. |
severity | string | No | - | critical, high, or medium. |
suppressed | boolean | No | false | Set true to include suppressed chains. |
limit | integer | No | 50 | Maximum number of results. Max 200. |
Response schema
| Field | Type | Description |
|---|---|---|
data | Chain[] | Sorted list of chains, newest first. |
total | integer | Total matching chains before limit is applied. |
limit | integer | Effective limit used for this request. |
Chain object
| Field | Type | Description |
|---|---|---|
id | integer | Integer row ID. Use this in the suppress and detail endpoints. |
chain_id | string | Stable string identifier for the chain (e.g. {chain_id}). |
chain_name | string | Human-readable detection name (e.g. "Reverse shell via bash"). |
sensor_id | string | Sensor where the chain was observed. |
node_name | string | Hostname of the node. |
binary_path | string | Path of the root process in the chain. |
severity | string | critical, high, or medium. |
chain_start_at | string (ISO 8601) | Timestamp of the first event in the chain. |
chain_end_at | string (ISO 8601) | Timestamp of the last event in the chain. |
event_sequence | object[] | Ordered list of contributing events. Each object contains event_kind, binary_path, score, and occurred_at. |
suppressed | boolean | Whether the chain has been suppressed. |
fired_at | string (ISO 8601) | When the chain detection was first emitted. |
mitre_techniques | string[] | Union of all MITRE ATT&CK technique IDs across events in the chain. |
Response example
json
{
"data": [
{
"id": 42,
"chain_id": "{chain_id}",
"chain_name": "Reverse shell via bash on upf-prod-01",
"sensor_id": "{sensor_id}",
"node_name": "upf-prod-01",
"binary_path": "/bin/bash",
"severity": "critical",
"chain_start_at": "2026-04-26T14:20:00Z",
"chain_end_at": "2026-04-26T14:22:30Z",
"event_sequence": [
{
"event_kind": "process_exec",
"binary_path": "/bin/bash",
"score": 78,
"occurred_at": "2026-04-26T14:20:00Z"
},
{
"event_kind": "net_connect",
"binary_path": "/bin/bash",
"score": 91,
"occurred_at": "2026-04-26T14:20:05Z"
},
{
"event_kind": "process_exec",
"binary_path": "/tmp/beacon",
"score": 94,
"occurred_at": "2026-04-26T14:22:30Z"
}
],
"suppressed": false,
"fired_at": "2026-04-26T14:22:35Z",
"mitre_techniques": ["T1059.004", "T1071.001", "T1105"]
}
],
"total": 1,
"limit": 50
}python
path = "/api/v2/anomalies/chains?severity=critical&suppressed=false"
chains = requests.get(
f"{BASE}{path}",
headers=sign("GET", path),
).json()
for c in chains["data"]:
print(c["id"], c["chain_name"], c["severity"])typescript
const path = "/api/v2/anomalies/chains?severity=critical&suppressed=false";
const chains = await fetch(`${BASE}${path}`, {
headers: sign("GET", path),
}).then(r => r.json());
for (const c of chains.data) {
console.log(c.id, c.chain_name, c.severity);
}go
import (
"encoding/json"
"io"
"net/http"
)
req, _ := http.NewRequest("GET",
BASE+"/api/v2/anomalies/chains?severity=critical&suppressed=false",
nil,
)
req.Header = sign("GET", "/api/v2/anomalies/chains?severity=critical&suppressed=false", nil)
resp, _ := http.DefaultClient.Do(req)
defer resp.Body.Close()
body, _ := io.ReadAll(resp.Body)
var result map[string]any
json.Unmarshal(body, &result)bash
eval curl -sf \
$(sign GET "/api/v2/anomalies/chains?severity=critical") \
"${BASE}/api/v2/anomalies/chains?severity=critical"Errors
| Error code | HTTP | Meaning |
|---|---|---|
insufficient_scope | 403 | Key does not have the anomalies:read scope. |
Get attack chain
GET /api/v2/anomalies/chains/{id} · Scope: anomalies:read
Returns a single attack chain by its integer row id. Use the id field from the list response, not the chain_id string.
Path parameters
| Parameter | Type | Required | Description |
|---|---|---|---|
id | integer | Yes | Integer row ID from the list response. |
Response schema
Returns a single Chain object. Field definitions are identical to the list endpoint.
Response example
json
{
"id": 42,
"chain_id": "{chain_id}",
"chain_name": "Reverse shell via bash on upf-prod-01",
"sensor_id": "{sensor_id}",
"node_name": "upf-prod-01",
"binary_path": "/bin/bash",
"severity": "critical",
"chain_start_at": "2026-04-26T14:20:00Z",
"chain_end_at": "2026-04-26T14:22:30Z",
"event_sequence": [
{
"event_kind": "process_exec",
"binary_path": "/bin/bash",
"score": 78,
"occurred_at": "2026-04-26T14:20:00Z"
},
{
"event_kind": "net_connect",
"binary_path": "/bin/bash",
"score": 91,
"occurred_at": "2026-04-26T14:20:05Z"
},
{
"event_kind": "process_exec",
"binary_path": "/tmp/beacon",
"score": 94,
"occurred_at": "2026-04-26T14:22:30Z"
}
],
"suppressed": false,
"fired_at": "2026-04-26T14:22:35Z",
"mitre_techniques": ["T1059.004", "T1071.001", "T1105"]
}python
chain = requests.get(
f"{BASE}/api/v2/anomalies/chains/42",
headers=sign("GET", "/api/v2/anomalies/chains/42"),
).json()
print(chain["chain_name"], chain["severity"])
for step in chain["event_sequence"]:
print(" ->", step["event_kind"], step["binary_path"], step["score"])typescript
const chain = await fetch(`${BASE}/api/v2/anomalies/chains/42`, {
headers: sign("GET", "/api/v2/anomalies/chains/42"),
}).then(r => r.json());
console.log(chain.chain_name, chain.severity);
chain.event_sequence.forEach((step: any) => {
console.log("->", step.event_kind, step.binary_path, step.score);
});go
import (
"encoding/json"
"io"
"net/http"
)
req, _ := http.NewRequest("GET",
BASE+"/api/v2/anomalies/chains/42",
nil,
)
req.Header = sign("GET", "/api/v2/anomalies/chains/42", nil)
resp, _ := http.DefaultClient.Do(req)
defer resp.Body.Close()
body, _ := io.ReadAll(resp.Body)
var chain map[string]any
json.Unmarshal(body, &chain)bash
eval curl -sf \
$(sign GET /api/v2/anomalies/chains/42) \
"${BASE}/api/v2/anomalies/chains/42"Errors
| Error code | HTTP | Meaning |
|---|---|---|
insufficient_scope | 403 | Key does not have the anomalies:read scope. |
chain_not_found | 404 | No chain exists with the given integer id. |
Suppress attack chain
POST /api/v2/anomalies/chains/{id}/suppress · Scope: anomalies:write
Suppresses or un-suppresses an attack chain. Suppressed chains are hidden from the default list view and do not trigger webhook deliveries for new matching events.
Path parameters
| Parameter | Type | Required | Description |
|---|---|---|---|
id | integer | Yes | Integer row ID of the chain to update. |
Request body
| Parameter | Type | Required | Default | Description |
|---|---|---|---|---|
suppressed | boolean | Yes | - | true to suppress the chain, false to un-suppress. |
Response schema
| Field | Type | Description |
|---|---|---|
id | integer | The chain row ID that was updated. |
suppressed | boolean | The new suppression state. |
updated | boolean | Always true. |
Response example
json
{
"id": 42,
"suppressed": true,
"updated": true
}python
import json
body = json.dumps({"suppressed": True}).encode()
result = requests.post(
f"{BASE}/api/v2/anomalies/chains/42/suppress",
headers=sign("POST", "/api/v2/anomalies/chains/42/suppress", body),
data=body,
).json()
print(result["suppressed"]) # Truetypescript
const body = JSON.stringify({ suppressed: true });
const result = await fetch(`${BASE}/api/v2/anomalies/chains/42/suppress`, {
method: "POST",
headers: sign("POST", "/api/v2/anomalies/chains/42/suppress", Buffer.from(body)),
body,
}).then(r => r.json());
console.log(result.suppressed); // truego
import (
"bytes"
"encoding/json"
"io"
"net/http"
)
payload := []byte(`{"suppressed":true}`)
req, _ := http.NewRequest("POST",
BASE+"/api/v2/anomalies/chains/42/suppress",
bytes.NewReader(payload),
)
req.Header = sign("POST", "/api/v2/anomalies/chains/42/suppress", payload)
resp, _ := http.DefaultClient.Do(req)
defer resp.Body.Close()
body, _ := io.ReadAll(resp.Body)
var result map[string]any
json.Unmarshal(body, &result)bash
BODY='{"suppressed":true}'
eval curl -sf -X POST \
$(sign POST /api/v2/anomalies/chains/42/suppress "$BODY") \
-d "$BODY" \
"${BASE}/api/v2/anomalies/chains/42/suppress"Errors
| Error code | HTTP | Meaning |
|---|---|---|
insufficient_scope | 403 | Key does not have the anomalies:write scope. |
chain_not_found | 404 | No chain exists with the given integer id. |
List suppression rules
GET /api/v2/anomalies/suppressions · Scope: anomalies:read
Returns all suppression rules with 24-hour match counts. A suppression rule silences anomaly scores and chain detections that match its criteria. Rules with a past expires_at are automatically inactive but remain visible in the list.
Response schema
| Field | Type | Description |
|---|---|---|
data | SuppressionRule[] | All suppression rules, newest first. |
total | integer | Total count of rules returned. |
SuppressionRule object
| Field | Type | Description |
|---|---|---|
rule_id | string | Stable identifier for this suppression rule. |
name | string | Human-readable name. |
event_kind | string | null | If set, the rule only applies to this event kind. |
process_executable_pattern | string | null | If set, matched against binary paths using substring matching. |
sensor_id | string | null | If set, the rule only applies to scores from this sensor. |
scope | string | Scope label stored with the rule. |
reason | string | null | Optional operator note explaining the suppression. |
expires_at | string (ISO 8601) | null | If set, the rule expires and stops matching after this time. |
created_at | string (ISO 8601) | When the rule was created. |
match_count_24h | integer | Number of scores or chains suppressed by this rule in the last 24 hours. |
Response example
json
{
"data": [
{
"rule_id": "{sup_id}",
"name": "Suppress systemd-tmpfiles exec",
"event_kind": "process_exec",
"process_executable_pattern": "/usr/bin/systemd-tmpfiles",
"sensor_id": null,
"scope": "all_sensors",
"reason": "Known-good scheduled cleanup - approved by security team",
"expires_at": "2026-06-01T00:00:00Z",
"created_at": "2026-04-26T09:00:00Z",
"match_count_24h": 14
}
],
"total": 1
}python
rules = requests.get(
f"{BASE}/api/v2/anomalies/suppressions",
headers=sign("GET", "/api/v2/anomalies/suppressions"),
).json()
for r in rules["data"]:
print(r["rule_id"], r["name"], f"matches_24h={r['match_count_24h']}")typescript
const rules = await fetch(`${BASE}/api/v2/anomalies/suppressions`, {
headers: sign("GET", "/api/v2/anomalies/suppressions"),
}).then(r => r.json());
rules.data.forEach((rule: any) => {
console.log(rule.rule_id, rule.name, `matches_24h=${rule.match_count_24h}`);
});go
import (
"encoding/json"
"io"
"net/http"
)
req, _ := http.NewRequest("GET",
BASE+"/api/v2/anomalies/suppressions",
nil,
)
req.Header = sign("GET", "/api/v2/anomalies/suppressions", nil)
resp, _ := http.DefaultClient.Do(req)
defer resp.Body.Close()
body, _ := io.ReadAll(resp.Body)
var result map[string]any
json.Unmarshal(body, &result)bash
eval curl -sf \
$(sign GET /api/v2/anomalies/suppressions) \
"${BASE}/api/v2/anomalies/suppressions"Errors
| Error code | HTTP | Meaning |
|---|---|---|
insufficient_scope | 403 | Key does not have the anomalies:read scope. |
Create suppression rule
POST /api/v2/anomalies/suppressions · Scope: anomalies:write
Creates a new suppression rule. At least one of event_kind, process_executable_pattern, or sensor_id should be specified alongside name to avoid creating an overly broad rule that suppresses all scores fleet-wide.
Returns HTTP 201 on success.
Request body
| Parameter | Type | Required | Default | Description |
|---|---|---|---|---|
name | string | Yes | - | Human-readable label for the rule (max 255 chars). |
event_kind | string | No | - | Restrict matching to this event kind. |
process_executable_pattern | string | No | - | Substring match against binary_path. |
sensor_id | string | No | - | Restrict matching to a single sensor. |
reason | string | No | - | Operator note. Stored for audit purposes. |
expires_at | string (ISO 8601) | No | - | Rule deactivation time. Must be in the future. |
Response schema
| Field | Type | Description |
|---|---|---|
rule_id | string | The newly created suppression rule ID. |
Response example
json
{
"rule_id": "{sup_id}"
}python
import json
body = json.dumps({
"name": "Suppress systemd-tmpfiles exec",
"event_kind": "process_exec",
"process_executable_pattern": "/usr/bin/systemd-tmpfiles",
"reason": "Known-good scheduled cleanup",
"expires_at": "2026-06-01T00:00:00Z",
}).encode()
r = requests.post(
f"{BASE}/api/v2/anomalies/suppressions",
headers=sign("POST", "/api/v2/anomalies/suppressions", body),
data=body,
)
assert r.status_code == 201
rule_id = r.json()["rule_id"]typescript
const body = JSON.stringify({
name: "Suppress systemd-tmpfiles exec",
event_kind: "process_exec",
process_executable_pattern: "/usr/bin/systemd-tmpfiles",
reason: "Known-good scheduled cleanup",
expires_at: "2026-06-01T00:00:00Z",
});
const r = await fetch(`${BASE}/api/v2/anomalies/suppressions`, {
method: "POST",
headers: sign("POST", "/api/v2/anomalies/suppressions", Buffer.from(body)),
body,
});
const { rule_id } = await r.json(); // HTTP 201go
import (
"bytes"
"encoding/json"
"io"
"net/http"
)
payload := []byte(`{
"name": "Suppress systemd-tmpfiles exec",
"event_kind": "process_exec",
"process_executable_pattern": "/usr/bin/systemd-tmpfiles",
"expires_at": "2026-06-01T00:00:00Z"
}`)
req, _ := http.NewRequest("POST",
BASE+"/api/v2/anomalies/suppressions",
bytes.NewReader(payload),
)
req.Header = sign("POST", "/api/v2/anomalies/suppressions", payload)
resp, _ := http.DefaultClient.Do(req)
defer resp.Body.Close()
body, _ := io.ReadAll(resp.Body)
var result map[string]any
json.Unmarshal(body, &result)
ruleID := result["rule_id"]bash
BODY='{
"name": "Suppress systemd-tmpfiles exec",
"event_kind": "process_exec",
"process_executable_pattern": "/usr/bin/systemd-tmpfiles",
"expires_at": "2026-06-01T00:00:00Z"
}'
eval curl -sf -X POST \
$(sign POST /api/v2/anomalies/suppressions "$BODY") \
-d "$BODY" \
"${BASE}/api/v2/anomalies/suppressions"Errors
| Error code | HTTP | Meaning |
|---|---|---|
insufficient_scope | 403 | Key does not have the anomalies:write scope. |
name_required | 422 | The name field is missing or empty. |
invalid_expires_at | 422 | expires_at is not a valid ISO 8601 timestamp or is in the past. |
Delete suppression rule
DELETE /api/v2/anomalies/suppressions/{rule_id} · Scope: anomalies:write
Permanently deletes a suppression rule. Scores and chains previously suppressed by this rule remain suppressed unless their suppressed flag is explicitly cleared. Returns HTTP 204 on success with no body.
Path parameters
| Parameter | Type | Required | Description |
|---|---|---|---|
rule_id | string | Yes | The suppression rule to delete. |
python
r = requests.delete(
f"{BASE}/api/v2/anomalies/suppressions/{rule_id}",
headers=sign("DELETE", f"/api/v2/anomalies/suppressions/{rule_id}"),
)
assert r.status_code == 204typescript
const r = await fetch(`${BASE}/api/v2/anomalies/suppressions/{rule_id}`, {
method: "DELETE",
headers: sign("DELETE", "/api/v2/anomalies/suppressions/{rule_id}"),
});
// r.status === 204go
import "net/http"
req, _ := http.NewRequest("DELETE",
BASE+"/api/v2/anomalies/suppressions/{rule_id}",
nil,
)
req.Header = sign("DELETE", "/api/v2/anomalies/suppressions/{rule_id}", nil)
resp, _ := http.DefaultClient.Do(req)
defer resp.Body.Close()
// resp.StatusCode == 204bash
eval curl -sf -X DELETE \
$(sign DELETE /api/v2/anomalies/suppressions/{rule_id}) \
"${BASE}/api/v2/anomalies/suppressions/{rule_id}"
# HTTP 204 - no bodyErrors
| Error code | HTTP | Meaning |
|---|---|---|
insufficient_scope | 403 | Key does not have the anomalies:write scope. |
suppression_not_found | 404 | No suppression rule exists with the given rule_id. |
List baselines
GET /api/v2/anomalies/baselines · Scope: anomalies:read
Returns the behavioral baseline summary for every binary observed on every sensor. Each row represents one binary-sensor combination. During the learning window (is_learning: true) scores are computed but are lower-confidence.
Query parameters
| Parameter | Type | Required | Default | Description |
|---|---|---|---|---|
sensor_id | string | No | - | Filter to a single sensor. |
page | integer | No | 1 | Page number (1-based). |
page_size | integer | No | 100 | Results per page. Max 500. |
Response schema
| Field | Type | Description |
|---|---|---|
data | Baseline[] | Current page of baseline records. |
total | integer | Total baseline records matching the filter. |
page | integer | Current page number. |
page_size | integer | Effective page size. |
page_count | integer | Total number of pages. |
Baseline object
| Field | Type | Description |
|---|---|---|
sensor_id | string | Sensor this baseline belongs to. |
node_name | string | Hostname of the node. |
binary_path | string | Full path of the binary this baseline describes. |
event_count | integer | Number of training events accumulated for this baseline. |
is_learning | boolean | true while the baseline has fewer than the minimum required events (default threshold: 10). |
data_days | integer | Number of calendar days of data used to build this baseline. |
last_scored_at | string (ISO 8601) | null | When an event for this binary was last scored. |
Response example
json
{
"data": [
{
"sensor_id": "{sensor_id}",
"node_name": "upf-prod-01",
"binary_path": "/opt/open5gs/bin/upf",
"event_count": 8420,
"is_learning": false,
"data_days": 14,
"last_scored_at": "2026-04-26T14:22:00Z"
},
{
"sensor_id": "{sensor_id}",
"node_name": "upf-prod-01",
"binary_path": "/tmp/beacon",
"event_count": 3,
"is_learning": true,
"data_days": 0,
"last_scored_at": "2026-04-26T14:22:00Z"
}
],
"total": 142,
"page": 1,
"page_size": 100,
"page_count": 2
}python
from urllib.parse import urlencode
page = 1
while True:
qs = urlencode({"sensor_id": "{sensor_id}", "page": page, "page_size": 100})
path = f"/api/v2/anomalies/baselines?{qs}"
r = requests.get(
f"{BASE}{path}",
headers=sign("GET", path),
).json()
for b in r["data"]:
print(b["binary_path"], b["event_count"], "learning=" + str(b["is_learning"]))
if page >= r["page_count"]:
break
page += 1typescript
let page = 1;
let pageCount = 1;
do {
const path = `/api/v2/anomalies/baselines?sensor_id={sensor_id}&page=${page}&page_size=100`;
const r = await fetch(`${BASE}${path}`, {
headers: sign("GET", path),
}).then(res => res.json());
for (const b of r.data) {
console.log(b.binary_path, b.event_count, `learning=${b.is_learning}`);
}
pageCount = r.page_count;
page++;
} while (page <= pageCount);go
import (
"encoding/json"
"fmt"
"io"
"net/http"
)
for page := 1; ; page++ {
url := fmt.Sprintf("%s/api/v2/anomalies/baselines?sensor_id={sensor_id}&page=%d&page_size=100", BASE, page)
pathOnly := fmt.Sprintf("/api/v2/anomalies/baselines?sensor_id={sensor_id}&page=%d&page_size=100", page)
req, _ := http.NewRequest("GET", url, nil)
req.Header = sign("GET", pathOnly, nil)
resp, _ := http.DefaultClient.Do(req)
body, _ := io.ReadAll(resp.Body)
resp.Body.Close()
var r map[string]any
json.Unmarshal(body, &r)
pageCount := int(r["page_count"].(float64))
if page >= pageCount {
break
}
}bash
eval curl -sf \
$(sign GET "/api/v2/anomalies/baselines?sensor_id={sensor_id}&page=1") \
"${BASE}/api/v2/anomalies/baselines?sensor_id={sensor_id}&page=1"Errors
| Error code | HTTP | Meaning |
|---|---|---|
insufficient_scope | 403 | Key does not have the anomalies:read scope. |
Get behavioral fingerprint
GET /api/v2/anomalies/baselines/fingerprint · Scope: anomalies:read
Returns the full behavioral fingerprint for a specific binary on a specific sensor. A fingerprint captures what the baseline considers "normal" across spawn, network, file, argument, and time dimensions. Both sensor_id and binary_path are required.
Query parameters
| Parameter | Type | Required | Default | Description |
|---|---|---|---|---|
sensor_id | string | Yes | - | The sensor ID to query. |
binary_path | string | Yes | - | Full path of the binary (e.g. /usr/bin/ssh). Must be URL-encoded. |
Response schema
| Field | Type | Description |
|---|---|---|
sensor_id | string | The queried sensor. |
node_name | string | Hostname of the node. |
binary_path | string | The queried binary path. |
spawn_profile | string[] | List of child process paths historically spawned by this binary. |
net_profile | object | Network baseline. Contains outbound_ips: string[], outbound_ports: integer[], protocols: string[]. |
file_profile | string[] | Path prefixes historically accessed by this binary. |
args_profile | string[] | Representative argument patterns observed in training data. |
time_profile | integer[24] | Array of 24 integers, one per hour-of-day (UTC), representing relative execution frequency. |
event_count | integer | Number of training events in this fingerprint. |
is_learning | boolean | true if the baseline has not yet met the minimum event threshold. |
data_days | integer | Number of calendar days of data in the fingerprint. |
Response example
json
{
"sensor_id": "{sensor_id}",
"node_name": "upf-prod-01",
"binary_path": "/opt/open5gs/bin/upf",
"spawn_profile": [
"/bin/sh",
"/usr/bin/logger"
],
"net_profile": {
"outbound_ips": ["10.0.0.2", "10.0.0.3"],
"outbound_ports": [8805, 2123, 2152],
"protocols": ["udp", "tcp"]
},
"file_profile": [
"/etc/open5gs/",
"/var/log/open5gs/",
"/run/open5gs/"
],
"args_profile": [
"-c /etc/open5gs/upf.yaml",
"--pid-file /run/open5gs/upfd.pid"
],
"time_profile": [
12, 11, 13, 10, 9, 8, 7, 14, 22, 35, 41, 45,
48, 50, 51, 49, 46, 38, 30, 25, 22, 18, 15, 13
],
"event_count": 8420,
"is_learning": false,
"data_days": 14
}python
from urllib.parse import urlencode
params = urlencode({"sensor_id": "{sensor_id}", "binary_path": "/opt/open5gs/bin/upf"})
path = f"/api/v2/anomalies/baselines/fingerprint?{params}"
fp = requests.get(
f"{BASE}{path}",
headers=sign("GET", path),
).json()
print("spawn profile:", fp["spawn_profile"])
print("network ports:", fp["net_profile"]["outbound_ports"])typescript
const qs = new URLSearchParams({
sensor_id: "{sensor_id}",
binary_path: "/opt/open5gs/bin/upf",
});
const path = `/api/v2/anomalies/baselines/fingerprint?${qs}`;
const fp = await fetch(`${BASE}${path}`, {
headers: sign("GET", path),
}).then(r => r.json());
console.log("spawn profile:", fp.spawn_profile);
console.log("network ports:", fp.net_profile.outbound_ports);go
import (
"encoding/json"
"io"
"net/http"
"net/url"
)
params := url.Values{}
params.Set("sensor_id", "{sensor_id}")
params.Set("binary_path", "/opt/open5gs/bin/upf")
pathOnly := "/api/v2/anomalies/baselines/fingerprint?" + params.Encode()
req, _ := http.NewRequest("GET", BASE+pathOnly, nil)
req.Header = sign("GET", pathOnly, nil)
resp, _ := http.DefaultClient.Do(req)
defer resp.Body.Close()
body, _ := io.ReadAll(resp.Body)
var fp map[string]any
json.Unmarshal(body, &fp)bash
SENSOR="{sensor_id}"
BINARY=$(python3 -c "import urllib.parse; print(urllib.parse.quote('/opt/open5gs/bin/upf'))")
eval curl -sf \
$(sign GET "/api/v2/anomalies/baselines/fingerprint?sensor_id=${SENSOR}&binary_path=${BINARY}") \
"${BASE}/api/v2/anomalies/baselines/fingerprint?sensor_id=${SENSOR}&binary_path=${BINARY}"Errors
| Error code | HTTP | Meaning |
|---|---|---|
insufficient_scope | 403 | Key does not have the anomalies:read scope. |
sensor_id_required | 400 | The sensor_id query parameter is missing. |
binary_path_required | 400 | The binary_path query parameter is missing. |
baseline_not_found | 404 | No baseline exists for this binary on this sensor. |
Rebuild baselines
POST /api/v2/anomalies/baselines/rebuild · Scope: anomalies:write
Triggers a full behavioral baseline rebuild from historical event data for all sensors. The rebuild is synchronous and may take several seconds on large deployments. All existing baselines are replaced with the freshly computed result. Scores are backfilled against the new baselines.
This endpoint is useful after a significant deployment change (e.g. new software version) that would otherwise cause elevated false-positive anomaly scores during the learning window.
Request body
No body required.
Response schema
| Field | Type | Description |
|---|---|---|
rebuilt | boolean | Always true on success. |
baselines_built | integer | Number of binary-sensor baseline profiles rebuilt. |
scores_backfilled | integer | Number of historical scores recomputed against the new baselines. |
detail | string | Human-readable summary message. |
Response example
json
{
"rebuilt": true,
"baselines_built": 142,
"scores_backfilled": 38,
"detail": "Baseline rebuild completed. 142 profiles updated, 38 scores backfilled."
}python
result = requests.post(
f"{BASE}/api/v2/anomalies/baselines/rebuild",
headers=sign("POST", "/api/v2/anomalies/baselines/rebuild"),
).json()
print(f"Rebuilt {result['baselines_built']} baselines, backfilled {result['scores_backfilled']} scores")typescript
const result = await fetch(`${BASE}/api/v2/anomalies/baselines/rebuild`, {
method: "POST",
headers: sign("POST", "/api/v2/anomalies/baselines/rebuild"),
}).then(r => r.json());
console.log(`Rebuilt ${result.baselines_built} baselines, backfilled ${result.scores_backfilled} scores`);go
import (
"encoding/json"
"io"
"net/http"
)
req, _ := http.NewRequest("POST",
BASE+"/api/v2/anomalies/baselines/rebuild",
nil,
)
req.Header = sign("POST", "/api/v2/anomalies/baselines/rebuild", nil)
resp, _ := http.DefaultClient.Do(req)
defer resp.Body.Close()
body, _ := io.ReadAll(resp.Body)
var result map[string]any
json.Unmarshal(body, &result)
fmt.Printf("Rebuilt %v baselines\n", result["baselines_built"])bash
eval curl -sf -X POST \
$(sign POST /api/v2/anomalies/baselines/rebuild) \
"${BASE}/api/v2/anomalies/baselines/rebuild"Errors
| Error code | HTTP | Meaning |
|---|---|---|
insufficient_scope | 403 | Key does not have the anomalies:write scope. |
Compliance
The compliance engine evaluates runtime sensor telemetry against the selected regulatory frameworks on a rolling basis. Scores are recomputed approximately every hour. Evidence items are the raw runtime observations used to justify each control status.
Supported frameworks: cis_benchmark, nis2_directive, etsi_security, 3gpp_ts33117, o_ran_wg11, nis2_telecom, cis_telecom.
Get compliance posture
GET /api/v2/compliance/posture · Scope: compliance:read
Returns a high-level aggregate score for each requested compliance framework. Use this endpoint to build summary dashboards or determine overall compliance health at a glance.
Query parameters
| Parameter | Type | Required | Default | Description |
|---|---|---|---|---|
period | string | No | last_30d | Evaluation window. One of last_7d, last_30d, last_90d. |
frameworks | string | No | all | Comma-separated list of framework IDs to include. Omit to return all configured frameworks. |
Response schema
| Field | Type | Description |
|---|---|---|
period | string | The evaluation period used for this response. |
frameworks | FrameworkScore[] | One entry per framework. |
total | integer | Number of frameworks returned. |
FrameworkScore object
| Field | Type | Description |
|---|---|---|
framework | string | Framework identifier (e.g. o_ran_wg11). |
overall_score | integer | Aggregate score 0–100 across all controls in this framework. |
pass_count | integer | Number of controls with status pass. |
partial_count | integer | Number of controls with status partial. |
fail_count | integer | Number of controls with status fail. |
info_count | integer | Number of controls with status info (informational, not graded). |
last_computed_at | string (ISO 8601) | When this framework score was last recomputed. |
Response example
json
{
"period": "last_30d",
"frameworks": [
{
"framework": "o_ran_wg11",
"overall_score": 91,
"pass_count": 6,
"partial_count": 1,
"fail_count": 0,
"info_count": 1,
"last_computed_at": "2026-04-26T14:00:00Z"
},
{
"framework": "3gpp_ts33117",
"overall_score": 78,
"pass_count": 9,
"partial_count": 3,
"fail_count": 2,
"info_count": 0,
"last_computed_at": "2026-04-26T14:00:00Z"
},
{
"framework": "nis2_directive",
"overall_score": 85,
"pass_count": 11,
"partial_count": 2,
"fail_count": 1,
"info_count": 0,
"last_computed_at": "2026-04-26T14:00:00Z"
}
],
"total": 3
}python
posture = requests.get(
f"{BASE}/api/v2/compliance/posture",
headers=sign("GET", "/api/v2/compliance/posture"),
params={
"period": "last_30d",
"frameworks": "o_ran_wg11,3gpp_ts33117,nis2_directive",
},
).json()
for fw in posture["frameworks"]:
print(f"{fw['framework']}: {fw['overall_score']}/100 "
f"(pass={fw['pass_count']}, fail={fw['fail_count']})")typescript
const path = "/api/v2/compliance/posture?period=last_30d&frameworks=o_ran_wg11,3gpp_ts33117";
const posture = await fetch(`${BASE}${path}`, {
headers: sign("GET", path),
}).then(r => r.json());
for (const fw of posture.frameworks) {
console.log(`${fw.framework}: ${fw.overall_score}/100 (pass=${fw.pass_count}, fail=${fw.fail_count})`);
}go
import (
"encoding/json"
"io"
"net/http"
)
path := "/api/v2/compliance/posture?period=last_30d&frameworks=o_ran_wg11,3gpp_ts33117"
req, _ := http.NewRequest("GET", BASE+path, nil)
req.Header = sign("GET", path, nil)
resp, _ := http.DefaultClient.Do(req)
defer resp.Body.Close()
body, _ := io.ReadAll(resp.Body)
var posture map[string]any
json.Unmarshal(body, &posture)bash
eval curl -sf \
$(sign GET "/api/v2/compliance/posture?period=last_30d&frameworks=o_ran_wg11,3gpp_ts33117") \
"${BASE}/api/v2/compliance/posture?period=last_30d&frameworks=o_ran_wg11,3gpp_ts33117"Errors
| Error code | HTTP | Meaning |
|---|---|---|
insufficient_scope | 403 | Key does not have the compliance:read scope. |
invalid_period | 400 | period is not one of last_7d, last_30d, last_90d. |
List compliance controls
GET /api/v2/compliance/controls · Scope: compliance:read
Returns the status and evidence summary for every control within a single framework. Use the control_id from this response to drill into evidence with the evidence endpoint.
Query parameters
| Parameter | Type | Required | Default | Description |
|---|---|---|---|---|
framework | string | Yes | - | Framework ID (e.g. o_ran_wg11). See posture endpoint for full list. |
period | string | No | last_30d | last_7d, last_30d, or last_90d. |
Response schema
| Field | Type | Description |
|---|---|---|
framework | string | The queried framework ID. |
period | string | The evaluation period. |
overall_score | integer | Aggregate score 0–100 for this framework in this period. |
pass_count | integer | Controls with pass status. |
partial_count | integer | Controls with partial status. |
fail_count | integer | Controls with fail status. |
info_count | integer | Controls with info status. |
controls | Control[] | Per-control detail. |
last_computed_at | string (ISO 8601) | When the scores were last computed. |
Control object
| Field | Type | Description |
|---|---|---|
control_id | string | Stable control identifier (e.g. ORAN_E2, CIS_5_1). |
control_name | string | Human-readable control name. |
status | string | pass, partial, fail, or info. |
metric | string | Short human-readable metric summary (e.g. "12/12 sensors compliant"). |
evidence_summary | string | One-sentence description of the evidence used to determine this status. |
has_evidence | boolean | true if detail evidence items are available via the evidence endpoint. |
Response example
json
{
"framework": "o_ran_wg11",
"period": "last_30d",
"overall_score": 91,
"pass_count": 6,
"partial_count": 1,
"fail_count": 0,
"info_count": 1,
"controls": [
{
"control_id": "ORAN_E2",
"control_name": "E2 interface peer integrity",
"status": "pass",
"metric": "8/8 sensors - no unauthorized E2 peers observed",
"evidence_summary": "All E2 terminations are on whitelisted RIC addresses. No anomalous peer connections in period.",
"has_evidence": true
},
{
"control_id": "ORAN_O1",
"control_name": "O1 management interface access control",
"status": "partial",
"metric": "6/8 sensors compliant",
"evidence_summary": "2 sensors observed O1 connections from non-whitelisted management IPs.",
"has_evidence": true
},
{
"control_id": "ORAN_XAPP",
"control_name": "xApp runtime behavior",
"status": "info",
"metric": "Informational - no xApp sensors enrolled",
"evidence_summary": "No xApp-role sensors are enrolled; this control is not evaluated.",
"has_evidence": false
}
],
"last_computed_at": "2026-04-26T14:00:00Z"
}python
controls = requests.get(
f"{BASE}/api/v2/compliance/controls",
headers=sign("GET", "/api/v2/compliance/controls"),
params={"framework": "o_ran_wg11", "period": "last_30d"},
).json()
for ctrl in controls["controls"]:
print(f"{ctrl['control_id']} [{ctrl['status'].upper()}] {ctrl['metric']}")typescript
const path = "/api/v2/compliance/controls?framework=o_ran_wg11&period=last_30d";
const controls = await fetch(`${BASE}${path}`, {
headers: sign("GET", path),
}).then(r => r.json());
for (const ctrl of controls.controls) {
console.log(`${ctrl.control_id} [${ctrl.status.toUpperCase()}] ${ctrl.metric}`);
}go
import (
"encoding/json"
"io"
"net/http"
)
path := "/api/v2/compliance/controls?framework=o_ran_wg11&period=last_30d"
req, _ := http.NewRequest("GET", BASE+path, nil)
req.Header = sign("GET", path, nil)
resp, _ := http.DefaultClient.Do(req)
defer resp.Body.Close()
body, _ := io.ReadAll(resp.Body)
var result map[string]any
json.Unmarshal(body, &result)bash
eval curl -sf \
$(sign GET "/api/v2/compliance/controls?framework=o_ran_wg11") \
"${BASE}/api/v2/compliance/controls?framework=o_ran_wg11"Errors
| Error code | HTTP | Meaning |
|---|---|---|
insufficient_scope | 403 | Key does not have the compliance:read scope. |
framework_required | 400 | The framework query parameter is missing. |
invalid_framework | 400 | The provided framework value is not a recognized framework ID. |
Get control evidence
GET /api/v2/compliance/controls/{control_id}/evidence · Scope: compliance:read
Returns the individual runtime evidence items that the compliance engine used to determine the status of a specific control. Evidence items are the raw observations (events, absence-of-events, configuration snapshots) collected from sensors during the evaluation period.
Path parameters
| Parameter | Type | Required | Description |
|---|---|---|---|
control_id | string | Yes | The control identifier (e.g. ORAN_E2). |
Query parameters
| Parameter | Type | Required | Default | Description |
|---|---|---|---|---|
framework | string | Yes | - | The framework that owns this control. Required to disambiguate controls that appear in multiple frameworks. |
period | string | No | last_30d | last_7d, last_30d, or last_90d. |
limit | integer | No | 50 | Maximum number of evidence items. Max 200. |
Response schema
| Field | Type | Description |
|---|---|---|
control_id | string | The queried control. |
control_name | string | Human-readable control name. |
framework | string | The framework this evidence belongs to. |
period | string | The evaluation period. |
total | integer | Total evidence items before the limit is applied. |
items | EvidenceItem[] | Evidence items sorted newest first. |
EvidenceItem object
| Field | Type | Description |
|---|---|---|
timestamp | string (ISO 8601) | null | When the underlying event or observation occurred, if applicable. |
sensor_id | string | null | The sensor that produced this evidence item, if applicable. |
node_name | string | null | Hostname of the sensor node, if applicable. |
description | string | Short description of the evidence item (e.g. "Unauthorized E2 peer connection"). |
detail | string | Full detail string with addresses, processes, or other context. |
Response example
json
{
"control_id": "ORAN_E2",
"control_name": "E2 interface peer integrity",
"framework": "o_ran_wg11",
"period": "last_30d",
"total": 2,
"items": [
{
"timestamp": "2026-04-26T14:20:00Z",
"sensor_id": "{sensor_id}",
"node_name": "upf-prod-01",
"description": "E2 connection to known-good RIC peer",
"detail": "TCP connect from /opt/o-ran/bin/ric-agent to 10.0.0.5:36421 - peer in allowlist"
},
{
"timestamp": "2026-04-20T08:15:00Z",
"sensor_id": "{sensor_id}",
"node_name": "upf-prod-01",
"description": "E2 connection to known-good RIC peer",
"detail": "TCP connect from /opt/o-ran/bin/ric-agent to 10.0.0.5:36421 - peer in allowlist"
}
]
}python
evidence = requests.get(
f"{BASE}/api/v2/compliance/controls/ORAN_E2/evidence",
headers=sign("GET", "/api/v2/compliance/controls/ORAN_E2/evidence"),
params={"framework": "o_ran_wg11", "period": "last_30d", "limit": 100},
).json()
for item in evidence["items"]:
print(f"[{item['timestamp']}] {item['node_name']}: {item['description']}")typescript
const path = "/api/v2/compliance/controls/ORAN_E2/evidence?framework=o_ran_wg11&period=last_30d";
const evidence = await fetch(`${BASE}${path}`, {
headers: sign("GET", path),
}).then(r => r.json());
for (const item of evidence.items) {
console.log(`[${item.timestamp}] ${item.node_name}: ${item.description}`);
}go
import (
"encoding/json"
"io"
"net/http"
)
path := "/api/v2/compliance/controls/ORAN_E2/evidence?framework=o_ran_wg11&period=last_30d"
req, _ := http.NewRequest("GET", BASE+path, nil)
req.Header = sign("GET", path, nil)
resp, _ := http.DefaultClient.Do(req)
defer resp.Body.Close()
body, _ := io.ReadAll(resp.Body)
var evidence map[string]any
json.Unmarshal(body, &evidence)bash
eval curl -sf \
$(sign GET "/api/v2/compliance/controls/ORAN_E2/evidence?framework=o_ran_wg11") \
"${BASE}/api/v2/compliance/controls/ORAN_E2/evidence?framework=o_ran_wg11"Errors
| Error code | HTTP | Meaning |
|---|---|---|
insufficient_scope | 403 | Key does not have the compliance:read scope. |
framework_required | 400 | The framework query parameter is missing. |
control_not_found | 404 | No control exists with the given control_id in the specified framework. |
Export compliance data
GET /api/v2/compliance/export · Scope: compliance:read
Exports compliance evidence records for a time window as JSON or CSV. Suitable for submitting to auditors, feeding a SIEM, or importing into a GRC tool. The CSV format includes one row per matched compliance event with a fixed set of columns.
Query parameters
| Parameter | Type | Required | Default | Description |
|---|---|---|---|---|
format | string | No | json | json or csv. |
framework | string | No | - | Substring filter applied to framework IDs (e.g. nis2 matches both nis2_directive and nis2_telecom). |
from | string (ISO 8601) | No | 30 days ago | Start of export window. |
until | string (ISO 8601) | No | now | End of export window. |
Response schema (JSON format)
| Field | Type | Description |
|---|---|---|
from | string (ISO 8601) | Effective start of the export window. |
until | string (ISO 8601) | Effective end of the export window. |
count | integer | Number of records exported. |
data | object[] | Compliance event records. Each object contains matched_at, rule_name, compliance_tags, sensor, event_kind, process, status, and resolution_note. |
CSV column reference
| Column | Description |
|---|---|
matched_at | ISO 8601 timestamp when the compliance event was matched. |
rule_name | Name of the compliance rule that matched. |
compliance_tags | Semicolon-separated framework and control IDs (e.g. o_ran_wg11:ORAN_E2). |
sensor | Hostname of the sensor that produced the event. |
event_kind | Runtime event kind. |
process | Binary path of the process involved. |
status | pass, partial, fail, or info. |
resolution_note | Free-text note if the event has been resolved. |
Response example (JSON)
json
{
"from": "2026-03-27T00:00:00Z",
"until": "2026-04-26T00:00:00Z",
"count": 3,
"data": [
{
"matched_at": "2026-04-26T14:20:00Z",
"rule_name": "E2 interface peer integrity",
"compliance_tags": "o_ran_wg11:ORAN_E2",
"sensor": "upf-prod-01",
"event_kind": "net_connect",
"process": "/opt/o-ran/bin/ric-agent",
"status": "pass",
"resolution_note": ""
}
]
}Response example (CSV)
matched_at,rule_name,compliance_tags,sensor,event_kind,process,status,resolution_note
2026-04-26T14:20:00Z,E2 interface peer integrity,o_ran_wg11:ORAN_E2,upf-prod-01,net_connect,/opt/o-ran/bin/ric-agent,pass,python
import json
# JSON export
path = "/api/v2/compliance/export?format=json&framework=o_ran_wg11"
export = requests.get(f"{BASE}{path}", headers=sign("GET", path)).json()
print(f"Exported {export['count']} records")
# CSV export - save to file
path_csv = "/api/v2/compliance/export?format=csv&framework=o_ran_wg11"
csv_bytes = requests.get(f"{BASE}{path_csv}", headers=sign("GET", path_csv)).content
with open("compliance_export.csv", "wb") as f:
f.write(csv_bytes)typescript
// JSON export
const path = "/api/v2/compliance/export?format=json&framework=o_ran_wg11";
const export_ = await fetch(`${BASE}${path}`, {
headers: sign("GET", path),
}).then(r => r.json());
console.log(`Exported ${export_.count} records`);
// CSV export
const csvPath = "/api/v2/compliance/export?format=csv&framework=o_ran_wg11";
const csv = await fetch(`${BASE}${csvPath}`, {
headers: sign("GET", csvPath),
}).then(r => r.text());
// write csv to disk or stream to callergo
import (
"io"
"net/http"
"os"
)
// CSV export
path := "/api/v2/compliance/export?format=csv&framework=o_ran_wg11"
req, _ := http.NewRequest("GET", BASE+path, nil)
req.Header = sign("GET", path, nil)
resp, _ := http.DefaultClient.Do(req)
defer resp.Body.Close()
f, _ := os.Create("compliance_export.csv")
defer f.Close()
io.Copy(f, resp.Body)bash
# JSON to stdout
eval curl -sf \
$(sign GET "/api/v2/compliance/export?format=json&framework=o_ran_wg11") \
"${BASE}/api/v2/compliance/export?format=json&framework=o_ran_wg11"
# CSV to file
eval curl -sf \
$(sign GET "/api/v2/compliance/export?format=csv&framework=o_ran_wg11") \
"${BASE}/api/v2/compliance/export?format=csv&framework=o_ran_wg11" \
-o compliance_export.csvErrors
| Error code | HTTP | Meaning |
|---|---|---|
insufficient_scope | 403 | Key does not have the compliance:read scope. |
invalid_format | 400 | format is not json or csv. |
Alerts
Alerts are generated when a runtime detection rule matches an event on a sensor. Each alert has a lifecycle (new - acknowledged - in_progress - resolved) and supports assignment and threaded notes for collaborative triage.
List alerts
GET /api/v2/alerts · Scope: alerts:read
Returns a paginated list of alerts. By default returns all statuses sorted by triggered_at descending.
Query parameters
| Parameter | Type | Required | Default | Description |
|---|---|---|---|---|
status | string | No | - | Filter by status. One of new, acknowledged, in_progress, resolved, resolved_true_positive, resolved_false_positive, false_positive, suppressed. |
severity | string | No | - | Filter by severity: critical, high, medium, low. |
sensor_id | string | No | - | Filter to a single sensor. |
page | integer | No | 1 | Page number (1-based). |
page_size | integer | No | 50 | Page size. Max 200. |
Response schema
| Field | Type | Description |
|---|---|---|
data | Alert[] | Current page of alerts. |
total | integer | Total matching alerts before pagination. |
page | integer | Current page number. |
page_size | integer | Effective page size. |
next_cursor | integer | null | Next page number, or null when there are no more results. |
Pass the returned next_cursor value as the page query parameter on the next request to fetch the next page.
Alert object
| Field | Type | Description |
|---|---|---|
alert_id | string | Stable alert identifier. |
rule_name | string | Name of the detection rule that fired. |
event_kind | string | Runtime event kind that triggered the alert. |
severity | string | critical, high, medium, or low. |
status | string | Current lifecycle status. |
sensor_id | string | Sensor that produced the triggering event. |
node_name | string | Hostname of the node. |
triggered_at | string (ISO 8601) | When the alert was first created. |
process_executable | string | null | Path of the process involved, if available. |
message | string | null | Human-readable alert description. |
mitre_technique | string | null | Primary MITRE ATT&CK technique ID (e.g. T1059). |
mitre_tactic | string | null | Primary MITRE ATT&CK tactic (e.g. Execution). |
assignee | string | null | Email of the user the alert is assigned to, if any. |
Response example
json
{
"data": [
{
"alert_id": "{alert_id}",
"rule_name": "Block execution from /tmp",
"event_kind": "process_exec",
"severity": "critical",
"status": "new",
"sensor_id": "{sensor_id}",
"node_name": "upf-prod-01",
"triggered_at": "2026-04-26T14:22:00Z",
"process_executable": "/tmp/beacon",
"message": "process_exec blocked: /tmp/beacon attempted execution on upf-prod-01",
"mitre_technique": "T1059.004",
"mitre_tactic": "Execution",
"assignee": null
}
],
"total": 1,
"page": 1,
"page_size": 50,
"next_cursor": null
}python
alerts = requests.get(
f"{BASE}/api/v2/alerts",
headers=sign("GET", "/api/v2/alerts"),
params={"severity": "critical", "status": "new", "page_size": 50},
).json()
for a in alerts["data"]:
print(f"[{a['severity'].upper()}] {a['alert_id']} - {a['message']}")typescript
const path = "/api/v2/alerts?severity=critical&status=new&page_size=50";
const alerts = await fetch(`${BASE}${path}`, {
headers: sign("GET", path),
}).then(r => r.json());
for (const a of alerts.data) {
console.log(`[${a.severity.toUpperCase()}] ${a.alert_id} - ${a.message}`);
}go
import (
"encoding/json"
"io"
"net/http"
)
path := "/api/v2/alerts?severity=critical&status=new&page_size=50"
req, _ := http.NewRequest("GET", BASE+path, nil)
req.Header = sign("GET", path, nil)
resp, _ := http.DefaultClient.Do(req)
defer resp.Body.Close()
body, _ := io.ReadAll(resp.Body)
var result map[string]any
json.Unmarshal(body, &result)bash
eval curl -sf \
$(sign GET "/api/v2/alerts?severity=critical&status=new") \
"${BASE}/api/v2/alerts?severity=critical&status=new"Errors
| Error code | HTTP | Meaning |
|---|---|---|
insufficient_scope | 403 | Key does not have the alerts:read scope. |
Get alert
GET /api/v2/alerts/{alert_id} · Scope: alerts:read
Returns full detail for a single alert.
Path parameters
| Parameter | Type | Required | Description |
|---|---|---|---|
alert_id | string | Yes | The alert identifier (e.g. {alert_id}). |
Response schema
Returns a single Alert object (same fields as list). This response does not include alert notes.
Response example
json
{
"alert_id": "{alert_id}",
"rule_name": "Block execution from /tmp",
"event_kind": "process_exec",
"severity": "critical",
"status": "acknowledged",
"sensor_id": "{sensor_id}",
"node_name": "upf-prod-01",
"triggered_at": "2026-04-26T14:22:00Z",
"process_executable": "/tmp/beacon",
"message": "process_exec blocked: /tmp/beacon attempted execution on upf-prod-01",
"mitre_technique": "T1059.004",
"mitre_tactic": "Execution",
"assignee": "ops@example.com"
}python
alert = requests.get(
f"{BASE}/api/v2/alerts/{alert_id}",
headers=sign("GET", f"/api/v2/alerts/{alert_id}"),
).json()
print(f"Status: {alert['status']}")typescript
const alert = await fetch(`${BASE}/api/v2/alerts/{alert_id}`, {
headers: sign("GET", "/api/v2/alerts/{alert_id}"),
}).then(r => r.json());
console.log(`Status: ${alert.status}`);go
import (
"encoding/json"
"io"
"net/http"
)
req, _ := http.NewRequest("GET",
BASE+"/api/v2/alerts/{alert_id}",
nil,
)
req.Header = sign("GET", "/api/v2/alerts/{alert_id}", nil)
resp, _ := http.DefaultClient.Do(req)
defer resp.Body.Close()
body, _ := io.ReadAll(resp.Body)
var alert map[string]any
json.Unmarshal(body, &alert)bash
eval curl -sf \
$(sign GET /api/v2/alerts/{alert_id}) \
"${BASE}/api/v2/alerts/{alert_id}"Errors
| Error code | HTTP | Meaning |
|---|---|---|
insufficient_scope | 403 | Key does not have the alerts:read scope. |
alert_not_found | 404 | No alert exists with the given alert_id. |
Update alert
PATCH /api/v2/alerts/{alert_id} · Scope: alerts:write
Updates the status, assignee, or resolution note on an alert. The status field must be included; assignee and resolution are optional.
Path parameters
| Parameter | Type | Required | Description |
|---|---|---|---|
alert_id | string | Yes | The alert to update. |
Request body
| Parameter | Type | Required | Default | Description |
|---|---|---|---|---|
status | string | Yes | - | New lifecycle status. One of new, acknowledged, in_progress, resolved, resolved_true_positive, resolved_false_positive, false_positive, suppressed. |
assignee | string | No | - | Email of the assignee. Pass null to unassign. |
resolution | string | No | - | Free-text resolution note. Used when closing the alert. |
Response schema
| Field | Type | Description |
|---|---|---|
alert_id | string | The updated alert ID. |
status | string | The new alert status. |
updated | boolean | Always true. |
Response example
json
{
"alert_id": "{alert_id}",
"status": "acknowledged",
"updated": true
}python
import json
body = json.dumps({
"status": "acknowledged",
"assignee": "ops@example.com",
}).encode()
result = requests.patch(
f"{BASE}/api/v2/alerts/{alert_id}",
headers=sign("PATCH", f"/api/v2/alerts/{alert_id}", body),
data=body,
).json()
print(result["status"]) # acknowledgedtypescript
const body = JSON.stringify({
status: "acknowledged",
assignee: "ops@example.com",
});
const result = await fetch(`${BASE}/api/v2/alerts/{alert_id}`, {
method: "PATCH",
headers: sign("PATCH", "/api/v2/alerts/{alert_id}", Buffer.from(body)),
body,
}).then(r => r.json());
console.log(result.status); // "acknowledged"go
import (
"bytes"
"encoding/json"
"io"
"net/http"
)
payload := []byte(`{"status":"acknowledged","assignee":"ops@example.com"}`)
req, _ := http.NewRequest("PATCH",
BASE+"/api/v2/alerts/{alert_id}",
bytes.NewReader(payload),
)
req.Header = sign("PATCH", "/api/v2/alerts/{alert_id}", payload)
resp, _ := http.DefaultClient.Do(req)
defer resp.Body.Close()
body, _ := io.ReadAll(resp.Body)
var result map[string]any
json.Unmarshal(body, &result)bash
BODY='{"status":"acknowledged","assignee":"ops@example.com"}'
eval curl -sf -X PATCH \
$(sign PATCH /api/v2/alerts/{alert_id} "$BODY") \
-d "$BODY" \
"${BASE}/api/v2/alerts/{alert_id}"Errors
| Error code | HTTP | Meaning |
|---|---|---|
insufficient_scope | 403 | Key does not have the alerts:write scope. |
alert_not_found | 404 | No alert exists with the given alert_id. |
invalid_alert_status | 422 | The provided status value is not a valid lifecycle state. |
Add alert note
POST /api/v2/alerts/{alert_id}/notes · Scope: alerts:write
Appends a note to the alert's thread. Notes are immutable once created.
Path parameters
| Parameter | Type | Required | Description |
|---|---|---|---|
alert_id | string | Yes | The alert to annotate. |
Request body
| Parameter | Type | Required | Default | Description |
|---|---|---|---|---|
content | string | Yes | - | Note text. Maximum 2000 characters. |
Response schema
| Field | Type | Description |
|---|---|---|
alert_id | string | The alert this note was added to. |
note_id | string | The stable identifier of the newly created note. |
created | boolean | Always true. |
Response example
json
{
"alert_id": "{alert_id}",
"note_id": "{note_id}",
"created": true
}python
import json
body = json.dumps({
"content": "Confirmed false positive - this is an internal vulnerability scanner running from /tmp.",
}).encode()
result = requests.post(
f"{BASE}/api/v2/alerts/{alert_id}/notes",
headers=sign("POST", f"/api/v2/alerts/{alert_id}/notes", body),
data=body,
).json()
print(result["note_id"])typescript
const body = JSON.stringify({
content: "Confirmed false positive - internal vulnerability scanner running from /tmp.",
});
const result = await fetch(`${BASE}/api/v2/alerts/{alert_id}/notes`, {
method: "POST",
headers: sign("POST", "/api/v2/alerts/{alert_id}/notes", Buffer.from(body)),
body,
}).then(r => r.json());
console.log(result.note_id);go
import (
"bytes"
"encoding/json"
"io"
"net/http"
)
payload := []byte(`{"content":"Confirmed false positive - internal vulnerability scanner."}`)
req, _ := http.NewRequest("POST",
BASE+"/api/v2/alerts/{alert_id}/notes",
bytes.NewReader(payload),
)
req.Header = sign("POST", "/api/v2/alerts/{alert_id}/notes", payload)
resp, _ := http.DefaultClient.Do(req)
defer resp.Body.Close()
body, _ := io.ReadAll(resp.Body)
var result map[string]any
json.Unmarshal(body, &result)bash
BODY='{"content":"Confirmed false positive - internal vulnerability scanner."}'
eval curl -sf -X POST \
$(sign POST /api/v2/alerts/{alert_id}/notes "$BODY") \
-d "$BODY" \
"${BASE}/api/v2/alerts/{alert_id}/notes"Errors
| Error code | HTTP | Meaning |
|---|---|---|
insufficient_scope | 403 | Key does not have the alerts:write scope. |
alert_not_found | 404 | No alert exists with the given alert_id. |
content_required | 400 | The content field is empty. |
content_too_long | 400 | The content field exceeds 2000 characters. |
Export alerts
GET /api/v2/alerts/export · Scope: alerts:read
Returns up to 200 matching alerts as a JSON array, ordered by triggered_at descending. This endpoint does not paginate.
Request parameters
| Parameter | Type | Required | Description |
|---|---|---|---|
status | string | No | Filter by status. One of new, acknowledged, in_progress, resolved, resolved_true_positive, resolved_false_positive, false_positive, suppressed. |
severity | string | No | Filter by severity: critical, high, medium, low. |
sensor_id | string | No | Filter to a single sensor. |
Response schema
Returns a JSON array of alert objects.
Each object contains:
| Field | Type | Description |
|---|---|---|
alert_id | string | Stable alert identifier. |
rule_id | string | null | Detection rule identifier. |
rule_name | string | Detection rule name. |
source | string | Alert source. |
sensor_id | string | Sensor that produced the alert. |
node_name | string | Hostname of the node. |
event_kind | string | Runtime event kind that triggered the alert. |
severity | string | Alert severity. |
headline | string | Short alert headline. |
detail | string | null | Additional alert detail. |
process_executable | string | null | Path of the process involved, when available. |
message | string | null | Human-readable alert message. |
mitre_technique | string | null | Primary MITRE ATT&CK technique ID, when available. |
mitre_tactic | string | null | Primary MITRE ATT&CK tactic, when available. |
status | string | Alert lifecycle status. |
assignee | string | null | Current assignee. |
resolution | string | null | Resolution value, when set. |
triggered_at | string | RFC 3339 timestamp when the alert was created. |
acknowledged_at | string | null | RFC 3339 timestamp when the alert was first acknowledged or moved to in_progress. |
closed_at | string | null | RFC 3339 timestamp when the alert entered a closed status. |
updated_at | string | null | RFC 3339 timestamp of the last update. |
last_updated_by | string | null | Last actor that updated the alert. |
investigation_case_id | string | null | Linked investigation case, when present. |
ai_triage_severity | string | null | AI-generated triage severity, when present. |
ai_triage_rationale | string | null | AI-generated triage rationale, when present. |
ai_false_positive_likely | boolean | null | AI false-positive estimate, when present. |
ai_l2_narrative | string | null | AI-generated L2 narrative, when present. |
ai_recommended_actions | json | null | AI-recommended actions payload, when present. |
ai_confidence | number | null | AI confidence score, when present. |
ai_model_used | string | null | Model name used for AI triage, when present. |
ai_triaged_at | string | null | RFC 3339 timestamp when AI triage ran, when present. |
Errors
| Error code | HTTP | Meaning |
|---|---|---|
invalid_alert_status | 422 | The status filter is not a valid alert status. |
invalid_alert_severity | 422 | The severity filter is not one of critical, high, medium, or low. |
insufficient_scope | 403 | The API key does not have the alerts:read scope. |
Policies
Policy rules define what the sensor should detect or block. Each rule is created from a predefined template or custom YAML and can be scoped to an individual sensor or a sensor group. The enforcement endpoint controls whether a rule is in observe mode or active enforcement mode.
List policy rules
GET /api/v2/policies/rules · Scope: policies:read
Returns all policy rules in scope. Rules may be in observe mode, enforcement-ready, or actively enforcing depending on their enforcement_state.
Response schema
| Field | Type | Description |
|---|---|---|
data | PolicyRule[] | All policy rules. |
total | integer | Total count. |
PolicyRule object
| Field | Type | Description |
|---|---|---|
rule_id | string | Stable rule identifier. |
template_id | string | Template this rule was created from (see below). custom for user-supplied YAML. |
name | string | Human-readable rule name. |
description | string | null | Optional operator description. |
sensor_id | string | null | If set, the rule applies only to this sensor. |
target_group_id | string | null | If set, the rule applies to all sensors in this group. |
enabled | boolean | Whether the rule is active. |
created_at | string (ISO 8601) | When the rule was created. |
created_by | string | Email or API key display name of the creator. |
exceptions | Exception[] | List of exceptions that narrow the rule's scope. Each exception has an executable field (string). |
Response example
json
{
"data": [
{
"rule_id": "{rule_id}",
"template_id": "block-execve-tmp",
"name": "Block execution from /tmp - UPF fleet",
"description": null,
"sensor_id": null,
"target_group_id": "{grp_id}",
"enabled": true,
"created_at": "2026-04-20T09:00:00Z",
"created_by": "ops@example.com",
"exceptions": [
{ "executable": "/tmp/trivy" }
]
}
],
"total": 1
}python
rules = requests.get(
f"{BASE}/api/v2/policies/rules",
headers=sign("GET", "/api/v2/policies/rules"),
).json()
for r in rules["data"]:
print(r["rule_id"], r["template_id"], "enabled=" + str(r["enabled"]))typescript
const rules = await fetch(`${BASE}/api/v2/policies/rules`, {
headers: sign("GET", "/api/v2/policies/rules"),
}).then(r => r.json());
for (const rule of rules.data) {
console.log(rule.rule_id, rule.template_id, `enabled=${rule.enabled}`);
}go
import (
"encoding/json"
"io"
"net/http"
)
req, _ := http.NewRequest("GET",
BASE+"/api/v2/policies/rules",
nil,
)
req.Header = sign("GET", "/api/v2/policies/rules", nil)
resp, _ := http.DefaultClient.Do(req)
defer resp.Body.Close()
body, _ := io.ReadAll(resp.Body)
var result map[string]any
json.Unmarshal(body, &result)bash
eval curl -sf \
$(sign GET /api/v2/policies/rules) \
"${BASE}/api/v2/policies/rules"Errors
| Error code | HTTP | Meaning |
|---|---|---|
insufficient_scope | 403 | Key does not have the policies:read scope. |
Create policy rule
POST /api/v2/policies/rules · Scope: policies:write
Creates a new policy rule from a template or from custom YAML. Returns HTTP 201 on success.
Request body
| Parameter | Type | Required | Default | Description |
|---|---|---|---|---|
template_id | string | Yes | - | Template to use. See list below. Use custom for user-supplied YAML. |
name | string | Yes | - | Human-readable name (max 255 chars). |
description | string | No | - | Operator description. |
sensor_id | string | No | - | Scope to a single sensor. Mutually exclusive with target_group_id. |
target_group_id | string | No | - | Scope to a sensor group. Mutually exclusive with sensor_id. |
exceptions | object[] | No | [] | Exceptions that narrow the rule. Each object must have an executable field (string). |
custom_enforcement_yaml | string | Conditional | - | Required when template_id is custom. A valid TracingPolicy YAML document. |
Available template IDs
| Template ID | What it blocks |
|---|---|
block-kmod-load | Kernel module load (init_module / finit_module) |
block-user-namespace | Unprivileged user namespace creation |
block-raw-sockets | Raw and packet socket creation |
block-capset-sysadmin | CAP_SYS_ADMIN capability grant via capset |
kill-ptrace-attach | ptrace(PTRACE_ATTACH) from processes outside the parent tree |
block-execve-tmp | Process execution from /tmp, /dev/shm, and other world-writable directories |
block-memfd-create | Anonymous in-memory file creation (memfd_create) |
block-pivot-root | Filesystem root pivot (pivot_root) |
block-mount-syscall | Arbitrary mount calls from non-init mount namespaces |
block-unshare | unshare(CLONE_NEWUSER) calls from unprivileged processes |
block-bpf-load | BPF program load by non-root processes |
block-socket-raw | Raw socket creation (SOCK_RAW, SOCK_PACKET) |
block-chmod-suid | chmod/fchmod calls setting SUID or SGID bits |
custom | User-supplied TracingPolicy YAML (custom_enforcement_yaml required) |
Response schema
| Field | Type | Description |
|---|---|---|
rule_id | string | The newly created rule ID. |
Response example
json
{
"rule_id": "{rule_id}"
}python
import json
body = json.dumps({
"template_id": "block-execve-tmp",
"name": "Block exec from /tmp - UPF fleet",
"target_group_id": "{grp_id}",
"exceptions": [
{"executable": "/tmp/trivy"}
],
}).encode()
r = requests.post(
f"{BASE}/api/v2/policies/rules",
headers=sign("POST", "/api/v2/policies/rules", body),
data=body,
)
assert r.status_code == 201
rule_id = r.json()["rule_id"]typescript
const body = JSON.stringify({
template_id: "block-execve-tmp",
name: "Block exec from /tmp - UPF fleet",
target_group_id: "{grp_id}",
exceptions: [{ executable: "/tmp/trivy" }],
});
const r = await fetch(`${BASE}/api/v2/policies/rules`, {
method: "POST",
headers: sign("POST", "/api/v2/policies/rules", Buffer.from(body)),
body,
});
const { rule_id } = await r.json(); // HTTP 201go
import (
"bytes"
"encoding/json"
"io"
"net/http"
)
payload := []byte(`{
"template_id": "block-execve-tmp",
"name": "Block exec from /tmp - UPF fleet",
"target_group_id": "{grp_id}",
"exceptions": [{"executable": "/tmp/trivy"}]
}`)
req, _ := http.NewRequest("POST",
BASE+"/api/v2/policies/rules",
bytes.NewReader(payload),
)
req.Header = sign("POST", "/api/v2/policies/rules", payload)
resp, _ := http.DefaultClient.Do(req)
defer resp.Body.Close()
body, _ := io.ReadAll(resp.Body)
var result map[string]any
json.Unmarshal(body, &result)
ruleID := result["rule_id"]bash
BODY='{
"template_id": "block-execve-tmp",
"name": "Block exec from /tmp",
"exceptions": [{"executable": "/tmp/trivy"}]
}'
eval curl -sf -X POST \
$(sign POST /api/v2/policies/rules "$BODY") \
-d "$BODY" \
"${BASE}/api/v2/policies/rules"Errors
| Error code | HTTP | Meaning |
|---|---|---|
insufficient_scope | 403 | Key does not have the policies:write scope. |
template_id_required | 422 | The template_id field is missing. |
name_required | 422 | The name field is missing or empty. |
invalid_template_id | 422 | The template_id value is not a recognized template or custom. |
custom_yaml_required | 422 | template_id is custom but custom_enforcement_yaml was not provided. |
custom_yaml_invalid | 422 | The custom_enforcement_yaml is not valid TracingPolicy YAML. |
sensor_not_found | 404 | The sensor_id does not match any enrolled sensor. |
group_not_found | 404 | The target_group_id does not match any sensor group. |
scope_conflict | 422 | Both sensor_id and target_group_id were provided; supply only one. |
rule_limit_exceeded | 422 | The limit of 10 custom rules for this Console has been reached. Delete an existing rule before creating a new one. |
Update policy rule
PUT /api/v2/policies/rules/{rule_id} · Scope: policies:write
Updates the enabled state or exceptions list of an existing rule. The template_id, sensor_id, and target_group_id of an existing rule cannot be changed; create a new rule instead.
Path parameters
| Parameter | Type | Required | Description |
|---|---|---|---|
rule_id | string | Yes | The rule to update. |
Request body
| Parameter | Type | Required | Default | Description |
|---|---|---|---|---|
enabled | boolean | No | - | Enable or disable the rule without deleting it. |
exceptions | object[] | No | - | Replaces the full exceptions list. Each object must have an executable field (string). |
Response schema
| Field | Type | Description |
|---|---|---|
rule_id | string | The updated rule ID. |
updated | boolean | Always true. |
Response example
json
{
"rule_id": "{rule_id}",
"updated": true
}python
import json
body = json.dumps({
"enabled": False,
"exceptions": [
{"executable": "/tmp/trivy"},
{"executable": "/tmp/grype"},
],
}).encode()
result = requests.put(
f"{BASE}/api/v2/policies/rules/{rule_id}",
headers=sign("PUT", f"/api/v2/policies/rules/{rule_id}", body),
data=body,
).json()
print(result["updated"]) # Truetypescript
const body = JSON.stringify({
enabled: false,
exceptions: [
{ executable: "/tmp/trivy" },
{ executable: "/tmp/grype" },
],
});
const result = await fetch(`${BASE}/api/v2/policies/rules/{rule_id}`, {
method: "PUT",
headers: sign("PUT", "/api/v2/policies/rules/{rule_id}", Buffer.from(body)),
body,
}).then(r => r.json());
console.log(result.updated); // truego
import (
"bytes"
"encoding/json"
"io"
"net/http"
)
payload := []byte(`{"enabled":false,"exceptions":[{"executable":"/tmp/trivy"}]}`)
req, _ := http.NewRequest("PUT",
BASE+"/api/v2/policies/rules/{rule_id}",
bytes.NewReader(payload),
)
req.Header = sign("PUT", "/api/v2/policies/rules/{rule_id}", payload)
resp, _ := http.DefaultClient.Do(req)
defer resp.Body.Close()
body, _ := io.ReadAll(resp.Body)
var result map[string]any
json.Unmarshal(body, &result)bash
BODY='{"enabled":false,"exceptions":[{"executable":"/tmp/trivy"}]}'
eval curl -sf -X PUT \
$(sign PUT /api/v2/policies/rules/{rule_id} "$BODY") \
-d "$BODY" \
"${BASE}/api/v2/policies/rules/{rule_id}"Errors
| Error code | HTTP | Meaning |
|---|---|---|
insufficient_scope | 403 | Key does not have the policies:write scope. |
rule_not_found | 404 | No rule exists with the given rule_id. |
invalid_exceptions_format | 422 | One or more exception objects in the exceptions array are missing the required executable field. |
Delete policy rule
DELETE /api/v2/policies/rules/{rule_id} · Scope: policies:write
Permanently deletes a policy rule. The rule is immediately de-pushed from all sensors. Returns HTTP 204 with no body on success.
Path parameters
| Parameter | Type | Required | Description |
|---|---|---|---|
rule_id | string | Yes | The rule to delete. |
python
r = requests.delete(
f"{BASE}/api/v2/policies/rules/{rule_id}",
headers=sign("DELETE", f"/api/v2/policies/rules/{rule_id}"),
)
assert r.status_code == 204typescript
const r = await fetch(`${BASE}/api/v2/policies/rules/{rule_id}`, {
method: "DELETE",
headers: sign("DELETE", "/api/v2/policies/rules/{rule_id}"),
});
// r.status === 204go
import "net/http"
req, _ := http.NewRequest("DELETE",
BASE+"/api/v2/policies/rules/{rule_id}",
nil,
)
req.Header = sign("DELETE", "/api/v2/policies/rules/{rule_id}", nil)
resp, _ := http.DefaultClient.Do(req)
defer resp.Body.Close()
// resp.StatusCode == 204bash
eval curl -sf -X DELETE \
$(sign DELETE /api/v2/policies/rules/{rule_id}) \
"${BASE}/api/v2/policies/rules/{rule_id}"
# HTTP 204 - no bodyErrors
| Error code | HTTP | Meaning |
|---|---|---|
insufficient_scope | 403 | Key does not have the policies:write scope. |
rule_not_found | 404 | No rule exists with the given rule_id. |
List enforcement policies
GET /api/v2/policies/enforcement · Scope: policies:read
Returns the current enforcement state for every policy rule across the fleet. Enforcement state tracks the progression from passive observation to active enforcement.
Enforcement states
| State | Meaning |
|---|---|
observe | The sensor is collecting telemetry against this rule but not blocking. |
enforce_ready | The rule has been validated in observe mode and is staged for active enforcement on next sensor sync. |
enforced | The sensor is actively blocking events that match this rule. |
Response schema
| Field | Type | Description |
|---|---|---|
data | EnforcementRule[] | Enforcement state for each rule. |
total | integer | Total count. |
EnforcementRule object
| Field | Type | Description |
|---|---|---|
rule_id | string | The policy rule identifier. |
template_id | string | Template this rule is based on. |
name | string | Rule name. |
enabled | boolean | Whether the rule is active. |
sensor_id | string | null | Sensor scope if the rule is sensor-scoped. |
target_group_id | string | null | Group scope if the rule is group-scoped. |
enforcement_state | string | observe, enforce_ready, or enforced. |
created_at | string (ISO 8601) | When the rule was created. |
Response example
json
{
"data": [
{
"rule_id": "{rule_id}",
"template_id": "block-execve-tmp",
"name": "Block exec from /tmp - UPF fleet",
"enabled": true,
"sensor_id": null,
"target_group_id": "{grp_id}",
"enforcement_state": "enforced",
"created_at": "2026-04-20T09:00:00Z"
},
{
"rule_id": "{rule_id}",
"template_id": "block-kmod-load",
"name": "Block kernel module load - AMF nodes",
"enabled": true,
"sensor_id": null,
"target_group_id": "7e3f2a1c-4b8d-9e0f-b2c1-d4e7f9010203",
"enforcement_state": "observe",
"created_at": "2026-04-25T11:00:00Z"
}
],
"total": 2
}python
enforcement = requests.get(
f"{BASE}/api/v2/policies/enforcement",
headers=sign("GET", "/api/v2/policies/enforcement"),
).json()
for r in enforcement["data"]:
print(r["rule_id"], r["enforcement_state"])typescript
const enforcement = await fetch(`${BASE}/api/v2/policies/enforcement`, {
headers: sign("GET", "/api/v2/policies/enforcement"),
}).then(r => r.json());
for (const rule of enforcement.data) {
console.log(rule.rule_id, rule.enforcement_state);
}go
import (
"encoding/json"
"io"
"net/http"
)
req, _ := http.NewRequest("GET",
BASE+"/api/v2/policies/enforcement",
nil,
)
req.Header = sign("GET", "/api/v2/policies/enforcement", nil)
resp, _ := http.DefaultClient.Do(req)
defer resp.Body.Close()
body, _ := io.ReadAll(resp.Body)
var result map[string]any
json.Unmarshal(body, &result)bash
eval curl -sf \
$(sign GET /api/v2/policies/enforcement) \
"${BASE}/api/v2/policies/enforcement"Errors
| Error code | HTTP | Meaning |
|---|---|---|
insufficient_scope | 403 | Key does not have the policies:read scope. |
Toggle enforcement rule
PUT /api/v2/policies/enforcement/{rule_id} · Scope: policies:write
Enables or disables a policy rule in the enforcement plane. Setting enabled: false puts the rule into a paused state without deleting it. The rule can be re-enabled without recreating it.
Note on enforcement state transitions: toggling enabled here operates on the same rule object as the /rules endpoint. A rule disabled via this endpoint will not be re-pushed to sensors on the next sync. Use the PUT /rules/{rule_id} endpoint to manage exceptions.
Path parameters
| Parameter | Type | Required | Description |
|---|---|---|---|
rule_id | string | Yes | The enforcement rule to toggle. |
Request body
| Parameter | Type | Required | Default | Description |
|---|---|---|---|---|
enabled | boolean | Yes | - | true to enable, false to disable. |
Response schema
| Field | Type | Description |
|---|---|---|
rule_id | string | The updated rule ID. |
enabled | boolean | The new enabled state. |
updated | boolean | Always true. |
Response example
json
{
"rule_id": "{rule_id}",
"enabled": false,
"updated": true
}python
import json
body = json.dumps({"enabled": False}).encode()
result = requests.put(
f"{BASE}/api/v2/policies/enforcement/{rule_id}",
headers=sign("PUT", f"/api/v2/policies/enforcement/{rule_id}", body),
data=body,
).json()
print(result["enabled"]) # Falsetypescript
const body = JSON.stringify({ enabled: false });
const result = await fetch(`${BASE}/api/v2/policies/enforcement/{rule_id}`, {
method: "PUT",
headers: sign("PUT", "/api/v2/policies/enforcement/{rule_id}", Buffer.from(body)),
body,
}).then(r => r.json());
console.log(result.enabled); // falsego
import (
"bytes"
"encoding/json"
"io"
"net/http"
)
payload := []byte(`{"enabled":false}`)
req, _ := http.NewRequest("PUT",
BASE+"/api/v2/policies/enforcement/{rule_id}",
bytes.NewReader(payload),
)
req.Header = sign("PUT", "/api/v2/policies/enforcement/{rule_id}", payload)
resp, _ := http.DefaultClient.Do(req)
defer resp.Body.Close()
body, _ := io.ReadAll(resp.Body)
var result map[string]any
json.Unmarshal(body, &result)bash
BODY='{"enabled":false}'
eval curl -sf -X PUT \
$(sign PUT /api/v2/policies/enforcement/{rule_id} "$BODY") \
-d "$BODY" \
"${BASE}/api/v2/policies/enforcement/{rule_id}"Errors
| Error code | HTTP | Meaning |
|---|---|---|
insufficient_scope | 403 | Key does not have the policies:write scope. |
rule_not_found | 404 | No enforcement rule exists with the given rule_id. |
enabled_required | 422 | The enabled field is missing from the request body. |
Investigations
List investigations
GET /api/v2/investigations · Scope: investigations:read
Returns up to 200 investigations for the workspace, ordered by updated_at descending.
Response schema
| Field | Type | Description |
|---|---|---|
data | Investigation[] | Array of investigation objects |
total | integer | Number of investigations returned |
Investigation object
| Field | Type | Description |
|---|---|---|
case_id | string | Unique case identifier |
title | string | Short title |
description | string | null | Optional description |
severity | string | low · medium · high · critical |
status | string | Current case status, e.g. open, in_progress, closed, archived |
tags | string[] | Tags attached to the case |
created_at | string (ISO 8601) | Creation timestamp |
event_count | integer | Number of events linked to this case |
note_count | integer | Number of analyst notes |
updated_at | string (ISO 8601) | Last update timestamp |
closed_at | string (ISO 8601) | null | When the case was closed, if applicable |
Response example
json
{
"data": [
{
"case_id": "{case_id}",
"title": "UPF privilege escalation - oslo-dc1",
"description": "Unexplained setuid from /opt/open5gs/bin/upf",
"severity": "high",
"status": "open",
"tags": [],
"created_at": "2026-04-26T08:11:00Z",
"event_count": 4,
"note_count": 1,
"updated_at": "2026-04-26T08:11:00Z",
"closed_at": null
}
],
"total": 1
}python
resp = requests.get(
f"{BASE}/api/v2/investigations",
headers=sign("GET", "/api/v2/investigations"),
).json()
for inv in resp["data"]:
print(inv["case_id"], inv["severity"], inv["title"])typescript
const resp = await fetch(`${BASE}/api/v2/investigations`, {
headers: sign("GET", "/api/v2/investigations"),
}).then(r => r.json());
resp.data.forEach((inv: any) => console.log(inv.case_id, inv.severity, inv.title));go
req, _ := http.NewRequest("GET", BASE+"/api/v2/investigations", nil)
req.Header = sign("GET", "/api/v2/investigations", nil)
resp, _ := http.DefaultClient.Do(req)
defer resp.Body.Close()
var body map[string]any
json.NewDecoder(resp.Body).Decode(&body)bash
eval curl -sf $(sign GET /api/v2/investigations) \
"${BASE}/api/v2/investigations"Errors
| Error code | HTTP | Meaning |
|---|---|---|
insufficient_scope | 403 | Key does not have the investigations:read scope. |
Create investigation
POST /api/v2/investigations · Scope: investigations:write
Opens a new investigation case. Returns HTTP 201 with the created object.
Request body
| Field | Type | Required | Description |
|---|---|---|---|
title | string | Yes | Short title for the case |
description | string | No | Optional context |
severity | string | No | low · medium (default) · high · critical. Unrecognized values are stored as medium. |
initial_event | json | No | Optional event snapshot to attach immediately when the case is created |
json
{
"title": "UPF privilege escalation - oslo-dc1",
"description": "Unexplained setuid from /opt/open5gs/bin/upf",
"severity": "high"
}Response example
json
{
"case_id": "{case_id}",
"title": "UPF privilege escalation - oslo-dc1",
"description": "Unexplained setuid from /opt/open5gs/bin/upf",
"severity": "high",
"status": "open",
"tags": [],
"event_snapshots": [],
"notes": [],
"created_at": "2026-04-26T08:11:00Z",
"updated_at": "2026-04-26T08:11:00Z",
"closed_at": null
}python
body = json.dumps({
"title": "UPF privilege escalation - oslo-dc1",
"severity": "high",
}).encode()
inv = requests.post(
f"{BASE}/api/v2/investigations",
headers=sign("POST", "/api/v2/investigations", body),
data=body,
).json()
case_id = inv["case_id"]
print("Opened:", case_id)typescript
const body = JSON.stringify({
title: "UPF privilege escalation - oslo-dc1",
severity: "high",
});
const inv = await fetch(`${BASE}/api/v2/investigations`, {
method: "POST",
headers: sign("POST", "/api/v2/investigations", Buffer.from(body)),
body,
}).then(r => r.json());
const caseId = inv.case_id;go
payload := []byte(`{"title":"UPF privilege escalation - oslo-dc1","severity":"high"}`)
req, _ := http.NewRequest("POST", BASE+"/api/v2/investigations", bytes.NewReader(payload))
req.Header = sign("POST", "/api/v2/investigations", payload)
resp, _ := http.DefaultClient.Do(req)
defer resp.Body.Close()
var inv map[string]any
json.NewDecoder(resp.Body).Decode(&inv)
fmt.Println("Opened:", inv["case_id"])bash
BODY='{"title":"UPF privilege escalation - oslo-dc1","severity":"high"}'
eval curl -sf -X POST $(sign POST /api/v2/investigations "$BODY") \
-d "$BODY" "${BASE}/api/v2/investigations"Errors
| Error code | HTTP | Meaning |
|---|---|---|
insufficient_scope | 403 | Key does not have the investigations:write scope. |
title_required | 400 | The title field is empty. |
Get investigation
GET /api/v2/investigations/{case_id} · Scope: investigations:read
Returns full detail for a single investigation, including all linked event snapshots and analyst notes.
Path parameters
| Parameter | Type | Description |
|---|---|---|
case_id | string | Investigation case ID |
Response schema
Returns a single investigation object.
| Field | Type | Description |
|---|---|---|
case_id | string | Unique case identifier |
title | string | Short title |
description | string | null | Optional description |
severity | string | low · medium · high · critical |
status | string | Current case status, e.g. open, in_progress, closed, archived |
tags | string[] | Tags attached to the case |
event_snapshots | json[] | Stored event snapshot objects attached to the investigation |
notes | InvestigationNote[] | Analyst notes attached to the investigation |
created_at | string (ISO 8601) | Creation timestamp |
updated_at | string (ISO 8601) | Last update timestamp |
closed_at | string (ISO 8601) | null | When the case was closed, if applicable |
Snapshots added through POST /api/v2/investigations/{case_id}/events include fields such as event_id, event_kind, severity, observed_at, process_executable, message, node_name, sensor_id, bookmarked_at, and optional analyst_context.
notes[] - InvestigationNote
| Field | Type | Description |
|---|---|---|
id | string | Unique note ID |
text | string | Note content |
author | string | Author derived from the authenticated identity |
created_at | string (ISO 8601) | Timestamp |
Response example
json
{
"case_id": "{case_id}",
"title": "UPF privilege escalation - oslo-dc1",
"description": "Unexplained setuid from /opt/open5gs/bin/upf",
"severity": "high",
"status": "open",
"tags": [],
"created_at": "2026-04-26T08:11:00Z",
"updated_at": "2026-04-26T08:15:00Z",
"closed_at": null,
"event_snapshots": [
{
"event_id": "{event_id}",
"event_kind": "privilege_change",
"severity": "high",
"observed_at": "2026-04-26T07:58:11Z",
"process_executable": "/opt/open5gs/bin/upf",
"message": "setuid bit changed on /opt/open5gs/bin/upf",
"node_name": "upf-prod-01",
"sensor_id": "{sensor_id}",
"bookmarked_at": "2026-04-26T08:12:00Z",
"analyst_context": "Root cause candidate - triggered 3 min before alarm"
}
],
"notes": [
{
"id": "{note_id}",
"text": "setuid traced to startup script. Escalating to infra team.",
"author": "analyst@acme.com",
"created_at": "2026-04-26T08:15:00Z"
}
]
}python
case = requests.get(
f"{BASE}/api/v2/investigations/{case_id}",
headers=sign("GET", f"/api/v2/investigations/{case_id}"),
).json()
print(f"{len(case['event_snapshots'])} events, {len(case['notes'])} notes")typescript
const case_ = await fetch(`${BASE}/api/v2/investigations/{case_id}`, {
headers: sign("GET", "/api/v2/investigations/{case_id}"),
}).then(r => r.json());
console.log(case_.event_snapshots.length, "events");go
req, _ := http.NewRequest("GET", BASE+"/api/v2/investigations/{case_id}", nil)
req.Header = sign("GET", "/api/v2/investigations/{case_id}", nil)
resp, _ := http.DefaultClient.Do(req)
defer resp.Body.Close()
var inv map[string]any
json.NewDecoder(resp.Body).Decode(&inv)bash
eval curl -sf $(sign GET /api/v2/investigations/{case_id}) \
"${BASE}/api/v2/investigations/{case_id}"Errors
| Error code | HTTP | Meaning |
|---|---|---|
insufficient_scope | 403 | Key does not have the investigations:read scope. |
investigation_not_found | 404 | No investigation exists with the given case_id. |
Add event to investigation
POST /api/v2/investigations/{case_id}/events · Scope: investigations:write
Stores a runtime event snapshot on an investigation. The handler copies selected fields from the supplied event object and stores context as analyst_context when provided.
Request body
| Field | Type | Required | Description |
|---|---|---|---|
event | object | Yes | Event JSON object to snapshot on the investigation |
event.event_id | string | No | Runtime event ID to include in the stored snapshot |
event.event_kind | string | No | Event kind for display purposes |
event.sensor_id | string | No | Source sensor ID |
context | string | No | Analyst note stored as analyst_context in the snapshot |
json
{
"event": {
"event_id": "{event_id}",
"event_kind": "privilege_change",
"sensor_id": "{sensor_id}"
},
"context": "Root cause candidate - triggered 3 min before alarm"
}Response schema
| Field | Type | Description |
|---|---|---|
ok | boolean | true on success |
case_id | string | The investigation case ID |
Response example
json
{
"ok": true,
"case_id": "{case_id}"
}python
body = json.dumps({
"event": {
"event_id": "{event_id}",
"event_kind": "privilege_change",
"sensor_id": "{sensor_id}",
},
"context": "Root cause candidate",
}).encode()
path = f"/api/v2/investigations/{case_id}/events"
resp = requests.post(
f"{BASE}{path}",
headers=sign("POST", path, body),
data=body,
).json()typescript
const body = JSON.stringify({
event: { event_id: "{event_id}", event_kind: "privilege_change", sensor_id: "{sensor_id}" },
context: "Root cause candidate",
});
const path = `/api/v2/investigations/${caseId}/events`;
await fetch(`${BASE}${path}`, {
method: "POST",
headers: sign("POST", path, Buffer.from(body)),
body,
}).then(r => r.json());go
payload := []byte(`{"event":{"event_id":"{event_id}","event_kind":"privilege_change","sensor_id":"{sensor_id}"},"context":"Root cause candidate"}`)
path := "/api/v2/investigations/{case_id}/events"
req, _ := http.NewRequest("POST", BASE+path, bytes.NewReader(payload))
req.Header = sign("POST", path, payload)
http.DefaultClient.Do(req)bash
BODY='{"event":{"event_id":"{event_id}","event_kind":"privilege_change","sensor_id":"{sensor_id}"},"context":"Root cause candidate"}'
eval curl -sf -X POST $(sign POST /api/v2/investigations/{case_id}/events "$BODY") \
-d "$BODY" "${BASE}/api/v2/investigations/{case_id}/events"Errors
| Error code | HTTP | Meaning |
|---|---|---|
insufficient_scope | 403 | Key does not have the investigations:write scope. |
investigation_not_found | 404 | No investigation exists with the given case_id. |
Add note to investigation
POST /api/v2/investigations/{case_id}/notes · Scope: investigations:write
Adds a free-text analyst note to the investigation. The author field is derived from the authenticated identity and cannot be overridden.
Request body
| Field | Type | Required | Description |
|---|---|---|---|
text | string | Yes | Note content (plain text) |
json
{
"text": "setuid traced to startup script. Escalating to infra team."
}Response schema
| Field | Type | Description |
|---|---|---|
ok | boolean | true on success |
note.id | string | Unique note ID |
note.author | string | Author derived from the authenticated identity |
note.text | string | Stored note content |
note.created_at | string (ISO 8601) | Creation timestamp |
Response example
json
{
"ok": true,
"note": {
"id": "{note_id}",
"author": "analyst@acme.com",
"text": "setuid traced to startup script. Escalating to infra team.",
"created_at": "2026-04-26T08:15:00Z"
}
}python
body = json.dumps({"text": "setuid traced to startup script. Escalating to infra team."}).encode()
path = f"/api/v2/investigations/{case_id}/notes"
note = requests.post(
f"{BASE}{path}",
headers=sign("POST", path, body),
data=body,
).json()
print("Note ID:", note["note"]["id"])typescript
const body = JSON.stringify({ text: "setuid traced to startup script. Escalating to infra team." });
const path = `/api/v2/investigations/${caseId}/notes`;
const note = await fetch(`${BASE}${path}`, {
method: "POST",
headers: sign("POST", path, Buffer.from(body)),
body,
}).then(r => r.json());
console.log("Note ID:", note.note.id);go
payload := []byte(`{"text":"setuid traced to startup script. Escalating to infra team."}`)
path := "/api/v2/investigations/{case_id}/notes"
req, _ := http.NewRequest("POST", BASE+path, bytes.NewReader(payload))
req.Header = sign("POST", path, payload)
resp, _ := http.DefaultClient.Do(req)
defer resp.Body.Close()
var result map[string]any
json.NewDecoder(resp.Body).Decode(&result)bash
BODY='{"text":"setuid traced to startup script. Escalating to infra team."}'
eval curl -sf -X POST $(sign POST /api/v2/investigations/{case_id}/notes "$BODY") \
-d "$BODY" "${BASE}/api/v2/investigations/{case_id}/notes"Errors
| Error code | HTTP | Meaning |
|---|---|---|
insufficient_scope | 403 | Key does not have the investigations:write scope. |
investigation_not_found | 404 | No investigation exists with the given case_id. |
text_required | 400 | The text field is empty. |
SBOM & Vulnerabilities
List SBOM scans
GET /api/v2/sbom/scans · Scope: sbom:read
Returns a paginated list of container image scans. Each scan entry includes status, image metadata, and a per-severity vulnerability count.
Query parameters
| Parameter | Type | Default | Description |
|---|---|---|---|
status | string | - | Filter: pending · scanning · completed · failed |
cluster | string | - | Filter by Kubernetes cluster name |
image | string | - | Substring match against image_name |
limit | integer | 50 | Page size (max 200) |
offset | integer | 0 | Pagination offset |
Response schema
| Field | Type | Description |
|---|---|---|
data | SbomScan[] | Array of scan objects |
total | integer | Total matching scans |
limit | integer | Applied page size |
offset | integer | Applied offset |
SbomScan object
| Field | Type | Description |
|---|---|---|
scan_id | string | Unique scan ID |
image_ref | string | Full image reference |
image_name | string | Image name without tag |
image_tag | string | null | Image tag |
image_digest | string | null | Image digest (sha256:...) |
cluster_name | string | null | Originating cluster |
node_name | string | null | Originating node |
status | string | pending · scanning · completed · failed |
trivy_version | string | null | Scanner version used |
triggered_by | string | auto · manual · API key ID |
error_message | string | null | Set when status is failed |
vuln_critical | integer | Critical CVE count |
vuln_high | integer | High CVE count |
vuln_medium | integer | Medium CVE count |
vuln_low | integer | Low CVE count |
vuln_unknown | integer | Unknown severity count |
queued_at | string (ISO 8601) | When scan was queued |
started_at | string | null | When scan began |
completed_at | string | null | When scan finished |
scan_duration_secs | integer | null | Wall-clock seconds |
Response example
json
{
"data": [
{
"scan_id": "{scan_id}",
"image_ref": "registry.example.com/telovix/sensor:1.4.2",
"image_name": "telovix/sensor",
"image_tag": "1.4.2",
"image_digest": "sha256:a1b2c3...",
"cluster_name": "5g-core-prod",
"node_name": "upf-prod-01",
"status": "completed",
"trivy_version": "0.50.1",
"triggered_by": "auto",
"error_message": null,
"vuln_critical": 0,
"vuln_high": 2,
"vuln_medium": 8,
"vuln_low": 14,
"vuln_unknown": 0,
"queued_at": "2026-04-26T09:00:00Z",
"started_at": "2026-04-26T09:00:03Z",
"completed_at": "2026-04-26T09:00:50Z",
"scan_duration_secs": 47
}
],
"total": 1,
"limit": 50,
"offset": 0
}python
scans = requests.get(
f"{BASE}/api/v2/sbom/scans",
headers=sign("GET", "/api/v2/sbom/scans"),
params={"status": "completed", "cluster": "5g-core-prod"},
).json()
for s in scans["data"]:
print(s["image_ref"], "critical:", s["vuln_critical"])typescript
const scans = await fetch(
`${BASE}/api/v2/sbom/scans?status=completed&cluster=5g-core-prod`,
{ headers: sign("GET", "/api/v2/sbom/scans?status=completed&cluster=5g-core-prod") },
).then(r => r.json());
scans.data.forEach((s: any) => console.log(s.image_ref, "critical:", s.vuln_critical));go
req, _ := http.NewRequest("GET", BASE+"/api/v2/sbom/scans?status=completed&cluster=5g-core-prod", nil)
req.Header = sign("GET", "/api/v2/sbom/scans?status=completed&cluster=5g-core-prod", nil)
resp, _ := http.DefaultClient.Do(req)
defer resp.Body.Close()
var body map[string]any
json.NewDecoder(resp.Body).Decode(&body)bash
eval curl -sf $(sign GET "/api/v2/sbom/scans?status=completed&cluster=5g-core-prod") \
"${BASE}/api/v2/sbom/scans?status=completed&cluster=5g-core-prod"Errors
| Error code | HTTP | Meaning |
|---|---|---|
insufficient_scope | 403 | Key does not have the sbom:read scope. |
invalid_status | 422 | The status filter value is not one of pending, scanning, completed, failed. |
Get SBOM scan
GET /api/v2/sbom/scans/{scan_id} · Scope: sbom:read
Returns the full SbomScan object for a single scan.
Path parameters
| Parameter | Type | Description |
|---|---|---|
scan_id | string | Scan ID |
Response example
json
{
"scan_id": "{scan_id}",
"image_ref": "registry.example.com/telovix/sensor:1.4.2",
"image_name": "telovix/sensor",
"image_tag": "1.4.2",
"image_digest": "sha256:a1b2c3...",
"cluster_name": "5g-core-prod",
"node_name": "upf-prod-01",
"status": "completed",
"trivy_version": "0.50.1",
"triggered_by": "auto",
"error_message": null,
"vuln_critical": 0,
"vuln_high": 2,
"vuln_medium": 8,
"vuln_low": 14,
"vuln_unknown": 0,
"queued_at": "2026-04-26T09:00:00Z",
"started_at": "2026-04-26T09:00:03Z",
"completed_at": "2026-04-26T09:00:50Z",
"scan_duration_secs": 47
}python
scan = requests.get(
f"{BASE}/api/v2/sbom/scans/{scan_id}",
headers=sign("GET", f"/api/v2/sbom/scans/{scan_id}"),
).json()
print("Status:", scan["status"], "| Critical:", scan["vuln_critical"])typescript
const scan = await fetch(`${BASE}/api/v2/sbom/scans/{scan_id}`, {
headers: sign("GET", "/api/v2/sbom/scans/{scan_id}"),
}).then(r => r.json());
console.log("Status:", scan.status, "| Critical:", scan.vuln_critical);go
req, _ := http.NewRequest("GET", BASE+"/api/v2/sbom/scans/{scan_id}", nil)
req.Header = sign("GET", "/api/v2/sbom/scans/{scan_id}", nil)
resp, _ := http.DefaultClient.Do(req)
defer resp.Body.Close()
var scan map[string]any
json.NewDecoder(resp.Body).Decode(&scan)bash
eval curl -sf $(sign GET /api/v2/sbom/scans/{scan_id}) \
"${BASE}/api/v2/sbom/scans/{scan_id}"Errors
| Error code | HTTP | Meaning |
|---|---|---|
insufficient_scope | 403 | Key does not have the sbom:read scope. |
scan_not_found | 404 | No scan exists with the given scan_id. |
Get vulnerabilities
GET /api/v2/sbom/scans/{scan_id}/vulnerabilities · Scope: sbom:read
Returns the full CVE list for a completed scan. Returns 409 if the scan has not reached completed status.
Path parameters
| Parameter | Type | Description |
|---|---|---|
scan_id | string | Scan ID |
Response schema
| Field | Type | Description |
|---|---|---|
scan_id | string | Scan ID |
image_name | string | Image name |
image_tag | string | null | Image tag |
vulnerability_count | integer | Total CVE entries |
summary.critical | integer | Count by severity |
summary.high | integer | Count by severity |
summary.medium | integer | Count by severity |
summary.low | integer | Count by severity |
summary.unknown | integer | Count by severity |
vulnerabilities | Vulnerability[] | Full CVE list |
Vulnerability object
| Field | Type | Description |
|---|---|---|
vuln_id | string | CVE identifier (e.g. CVE-2024-1234) |
pkg_name | string | Affected package name |
installed_version | string | Installed version |
fixed_version | string | null | Version that fixes the vulnerability, if available |
severity | string | CRITICAL · HIGH · MEDIUM · LOW · UNKNOWN |
title | string | Short CVE title |
description | string | null | Full description |
references | string[] | Advisory and NVD URLs |
Response example
json
{
"scan_id": "{scan_id}",
"image_name": "telovix/sensor",
"image_tag": "1.4.2",
"vulnerability_count": 10,
"summary": {
"critical": 0,
"high": 2,
"medium": 8,
"low": 0,
"unknown": 0
},
"vulnerabilities": [
{
"vuln_id": "CVE-2024-21626",
"pkg_name": "runc",
"installed_version": "1.1.9",
"fixed_version": "1.1.12",
"severity": "HIGH",
"title": "runc container breakout via /proc/self/exe",
"description": "An attacker with access to an runc container can escape to the host via a crafted process that exploits a race in /proc/self/exe handling.",
"references": [
"https://nvd.nist.gov/vuln/detail/CVE-2024-21626",
"https://github.com/opencontainers/runc/security/advisories/GHSA-xr7r-f8xq-vfvv"
]
}
]
}python
vulns = requests.get(
f"{BASE}/api/v2/sbom/scans/{scan_id}/vulnerabilities",
headers=sign("GET", "/api/v2/sbom/scans/{scan_id}/vulnerabilities"),
).json()
critical = [v for v in vulns["vulnerabilities"] if v["severity"] == "CRITICAL"]
print(f"{len(critical)} critical CVEs")typescript
const vulns = await fetch(`${BASE}/api/v2/sbom/scans/{scan_id}/vulnerabilities`, {
headers: sign("GET", "/api/v2/sbom/scans/{scan_id}/vulnerabilities"),
}).then(r => r.json());
const critical = vulns.vulnerabilities.filter((v: any) => v.severity === "CRITICAL");
console.log(`${critical.length} critical CVEs`);go
req, _ := http.NewRequest("GET", BASE+"/api/v2/sbom/scans/{scan_id}/vulnerabilities", nil)
req.Header = sign("GET", "/api/v2/sbom/scans/{scan_id}/vulnerabilities", nil)
resp, _ := http.DefaultClient.Do(req)
defer resp.Body.Close()
var result map[string]any
json.NewDecoder(resp.Body).Decode(&result)bash
eval curl -sf $(sign GET /api/v2/sbom/scans/{scan_id}/vulnerabilities) \
"${BASE}/api/v2/sbom/scans/{scan_id}/vulnerabilities"Errors
| Error code | HTTP | Meaning |
|---|---|---|
insufficient_scope | 403 | Key does not have the sbom:read scope. |
scan_not_found | 404 | No scan exists with the given scan_id. |
scan_not_completed | 409 | The scan has not yet reached completed status. |
Export CycloneDX SBOM
GET /api/v2/sbom/scans/{scan_id}/cyclonedx · Scope: sbom:read
Returns a CycloneDX 1.4 JSON document for a completed scan. The Content-Type is application/json. Save directly to file for import into vulnerability management tools.
Path parameters
| Parameter | Type | Description |
|---|---|---|
scan_id | string | Scan ID |
python
cdx = requests.get(
f"{BASE}/api/v2/sbom/scans/{scan_id}/cyclonedx",
headers=sign("GET", "/api/v2/sbom/scans/{scan_id}/cyclonedx"),
).json()
with open("sbom.cdx.json", "w") as f:
json.dump(cdx, f, indent=2)
print("Saved sbom.cdx.json")typescript
const cdx = await fetch(`${BASE}/api/v2/sbom/scans/{scan_id}/cyclonedx`, {
headers: sign("GET", "/api/v2/sbom/scans/{scan_id}/cyclonedx"),
}).then(r => r.json());
import { writeFileSync } from "fs";
writeFileSync("sbom.cdx.json", JSON.stringify(cdx, null, 2));go
req, _ := http.NewRequest("GET", BASE+"/api/v2/sbom/scans/{scan_id}/cyclonedx", nil)
req.Header = sign("GET", "/api/v2/sbom/scans/{scan_id}/cyclonedx", nil)
resp, _ := http.DefaultClient.Do(req)
defer resp.Body.Close()
f, _ := os.Create("sbom.cdx.json")
defer f.Close()
io.Copy(f, resp.Body)bash
eval curl -sf $(sign GET /api/v2/sbom/scans/{scan_id}/cyclonedx) \
"${BASE}/api/v2/sbom/scans/{scan_id}/cyclonedx" \
-o sbom.cdx.json
Errors
| Error code | HTTP | Meaning |
|---|---|---|
insufficient_scope | 403 | Key does not have the sbom:read scope. |
scan_not_found | 404 | No scan exists with the given scan_id. |
scan_not_completed | 409 | The scan has not yet reached completed status. |
cyclonedx_not_available | 404 | The scan completed but the CycloneDX document was not stored (scan may have been a dry-run or pre-dates CycloneDX export). |
Audit Log
List audit entries
GET /api/v2/audit · Scope: audit:read
Returns an immutable cursor-paginated audit log of all operator and API actions that occurred within the Console. Entries are ordered newest-first.
Query parameters
| Parameter | Type | Default | Description |
|---|---|---|---|
actor_email | string | - | Filter by actor email address |
action_type | string | - | Filter by action type (e.g. sensor_contained, policy_created) |
outcome | string | - | success or failure |
since | ISO 8601 | 30 days ago | Start of time window |
until | ISO 8601 | now | End of time window |
limit | integer | 100 | Page size (max 500) |
cursor | string | - | Base64 opaque cursor from next_cursor of a previous response |
Response schema
| Field | Type | Description |
|---|---|---|
data | AuditEntry[] | Ordered list of audit records |
count | integer | Number of records in this page |
limit | integer | Applied page size |
next_cursor | string | null | Opaque cursor for the next page; null when exhausted |
from | string (ISO 8601) | Applied start of window |
until | string (ISO 8601) | Applied end of window |
AuditEntry object
| Field | Type | Description |
|---|---|---|
created_at | string (ISO 8601) | When the action occurred |
action_type | string | Machine-readable action identifier |
outcome | string | success · failure |
actor_email | string | null | Email of the user who performed the action, or null for API key actions |
sensor_id | string | null | Affected sensor ID, when applicable |
detail | object | Action-specific structured detail |
client_ip | string | null | IP address of the caller |
user_agent | string | null | HTTP User-Agent header |
Response example
json
{
"data": [
{
"created_at": "2026-04-26T09:15:44Z",
"action_type": "sensor_enforcement_state_changed",
"outcome": "success",
"actor_email": "admin@acme.com",
"sensor_id": "{sensor_id}",
"detail": {
"previous_state": "monitor",
"new_state": "enforce",
"rule_name": "block_privilege_change"
},
"client_ip": "10.0.1.42",
"user_agent": "Mozilla/5.0 (X11; Linux x86_64)"
}
],
"count": 1,
"limit": 100,
"next_cursor": null,
"from": "2026-03-27T09:15:44Z",
"until": "2026-04-26T09:15:44Z"
}python
audit = requests.get(
f"{BASE}/api/v2/audit",
headers=sign("GET", "/api/v2/audit"),
params={"action_type": "sensor_enforcement_state_changed", "limit": 50},
).json()
for entry in audit["data"]:
print(entry["created_at"], entry["actor_email"], entry["action_type"])
# Paginate with cursor
cursor = audit.get("next_cursor")
while cursor:
page = requests.get(
f"{BASE}/api/v2/audit",
headers=sign("GET", f"/api/v2/audit?cursor={cursor}"),
params={"cursor": cursor},
).json()
for entry in page["data"]:
print(entry["created_at"], entry["action_type"])
cursor = page.get("next_cursor")typescript
let cursor: string | null = null;
do {
const qs = new URLSearchParams({ limit: "50", ...(cursor ? { cursor } : {}) });
const path = `/api/v2/audit?${qs}`;
const page = await fetch(`${BASE}${path}`, { headers: sign("GET", path) }).then(r => r.json());
for (const entry of page.data) {
console.log(entry.created_at, entry.actor_email, entry.action_type);
}
cursor = page.next_cursor ?? null;
} while (cursor);go
cursor := ""
for {
path := "/api/v2/audit?limit=50"
if cursor != "" {
path += "&cursor=" + cursor
}
req, _ := http.NewRequest("GET", BASE+path, nil)
req.Header = sign("GET", path, nil)
resp, _ := http.DefaultClient.Do(req)
var page map[string]any
json.NewDecoder(resp.Body).Decode(&page)
resp.Body.Close()
// process page["data"]
next, _ := page["next_cursor"].(string)
if next == "" { break }
cursor = next
}bash
eval curl -sf $(sign GET "/api/v2/audit?action_type=sensor_enforcement_state_changed&limit=50") \
"${BASE}/api/v2/audit?action_type=sensor_enforcement_state_changed&limit=50"Errors
| Error code | HTTP | Meaning |
|---|---|---|
insufficient_scope | 403 | Key does not have the audit:read scope. |
invalid_outcome | 422 | The outcome filter is not success or failure. |
invalid_time_range | 422 | The since timestamp is after the until timestamp. |
invalid_cursor | 422 | The cursor value is not a valid pagination cursor from a previous response. |
Network
List active connections
GET /api/v2/network/connections · Scope: events:read
Returns the current or recently observed active TCP/UDP connections across the fleet, as seen by sensors. Use the cursor for pagination.
Query parameters
| Parameter | Type | Default | Description |
|---|---|---|---|
sensor_id | string | - | Filter to a single sensor |
limit | integer | 100 | Page size (max 500) |
cursor | string | - | Base64 opaque cursor for next page |
Response schema
| Field | Type | Description |
|---|---|---|
data | Connection[] | Array of connection records |
count | integer | Records in this page |
total | integer | Total matching records |
limit | integer | Applied page size |
next_cursor | string | null | Cursor for the next page |
Connection object
| Field | Type | Description |
|---|---|---|
sensor_id | string | Reporting sensor |
node_name | string | Source node hostname |
binary | string | Executable that owns the socket |
src_ip | string | Source IP address |
src_port | integer | Source port |
dst_ip | string | Destination IP address |
dst_port | integer | Destination port |
proto | string | tcp · udp |
pid | integer | null | Process ID |
duration_secs | number | null | Connection age in seconds |
bytes_sent | integer | null | Bytes sent (when available) |
state | string | established · half-closed · stale |
observed_at | string (ISO 8601) | Last observation time |
Response example
json
{
"data": [
{
"sensor_id": "{sensor_id}",
"node_name": "upf-prod-01",
"binary": "/opt/open5gs/bin/open5gs-upfd",
"src_ip": "10.10.0.5",
"src_port": 2152,
"dst_ip": "10.10.0.11",
"dst_port": 2152,
"proto": "udp",
"pid": 4821,
"duration_secs": 3720.4,
"bytes_sent": 5242880,
"state": "established",
"observed_at": "2026-04-26T10:00:00Z"
}
],
"count": 1,
"total": 1,
"limit": 100,
"next_cursor": null
}python
conns = requests.get(
f"{BASE}/api/v2/network/connections",
headers=sign("GET", "/api/v2/network/connections"),
params={"sensor_id": "{sensor_id}", "limit": 200},
).json()
for c in conns["data"]:
print(f"{c['binary']} {c['src_ip']}:{c['src_port']} -> {c['dst_ip']}:{c['dst_port']}")typescript
const conns = await fetch(`${BASE}/api/v2/network/connections?sensor_id={sensor_id}&limit=200`, {
headers: sign("GET", "/api/v2/network/connections?sensor_id={sensor_id}&limit=200"),
}).then(r => r.json());
conns.data.forEach((c: any) =>
console.log(`${c.binary} ${c.src_ip}:${c.src_port} -> ${c.dst_ip}:${c.dst_port}`)
);go
req, _ := http.NewRequest("GET", BASE+"/api/v2/network/connections?sensor_id={sensor_id}&limit=200", nil)
req.Header = sign("GET", "/api/v2/network/connections?sensor_id={sensor_id}&limit=200", nil)
resp, _ := http.DefaultClient.Do(req)
defer resp.Body.Close()
var result map[string]any
json.NewDecoder(resp.Body).Decode(&result)bash
eval curl -sf $(sign GET "/api/v2/network/connections?sensor_id={sensor_id}") \
"${BASE}/api/v2/network/connections?sensor_id={sensor_id}"List listening services
GET /api/v2/network/listening · Scope: events:read
Returns binaries and ports that are actively listening for inbound connections, as observed across the fleet.
Query parameters
| Parameter | Type | Default | Description |
|---|---|---|---|
sensor_id | string | - | Filter to a single sensor |
limit | integer | 100 | Page size (max 500) |
cursor | string | - | Base64 opaque cursor for next page |
Response schema
| Field | Type | Description |
|---|---|---|
data | Listener[] | Array of listener records |
count | integer | Records in this page |
total | integer | Total matching records |
limit | integer | Applied page size |
next_cursor | string | null | Cursor for the next page |
Listener object
| Field | Type | Description |
|---|---|---|
sensor_id | string | Reporting sensor |
node_name | string | Node hostname |
binary | string | Listening executable path |
local_ip | string | Bound IP address |
local_port | integer | Bound port |
proto | string | tcp · udp |
pid | integer | null | Process ID |
first_seen_at | string (ISO 8601) | First observation |
last_seen_at | string (ISO 8601) | Most recent observation |
Response example
json
{
"data": [
{
"sensor_id": "{sensor_id}",
"node_name": "amf-prod-01",
"binary": "/opt/open5gs/bin/open5gs-amfd",
"local_ip": "0.0.0.0",
"local_port": 38412,
"proto": "tcp",
"pid": 3901,
"first_seen_at": "2026-04-01T00:00:00Z",
"last_seen_at": "2026-04-26T10:00:00Z"
}
],
"count": 1,
"total": 1,
"limit": 100,
"next_cursor": null
}python
listeners = requests.get(
f"{BASE}/api/v2/network/listening",
headers=sign("GET", "/api/v2/network/listening"),
params={"sensor_id": "{sensor_id}", "limit": 200},
).json()
for l in listeners["data"]:
print(f"{l['binary']} listening on {l['local_ip']}:{l['local_port']}/{l['proto']}")typescript
const listeners = await fetch(
`${BASE}/api/v2/network/listening?sensor_id={sensor_id}&limit=200`,
{ headers: sign("GET", "/api/v2/network/listening?sensor_id={sensor_id}&limit=200") },
).then(r => r.json());
listeners.data.forEach((l: any) =>
console.log(`${l.binary} listening on ${l.local_ip}:${l.local_port}/${l.proto}`)
);go
req, _ := http.NewRequest("GET", BASE+"/api/v2/network/listening?sensor_id={sensor_id}&limit=200", nil)
req.Header = sign("GET", "/api/v2/network/listening?sensor_id={sensor_id}&limit=200", nil)
resp, _ := http.DefaultClient.Do(req)
defer resp.Body.Close()
var result map[string]any
json.NewDecoder(resp.Body).Decode(&result)bash
eval curl -sf $(sign GET "/api/v2/network/listening?sensor_id={sensor_id}") \
"${BASE}/api/v2/network/listening?sensor_id={sensor_id}"Kubernetes
List workloads
GET /api/v2/kubernetes/workloads · Scope: sensors:read
Returns Kubernetes workloads discovered by sensors across all enrolled clusters, with replica health summary.
Query parameters
| Parameter | Type | Default | Description |
|---|---|---|---|
cluster | string | - | Filter by cluster name |
namespace | string | - | Filter by namespace |
workload_type | string | - | Deployment · StatefulSet · DaemonSet |
limit | integer | 500 | Page size (max 500) |
Response schema
| Field | Type | Description |
|---|---|---|
data | Workload[] | Array of workload objects |
total | integer | Total matching workloads |
limit | integer | Applied page size |
type_breakdown | object | Count per workload type: {Deployment, StatefulSet, DaemonSet} |
namespace_coverage | object | Map of namespace name to workload count |
health_summary | object | {healthy, degraded, unavailable} counts |
Workload object
| Field | Type | Description |
|---|---|---|
workload_type | string | Deployment · StatefulSet · DaemonSet |
workload_name | string | Workload name |
namespace | string | Kubernetes namespace |
cluster_name | string | Cluster name |
desired_replicas | integer | Desired replica count |
ready_replicas | integer | Ready replica count |
updated_replicas | integer | Up-to-date replica count |
available_replicas | integer | Available replica count |
sensor_ids | string[] | IDs of sensors on nodes running this workload |
Response example
json
{
"data": [
{
"workload_type": "Deployment",
"workload_name": "open5gs-amf",
"namespace": "5g-core",
"cluster_name": "5g-core-prod",
"desired_replicas": 2,
"ready_replicas": 2,
"updated_replicas": 2,
"available_replicas": 2,
"sensor_ids": ["{sensor_id}", "sensor_Kp4nRvWmYqBx8Lz"]
}
],
"total": 1,
"limit": 500,
"type_breakdown": { "Deployment": 1, "StatefulSet": 0, "DaemonSet": 0 },
"namespace_coverage": { "5g-core": 1 },
"health_summary": { "healthy": 1, "degraded": 0, "unavailable": 0 }
}python
workloads = requests.get(
f"{BASE}/api/v2/kubernetes/workloads",
headers=sign("GET", "/api/v2/kubernetes/workloads"),
params={"cluster": "5g-core-prod", "namespace": "5g-core"},
).json()
print(workloads["health_summary"])typescript
const workloads = await fetch(
`${BASE}/api/v2/kubernetes/workloads?cluster=5g-core-prod&namespace=5g-core`,
{ headers: sign("GET", "/api/v2/kubernetes/workloads?cluster=5g-core-prod&namespace=5g-core") },
).then(r => r.json());
console.log(workloads.health_summary);go
req, _ := http.NewRequest("GET", BASE+"/api/v2/kubernetes/workloads?cluster=5g-core-prod", nil)
req.Header = sign("GET", "/api/v2/kubernetes/workloads?cluster=5g-core-prod", nil)
resp, _ := http.DefaultClient.Do(req)
defer resp.Body.Close()
var result map[string]any
json.NewDecoder(resp.Body).Decode(&result)bash
eval curl -sf $(sign GET "/api/v2/kubernetes/workloads?cluster=5g-core-prod") \
"${BASE}/api/v2/kubernetes/workloads?cluster=5g-core-prod"List container images
GET /api/v2/kubernetes/images · Scope: sensors:read
Returns container images observed running across enrolled clusters. Use this to cross-reference against SBOM scan results.
Query parameters
| Parameter | Type | Default | Description |
|---|---|---|---|
cluster | string | - | Filter by cluster name |
namespace | string | - | Filter by namespace |
image_name | string | - | Substring match against image_name |
registry | string | - | Exact registry hostname filter |
limit | integer | 500 | Page size (max 500) |
Response schema
| Field | Type | Description |
|---|---|---|
data | ContainerImage[] | Array of image records |
total | integer | Total matching records |
limit | integer | Applied page size |
ContainerImage object
| Field | Type | Description |
|---|---|---|
image_ref | string | Full image reference |
image_name | string | Image name without tag |
image_tag | string | null | Image tag |
image_digest | string | null | Image digest (sha256:...) |
registry | string | null | Registry hostname |
cluster_name | string | Cluster where image is running |
namespace | string | Kubernetes namespace |
pod_uid | string | UID of a pod running this image |
pull_policy | string | null | Always · IfNotPresent · Never |
is_init_container | boolean | true if this is an init container |
first_seen_at | string (ISO 8601) | First observed running |
last_seen_at | string (ISO 8601) | Most recent observation |
Response example
json
{
"data": [
{
"image_ref": "registry.example.com/telovix/sensor:1.4.2",
"image_name": "telovix/sensor",
"image_tag": "1.4.2",
"image_digest": "sha256:a1b2c3...",
"registry": "registry.example.com",
"cluster_name": "5g-core-prod",
"namespace": "telovix",
"pod_uid": "abc-123-def",
"pull_policy": "IfNotPresent",
"is_init_container": false,
"first_seen_at": "2026-04-01T00:00:00Z",
"last_seen_at": "2026-04-26T10:00:00Z"
}
],
"total": 1,
"limit": 500
}python
images = requests.get(
f"{BASE}/api/v2/kubernetes/images",
headers=sign("GET", "/api/v2/kubernetes/images"),
params={"cluster": "5g-core-prod", "image_name": "sensor"},
).json()
for img in images["data"]:
print(img["image_ref"], img["pull_policy"])typescript
const images = await fetch(
`${BASE}/api/v2/kubernetes/images?cluster=5g-core-prod&image_name=sensor`,
{ headers: sign("GET", "/api/v2/kubernetes/images?cluster=5g-core-prod&image_name=sensor") },
).then(r => r.json());
images.data.forEach((img: any) => console.log(img.image_ref, img.pull_policy));go
req, _ := http.NewRequest("GET", BASE+"/api/v2/kubernetes/images?cluster=5g-core-prod", nil)
req.Header = sign("GET", "/api/v2/kubernetes/images?cluster=5g-core-prod", nil)
resp, _ := http.DefaultClient.Do(req)
defer resp.Body.Close()
var result map[string]any
json.NewDecoder(resp.Body).Decode(&result)bash
eval curl -sf $(sign GET "/api/v2/kubernetes/images?cluster=5g-core-prod&image_name=sensor") \
"${BASE}/api/v2/kubernetes/images?cluster=5g-core-prod&image_name=sensor"List admission decisions
GET /api/v2/kubernetes/admission/decisions · Scope: sensors:read
Returns the last 200 Kubernetes admission webhook decisions made by the sensor. Each entry records whether a pod was allowed or denied, and why.
Response schema
| Field | Type | Description |
|---|---|---|
data | AdmissionDecision[] | Array of decision records |
total | integer | Total records (up to 200) |
AdmissionDecision object
| Field | Type | Description |
|---|---|---|
decision_id | string | Unique decision ID |
allowed | boolean | true if the pod was admitted |
reason | string | null | Human-readable reason, present on deny |
rule_name | string | null | Policy rule that triggered the decision |
namespace | string | Target namespace |
pod_name | string | null | Pod name, when available |
image_ref | string | null | Primary container image reference |
decided_at | string (ISO 8601) | Decision timestamp |
Response example
json
{
"data": [
{
"decision_id": "{adm_id}",
"allowed": false,
"reason": "Image does not meet minimum scan age requirement",
"rule_name": "require_recent_scan",
"namespace": "5g-core",
"pod_name": "open5gs-upf-7d9f5-xk2qp",
"image_ref": "registry.example.com/open5gs/upf:latest",
"decided_at": "2026-04-26T10:01:15Z"
}
],
"total": 1
}python
decisions = requests.get(
f"{BASE}/api/v2/kubernetes/admission/decisions",
headers=sign("GET", "/api/v2/kubernetes/admission/decisions"),
).json()
denied = [d for d in decisions["data"] if not d["allowed"]]
print(f"{len(denied)} denied in last 200 decisions")typescript
const decisions = await fetch(`${BASE}/api/v2/kubernetes/admission/decisions`, {
headers: sign("GET", "/api/v2/kubernetes/admission/decisions"),
}).then(r => r.json());
const denied = decisions.data.filter((d: any) => !d.allowed);
console.log(`${denied.length} denied in last 200 decisions`);go
req, _ := http.NewRequest("GET", BASE+"/api/v2/kubernetes/admission/decisions", nil)
req.Header = sign("GET", "/api/v2/kubernetes/admission/decisions", nil)
resp, _ := http.DefaultClient.Do(req)
defer resp.Body.Close()
var result map[string]any
json.NewDecoder(resp.Body).Decode(&result)bash
eval curl -sf $(sign GET /api/v2/kubernetes/admission/decisions) \
"${BASE}/api/v2/kubernetes/admission/decisions"Webhooks
List webhooks
GET /api/v2/webhooks · Scope: alerts:read
Returns all configured webhook endpoints. The has_signing_secret field indicates whether a secret is configured - the secret value itself is never returned.
Response schema
| Field | Type | Description |
|---|---|---|
data | Webhook[] | Array of webhook objects |
total | integer | Total webhooks |
Webhook object
| Field | Type | Description |
|---|---|---|
id | string | Webhook ID |
display_name | string | Human-readable label |
target_url | string | Delivery URL (http:// or https://) |
has_signing_secret | boolean | Whether a signing secret is configured |
webhook_type | string | generic · slack · discord · teams · pagerduty · opsgenie |
enabled | boolean | Whether deliveries are active |
created_at | string (ISO 8601) | Creation timestamp |
updated_at | string (ISO 8601) | Last update timestamp |
last_delivery_at | string | null | Most recent delivery attempt |
last_delivery_status | string | null | Delivery state of most recent attempt (success or failed) |
supported_events | string[] | Event types this webhook receives |
Response example
json
{
"data": [
{
"id": "{webhook_id}",
"display_name": "SOC PagerDuty",
"target_url": "https://events.pagerduty.com/v2/enqueue",
"has_signing_secret": true,
"webhook_type": "pagerduty",
"enabled": true,
"created_at": "2026-01-15T08:00:00Z",
"updated_at": "2026-04-10T12:00:00Z",
"last_delivery_at": "2026-04-26T09:58:00Z",
"last_delivery_status": "success",
"supported_events": ["anomaly.score.new", "chain.detected", "sensor.health_change", "alert.created", "compliance.score_changed"]
}
],
"total": 1
}python
webhooks = requests.get(
f"{BASE}/api/v2/webhooks",
headers=sign("GET", "/api/v2/webhooks"),
).json()
for wh in webhooks["data"]:
print(wh["display_name"], wh["last_delivery_status"])typescript
const webhooks = await fetch(`${BASE}/api/v2/webhooks`, {
headers: sign("GET", "/api/v2/webhooks"),
}).then(r => r.json());
webhooks.data.forEach((wh: any) => console.log(wh.display_name, wh.last_delivery_status));go
req, _ := http.NewRequest("GET", BASE+"/api/v2/webhooks", nil)
req.Header = sign("GET", "/api/v2/webhooks", nil)
resp, _ := http.DefaultClient.Do(req)
defer resp.Body.Close()
var result map[string]any
json.NewDecoder(resp.Body).Decode(&result)bash
eval curl -sf $(sign GET /api/v2/webhooks) "${BASE}/api/v2/webhooks"Create webhook
POST /api/v2/webhooks · Scope: alerts:write
Creates a new webhook endpoint. Returns HTTP 201. If a signing secret is configured, the response indicates that with has_signing_secret - the raw secret is not returned.
Request body
| Field | Type | Required | Description |
|---|---|---|---|
display_name | string | Yes | Human-readable label |
target_url | string | Yes | Delivery URL, must start with http:// or https:// |
signing_secret | string | No | Shared secret for payload signing |
webhook_type | string | No | generic (default) · slack · discord · teams · pagerduty · opsgenie |
json
{
"display_name": "SOC PagerDuty",
"target_url": "https://events.pagerduty.com/v2/enqueue",
"signing_secret": "my-routing-key",
"webhook_type": "pagerduty"
}
Response example
json
{
"id": "{webhook_id}",
"display_name": "SOC PagerDuty",
"target_url": "https://events.pagerduty.com/v2/enqueue",
"has_signing_secret": true,
"webhook_type": "pagerduty",
"enabled": true,
"created_at": "2026-04-26T10:05:00Z",
"supported_events": ["anomaly.score.new", "chain.detected", "sensor.health_change", "alert.created", "compliance.score_changed"]
}
Errors
| Code | HTTP | Description |
|---|---|---|
invalid_notification_webhook | 422 | display_name is missing, empty, or longer than 128 characters |
invalid_notification_webhook | 422 | target_url does not start with http:// or https:// |
invalid_notification_webhook | 422 | webhook_type is not one of the accepted values |
python
body = json.dumps({
"display_name": "SOC PagerDuty",
"target_url": "https://events.pagerduty.com/v2/enqueue",
"webhook_type": "pagerduty",
"signing_secret": "routing-key-here",
}).encode()
wh = requests.post(
f"{BASE}/api/v2/webhooks",
headers=sign("POST", "/api/v2/webhooks", body),
data=body,
).json()
webhook_id = wh["id"]
print("Webhook created:", webhook_id)typescript
const body = JSON.stringify({
display_name: "SOC PagerDuty",
target_url: "https://events.pagerduty.com/v2/enqueue",
webhook_type: "pagerduty",
signing_secret: "routing-key-here",
});
const wh = await fetch(`${BASE}/api/v2/webhooks`, {
method: "POST",
headers: sign("POST", "/api/v2/webhooks", Buffer.from(body)),
body,
}).then(r => r.json());
const webhookId = wh.id;go
payload := []byte(`{"display_name":"SOC PagerDuty","target_url":"https://events.pagerduty.com/v2/enqueue","webhook_type":"pagerduty","signing_secret":"routing-key-here"}`)
req, _ := http.NewRequest("POST", BASE+"/api/v2/webhooks", bytes.NewReader(payload))
req.Header = sign("POST", "/api/v2/webhooks", payload)
resp, _ := http.DefaultClient.Do(req)
defer resp.Body.Close()
var wh map[string]any
json.NewDecoder(resp.Body).Decode(&wh)
fmt.Println("Webhook created:", wh["id"])bash
BODY='{"display_name":"SOC PagerDuty","target_url":"https://events.pagerduty.com/v2/enqueue","webhook_type":"pagerduty"}'
eval curl -sf -X POST $(sign POST /api/v2/webhooks "$BODY") \
-d "$BODY" "${BASE}/api/v2/webhooks"Update webhook
PUT /api/v2/webhooks/{webhook_id} · Scope: alerts:write
Updates a webhook endpoint. All fields are optional - include only what you want to change. Pass signing_secret: "" to clear the secret; omit the field entirely to leave it unchanged. The response is a reduced webhook summary.
Path parameters
| Parameter | Type | Description |
|---|---|---|
webhook_id | string | Webhook ID |
Request body
| Field | Type | Description |
|---|---|---|
display_name | string | New display name |
target_url | string | New delivery URL (must start with http:// or https://) |
signing_secret | string | New secret; "" to clear |
enabled | boolean | Enable or disable deliveries |
json
{
"display_name": "SOC PagerDuty (production)",
"enabled": true
}
Response example
json
{
"id": "{webhook_id}",
"display_name": "SOC PagerDuty (production)",
"target_url": "https://events.pagerduty.com/v2/enqueue",
"has_signing_secret": true,
"webhook_type": "pagerduty",
"enabled": true,
"updated_at": "2026-04-26T10:10:00Z"
}
Errors
| Code | HTTP | Description |
|---|---|---|
webhook_not_found | 404 | No webhook with that ID |
invalid_target_url | 400 | target_url does not start with http:// or https:// |
python
body = json.dumps({"display_name": "SOC PagerDuty (production)", "enabled": True}).encode()
path = f"/api/v2/webhooks/{webhook_id}"
updated = requests.put(
f"{BASE}{path}",
headers=sign("PUT", path, body),
data=body,
).json()typescript
const body = JSON.stringify({ display_name: "SOC PagerDuty (production)", enabled: true });
const path = `/api/v2/webhooks/${webhookId}`;
const updated = await fetch(`${BASE}${path}`, {
method: "PUT",
headers: sign("PUT", path, Buffer.from(body)),
body,
}).then(r => r.json());go
payload := []byte(`{"display_name":"SOC PagerDuty (production)","enabled":true}`)
path := "/api/v2/webhooks/{webhook_id}"
req, _ := http.NewRequest("PUT", BASE+path, bytes.NewReader(payload))
req.Header = sign("PUT", path, payload)
resp, _ := http.DefaultClient.Do(req)
defer resp.Body.Close()bash
BODY='{"display_name":"SOC PagerDuty (production)","enabled":true}'
eval curl -sf -X PUT $(sign PUT /api/v2/webhooks/{webhook_id} "$BODY") \
-d "$BODY" "${BASE}/api/v2/webhooks/{webhook_id}"Delete webhook
DELETE /api/v2/webhooks/{webhook_id} · Scope: alerts:write
Permanently deletes a webhook endpoint. Returns HTTP 204 with no body.
Path parameters
| Parameter | Type | Description |
|---|---|---|
webhook_id | string | Webhook ID |
Errors
| Code | HTTP | Description |
|---|---|---|
webhook_not_found | 404 | No webhook with that ID |
python
path = f"/api/v2/webhooks/{webhook_id}"
requests.delete(f"{BASE}{path}", headers=sign("DELETE", path))
print("Deleted")typescript
const path = `/api/v2/webhooks/${webhookId}`;
await fetch(`${BASE}${path}`, {
method: "DELETE",
headers: sign("DELETE", path),
});go
path := "/api/v2/webhooks/{webhook_id}"
req, _ := http.NewRequest("DELETE", BASE+path, nil)
req.Header = sign("DELETE", path, nil)
http.DefaultClient.Do(req)bash
eval curl -sf -X DELETE $(sign DELETE /api/v2/webhooks/{webhook_id}) \
"${BASE}/api/v2/webhooks/{webhook_id}"Test webhook
POST /api/v2/webhooks/{webhook_id}/test · Scope: alerts:write
Sends a test_delivery event payload to the configured target_url and returns the delivery result. No body required.
Path parameters
| Parameter | Type | Description |
|---|---|---|
webhook_id | string | Webhook ID |
Response schema
| Field | Type | Description |
|---|---|---|
delivered | boolean | true if the target returned a 2xx status |
http_status_code | integer or null | HTTP status received from the target |
error | string or null | Error message if delivery failed |
Response example
json
{
"delivered": true,
"http_status_code": 200,
"error": null
}json
{
"delivered": false,
"http_status_code": null,
"error": "connect: connection refused"
}Errors
| Code | HTTP | Description |
|---|---|---|
webhook_not_found | 404 | No webhook with that ID |
python
path = f"/api/v2/webhooks/{webhook_id}/test"
result = requests.post(f"{BASE}{path}", headers=sign("POST", path)).json()
if result["delivered"]:
print("Test delivery succeeded:", result["http_status_code"])
else:
print("Delivery failed:", result["error"])typescript
const path = `/api/v2/webhooks/${webhookId}/test`;
const result = await fetch(`${BASE}${path}`, {
method: "POST",
headers: sign("POST", path),
}).then(r => r.json());
console.log(result.delivered ? `OK ${result.http_status_code}` : `Failed: ${result.error}`);go
path := "/api/v2/webhooks/{webhook_id}/test"
req, _ := http.NewRequest("POST", BASE+path, nil)
req.Header = sign("POST", path, nil)
resp, _ := http.DefaultClient.Do(req)
defer resp.Body.Close()
var result map[string]any
json.NewDecoder(resp.Body).Decode(&result)bash
eval curl -sf -X POST $(sign POST /api/v2/webhooks/{webhook_id}/test) \
"${BASE}/api/v2/webhooks/{webhook_id}/test"Telecom
NF inventory
GET /api/v2/telco/nf-inventory · Scope: sensors:read
Returns the detected network function role inventory for all telecom-flavor sensors. Each entry includes role confidence scores and observed interface bindings.
Response schema
| Field | Type | Description |
|---|---|---|
data | NfInventoryEntry[] | Array of per-sensor inventory snapshots |
total | integer | Total entries |
NfInventoryEntry object
| Field | Type | Description |
|---|---|---|
sensor_id | string | Sensor ID |
snapshot_written_at | string (ISO 8601) | Snapshot timestamp |
nf.detected_roles | array | List of {role, confidence, evidence[]} |
nf.detected_roles[].role | string | Role name (e.g. amf, upf, gnb) |
nf.detected_roles[].confidence | number | Confidence score 0–1 |
nf.detected_roles[].evidence | string[] | Evidence strings supporting the detection |
nf.interface_bindings | object | Map of interface name to array of {local_port, remote_port} |
Response example
json
{
"data": [
{
"sensor_id": "{sensor_id}",
"snapshot_written_at": "2026-04-26T10:00:00Z",
"nf": {
"detected_roles": [
{
"role": "upf",
"confidence": 0.97,
"evidence": ["GTP-U port 2152 bound", "PFCP port 8805 bound", "binary open5gs-upfd"]
}
],
"interface_bindings": {
"eth0": [
{ "local_port": 2152, "remote_port": 2152 },
{ "local_port": 8805, "remote_port": 8805 }
]
}
}
}
],
"total": 1
}python
inventory = requests.get(
f"{BASE}/api/v2/telco/nf-inventory",
headers=sign("GET", "/api/v2/telco/nf-inventory"),
).json()
for entry in inventory["data"]:
for role in entry["nf"]["detected_roles"]:
print(entry["sensor_id"], role["role"], f"{role['confidence']*100:.0f}%")typescript
const inventory = await fetch(`${BASE}/api/v2/telco/nf-inventory`, {
headers: sign("GET", "/api/v2/telco/nf-inventory"),
}).then(r => r.json());
inventory.data.forEach((e: any) =>
e.nf.detected_roles.forEach((r: any) =>
console.log(e.sensor_id, r.role, `${(r.confidence * 100).toFixed(0)}%`)
)
);go
req, _ := http.NewRequest("GET", BASE+"/api/v2/telco/nf-inventory", nil)
req.Header = sign("GET", "/api/v2/telco/nf-inventory", nil)
resp, _ := http.DefaultClient.Do(req)
defer resp.Body.Close()
var result map[string]any
json.NewDecoder(resp.Body).Decode(&result)bash
eval curl -sf $(sign GET /api/v2/telco/nf-inventory) \
"${BASE}/api/v2/telco/nf-inventory"RAN snapshot
GET /api/v2/telco/ran/snapshot · Scope: sensors:read
Returns the latest Radio Access Network component telemetry snapshot for all telecom sensors that have detected RAN processes.
Response schema
| Field | Type | Description |
|---|---|---|
data | RanSnapshot[] | Array of per-sensor RAN snapshots |
total | integer | Total entries |
RanSnapshot object
| Field | Type | Description |
|---|---|---|
sensor_id | string | Sensor ID |
snapshot_written_at | string (ISO 8601) | Snapshot timestamp |
data.ran_nodes | array | List of {role, binary, detected_via} |
data.ran_nodes[].role | string | RAN role (e.g. du, cu, ric) |
data.ran_nodes[].binary | string | Detected process binary path |
data.ran_nodes[].detected_via | string | Detection method (e.g. port_binding, binary_name) |
Response example
json
{
"data": [
{
"sensor_id": "{sensor_id}",
"snapshot_written_at": "2026-04-26T10:00:00Z",
"data": {
"ran_nodes": [
{
"role": "du",
"binary": "/opt/oai/oai-gnb",
"detected_via": "port_binding"
},
{
"role": "cu",
"binary": "/opt/oai/oai-gnb",
"detected_via": "binary_name"
}
]
}
}
],
"total": 1
}python
snapshot = requests.get(
f"{BASE}/api/v2/telco/ran/snapshot",
headers=sign("GET", "/api/v2/telco/ran/snapshot"),
).json()
for entry in snapshot["data"]:
for node in entry["data"]["ran_nodes"]:
print(entry["sensor_id"], node["role"], node["binary"])typescript
const snapshot = await fetch(`${BASE}/api/v2/telco/ran/snapshot`, {
headers: sign("GET", "/api/v2/telco/ran/snapshot"),
}).then(r => r.json());
snapshot.data.forEach((e: any) =>
e.data.ran_nodes.forEach((n: any) => console.log(e.sensor_id, n.role, n.binary))
);go
req, _ := http.NewRequest("GET", BASE+"/api/v2/telco/ran/snapshot", nil)
req.Header = sign("GET", "/api/v2/telco/ran/snapshot", nil)
resp, _ := http.DefaultClient.Do(req)
defer resp.Body.Close()
var result map[string]any
json.NewDecoder(resp.Body).Decode(&result)bash
eval curl -sf $(sign GET /api/v2/telco/ran/snapshot) \
"${BASE}/api/v2/telco/ran/snapshot"SLO status
GET /api/v2/telco/slo/{sensor_id} · Scope: sensors:read
Returns the current SLO status and availability metrics for all network function roles detected on a specific sensor.
Path parameters
| Parameter | Type | Description |
|---|---|---|
sensor_id | string | Sensor ID |
Response schema
| Field | Type | Description |
|---|---|---|
sensor_id | string | Sensor ID |
snapshot_written_at | string (ISO 8601) | Snapshot timestamp |
data.role_slo_status | array | Per-role SLO status entries |
role_slo_status entry
| Field | Type | Description |
|---|---|---|
role | string | Network function role |
slo_target_pct | number | Target availability percentage (e.g. 99.99) |
observed_availability_pct | number | Measured availability over the window |
uptime_secs | integer | Total uptime seconds in the window |
mttr_secs | integer | Mean time to recovery in seconds |
restart_count | integer | Number of restarts observed |
breach_status | string | healthy · recovering · breached |
Response example
json
{
"sensor_id": "{sensor_id}",
"snapshot_written_at": "2026-04-26T10:00:00Z",
"data": {
"role_slo_status": [
{
"role": "upf",
"slo_target_pct": 99.99,
"observed_availability_pct": 99.97,
"uptime_secs": 86380,
"mttr_secs": 42,
"restart_count": 1,
"breach_status": "breached"
},
{
"role": "amf",
"slo_target_pct": 99.99,
"observed_availability_pct": 100.0,
"uptime_secs": 86400,
"mttr_secs": 0,
"restart_count": 0,
"breach_status": "healthy"
}
]
}
}Errors
| Code | HTTP | Description |
|---|---|---|
sensor_not_found | 404 | No sensor with that ID |
python
path = f"/api/v2/telco/slo/{sensor_id}"
slo = requests.get(
f"{BASE}{path}",
headers=sign("GET", path),
).json()
for role in slo["data"]["role_slo_status"]:
print(role["role"], role["breach_status"], f"{role['observed_availability_pct']}%")typescript
const path = `/api/v2/telco/slo/${sensorId}`;
const slo = await fetch(`${BASE}${path}`, {
headers: sign("GET", path),
}).then(r => r.json());
slo.data.role_slo_status.forEach((r: any) =>
console.log(r.role, r.breach_status, `${r.observed_availability_pct}%`)
);go
req, _ := http.NewRequest("GET", BASE+"/api/v2/telco/slo/{sensor_id}", nil)
req.Header = sign("GET", "/api/v2/telco/slo/{sensor_id}", nil)
resp, _ := http.DefaultClient.Do(req)
defer resp.Body.Close()
var result map[string]any
json.NewDecoder(resp.Body).Decode(&result)bash
eval curl -sf $(sign GET /api/v2/telco/slo/{sensor_id}) \
"${BASE}/api/v2/telco/slo/{sensor_id}"Telecom security alerts
GET /api/v2/telco/security/alerts · Scope: sensors:read
Returns telecom-specific security findings across the fleet. Findings are generated from E2 peer anomalies, O1 unauthorized management connections, SBI compliance failures, and signaling protocol deviations.
Response schema
| Field | Type | Description |
|---|---|---|
data | TelcoSecurityAlert[] | Array of per-sensor security alert snapshots |
total | integer | Total entries |
TelcoSecurityAlert object
| Field | Type | Description |
|---|---|---|
sensor_id | string | Sensor ID |
snapshot_written_at | string (ISO 8601) | Snapshot timestamp |
data.risk_level | string | Overall risk level: critical · high · medium · low |
data.score | integer | Composite risk score 0–100 |
data.findings | array | List of {category, description, severity} |
data.findings[].category | string | Finding category (e.g. e2_peer_anomaly, sbi_compliance) |
data.findings[].description | string | Human-readable finding description |
data.findings[].severity | string | critical · high · medium · low |
Response example
json
{
"data": [
{
"sensor_id": "{sensor_id}",
"snapshot_written_at": "2026-04-26T10:00:00Z",
"data": {
"risk_level": "high",
"score": 72,
"findings": [
{
"category": "e2_peer_anomaly",
"description": "E2 connection from unexpected peer IP 192.168.99.55 (not in allowed xApp list)",
"severity": "high"
},
{
"category": "sbi_compliance",
"description": "NF discovery request missing mandatory NF type parameter",
"severity": "medium"
}
]
}
}
],
"total": 1
}python
alerts = requests.get(
f"{BASE}/api/v2/telco/security/alerts",
headers=sign("GET", "/api/v2/telco/security/alerts"),
).json()
for entry in alerts["data"]:
print(entry["sensor_id"], entry["data"]["risk_level"], f"score={entry['data']['score']}")
for finding in entry["data"]["findings"]:
print(" ", finding["severity"], finding["category"], "-", finding["description"])typescript
const alerts = await fetch(`${BASE}/api/v2/telco/security/alerts`, {
headers: sign("GET", "/api/v2/telco/security/alerts"),
}).then(r => r.json());
alerts.data.forEach((e: any) => {
console.log(e.sensor_id, e.data.risk_level, `score=${e.data.score}`);
e.data.findings.forEach((f: any) => console.log(" ", f.severity, f.category, "-", f.description));
});go
req, _ := http.NewRequest("GET", BASE+"/api/v2/telco/security/alerts", nil)
req.Header = sign("GET", "/api/v2/telco/security/alerts", nil)
resp, _ := http.DefaultClient.Do(req)
defer resp.Body.Close()
var result map[string]any
json.NewDecoder(resp.Body).Decode(&result)bash
eval curl -sf $(sign GET /api/v2/telco/security/alerts) \
"${BASE}/api/v2/telco/security/alerts"Fleet Energy
Fleet energy metrics
GET /api/v2/energy/fleet · Scope: sensors:read
Returns daily average power consumption (in watts) per sensor over a rolling window. Use this to track fleet-wide energy trends and identify anomalous consumption spikes.
Query parameters
| Parameter | Type | Default | Description |
|---|---|---|---|
days | integer | 30 | Lookback window in days (min 1, max 90) |
Response schema
| Field | Type | Description |
|---|---|---|
days | integer | Applied lookback window |
from | string (ISO 8601) | Start of window |
until | string (ISO 8601) | End of window |
sensor_count | integer | Number of sensors in the result |
rows | array | Daily data rows: {date, sensor_id, avg_watts} |
rows[].date | string | UTC date (YYYY-MM-DD) |
rows[].sensor_id | string | Sensor ID |
rows[].avg_watts | number | Average power draw for that day in watts |
Response example
json
{
"days": 7,
"from": "2026-04-19T00:00:00Z",
"until": "2026-04-26T00:00:00Z",
"sensor_count": 2,
"rows": [
{ "date": "2026-04-25", "sensor_id": "{sensor_id}", "avg_watts": 142.3 },
{ "date": "2026-04-25", "sensor_id": "sensor_Kp4nRvWmYqBx8Lz", "avg_watts": 138.7 },
{ "date": "2026-04-24", "sensor_id": "{sensor_id}", "avg_watts": 145.1 },
{ "date": "2026-04-24", "sensor_id": "sensor_Kp4nRvWmYqBx8Lz", "avg_watts": 141.0 }
]
}python
path = "/api/v2/energy/fleet?days=7"
energy = requests.get(
f"{BASE}{path}",
headers=sign("GET", path),
).json()
for row in energy["rows"]:
print(row["date"], row["sensor_id"], f"{row['avg_watts']:.1f}W")typescript
const energy = await fetch(`${BASE}/api/v2/energy/fleet?days=7`, {
headers: sign("GET", "/api/v2/energy/fleet?days=7"),
}).then(r => r.json());
energy.rows.forEach((row: any) => console.log(row.date, row.sensor_id, `${row.avg_watts.toFixed(1)}W`));go
req, _ := http.NewRequest("GET", BASE+"/api/v2/energy/fleet?days=7", nil)
req.Header = sign("GET", "/api/v2/energy/fleet?days=7", nil)
resp, _ := http.DefaultClient.Do(req)
defer resp.Body.Close()
var result map[string]any
json.NewDecoder(resp.Body).Decode(&result)bash
eval curl -sf $(sign GET "/api/v2/energy/fleet?days=7") \
"${BASE}/api/v2/energy/fleet?days=7"AI Assistant
Chat (streaming)
POST /api/v2/chat · Scope: chat:read
Sends a message to the AI assistant and receives a Server-Sent Events (SSE) stream in response. The assistant has access to all Console data via internal tools and can answer questions about sensors, anomalies, events, compliance posture, and more.
Rate limit - 20 requests per minute per API key.
Body limit - 64 KB.
Request body
| Field | Type | Required | Description |
|---|---|---|---|
messages | array | Yes | 1–40 message objects, each with role ("user" or "assistant") and content (string) |
session_id | string (UUID) | No | Session identifier for multi-turn conversations. Omit to start a new session; the done event will return the server-assigned UUID to pass in subsequent requests |
json
{
"messages": [
{ "role": "user", "content": "Are there any critical anomalies in the last 2 hours?" }
],
"session_id": "550e8400-e29b-41d4-a716-446655440000"
}Multi-turn conversations - To continue a conversation, include the session_id from the previous done event and append both the assistant's last response and your new message to the messages array. The server maintains no message history on its own; the full context must be sent with each request.
SSE event types
The response Content-Type is text/event-stream. Each SSE frame has an event: line and a data: line containing a JSON object.
| Event | When emitted | Payload fields |
|---|---|---|
tool | When the assistant invokes an internal tool | tool (string), status ("running" or "done"), hint (string) |
delta | For each streamed text chunk | text (string) |
done | When the response is complete | tools_used (string[]), tokens (integer), truncated (boolean), session_id (UUID string) |
error | On a non-recoverable error | code (string), message (string) |
SSE stream example
event: tool
data: {"tool": "get_sensor_detail", "status": "running", "hint": "Fetching sensor upf-prod-01"}
event: tool
data: {"tool": "get_sensor_detail", "status": "done", "hint": "Fetching sensor upf-prod-01"}
event: tool
data: {"tool": "get_anomaly_scores", "status": "running", "hint": "Loading anomaly scores"}
event: tool
data: {"tool": "get_anomaly_scores", "status": "done", "hint": "Loading anomaly scores"}
event: delta
data: {"text": "There are "}
event: delta
data: {"text": "2 critical anomalies on upf-prod-01 in the last 2 hours. "}
event: delta
data: {"text": "The highest-scoring event is a privilege escalation at 08:47 UTC with a score of 94."}
event: done
data: {"tools_used": ["get_sensor_detail", "get_anomaly_scores"], "tokens": 312, "truncated": false, "session_id": "550e8400-e29b-41d4-a716-446655440000"}Errors
| Code | HTTP | Description |
|---|---|---|
messages_required | 422 | messages field is missing or empty |
too_many_messages | 422 | More than 40 messages in the array |
rate_limit_exceeded | 429 | More than 20 requests per minute on this key |
llm_unavailable | 503 | The LLM provider is not configured or unreachable |
python
import json, sys
import requests, sseclient
KEY_ID = "your-key-id"
SECRET = bytes.fromhex("your-hmac-secret-hex")
BASE = "https://console.example.com"
body = json.dumps({
"messages": [
{"role": "user", "content": "Are there any critical anomalies in the last 2 hours?"}
]
}).encode()
response = requests.post(
f"{BASE}/api/v2/chat",
headers=sign("POST", "/api/v2/chat", body),
data=body,
stream=True,
)
response.raise_for_status()
session_id = None
for event in sseclient.SSEClient(response).events():
payload = json.loads(event.data)
if event.event == "tool" and payload["status"] == "running":
print(f"[tool] {payload['hint']}", file=sys.stderr)
elif event.event == "delta":
sys.stdout.write(payload["text"])
sys.stdout.flush()
elif event.event == "done":
session_id = payload["session_id"]
print(f"\n[done] tokens={payload['tokens']} session={session_id}", file=sys.stderr)
break
elif event.event == "error":
raise RuntimeError(f"{payload['code']}: {payload['message']}")
# Continue the conversation
follow_up = json.dumps({
"messages": [
{"role": "user", "content": "Are there any critical anomalies in the last 2 hours?"},
{"role": "assistant", "content": "There are 2 critical anomalies on upf-prod-01..."},
{"role": "user", "content": "What is the process tree for the privilege escalation?"},
],
"session_id": session_id,
}).encode()
# ... repeat the same streaming looptypescript
const body = JSON.stringify({
messages: [{ role: "user", content: "Are there any critical anomalies in the last 2 hours?" }],
});
const response = await fetch(`${BASE}/api/v2/chat`, {
method: "POST",
headers: sign("POST", "/api/v2/chat", Buffer.from(body)),
body,
});
if (!response.ok) throw new Error(`HTTP ${response.status}`);
const reader = response.body!.getReader();
const decoder = new TextDecoder();
let buf = "";
let eventType = "message";
while (true) {
const { done, value } = await reader.read();
if (done) break;
buf += decoder.decode(value, { stream: true });
const lines = buf.split("\n");
buf = lines.pop()!;
for (const line of lines) {
if (line.startsWith("event: ")) {
eventType = line.slice(7).trim();
} else if (line.startsWith("data: ")) {
const payload = JSON.parse(line.slice(6));
if (eventType === "delta") {
process.stdout.write(payload.text);
} else if (eventType === "done") {
process.stderr.write(`\n[done] tokens=${payload.tokens} session=${payload.session_id}\n`);
} else if (eventType === "error") {
throw new Error(`${payload.code}: ${payload.message}`);
}
}
}
}go
package main
import (
"bufio"
"bytes"
"encoding/json"
"fmt"
"net/http"
"os"
"strings"
)
func main() {
body := []byte(`{"messages":[{"role":"user","content":"Are there any critical anomalies in the last 2 hours?"}]}`)
req, _ := http.NewRequest("POST", BASE+"/api/v2/chat", bytes.NewReader(body))
req.Header = sign("POST", "/api/v2/chat", body)
req.Header.Set("Accept", "text/event-stream")
resp, err := http.DefaultClient.Do(req)
if err != nil {
panic(err)
}
defer resp.Body.Close()
scanner := bufio.NewScanner(resp.Body)
eventType := ""
for scanner.Scan() {
line := scanner.Text()
if strings.HasPrefix(line, "event: ") {
eventType = strings.TrimPrefix(line, "event: ")
} else if strings.HasPrefix(line, "data: ") {
var payload map[string]any
json.Unmarshal([]byte(strings.TrimPrefix(line, "data: ")), &payload)
switch eventType {
case "delta":
fmt.Fprint(os.Stdout, payload["text"])
case "done":
fmt.Fprintf(os.Stderr, "\n[done] tokens=%.0f session=%s\n",
payload["tokens"], payload["session_id"])
return
case "error":
fmt.Fprintf(os.Stderr, "error: %s: %s\n", payload["code"], payload["message"])
return
}
}
}
}bash
BODY='{"messages":[{"role":"user","content":"Are there any critical anomalies in the last 2 hours?"}]}'
eval curl -sf -N \
-X POST \
-H "Accept: text/event-stream" \
$(sign POST /api/v2/chat "$BODY") \
-d "$BODY" \
"${BASE}/api/v2/chat"
# -N disables buffering so SSE frames print to the terminal as they arrive