Overview
This endpoint allows you to export alert data in CSV format. You can apply filters, select specific fields, and control the format of the exported data. This is useful for creating reports, data analysis, and integrating with external business intelligence tools.
Authorization Required: Include a valid Bearer Token in the Authorization header.
Endpoint Details
POST /api/elasticsearch/alerts/export/csv

Method: POST
Content-Type: application/json
Authentication: Bearer Token required
Response: CSV file download or CSV data
Request Body
Filters to apply when selecting alerts for export
List of field names to include in the CSV export
Starting position for pagination (0-based)
Maximum number of records to export
orderBy
string
default: "@timestamp"
Field to sort results by
Sort direction: "asc" or "desc"
Whether to include column headers in the CSV
dateFormat
string
default: "yyyy-MM-dd HH:mm:ss"
Format for date/time fields in the CSV
Field separator character (comma, semicolon, tab)
Optional filename for the CSV export
JSON Schema
{
  "type": "object",
  "properties": {
    "filters": {
      "type": "array",
      "items": {
        "type": "object",
        "properties": {
          "field": { "type": "string" },
          "operator": { "type": "string" },
          "value": { "oneOf": [{ "type": "string" }, { "type": "integer" }, { "type": "array" }] }
        }
      },
      "description": "Filters to apply"
    },
    "fields": {
      "type": "array",
      "items": { "type": "string" },
      "description": "Fields to include in export"
    },
    "from": {
      "type": "integer",
      "description": "Starting position for pagination"
    },
    "size": {
      "type": "integer",
      "description": "Maximum records to export"
    },
    "orderBy": {
      "type": "string",
      "description": "Field to sort by"
    },
    "sortDirection": {
      "type": "string",
      "enum": ["asc", "desc"],
      "description": "Sort direction"
    },
    "includeHeaders": {
      "type": "boolean",
      "description": "Include column headers"
    },
    "dateFormat": {
      "type": "string",
      "description": "Date format pattern"
    },
    "separator": {
      "type": "string",
      "description": "Field separator"
    },
    "filename": {
      "type": "string",
      "description": "Export filename"
    }
  },
  "required": ["fields"]
}
Request & Response Examples
# Export the past week's alerts with severity >= 3 to a CSV report.
# Replace <your_access_token> with a valid Bearer token before running.
curl -X POST "https://demo.utmstack.com/api/elasticsearch/alerts/export/csv" \
-H "Authorization: Bearer <your_access_token>" \
-H "Content-Type: application/json" \
-d '{
"filters": [
{
"field": "@timestamp",
"operator": "IS_BETWEEN",
"value": ["now-7d", "now"]
},
{
"field": "severity",
"operator": "GREATER_EQUAL",
"value": 3
}
],
"fields": [
"@timestamp",
"name",
"severity",
"status",
"dataSource",
"category",
"description"
],
"from": 0,
"size": 5000,
"orderBy": "@timestamp",
"sortDirection": "desc",
"includeHeaders": true,
"dateFormat": "yyyy-MM-dd HH:mm:ss",
"separator": ",",
"filename": "high_severity_alerts_weekly_report.csv"
}'
@timestamp, name, severity, status, dataSource, category, description
2024-01-15 14:32:15, "Suspicious Login Attempt", 4, 2, "windows-server-01", "Authentication", "Multiple failed login attempts detected"
2024-01-15 14:28:42, "Port Scan Detected", 3, 2, "firewall-main", "Network", "Port scanning activity from external IP"
2024-01-15 14:15:33, "Malware Signature Match", 5, 3, "endpoint-protection", "Malware", "Known malware signature detected in file"
2024-01-15 13:45:18, "Privilege Escalation", 4, 2, "linux-server-02", "System", "Unauthorized privilege escalation attempt"
2024-01-15 13:22:07, "SQL Injection Attempt", 4, 3, "web-app-prod", "Application", "SQL injection pattern in HTTP request"
Additional Code Examples
import axios from "axios" ;
import fs from "fs" ;
/**
 * Export alerts to CSV via the UTMStack API and save the result to disk.
 *
 * @param {object} exportOptions - Partial export request; any key set here
 *   overrides the defaults below (fields, filters, size, filename, ...).
 * @returns {Promise<string|null>} The raw CSV text on success, or null when
 *   the request fails (the error is logged, not rethrown, so batch callers
 *   can continue).
 */
const exportAlertsToCSV = async (exportOptions) => {
  const token = "<your_access_token>";

  // Sensible defaults; caller-supplied options win via the spread below.
  const defaultOptions = {
    fields: [
      "@timestamp", "name", "severity", "status",
      "dataSource", "category", "description"
    ],
    from: 0,
    size: 10000,
    orderBy: "@timestamp",
    sortDirection: "desc",
    includeHeaders: true,
    dateFormat: "yyyy-MM-dd HH:mm:ss",
    separator: ",",
    ...exportOptions
  };

  try {
    const response = await axios.post(
      "https://demo.utmstack.com/api/elasticsearch/alerts/export/csv",
      defaultOptions,
      {
        headers: {
          // No stray spaces: the header must be exactly "Bearer <token>".
          Authorization: `Bearer ${token}`,
          "Content-Type": "application/json"
        },
        responseType: "text" // CSV comes back as plain text, not JSON
      }
    );

    // Save to file; fall back to a date-stamped name when none was given.
    const filename = exportOptions.filename ||
      `alerts_export_${new Date().toISOString().slice(0, 10)}.csv`;
    fs.writeFileSync(filename, response.data);
    console.log(`CSV exported successfully: ${filename}`);
    return response.data;
  } catch (error) {
    console.error("Error exporting CSV:", error.response?.data || error.message);
    return null; // explicit failure marker instead of implicit undefined
  }
};
// Usage examples

// Export everything from the last 24 hours to a daily report file.
await exportAlertsToCSV({
  filters: [
    { field: "@timestamp", operator: "IS_BETWEEN", value: ["now-24h", "now"] }
  ],
  filename: "daily_alerts_report.csv"
});

// Export high severity alerts with a trimmed field list.
await exportAlertsToCSV({
  filters: [
    { field: "severity", operator: "GREATER_EQUAL", value: 4 }
  ],
  fields: ["@timestamp", "name", "severity", "dataSource", "description"],
  filename: "high_severity_alerts.csv"
});
Response Details
Successful Response
Standard Export
Custom Separator
@timestamp, name, severity, status, dataSource
2024-01-15 14:32:15, "Suspicious Login Attempt", 4, 2, "windows-server-01"
2024-01-15 14:28:42, "Port Scan Detected", 3, 2, "firewall-main"
2024-01-15 14:15:33, "Malware Signature Match", 5, 3, "endpoint-protection"
Status Codes
200 – CSV data successfully exported
400 – Invalid field names, malformed filters, or invalid parameters
401 – Missing or invalid Bearer token
413 – Export size exceeds maximum allowed limit
500 – Internal server error during export
Common Use Cases
Security Incident Reports
// Export security incidents for executive reporting.
const exportSecurityIncidents = async () => {
  // Window: last 7 days; only severity >= 4 in breach-relevant categories.
  const incidentFilters = [
    { field: "@timestamp", operator: "IS_BETWEEN", value: ["now-7d", "now"] },
    { field: "severity", operator: "GREATER_EQUAL", value: 4 },
    { field: "category", operator: "IS_IN", value: ["Malware", "Intrusion", "Data Breach"] }
  ];

  return exportAlertsToCSV({
    filters: incidentFilters,
    fields: [
      "@timestamp", "name", "severity", "status",
      "category", "dataSource", "assignedTo", "description"
    ],
    filename: "weekly_security_incidents.csv",
    size: 5000
  });
};
Compliance Reports
def export_compliance_report(start_date, end_date):
    """Export alerts for compliance reporting.

    Args:
        start_date: Inclusive start of the reporting window (Elasticsearch
            date-math such as "now-90d", or an absolute timestamp).
        end_date: Inclusive end of the reporting window.

    Returns:
        Whatever export_alerts_to_csv returns for the request
        (presumably the exported CSV data — confirm against its definition).
    """
    compliance_fields = [
        "@timestamp", "name", "severity", "status", "category",
        "dataSource", "sourceIp", "destIp", "user", "action",
        "outcome", "tags", "notes",
    ]
    filters = [
        {
            "field": "@timestamp",
            "operator": "IS_BETWEEN",
            "value": [start_date, end_date],
        },
        {
            # Only alerts explicitly tagged for compliance tracking.
            "field": "tags",
            "operator": "CONTAINS",
            "value": "compliance",
        },
    ]
    return export_alerts_to_csv(
        fields=compliance_fields,
        filters=filters,
        filename=f"compliance_report_{start_date}_to_{end_date}.csv",
        size=50000,
        orderBy="@timestamp",
        sortDirection="asc",
    )


# Generate quarterly compliance report
quarterly_report = export_compliance_report("now-90d", "now")
Data Analysis Export
// Export for data science analysis.
const exportForAnalysis = async () => {
  // Network/flow plus MITRE fields useful for offline analysis.
  const analysisFields = [
    "@timestamp", "severity", "status", "category", "tactic",
    "technique", "dataSource", "sourceIp", "destIp", "sourcePort",
    "destPort", "protocol", "bytes", "packets", "duration"
  ];

  const lastThirtyDays = {
    field: "@timestamp",
    operator: "IS_BETWEEN",
    value: ["now-30d", "now"]
  };

  return exportAlertsToCSV({
    filters: [lastThirtyDays],
    fields: analysisFields,
    size: 100000,
    filename: "alerts_analysis_dataset.csv",
    separator: ",",
    dateFormat: "yyyy-MM-dd'T'HH:mm:ss.SSS'Z'" // ISO-8601 with milliseconds
  });
};
Available Fields
Essential fields for most exports:
@timestamp
- Alert timestamp
name
- Alert name/title
severity
- Severity level (1-5)
status
- Alert status code
category
- Alert category
description
- Alert description
dataSource
- Source system
Network-related alert fields:
sourceIp
- Source IP address
destIp
- Destination IP address
sourcePort
- Source port
destPort
- Destination port
protocol
- Network protocol
bytes
- Bytes transferred
packets
- Packet count
Security-focused fields:
tactic
- MITRE ATT&CK tactic
technique
- MITRE ATT&CK technique
user
- Associated username
process
- Process information
file
- File path
hash
- File/process hash
signature
- Detection signature
Alert management fields:
assignedTo
- Assigned analyst
tags
- Applied tags
notes
- Investigation notes
disposition
- Final disposition
createdAt
- Creation time
updatedAt
- Last update time
Standard Formats
Custom Patterns
yyyy-MM-dd HH:mm:ss → 2024-01-15 14:32:15
yyyy-MM-dd'T'HH:mm:ss'Z' → 2024-01-15T14:32:15Z
MM/dd/yyyy HH:mm:ss → 01/15/2024 14:32:15
dd-MM-yyyy HH:mm → 15-01-2024 14:32
yyyy-MM-dd → 2024-01-15
HH:mm:ss → 14:32:15
Separator Options
"separator": ","     // Comma (default)
"separator": ";"     // Semicolon (Excel EU)
"separator": "\t"    // Tab-delimited
"separator": "|"     // Pipe-delimited
"separator": " "     // Space-delimited
Size Limits
Export Limits:
Maximum size: 100,000 records per request
Maximum file size: ~50MB
Timeout: 5 minutes for large exports
Use pagination (`from` + `size`) for larger datasets
Consider filtering to reduce result set size
Advanced Examples
Multi-file Export Strategy
/**
 * Split a large export into multiple CSV files so each request stays
 * under the server's per-request size limit.
 *
 * @param {number} totalRecords - Total number of records to export.
 * @param {number} [batchSize=10000] - Records per file/request.
 * @returns {Promise<string[]>} The list of generated filenames.
 */
const exportLargeDataset = async (totalRecords, batchSize = 10000) => {
  const batches = Math.ceil(totalRecords / batchSize);
  const files = [];

  for (let i = 0; i < batches; i++) {
    const from = i * batchSize;
    // Clean template literal — no stray spaces in the generated filename.
    const filename = `alerts_batch_${i + 1}_of_${batches}.csv`;
    await exportAlertsToCSV({
      fields: ["@timestamp", "name", "severity", "status", "dataSource"],
      from,
      size: batchSize,
      filename,
      includeHeaders: i === 0 // only the first file carries headers
    });
    files.push(filename);
  }

  console.log(`Export complete: ${files.length} files generated`);
  return files;
};

// Export 50,000 records in 5 batches
await exportLargeDataset(50000, 10000);
Custom Field Mapping
def export_with_field_mapping(field_mapping, **options):
    """Export alerts, then rename the CSV columns to friendly names.

    Args:
        field_mapping: Dict mapping original alert field names to the
            column headers wanted in the final CSV.
        **options: Forwarded to export_alerts_to_csv (filters, size,
            filename, ...).

    Returns:
        The renamed DataFrame, or None if the export failed.
    """
    # Query with the original (API) field names.
    original_fields = list(field_mapping.keys())

    df = export_alerts_to_csv(
        fields=original_fields,
        **options,
    )

    # Guard clause: nothing to rename if the export returned no data.
    if df is None:
        return None

    # Rename columns to the caller's friendly names and save.
    df = df.rename(columns=field_mapping)
    filename = options.get('filename', 'alerts_mapped.csv')
    df.to_csv(filename, index=False)
    print(f"Exported with custom field mapping: {filename}")
    return df


# Usage with field mapping
field_mapping = {
    "@timestamp": "Alert Time",
    "name": "Alert Name",
    "severity": "Risk Level",
    "status": "Current Status",
    "dataSource": "Source System",
    "category": "Threat Category",
}

export_with_field_mapping(
    field_mapping=field_mapping,
    filename="executive_report.csv",
)
Scheduled Export Script
#!/bin/bash
# scheduled_export.sh - Daily alert export for reporting.
# Run from cron once a day; writes two CSVs under $EXPORT_DIR/<date>/.

# Shell assignments must have no spaces around "=".
DATE=$(date +%Y%m%d)
EXPORT_DIR="/reports/daily"
API_TOKEN="your_token_here"

# Create daily directory (quoted, no embedded spaces in the path).
mkdir -p "$EXPORT_DIR/$DATE"

# Export high severity alerts from the last 24 hours.
# The '"$DATE"' sequence drops out of the single-quoted JSON payload to
# splice in the (quoted) date, then re-enters single quotes.
curl -X POST "https://demo.utmstack.com/api/elasticsearch/alerts/export/csv" \
  -H "Authorization: Bearer $API_TOKEN" \
  -H "Content-Type: application/json" \
  -d '{
    "filters": [
      {
        "field": "@timestamp",
        "operator": "IS_BETWEEN",
        "value": ["now-24h", "now"]
      },
      {
        "field": "severity",
        "operator": "GREATER_EQUAL",
        "value": 4
      }
    ],
    "fields": [
      "@timestamp", "name", "severity", "status",
      "dataSource", "category", "description"
    ],
    "filename": "high_severity_daily_'"$DATE"'.csv"
  }' > "$EXPORT_DIR/$DATE/high_severity_alerts.csv"

# Export summary statistics for the same window.
curl -X POST "https://demo.utmstack.com/api/elasticsearch/alerts/export/csv" \
  -H "Authorization: Bearer $API_TOKEN" \
  -H "Content-Type: application/json" \
  -d '{
    "filters": [
      {
        "field": "@timestamp",
        "operator": "IS_BETWEEN",
        "value": ["now-24h", "now"]
      }
    ],
    "fields": ["@timestamp", "severity", "status", "category", "dataSource"],
    "filename": "daily_summary_'"$DATE"'.csv"
  }' > "$EXPORT_DIR/$DATE/daily_summary.csv"

echo "Daily export completed: $EXPORT_DIR/$DATE"
OpenAPI Specification
# OpenAPI fragment for POST /api/elasticsearch/alerts/export/csv.
# Indentation reconstructed: the extracted source had it stripped, which
# makes YAML invalid.
post:
  summary: "Export alert data to CSV format"
  tags:
    - Alert Export
  security:
    - bearerAuth: []
  requestBody:
    required: true
    content:
      application/json:
        schema:
          $ref: '#/components/schemas/CSVExportRequest'
  responses:
    '200':
      description: "CSV data exported successfully"
      content:
        text/csv:
          schema:
            type: string
            format: binary
          example: |
            @timestamp,name,severity,status,dataSource
            2024-01-15 14:32:15,"Suspicious Login Attempt",4,2,"windows-server-01"
    '400':
      description: "Invalid request parameters"
    '401':
      description: "Unauthorized"
    '413':
      description: "Export size exceeds limit"
    '500':
      description: "Internal server error"

components:
  schemas:
    CSVExportRequest:
      type: object
      required:
        - fields
      properties:
        filters:
          type: array
          items:
            # NOTE(review): FilterType is referenced but defined elsewhere
            # in the full specification.
            $ref: '#/components/schemas/FilterType'
          description: "Filters to apply"
        fields:
          type: array
          items:
            type: string
          description: "Fields to include in export"
        from:
          type: integer
          default: 0
          description: "Starting position for pagination"
        size:
          type: integer
          default: 10000
          description: "Maximum records to export"
        orderBy:
          type: string
          default: "@timestamp"
          description: "Field to sort by"
        sortDirection:
          type: string
          enum: ["asc", "desc"]
          default: "desc"
          description: "Sort direction"
        includeHeaders:
          type: boolean
          default: true
          description: "Include column headers"
        dateFormat:
          type: string
          default: "yyyy-MM-dd HH:mm:ss"
          description: "Date format pattern"
        separator:
          type: string
          default: ","
          description: "Field separator"
        filename:
          type: string
          description: "Export filename"