//! Mirror of <https://github.com/LittleQuartZ/addmon.git> (synced 2026-02-07 02:45:28 +07:00).
//!
//! Recent changes:
//! - Add `pair_alerts()` to pair consecutive Alerting→Normal state transitions
//! - Add `parse_labels_from_text()` to extract labels from alert text field
//! - Update `ProcessedAlert` struct with alertTime, resolveTime, isResolved, labels
//! - Display incident name in alert list items instead of incident ID
//! - Show parsed labels in alert row (excluding alertname)
mod commands;
|
|
|
|
use serde::{Deserialize, Serialize};
|
|
use std::collections::HashMap;
|
|
use thiserror::Error;
|
|
|
|
/// Application-level error type surfaced to the Tauri frontend.
///
/// Each variant carries a human-readable message via `thiserror`'s
/// `#[error]` attribute; the `Serialize` impl below sends exactly that
/// message to the webview as a plain string.
#[derive(Debug, Error)]
pub enum AppError {
    /// Filesystem / IO failure, converted automatically via `#[from]`.
    #[error("IO error: {0}")]
    Io(#[from] std::io::Error),
    /// Malformed JSON input, converted automatically via `#[from]`.
    #[error("JSON parse error: {0}")]
    Json(#[from] serde_json::Error),
    /// Lookup failure; carries a description of the missing item.
    /// NOTE(review): raised by the `commands` module — confirm usage there.
    #[error("Not found: {0}")]
    NotFound(String),
}
|
|
|
|
impl Serialize for AppError {
|
|
fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
|
|
where
|
|
S: serde::ser::Serializer,
|
|
{
|
|
serializer.serialize_str(self.to_string().as_ref())
|
|
}
|
|
}
|
|
|
|
/// One raw alert-history entry as exported from Grafana (camelCase JSON keys).
///
/// NOTE(review): timestamps appear to be epoch milliseconds — `pair_alerts`
/// derives `duration_ms` directly from `time` differences; confirm against
/// the actual export format.
#[derive(Debug, Clone, Serialize, Deserialize)]
#[serde(rename_all = "camelCase")]
pub struct GrafanaAlert {
    /// Unique id of this history entry.
    pub id: u64,
    /// Id of the alert rule; entries sharing an `alert_id` are grouped and
    /// paired by `pair_alerts`.
    pub alert_id: u32,
    /// Human-readable rule name, when present in the export.
    #[serde(default)]
    pub alert_name: Option<String>,
    /// UID of the dashboard the rule belongs to, when present.
    #[serde(default)]
    pub dashboard_uid: Option<String>,
    /// State after this transition (e.g. "Alerting", "Normal").
    pub new_state: String,
    /// State before this transition.
    pub prev_state: String,
    /// Creation timestamp of the history entry.
    pub created: u64,
    /// Last-updated timestamp of the history entry.
    pub updated: u64,
    /// Timestamp of the state transition; used for ordering and for
    /// computing firing duration in `pair_alerts`.
    pub time: u64,
    /// End timestamp reported by Grafana (not used by `pair_alerts`).
    pub time_end: u64,
    /// Free-form alert text; labels are presumably parsed out of this
    /// field elsewhere (see `parse_labels_from_text` in the changelog).
    pub text: String,
    /// Optional metric values captured at transition time.
    #[serde(default)]
    pub data: Option<AlertData>,
}
|
|
|
|
/// Metric snapshot attached to an alert-history entry.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct AlertData {
    /// Metric name → value. Non-finite values may arrive as strings
    /// ("+Inf", "NaN", …) and are normalized by `deserialize_metric_values`.
    #[serde(default, deserialize_with = "deserialize_metric_values")]
    pub values: HashMap<String, f64>,
}
|
|
|
|
/// Deserialize metric values that may contain "+Inf", "-Inf", "NaN" as strings
|
|
fn deserialize_metric_values<'de, D>(deserializer: D) -> Result<HashMap<String, f64>, D::Error>
|
|
where
|
|
D: serde::Deserializer<'de>,
|
|
{
|
|
use serde::de::{MapAccess, Visitor};
|
|
use std::fmt;
|
|
|
|
struct MetricValuesVisitor;
|
|
|
|
impl<'de> Visitor<'de> for MetricValuesVisitor {
|
|
type Value = HashMap<String, f64>;
|
|
|
|
fn expecting(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
|
|
formatter.write_str("a map of string to number or special float strings")
|
|
}
|
|
|
|
fn visit_map<M>(self, mut access: M) -> Result<Self::Value, M::Error>
|
|
where
|
|
M: MapAccess<'de>,
|
|
{
|
|
let mut map = HashMap::new();
|
|
|
|
while let Some((key, value)) = access.next_entry::<String, serde_json::Value>()? {
|
|
let num = match value {
|
|
serde_json::Value::Number(n) => n.as_f64().unwrap_or(0.0),
|
|
serde_json::Value::String(s) => match s.as_str() {
|
|
"+Inf" | "Inf" | "infinity" | "-Inf" | "-infinity" | "NaN" | "nan" => 0.0,
|
|
other => other.parse().unwrap_or(0.0),
|
|
},
|
|
_ => 0.0,
|
|
};
|
|
map.insert(key, num);
|
|
}
|
|
|
|
Ok(map)
|
|
}
|
|
}
|
|
|
|
deserializer.deserialize_map(MetricValuesVisitor)
|
|
}
|
|
|
|
/// An alert after pairing: a firing event plus its (optional) resolution.
///
/// Produced by `pair_alerts`; serialized to the frontend in camelCase.
#[derive(Debug, Clone, Serialize, Deserialize)]
#[serde(rename_all = "camelCase")]
pub struct ProcessedAlert {
    /// Id of the originating firing history entry.
    pub id: u64,
    /// Alert rule id.
    pub alert_id: u32,
    /// Rule name, when present in the raw entry.
    #[serde(default)]
    pub alert_name: Option<String>,
    /// Timestamp at which the alert started firing.
    pub alert_time: u64,
    /// Timestamp of the resolving "Normal" transition; `None` while the
    /// alert is still open.
    pub resolve_time: Option<u64>,
    /// Free-form text copied from the firing entry.
    pub text: String,
    /// Firing duration (0 for unresolved alerts).
    pub duration_ms: u64,
    /// Whether a matching "Normal" transition was found.
    pub is_resolved: bool,
    /// Flag excluding the alert from downtime / validity KPIs
    /// (set via `commands::set_alert_invalid`).
    pub is_invalid: bool,
    /// Incident this alert has been attached to, if any.
    pub attached_incident_id: Option<String>,
    /// Metric values captured at firing time.
    pub values: HashMap<String, f64>,
}
|
|
|
|
pub fn pair_alerts(raw_alerts: Vec<GrafanaAlert>) -> Vec<ProcessedAlert> {
|
|
use std::collections::BTreeMap;
|
|
|
|
let mut by_alert_id: BTreeMap<u32, Vec<GrafanaAlert>> = BTreeMap::new();
|
|
for alert in raw_alerts {
|
|
by_alert_id.entry(alert.alert_id).or_default().push(alert);
|
|
}
|
|
|
|
let mut result = Vec::new();
|
|
|
|
for (_alert_id, mut alerts) in by_alert_id {
|
|
alerts.sort_by_key(|a| a.time);
|
|
|
|
let mut i = 0;
|
|
while i < alerts.len() {
|
|
let current = &alerts[i];
|
|
|
|
if current.new_state == "Alerting" {
|
|
if i + 1 < alerts.len() && alerts[i + 1].new_state == "Normal" {
|
|
let resolve = &alerts[i + 1];
|
|
let duration_ms = resolve.time.saturating_sub(current.time);
|
|
|
|
result.push(ProcessedAlert {
|
|
id: current.id,
|
|
alert_id: current.alert_id,
|
|
alert_name: current.alert_name.clone(),
|
|
alert_time: current.time,
|
|
resolve_time: Some(resolve.time),
|
|
text: current.text.clone(),
|
|
duration_ms,
|
|
is_resolved: true,
|
|
is_invalid: false,
|
|
attached_incident_id: None,
|
|
values: current.data.clone().map(|d| d.values).unwrap_or_default(),
|
|
});
|
|
i += 2;
|
|
} else {
|
|
result.push(ProcessedAlert {
|
|
id: current.id,
|
|
alert_id: current.alert_id,
|
|
alert_name: current.alert_name.clone(),
|
|
alert_time: current.time,
|
|
resolve_time: None,
|
|
text: current.text.clone(),
|
|
duration_ms: 0,
|
|
is_resolved: false,
|
|
is_invalid: false,
|
|
attached_incident_id: None,
|
|
values: current.data.clone().map(|d| d.values).unwrap_or_default(),
|
|
});
|
|
i += 1;
|
|
}
|
|
} else {
|
|
i += 1;
|
|
}
|
|
}
|
|
}
|
|
|
|
result.sort_by(|a, b| b.alert_time.cmp(&a.alert_time));
|
|
result
|
|
}
|
|
|
|
/// An incident record against which alerts can be attached.
#[derive(Debug, Clone, Serialize, Deserialize)]
#[serde(rename_all = "camelCase")]
pub struct Incident {
    /// Incident identifier.
    pub id: String,
    /// Short incident title.
    pub title: String,
    /// Longer free-form description.
    pub description: String,
    /// Incident window start timestamp.
    pub start_time: u64,
    /// Incident window end timestamp.
    pub end_time: u64,
    /// Ids of `ProcessedAlert`s attached to this incident; non-empty means
    /// the incident counts as "covered" in `calculate_kpis`.
    pub attached_alert_ids: Vec<u64>,
}
|
|
|
|
/// Aggregated KPI figures computed by `calculate_kpis`, serialized to the
/// frontend dashboard in camelCase.
#[derive(Debug, Clone, Serialize, Deserialize)]
#[serde(rename_all = "camelCase")]
pub struct KpiMetrics {
    /// Percentage (0–100) of incidents that have at least one attached alert.
    pub error_coverage_ratio: f64,
    /// Total number of incidents.
    pub total_incidents: usize,
    /// Number of incidents with at least one attached alert.
    pub covered_incidents: usize,
    /// Summed duration of all valid (not-invalid) alerts.
    pub overall_downtime_ms: u64,
    /// `overall_downtime_ms` rendered by `format_duration`.
    pub overall_downtime_formatted: String,
    /// Percentage (0–100) of alerts flagged invalid.
    pub invalid_alert_ratio: f64,
    /// Total number of processed alerts (valid and invalid).
    pub total_firing_alerts: usize,
    /// Number of alerts flagged invalid.
    pub invalid_alerts: usize,
}
|
|
|
|
pub fn calculate_kpis(alerts: &[ProcessedAlert], incidents: &[Incident]) -> KpiMetrics {
|
|
let total_incidents = incidents.len();
|
|
let covered_incidents = incidents
|
|
.iter()
|
|
.filter(|incident| !incident.attached_alert_ids.is_empty())
|
|
.count();
|
|
let error_coverage_ratio = if total_incidents > 0 {
|
|
(covered_incidents as f64 / total_incidents as f64) * 100.0
|
|
} else {
|
|
0.0
|
|
};
|
|
|
|
let overall_downtime_ms: u64 = alerts
|
|
.iter()
|
|
.filter(|a| !a.is_invalid)
|
|
.map(|a| a.duration_ms)
|
|
.sum();
|
|
|
|
let overall_downtime_formatted = format_duration(overall_downtime_ms);
|
|
|
|
let total_alerts = alerts.len();
|
|
let invalid_alerts = alerts.iter().filter(|a| a.is_invalid).count();
|
|
let invalid_alert_ratio = if total_alerts > 0 {
|
|
(invalid_alerts as f64 / total_alerts as f64) * 100.0
|
|
} else {
|
|
0.0
|
|
};
|
|
|
|
KpiMetrics {
|
|
error_coverage_ratio,
|
|
total_incidents,
|
|
covered_incidents,
|
|
overall_downtime_ms,
|
|
overall_downtime_formatted,
|
|
invalid_alert_ratio,
|
|
total_firing_alerts: total_alerts,
|
|
invalid_alerts,
|
|
}
|
|
}
|
|
|
|
/// Render a millisecond duration as a coarse human-readable string, e.g.
/// "1d 3h 12m", "2h 5m 9s", "4m 2s", or "37s".
///
/// The largest nonzero unit decides the format; sub-second precision is
/// always dropped, and day-scale output omits seconds.
fn format_duration(ms: u64) -> String {
    let total_seconds = ms / 1000;
    let (seconds, total_minutes) = (total_seconds % 60, total_seconds / 60);
    let (mins, total_hours) = (total_minutes % 60, total_minutes / 60);
    let (hrs, days) = (total_hours % 24, total_hours / 24);

    match (days, total_hours, total_minutes) {
        (d, _, _) if d > 0 => format!("{}d {}h {}m", d, hrs, mins),
        (_, h, _) if h > 0 => format!("{}h {}m {}s", h, mins, seconds),
        (_, _, m) if m > 0 => format!("{}m {}s", m, seconds),
        _ => format!("{}s", seconds),
    }
}
|
|
|
|
/// Tauri application entry point: registers plugins, wires up logging in
/// debug builds, and exposes the command handlers to the webview.
#[cfg_attr(mobile, tauri::mobile_entry_point)]
pub fn run() {
    tauri::Builder::default()
        // Filesystem and native-dialog access for the frontend.
        .plugin(tauri_plugin_fs::init())
        .plugin(tauri_plugin_dialog::init())
        .setup(|app| {
            // Attach the log plugin only in debug builds, at Info level.
            if cfg!(debug_assertions) {
                app.handle().plugin(
                    tauri_plugin_log::Builder::default()
                        .level(log::LevelFilter::Info)
                        .build(),
                )?;
            }
            Ok(())
        })
        // Commands callable from the frontend via `invoke(...)`;
        // implemented in the `commands` module.
        .invoke_handler(tauri::generate_handler![
            commands::load_alerts_from_file,
            commands::process_alerts_json,
            commands::calculate_kpis_command,
            commands::set_alert_invalid,
            commands::attach_alert_to_incident,
            commands::detach_alert_from_incident,
        ])
        .run(tauri::generate_context!())
        .expect("error while running tauri application");
}
|