Skip to content

Commit

Permalink
Add LastAlert and LastAlertToIncident and adjust incidents logic to it
Browse files Browse the repository at this point in the history
  • Loading branch information
VladimirFilonov committed Nov 12, 2024
1 parent b6894fd commit c22d6ac
Show file tree
Hide file tree
Showing 11 changed files with 609 additions and 283 deletions.
290 changes: 167 additions & 123 deletions keep/api/core/db.py

Large diffs are not rendered by default.

96 changes: 52 additions & 44 deletions keep/api/models/db/alert.py
Original file line number Diff line number Diff line change
Expand Up @@ -4,12 +4,13 @@
from typing import List, Optional
from uuid import UUID, uuid4

from pydantic import PrivateAttr
from sqlalchemy import ForeignKey, UniqueConstraint
from sqlalchemy.dialects.mssql import DATETIME2 as MSSQL_DATETIME2
from sqlalchemy.dialects.mysql import DATETIME as MySQL_DATETIME
from sqlalchemy.engine.url import make_url
from sqlalchemy_utils import UUIDType
from sqlmodel import JSON, TEXT, Column, DateTime, Field, Index, Relationship, SQLModel
from sqlmodel import JSON, TEXT, Column, DateTime, Field, Index, Relationship, SQLModel, Session

from keep.api.consts import RUNNING_IN_CLOUD_RUN
from keep.api.core.config import config
Expand Down Expand Up @@ -60,8 +61,8 @@ class AlertToIncident(SQLModel, table=True):
primary_key=True,
)
)
alert: "Alert" = Relationship(back_populates="alert_to_incident_link")
incident: "Incident" = Relationship(back_populates="alert_to_incident_link")
# alert: "Alert" = Relationship(back_populates="alert_to_incident_link")
# incident: "Incident" = Relationship(back_populates="alert_to_incident_link")

is_created_by_ai: bool = Field(default=False)

Expand All @@ -72,6 +73,49 @@ class AlertToIncident(SQLModel, table=True):
default=NULL_FOR_DELETED_AT,
)

class LastAlert(SQLModel, table=True):
    """Pointer to the most recent Alert row (by timestamp) for each fingerprint."""

    tenant_id: str = Field(foreign_key="tenant.id", nullable=False)
    # NOTE(review): fingerprint alone is the primary key, which assumes
    # fingerprints are globally unique across tenants — confirm, or consider
    # making (tenant_id, fingerprint) the composite key.
    fingerprint: str = Field(primary_key=True)
    # The Alert row that currently holds the latest timestamp for this fingerprint.
    alert_id: UUID = Field(foreign_key="alert.id")
    # Timestamp of that latest alert; indexed for recency-ordered queries.
    timestamp: datetime = Field(nullable=False, index=True)


class LastAlertToIncident(SQLModel, table=True):
    """Link table attaching the latest alert of a fingerprint to an incident.

    Successor to the per-alert ``AlertToIncident`` link: incident membership is
    tracked per fingerprint (via ``LastAlert``) rather than per individual
    ``Alert`` row, so re-firing alerts do not multiply link rows.
    """

    tenant_id: str = Field(foreign_key="tenant.id", nullable=False)
    # When the fingerprint was attached to the incident.
    timestamp: datetime = Field(default_factory=datetime.utcnow)

    fingerprint: str = Field(foreign_key="lastalert.fingerprint", primary_key=True)
    incident_id: UUID = Field(
        sa_column=Column(
            UUIDType(binary=False),
            ForeignKey("incident.id", ondelete="CASCADE"),
            primary_key=True,
        )
    )

    # True when the link was created by AI-driven correlation rather than a user.
    is_created_by_ai: bool = Field(default=False)

    # Soft-delete marker. Active links carry the NULL_FOR_DELETED_AT sentinel
    # (instead of SQL NULL) so the column can participate in the composite
    # primary key on every supported dialect.
    # Fix: the original also passed default_factory=None, which is redundant
    # next to default=... and conflicts with it if ever set to a callable.
    deleted_at: datetime = Field(
        nullable=True,
        primary_key=True,
        default=NULL_FOR_DELETED_AT,
    )


class Incident(SQLModel, table=True):
id: UUID = Field(default_factory=uuid4, primary_key=True)
Expand All @@ -96,25 +140,10 @@ class Incident(SQLModel, table=True):
end_time: datetime | None
last_seen_time: datetime | None

# map of attributes to values
alerts: List["Alert"] = Relationship(
back_populates="incidents",
link_model=AlertToIncident,
# primaryjoin is used to filter out deleted links for various DB dialects
sa_relationship_kwargs={
"primaryjoin": f"""and_(AlertToIncident.incident_id == Incident.id,
or_(
AlertToIncident.deleted_at == '{NULL_FOR_DELETED_AT.strftime('%Y-%m-%d %H:%M:%S.%f')}',
AlertToIncident.deleted_at == '{NULL_FOR_DELETED_AT.strftime('%Y-%m-%d %H:%M:%S')}'
))""",
"uselist": True,
"overlaps": "alert,incident",
},
)
alert_to_incident_link: List[AlertToIncident] = Relationship(
back_populates="incident",
sa_relationship_kwargs={"overlaps": "alerts,incidents"},
)
# alert_to_incident_link: List[LastAlertToIncident] = Relationship(
# back_populates="incident",
# sa_relationship_kwargs={"overlaps": "alerts,incidents"},
# )

is_predicted: bool = Field(default=False)
is_confirmed: bool = Field(default=False)
Expand Down Expand Up @@ -183,10 +212,7 @@ class Incident(SQLModel, table=True):
),
)

def __init__(self, **kwargs):
super().__init__(**kwargs)
if "alerts" not in kwargs:
self.alerts = []
_alerts: List["Alert"] = PrivateAttr()

class Config:
arbitrary_types_allowed = True
Expand Down Expand Up @@ -224,24 +250,6 @@ class Alert(SQLModel, table=True):
}
)

incidents: List["Incident"] = Relationship(
back_populates="alerts",
link_model=AlertToIncident,
sa_relationship_kwargs={
# primaryjoin is used to filter out deleted links for various DB dialects
"primaryjoin": f"""and_(AlertToIncident.alert_id == Alert.id,
or_(
AlertToIncident.deleted_at == '{NULL_FOR_DELETED_AT.strftime('%Y-%m-%d %H:%M:%S.%f')}',
AlertToIncident.deleted_at == '{NULL_FOR_DELETED_AT.strftime('%Y-%m-%d %H:%M:%S')}'
))""",
"uselist": True,
"overlaps": "alert,incident",
},
)
alert_to_incident_link: List[AlertToIncident] = Relationship(
back_populates="alert", sa_relationship_kwargs={"overlaps": "alerts,incidents"}
)

__table_args__ = (
Index(
"ix_alert_tenant_fingerprint_timestamp",
Expand Down
Original file line number Diff line number Diff line change
@@ -0,0 +1,180 @@
"""add lastalert and lastalerttoincident table
Revision ID: bdae8684d0b4
Revises: ef0b5b0df41c
Create Date: 2024-11-05 22:48:04.733192
"""
import warnings

import sqlalchemy as sa
import sqlalchemy_utils
import sqlmodel
from alembic import op
from sqlalchemy.dialects.postgresql import UUID
from sqlalchemy.orm import Session
from sqlalchemy.sql import expression
from sqlalchemy import exc as sa_exc

# revision identifiers, used by Alembic.
revision = "bdae8684d0b4"
down_revision = "ef0b5b0df41c"
branch_labels = None
depends_on = None

migration_metadata = sa.MetaData()
#
# alert_to_incident_table = sa.Table(
# 'alerttoincident',
# migration_metadata,
# sa.Column("tenant_id", sqlmodel.sql.sqltypes.AutoString(), nullable=False),
# sa.Column('alert_id', UUID(as_uuid=False), sa.ForeignKey('alert.id', ondelete='CASCADE'), primary_key=True),
# sa.Column('incident_id', UUID(as_uuid=False), sa.ForeignKey('incident.id', ondelete='CASCADE'), primary_key=True),
# sa.Column("timestamp", sa.DateTime(), nullable=False, server_default=sa.func.current_timestamp()),
# sa.Column("is_created_by_ai", sa.Boolean(), nullable=False, server_default=expression.false()),
# sa.Column("deleted_at", sa.DateTime(), nullable=False, server_default="1000-01-01 00:00:00"),
#
# )
#
# # The following code will show an SA warning about the dialect, so we suppress it.
# with warnings.catch_warnings():
# warnings.simplefilter("ignore", category=sa_exc.SAWarning)
# incident_table = sa.Table(
# 'incident',
# migration_metadata,
# sa.Column('id', UUID(as_uuid=False), primary_key=True),
# sa.Column('alerts_count', sa.Integer, default=0),
# sa.Column('affected_services', sa.JSON, default_factory=list),
# sa.Column('sources', sa.JSON, default_factory=list)
# )
#
# alert_table = sa.Table(
# 'alert',
# migration_metadata,
# sa.Column('id', UUID(as_uuid=False), primary_key=True),
# sa.Column('fingerprint', sa.String),
# sa.Column('provider_type', sa.String),
# sa.Column('event', sa.JSON)
# )

#
def populate_db():
    """Backfill ``lastalert`` and ``lastalerttoincident`` from existing rows.

    For each fingerprint, the alert with the greatest timestamp becomes its
    ``lastalert`` entry; existing ``alerttoincident`` links are then re-keyed
    from alert_id to fingerprint. Upsert syntax is dialect-specific:
    ``ON CONFLICT DO NOTHING`` on PostgreSQL, ``REPLACE INTO`` elsewhere
    (MySQL/SQLite).

    NOTE(review): the lastalert insert does not populate tenant_id, matching
    the table created in upgrade() — but the ``LastAlert`` model declares a
    non-nullable tenant_id. Confirm a follow-up migration adds the column.
    """
    session = Session(op.get_bind())

    if session.bind.dialect.name == "postgresql":
        migrate_lastalert_query = """
            insert into lastalert (fingerprint, alert_id, timestamp)
            select alert.fingerprint, alert.id as alert_id, alert.timestamp
            from alert
            join (
                select
                    alert.fingerprint, max(alert.timestamp) as last_received
                from alert
                group by fingerprint
            ) as a ON alert.fingerprint = a.fingerprint and alert.timestamp = a.last_received
            on conflict
            do nothing
        """

        migrate_lastalerttoincident_query = """
            insert into lastalerttoincident (incident_id, tenant_id, timestamp, fingerprint, is_created_by_ai, deleted_at)
            select ati.incident_id, ati.tenant_id, ati.timestamp, lf.fingerprint, ati.is_created_by_ai, ati.deleted_at
            from alerttoincident as ati
            join
            (
                select alert.id, alert.fingerprint
                from alert
                join (
                    select
                        alert.fingerprint, max(alert.timestamp) as last_received
                    from alert
                    group by fingerprint
                ) as a on alert.fingerprint = a.fingerprint and alert.timestamp = a.last_received
            ) as lf on ati.alert_id = lf.id
            on conflict
            do nothing
        """

    else:
        migrate_lastalert_query = """
            replace into lastalert (fingerprint, alert_id, timestamp)
            select alert.fingerprint, alert.id as alert_id, alert.timestamp
            from alert
            join (
                select
                    alert.fingerprint, max(alert.timestamp) as last_received
                from alert
                group by fingerprint
            ) as a ON alert.fingerprint = a.fingerprint and alert.timestamp = a.last_received;
        """

        migrate_lastalerttoincident_query = """
            replace into lastalerttoincident (incident_id, tenant_id, timestamp, fingerprint, is_created_by_ai, deleted_at)
            select ati.incident_id, ati.tenant_id, ati.timestamp, lf.fingerprint, ati.is_created_by_ai, ati.deleted_at
            from alerttoincident as ati
            join
            (
                select alert.id, alert.fingerprint
                from alert
                join (
                    select
                        alert.fingerprint, max(alert.timestamp) as last_received
                    from alert
                    group by fingerprint
                ) as a on alert.fingerprint = a.fingerprint and alert.timestamp = a.last_received
            ) as lf on ati.alert_id = lf.id
        """

    # sa.text() is required for textual SQL under SQLAlchemy 2.0; plain
    # strings to Session.execute() are deprecated/removed there.
    # (Also fixed the "lastalerttoincodent" typo in the local variable name.)
    session.execute(sa.text(migrate_lastalert_query))
    session.execute(sa.text(migrate_lastalerttoincident_query))


def upgrade() -> None:
    """Create the lastalert / lastalerttoincident tables and backfill them."""
    # NOTE(review): this table omits the tenant_id column that the LastAlert
    # model declares (non-nullable, FK to tenant.id) — confirm a follow-up
    # migration adds it.
    op.create_table(
        "lastalert",
        sa.Column("fingerprint", sqlmodel.sql.sqltypes.AutoString(), nullable=False),
        sa.Column("alert_id", sqlmodel.sql.sqltypes.GUID(), nullable=False),
        sa.Column("timestamp", sa.DateTime(), nullable=False),
        sa.ForeignKeyConstraint(
            ["alert_id"],
            ["alert.id"],
        ),
        sa.PrimaryKeyConstraint("fingerprint"),
    )
    # batch_alter_table so the index creation also works on SQLite.
    with op.batch_alter_table("lastalert", schema=None) as batch_op:
        batch_op.create_index(
            batch_op.f("ix_lastalert_timestamp"), ["timestamp"], unique=False
        )

    op.create_table(
        "lastalerttoincident",
        sa.Column(
            "incident_id",
            sqlalchemy_utils.types.uuid.UUIDType(binary=False),
            nullable=False,
        ),
        sa.Column("tenant_id", sqlmodel.sql.sqltypes.AutoString(), nullable=False),
        sa.Column("timestamp", sa.DateTime(), nullable=False),
        sa.Column("fingerprint", sqlmodel.sql.sqltypes.AutoString(), nullable=False),
        sa.Column("is_created_by_ai", sa.Boolean(), nullable=False),
        # deleted_at is part of the PK below, enabling soft-deleted duplicates
        # of the same (incident_id, fingerprint) pair.
        sa.Column("deleted_at", sa.DateTime(), nullable=True),
        sa.ForeignKeyConstraint(
            ["fingerprint"],
            ["lastalert.fingerprint"],
        ),
        sa.ForeignKeyConstraint(["incident_id"], ["incident.id"], ondelete="CASCADE"),
        sa.ForeignKeyConstraint(
            ["tenant_id"],
            ["tenant.id"],
        ),
        sa.PrimaryKeyConstraint("incident_id", "fingerprint", "deleted_at"),
    )

    # Backfill the new tables from existing alert / alerttoincident rows.
    populate_db()

def downgrade() -> None:
    """Drop the lastalerttoincident and lastalert tables (reverse of upgrade)."""
    # Drop the dependent link table first, then the index, then lastalert —
    # reverse order of creation so FK constraints are never violated.
    op.drop_table("lastalerttoincident")
    with op.batch_alter_table("lastalert", schema=None) as batch_op:
        batch_op.drop_index(batch_op.f("ix_lastalert_timestamp"))

    op.drop_table("lastalert")
2 changes: 1 addition & 1 deletion keep/api/routes/incidents.py
Original file line number Diff line number Diff line change
Expand Up @@ -614,7 +614,7 @@ def change_incident_status(
# TODO: same this change to audit table with the comment

if change.status == IncidentStatus.RESOLVED:
for alert in incident.alerts:
for alert in incident._alerts:
_enrich_alert(
EnrichAlertRequestBody(
enrichments={"status": "resolved"},
Expand Down
7 changes: 5 additions & 2 deletions keep/api/tasks/process_event_task.py
Original file line number Diff line number Diff line change
Expand Up @@ -22,7 +22,7 @@
get_alerts_by_fingerprint,
get_all_presets,
get_enrichment_with_session,
get_session_sync,
get_session_sync, set_last_alert,
)
from keep.api.core.dependencies import get_pusher_client
from keep.api.core.elastic import ElasticClient
Expand Down Expand Up @@ -184,6 +184,9 @@ def __save_to_db(
)
session.add(audit)
alert_dto = AlertDto(**formatted_event.dict())

set_last_alert(tenant_id, alert, session=session)

# Mapping
try:
enrichments_bl.run_mapping_rules(alert_dto)
Expand Down Expand Up @@ -400,7 +403,7 @@ def __handle_formatted_events(
# logger.info("Adding group alerts to the workflow manager queue")
# workflow_manager.insert_events(tenant_id, grouped_alerts)
# logger.info("Added group alerts to the workflow manager queue")
except Exception:
except Exception as ex:
logger.exception(
"Failed to run rules engine",
extra={
Expand Down
Loading

0 comments on commit c22d6ac

Please sign in to comment.