small refactor
cathteng committed Jan 6, 2025
1 parent d4c7826 commit 21456b7
Showing 2 changed files with 29 additions and 48 deletions.
@@ -11,6 +11,7 @@
 from sentry.issues.constants import get_issue_tsdb_group_model
 from sentry.issues.grouptype import GroupCategory, get_group_type_by_type_id
 from sentry.models.group import Group
+from sentry.rules.conditions.event_frequency import SNUBA_LIMIT, STANDARD_INTERVALS
 from sentry.tsdb.base import TSDBModel
 from sentry.utils import json
 from sentry.utils.iterators import chunked
@@ -19,25 +20,6 @@
 from sentry.workflow_engine.registry import condition_handler_registry
 from sentry.workflow_engine.types import DataConditionHandler, DataConditionResult
 
-SNUBA_LIMIT = 10000
-STANDARD_INTERVALS: dict[str, tuple[str, timedelta]] = {
-    "1m": ("one minute", timedelta(minutes=1)),
-    "5m": ("5 minutes", timedelta(minutes=5)),
-    "15m": ("15 minutes", timedelta(minutes=15)),
-    "1h": ("one hour", timedelta(hours=1)),
-    "1d": ("one day", timedelta(hours=24)),
-    "1w": ("one week", timedelta(days=7)),
-    "30d": ("30 days", timedelta(days=30)),
-}
-COMPARISON_INTERVALS: dict[str, tuple[str, timedelta]] = {
-    "5m": ("5 minutes", timedelta(minutes=5)),
-    "15m": ("15 minutes", timedelta(minutes=15)),
-    "1h": ("one hour", timedelta(hours=1)),
-    "1d": ("one day", timedelta(hours=24)),
-    "1w": ("one week", timedelta(days=7)),
-    "30d": ("30 days", timedelta(days=30)),
-}
-
 
 class _QSTypedDict(TypedDict):
     id: int
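
These interval tables are no longer duplicated in this handler; they are imported from `sentry.rules.conditions.event_frequency` (see the added import above). A minimal sketch of their shape and of a hypothetical lookup helper; `resolve_interval` is illustrative only and not part of the commit:

```python
from datetime import datetime, timedelta, timezone

# Values copied from the removed duplicate; the canonical definitions are assumed
# to live in sentry.rules.conditions.event_frequency, which the handler now imports.
SNUBA_LIMIT = 10000
STANDARD_INTERVALS: dict[str, tuple[str, timedelta]] = {
    "1m": ("one minute", timedelta(minutes=1)),
    "5m": ("5 minutes", timedelta(minutes=5)),
    "15m": ("15 minutes", timedelta(minutes=15)),
    "1h": ("one hour", timedelta(hours=1)),
    "1d": ("one day", timedelta(hours=24)),
    "1w": ("one week", timedelta(days=7)),
    "30d": ("30 days", timedelta(days=30)),
}


def resolve_interval(key: str) -> tuple[str, datetime]:
    """Hypothetical helper: map an interval key to (label, query start time)."""
    label, delta = STANDARD_INTERVALS[key]
    return label, datetime.now(timezone.utc) - delta


label, start = resolve_interval("1h")  # ("one hour", one hour before now)
```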
@@ -215,31 +197,30 @@ def batch_query(
         error_issue_ids, generic_issue_ids = self.get_error_and_generic_group_ids(groups)
         organization_id = self.get_value_from_groups(groups, "project__organization_id")
 
-        if error_issue_ids and organization_id:
-            error_sums = self.get_chunked_result(
+        if not organization_id:
+            return batch_sums
+
+        def get_result(model, group_ids):
+            return self.get_chunked_result(
                 tsdb_function=tsdb.backend.get_sums,
-                model=get_issue_tsdb_group_model(GroupCategory.ERROR),
-                group_ids=error_issue_ids,
+                model=model,
+                group_ids=group_ids,
                 organization_id=organization_id,
                 start=start,
                 end=end,
                 environment_id=environment_id,
                 referrer_suffix="batch_alert_event_frequency",
             )
-            batch_sums.update(error_sums)
 
-        if generic_issue_ids and organization_id:
-            generic_sums = self.get_chunked_result(
-                tsdb_function=tsdb.backend.get_sums,
-                # this isn't necessarily performance, just any non-error category
-                model=get_issue_tsdb_group_model(GroupCategory.PERFORMANCE),
-                group_ids=generic_issue_ids,
-                organization_id=organization_id,
-                start=start,
-                end=end,
-                environment_id=environment_id,
-                referrer_suffix="batch_alert_event_frequency",
-            )
-            batch_sums.update(generic_sums)
+        if error_issue_ids:
+            batch_sums.update(
+                get_result(get_issue_tsdb_group_model(GroupCategory.ERROR), error_issue_ids)
+            )
+
+        if generic_issue_ids:
+            # this isn't necessarily performance, just any non-error category
+            batch_sums.update(
+                get_result(get_issue_tsdb_group_model(GroupCategory.PERFORMANCE), generic_issue_ids)
+            )
 
         return batch_sums
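
The change above folds two nearly identical `get_chunked_result` calls into a local `get_result` helper that closes over the shared arguments, and replaces the repeated `and organization_id` guards with a single early return. A self-contained sketch of the same pattern, using stand-in names (`fetch_sums`, the category strings) rather than Sentry's real APIs:

```python
def fetch_sums(model: str, group_ids: list[int], organization_id: int) -> dict[int, int]:
    # Stand-in for the backend call; returns a count per group id.
    return {gid: 1 for gid in group_ids}


def batch_query(
    error_ids: list[int], generic_ids: list[int], organization_id: int | None
) -> dict[int, int]:
    batch_sums: dict[int, int] = {}
    if not organization_id:  # single early return replaces per-branch guards
        return batch_sums

    def get_result(model: str, group_ids: list[int]) -> dict[int, int]:
        # Shared arguments are captured once; only model and group_ids vary per call.
        return fetch_sums(model=model, group_ids=group_ids, organization_id=organization_id)

    if error_ids:
        batch_sums.update(get_result("error", error_ids))
    if generic_ids:
        batch_sums.update(get_result("generic", generic_ids))
    return batch_sums


print(batch_query([1, 2], [7], organization_id=42))  # -> {1: 1, 2: 1, 7: 1}
```

Behaviour is unchanged: each category contributes its sums only when it has group ids and an organization id is known.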
src/sentry/workflow_engine/models/data_condition.py (12 additions & 12 deletions)
@@ -74,18 +74,6 @@ class DataCondition(DefaultFieldsModel):
         on_delete=models.CASCADE,
     )
 
-    @property
-    def slow_conditions(self) -> list[Condition]:
-        return [
-            Condition.EVENT_FREQUENCY,
-            Condition.EVENT_UNIQUE_USER_FREQUENCY,
-            Condition.EVENT_FREQUENCY_PERCENT,
-            Condition.EVENT_UNIQUE_USER_FREQUENCY_WITH_CONDITIONS,
-        ]
-
-    def is_slow_condition(self):
-        return Condition(self.type) in self.slow_conditions
-
     def get_condition_result(self) -> DataConditionResult:
         match self.condition_result:
             case float() | bool():
@@ -131,3 +119,15 @@ def evaluate_value(self, value: T) -> DataConditionResult:
 
         result = handler.evaluate_value(value, self.comparison)
         return self.get_condition_result() if result else None
+
+
+SLOW_CONDITIONS = [
+    Condition.EVENT_FREQUENCY,
+    Condition.EVENT_UNIQUE_USER_FREQUENCY,
+    Condition.EVENT_FREQUENCY_PERCENT,
+    Condition.EVENT_UNIQUE_USER_FREQUENCY_WITH_CONDITIONS,
+]
+
+
+def is_slow_condition(cond: DataCondition) -> bool:
+    return Condition(cond.type) in SLOW_CONDITIONS
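
The slow-condition check moves from an instance property and method on the model to a module-level `SLOW_CONDITIONS` list plus a free `is_slow_condition(cond)` function, so the list is built once and callers pass the condition in. A minimal sketch of how a call site migrates, with stand-ins for the real `Condition` enum and Django model (only the pieces the check touches):

```python
from dataclasses import dataclass
from enum import Enum


class Condition(Enum):
    # Stand-in for the real Condition enum; only two illustrative members.
    EVENT_FREQUENCY = "event_frequency"
    FIRST_SEEN_EVENT = "first_seen_event"  # hypothetical "fast" condition for contrast


SLOW_CONDITIONS = [Condition.EVENT_FREQUENCY]


@dataclass
class DataCondition:
    # Stand-in for the Django model; only the field the check reads.
    type: str


def is_slow_condition(cond: DataCondition) -> bool:
    return Condition(cond.type) in SLOW_CONDITIONS


# Call sites change from `cond.is_slow_condition()` to the free function:
assert is_slow_condition(DataCondition(type="event_frequency"))
assert not is_slow_condition(DataCondition(type="first_seen_event"))
```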
