Skip to content
Open
Show file tree
Hide file tree
Changes from 5 commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
2 changes: 1 addition & 1 deletion pyproject.toml
Original file line number Diff line number Diff line change
Expand Up @@ -31,7 +31,7 @@ dependencies = [
"sentry-arroyo>=2.38.7",
"sentry-conventions>=0.3.0",
"sentry-kafka-schemas>=2.1.24",
"sentry-protos>=0.7.0",
"sentry-protos>=0.8.9",
"sentry-redis-tools>=0.5.1",
"sentry-relay>=0.9.25",
"sentry-sdk>=2.35.0",
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -145,16 +145,28 @@ def transform_expressions(expression: Expression) -> Expression:
return expression
context = mapped_column_to_context.get(str(expression.key.value))
if context:
source_type = (
context.from_column_type
or NORMALIZED_COLUMNS_EAP_ITEMS.get(
context.from_column_name, [AttributeKey.TYPE_STRING]
)[0]
)
assert source_type in (
AttributeKey.Type.TYPE_STRING,
AttributeKey.Type.TYPE_INT,
), "VCC can only map string or int attributes"
attribute_expression = attribute_key_to_expression(
AttributeKey(
name=context.from_column_name,
type=NORMALIZED_COLUMNS_EAP_ITEMS.get(
context.from_column_name, [AttributeKey.TYPE_STRING]
)[0],
type=source_type,
)
)
return f.transform(
f.CAST(f.ifNull(attribute_expression, literal("")), "String"),
if_cond(
f.isNull(attribute_expression),
literal(""),
f.toString(attribute_expression),
),
literals_array(None, [literal(k) for k in context.value_map.keys()]),
literals_array(None, [literal(v) for v in context.value_map.values()]),
literal(context.default_value if context.default_value != "" else "unknown"),
Expand Down Expand Up @@ -608,7 +620,10 @@ def _get_page_token(
# the routing strategy will properly truncate the time window of the next request
return FlexibleTimeWindowPageWithFilters.create(
request,
TimeWindow(original_time_window.start_timestamp, time_window.start_timestamp),
TimeWindow(
original_time_window.start_timestamp,
time_window.start_timestamp,
),
response,
).page_token
else:
Expand Down Expand Up @@ -677,10 +692,15 @@ def resolve(
except Exception as e:
sentry_sdk.capture_message(f"Error merging clickhouse settings: {e}")
original_time_window = TimeWindow(
start_timestamp=in_msg.meta.start_timestamp, end_timestamp=in_msg.meta.end_timestamp
start_timestamp=in_msg.meta.start_timestamp,
end_timestamp=in_msg.meta.end_timestamp,
)
snuba_request = _build_snuba_request(
in_msg, query_settings, routing_decision.time_window, routing_decision.tier, self._timer
in_msg,
query_settings,
routing_decision.time_window,
routing_decision.tier,
self._timer,
)
res = run_query(
dataset=PluggableDataset(name="eap", all_entities=[]),
Expand Down
Original file line number Diff line number Diff line change
@@ -1,6 +1,6 @@
import random
import re
from datetime import datetime, timedelta
from datetime import datetime, timedelta, timezone
from math import isclose
from typing import Any
from unittest.mock import MagicMock, call, patch
Expand Down Expand Up @@ -3471,6 +3471,105 @@ def test_multiply_attribute_aggregation(self) -> None:
assert len(res.results) == 1
assert isclose(res.results[0].val_double, expected_avg)

def test_occurrence_virtual_column_mapping(self) -> None:
"""
Reproduces the request shape sent by api.organization-events for the issues
view: OCCURRENCE items with virtual column contexts that remap group_id ->
issue and sentry.project_id -> project / project.name.
"""
org_id = 1
project_id = 1
item_ts = datetime.fromtimestamp(1773929000, tz=timezone.utc)

items_storage = get_storage(StorageKey("eap_items"))
write_raw_unprocessed_events(
items_storage, # type: ignore
[
gen_item_message(
start_timestamp=item_ts,
type=TraceItemType.TRACE_ITEM_TYPE_OCCURRENCE,
attributes={"group_id": AnyValue(int_value=1)},
project_id=project_id,
organization_id=org_id,
remove_default_attributes=True,
)
],
)

message = TraceItemTableRequest(
meta=RequestMeta(
organization_id=org_id,
referrer="api.organization-events",
project_ids=[project_id],
start_timestamp=Timestamp(seconds=1773925780),
end_timestamp=Timestamp(seconds=1773933040),
trace_item_type=TraceItemType.TRACE_ITEM_TYPE_OCCURRENCE,
downsampled_storage_config=DownsampledStorageConfig(
mode=DownsampledStorageConfig.MODE_NORMAL,
),
),
columns=[
Column(
key=AttributeKey(type=AttributeKey.TYPE_STRING, name="issue"),
label="issue",
),
Column(
key=AttributeKey(type=AttributeKey.TYPE_INT, name="group_id"),
label="group_id",
),
Column(
key=AttributeKey(type=AttributeKey.TYPE_STRING, name="project"),
label="project",
),
Column(
key=AttributeKey(type=AttributeKey.TYPE_STRING, name="sentry.item_id"),
label="id",
),
Column(
key=AttributeKey(type=AttributeKey.TYPE_STRING, name="project.name"),
label="project.name",
),
],
limit=101,
page_token=PageToken(offset=0),
virtual_column_contexts=[
VirtualColumnContext(
from_column_name="group_id",
to_column_name="issue",
value_map={"1": "BAR-1"},
from_column_type=AttributeKey.TYPE_INT,
),
VirtualColumnContext(
from_column_name="sentry.project_id",
to_column_name="project",
value_map={str(project_id): "bar"},
),
VirtualColumnContext(
from_column_name="sentry.project_id",
to_column_name="project.name",
value_map={str(project_id): "bar"},
),
],
)

response = EndpointTraceItemTable().execute(message)

assert [c.attribute_name for c in response.column_values] == [
"issue",
"group_id",
"project",
"id",
"project.name",
]

col = {c.attribute_name: c for c in response.column_values}
assert col["issue"].results == [AttributeValue(val_str="BAR-1")]
assert col["group_id"].results == [AttributeValue(val_int=1)]
assert col["project"].results == [AttributeValue(val_str="bar")]
assert col["project.name"].results == [AttributeValue(val_str="bar")]
assert len(col["id"].results) == 1
assert col["id"].results[0].val_str != ""


def _str_array(*values: str) -> AnyValue:
    """Wrap the given strings in an AnyValue holding an array of string AnyValues."""
    elements = [AnyValue(string_value=item) for item in values]
    return AnyValue(array_value=ArrayValue(values=elements))
Expand Down
8 changes: 4 additions & 4 deletions uv.lock

Some generated files are not rendered by default. Learn more about how customized files appear on GitHub.

Loading