Skip to content

Commit

Permalink
interesting protobuf
Browse files — browse the repository at this point in the history
  • Loading branch information
Rachel Chen authored and Rachel Chen committed Dec 11, 2024
1 parent 81d150b commit 4cd3417
Show file tree
Hide file tree
Showing 3 changed files with 81 additions and 30 deletions.
2 changes: 0 additions & 2 deletions snuba/web/db_query.py
Original file line number Diff line number Diff line change
Expand Up @@ -176,8 +176,6 @@ def execute_query(
# Apply clickhouse query setting overrides
clickhouse_query_settings.update(query_settings.get_clickhouse_settings())

print("lookehre", formatted_query)

result = reader.execute(
formatted_query,
clickhouse_query_settings,
Expand Down
1 change: 0 additions & 1 deletion snuba/web/rpc/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -140,7 +140,6 @@ def run_rpc_handler(

try:
deserialized_protobuf = endpoint.parse_from_string(data)
print("requesttttt", deserialized_protobuf)
except DecodeError as e:
return convert_rpc_exception_to_proto(
RPCRequestException(
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -947,7 +947,7 @@ def test_same_column_name(self) -> None:
assert result.column_values[3].attribute_name == "tags[foo]"
assert result.column_values[3].results[0].val_str == "five"

def test_rounding(self) -> None:
def test_floats_calculations(self) -> None:
spans_storage = get_storage(StorageKey("eap_spans"))
start = BASE_TIME
messages = [
Expand Down Expand Up @@ -1024,15 +1024,6 @@ def test_rounding(self) -> None:
},
"label": "avg_sample(sampling_rate)",
},
{
"aggregation": {
"aggregate": "FUNCTION_COUNT",
"key": {"type": "TYPE_FLOAT", "name": "sentry.duration_ms"},
"label": "count()",
"extrapolationMode": "EXTRAPOLATION_MODE_SAMPLE_WEIGHTED",
},
"label": "count()",
},
{
"aggregation": {
"aggregate": "FUNCTION_MIN",
Expand All @@ -1042,33 +1033,96 @@ def test_rounding(self) -> None:
},
"label": "min(sampling_rate)",
},
{
"aggregation": {
"aggregate": "FUNCTION_COUNT",
"key": {"type": "TYPE_FLOAT", "name": "sentry.duration_ms"},
"label": "count_sample()",
"extrapolationMode": "EXTRAPOLATION_MODE_NONE",
},
"label": "count_sample()",
},
],
"limit": 101,
}

err_msg = ParseDict(err_req, TraceItemTableRequest())
result = EndpointTraceItemTable().execute(err_msg)

assert result.column_values[0].attribute_name == "avg_sample(sampling_rate)"
assert result.column_values[0].results[0].val_float == 0.475
# this passes
assert result.column_values == [
TraceItemColumnValues(
attribute_name="avg_sample(sampling_rate)",
results=[
AttributeValue(val_float=0.475),
],
),
TraceItemColumnValues(
attribute_name="min(sampling_rate)",
results=[
AttributeValue(val_float=0.1),
],
),
]

# these error
# assert result.column_values[0].attribute_name == "avg_sample(sampling_rate)"
# assert result.column_values[0].results[0].val_float == 0.475
#
# assert result.column_values[1].attribute_name == "min(sampling_rate)"
# assert result.column_values[1].results[0].val_float == 0.1

def test_single_float_retrieval(self) -> None:
spans_storage = get_storage(StorageKey("eap_spans"))
start = BASE_TIME
messages = [
{
"is_segment": False,
"retention_days": 90,
"tags": {},
"sentry_tags": {"status": "success"},
"measurements": {"client_sample_rate": {"value": 0.123456}},
"event_id": "df1626f2c20249368d32cbc7bedc58b6",
"organization_id": 1,
"project_id": 1,
"trace_id": "724cb5bc3e9843e39dba73c7ec2909ab",
"span_id": "3a3ff57148b14923",
"parent_span_id": "87f08db2b78848c7",
"segment_id": "b6684d253c934ea3",
"group_raw": "30cff40b57554d8a",
"profile_id": "1f2e11173706458f9010599631234fc4",
"start_timestamp_ms": int(start.timestamp()) * 1000
- int(random.gauss(1000, 200)),
"start_timestamp_precise": start.timestamp(),
"end_timestamp_precise": start.timestamp() + 1,
"received": 1721319572.877828,
"duration_ms": 152,
"exclusive_time_ms": 0.228,
"description": "foo",
"ingest_in_eap": True,
},
]
write_raw_unprocessed_events(spans_storage, messages) # type: ignore

assert result.column_values[2].attribute_name == "min(sampling_rate)"
assert result.column_values[2].results[0].val_float == 0.1
ts = Timestamp(seconds=int(BASE_TIME.timestamp()))
hour_ago = Timestamp(seconds=int((BASE_TIME - timedelta(hours=1)).timestamp()))
err_req = {
"meta": {
"organizationId": 1,
"referrer": "api.organization-events",
"projectIds": [1],
"startTimestamp": hour_ago.ToJsonString(),
"endTimestamp": ts.ToJsonString(),
},
"columns": [
{
"key": {"type": "TYPE_FLOAT", "name": "sentry.sampling_factor"},
},
],
}

assert result.column_values[3].attribute_name == "count_sample()"
assert result.column_values[3].results[0].val_int == 2
err_msg = ParseDict(err_req, TraceItemTableRequest())
result = EndpointTraceItemTable().execute(err_msg)

assert result.column_values[1].attribute_name == "count()"
assert result.column_values[1].results[0].val_int == 11
assert result.column_values == [
TraceItemColumnValues(
attribute_name="sentry.sampling_factor",
results=[
AttributeValue(val_float=0.123456),
],
),
]


class TestUtils:
Expand Down

0 comments on commit 4cd3417

Please sign in to comment.