Commit c981d91

fix(llma): address review feedback for $ai_stop_reason
- Fix ruff formatting in utils.py
- Deduplicate extract_gemini_stop_reason_from_chunk to delegate to extract_gemini_stop_reason
- Parameterize stop_reason_captured and stop_reason_max_tokens tests
1 parent 0a56e8d commit c981d91

3 files changed: 18 additions & 50 deletions


posthog/ai/gemini/gemini_converter.py
Lines changed: 1 addition & 8 deletions

@@ -302,14 +302,7 @@ def extract_gemini_stop_reason(response: Any) -> Optional[str]:
 
 def extract_gemini_stop_reason_from_chunk(chunk: Any) -> Optional[str]:
     """Extract stop reason from a Gemini streaming chunk."""
-    if chunk and hasattr(chunk, "candidates") and chunk.candidates:
-        candidate = chunk.candidates[0]
-        finish_reason = getattr(candidate, "finish_reason", None)
-        if finish_reason is not None:
-            if hasattr(finish_reason, "name"):
-                return finish_reason.name
-            return str(finish_reason)
-    return None
+    return extract_gemini_stop_reason(chunk)
 
 
 def extract_gemini_system_instruction(config: Any) -> Optional[str]:
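Both a full Gemini response and a streaming chunk expose the stop reason through candidates[0].finish_reason, which is why the chunk helper can simply delegate. A minimal sketch of the shared logic, assuming extract_gemini_stop_reason mirrors the pattern the deleted chunk-specific code used (illustration only, not the actual gemini_converter.py source):

# Sketch of the deduplicated extraction path (assumption: the real
# extract_gemini_stop_reason follows the same candidates[0].finish_reason
# logic that the deleted chunk helper contained).
from typing import Any, Optional


def extract_gemini_stop_reason(response: Any) -> Optional[str]:
    """Return the finish reason name from a Gemini response or streaming chunk."""
    if response and hasattr(response, "candidates") and response.candidates:
        candidate = response.candidates[0]
        finish_reason = getattr(candidate, "finish_reason", None)
        if finish_reason is not None:
            # Enum-style finish reasons expose .name (e.g. "STOP", "MAX_TOKENS").
            if hasattr(finish_reason, "name"):
                return finish_reason.name
            return str(finish_reason)
    return None


def extract_gemini_stop_reason_from_chunk(chunk: Any) -> Optional[str]:
    """Streaming chunks carry the same candidate structure, so delegate."""
    return extract_gemini_stop_reason(chunk)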

posthog/ai/utils.py
Lines changed: 3 additions & 1 deletion

@@ -240,7 +240,9 @@ def extract_stop_reason(response: Any, provider: str) -> Optional[str]:
 
         return extract_openai_stop_reason(response)
     elif provider == "anthropic":
-        from posthog.ai.anthropic.anthropic_converter import extract_anthropic_stop_reason
+        from posthog.ai.anthropic.anthropic_converter import (
+            extract_anthropic_stop_reason,
+        )
 
         return extract_anthropic_stop_reason(response)
     elif provider == "gemini":
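The utils.py change is formatting only: the over-long from-import is wrapped in parentheses to satisfy the formatter. A minimal before/after sketch, assuming ruff's default line-length limit (the repository's configured limit may differ):

# Before: a single from-import that exceeds the formatter's line limit.
# from posthog.ai.anthropic.anthropic_converter import extract_anthropic_stop_reason

# After: the parenthesized form ruff produces, one name per line with a
# trailing comma so adding another import later yields a one-line diff.
from posthog.ai.anthropic.anthropic_converter import (
    extract_anthropic_stop_reason,
)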

posthog/test/ai/gemini/test_gemini.py
Lines changed: 14 additions & 41 deletions

@@ -1168,45 +1168,18 @@ def test_empty_array_grounding_metadata_no_web_search(
     assert props["$ai_output_tokens"] == 12
 
 
-def test_stop_reason_captured(mock_client, mock_google_genai_client):
-    mock_response = MagicMock()
-    mock_response.text = "Test"
-
-    mock_usage = MagicMock()
-    mock_usage.prompt_token_count = 10
-    mock_usage.candidates_token_count = 5
-    mock_usage.cached_content_token_count = 0
-    mock_usage.thoughts_token_count = 0
-    mock_usage.model_dump.return_value = {
-        "prompt_token_count": 10,
-        "candidates_token_count": 5,
-    }
-    mock_response.usage_metadata = mock_usage
-
-    mock_candidate = MagicMock()
-    mock_candidate.finish_reason = MagicMock()
-    mock_candidate.finish_reason.name = "STOP"
-    mock_candidate.content = MagicMock()
-    mock_candidate.content.parts = [MagicMock(text="Test")]
-    mock_response.candidates = [mock_candidate]
-
-    mock_google_genai_client.models.generate_content.return_value = mock_response
-
-    client = Client(api_key="test-key", posthog_client=mock_client)
-    client.models.generate_content(
-        model="gemini-2.0-flash",
-        contents=["Hello"],
-        posthog_distinct_id="test-id",
-    )
-
-    assert mock_client.capture.call_count == 1
-    props = mock_client.capture.call_args[1]["properties"]
-    assert props["$ai_stop_reason"] == "STOP"
-
-
-def test_stop_reason_max_tokens(mock_client, mock_google_genai_client):
+@pytest.mark.parametrize(
+    "finish_reason_name,response_text",
+    [
+        ("STOP", "Test"),
+        ("MAX_TOKENS", "Truncated"),
+    ],
+)
+def test_stop_reason_captured(
+    mock_client, mock_google_genai_client, finish_reason_name, response_text
+):
     mock_response = MagicMock()
-    mock_response.text = "Truncated"
+    mock_response.text = response_text
 
     mock_usage = MagicMock()
     mock_usage.prompt_token_count = 10
@@ -1221,9 +1194,9 @@ def test_stop_reason_max_tokens(mock_client, mock_google_genai_client):
 
     mock_candidate = MagicMock()
     mock_candidate.finish_reason = MagicMock()
-    mock_candidate.finish_reason.name = "MAX_TOKENS"
+    mock_candidate.finish_reason.name = finish_reason_name
     mock_candidate.content = MagicMock()
-    mock_candidate.content.parts = [MagicMock(text="Truncated")]
+    mock_candidate.content.parts = [MagicMock(text=response_text)]
     mock_response.candidates = [mock_candidate]
 
     mock_google_genai_client.models.generate_content.return_value = mock_response
@@ -1236,7 +1209,7 @@ def test_stop_reason_max_tokens(mock_client, mock_google_genai_client):
     )
 
     props = mock_client.capture.call_args[1]["properties"]
-    assert props["$ai_stop_reason"] == "MAX_TOKENS"
+    assert props["$ai_stop_reason"] == finish_reason_name
 
 
 def test_stop_reason_streaming(mock_client, mock_google_genai_client):
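Parameterizing collapses the two near-duplicate tests into one function that pytest runs once per case. A minimal sketch of how the expansion behaves, assuming standard pytest collection and omitting the Gemini client mocks for brevity:

import pytest


@pytest.mark.parametrize(
    "finish_reason_name,response_text",
    [
        ("STOP", "Test"),  # normal completion
        ("MAX_TOKENS", "Truncated"),  # generation hit the token limit
    ],
)
def test_stop_reason_captured(finish_reason_name, response_text):
    # pytest collects one test per parameter tuple:
    #   test_stop_reason_captured[STOP-Test]
    #   test_stop_reason_captured[MAX_TOKENS-Truncated]
    assert isinstance(finish_reason_name, str)
    assert isinstance(response_text, str)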
