From 1f01ca5e39dc2b232d56ab8f04fa8886bea15720 Mon Sep 17 00:00:00 2001 From: Vinicius Mello Date: Tue, 9 Sep 2025 00:13:45 -0300 Subject: [PATCH 1/2] feat(tracing): enable custom inferenceId for trace decorator - Add dedicated inference_id field to Trace class instead of storing in metadata - Allow users to set custom inference ID via update_current_trace(inferenceId=...) - Maintain backward compatibility with auto-generated UUID fallback - Simplify post_process_trace logic by removing special metadata filtering - Update examples to demonstrate custom inference ID usage patterns BREAKING CHANGE: None - fully backward compatible Closes: #OPEN-7312 --- examples/tracing/trace_metadata_updates.py | 26 +++++++++++++--------- src/openlayer/lib/tracing/tracer.py | 3 ++- src/openlayer/lib/tracing/traces.py | 6 +++++ 3 files changed, 24 insertions(+), 11 deletions(-) diff --git a/examples/tracing/trace_metadata_updates.py b/examples/tracing/trace_metadata_updates.py index 67660d89..2f37e9a4 100644 --- a/examples/tracing/trace_metadata_updates.py +++ b/examples/tracing/trace_metadata_updates.py @@ -41,8 +41,11 @@ def handle_user_request(self, request_text: str, session_token: str) -> str: # Get user session (this info isn't available as function arguments) user_session = self.get_user_session(session_token) - # Set trace-level metadata with user context + # Set trace-level metadata with user context and custom inference ID + custom_inference_id = f"chat_{user_session.user_id}_{user_session.interaction_count}_{int(datetime.now().timestamp())}" + update_current_trace( + inferenceId=custom_inference_id, name=f"chat_request_{user_session.user_id}", user_id=user_session.user_id, tags=["chat", "user_request", user_session.preferences.get("tier", "free")], @@ -174,12 +177,22 @@ def make_formal(self, text: str) -> str: def batch_processing_example(): """Example showing batch processing with trace metadata updates.""" - # Set trace metadata for batch job + # Process multiple requests + test_requests = [ + ("Hello there!", "premium_session_123"), + ("What's the weather like?", "free_session_456"), + ("Help me with coding", "premium_session_789") + ] + + # Set trace metadata for batch job with custom batch ID + batch_inference_id = f"batch_{datetime.now().strftime('%Y%m%d_%H%M%S')}_{len(test_requests)}_items" + update_current_trace( + inferenceId=batch_inference_id, name="batch_user_requests", tags=["batch", "processing", "multiple_users"], metadata={ - "batch_size": 3, + "batch_size": len(test_requests), "processing_start": datetime.now().isoformat(), } ) @@ -187,13 +200,6 @@ def batch_processing_example(): app = ChatApplication() results = [] - # Process multiple requests - test_requests = [ - ("Hello there!", "premium_session_123"), - ("What's the weather like?", "free_session_456"), - ("Help me with coding", "premium_session_789") - ] - for i, (request, session) in enumerate(test_requests): result = app.handle_user_request(request, session) results.append(result) diff --git a/src/openlayer/lib/tracing/tracer.py b/src/openlayer/lib/tracing/tracer.py index a616eb2b..5a15a243 100644 --- a/src/openlayer/lib/tracing/tracer.py +++ b/src/openlayer/lib/tracing/tracer.py @@ -779,6 +779,7 @@ def update_current_trace(**kwargs) -> None: >>> def my_function(): >>> # Update trace with user context >>> update_current_trace( + >>> inferenceId="custom_inference_id", >>> user_id="user123", >>> session_id="sess456", >>> custom_field="any_value" @@ -1170,7 +1171,7 @@ def post_process_trace( trace_data = { 
"inferenceTimestamp": root_step.start_time, - "inferenceId": str(root_step.id), + "inferenceId": trace_obj.inference_id or str(root_step.id), "output": root_step.output, "latency": root_step.latency, "cost": processed_steps[0].get("cost", 0), diff --git a/src/openlayer/lib/tracing/traces.py b/src/openlayer/lib/tracing/traces.py index 2f483fc5..bb5b8e5c 100644 --- a/src/openlayer/lib/tracing/traces.py +++ b/src/openlayer/lib/tracing/traces.py @@ -16,6 +16,7 @@ def __init__(self): self.steps = [] self.current_step = None self.metadata: Optional[Dict[str, Any]] = None + self.inference_id: Optional[str] = None def add_step(self, step: Step) -> None: """Adds a step to the trace.""" @@ -26,11 +27,16 @@ def update_metadata(self, **kwargs) -> None: All provided key-value pairs will be stored in self.metadata. Special handling for 'metadata' key which gets merged with existing metadata. + Special handling for 'inferenceId' which gets stored in dedicated field. """ # Initialize metadata if it doesn't exist if self.metadata is None: self.metadata = {} + # Handle special case for inferenceId - store in dedicated field + if 'inferenceId' in kwargs: + self.inference_id = kwargs.pop('inferenceId') + # Handle special case for 'metadata' key - merge with existing if 'metadata' in kwargs: metadata_to_merge = kwargs.pop('metadata') From ad4984b3dd6786c03bf9a58bee80f231773f40fe Mon Sep 17 00:00:00 2001 From: "stainless-app[bot]" <142633134+stainless-app[bot]@users.noreply.github.com> Date: Tue, 9 Sep 2025 04:23:22 +0000 Subject: [PATCH 2/2] release: 0.2.0-alpha.91 --- .release-please-manifest.json | 2 +- CHANGELOG.md | 12 ++++++++++++ pyproject.toml | 2 +- src/openlayer/_version.py | 2 +- 4 files changed, 15 insertions(+), 3 deletions(-) diff --git a/.release-please-manifest.json b/.release-please-manifest.json index a29a45e2..9fbe9bab 100644 --- a/.release-please-manifest.json +++ b/.release-please-manifest.json @@ -1,3 +1,3 @@ { - ".": "0.2.0-alpha.90" + ".": "0.2.0-alpha.91" } \ No newline at end of file diff --git a/CHANGELOG.md b/CHANGELOG.md index 9ed70526..16d1376a 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -5,6 +5,18 @@ All notable changes to this project will be documented in this file. The format is based on [Keep a Changelog](http://keepachangelog.com/en/1.0.0/) and this project adheres to [Semantic Versioning](http://semver.org/spec/v2.0.0.html). 
+## 0.2.0-alpha.91 (2025-09-09) + +Full Changelog: [v0.2.0-alpha.90...v0.2.0-alpha.91](https://github.com/openlayer-ai/openlayer-python/compare/v0.2.0-alpha.90...v0.2.0-alpha.91) + +### ⚠ BREAKING CHANGES + +* **tracing:** None - fully backward compatible + +### Features + +* **tracing:** enable custom inferenceId for trace decorator ([1f01ca5](https://github.com/openlayer-ai/openlayer-python/commit/1f01ca5e39dc2b232d56ab8f04fa8886bea15720)) + ## 0.2.0-alpha.90 (2025-09-09) Full Changelog: [v0.2.0-alpha.89...v0.2.0-alpha.90](https://github.com/openlayer-ai/openlayer-python/compare/v0.2.0-alpha.89...v0.2.0-alpha.90) diff --git a/pyproject.toml b/pyproject.toml index d4bd55f1..173831a8 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,6 +1,6 @@ [project] name = "openlayer" -version = "0.2.0-alpha.90" +version = "0.2.0-alpha.91" description = "The official Python library for the openlayer API" dynamic = ["readme"] license = "Apache-2.0" diff --git a/src/openlayer/_version.py b/src/openlayer/_version.py index e47d2e49..a7acd4a6 100644 --- a/src/openlayer/_version.py +++ b/src/openlayer/_version.py @@ -1,4 +1,4 @@ # File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. __title__ = "openlayer" -__version__ = "0.2.0-alpha.90" # x-release-please-version +__version__ = "0.2.0-alpha.91" # x-release-please-version
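
Usage sketch (not part of the patches above): with this change, a caller can attach its own inference ID to a traced request instead of relying on the auto-generated step UUID. The example below is a minimal illustration, assuming the `trace` decorator and the `update_current_trace` helper are both used from `openlayer.lib.tracing.tracer`, as the patched docstring in tracer.py suggests; the function name and ID format are illustrative only.

    import time

    from openlayer.lib.tracing import tracer


    @tracer.trace()
    def handle_request(question: str, user_id: str) -> str:
        # Attach a caller-controlled inference ID so the published row can be
        # joined back to application logs; any unique string works here.
        custom_id = f"chat_{user_id}_{int(time.time())}"
        tracer.update_current_trace(inferenceId=custom_id)
        return f"Echo: {question}"


    if __name__ == "__main__":
        print(handle_request("Hello there!", "user123"))

When no custom ID is supplied, `post_process_trace` falls back to `str(root_step.id)` (see the `trace_obj.inference_id or str(root_step.id)` change in tracer.py), so existing integrations keep their auto-generated UUIDs unchanged.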