Spaces:
Paused
Paused
Mirrowel committed on
Commit ·
1eca168
1
Parent(s): 10dabd5
ci: pin release to commit SHA and refine client stream logging
Browse files
- Pin GitHub release creation to the exact commit SHA that triggered the workflow. This ensures releases are accurately linked to the build source, preventing potential discrepancies from rapid pushes.
- Elevate specific log messages related to JSON stream processing from debug to info level in the client library. Messages regarding incomplete buffers and successful JSON reassembly now provide better default visibility into streaming behavior, enhancing observability.
.github/workflows/build.yml
CHANGED
|
@@ -334,6 +334,7 @@ jobs:
|
|
| 334 |
|
| 335 |
# Create the release using the notes file
|
| 336 |
gh release create ${{ steps.version.outputs.release_tag }} \
|
|
|
|
| 337 |
--title "${{ steps.version.outputs.release_title }}" \
|
| 338 |
--notes-file releasenotes.md \
|
| 339 |
--latest \
|
|
|
|
| 334 |
|
| 335 |
# Create the release using the notes file
|
| 336 |
gh release create ${{ steps.version.outputs.release_tag }} \
|
| 337 |
+
--target ${{ github.sha }} \
|
| 338 |
--title "${{ steps.version.outputs.release_title }}" \
|
| 339 |
--notes-file releasenotes.md \
|
| 340 |
--latest \
|
src/rotator_library/client.py
CHANGED
|
@@ -204,7 +204,7 @@ class RotatingClient:
|
|
| 204 |
except StopAsyncIteration:
|
| 205 |
stream_completed = True
|
| 206 |
if json_buffer:
|
| 207 |
-
lib_logger.debug(f"Stream ended with incomplete data in buffer: {json_buffer}")
|
| 208 |
break
|
| 209 |
|
| 210 |
except (litellm.RateLimitError, litellm.ServiceUnavailableError, litellm.InternalServerError, APIConnectionError) as e:
|
|
@@ -239,14 +239,14 @@ class RotatingClient:
|
|
| 239 |
parsed_data = json.loads(json_buffer)
|
| 240 |
|
| 241 |
# If parsing succeeds, we have the complete object.
|
| 242 |
-
lib_logger.debug(f"Successfully reassembled JSON from stream: {json_buffer}")
|
| 243 |
|
| 244 |
# Wrap the complete error object and raise it. The outer function will decide how to handle it.
|
| 245 |
raise StreamedAPIError("Provider error received in stream", data=parsed_data)
|
| 246 |
|
| 247 |
except json.JSONDecodeError:
|
| 248 |
# This is the expected outcome if the JSON in the buffer is not yet complete.
|
| 249 |
-
lib_logger.debug(f"Buffer still incomplete. Waiting for more chunks: {json_buffer}")
|
| 250 |
continue # Continue to the next loop to get the next chunk.
|
| 251 |
except StreamedAPIError:
|
| 252 |
# Re-raise to be caught by the outer retry handler.
|
|
|
|
| 204 |
except StopAsyncIteration:
|
| 205 |
stream_completed = True
|
| 206 |
if json_buffer:
|
| 207 |
+
lib_logger.info(f"Stream ended with incomplete data in buffer: {json_buffer}")
|
| 208 |
break
|
| 209 |
|
| 210 |
except (litellm.RateLimitError, litellm.ServiceUnavailableError, litellm.InternalServerError, APIConnectionError) as e:
|
|
|
|
| 239 |
parsed_data = json.loads(json_buffer)
|
| 240 |
|
| 241 |
# If parsing succeeds, we have the complete object.
|
| 242 |
+
lib_logger.info(f"Successfully reassembled JSON from stream: {json_buffer}")
|
| 243 |
|
| 244 |
# Wrap the complete error object and raise it. The outer function will decide how to handle it.
|
| 245 |
raise StreamedAPIError("Provider error received in stream", data=parsed_data)
|
| 246 |
|
| 247 |
except json.JSONDecodeError:
|
| 248 |
# This is the expected outcome if the JSON in the buffer is not yet complete.
|
| 249 |
+
lib_logger.info(f"Buffer still incomplete. Waiting for more chunks: {json_buffer}")
|
| 250 |
continue # Continue to the next loop to get the next chunk.
|
| 251 |
except StreamedAPIError:
|
| 252 |
# Re-raise to be caught by the outer retry handler.
|