File size: 2,946 Bytes
fd2ce9d
 
 
 
 
 
9e1c5c2
 
fd2ce9d
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
9e1c5c2
 
 
 
 
 
 
 
 
 
 
 
 
 
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
"""
Optimized JSON utilities using orjson for performance-critical operations.
Falls back to standard json if orjson is not available.
"""
import logging
from typing import Any, Union
from geoalchemy2.shape import to_shape


logger = logging.getLogger(__name__)

try:
    import orjson
    HAS_ORJSON = True
    logger.info("Using orjson for optimized JSON operations")
except ImportError:
    import json
    HAS_ORJSON = False
    logger.info("orjson not available, falling back to standard json")


def fast_dumps(obj: Any) -> str:
    """
    Fast JSON serialization using orjson if available, otherwise standard json.

    Args:
        obj: Object to serialize

    Returns:
        str: JSON string

    Raises:
        TypeError: if *obj* contains a value neither orjson nor
            ``_json_serializer`` knows how to serialize.
    """
    if HAS_ORJSON:
        # Pass the same fallback serializer as the stdlib path so both
        # branches accept the same input types (Enum, pydantic models, ...);
        # orjson already handles datetime/date natively.
        # orjson returns bytes, so we decode to string.
        return orjson.dumps(obj, default=_json_serializer).decode('utf-8')
    else:
        return json.dumps(obj, default=_json_serializer)


def fast_loads(json_str: Union[str, bytes]) -> Any:
    """
    Fast JSON deserialization using orjson if available, otherwise standard json.

    Args:
        json_str: JSON payload, either text or UTF-8 bytes.

    Returns:
        Any: The parsed Python object.
    """
    if not HAS_ORJSON:
        # The stdlib path works on text; decode bytes payloads first.
        text = json_str.decode('utf-8') if isinstance(json_str, bytes) else json_str
        return json.loads(text)
    # orjson consumes bytes; normalize text payloads before parsing.
    payload = json_str.encode('utf-8') if isinstance(json_str, str) else json_str
    return orjson.loads(payload)


def fast_dumps_bytes(obj: Any) -> bytes:
    """
    Fast JSON serialization returning bytes using orjson if available.

    Args:
        obj: Object to serialize

    Returns:
        bytes: JSON bytes (UTF-8)

    Raises:
        TypeError: if *obj* contains a value neither orjson nor
            ``_json_serializer`` knows how to serialize.
    """
    if HAS_ORJSON:
        # Supply the same fallback serializer as the stdlib path so both
        # branches accept the same input types (Enum, pydantic models, ...).
        return orjson.dumps(obj, default=_json_serializer)
    else:
        return json.dumps(obj, default=_json_serializer).encode('utf-8')


def _json_serializer(obj):
    """Fallback serializer for values the JSON encoder cannot handle natively.

    Supports datetime/date (ISO 8601), Enum (underlying value) and pydantic
    models (dict form). pydantic is imported lazily and optionally so this
    module keeps working — for datetime/Enum inputs — when pydantic is not
    installed; previously a missing pydantic raised ImportError here even for
    inputs that needed no pydantic support.

    Raises:
        TypeError: if *obj* is none of the supported types.
    """
    from enum import Enum
    from datetime import datetime, date

    if isinstance(obj, (datetime, date)):
        return obj.isoformat()
    if isinstance(obj, Enum):
        return obj.value

    try:
        from pydantic import BaseModel
    except ImportError:
        BaseModel = None

    if BaseModel is not None and isinstance(obj, BaseModel):
        # pydantic v2 renamed .dict() to .model_dump(); prefer the modern
        # API when present, keep v1 behavior otherwise.
        if hasattr(obj, "model_dump"):
            return obj.model_dump()
        return obj.dict()

    raise TypeError(f"Type {type(obj)} not serializable")


# Async wrapper functions for use in async contexts
async def async_fast_dumps(obj: Any) -> str:
    """Async-context convenience wrapper; delegates to fast_dumps."""
    result = fast_dumps(obj)
    return result


async def async_fast_loads(json_str: Union[str, bytes]) -> Any:
    """Async-context convenience wrapper; delegates to fast_loads."""
    result = fast_loads(json_str)
    return result


async def async_fast_dumps_bytes(obj: Any) -> bytes:
    """Async-context convenience wrapper; delegates to fast_dumps_bytes."""
    result = fast_dumps_bytes(obj)
    return result



def format_geo_location(wkb_element):
    """Build a GeoJSON-style Point dict from a WKB geometry element.

    Args:
        wkb_element: A geoalchemy2 WKB element, or a falsy value.

    Returns:
        dict | None: ``{"type": "Point", "coordinates": [x, y]}`` for a
        valid element; None when *wkb_element* is falsy.
    """
    if not wkb_element:
        return None

    shapely_point = to_shape(wkb_element)  # WKB → Shapely geometry

    return {
        "type": "Point",
        "coordinates": [shapely_point.x, shapely_point.y],
    }