File size: 3,252 Bytes
2d93720
 
 
 
bef12d1
2d93720
 
 
 
 
 
 
bef12d1
 
 
 
 
 
 
 
 
 
 
 
2d93720
 
 
 
 
 
 
 
 
 
 
 
 
 
 
326f1b4
2d93720
 
 
 
 
 
326f1b4
2d93720
14f06cc
2d93720
 
 
bef12d1
 
 
2d93720
 
4de386a
2d93720
326f1b4
14f06cc
326f1b4
bef12d1
 
 
 
 
 
 
14f06cc
bef12d1
 
14f06cc
bef12d1
14f06cc
 
 
bef12d1
 
14f06cc
2d93720
bef12d1
 
 
 
 
 
4de386a
 
326f1b4
bef12d1
 
4de386a
bef12d1
326f1b4
bef12d1
326f1b4
14f06cc
2d93720
326f1b4
 
2d93720
 
 
 
 
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
import requests
import random
import time
import threading
from flask import Flask, request, Response, jsonify, stream_with_context

# Public endpoint serving a newline-separated list of proxy URLs.
PROXY_LIST_URL = "https://proxies.typegpt.net/ips.txt"
proxies_cache = []  # most recently fetched proxy list; refreshed by the background thread
last_refresh = 0  # time.time() of the last refresh (0 forces an initial fetch)

app = Flask(__name__)

# DeepInfra required headers
# Browser-like defaults so forwarded requests resemble the DeepInfra web
# embed; per-request client headers are merged on top and may override these.
DEEPINFRA_HEADERS = {
    "accept": "text/event-stream",
    "content-type": "application/json",
    "referer": "https://deepinfra.com/",
    "sec-ch-ua": '"Chromium";v="140", "Not=A?Brand";v="24", "Google Chrome";v="140"',
    "sec-ch-ua-mobile": "?0",
    "sec-ch-ua-platform": '"Windows"',
    "user-agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/140.0.0.0 Safari/537.36",
    "x-deepinfra-source": "web-embed",
}

def fetch_proxies():
    """Download the remote proxy list.

    Returns a list of non-empty, whitespace-stripped lines from
    PROXY_LIST_URL, or an empty list if the fetch fails for any reason.
    """
    try:
        response = requests.get(PROXY_LIST_URL, timeout=10)
        response.raise_for_status()
        stripped = (entry.strip() for entry in response.text.splitlines())
        return [entry for entry in stripped if entry]
    except Exception as exc:
        print(f"[ERROR] Failed to fetch proxies: {exc}")
        return []

def get_random_proxy(proxies):
    """Pick one proxy uniformly at random; None when the pool is empty."""
    return random.choice(proxies) if proxies else None

def refresh_proxies_loop():
    """Background daemon loop that keeps the proxy cache warm.

    Wakes up once a minute and re-downloads the proxy list when the cache
    is empty or older than five minutes, updating the module-level
    `proxies_cache` and `last_refresh` globals.
    """
    global proxies_cache, last_refresh
    while True:
        cache_is_stale = (time.time() - last_refresh) > 300
        if cache_is_stale or not proxies_cache:
            proxies_cache = fetch_proxies()
            last_refresh = time.time()
            print(f"[INFO] Refreshed {len(proxies_cache)} proxies.")
        time.sleep(60)

@app.route("/health", methods=["GET"])
def health():
    """Liveness probe: always reports the service as up."""
    body, status = "Healthy", 200
    return body, status

@app.route("/deepinfra", methods=["POST"])
def proxy_deepinfra():
    """Forward the incoming POST to DeepInfra's chat-completions endpoint
    through a randomly chosen proxy, streaming the response back.

    Returns:
        500 JSON error when no proxies are cached,
        502 JSON error when the upstream request raises,
        otherwise the upstream status/body streamed to the client.
    """
    target_url = "https://api.deepinfra.com/v1/openai/chat/completions"
    proxy = get_random_proxy(proxies_cache)
    if not proxy:
        return jsonify({"error": "No proxies available"}), 500

    proxies = {"http": proxy, "https": proxy}

    try:
        print(f"[INFO] Forwarding POST to {target_url} via {proxy}")

        # Start with required DeepInfra headers, merge client's headers on top.
        # Host must not be forwarded (it names this server, not DeepInfra),
        # and the client's Content-Length is dropped so requests recomputes
        # it for the body we actually re-send.
        forward_headers = dict(DEEPINFRA_HEADERS)
        for k, v in request.headers:
            if k.lower() not in ("host", "content-length"):
                forward_headers[k] = v

        # Stream request
        upstream = requests.post(
            url=target_url,
            headers=forward_headers,
            data=request.get_data(),
            params=request.args,
            proxies=proxies,
            stream=True,
            timeout=300,
        )

        def generate():
            # Relay body chunks to the client as they arrive (SSE-friendly).
            for chunk in upstream.iter_content(chunk_size=None):
                if chunk:
                    yield chunk

        # BUG FIX: previously every upstream header was copied verbatim.
        # Hop-by-hop headers (RFC 9110 section 7.6.1) must not be relayed by
        # a proxy, and Content-Length/Content-Encoding describe the upstream
        # wire format rather than the decoded stream we re-emit — copying
        # them truncates or corrupts the client's response.
        hop_by_hop = {
            "connection", "keep-alive", "proxy-authenticate",
            "proxy-authorization", "te", "trailers", "transfer-encoding",
            "upgrade", "content-length", "content-encoding",
        }
        headers = {
            k: v for k, v in upstream.headers.items()
            if k.lower() not in hop_by_hop
        }
        headers["X-Proxy-Used"] = proxy

        return Response(
            stream_with_context(generate()),
            status=upstream.status_code,
            headers=headers,
            content_type=upstream.headers.get("Content-Type", "application/json")
        )

    except Exception as e:
        return jsonify({"error": "Proxy failed", "proxy": proxy, "details": str(e)}), 502

def main():
    """Entry point: launch the proxy-refresher daemon, then serve HTTP."""
    threading.Thread(target=refresh_proxies_loop, daemon=True).start()
    app.run(host="0.0.0.0", port=5000)

if __name__ == "__main__":
    main()