File size: 8,239 Bytes
4754988
fe1636e
283f712
 
4754988
283f712
3aba0e1
f8d987c
4754988
283f712
 
f8d987c
283f712
 
 
 
f8d987c
283f712
f8d987c
283f712
 
 
 
f8d987c
283f712
 
 
 
 
 
 
 
 
 
 
 
f8d987c
 
 
 
 
 
 
 
 
 
4754988
283f712
 
 
 
 
f8d987c
30b734e
283f712
 
 
 
 
30b734e
 
f8d987c
30b734e
 
 
 
 
df66e9c
 
283f712
 
f8d987c
 
 
283f712
4754988
283f712
 
 
f8d987c
283f712
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
f8d987c
 
 
 
 
 
 
 
 
283f712
f8d987c
30b734e
f8d987c
 
 
 
283f712
f8d987c
283f712
f8d987c
30b734e
f8d987c
 
 
 
 
 
 
 
 
 
 
 
30b734e
283f712
 
f8d987c
283f712
 
f8d987c
283f712
 
 
 
f8d987c
283f712
 
f8d987c
283f712
 
f8d987c
283f712
 
f8d987c
283f712
f8d987c
283f712
 
f8d987c
283f712
 
 
 
 
f8d987c
4754988
283f712
 
 
 
 
 
 
 
 
 
 
30b734e
 
 
 
 
 
283f712
30b734e
283f712
3aba0e1
cde23f3
283f712
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
import gradio as gr
import random
import string
import re
import json
from difflib import get_close_matches

# Load the channel-name -> metadata mapping used for EVENTS matching.
# NOTE(review): assumes channels_fixed.json sits in the current working
# directory and that each value is a dict carrying a "tvg_id" key — the
# lookup code below reads data.get("tvg_id"); confirm against the file.
with open("channels_fixed.json", "r", encoding="utf-8") as f:
    channel_map = json.load(f)

# Random 3-character filename generator (unchanged)
def random_filename(uppercase=True):
    """Return a random 3-letter filename with an '.m3u' extension.

    The stem is drawn from the uppercase alphabet by default, or the
    lowercase alphabet when *uppercase* is False.
    """
    alphabet = string.ascii_uppercase if uppercase else string.ascii_lowercase
    stem = "".join(random.choices(alphabet, k=3))
    return stem + ".m3u"

# Insert tvg-id immediately after #EXTINF:-1 (unchanged)
def insert_tvg_id(extinf_line, tvg_id):
    """Return *extinf_line* with tvg-id="<tvg_id>" placed after '#EXTINF:-1'.

    Any pre-existing tvg-id attribute is removed first, and runs of
    whitespace left behind by that removal are collapsed to single spaces.
    """
    stripped = re.sub(r'tvg-id="[^"]*"', '', extinf_line)
    tagged = stripped.replace('#EXTINF:-1', f'#EXTINF:-1 tvg-id="{tvg_id}"', 1)
    # Tidy up the gaps the attribute removal may have introduced.
    return re.sub(r'\s+', ' ', tagged).strip()

# Apply special event rules (unchanged)
def apply_event_special_cases(extinf_line, channel_name):
    """Override the tvg-id for a handful of known live-event channel names.

    Checks run in a fixed priority order; the first matching rule wins and
    the line is returned unchanged when none apply.
    """
    rules = (
        ("MLB LEAGUE PASS" in channel_name, "MLB.Baseball.Dummy.us"),
        ("NHL GAMECENTER" in channel_name, "NHL.Hockey.Dummy.us"),
        ("ATP" in channel_name or "WTA" in channel_name, "Tennis.Channel.us"),
        ('tvg-id="test"' in extinf_line, "Live.Event.us"),
    )
    for matched, dummy_id in rules:
        if matched:
            return insert_tvg_id(extinf_line, dummy_id)
    return extinf_line

# Normalize helper: uppercase and strip everything but ASCII letters/digits
def normalize_name(s: str) -> str:
    """Uppercase *s* and drop every character outside A-Z and 0-9.

    Empty or falsy input yields "".  Used so "ABC NY USA" and "ABCNY USA"
    compare equal against the hardcoded fix table.
    """
    if not s:
        return ""
    keep = set("ABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789")
    return "".join(ch for ch in s.upper() if ch in keep)

# Main processor (Project 1 Base with minimal fix)
def process_m3u(m3u_text):
    lines = m3u_text.splitlines()
    out_247_blocks = []
    out_events_blocks = []
    log = []

    # Hardcoded abbreviation + forced fixes (now includes your new entries)
    hardcoded_fixes = {
        "BTN": "Big.Ten.Network.HD.us2",
        "SNY": "SNY.SportsNet.New.York.HD.us2",
        "MASN": "MASN.-.Mid.Atlantic.Sports.Network.us2",
        "YES": "Yes.Network.us2",
        "MSG": "MSG.National.us2",
        "REELZ USA": "ReelzChannel.HD.us2",
        "ABCNY USA": "ABC.(WABC).New.York,.NY.us",
        "ABC NY USA": "ABC.(WABC).New.York,.NY.us",   # handle space variant
        "OWN USA": "Oprah.Winfrey.Network.HD.us2",
        "DISCOVERY USA": "Discovery.Channel.ca2",
        "OUTDOOR CHANNEL": "Outdoor.Channel.HD.us2",
        "OUTDOOR CHANNEL USA": "Outdoor.Channel.HD.us2",
        "LOITV": "Soccer.Dummy.us",
        "NBCNY USA" : "WNBC-DT.us_locals1",
        "NBC USA" : "WNBC-DT.us_locals1",
    }

    # Precompute normalized hardcoded map for fast comparisons
    normalized_hardcoded = {normalize_name(k): v for k, v in hardcoded_fixes.items()}

    for i, line in enumerate(lines):
        if line.startswith("#EXTINF"):
            url = lines[i+1] if i+1 < len(lines) else ""
            extinf = line

            # Extract tvg-id and channel name (same as base)
            tvg_match = re.search(r'tvg-id="([^"]*)"', extinf)
            tvg_id = tvg_match.group(1) if tvg_match else None
            name_match = re.search(r",(.*)", extinf)
            channel_name = name_match.group(1).strip() if name_match else ""

            # Extract group
            group_match = re.search(r'group-title="([^"]*)"', extinf)
            group_title = group_match.group(1).upper().strip() if group_match else ""

            # --- 24/7 CHANNELS ---
            if group_title and "24/7" in group_title:
                if tvg_id == "test" or not tvg_id:
                    new_ext = insert_tvg_id(extinf, "Info.Guide.Dummy.us")
                    log.append(f"ℹ️ 24/7 | {channel_name} β†’ Info.Guide.Dummy.us")
                else:
                    new_ext = extinf
                out_247_blocks.append((new_ext, url))

            # --- EVENTS ---
            elif group_title and "EVENTS" in group_title:
                # candidate extraction: prefer bracketed network name if present
                bracket_match = re.search(r'\[([^\]]+)\]', channel_name)
                if bracket_match:
                    candidate_raw = bracket_match.group(1).strip()
                else:
                    # fallback to entire display string
                    candidate_raw = channel_name

                candidate_norm = normalize_name(candidate_raw)

                # 1) Check hardcoded fixes FIRST using normalized keys
                matched_fix = False
                if candidate_norm:
                    # exact normalized match
                    if candidate_norm in normalized_hardcoded:
                        fixed_id = normalized_hardcoded[candidate_norm]
                        new_ext = insert_tvg_id(extinf, fixed_id)
                        new_ext = apply_event_special_cases(new_ext, candidate_raw)
                        out_events_blocks.append((new_ext, url))
                        log.append(f"βœ… EVENTS | {candidate_raw} β†’ {fixed_id} (hardcoded exact)")
                        matched_fix = True
                    else:
                        # sometimes bracket may be abbreviation (e.g. BTN), so check startswith any key
                        for key_norm, fixed_id in normalized_hardcoded.items():
                            # if candidate begins with the key (normalized), or key appears within candidate_norm
                            if candidate_norm.startswith(key_norm) or key_norm in candidate_norm:
                                new_ext = insert_tvg_id(extinf, fixed_id)
                                new_ext = apply_event_special_cases(new_ext, candidate_raw)
                                out_events_blocks.append((new_ext, url))
                                log.append(f"βœ… EVENTS | {candidate_raw} β†’ {fixed_id} (hardcoded partial)")
                                matched_fix = True
                                break

                if matched_fix:
                    continue

                # 2) Try matching against JSON (same behavior as base)
                match = None
                for key, data in channel_map.items():
                    if data.get("tvg_id") and key.lower() in candidate_raw.lower():
                        match = data["tvg_id"]
                        break
                if not match:
                    keys = list(channel_map.keys())
                    guesses = get_close_matches(candidate_raw, keys, n=1, cutoff=0.6)
                    if guesses:
                        match = channel_map[guesses[0]].get("tvg_id")

                if match:
                    new_ext = insert_tvg_id(extinf, match)
                    log.append(f"βœ… EVENTS | {candidate_raw} β†’ {match}")
                else:
                    new_ext = insert_tvg_id(extinf, "Live.Event.us")
                    log.append(f"⚠️ EVENTS | {candidate_raw} β†’ Live.Event.us (fallback)")

                new_ext = apply_event_special_cases(new_ext, candidate_raw)
                out_events_blocks.append((new_ext, url))

    # Build output playlists (unchanged formatting)
    out_247 = "\n".join([f"{ext}\n{url}" for ext, url in out_247_blocks])
    out_events = "\n".join([f"{ext}\n{url}" for ext, url in out_events_blocks])

    return out_247, out_events, "\n".join(log)

# Gradio UI with requested sizing (small input, larger log)
def run_app(m3u_text):
    """Process the pasted playlist and write both result files to disk.

    Returns the 24/7 filename, the events filename, and the processing log
    text, in that order (matching the Gradio outputs below).
    """
    playlist_247, playlist_events, log = process_m3u(m3u_text)
    name_247 = random_filename(uppercase=True)
    name_events = random_filename(uppercase=False)
    for name, content in ((name_247, playlist_247), (name_events, playlist_events)):
        with open(name, "w", encoding="utf-8") as fh:
            fh.write(content)
    return name_247, name_events, log

# Wire the processor into a single-page Gradio form: one textbox in,
# two downloadable files plus a log textbox out.
iface = gr.Interface(
    fn=run_app,
    inputs=gr.Textbox(lines=5, placeholder="Paste your M3U playlist here..."),  # smaller input
    outputs=[
        gr.File(label="24/7 Playlist"),
        gr.File(label="Events Playlist"),
        gr.Textbox(label="Log", lines=30),  # larger log
    ],
    title="Project 1 Playlist Processor",
    description="Splits 24/7 and Events playlists, applies JSON tvg-id mappings, adds hardcoded + special rules, and outputs two clean M3Us."
)

# Launch the web UI only when run as a script, not on import.
if __name__ == "__main__":
    iface.launch()