| | import gradio as gr |
| | import pandas as pd |
| | from huggingface_hub import hf_hub_download |
| | import re |
| |
|
| | |
# Hugging Face Space that publishes the upstream UGI leaderboard CSV.
REPO_ID = "DontPlanToEnd/UGI-Leaderboard"
FILENAME = "ugi-leaderboard-data.csv"
| |
|
def make_clickable_model(model_name, link):
    """Return model_name wrapped in an HTML anchor to link, or plain text if no usable link."""
    usable = not pd.isna(link) and bool(link) and str(link).lower() != "nan"
    if not usable:
        return model_name
    anchor_style = "color: #007bff; text-decoration: underline; font-weight: bold;"
    return f'<a target="_blank" href="{link}" style="{anchor_style}">{model_name}</a>'
| |
|
def get_data():
    """
    Download the UGI leaderboard CSV from the Hub, compute the derived
    'UGI Index' ranking, and return (dataframe, status_message).

    On any failure returns an empty DataFrame plus an error message so the
    UI always has something to render.
    """
    print("π Starting download...")
    try:
        # The data file lives in a Space repo, not a model/dataset repo.
        file_path = hf_hub_download(repo_id=REPO_ID, filename=FILENAME, repo_type="space")

        # utf-8-sig strips a BOM if present; stray header whitespace breaks lookups.
        df = pd.read_csv(file_path, encoding='utf-8-sig')
        df.columns = df.columns.str.strip()

        def get_col(keyword):
            """Return the first column whose name contains keyword (case-insensitive), or None."""
            matches = [c for c in df.columns if keyword.lower() in c.lower()]
            return matches[0] if matches else None

        # Prefer the exact known headers; fall back to fuzzy matching so minor
        # upstream renames don't break the app.
        model_col = "author/model_name" if "author/model_name" in df.columns else (get_col("author") or get_col("model"))
        link_col = "Model Link" if "Model Link" in df.columns else get_col("link")
        ugi_col = "UGI π" if "UGI π" in df.columns else get_col("ugi")
        natint_col = "NatInt π‘" if "NatInt π‘" in df.columns else get_col("natint")
        w10_col = "W/10 π" if "W/10 π" in df.columns else get_col("w/10")
        param_col = "Total Parameters" if "Total Parameters" in df.columns else (get_col("param") or get_col("size"))

        # Fail loudly, reporting the actual headers, if a required column is missing.
        required = {"Model": model_col, "UGI": ugi_col, "NatInt": natint_col, "W/10": w10_col}
        missing = [label for label, col in required.items() if not col]
        if missing:
            return pd.DataFrame(), f"β Error: Could not find columns: {', '.join(missing)}. Found: {list(df.columns)}"

        # Parameter count in billions; 0 marks unknown (e.g. closed API models).
        if param_col:
            df['Params (B)'] = pd.to_numeric(df[param_col], errors='coerce').fillna(0)
        else:
            # Fall back to parsing e.g. "...-70B" out of the model name.
            # expand=False makes str.extract return a Series instead of a
            # one-column DataFrame — the correct shape for column assignment.
            df['Params (B)'] = (
                df[model_col].astype(str)
                .str.extract(r'(?i)(\d+\.?\d*)[bB]', expand=False)
                .astype(float)
                .fillna(0)
            )

        # Rule-of-thumb Q4_K_M GGUF footprint: ~0.6 GB per billion parameters.
        df['Q4_K_M Size (GB)'] = (df['Params (B)'] * 0.6).round(1)
        df['Q4_K_M Size'] = df['Q4_K_M Size (GB)'].apply(lambda x: f"{x} GB" if x > 0 else "API / Unknown")

        # Scores must be numeric for the index formula; unparseable values -> 0.
        for col in [ugi_col, natint_col, w10_col]:
            df[col] = pd.to_numeric(df[col], errors='coerce').fillna(0)

        # UGI Index = (UGI + NatInt) * (W/10)^2 — willingness weighted quadratically.
        df['UGI Index'] = ((df[ugi_col] + df[natint_col]) * (df[w10_col] ** 2)).round(2)

        # Rank by the derived index, best first.
        df = df.sort_values(by='UGI Index', ascending=False)
        df.insert(0, 'Rank', range(1, len(df) + 1))

        # Render the model name as an HTML link when a link column exists.
        if link_col:
            df['Model'] = df.apply(lambda x: make_clickable_model(x[model_col], x[link_col]), axis=1)
        else:
            df['Model'] = df[model_col]

        # Stable display aliases regardless of the upstream header spelling.
        df['UGI'] = df[ugi_col]
        df['NatInt'] = df[natint_col]
        df['W/10'] = df[w10_col]

        return df, f"β Successfully loaded {len(df)} models."

    except Exception as e:
        # Boundary handler: surface any failure in the UI instead of crashing.
        print(f"Error: {e}")
        return pd.DataFrame(), f"β Error: {str(e)}"
| |
|
| | |
# Module-level cache of the processed leaderboard; populated by app_load(),
# read by search() so slider/search changes don't re-download the data.
CACHED_DF = pd.DataFrame()
| |
|
def search(query, max_size):
    """Filter the cached leaderboard by name substring and Q4_K_M size cap.

    Args:
        query: substring matched case-insensitively against the rendered
            Model cell (note: the cell may contain HTML link markup).
        max_size: maximum Q4_K_M GGUF size in GB; 128 (the slider maximum)
            disables the size filter entirely.

    Returns:
        A DataFrame restricted to the display columns (possibly empty).
    """
    if CACHED_DF.empty:
        return CACHED_DF

    df = CACHED_DF.copy()

    if query:
        # regex=False treats the user's input literally, so characters like
        # '(' or '+' don't raise re.error and break the filter.
        df = df[df['Model'].astype(str).str.contains(query, case=False, na=False, regex=False)]

    if max_size < 128:
        # Size 0 marks API / unknown-size models; hide them whenever a cap is set.
        df = df[(df['Q4_K_M Size (GB)'] <= max_size) & (df['Q4_K_M Size (GB)'] > 0)]

    display_cols = ['Rank', 'Model', 'Q4_K_M Size', 'UGI Index', 'UGI', 'NatInt', 'W/10']
    return df[display_cols]
| |
|
def app_load(query, max_size):
    """Fetch fresh leaderboard data, cache it globally, and return (filtered table, status)."""
    global CACHED_DF
    CACHED_DF, status = get_data()
    return search(query, max_size), status
| |
|
| | |
# Widen the app container and hide the default Gradio footer.
custom_css = """
.gradio-container {max-width: 95% !important}
footer {visibility: hidden}
"""
| |
|
# --- UI layout and event wiring ---------------------------------------------
with gr.Blocks(css=custom_css, title="UGI Index Leaderboard") as demo:
    gr.Markdown("# π UGI Index")

    # Explanatory header: scoring formula and the Q4_K_M sizing rule of thumb.
    gr.Markdown("""
    ### βΉοΈ How is the Score Calculated?
    The **UGI Index** ranks LLMs based on data from [DontPlanToEnd/UGI-Leaderboard](https://huggingface.co/spaces/DontPlanToEnd/UGI-Leaderboard) with a simple holistic mathematical equation that ensures top ranked models posses a high amount of Uncensored Information, are Naturally very Intelligent, and most importantly they are OBEDIENT to the user. This is just my personal "rule of thumb" method for choosing the best uncensored model for LOCAL use on any given hardware I have laying around. Ajust the slider to the amount of RAM on your device to see the best uncensored model for your hardware. It uses Q4_K_M as a refrence point for GGUF size, however there are tons of options so it can be flexable. If your brand new and just want to try a uncensored local LLM for the first time do this: Grab a **mradermacher** quant in **i1-IQ4_XS** and run with **LMstudio.ai**

    $$ \\text{UGI Index} = (\\text{UGI} + \\text{NatInt}) \\times \\text{W/10}^2 $$

    * **UGI:** Uncensored General Intelligence
    * **NatInt:** Natural Intelligence
    * **W/10:** Willingness (Squared)

    *π‘ **Note on Model Size:** GGUF size is calculated at standard **Q4_K_M** quantization (`Total Parameters Γ 0.6 GB`). Lowering the slider automatically hides closed-source API models.*
    """)

    with gr.Row():
        status_box = gr.Textbox(label="Status", value="Initializing...", interactive=False, scale=4)
        refresh_btn = gr.Button("Refresh Data", scale=1)

    with gr.Row():
        search_box = gr.Textbox(label="Search Models", placeholder="Type model name...", interactive=True, scale=1)
        size_slider = gr.Slider(minimum=1, maximum=128, value=128, step=1, label="π» Running Local? Max Q4_K_M Size (GB) - Set to 128 to include API models", interactive=True, scale=1)

    # datatype="markdown" lets the Model column render its HTML anchors.
    data_table = gr.Dataframe(
        headers=['Rank', 'Model', 'Q4_K_M Size', 'UGI Index', 'UGI', 'NatInt', 'W/10'],
        datatype="markdown",
        interactive=False,
        wrap=True
    )

    # Full reload (download + process) on startup and on explicit refresh.
    demo.load(fn=app_load, inputs=[search_box, size_slider], outputs=[data_table, status_box])

    refresh_btn.click(fn=app_load, inputs=[search_box, size_slider], outputs=[data_table, status_box])

    # Cheap in-memory filtering on every search/slider change (no re-download).
    search_box.change(fn=search, inputs=[search_box, size_slider], outputs=data_table)
    size_slider.change(fn=search, inputs=[search_box, size_slider], outputs=data_table)

if __name__ == "__main__":
    demo.launch()