from typing import Callable

import gradio as gr
import asyncio
import time
import threading

from src.retrieve_data import (
    get_gpus_for_leaderboard,
    get_leaderboard_names,
    get_leaderboard_submissions,
    get_submission_count,
)
from src.envs import CACHE_TIMEOUT, BACKGROUND_REFRESH_INTERVAL

# key: func_name:args:kwargs, value: (timestamp, data)
cache = {}

# The leaderboard/GPU pair currently being viewed; read by the background refresher
active_selections = {
    "leaderboard": None,
    "gpu": None,
}

loop = asyncio.new_event_loop()
asyncio.set_event_loop(loop)

background_refresh_running = True


# Serialize access to the shared event loop: Gradio handlers and the background
# refresh thread both call cached_fetch, and run_until_complete must not be
# entered concurrently from two threads.
_loop_lock = threading.Lock()


def cached_fetch(
    func: Callable, *args, force_refresh=False, limit=None, offset=0, **kwargs
):
    """Fetch data with caching to avoid redundant API calls"""
    cache_key = (
        f"{func.__name__}:{str(args)}:{str(kwargs)}:limit={limit}:offset={offset}"
    )
    current_time = time.time()

    if not force_refresh and cache_key in cache:
        timestamp, data = cache[cache_key]
        if current_time - timestamp < CACHE_TIMEOUT:
            return data

    print(f"Fetching {cache_key}")
    with _loop_lock:
        if func.__name__ == "get_leaderboard_submissions":
            result = loop.run_until_complete(
                func(*args, limit=limit, offset=offset, **kwargs)
            )
        else:
            result = loop.run_until_complete(func(*args, **kwargs))

    cache[cache_key] = (current_time, result)
    return result
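

# Illustrative sketch (hypothetical leaderboard/GPU names): a call such as
#     cached_fetch(get_leaderboard_submissions, "vectoradd", "H100", limit=10, offset=0)
# builds the cache key
#     "get_leaderboard_submissions:('vectoradd', 'H100'):{}:limit=10:offset=0"
# and is answered from `cache` until CACHE_TIMEOUT seconds elapse, unless
# force_refresh=True is passed.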


def invalidate_cache(prefix=None):
    """Invalidate all cache entries or those matching a prefix"""
    global cache
    if prefix is None:
        cache = {}
    else:
        cache = {k: v for k, v in cache.items() if not k.startswith(prefix)}
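

# Illustrative sketch: invalidate_cache() clears the whole cache, while a prefix
# such as
#     invalidate_cache("get_leaderboard_submissions")
# drops only the cached submission pages and keeps the leaderboard and GPU name
# entries. Nothing in this file calls it directly.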


def background_refresh():
    """Background thread to refresh active data periodically"""
    while background_refresh_running:
        try:
            time.sleep(BACKGROUND_REFRESH_INTERVAL)
            lb_name = active_selections["leaderboard"]
            gpu_name = active_selections["gpu"]
            if lb_name and gpu_name:
                cached_fetch(
                    get_leaderboard_submissions, lb_name, gpu_name, force_refresh=True
                )
                cached_fetch(get_gpus_for_leaderboard, lb_name, force_refresh=True)
                cached_fetch(get_leaderboard_names, force_refresh=True)
        except Exception as e:
            print(f"Background refresh error: {e}")


def build_ui():
    # Define the table builder before it is used below
    def create_table_for_lb_with_global_rank(lb_data, offset):
        """Create a table with global ranks instead of page-specific ranks"""
        headers = [
            "Rank",
            "User Name",
            "Submission ID",
            "Submission Name",
            "Runtime (ms)",
            "Submission Date",
        ]
        rows = []
        for i, result in enumerate(lb_data.results, 1):
            # Calculate the global rank by adding the page offset
            global_rank = i + offset

            # Only show medals for the top 3 overall, and only on the first page
            if offset == 0 and global_rank <= 3:
                if global_rank == 1:
                    rank_display = "🥇 1"
                elif global_rank == 2:
                    rank_display = "🥈 2"
                elif global_rank == 3:
                    rank_display = "🥉 3"
            else:
                rank_display = str(global_rank)

            # Convert the runtime to milliseconds for display
            score = float(result.submission_score) * 1000
            rows.append(
                [
                    rank_display,
                    result.user_name,
                    str(result.submission_id),
                    result.submission_name,
                    f"{score:.4f}",
                    result.submission_time.strftime("%Y-%m-%d %H:%M:%S"),
                ]
            )

        # Later pages get a class that strips the medal row colors (see CSS below)
        elem_classes = "" if offset == 0 else "non-first-page-table"
        df = gr.Dataframe(
            headers=headers,
            datatype=[
                "str",
                "str",
                "str",  # Submission ID
                "str",
                "str",
                "timestamp",
            ],
            value=rows,
            interactive=False,
            elem_classes=elem_classes,
        )
        return df

    with gr.Blocks(
        title="ML Leaderboards",
        theme=gr.themes.Soft(),
        css="""
        /* Apply medal colors to all tables by default */
        .gradio-container table tr:nth-child(1) {
            background-color: rgba(255, 215, 0, 0.2) !important; /* Gold */
        }
        .gradio-container table tr:nth-child(2) {
            background-color: rgba(192, 192, 192, 0.2) !important; /* Silver */
        }
        .gradio-container table tr:nth-child(3) {
            background-color: rgba(205, 127, 50, 0.2) !important; /* Bronze */
        }

        /* Remove medal colors for tables that are not on the first page */
        .non-first-page-table tr:nth-child(1),
        .non-first-page-table tr:nth-child(2),
        .non-first-page-table tr:nth-child(3) {
            background-color: inherit !important;
        }

        .pagination-controls {
            display: flex;
            justify-content: space-between;
            align-items: center;
            margin-top: 10px;
            width: 100%;
        }
        .pagination-info {
            text-align: center;
            flex-grow: 1;
        }
        .pagination-button {
            min-width: 100px;
        }
        """,
    ) as app:
| gr.Markdown("# 🍿 KernelBot Leaderboard 🍿") | |
| lb_names = cached_fetch(get_leaderboard_names) | |
| selected_lb = lb_names[0] | |
| gpu_names = cached_fetch(get_gpus_for_leaderboard, selected_lb) | |
| selected_gpu = gpu_names[0] | |
| # Set default pagination values | |
| items_per_page = 10 | |
| current_page = 1 | |
| data = cached_fetch( | |
| get_leaderboard_submissions, | |
| selected_lb, | |
| selected_gpu, | |
| limit=items_per_page, | |
| offset=0, | |
| ) | |
| total_count = cached_fetch(get_submission_count, selected_lb, selected_gpu) | |
| total_pages = (total_count + items_per_page - 1) // items_per_page | |
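
        # Pagination sketch: with items_per_page = 10, a hypothetical leaderboard
        # of 23 submissions gives (23 + 10 - 1) // 10 = 3 pages, and page 2 is
        # fetched with offset = (2 - 1) * 10 = 10.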

        with gr.Row():
            with gr.Column(scale=1):
                lb_dropdown = gr.Dropdown(
                    choices=lb_names,
                    label="Select Leaderboard",
                    interactive=True,
                    value=selected_lb,
                )
                gpu_dropdown = gr.Dropdown(
                    choices=gpu_names,
                    label="Select GPU",
                    interactive=True,
                    value=selected_gpu,
                )

        with gr.Row():
            # The initial table shows the first page
            results_table = create_table_for_lb_with_global_rank(data, 0)

        with gr.Row(elem_classes="pagination-controls"):
            with gr.Column(scale=1, min_width=100, elem_classes="pagination-button"):
                prev_btn = gr.Button("← Previous", interactive=(current_page > 1))
            with gr.Column(scale=2, elem_classes="pagination-info"):
                page_info = gr.Markdown(f"Page {current_page} of {total_pages}")
            with gr.Column(scale=1, min_width=100, elem_classes="pagination-button"):
                next_btn = gr.Button("Next →", interactive=(current_page < total_pages))

        def on_lb_change(lb_name):
            nonlocal current_page

            gpu_choices = cached_fetch(get_gpus_for_leaderboard, lb_name)
            active_selections["leaderboard"] = lb_name
            if gpu_choices:
                active_selections["gpu"] = gpu_choices[0]

            # Reset to page 1 when changing leaderboard
            current_page = 1
            data = cached_fetch(
                get_leaderboard_submissions,
                lb_name,
                gpu_choices[0] if gpu_choices else None,
                limit=items_per_page,
                offset=0,
            )
            # Get the total count for pagination
            total_count = cached_fetch(
                get_submission_count, lb_name, gpu_choices[0] if gpu_choices else None
            )
            total_pages = (total_count + items_per_page - 1) // items_per_page

            return (
                gr.update(
                    choices=gpu_choices, value=gpu_choices[0] if gpu_choices else None
                ),
                create_table_for_lb_with_global_rank(data, 0),
                gr.update(value=f"Page 1 of {total_pages}"),
                gr.update(interactive=False),  # prev button is disabled on page 1
                gr.update(
                    interactive=(total_pages > 1)
                ),  # next button is enabled if there is more than one page
            )

        def update_table(lb_name, gpu_name, page=1):
            if not gpu_name:
                return None, gr.update(), gr.update(), gr.update()

            active_selections["gpu"] = gpu_name
            offset = (page - 1) * items_per_page
            data = cached_fetch(
                get_leaderboard_submissions,
                lb_name,
                gpu_name,
                limit=items_per_page,
                offset=offset,
            )
            # Get the total count for pagination
            total_count = cached_fetch(get_submission_count, lb_name, gpu_name)
            total_pages = (total_count + items_per_page - 1) // items_per_page

            # Create the table with global ranks
            table = create_table_for_lb_with_global_rank(data, offset)
            return (
                table,
                gr.update(value=f"Page {page} of {total_pages}"),
                gr.update(interactive=(page > 1)),
                gr.update(interactive=(page < total_pages)),
            )
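
        # Note: update_table returns a 4-tuple that maps, in order, onto the
        # [results_table, page_info, prev_btn, next_btn] outputs wired up below,
        # which is how the Previous/Next buttons get enabled or disabled per page.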

        def next_page():
            nonlocal current_page
            lb_name = active_selections["leaderboard"]
            gpu_name = active_selections["gpu"]

            # Check against the total count before advancing to the next page
            total_count = cached_fetch(get_submission_count, lb_name, gpu_name)
            total_pages = (total_count + items_per_page - 1) // items_per_page
            if current_page < total_pages:
                current_page += 1
            return update_table(lb_name, gpu_name, current_page)

        def prev_page():
            nonlocal current_page
            if current_page > 1:
                current_page -= 1
            lb_name = active_selections["leaderboard"]
            gpu_name = active_selections["gpu"]
            return update_table(lb_name, gpu_name, current_page)

        def on_gpu_change(lb_name, gpu_name):
            nonlocal current_page
            # Reset to page 1 when the GPU selection changes
            current_page = 1
            return update_table(lb_name, gpu_name, 1)

        lb_dropdown.change(
            fn=on_lb_change,
            inputs=[lb_dropdown],
            outputs=[gpu_dropdown, results_table, page_info, prev_btn, next_btn],
        )
        gpu_dropdown.change(
            fn=on_gpu_change,
            inputs=[lb_dropdown, gpu_dropdown],
            outputs=[results_table, page_info, prev_btn, next_btn],
        )
        next_btn.click(
            fn=next_page,
            inputs=[],
            outputs=[results_table, page_info, prev_btn, next_btn],
        )
        prev_btn.click(
            fn=prev_page,
            inputs=[],
            outputs=[results_table, page_info, prev_btn, next_btn],
        )

    return app


if __name__ == "__main__":
    try:
        # Start the background refresher as a daemon so it cannot block shutdown
        background_thread = threading.Thread(target=background_refresh, daemon=True)
        background_thread.start()

        app = build_ui()
        app.launch()
    finally:
        # Signal the refresher to stop, give it a moment, then close the shared loop
        background_refresh_running = False
        background_thread.join(timeout=1.0)
        loop.close()