#### INSTALL LIB
import subprocess
import os

# Read the GitHub token used to install the private recommender package
token = os.environ.get("GITHUB_TOKEN", None)
if not token:
    raise ValueError("Token not found")

# Build the install command
command = f"pip install git+https://x-access-token:{token}:x-oauth-basic@github.com/philschmid/model-recommender.git"
subprocess.run(command, shell=True, check=True)

#### GRADIO APP
from dataclasses import asdict
import json

import gradio as gr
from recommender.main import get_tgi_config


def greet(model_id, gpu_memory, num_gpus):
    # Generate a TGI config for the given model and hardware, returning errors as JSON
    try:
        configs = get_tgi_config(model_id, gpu_memory, num_gpus)
    except Exception as e:
        return json.dumps({"error": str(e)})
    if configs is None:
        return json.dumps({"error": f"Couldn't generate TGI config for {model_id}"})
    return json.dumps(asdict(configs))


theme = gr.themes.Monochrome(
    primary_hue="indigo",
    secondary_hue="blue",
    neutral_hue="slate",
    radius_size=gr.themes.sizes.radius_sm,
    font=[
        gr.themes.GoogleFont("Open Sans"),
        "ui-sans-serif",
        "system-ui",
        "sans-serif",
    ],
)

DESCRIPTION = """
This Space helps you generate and validate Hugging Face TGI configurations for your model. Provide your model ID and the amount of GPU memory you have available, and we will generate a configuration for you, which you can use to run your model on TGI.
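
If you prefer to skip the UI and call the underlying recommender library directly, a minimal sketch looks like the following (the model ID and hardware values are illustrative only):

```python
from dataclasses import asdict
from recommender.main import get_tgi_config

# Illustrative values: model ID, available GPU memory, number of GPUs
config = get_tgi_config("meta-llama/Llama-2-7b-hf", 24, 1)
if config is not None:
    print(asdict(config))
```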