# بسم الله الرحمن الرحيم (In the name of Allah, the Most Gracious, the Most Merciful)
"""Gradio front-end for Fistal AI.

Collects a dataset topic / task / sample count / Unsloth base model from the
user, streams the agentic fine-tuning workflow from ``client.run_fistal`` into
a live log, and surfaces the final evaluation report with a copy-to-clipboard
button. Intended to run as a Hugging Face Space.
"""

import asyncio
import base64
import os
import traceback

import gradio as gr

from client import run_fistal

# Local development only — on HF Spaces the secrets come from Space settings.
# from dotenv import load_dotenv
# load_dotenv()

# Environment secrets the workflow needs; fail fast at startup if any are absent.
REQUIRED_SECRETS = [
    "GOOGLE_API_KEY_1",
    "GOOGLE_API_KEY_2",
    "GOOGLE_API_KEY_3",
    "GROQ_API_KEY",
    "GEMINI_API_KEY",
    "HF_TOKEN",
    "MODAL_TOKEN_ID",
    "MODAL_TOKEN_SECRET",
]

missing = [s for s in REQUIRED_SECRETS if not os.getenv(s)]
if missing:
    raise ValueError(
        f"Missing secrets in HF Space: {', '.join(missing)}\n"
        "Add them in Settings → Variables and secrets"
    )


def image_to_base64(filepath):
    """Return *filepath* encoded as a ``data:`` URL for inline CSS/HTML use.

    Falls back to an empty string (and prints a notice) when the file is
    missing so the app can still start without its background images.
    """
    try:
        with open(filepath, "rb") as image_file:
            encoded_string = base64.b64encode(image_file.read()).decode('utf-8')
        mime_type = "image/jpeg" if filepath.lower().endswith((".jpg", ".jpeg")) else "image/png"
        return f"data:{mime_type};base64,{encoded_string}"
    except FileNotFoundError:
        print(f"Error: Image file not found at {filepath}")
        return ""


image_data_url = image_to_base64("static/new.jpg")
full_img = image_to_base64("static/fullnew.jpg")

# NOTE(review): several declarations below are invalid CSS and silently ignored
# by browsers ("background-color: none", "background-color: #white",
# "border: !important"). Left untouched to preserve the current rendered look —
# confirm intent before "fixing" them.
css = f"""
.gradio-container {{ background: url('{full_img}') !important; background-size: cover !important; }}
.gradio-container .block {{ background-color: none !important; }}
.gradio-container .wrap {{ background-color: none !important; border: !important; box-shadow: none !important; outline: none !important; }}
.features-box {{ padding: 10px; color: white !important; background-color: #white !important; }}
.features-box .block {{ border: blue 1px !important; background-color: yellow !important; }}
#tuner {{ background: linear-gradient(to right, #008DDA, #6A1AAB, #C71585, #F56C40) !important; color: white !important; margin-top: 5px; }}
#flow {{ padding: 8px !important; color: white !important; }}
#flow .markdown-text {{ color: white !important; }}
.drop li {{ background-color: #bcb9cf !important; color: black !important; }}
.drop input {{ background-color: #bcb9cf !important; background-size: cover !important; color: black !important; border: none !important; padding: 6px 10px !important; border-radius: 4px !important; }}
.out {{ padding: 10px !important; font-size: 16px !important; color: white !important; background: linear-gradient(90deg, rgba(102,126,234,0.3), rgba(106,26,180,0.3), rgba(245,108,64,0.3)) !important; border-radius: 10px !important; }}
.log-container {{ max-height: 600px !important; overflow-y: auto !important; background: rgba(14, 15, 15, 0.5) !important; border-radius: 10px !important; padding: 20px !important; border: 1px solid #3a3a3a !important; }}
#stat {{ min-height: 60px !important; }}
#stat input, #stat textarea {{ padding: 12px 10px !important; line-height: 1.5 !important; color: black !important; min-height: 60px !important; height: auto !important; display: flex !important; align-items: center !important; }}
#stat .wrap {{ min-height: 60px !important; }}
.mod {{ background: linear-gradient(to right, #008DDA, #6A1AAB, #C71585, #F56C40) !important; color: white !important; }}
.log-container::-webkit-scrollbar {{ width: 10px; }}
.log-container::-webkit-scrollbar-track {{ background: #2a2a2a; border-radius: 5px; }}
.log-container::-webkit-scrollbar-thumb {{ background: linear-gradient(to bottom, #008DDA, #6A1AAB, #C71585, #F56C40); border-radius: 5px; }}
.log-container::-webkit-scrollbar-thumb:hover {{ background: linear-gradient(to bottom, #0099ee, #7722bb, #dd1595, #ff7750); }}
#copy-btn {{ background: linear-gradient(to right, #008DDA, #6A1AAB, #C71585, #F56C40) !important; color: white !important; margin-top: 10px !important; }}
:root, .gradio-container * {{ --block-background-fill: #0e0f0f !important; --panel-background-fill: none !important; --input-background-fill: #bcb9cf !important; --color-background-primary: #0e0f0f !important; --block-border-width: 0px !important; --block-border-color: #27272a !important; --panel-border-width: 0px !important; --input-text-color: #000000 !important; --input-placeholder-color: #27272a !important; --panel-border-color: linear-gradient(to right, #008DDA, #6A1AAB, #C71585, #F56C40) !important; --neutral-50: #27272a !important; }}
"""


def app():
    """Build and return the Gradio Blocks UI (launched by the __main__ guard)."""
    # FIX: `theme` and `css` were previously passed to `launch()`, which does
    # not accept them — they are `gr.Blocks` constructor arguments.
    with gr.Blocks(title="Fistal AI 🚀", theme=gr.themes.Ocean(), css=css) as demo:
        # Header section with background image.
        # NOTE(review): the HTML markup inside these gr.HTML strings appears to
        # have been stripped in this copy of the source (bare text only) —
        # restore the original tags from version control if available.
        gr.HTML(f"""
Fistal AI 🚀

Seamlessly fine-tune LLMs with an Agentic AI powered by MCP, Modal, and Unsloth.

HF Space Python Gemini Modal Unsloth MCP Gradio Agentic AI 1B-3B Models Evaluation Report ReadMe
""")
        gr.HTML("""
How does Fistal AI revolutionize LLM fine-tuning?

🧩 MCP + Agentic AI : Automates workflows using MCP while running autonomous data and training pipelines through Agentic AI.
🦥 Unsloth : Speeds up training with optimized kernels and memory-efficient 4-bit fine-tuning.
⚡ Modal Labs (with Volumes) : Provides serverless GPU compute with persistent volumes for fast scaling and reproducible experiments.
💾 Hugging Face + Gradio : Model stored securely in HF repositories, Gradio's polished UI makes Fistal AI better.
🔑 Gemini API Key : Enables secure access to the Gemini API that performs model orchestration and automated workflows.
""")
        gr.HTML("""
🚀 Start Fine-Tuning Now

Add your dataset topic, task type, number of samples, and your preferred Unsloth model.

Then sit back and watch Fistal AI automatically build datasets, fine-tune your LLMs, and deliver results like magic.

Note: The process may take 30-45 minutes, depending on the number of samples and model chosen.
""")

        # NOTE(review): the exact Row/Group nesting was ambiguous in the
        # collapsed source; all four inputs are placed in one Row inside the
        # Group — confirm against the intended layout.
        with gr.Group():
            with gr.Row():
                topic = gr.Textbox(
                    label="📚 Dataset topic",
                    placeholder="Python Questions, Return policy FAQS...",
                )
                samples = gr.Slider(
                    label="📊 Number of samples",
                    minimum=0,  # NOTE(review): 0 samples is selectable — confirm a sensible floor
                    maximum=2000,
                    step=5,
                    value=1000,
                )
                task_type = gr.Dropdown(
                    label="🎯 Task Type",
                    choices=[
                        "text-generation",
                        "summarization",
                        "classification",
                        "question-answering",
                    ],
                    elem_classes="drop",
                )
                model_name = gr.Dropdown(
                    label="🤖 Model to Fine-tune",
                    choices=[
                        "unsloth/Llama-3.2-1B-Instruct-bnb-4bit",
                        "unsloth/Phi-3-mini-4k-instruct",
                        "unsloth/Phi-3-medium-4k-instruct",
                        "unsloth/Llama-3.2-3B-Instruct-bnb-4bit",
                        "unsloth/Qwen2.5-3B-Instruct-bnb-4bit",
                        "unsloth/Qwen2.5-1.5B-Instruct-bnb-4bit",
                        "unsloth/Qwen2.5-0.5B-Instruct-bnb-4bit",
                        "unsloth/Qwen2.5-Coder-3B-Instruct-bnb-4bit",
                        "unsloth/gemma-2-2b-it-bnb-4bit",
                        "unsloth/SmolLM2-1.7B-Instruct-bnb-4bit",
                        "unsloth/Phi-3.5-mini-instruct-bnb-4bit",
                        "unsloth/Granite-3.0-2b-instruct-bnb-4bit",
                        "unsloth/granite-4.0-h-1b-bnb-4bit",
                    ],
                    elem_classes="drop",
                )
            tuner = gr.Button("🚀 Start Finetuning", size="lg", elem_id="tuner")

        gr.Markdown("""## 🔀 Agent Activity Flow""", elem_id="flow")
        status = gr.Textbox(
            label="Status", value="Ready to start...", interactive=False, elem_id="stat"
        )
        with gr.Group(elem_classes="log-container"):
            output = gr.Markdown(label="Output Log:", value="", elem_classes="out")
        eval_report_storage = gr.Textbox(visible=False)  # hidden clipboard source
        copy_btn = gr.Button("Finetuning completed 🚀", visible=False, elem_id="copy-btn")

        async def run_workflow(dataset_topic, samples, model, task):
            """Stream the fine-tuning pipeline into the UI.

            Async generator yielding 4-tuples for (status, output, eval_report_storage,
            copy_btn). Chunks emitted after the first "evaluating"/"llm_as_judge"
            marker are diverted into the evaluation-report buffer so the report
            can be copied separately at the end.
            """
            output_log = "## Under the Hood\n\n"
            output_log += "📋 **Configuration:**\n\n"
            output_log += f" • Topic: {dataset_topic}\n\n"
            output_log += f" • Samples: {samples}\n\n"
            output_log += f" • Model: {model}\n\n"
            output_log += f" • Task: {task}\n\n"
            yield ("Starting workflow...", output_log, "", gr.Button(visible=False))
            try:
                in_eval_report = False
                eval_report_buffer = ""
                async for chunk in run_fistal(
                    dataset_topic=dataset_topic,
                    num_samples=samples,
                    model_name=model,
                    task_type=task,
                ):
                    text = str(chunk)
                    # Once evaluation output begins, everything after it belongs
                    # to the report (flag latches on and never resets).
                    if "evaluating" in text.lower() or "llm_as_judge" in text.lower():
                        in_eval_report = True
                    if in_eval_report:
                        eval_report_buffer += text
                    else:
                        output_log += text
                    # FIX: removed dead per-chunk regex extraction of a
                    # huggingface.co URL — the resulting `model_url` was never
                    # used anywhere.
                    yield (
                        "🟡 Processing...",
                        output_log + eval_report_buffer,
                        eval_report_buffer,
                        gr.Button(visible=False),
                    )
                    await asyncio.sleep(0.1)  # brief pause so the UI can repaint

                final_output = output_log
                if eval_report_buffer:
                    final_output += "📊 **EVALUATION REPORT**\n\n"
                    final_output += eval_report_buffer
                final_output += "\n\n✨ **Fistal AI has completed the process!**"
                yield ("🟢 Complete!", final_output, eval_report_buffer, gr.Button(visible=True))
            except Exception as e:
                # Surface the full traceback in the log rather than crashing the UI.
                error_log = output_log + (
                    f"\n\n❌ **ERROR:**\n```\n{str(e)}\n{traceback.format_exc()}\n```"
                )
                yield ("🔴 Error", error_log, "", gr.Button(visible=False))

        tuner.click(
            run_workflow,
            [topic, samples, model_name, task_type],
            [status, output, eval_report_storage, copy_btn],
        )

        # FIX: the clipboard copy is done entirely client-side by `js`; the
        # previous Python handler returned a value with `outputs=[]`, which
        # Gradio rejects. `fn=None` runs only the JS.
        copy_btn.click(
            None,
            inputs=[eval_report_storage],
            outputs=[],
            js="""
            (text) => {
                if (text && text.trim().length > 0) {
                    navigator.clipboard.writeText(text);
                    alert('✅ Evaluation report copied to clipboard!');
                } else {
                    alert('Fistal has completed the process.');
                }
            }
            """,
        )

        return demo


if __name__ == "__main__":
    # FIX: theme/css moved to gr.Blocks() above; launch() takes neither kwarg.
    app().launch()