import gradio as gr
import torch
from transformers import AutoTokenizer, AutoModelForCausalLM

# Dorna: a Persian-capable instruct model based on Llama 3.1.
MODEL_ID = "PartAI/Dorna2-Llama3.1-8B-Instruct"

# Load tokenizer and model once at import time — the download/load is
# expensive and must not happen per request.
tokenizer = AutoTokenizer.from_pretrained(MODEL_ID)
model = AutoModelForCausalLM.from_pretrained(MODEL_ID)
model.eval()  # inference only; disables dropout etc.


def chat(prompt: str) -> str:
    """Generate a reply to *prompt* and return only the newly generated text.

    Args:
        prompt: The user's message, in plain text (Persian or otherwise).

    Returns:
        The model's completion, decoded without special tokens and without
        echoing the prompt back.
    """
    # Instruct-tuned models expect the chat template, not a raw prompt.
    messages = [{"role": "user", "content": prompt}]
    input_ids = tokenizer.apply_chat_template(
        messages,
        add_generation_prompt=True,
        return_tensors="pt",
    )
    with torch.inference_mode():  # no autograd bookkeeping during generation
        outputs = model.generate(
            input_ids,
            # Explicit attention_mask: generate() warns (and can misbehave
            # with padding) when it has to guess the mask from pad tokens.
            attention_mask=torch.ones_like(input_ids),
            max_new_tokens=150,
        )
    # Slice off the prompt tokens so the UI shows only the model's answer.
    completion = outputs[0][input_ids.shape[-1]:]
    return tokenizer.decode(completion, skip_special_tokens=True)


# Gradio text-in / text-out interface around chat().
iface = gr.Interface(
    fn=chat,
    inputs="text",
    outputs="text",
    title="Dorna Chat",
    description="چت فارسی با مدل Dorna",
)

if __name__ == "__main__":
    # Guarded so importing this module (e.g. for testing) does not
    # start the web server.
    iface.launch()