using UnityEngine; using EasyLocalLLM.LLM.Core; using EasyLocalLLM.LLM.Ollama; using EasyLocalLLM.LLM.Factory; using UnityEngine.UIElements; using System.Collections.Generic; using System;

namespace EasyLocalLLM.Samples
{
    /// <summary>
    /// Simple chat screen sample.
    /// Sends prompts to the loaded LLM model and receives responses.
    /// Provides multiple AI types that can be switched to change system prompts and tools.
    /// </summary>
    public class SimpleChat : MonoBehaviour
    {
        // UI Toolkit document hosting the chat screen; assigned in the Inspector.
        public UIDocument UIDocument;

        // Ollama client created in InitializeEasyLocalLLMClient(); used for model
        // loading (LoadModelRunnable) and, presumably, chat requests further down
        // the file — the send path is past the visible end of this chunk.
        private OllamaClient client;

        // Immutable bundle describing one selectable "AI type" (persona):
        // a display name, the system prompt it injects, a UI description,
        // an optional structured-output schema, and a callback run on selection.
        private class AIType
        {
            public string Name { get; private set; }
            public string SystemPrompt { get; private set; }
            public string Description { get; private set; }
            // Anonymous object shaped like a JSON Schema; null when the persona
            // returns free-form text. Presumably forwarded to the LLM as a
            // response-format constraint — confirm against the client API.
            public object FormatSchema { get; private set; }
            // Invoked when this type becomes current (e.g. RemoveAllTools /
            // OnShopperSelected, both defined elsewhere in this file).
            public Action OnChangeCallback { get; private set; }

            public AIType(string name, string systemPrompt, string description, object formatSchema, Action onChangeCallback)
            {
                Name = name;
                SystemPrompt = systemPrompt;
                Description = description;
                FormatSchema = formatSchema;
                OnChangeCallback = onChangeCallback;
            }
        }

        // Registry of selectable personas, keyed by display name.
        // NOTE(review): the generic type arguments appear to have been stripped
        // from this declaration (likely Dictionary<string, AIType>) — confirm
        // against the original source; as written this will not compile.
        private readonly Dictionary aiTypes = new();

        // Currently selected persona; null until the user picks one.
        private AIType currentAIType = null;

        // Unity entry point: build the persona registry, then spin up the
        // Ollama server/client. UI is enabled later, once the model is loaded
        // (see OnModelRunnable).
        void Start()
        {
            Debug.Log("=== EasyLocalLLM Simple Chat Sample ===");
            InitializeAITypes();
            InitializeEasyLocalLLMClient();
        }

        // Registers the three built-in personas. Note that only
        // "Character Generator" supplies a FormatSchema (structured JSON output);
        // the other two use free-form responses.
        private void InitializeAITypes()
        {
            aiTypes.Add(
                "AI Assistant",
                new AIType(
                    "AI Assistant",
                    "You are a friendly and helpful assistant.",
                    // NOTE(review): "Assitant" typo in this user-facing string —
                    // runtime text, left untouched here; fix at the string itself.
                    "General AI Assitant.",
                    null,
                    RemoveAllTools));
            aiTypes.Add(
                "Shopper",
                new AIType(
                    "Shopper",
                    // NOTE(review): "recieve" typo in the prompt text — left untouched.
                    "You are shopper AI in game. You recieve request from customer. You sell your items from your stock. And you also can buy items from your customer.",
                    // NOTE(review): "Controll" typo — left untouched.
                    "Shopper AI in game. Controll money and store.",
                    null,
                    OnShopperSelected));
            aiTypes.Add(
                "Character Generator",
                new AIType(
                    "Character Generator",
                    "Generate character sheet from input lines.\n" +
                    "Strength (STR), Agility (AGL), and Magic (MGK) should be set from 1 to 10.\n" +
                    "STR and AGL: 5 is average adult male, 8 is Olympic athlete, 2 is child. Magic is usually 1, 5 is average mage, 9 is legendary mage.\n" +
                    "Return only JSON.",
                    "Generate character sheet from your character story telling.",
                    // JSON-Schema-shaped anonymous object: three required integer
                    // stats, each constrained to 1..10.
                    new
                    {
                        type = "object",
                        properties = new
                        {
                            STR = new { type = "integer", minimum = 1, maximum = 10, description = "Strength parameter" },
                            AGL = new { type = "integer", minimum = 1, maximum = 10, description = "Agility parameter" },
                            MGK = new { type = "integer", minimum = 1, maximum = 10, description = "Magic parameter" },
                        },
                        required = new[] { "STR", "AGL", "MGK" }
                    },
                    RemoveAllTools)
            );
        }

        // Builds the Ollama config (server URL, bundled executable/model paths
        // under StreamingAssets, default model), kicks off server startup, and
        // creates the client. Client creation does not wait for the server —
        // readiness is reported via OnOllamaServerInitialized.
        private void InitializeEasyLocalLLMClient()
        {
            // Initialize client
            // If you have ollama.exe running to automatically start the server,
            // please stop it or specify a port that is not in use.
            var config = new OllamaConfig
            {
                ServerUrl = "http://localhost:11434", // default Ollama port
                ExecutablePath = Application.streamingAssetsPath + "/.EasyLocalLLM/Ollama/ollama.exe",
                ModelsDirectory = Application.streamingAssetsPath + "/.EasyLocalLLM/Ollama/models",
                // NOTE(review): model tag looks truncated ("...-250") — possibly
                // "...-2507" or similar; confirm against the model registry.
                DefaultModelName = "kamekichi128/qwen3-4b-instruct-250",
                AutoStartServer = true,
                DebugMode = true,
            };
            OllamaServerManager.Initialize(config, OnOllamaServerInitialized);
            client = LLMClientFactory.CreateOllamaClient(config);
            Debug.Log("✓ Client initialized");
        }

        // Server-startup callback: on success, start loading the default model
        // with a 180 s timeout, reporting progress to OnModelRunnable.
        // NOTE(review): parameter name "successed" is non-standard English
        // ("succeeded"); left as-is since it is part of this method's signature.
        private void OnOllamaServerInitialized(bool successed)
        {
            if (successed)
            {
                Debug.Log("✓ Ollama server initialized successfully.");
                StartCoroutine(client.LoadModelRunnable(client.GetConfig().DefaultModelName, 180.0f, OnModelRunnable, true));
            }
            else
            {
                Debug.LogError("✗ Failed to initialize Ollama server.");
            }
        }

        // Model-load progress callback. On successful completion, restores chat
        // history (LoadHistory, defined elsewhere in this file) and enables the UI.
        // The progress line below logs on every callback, including the final one.
        private void OnModelRunnable(LoadModelProgress progress)
        {
            if (progress.IsCompleted)
            {
                if (progress.IsSuccessed)
                {
                    Debug.Log("✓ Model is runnable.");
                    LoadHistory();
                    EnableUI();
                }
                else
                {
                    Debug.LogError($"✗ Model failed to load: {progress.Message}");
                }
            }
            Debug.Log($"Model loading progress: {progress.Progress * 100}% | {progress.Message}");
        }

        // Wires up the UI Toolkit elements once the model is ready.
        // NOTE(review): this chunk is truncated mid-statement here; the query
        // below presumably continues as root.Q<Button>(...) or similar — the
        // generic argument appears stripped by extraction.
        private void EnableUI()
        {
            var root = UIDocument.rootVisualElement;
            var sendAsync = root.Q