|
1 | 1 | # frozen_string_literal: true
|
2 | 2 |
|
3 |
| -RSpec.describe "Admin dashboard", type: :system do |
| 3 | +RSpec.describe "Managing LLM configurations", type: :system do |
4 | 4 | fab!(:admin)
|
5 | 5 |
|
6 |
| - it "correctly sets defaults" do |
  # Shared setup for every example: the AI bot feature must be enabled
  # (the LLM admin UI lives behind it) and we must be signed in as the
  # fabricated admin before visiting any /admin/plugins/discourse-ai page.
  before do
    SiteSetting.ai_bot_enabled = true

    sign_in(admin)
  end
10 | 10 |
|
11 |
| - visit "/admin/plugins/discourse-ai/ai-llms" |
12 |
| - |
13 |
| - find(".ai-llms-list-editor__new").click() |
14 |
| - |
| 11 | + def select_preset(option) |
15 | 12 | select_kit = PageObjects::Components::SelectKit.new(".ai-llm-editor__presets")
|
16 | 13 |
|
17 | 14 | select_kit.expand
|
18 | 15 | select_kit.select_row_by_value("anthropic-claude-3-haiku")
|
19 | 16 |
|
20 | 17 | find(".ai-llm-editor__next").click()
|
| 18 | + end |
| 19 | + |
| 20 | + it "correctly sets defaults" do |
| 21 | + visit "/admin/plugins/discourse-ai/ai-llms" |
| 22 | + |
| 23 | + find(".ai-llms-list-editor__new").click() |
| 24 | + select_preset("anthropic-claude-3-haiku") |
| 25 | + |
21 | 26 | find("input.ai-llm-editor__api-key").fill_in(with: "abcd")
|
22 | 27 |
|
23 | 28 | PageObjects::Components::DToggleSwitch.new(".ai-llm-editor__enabled-chat-bot").toggle
|
|
41 | 46 | expect(llm.display_name).to eq(model_preset[:display_name])
|
42 | 47 | expect(llm.user_id).not_to be_nil
|
43 | 48 | end
|
| 49 | + |
| 50 | + it "manually configures an LLM" do |
| 51 | + visit "/admin/plugins/discourse-ai/ai-llms" |
| 52 | + |
| 53 | + find(".ai-llms-list-editor__new").click() |
| 54 | + select_preset("none") |
| 55 | + |
| 56 | + find("input.ai-llm-editor__display-name").fill_in(with: "Self-hosted LLM") |
| 57 | + find("input.ai-llm-editor__name").fill_in(with: "llava-hf/llava-v1.6-mistral-7b-hf") |
| 58 | + find("input.ai-llm-editor__url").fill_in(with: "srv://self-hostest.test") |
| 59 | + find("input.ai-llm-editor__api-key").fill_in(with: "1234") |
| 60 | + find("input.ai-llm-editor__max-prompt-tokens").fill_in(with: 8000) |
| 61 | + |
| 62 | + find(".ai-llm-editor__provider").click |
| 63 | + find(".select-kit-row[data-value=\"vllm\"]").click |
| 64 | + |
| 65 | + find(".ai-llm-editor__tokenizer").click |
| 66 | + find(".select-kit-row[data-name=\"Llama3Tokenizer\"]").click |
| 67 | + |
| 68 | + find(".ai-llm-editor__vision-enabled input").click |
| 69 | + |
| 70 | + PageObjects::Components::DToggleSwitch.new(".ai-llm-editor__enabled-chat-bot").toggle |
| 71 | + |
| 72 | + find(".ai-llm-editor__save").click() |
| 73 | + |
| 74 | + expect(page).to have_current_path("/admin/plugins/discourse-ai/ai-llms") |
| 75 | + |
| 76 | + llm = LlmModel.order(:id).last |
| 77 | + |
| 78 | + expect(llm.display_name).to eq("Self-hosted LLM") |
| 79 | + expect(llm.name).to eq("llava-hf/llava-v1.6-mistral-7b-hf") |
| 80 | + expect(llm.url).to eq("srv://self-hostest.test") |
| 81 | + expect(llm.tokenizer).to eq("DiscourseAi::Tokenizer::Llama3Tokenizer") |
| 82 | + expect(llm.max_prompt_tokens.to_i).to eq(8000) |
| 83 | + expect(llm.provider).to eq("vllm") |
| 84 | + expect(llm.vision_enabled).to eq(true) |
| 85 | + expect(llm.user_id).not_to be_nil |
| 86 | + end |
44 | 87 | end
|
0 commit comments