Skip to content

Commit be0b78c

Browse files
SamSaffron, Grubba27, and keegangeorge
authored
FEATURE: new endpoint for directly accessing a persona (#876)
The new `/admin/plugins/discourse-ai/ai-personas/stream-reply.json` endpoint was added. This endpoint streams data directly from a persona and can be used to access a persona from remote systems, leaving a paper trail in PMs about the conversation that happened. This endpoint is only accessible to admins. --------- Co-authored-by: Gabriel Grubba <70247653+Grubba27@users.noreply.github.com> Co-authored-by: Keegan George <kgeorge13@gmail.com>
1 parent 05790a6 commit be0b78c

File tree

11 files changed

+428
-12
lines changed

11 files changed

+428
-12
lines changed

app/controllers/discourse_ai/admin/ai_personas_controller.rb

Lines changed: 197 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -74,8 +74,205 @@ def destroy
7474
end
7575
end
7676

77+
class << self
  # Upper bound on threads concurrently streaming persona replies.
  POOL_SIZE = 10

  # Lazily-built shared pool; CachedThreadPool reclaims idle threads
  # after 30s so the pool shrinks back to zero when unused.
  def thread_pool
    @thread_pool ||=
      Concurrent::CachedThreadPool.new(min_threads: 0, max_threads: POOL_SIZE, idletime: 30)
  end

  # Runs +block+ on the pool, re-establishing the current multisite DB
  # connection inside the worker thread. In the test env the block runs
  # inline so specs stay single-threaded and deterministic.
  # think about a better way to handle cross thread connections
  def schedule_block(&block)
    if Rails.env.test?
      block.call
      return
    end

    # capture the db name on the request thread; the pool thread has no
    # multisite context of its own
    db = RailsMultisite::ConnectionManagement.current_db
    thread_pool.post do
      begin
        RailsMultisite::ConnectionManagement.with_connection(db) { block.call }
      rescue StandardError => e
        Discourse.warn_exception(e, message: "Discourse AI: Unable to stream reply")
      end
    end
  end
end
101+
102+
# Line terminator used when hand-writing the chunked HTTP response on
# the hijacked socket in queue_streamed_reply.
CRLF = "\r\n"

# POST .../ai-personas/stream-reply.json (admin only)
#
# Streams a persona's reply directly over the socket, leaving a paper
# trail of the conversation in a PM. Params:
#   persona_name / persona_id  - persona to address (name checked first)
#   query                      - the message to send (required)
#   username / user_unique_id  - speaker (one required); user_unique_id
#                                finds-or-creates a staged user
#   topic_id                   - optional existing PM to continue
def stream_reply
  persona =
    AiPersona.find_by(name: params[:persona_name]) ||
      AiPersona.find_by(id: params[:persona_id])
  return render_json_error(I18n.t("discourse_ai.errors.persona_not_found")) if persona.nil?

  return render_json_error(I18n.t("discourse_ai.errors.persona_disabled")) if !persona.enabled

  if persona.default_llm.blank?
    return render_json_error(I18n.t("discourse_ai.errors.no_default_llm"))
  end

  if params[:query].blank?
    return render_json_error(I18n.t("discourse_ai.errors.no_query_specified"))
  end

  # the persona must be backed by a bot user to author posts
  if !persona.user_id
    return render_json_error(I18n.t("discourse_ai.errors.no_user_for_persona"))
  end

  if !params[:username] && !params[:user_unique_id]
    return render_json_error(I18n.t("discourse_ai.errors.no_user_specified"))
  end

  user = nil

  if params[:username]
    user = User.find_by_username(params[:username])
    return render_json_error(I18n.t("discourse_ai.errors.user_not_found")) if user.nil?
  elsif params[:user_unique_id]
    user = stage_user
  end

  raise Discourse::NotFound if user.nil?

  topic_id = params[:topic_id].to_i
  topic = nil
  post = nil

  if topic_id > 0
    topic = Topic.find(topic_id)

    raise Discourse::NotFound if topic.nil?

    # only existing participants of the PM may post into it
    if topic.topic_allowed_users.where(user_id: user.id).empty?
      return render_json_error(I18n.t("discourse_ai.errors.user_not_allowed"))
    end

    post =
      PostCreator.create!(
        user,
        topic_id: topic_id,
        raw: params[:query],
        skip_validations: true,
      )
  else
    # no topic given: open a fresh PM between the speaker and the persona
    post =
      PostCreator.create!(
        user,
        title: I18n.t("discourse_ai.ai_bot.default_pm_prefix"),
        raw: params[:query],
        archetype: Archetype.private_message,
        target_usernames: "#{user.username},#{persona.user.username}",
        skip_validations: true,
      )

    topic = post.topic
  end

  # take over the raw socket; Rails renders nothing after this point
  hijack = request.env["rack.hijack"]
  io = hijack.call

  # NOTE(review): this overwrites the staged/target `user` with the admin
  # making the API call just before queueing — confirm the persona lookup
  # in queue_streamed_reply is intentionally scoped to current_user
  # rather than the post author above.
  user = current_user

  self.class.queue_streamed_reply(io, persona, user, topic, post)
end
180+
77181
private
78182

183+
# Custom-field name linking a caller-supplied external id to the staged
# user created for it (see stage_user); value is unique per migration.
AI_STREAM_CONVERSATION_UNIQUE_ID = "ai-stream-conversation-unique-id"

# keeping this in a static method so we don't capture ENV and other bits
# this allows us to release memory earlier
#
# Hand-writes an HTTP/1.1 chunked response on the hijacked socket `io`,
# streaming the persona's reply as JSON lines. Runs on the shared thread
# pool via schedule_block; errors are logged and the socket is always
# closed in the ensure block.
def self.queue_streamed_reply(io, persona, user, topic, post)
  schedule_block do
    begin
      # status line + headers emitted by hand — Rails is out of the
      # picture once rack.hijack hands us the socket
      io.write "HTTP/1.1 200 OK"
      io.write CRLF
      io.write "Content-Type: text/plain; charset=utf-8"
      io.write CRLF
      io.write "Transfer-Encoding: chunked"
      io.write CRLF
      io.write "Cache-Control: no-cache, no-store, must-revalidate"
      io.write CRLF
      io.write "Connection: close"
      io.write CRLF
      # disable nginx proxy buffering so partials reach the client live
      io.write "X-Accel-Buffering: no"
      io.write CRLF
      io.write "X-Content-Type-Options: nosniff"
      io.write CRLF
      io.write CRLF
      io.flush

      persona_class =
        DiscourseAi::AiBot::Personas::Persona.find_by(id: persona.id, user: user)
      bot = DiscourseAi::AiBot::Bot.as(persona.user, persona: persona_class.new)

      # first chunk: metadata the caller needs to correlate the stream
      data =
        { topic_id: topic.id, bot_user_id: persona.user.id, persona_id: persona.id }.to_json +
          "\n\n"

      # chunked transfer encoding framing: hex size, CRLF, payload, CRLF
      io.write data.bytesize.to_s(16)
      io.write CRLF
      io.write data
      io.write CRLF

      DiscourseAi::AiBot::Playground
        .new(bot)
        .reply_to(post) do |partial|
          next if partial.length == 0

          data = { partial: partial }.to_json + "\n\n"

          data.force_encoding("UTF-8")

          io.write data.bytesize.to_s(16)
          io.write CRLF
          io.write data
          io.write CRLF
          io.flush
        end

      # zero-length chunk terminates the chunked body
      io.write "0"
      io.write CRLF
      io.write CRLF

      io.flush
      # NOTE(review): io.done looks server-specific — confirm the
      # deployed server's hijacked IO responds to it
      io.done
    rescue StandardError => e
      # make it a tiny bit easier to debug in dev, this is tricky
      # multi-threaded code that exhibits various limitations in rails
      p e if Rails.env.development?
      Discourse.warn_exception(e, message: "Discourse AI: Unable to stream reply")
    ensure
      io.close
    end
  end
end
252+
253+
# Finds — or lazily creates — the staged user bound to the caller's
# params[:user_unique_id]. A staged, inactive user with a throwaway
# email is created on first sight of an id; the unique partial index on
# the custom field guards against duplicates.
def stage_user
  external_id = params[:user_unique_id].to_s

  existing =
    UserCustomField.find_by(name: AI_STREAM_CONVERSATION_UNIQUE_ID, value: external_id)
  return existing.user if existing

  # fall back to the external id when no preferred_username was supplied
  suggested = UserNameSuggester.suggest(params[:preferred_username] || external_id)

  staged =
    User.new(
      username: suggested,
      email: "#{SecureRandom.hex}@invalid.com",
      staged: true,
      active: false,
    )
  staged.custom_fields[AI_STREAM_CONVERSATION_UNIQUE_ID] = external_id
  staged.save!
  staged
end
275+
79276
def find_ai_persona
80277
@ai_persona = AiPersona.find(params[:id])
81278
end

app/models/completion_prompt.rb

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -56,7 +56,7 @@ def each_message_length
5656
messages.each_with_index do |msg, idx|
5757
next if msg["content"].length <= 1000
5858

59-
errors.add(:messages, I18n.t("errors.prompt_message_length", idx: idx + 1))
59+
errors.add(:messages, I18n.t("discourse_ai.errors.prompt_message_length", idx: idx + 1))
6060
end
6161
end
6262
end

config/locales/client.en.yml

Lines changed: 2 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -5,7 +5,8 @@ en:
55
scopes:
66
descriptions:
77
discourse_ai:
8-
search: "Allows semantic search via the /discourse-ai/embeddings/semantic-search endpoint."
8+
search: "Allows semantic search"
9+
stream_completion: "Allows streaming ai persona completions"
910

1011
site_settings:
1112
categories:

config/locales/server.en.yml

Lines changed: 12 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -106,10 +106,6 @@ en:
106106
flagged_by_toxicity: The AI plugin flagged this after classifying it as toxic.
107107
flagged_by_nsfw: The AI plugin flagged this after classifying at least one of the attached images as NSFW.
108108

109-
errors:
110-
prompt_message_length: The message %{idx} is over the 1000 character limit.
111-
invalid_prompt_role: The message %{idx} has an invalid role.
112-
113109
reports:
114110
overall_sentiment:
115111
title: "Overall sentiment"
@@ -169,6 +165,7 @@ en:
169165
failed_to_share: "Failed to share the conversation"
170166
conversation_deleted: "Conversation share deleted successfully"
171167
ai_bot:
168+
default_pm_prefix: "[Untitled AI bot PM]"
172169
personas:
173170
default_llm_required: "Default LLM model is required prior to enabling Chat"
174171
cannot_delete_system_persona: "System personas cannot be deleted, please disable it instead"
@@ -347,3 +344,14 @@ en:
347344
llm_models:
348345
missing_provider_param: "%{param} can't be blank"
349346
bedrock_invalid_url: "Please complete all the fields to contact this model."
347+
348+
errors:
349+
no_query_specified: The query parameter is required, please specify it.
350+
no_user_for_persona: The persona specified does not have a user associated with it.
351+
persona_not_found: The persona specified does not exist. Check the persona_name or persona_id params.
352+
no_user_specified: The username or the user_unique_id parameter is required, please specify it.
353+
user_not_found: The user specified does not exist. Check the username param.
354+
persona_disabled: The persona specified is disabled. Check the persona_name or persona_id params.
355+
no_default_llm: The persona must have a default_llm defined.
356+
user_not_allowed: The user is not allowed to participate in the topic.
357+
prompt_message_length: The message %{idx} is over the 1000 character limit.

config/routes.rb

Lines changed: 2 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -50,6 +50,8 @@
5050
path: "ai-personas",
5151
controller: "discourse_ai/admin/ai_personas"
5252

53+
post "/ai-personas/stream-reply" => "discourse_ai/admin/ai_personas#stream_reply"
54+
5355
resources(
5456
:ai_tools,
5557
only: %i[index create show update destroy],
Lines changed: 9 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,9 @@
1+
# frozen_string_literal: true

# Guarantees at most one staged user per external conversation id: the
# ai-stream-conversation-unique-id custom field value must be unique
# (partial unique index scoped to that field name).
class AddUniqueAiStreamConversationUserIdIndex < ActiveRecord::Migration[7.1]
  def change
    add_index :user_custom_fields,
              [:value],
              unique: true,
              where: "name = 'ai-stream-conversation-unique-id'"
  end
end

lib/ai_bot/bot.rb

Lines changed: 4 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -113,7 +113,7 @@ def reply(context, &update_blk)
113113
tool_found = true
114114
# a bit hacky, but extra newlines do no harm
115115
if needs_newlines
116-
update_blk.call("\n\n", cancel, nil)
116+
update_blk.call("\n\n", cancel)
117117
needs_newlines = false
118118
end
119119

@@ -123,7 +123,7 @@ def reply(context, &update_blk)
123123
end
124124
else
125125
needs_newlines = true
126-
update_blk.call(partial, cancel, nil)
126+
update_blk.call(partial, cancel)
127127
end
128128
end
129129

@@ -191,9 +191,9 @@ def invoke_tool(tool, llm, cancel, context, &update_blk)
191191
tool_details = build_placeholder(tool.summary, tool.details, custom_raw: tool.custom_raw)
192192

193193
if context[:skip_tool_details] && tool.custom_raw.present?
194-
update_blk.call(tool.custom_raw, cancel, nil)
194+
update_blk.call(tool.custom_raw, cancel, nil, :custom_raw)
195195
elsif !context[:skip_tool_details]
196-
update_blk.call(tool_details, cancel, nil)
196+
update_blk.call(tool_details, cancel, nil, :tool_details)
197197
end
198198

199199
result

lib/ai_bot/entry_point.rb

Lines changed: 5 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -189,6 +189,11 @@ def inject_into(plugin)
189189
plugin.register_editable_topic_custom_field(:ai_persona_id)
190190
end
191191

192+
plugin.add_api_key_scope(
193+
:discourse_ai,
194+
{ stream_completion: { actions: %w[discourse_ai/admin/ai_personas#stream_reply] } },
195+
)
196+
192197
plugin.on(:site_setting_changed) do |name, old_value, new_value|
193198
if name == :ai_embeddings_model && SiteSetting.ai_embeddings_enabled? &&
194199
new_value != old_value

lib/ai_bot/playground.rb

Lines changed: 9 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -390,7 +390,12 @@ def get_context(participants:, conversation_context:, user:, skip_tool_details:
390390
result
391391
end
392392

393-
def reply_to(post)
393+
def reply_to(post, &blk)
394+
# this is a multithreading issue
395+
# post custom prompt is needed and it may not
396+
# be properly loaded, ensure it is loaded
397+
PostCustomPrompt.none
398+
394399
reply = +""
395400
start = Time.now
396401

@@ -441,11 +446,13 @@ def reply_to(post)
441446
context[:skip_tool_details] ||= !bot.persona.class.tool_details
442447

443448
new_custom_prompts =
444-
bot.reply(context) do |partial, cancel, placeholder|
449+
bot.reply(context) do |partial, cancel, placeholder, type|
445450
reply << partial
446451
raw = reply.dup
447452
raw << "\n\n" << placeholder if placeholder.present? && !context[:skip_tool_details]
448453

454+
blk.call(partial) if blk && type != :tool_details
455+
449456
if stream_reply && !Discourse.redis.get(redis_stream_key)
450457
cancel&.call
451458
reply_post.update!(raw: reply, cooked: PrettyText.cook(reply))

lib/completions/endpoints/fake.rb

Lines changed: 13 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -72,6 +72,10 @@ def self.with_fake_content(content)
7272
@fake_content = nil
7373
end
7474

75+
# Overrides the canned completion content for tests (unscoped variant of
# with_fake_content). An Array is consumed one entry per completion in
# perform_completion!.
def self.fake_content=(content)
  @fake_content = content
end

# Content served by the fake endpoint; falls back to STOCK_CONTENT when
# nothing was set.
def self.fake_content
  @fake_content || STOCK_CONTENT
end
@@ -100,6 +104,13 @@ def self.last_call=(params)
100104
@last_call = params
101105
end
102106

107+
# Clears all class-level fake-endpoint state so one spec's configuration
# cannot leak into the next.
def self.reset!
  @last_call = nil
  @fake_content = nil
  @delays = nil
  @chunk_count = nil
end
113+
103114
def perform_completion!(
104115
dialect,
105116
user,
@@ -111,6 +122,8 @@ def perform_completion!(
111122

112123
content = self.class.fake_content
113124

125+
content = content.shift if content.is_a?(Array)
126+
114127
if block_given?
115128
split_indices = (1...content.length).to_a.sample(self.class.chunk_count - 1).sort
116129
indexes = [0, *split_indices, content.length]

0 commit comments

Comments
 (0)