Skip to content

Commit bccedef

Browse files
committed
Update prompts
Signed-off-by: Michael Yuan <michael@secondstate.io>
1 parent a0f739f commit bccedef

File tree

1 file changed

+16
-16
lines changed

1 file changed

+16
-16
lines changed

src/github-pr-summary.rs

Lines changed: 16 additions & 16 deletions
Original file line numberDiff line numberDiff line change
@@ -14,8 +14,8 @@ use llmservice_flows::{
1414
use std::env;
1515

1616
// The soft character limit of the input context size
17-
// The codestral has a context length of 32k, and we allow 8k context here
18-
static CHAR_SOFT_LIMIT : usize = 8192;
17+
// The codestral has a context length of 16k tokens, and we allow 16k chars of context here
18+
static CHAR_SOFT_LIMIT : usize = 16384;
1919

2020
#[no_mangle]
2121
#[tokio::main(flavor = "current_thread")]
@@ -89,7 +89,7 @@ async fn handler(payload: EventPayload) {
8989
// return;
9090
// }
9191
// TODO: Makeshift but operational
92-
if body.starts_with("Hello, I am a [code review bot]") {
92+
if body.starts_with("Hello, I am a [PR summary bot]") {
9393
log::info!("Ignore comment via bot");
9494
return;
9595
};
@@ -108,11 +108,11 @@ async fn handler(payload: EventPayload) {
108108
let issues = octo.issues(owner.clone(), repo.clone());
109109
let mut comment_id: CommentId = 0u64.into();
110110
if new_commit {
111-
// Find the first "Hello, I am a [code review bot]" comment to update
111+
// Find the first "Hello, I am a [PR summary bot]" comment to update
112112
match issues.list_comments(pull_number).send().await {
113113
Ok(comments) => {
114114
for c in comments.items {
115-
if c.body.unwrap_or_default().starts_with("Hello, I am a [code review bot]") {
115+
if c.body.unwrap_or_default().starts_with("Hello, I am a [PR summary bot]") {
116116
comment_id = c.id;
117117
break;
118118
}
@@ -125,7 +125,7 @@ async fn handler(payload: EventPayload) {
125125
}
126126
} else {
127127
// PR OPEN or Trigger phrase: create a new comment
128-
match issues.create_comment(pull_number, "Hello, I am a [code review bot](https://github.com/flows-network/github-pr-summary/) on [flows.network](https://flows.network/).\n\nIt could take a few minutes for me to analyze this PR. Relax, grab a cup of coffee and check back later. Thanks!").await {
128+
match issues.create_comment(pull_number, "Hello, I am a [PR summary bot](https://github.com/flows-network/github-pr-summary/) on [flows.network](https://flows.network/).\n\nIt could take a few minutes for me to analyze this PR. Relax, grab a cup of coffee and check back later. Thanks!").await {
129129
Ok(comment) => {
130130
comment_id = comment.id;
131131
}
@@ -168,22 +168,22 @@ async fn handler(payload: EventPayload) {
168168
}
169169

170170
let chat_id = format!("PR#{pull_number}");
171-
let system = &format!("You are an experienced software developer. You will act as a reviewer for a GitHub Pull Request titled \"{}\".", title);
171+
let system = &format!("You are an experienced software developer. You will act as a reviewer for a GitHub Pull Request titled \"{}\". Please be as concise as possible while being accurate.", title);
172172
let mut lf = LLMServiceFlows::new(&llm_api_endpoint);
173-
lf.set_retry_times(3);
173+
// lf.set_retry_times(3);
174174

175175
let mut reviews: Vec<String> = Vec::new();
176176
let mut reviews_text = String::new();
177177
for (_i, commit) in commits.iter().enumerate() {
178178
let commit_hash = &commit[5..45];
179-
log::debug!("Sending patch to OpenAI: {}", commit_hash);
179+
log::debug!("Sending patch to LLM: {}", commit_hash);
180180
let co = ChatOptions {
181181
model: Some(&llm_model_name),
182182
restart: true,
183183
system_prompt: Some(system),
184184
..Default::default()
185185
};
186-
let question = "The following is a GitHub patch. Please summarize the key changes and identify potential problems. Start with the most important findings.\n\n".to_string() + truncate(commit, CHAR_SOFT_LIMIT);
186+
let question = "The following is a GitHub patch. Please summarize the key changes in concise points. Start with the most important findings.\n\n".to_string() + truncate(commit, CHAR_SOFT_LIMIT);
187187
match lf.chat_completion(&chat_id, &question, &co).await {
188188
Ok(r) => {
189189
if reviews_text.len() < CHAR_SOFT_LIMIT {
@@ -196,33 +196,33 @@ async fn handler(payload: EventPayload) {
196196
review.push_str(&r.choice);
197197
review.push_str("\n\n");
198198
reviews.push(review);
199-
log::debug!("Received OpenAI resp for patch: {}", commit_hash);
199+
log::debug!("Received LLM resp for patch: {}", commit_hash);
200200
}
201201
Err(e) => {
202-
log::error!("OpenAI returned an error for commit {commit_hash}: {}", e);
202+
log::error!("LLM returned an error for commit {commit_hash}: {}", e);
203203
}
204204
}
205205
}
206206

207207
let mut resp = String::new();
208-
resp.push_str("Hello, I am a [code review bot](https://github.com/flows-network/github-pr-summary/) on [flows.network](https://flows.network/). Here are my reviews of code commits in this PR.\n\n------\n\n");
208+
resp.push_str("Hello, I am a [PR summary bot](https://github.com/flows-network/github-pr-summary/) on [flows.network](https://flows.network/). Here are my reviews of code commits in this PR.\n\n------\n\n");
209209
if reviews.len() > 1 {
210-
log::debug!("Sending all reviews to OpenAI for summarization");
210+
log::debug!("Sending all reviews to LLM for summarization");
211211
let co = ChatOptions {
212212
model: Some(&llm_model_name),
213213
restart: true,
214214
system_prompt: Some(system),
215215
..Default::default()
216216
};
217-
let question = "Here is a set of summaries for software source code patches. Each summary starts with a ------ line. Please write an overall summary considering all the individual summary. Please present the potential issues and errors first, following by the most important findings, in your summary.\n\n".to_string() + &reviews_text;
217+
let question = "Here is a set of summaries for source code patches in this PR. Each summary starts with a ------ line. Write an overall summary. Present the potential issues and errors first, following by the most important findings, in your summary.\n\n".to_string() + &reviews_text;
218218
match lf.chat_completion(&chat_id, &question, &co).await {
219219
Ok(r) => {
220220
resp.push_str(&r.choice);
221221
resp.push_str("\n\n## Details\n\n");
222222
log::debug!("Received the overall summary");
223223
}
224224
Err(e) => {
225-
log::error!("OpenAI returned an error for the overall summary: {}", e);
225+
log::error!("LLM returned an error for the overall summary: {}", e);
226226
}
227227
}
228228
}

0 commit comments

Comments
 (0)