CICO-111286: Revert the dup comment changes

Arun Murugan 2024-06-12 04:04:06 -04:00
parent c2dee0d45a
commit a712814eeb
3 changed files with 20 additions and 114 deletions

dist/index.js (vendored, 56 lines changed)

@@ -86,27 +86,9 @@ function getDiff(owner, repo, pull_number) {
         return response.data;
     });
 }
-function getExistingComments(owner, repo, pull_number) {
-    return __awaiter(this, void 0, void 0, function* () {
-        const commentsResponse = yield octokit.pulls.listReviewComments({
-            owner,
-            repo,
-            pull_number,
-        });
-        return commentsResponse.data
-            .filter(comment => comment.line !== undefined)
-            .map(comment => ({
-            path: comment.path,
-            line: comment.line,
-            body: comment.body,
-        }));
-    });
-}
-function analyzeCode(parsedDiff, prDetails, existingComments) {
+function analyzeCode(parsedDiff, prDetails) {
     return __awaiter(this, void 0, void 0, function* () {
         const comments = [];
-        // Log the parsed diff for debugging
-        console.log("Parsed Diff:", JSON.stringify(parsedDiff, null, 2));
         for (const file of parsedDiff) {
             if (file.to === "/dev/null")
                 continue; // Ignore deleted files
@@ -115,22 +97,12 @@ function analyzeCode(parsedDiff, prDetails, existingComments) {
                 const aiResponse = yield getAIResponse(prompt);
                 if (aiResponse) {
                     const newComments = createComment(file, chunk, aiResponse);
-                    for (const comment of newComments) {
-                        console.log("Processing comment:", comment);
-                        const duplicate = existingComments.some(existingComment => existingComment.path === comment.path &&
-                            existingComment.line === comment.line &&
-                            existingComment.body.trim() === comment.body.trim());
-                        if (!duplicate) {
-                            comments.push(comment);
-                        }
-                        else {
-                            console.log("Duplicate comment found, skipping:", comment);
-                        }
+                    if (newComments) {
+                        comments.push(...newComments);
                     }
                 }
             }
         }
-        console.log("Final comments to add:", JSON.stringify(comments, null, 2));
         return comments;
     });
 }
@@ -460,16 +432,14 @@ function createComment(file, chunk, aiResponses) {
             return [];
         }
         const commentLine = "ln" in change ? change.ln : "ln2" in change ? change.ln2 : 0;
-        const diff_hunk = chunk.content + "\n" + chunk.changes.map(c => `${c.type === 'add' ? '+' : c.type === 'del' ? '-' : ' '} ${c.content}`).join("\n");
         return {
             body: aiResponse.reviewComment,
             path: file.to,
             line: commentLine,
-            diff_hunk: diff_hunk.trim(),
         };
     });
 }
-function createReviewComment(owner, repo, pull_number, comments, commit_id) {
+function createReviewComment(owner, repo, pull_number, comments) {
     return __awaiter(this, void 0, void 0, function* () {
         const validComments = comments.filter(comment => comment.path && comment.line > 0 && comment.body.trim() !== "");
         if (validComments.length === 0) {
@@ -479,18 +449,14 @@ function createReviewComment(owner, repo, pull_number, comments, commit_id) {
         console.log("Attempting to create review comments:", JSON.stringify(validComments, null, 2));
         for (const comment of validComments) {
             try {
-                yield octokit.pulls.createReviewComment({
+                yield octokit.pulls.createReview({
                     owner,
                     repo,
                     pull_number,
                     body: comment.body,
                     path: comment.path,
                     line: comment.line,
-                    side: 'RIGHT',
-                    commit_id,
-                    start_line: comment.line,
-                    start_side: 'RIGHT',
-                    diff_hunk: comment.diff_hunk, // Include diff_hunk in the request
+                    event: 'COMMENT',
                 });
             }
             catch (error) {
@@ -500,7 +466,6 @@ function createReviewComment(owner, repo, pull_number, comments, commit_id) {
                     repo,
                     pull_number,
                     comment,
-                    commit_id,
                 });
             }
         }
@@ -512,10 +477,8 @@ function main() {
         const prDetails = yield getPRDetails();
         let diff;
         const eventData = JSON.parse((0, fs_1.readFileSync)((_a = process.env.GITHUB_EVENT_PATH) !== null && _a !== void 0 ? _a : "", "utf8"));
-        let commit_id;
         if (eventData.action === "opened") {
             diff = yield getDiff(prDetails.owner, prDetails.repo, prDetails.pull_number);
-            commit_id = eventData.pull_request.head.sha;
         }
         else if (eventData.action === "synchronize") {
             const newBaseSha = eventData.before;
@@ -530,7 +493,6 @@ function main() {
                 head: newHeadSha,
             });
             diff = String(response.data);
-            commit_id = newHeadSha;
         }
         else {
             console.log("Unsupported event:", process.env.GITHUB_EVENT_NAME);
@@ -549,11 +511,9 @@ function main() {
         const filteredDiff = parsedDiff.filter((file) => {
             return !excludePatterns.some((pattern) => { var _a; return (0, minimatch_1.default)((_a = file.to) !== null && _a !== void 0 ? _a : "", pattern); });
         });
-        const existingComments = yield getExistingComments(prDetails.owner, prDetails.repo, prDetails.pull_number);
-        const comments = yield analyzeCode(filteredDiff, prDetails, existingComments);
+        const comments = yield analyzeCode(filteredDiff, prDetails);
         if (comments.length > 0) {
-            yield createReviewComment(prDetails.owner, prDetails.repo, prDetails.pull_number, comments, commit_id // Pass commit_id to the function
-            );
+            yield createReviewComment(prDetails.owner, prDetails.repo, prDetails.pull_number, comments);
         }
     });
 }

dist/index.js.map (vendored, 2 lines changed)

File diff suppressed because one or more lines are too long


@@ -57,34 +57,11 @@ async function getDiff(
   return response.data;
 }
-async function getExistingComments(
-  owner: string,
-  repo: string,
-  pull_number: number
-): Promise<Array<{ path: string; line: number; body: string }>> {
-  const commentsResponse = await octokit.pulls.listReviewComments({
-    owner,
-    repo,
-    pull_number,
-  });
-  return commentsResponse.data
-    .filter(comment => comment.line !== undefined)
-    .map(comment => ({
-      path: comment.path,
-      line: comment.line!,
-      body: comment.body,
-    }));
-}
 async function analyzeCode(
   parsedDiff: File[],
-  prDetails: PRDetails,
-  existingComments: Array<{ path: string; line: number; body: string }>
-): Promise<Array<{ body: string; path: string; line: number; diff_hunk: string }>> {
-  const comments: Array<{ body: string; path: string; line: number; diff_hunk: string }> = [];
-  // Log the parsed diff for debugging
-  console.log("Parsed Diff:", JSON.stringify(parsedDiff, null, 2));
+  prDetails: PRDetails
+): Promise<Array<{ body: string; path: string; line: number }>> {
+  const comments: Array<{ body: string; path: string; line: number }> = [];
   for (const file of parsedDiff) {
     if (file.to === "/dev/null") continue; // Ignore deleted files
@@ -93,24 +70,12 @@ async function analyzeCode(
       const aiResponse = await getAIResponse(prompt);
       if (aiResponse) {
         const newComments = createComment(file, chunk, aiResponse);
-        for (const comment of newComments) {
-          console.log("Processing comment:", comment);
-          const duplicate = existingComments.some(
-            existingComment =>
-              existingComment.path === comment.path &&
-              existingComment.line === comment.line &&
-              existingComment.body.trim() === comment.body.trim()
-          );
-          if (!duplicate) {
-            comments.push(comment);
-          } else {
-            console.log("Duplicate comment found, skipping:", comment);
-          }
+        if (newComments) {
+          comments.push(...newComments);
         }
       }
     }
   }
-  }
-  console.log("Final comments to add:", JSON.stringify(comments, null, 2));
   return comments;
 }
@@ -440,7 +405,7 @@ function createComment(
     lineNumber: string;
     reviewComment: string;
   }>
-): Array<{ body: string; path: string; line: number; diff_hunk: string }> {
+): Array<{ body: string; path: string; line: number }> {
   return aiResponses.flatMap((aiResponse) => {
     if (!file.to) {
       return [];
@@ -461,13 +426,11 @@ function createComment(
     }
     const commentLine = "ln" in change ? change.ln : "ln2" in change ? change.ln2 : 0;
-    const diff_hunk = chunk.content + "\n" + chunk.changes.map(c => `${c.type === 'add' ? '+' : c.type === 'del' ? '-' : ' '} ${c.content}`).join("\n");
     return {
       body: aiResponse.reviewComment,
       path: file.to,
       line: commentLine,
-      diff_hunk: diff_hunk.trim(),
     };
   });
 }
@@ -476,8 +439,7 @@ async function createReviewComment(
   owner: string,
   repo: string,
   pull_number: number,
-  comments: Array<{ body: string; path: string; line: number; diff_hunk: string }>,
-  commit_id: string
+  comments: Array<{ body: string; path: string; line: number }>
 ): Promise<void> {
   const validComments = comments.filter(comment => comment.path && comment.line > 0 && comment.body.trim() !== "");
@@ -490,18 +452,14 @@ async function createReviewComment(
   for (const comment of validComments) {
     try {
-      await octokit.pulls.createReviewComment({
+      await octokit.pulls.createReview({
         owner,
         repo,
         pull_number,
         body: comment.body,
         path: comment.path,
         line: comment.line,
-        side: 'RIGHT', // Ensure the comment is on the right side of the diff
-        commit_id, // Include commit_id in the request
-        start_line: comment.line,
-        start_side: 'RIGHT',
-        diff_hunk: comment.diff_hunk, // Include diff_hunk in the request
+        event: 'COMMENT',
       });
     } catch (error) {
       console.error("Error creating review comment:", error);
@@ -510,7 +468,6 @@ async function createReviewComment(
         repo,
         pull_number,
         comment,
-        commit_id,
       });
     }
   }
@@ -523,15 +480,12 @@ async function main() {
     readFileSync(process.env.GITHUB_EVENT_PATH ?? "", "utf8")
   );
-  let commit_id: string;
   if (eventData.action === "opened") {
     diff = await getDiff(
       prDetails.owner,
       prDetails.repo,
       prDetails.pull_number
     );
-    commit_id = eventData.pull_request.head.sha;
   } else if (eventData.action === "synchronize") {
     const newBaseSha = eventData.before;
     const newHeadSha = eventData.after;
@@ -547,7 +501,6 @@ async function main() {
     });
     diff = String(response.data);
-    commit_id = newHeadSha;
   } else {
     console.log("Unsupported event:", process.env.GITHUB_EVENT_NAME);
     return;
@@ -573,20 +526,13 @@ async function main() {
     );
   });
-  const existingComments = await getExistingComments(
-    prDetails.owner,
-    prDetails.repo,
-    prDetails.pull_number
-  );
-  const comments = await analyzeCode(filteredDiff, prDetails, existingComments);
+  const comments = await analyzeCode(filteredDiff, prDetails);
  if (comments.length > 0) {
     await createReviewComment(
       prDetails.owner,
       prDetails.repo,
       prDetails.pull_number,
-      comments,
-      commit_id // Pass commit_id to the function
+      comments
     );
   }
 }