Added tests and debug logging; fixed max tokens bug

Will Poynter 2024-10-01 14:53:33 +01:00
parent c1c54576e4
commit 7027b3ba7b
No known key found for this signature in database
GPG key ID: AB3BE72A7CA5B1A9
5 changed files with 165 additions and 44 deletions


@@ -12,7 +12,7 @@ jobs:
       - name: Checkout repository
         uses: actions/checkout@v3
       - name: Code Review
-        uses: researchwiseai/ai-codereviewer@moving-to-o1
+        uses: researchwiseai/ai-codereviewer@main
         with:
           GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
           OPENAI_API_KEY: ${{ secrets.OPENAI_API_KEY_V2 }}

dist/index.js (vendored), 74 changes

@@ -101,6 +101,7 @@ require("./sourcemap-register.js");
   function (mod) {
     return mod && mod.__esModule ? mod : { default: mod };
   };
+var _a, _b, _c, _d;
 Object.defineProperty(exports, "__esModule", { value: true });
 const fs_1 = __nccwpck_require__(7147);
 const core = __importStar(__nccwpck_require__(2186));
@@ -108,9 +109,18 @@ require("./sourcemap-register.js");
 const rest_1 = __nccwpck_require__(5375);
 const parse_diff_1 = __importDefault(__nccwpck_require__(4833));
 const minimatch_1 = __importDefault(__nccwpck_require__(2002));
-const GITHUB_TOKEN = core.getInput("GITHUB_TOKEN");
-const OPENAI_API_KEY = core.getInput("OPENAI_API_KEY");
-const OPENAI_API_MODEL = core.getInput("OPENAI_API_MODEL");
+const GITHUB_TOKEN =
+  (_a = process.env.GITHUB_TOKEN) !== null && _a !== void 0
+    ? _a
+    : core.getInput("GITHUB_TOKEN");
+const OPENAI_API_KEY =
+  (_b = process.env.OPENAI_API_KEY) !== null && _b !== void 0
+    ? _b
+    : core.getInput("OPENAI_API_KEY");
+const OPENAI_API_MODEL =
+  (_c = process.env.OPENAI_API_MODEL) !== null && _c !== void 0
+    ? _c
+    : core.getInput("OPENAI_API_MODEL");
 const octokit = new rest_1.Octokit({ auth: GITHUB_TOKEN });
 const openai = new openai_1.default({
   apiKey: OPENAI_API_KEY,
@@ -149,14 +159,16 @@ require("./sourcemap-register.js");
     return response.data;
   });
 }
-function analyzeCode(parsedDiff, prDetails) {
+function analyzeCode(parsedDiff, prDetails, verbose = false) {
   return __awaiter(this, void 0, void 0, function* () {
     const comments = [];
     for (const file of parsedDiff) {
       if (file.to === "/dev/null") continue; // Ignore deleted files
       for (const chunk of file.chunks) {
         const prompt = createPrompt(file, chunk, prDetails);
-        const aiResponse = yield getAIResponse(prompt);
+        if (verbose) console.log("Prompt:", prompt);
+        const aiResponse = yield getAIResponse(prompt, verbose);
+        if (verbose) console.log("AI Response:", aiResponse);
         if (aiResponse) {
           const newComments = createComment(file, chunk, aiResponse);
           if (newComments) {
@@ -199,12 +211,11 @@ ${chunk.changes
 \`\`\`
 `;
 }
-function getAIResponse(prompt) {
+function getAIResponse(prompt, verbose = false) {
   var _a, _b;
   return __awaiter(this, void 0, void 0, function* () {
     const queryConfig = {
       model: OPENAI_API_MODEL,
-      max_completion_tokens: 700,
     };
     try {
       const response = yield openai.chat.completions.create(
@@ -217,6 +228,7 @@ ${chunk.changes
           ],
         })
       );
+      if (verbose) console.log("Response:", response);
       const res =
         ((_b =
           (_a = response.choices[0].message) === null || _a === void 0
@@ -249,19 +261,25 @@ ${chunk.changes
 }
 function createReviewComment(owner, repo, pull_number, comments) {
   return __awaiter(this, void 0, void 0, function* () {
-    yield octokit.pulls.createReview({
-      owner,
-      repo,
-      pull_number,
-      comments,
-      event: "COMMENT",
-    });
+    try {
+      yield octokit.pulls.createReview({
+        owner,
+        repo,
+        pull_number,
+        comments,
+        event: "COMMENT",
+      });
+    } catch (error) {
+      console.error(error);
+      throw error;
+    }
   });
 }
-function main() {
-  var _a;
+function main(verbose = false) {
+  var _a, _b;
   return __awaiter(this, void 0, void 0, function* () {
     const prDetails = yield getPRDetails();
+    if (verbose) console.log("PR Details:", prDetails);
     let diff;
     const eventData = JSON.parse(
       (0, fs_1.readFileSync)(
@@ -271,13 +289,16 @@ ${chunk.changes
         "utf8"
       )
     );
+    if (verbose) console.log("Event Data:", eventData);
     if (eventData.action === "opened") {
+      if (verbose) console.log('Event action is "opened"');
       diff = yield getDiff(
         prDetails.owner,
         prDetails.repo,
         prDetails.pull_number
       );
     } else if (eventData.action === "synchronize") {
+      if (verbose) console.log('Event action is "synchronize"');
       const newBaseSha = eventData.before;
       const newHeadSha = eventData.after;
       const response = yield octokit.repos.compareCommits({
@@ -289,20 +310,27 @@ ${chunk.changes
         base: newBaseSha,
         head: newHeadSha,
       });
+      if (verbose) console.log("Response:", response);
       diff = String(response.data);
     } else {
       console.log("Unsupported event:", process.env.GITHUB_EVENT_NAME);
       return;
     }
+    if (verbose) console.log("Diff:", diff);
     if (!diff) {
       console.log("No diff found");
       return;
     }
     const parsedDiff = (0, parse_diff_1.default)(diff);
-    const excludePatterns = core
-      .getInput("exclude")
+    if (verbose) console.log("Parsed Diff:", parsedDiff);
+    const excludePatterns = (
+      (_b = process.env.EXCLUDE) !== null && _b !== void 0
+        ? _b
+        : core.getInput("exclude")
+    )
       .split(",")
       .map((s) => s.trim());
+    if (verbose) console.log("Exclude Patterns:", excludePatterns);
     const filteredDiff = parsedDiff.filter((file) => {
       return !excludePatterns.some((pattern) => {
         var _a;
@@ -312,7 +340,9 @@ ${chunk.changes
         );
       });
     });
-    const comments = yield analyzeCode(filteredDiff, prDetails);
+    if (verbose) console.log("Filtered Diff:", filteredDiff);
+    const comments = yield analyzeCode(filteredDiff, prDetails, verbose);
+    if (verbose) console.log("Comments:", comments);
     if (comments.length > 0) {
       yield createReviewComment(
         prDetails.owner,
@@ -323,7 +353,11 @@ ${chunk.changes
     }
   });
 }
-main().catch((error) => {
+main(
+  ((_d = process.env.VERBOSE) !== null && _d !== void 0
+    ? _d
+    : core.getInput("VERBOSE")) === "true"
+).catch((error) => {
   console.error("Error:", error);
   process.exit(1);
 });

dist/index.js.map (vendored), 2 changes
File diff suppressed because one or more lines are too long

src/main.ts

@@ -5,9 +5,12 @@ import { Octokit } from "@octokit/rest";
 import parseDiff, { Chunk, File } from "parse-diff";
 import minimatch from "minimatch";
-const GITHUB_TOKEN: string = core.getInput("GITHUB_TOKEN");
-const OPENAI_API_KEY: string = core.getInput("OPENAI_API_KEY");
-const OPENAI_API_MODEL: string = core.getInput("OPENAI_API_MODEL");
+const GITHUB_TOKEN: string =
+  process.env.GITHUB_TOKEN ?? core.getInput("GITHUB_TOKEN");
+const OPENAI_API_KEY: string =
+  process.env.OPENAI_API_KEY ?? core.getInput("OPENAI_API_KEY");
+const OPENAI_API_MODEL: string =
+  process.env.OPENAI_API_MODEL ?? core.getInput("OPENAI_API_MODEL");
 const octokit = new Octokit({ auth: GITHUB_TOKEN });
@@ -58,7 +61,8 @@ async function getDiff(
 async function analyzeCode(
   parsedDiff: File[],
-  prDetails: PRDetails
+  prDetails: PRDetails,
+  verbose = false
 ): Promise<Array<{ body: string; path: string; line: number }>> {
   const comments: Array<{ body: string; path: string; line: number }> = [];
@@ -66,7 +70,13 @@ async function analyzeCode(
     if (file.to === "/dev/null") continue; // Ignore deleted files
     for (const chunk of file.chunks) {
       const prompt = createPrompt(file, chunk, prDetails);
-      const aiResponse = await getAIResponse(prompt);
+      if (verbose) console.log("Prompt:", prompt);
+      const aiResponse = await getAIResponse(prompt, verbose);
+      if (verbose) console.log("AI Response:", aiResponse);
       if (aiResponse) {
         const newComments = createComment(file, chunk, aiResponse);
         if (newComments) {
@@ -110,13 +120,15 @@ ${chunk.changes
 `;
 }
-async function getAIResponse(prompt: string): Promise<Array<{
+async function getAIResponse(
+  prompt: string,
+  verbose = false
+): Promise<Array<{
   lineNumber: string;
   reviewComment: string;
 }> | null> {
   const queryConfig = {
     model: OPENAI_API_MODEL,
-    max_completion_tokens: 700,
   };
   try {
@@ -130,6 +142,8 @@ async function getAIResponse(prompt: string): Promise<Array<{
       ],
     });
+    if (verbose) console.log("Response:", response);
     const res = response.choices[0].message?.content?.trim() || "{}";
     // Remove ```json from the response if it exists
     if (res.startsWith("```json")) {
@@ -168,29 +182,44 @@ async function createReviewComment(
   pull_number: number,
   comments: Array<{ body: string; path: string; line: number }>
 ): Promise<void> {
-  await octokit.pulls.createReview({
-    owner,
-    repo,
-    pull_number,
-    comments,
-    event: "COMMENT",
-  });
+  try {
+    await octokit.pulls.createReview({
+      owner,
+      repo,
+      pull_number,
+      comments,
+      event: "COMMENT",
+    });
+  } catch (error) {
+    console.error(error);
+    throw error;
+  }
 }
-async function main() {
+async function main(verbose = false) {
   const prDetails = await getPRDetails();
+  if (verbose) console.log("PR Details:", prDetails);
   let diff: string | null;
   const eventData = JSON.parse(
     readFileSync(process.env.GITHUB_EVENT_PATH ?? "", "utf8")
   );
+  if (verbose) console.log("Event Data:", eventData);
   if (eventData.action === "opened") {
+    if (verbose) console.log('Event action is "opened"');
     diff = await getDiff(
       prDetails.owner,
      prDetails.repo,
       prDetails.pull_number
     );
   } else if (eventData.action === "synchronize") {
+    if (verbose) console.log('Event action is "synchronize"');
     const newBaseSha = eventData.before;
     const newHeadSha = eventData.after;
@@ -204,12 +233,16 @@ async function main() {
       head: newHeadSha,
     });
+    if (verbose) console.log("Response:", response);
     diff = String(response.data);
   } else {
     console.log("Unsupported event:", process.env.GITHUB_EVENT_NAME);
     return;
   }
+  if (verbose) console.log("Diff:", diff);
   if (!diff) {
     console.log("No diff found");
     return;
@@ -217,18 +250,26 @@ async function main() {
   const parsedDiff = parseDiff(diff);
-  const excludePatterns = core
-    .getInput("exclude")
+  if (verbose) console.log("Parsed Diff:", parsedDiff);
+  const excludePatterns = (process.env.EXCLUDE ?? core.getInput("exclude"))
     .split(",")
     .map((s) => s.trim());
+  if (verbose) console.log("Exclude Patterns:", excludePatterns);
   const filteredDiff = parsedDiff.filter((file) => {
     return !excludePatterns.some((pattern) =>
       minimatch(file.to ?? "", pattern)
     );
   });
-  const comments = await analyzeCode(filteredDiff, prDetails);
+  if (verbose) console.log("Filtered Diff:", filteredDiff);
+  const comments = await analyzeCode(filteredDiff, prDetails, verbose);
+  if (verbose) console.log("Comments:", comments);
   if (comments.length > 0) {
     await createReviewComment(
       prDetails.owner,
@@ -239,7 +280,9 @@ async function main() {
   }
 }
-main().catch((error) => {
-  console.error("Error:", error);
-  process.exit(1);
-});
+main((process.env.VERBOSE ?? core.getInput("VERBOSE")) === "true").catch(
+  (error) => {
+    console.error("Error:", error);
+    process.exit(1);
+  }
+);
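
Note on the configuration change above: every setting now resolves from the environment first and only falls back to the GitHub Actions input of the same name, which is what lets src/main.ts be exercised outside of a workflow run (for example from the new Bun test below). A minimal sketch of that resolution order follows; the resolveInput helper is illustrative only, the commit itself inlines the ?? fallback per constant.

import * as core from "@actions/core";

// The environment variable wins over the Actions input of the same name,
// so the entry point can run locally or under a test runner as well as in CI.
function resolveInput(name: string): string {
  return process.env[name] ?? core.getInput(name);
}

const GITHUB_TOKEN = resolveInput("GITHUB_TOKEN");
const OPENAI_API_KEY = resolveInput("OPENAI_API_KEY");
const OPENAI_API_MODEL = resolveInput("OPENAI_API_MODEL");
const VERBOSE = resolveInput("VERBOSE") === "true"; // gates the new debug logging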

tests/main.test.ts (new file), 44 additions

@@ -0,0 +1,44 @@
+// test/index.test.ts
+import { test, expect, beforeAll } from "bun:test";
+import { writeFileSync, mkdtempSync } from "fs";
+import { tmpdir } from "os";
+import { join } from "path";
+
+// Set up environment variables
+beforeAll(() => {
+  // Create a temporary directory
+  const tempDir = mkdtempSync(join(tmpdir(), "github-action-"));
+
+  // Create a temporary event file
+  const event = {
+    action: "opened",
+    number: 6, // Replace with the PR number you want to test
+    repository: {
+      owner: { login: "researchwiseai" },
+      name: "langgraphjs-checkpoint-dynamodb",
+    },
+  };
+  const eventPath = join(tempDir, "event.json");
+  writeFileSync(eventPath, JSON.stringify(event, null, 2));
+
+  process.env.GITHUB_EVENT_PATH = eventPath;
+  process.env.GITHUB_EVENT_NAME = "pull_request";
+});
+
+test(
+  "GitHub Action runs without errors",
+  async () => {
+    // Import your main script
+    // You may need to adjust the import path
+    await import("../src/main");
+
+    // You can add assertions here if your script exports functions
+    // For now, we just ensure it runs without throwing
+    expect(true).toBe(true);
+  },
+  {
+    timeout: 60_000,
+  }
+);
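
Usage note (not part of the diff): because the entry point now reads configuration from the environment before falling back to core.getInput, a local bun test run of the file above presumably needs the relevant variables exported first. A hypothetical setup with placeholder values:

// Hypothetical pre-test setup; none of these lines are in the commit.
process.env.GITHUB_TOKEN = "<personal access token with repo access>"; // assumption: a real token is required
process.env.OPENAI_API_KEY = "<OpenAI API key>";
process.env.OPENAI_API_MODEL = "gpt-4o"; // assumption: any chat-completions model name
process.env.VERBOSE = "true"; // turns on the console.log debugging added in this commit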