- [Bugfix] - [JSON format not supported for some OpenAI models]

- [Improvement] - [Added a configurable max token limit for analyzing larger files]

Find more on which models support JSON output here:
https://platform.openai.com/docs/guides/structured-outputs?api-mode=chat
karapinarokk 2025-03-15 19:57:53 +04:00
parent a9a064dfa1
commit c8d1cc7b9e
2 changed files with 23 additions and 4 deletions


@@ -11,6 +11,10 @@ inputs:
description: "OpenAI API model."
required: false
default: "gpt-4"
MAX_TOKEN:
description: "Maximum number of tokens that can be generated per analysis."
required: false
default: "700"
exclude:
description: "Glob patterns to exclude files from the diff analysis"
required: false
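
The diff below reads this new input with Number(core.getInput("MAX_TOKEN")). As a minimal sketch, here is a slightly more defensive variant, assuming the same @actions/core input API; the readMaxToken helper and its fallback are illustrative only, not part of this commit:

```ts
import * as core from "@actions/core";

// Mirrors the "700" default declared in action.yml above; purely illustrative.
const DEFAULT_MAX_TOKEN = 700;

// Parse the MAX_TOKEN input, falling back when it is missing or not a positive number.
function readMaxToken(): number {
  const parsed = Number(core.getInput("MAX_TOKEN"));
  return Number.isFinite(parsed) && parsed > 0 ? parsed : DEFAULT_MAX_TOKEN;
}

const MAX_TOKEN: number = readMaxToken();
```

Because action.yml ships a default of "700", the fallback only matters when a caller overrides the input with a non-numeric value.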


@@ -8,6 +8,18 @@ import minimatch from "minimatch";
const GITHUB_TOKEN: string = core.getInput("GITHUB_TOKEN");
const OPENAI_API_KEY: string = core.getInput("OPENAI_API_KEY");
const OPENAI_API_MODEL: string = core.getInput("OPENAI_API_MODEL");
const MAX_TOKEN: number = Number(core.getInput("MAX_TOKEN"));
const SUPPORTED_JSON_FORMAT_MODELS = [
"gpt-4o",
"gpt-4-turbo-preview",
"gpt-4-turbo",
"gpt-3.5-turbo",
"gpt-4-0125-preview",
"gpt-4-1106-preview",
"gpt-3.5-turbo-0125",
"gpt-3.5-turbo-1106",
];
const octokit = new Octokit({ auth: GITHUB_TOKEN });
@@ -117,7 +129,7 @@ async function getAIResponse(prompt: string): Promise<Array<{
const queryConfig = {
model: OPENAI_API_MODEL,
temperature: 0.2,
max_tokens: 700,
max_tokens: MAX_TOKEN,
top_p: 1,
frequency_penalty: 0,
presence_penalty: 0,
@@ -127,7 +139,7 @@ async function getAIResponse(prompt: string): Promise<Array<{
const response = await openai.chat.completions.create({
...queryConfig,
// return JSON if the model supports it:
...(OPENAI_API_MODEL === "gpt-4-1106-preview"
...(SUPPORTED_JSON_FORMAT_MODELS.includes(OPENAI_API_MODEL)
? { response_format: { type: "json_object" } }
: {}),
messages: [
@@ -139,6 +151,9 @@ async function getAIResponse(prompt: string): Promise<Array<{
});
const res = response.choices[0].message?.content?.trim() || "{}";
console.log(`Trimmed Response: ${res}`);
return JSON.parse(res).reviews;
} catch (error) {
console.error("Error:", error);
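
Since response_format is only enabled for models in SUPPORTED_JSON_FORMAT_MODELS, other models can still return content that is not strict JSON, in which case the JSON.parse call above throws into this catch block. A minimal sketch of isolating that parse so the action degrades to an empty review list instead; the parseReviews helper and the generic Review type are assumptions for illustration, not code from this commit:

```ts
// The exact review item shape is not visible in this hunk, so keep it generic here.
type Review = Record<string, unknown>;

// Parse the model output, returning no reviews instead of throwing when the
// content is not valid JSON (e.g. for models that run without JSON mode).
function parseReviews(content: string): Review[] {
  try {
    const parsed = JSON.parse(content);
    return Array.isArray(parsed?.reviews) ? parsed.reviews : [];
  } catch {
    console.warn("Model response was not valid JSON; skipping review comments.");
    return [];
  }
}
```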