Merge pull request #13 from lfsevergnini/chore/add-model-version-parameter

add customizable OpenAI API model parameter
Ville Saukkonen 2023-07-31 18:21:21 +03:00 committed by GitHub
commit d3abad1bc9
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
5 changed files with 1560 additions and 1473 deletions

@@ -42,6 +42,7 @@ jobs:
         with:
           GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
           OPENAI_API_KEY: ${{ secrets.OPENAI_API_KEY }}
+          OPENAI_API_MODEL: "gpt-4" # Optional: defaults to "gpt-4"
           exclude: "**/*.json, **/*.md" # Optional: exclude patterns separated by commas
 ```

@@ -7,6 +7,10 @@ inputs:
   OPENAI_API_KEY:
     description: "OpenAI API key for GPT."
     required: true
+  OPENAI_API_MODEL:
+    description: "OpenAI API model."
+    required: false
+    default: "gpt-4"
   exclude:
     description: "Glob patterns to exclude files from the diff analysis"
     required: false
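
For context on how this new input reaches the TypeScript entry point further down: the Actions runner resolves the `default: "gpt-4"` declared above whenever a workflow omits `OPENAI_API_MODEL`, and exposes the value as an `INPUT_OPENAI_API_MODEL` environment variable that `@actions/core` reads. A minimal sketch of that behavior (this standalone snippet is illustrative, not part of the action's code):

```
import * as core from "@actions/core";

// The runner maps each action input to an INPUT_<NAME> environment variable,
// already filled with the action.yml default when the workflow sets nothing.
const model: string = core.getInput("OPENAI_API_MODEL"); // "gpt-4" unless overridden
core.info(`Using OpenAI model: ${model}`);
```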

dist/index.js (vendored, 3023 changed lines): file diff suppressed because it is too large.

dist/index.js.map (vendored, 2 changed lines): file diff suppressed because one or more lines are too long.

@@ -7,6 +7,7 @@ import minimatch from "minimatch";
 const GITHUB_TOKEN: string = core.getInput("GITHUB_TOKEN");
 const OPENAI_API_KEY: string = core.getInput("OPENAI_API_KEY");
+const OPENAI_API_MODEL: string = core.getInput("OPENAI_API_MODEL");
 const octokit = new Octokit({ auth: GITHUB_TOKEN });
@@ -132,7 +133,7 @@ async function getAIResponse(prompt: string): Promise<Array<{
   reviewComment: string;
 }> | null> {
   const queryConfig = {
-    model: "gpt-4",
+    model: OPENAI_API_MODEL,
     temperature: 0.2,
     max_tokens: 700,
     top_p: 1,
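
For context, a hedged sketch of how the now-configurable model flows into the chat-completion request. The diff confirms only the `queryConfig` object; the client setup, the `createChatCompletion` call, and the simplified return type below assume the openai v3 Node SDK, so treat everything outside `queryConfig` as an assumption rather than the action's actual code:

```
import * as core from "@actions/core";
import { Configuration, OpenAIApi } from "openai"; // assumes the v3 SDK

const OPENAI_API_KEY: string = core.getInput("OPENAI_API_KEY");
const OPENAI_API_MODEL: string = core.getInput("OPENAI_API_MODEL"); // "gpt-4" by default

const openai = new OpenAIApi(new Configuration({ apiKey: OPENAI_API_KEY }));

// Simplified: the real getAIResponse returns structured review comments;
// this sketch only shows where the model parameter ends up.
async function getAIResponse(prompt: string): Promise<string | null> {
  const queryConfig = {
    model: OPENAI_API_MODEL, // previously hard-coded to "gpt-4"
    temperature: 0.2,
    max_tokens: 700,
    top_p: 1,
  };
  const response = await openai.createChatCompletion({
    ...queryConfig,
    messages: [{ role: "user", content: prompt }],
  });
  return response.data.choices[0].message?.content?.trim() ?? null;
}
```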