Mirror of https://codeberg.org/forgejo/forgejo.git, synced 2025-09-18 00:35:56 +00:00
fix: prevent initial 'blank' display of action logs view, remove unnecessary API calls (#9099)
When you access Actions logs, there is a momentary render of a partially blank screen.

This is caused by the `RepoActionView` Vue component having no data. It performs a fetch to two APIs in parallel (the `/0` and `/artifacts` endpoints) to retrieve the steps and artifacts for the action run, and only then renders the page. Then, even if the job is complete and the user hasn't yet interacted to expand any logs, it performs the same API calls a second time.

This PR fixes both problems by:

- Injecting the initial data for the Action's steps and artifacts into the initial page render, and using that data in `RepoActionView` as its initial data load.
- No longer unconditionally starting a second invocation of `loadJob` one second later; the initial data is used to determine whether the job is still running and requires an interval refresh.

## Checklist

The [contributor guide](https://forgejo.org/docs/next/contributor/) contains information that will be helpful to first time contributors. There also are a few [conditions for merging Pull Requests in Forgejo repositories](https://codeberg.org/forgejo/governance/src/branch/main/PullRequestsAgreement.md). You are also welcome to join the [Forgejo development chatroom](https://matrix.to/#/#forgejo-development:matrix.org).

### Tests

**WIP**

- I added test coverage for Go changes...
  - [ ] in their respective `*_test.go` for unit tests.
  - [x] in the `tests/integration` directory if it involves interactions with a live Forgejo server.
- I added test coverage for JavaScript changes...
  - [x] in `web_src/js/*.test.js` if it can be unit tested.
  - [ ] in `tests/e2e/*.test.e2e.js` if it requires interactions with a live Forgejo server (see also the [developer guide for JavaScript testing](https://codeberg.org/forgejo/forgejo/src/branch/forgejo/tests/e2e/README.md#end-to-end-tests)).

### Documentation

- [ ] I created a pull request [to the documentation](https://codeberg.org/forgejo/docs) to explain to Forgejo users how to use this change.
- [x] I did not document these changes and I do not expect someone else to do it.

### Release notes

- [ ] I do not want this change to show in the release notes.
- [x] I want the title to show in the release notes with a link to this pull request.
- [ ] I want the content of the `release-notes/<pull request number>.md` to be used for the release notes instead of the title.

## Release notes

- User Interface bug fixes
  - [PR](https://codeberg.org/forgejo/forgejo/pulls/9099): prevent initial 'blank' display of action logs view, remove unnecessary API calls

Reviewed-on: https://codeberg.org/forgejo/forgejo/pulls/9099
Reviewed-by: Earl Warren <earl-warren@noreply.codeberg.org>
Reviewed-by: Gusted <gusted@noreply.codeberg.org>
Co-authored-by: Mathieu Fenniak <mathieu@fenniak.net>
Co-committed-by: Mathieu Fenniak <mathieu@fenniak.net>
parent e0d6705b71
commit a082555c04

6 changed files with 278 additions and 152 deletions
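The gist of the change, as a minimal sketch rather than the actual component code: the Go handler now serializes the first `ViewResponse` and `ArtifactsViewResponse` into `data-initial-post-response` / `data-initial-artifacts-response` attributes, and the frontend consumes them instead of fetching. `initFromInjectedData`, `el`, and `loadJob` below are illustrative names; the attribute names and the `state.run.done` flag come from the diff that follows.

```js
// Hypothetical helper sketching the behaviour the patch gives RepoActionView:
// consume the server-injected JSON instead of fetching, and only start the
// 1-second refresh interval when the run is not yet done.
function initFromInjectedData(el, loadJob) {
  // JSON payloads written into the template by the Go View handler.
  const initialJobData = JSON.parse(el.getAttribute('data-initial-post-response'));
  const initialArtifactData = JSON.parse(el.getAttribute('data-initial-artifacts-response'));

  // The first "load" consumes the injected data synchronously; no fetch() happens here.
  loadJob({initialJobData, initialArtifactData});

  // A finished run never schedules the interval, so the redundant second round
  // of /0 and /artifacts requests disappears.
  return initialJobData.state.run.done ? null : setInterval(loadJob, 1000);
}
```

Because the payloads are embedded in the initial HTML, the component has data on its very first render, and a completed run never has to call the API at all.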
@@ -26,6 +26,7 @@ import (
	"forgejo.org/modules/actions"
	"forgejo.org/modules/base"
	"forgejo.org/modules/git"
	"forgejo.org/modules/json"
	"forgejo.org/modules/log"
	"forgejo.org/modules/setting"
	"forgejo.org/modules/storage"
@@ -81,6 +82,28 @@ func View(ctx *context_module.Context) {
	ctx.Data["WorkflowName"] = workflowName
	ctx.Data["WorkflowURL"] = ctx.Repo.RepoLink + "/actions?workflow=" + workflowName

	viewResponse := getViewResponse(ctx, &ViewRequest{}, runIndex, jobIndex, attemptNumber)
	if ctx.Written() {
		return
	}
	artifactsViewResponse := getArtifactsViewResponse(ctx, runIndex)
	if ctx.Written() {
		return
	}

	var buf1, buf2 strings.Builder
	if err := json.NewEncoder(&buf1).Encode(viewResponse); err != nil {
		ctx.ServerError("EncodingError", err)
		return
	}
	ctx.Data["InitialData"] = buf1.String()

	if err := json.NewEncoder(&buf2).Encode(artifactsViewResponse); err != nil {
		ctx.ServerError("EncodingError", err)
		return
	}
	ctx.Data["InitialArtifactsData"] = buf2.String()

	ctx.HTML(http.StatusOK, tplViewActions)
}

@@ -230,14 +253,23 @@ func ViewPost(ctx *context_module.Context) {
	// which uses 1-based numbering... would be confusing as "Index" if it later can't be used to index an slice/array.
	attemptNumber := ctx.ParamsInt64("attempt")

	current, jobs := getRunJobs(ctx, runIndex, jobIndex)
	resp := getViewResponse(ctx, req, runIndex, jobIndex, attemptNumber)
	if ctx.Written() {
		return
	}

	ctx.JSON(http.StatusOK, resp)
}

func getViewResponse(ctx *context_module.Context, req *ViewRequest, runIndex, jobIndex, attemptNumber int64) *ViewResponse {
	current, jobs := getRunJobs(ctx, runIndex, jobIndex)
	if ctx.Written() {
		return nil
	}
	run := current.Run
	if err := run.LoadAttributes(ctx); err != nil {
		ctx.Error(http.StatusInternalServerError, err.Error())
		return
		return nil
	}

	resp := &ViewResponse{}
@@ -309,12 +341,12 @@ func ViewPost(ctx *context_module.Context) {
		task, err = actions_model.GetTaskByJobAttempt(ctx, current.ID, attemptNumber)
		if err != nil {
			ctx.Error(http.StatusInternalServerError, err.Error())
			return
			return nil
		}
		task.Job = current
		if err := task.LoadAttributes(ctx); err != nil {
			ctx.Error(http.StatusInternalServerError, err.Error())
			return
			return nil
		}
	}

@@ -329,7 +361,7 @@ func ViewPost(ctx *context_module.Context) {
	taskAttempts, err := task.GetAllAttempts(ctx)
	if err != nil {
		ctx.Error(http.StatusInternalServerError, err.Error())
		return
		return nil
	}
	allAttempts := make([]*TaskAttempt, len(taskAttempts))
	for i, actionTask := range taskAttempts {
@@ -396,7 +428,7 @@ func ViewPost(ctx *context_module.Context) {
		logRows, err := actions.ReadLogs(ctx, task.LogInStorage, task.LogFilename, offset, length)
		if err != nil {
			ctx.Error(http.StatusInternalServerError, err.Error())
			return
			return nil
		}

		for i, row := range logRows {
@@ -417,7 +449,7 @@ func ViewPost(ctx *context_module.Context) {
		}
	}

	ctx.JSON(http.StatusOK, resp)
	return resp
}

// Rerun will rerun jobs in the given run
@@ -685,19 +717,27 @@ type ArtifactsViewItem struct {

func ArtifactsView(ctx *context_module.Context) {
	runIndex := ctx.ParamsInt64("run")
	artifactsResponse := getArtifactsViewResponse(ctx, runIndex)
	if ctx.Written() {
		return
	}
	ctx.JSON(http.StatusOK, artifactsResponse)
}

func getArtifactsViewResponse(ctx *context_module.Context, runIndex int64) *ArtifactsViewResponse {
	run, err := actions_model.GetRunByIndex(ctx, ctx.Repo.Repository.ID, runIndex)
	if err != nil {
		if errors.Is(err, util.ErrNotExist) {
			ctx.Error(http.StatusNotFound, err.Error())
			return
			return nil
		}
		ctx.Error(http.StatusInternalServerError, err.Error())
		return
		return nil
	}
	artifacts, err := actions_model.ListUploadedArtifactsMeta(ctx, run.ID)
	if err != nil {
		ctx.Error(http.StatusInternalServerError, err.Error())
		return
		return nil
	}
	artifactsResponse := ArtifactsViewResponse{
		Artifacts: make([]*ArtifactsViewItem, 0, len(artifacts)),
@@ -713,7 +753,7 @@ func ArtifactsView(ctx *context_module.Context) {
			Status: status,
		})
	}
	ctx.JSON(http.StatusOK, artifactsResponse)
	return &artifactsResponse
}

func ArtifactsDeleteView(ctx *context_module.Context) {

@@ -10,6 +10,8 @@
	data-actions-url="{{.ActionsURL}}"
	data-workflow-name="{{.WorkflowName}}"
	data-workflow-url="{{.WorkflowURL}}"
	data-initial-post-response="{{.InitialData}}"
	data-initial-artifacts-response="{{.InitialArtifactsData}}"
	data-locale-approve="{{ctx.Locale.Tr "repo.diff.review.approve"}}"
	data-locale-cancel="{{ctx.Locale.Tr "cancel"}}"
	data-locale-rerun="{{ctx.Locale.Tr "rerun"}}"

@@ -8,6 +8,7 @@ import (
	"fmt"
	"net/http"
	"net/url"
	"regexp"
	"strings"
	"testing"

@@ -128,7 +129,7 @@ func TestActionViewsArtifactDownload(t *testing.T) {
func TestActionViewsView(t *testing.T) {
	defer tests.PrepareTestEnv(t)()

	req := NewRequest(t, "GET", "/user2/repo1/actions/runs/187")
	req := NewRequest(t, "GET", "/user5/repo4/actions/runs/187")
	intermediateRedirect := MakeRequest(t, req, http.StatusTemporaryRedirect)

	finalURL := intermediateRedirect.Result().Header.Get("Location")
@@ -141,4 +142,17 @@ func TestActionViewsView(t *testing.T) {
	htmlDoc.AssertAttrEqual(t, selector, "data-run-index", "187")
	htmlDoc.AssertAttrEqual(t, selector, "data-job-index", "0")
	htmlDoc.AssertAttrEqual(t, selector, "data-attempt-number", "1")
	htmlDoc.AssertAttrPredicate(t, selector, "data-initial-post-response", func(actual string) bool {
		// Remove dynamic "duration" fields for comparison.
		pattern := `"duration":"[^"]*"`
		re := regexp.MustCompile(pattern)
		actualClean := re.ReplaceAllString(actual, `"duration":"_duration_"`)
		// Remove "time_since_started_html" fields for comparison since they're TZ-sensitive in the test
		pattern = `"time_since_started_html":".*?\\u003c/relative-time\\u003e"`
		re = regexp.MustCompile(pattern)
		actualClean = re.ReplaceAllString(actualClean, `"time_since_started_html":"_time_"`)

		return assert.JSONEq(t, "{\"state\":{\"run\":{\"link\":\"/user5/repo4/actions/runs/187\",\"title\":\"update actions\",\"titleHTML\":\"update actions\",\"status\":\"success\",\"canCancel\":false,\"canApprove\":false,\"canRerun\":false,\"canDeleteArtifact\":false,\"done\":true,\"jobs\":[{\"id\":192,\"name\":\"job_2\",\"status\":\"success\",\"canRerun\":false,\"duration\":\"_duration_\"}],\"commit\":{\"localeCommit\":\"Commit\",\"localePushedBy\":\"pushed by\",\"localeWorkflow\":\"Workflow\",\"shortSHA\":\"c2d72f5484\",\"link\":\"/user5/repo4/commit/c2d72f548424103f01ee1dc02889c1e2bff816b0\",\"pusher\":{\"displayName\":\"user1\",\"link\":\"/user1\"},\"branch\":{\"name\":\"master\",\"link\":\"/user5/repo4/src/branch/master\",\"isDeleted\":false}}},\"currentJob\":{\"title\":\"job_2\",\"detail\":\"Success\",\"steps\":[{\"summary\":\"Set up job\",\"duration\":\"_duration_\",\"status\":\"success\"},{\"summary\":\"Complete job\",\"duration\":\"_duration_\",\"status\":\"success\"}],\"allAttempts\":[{\"number\":3,\"time_since_started_html\":\"_time_\",\"status\":\"running\"},{\"number\":2,\"time_since_started_html\":\"_time_\",\"status\":\"success\"},{\"number\":1,\"time_since_started_html\":\"_time_\",\"status\":\"success\"}]}},\"logs\":{\"stepsLog\":[]}}\n", actualClean)
	})
	htmlDoc.AssertAttrEqual(t, selector, "data-initial-artifacts-response", "{\"artifacts\":[{\"name\":\"multi-file-download\",\"size\":2048,\"status\":\"completed\"}]}\n")
}

@@ -26,7 +26,7 @@ func NewHTMLParser(t testing.TB, body *bytes.Buffer) *HTMLDoc {
	return &HTMLDoc{doc: doc}
}

func (doc *HTMLDoc) AssertAttrEqual(t testing.TB, selector, attr, expected string) bool {
func (doc *HTMLDoc) AssertAttrPredicate(t testing.TB, selector, attr string, predicate func(attrValue string) bool) bool {
	t.Helper()
	selection := doc.doc.Find(selector)
	require.NotEmpty(t, selection, selector)
@@ -34,7 +34,14 @@ func (doc *HTMLDoc) AssertAttrEqual(t testing.TB, selector, attr, expected strin
	actual, exists := selection.Attr(attr)
	require.True(t, exists, "%s not found in %s", attr, selection.Text())

	return assert.Equal(t, expected, actual)
	return predicate(actual)
}

func (doc *HTMLDoc) AssertAttrEqual(t testing.TB, selector, attr, expected string) bool {
	t.Helper()
	return doc.AssertAttrPredicate(t, selector, attr, func(actual string) bool {
		return assert.Equal(t, expected, actual)
	})
}

// GetInputValueByID for get input value by id

@@ -2,6 +2,58 @@ import {mount, flushPromises} from '@vue/test-utils';
import {toAbsoluteUrl} from '../utils.js';
import RepoActionView from './RepoActionView.vue';

const testLocale = {
  approve: 'Locale Approve',
  cancel: 'Locale Cancel',
  rerun: 'Locale Re-run',
  artifactsTitle: 'artifactTitleHere',
  areYouSure: '',
  confirmDeleteArtifact: '',
  rerun_all: '',
  showTimeStamps: '',
  showLogSeconds: '',
  showFullScreen: '',
  downloadLogs: '',
  runAttemptLabel: 'Run attempt %[1]s %[2]s',
  viewingOutOfDateRun: 'oh no, out of date since %[1]s give or take or so',
  viewMostRecentRun: '',
  status: {
    unknown: '',
    waiting: '',
    running: '',
    success: '',
    failure: '',
    cancelled: '',
    skipped: '',
    blocked: '',
  },
};
const minimalInitialJobData = {
  state: {
    run: {
      status: 'success',
      commit: {
        pusher: {},
      },
    },
    currentJob: {
      steps: [
        {
          summary: 'Test Job',
          duration: '1s',
          status: 'success',
        },
      ],
    },
  },
  logs: {
    stepsLog: [],
  },
};
const minimalInitialArtifactData = {
  artifacts: [],
};

test('processes ##[group] and ##[endgroup]', async () => {
  Object.defineProperty(document.documentElement, 'lang', {value: 'en'});
  vi.spyOn(global, 'fetch').mockImplementation((url, opts) => {
@@ -56,32 +108,9 @@ test('processes ##[group] and ##[endgroup]', async () => {
    props: {
      jobIndex: '1',
      attemptNumber: '1',
      locale: {
        approve: '',
        cancel: '',
        rerun: '',
        artifactsTitle: '',
        areYouSure: '',
        confirmDeleteArtifact: '',
        rerun_all: '',
        showTimeStamps: '',
        showLogSeconds: '',
        showFullScreen: '',
        downloadLogs: '',
        runAttemptLabel: '',
        viewingOutOfDateRun: '',
        viewMostRecentRun: '',
        status: {
          unknown: '',
          waiting: '',
          running: '',
          success: '',
          failure: '',
          cancelled: '',
          skipped: '',
          blocked: '',
        },
      },
      initialJobData: minimalInitialJobData,
      initialArtifactData: minimalInitialArtifactData,
      locale: testLocale,
    },
  });
  await flushPromises();
@@ -181,37 +210,15 @@ test('load multiple steps on a finished action', async () => {
  const wrapper = mount(RepoActionView, {
    props: {
      actionsURL: 'https://example.com/example-org/example-repo/actions',
      initialJobData: minimalInitialJobData,
      initialArtifactData: minimalInitialArtifactData,
      runIndex: '1',
      jobIndex: '2',
      attemptNumber: '1',
      locale: {
        approve: '',
        cancel: '',
        rerun: '',
        artifactsTitle: '',
        areYouSure: '',
        confirmDeleteArtifact: '',
        rerun_all: '',
        showTimeStamps: '',
        showLogSeconds: '',
        showFullScreen: '',
        downloadLogs: '',
        runAttemptLabel: '',
        viewingOutOfDateRun: '',
        viewMostRecentRun: '',
        status: {
          unknown: '',
          waiting: '',
          running: '',
          success: '',
          failure: '',
          cancelled: '',
          skipped: '',
          blocked: '',
        },
      },
      locale: testLocale,
    },
  });
  wrapper.vm.loadJob(); // simulate intermittent reload immediately so UI switches from minimalInitialJobData to the mock data from the test's fetch spy.
  await flushPromises();
  // Click on both steps to start their log loading in fast succession...
  await wrapper.get('.job-step-section:nth-of-type(1) .job-step-summary').trigger('click');
@@ -229,6 +236,30 @@ test('load multiple steps on a finished action', async () => {

function configureForMultipleAttemptTests({viewHistorical}) {
  Object.defineProperty(document.documentElement, 'lang', {value: 'en'});
  const myJobState = {
    run: {
      canApprove: true,
      canCancel: true,
      canRerun: true,
      status: 'success',
      commit: {
        pusher: {},
      },
    },
    currentJob: {
      steps: [
        {
          summary: 'Test Job',
          duration: '1s',
          status: 'success',
        },
      ],
      allAttempts: [
        {number: 2, time_since_started_html: 'yesterday', status: 'success'},
        {number: 1, time_since_started_html: 'two days ago', status: 'failure'},
      ],
    },
  };
  vi.spyOn(global, 'fetch').mockImplementation((url, opts) => {
    const artifacts_value = {
      artifacts: [],
@@ -241,30 +272,7 @@ function configureForMultipleAttemptTests({viewHistorical}) {
      },
    ];
    const jobs_value = {
      state: {
        run: {
          canApprove: true,
          canCancel: true,
          canRerun: true,
          status: 'success',
          commit: {
            pusher: {},
          },
        },
        currentJob: {
          steps: [
            {
              summary: 'Test Job',
              duration: '1s',
              status: 'success',
            },
          ],
          allAttempts: [
            {number: 2, time_since_started_html: 'yesterday', status: 'success'},
            {number: 1, time_since_started_html: 'two days ago', status: 'failure'},
          ],
        },
      },
      state: myJobState,
      logs: {
        stepsLog: opts.body?.includes('"cursor":null') ? stepsLog_value : [],
      },
@@ -284,32 +292,9 @@ function configureForMultipleAttemptTests({viewHistorical}) {
      jobIndex: '1',
      attemptNumber: viewHistorical ? '1' : '2',
      actionsURL: toAbsoluteUrl('/user1/repo2/actions'),
      locale: {
        approve: 'Locale Approve',
        cancel: 'Locale Cancel',
        rerun: 'Locale Re-run',
        artifactsTitle: '',
        areYouSure: '',
        confirmDeleteArtifact: '',
        rerun_all: '',
        showTimeStamps: '',
        showLogSeconds: '',
        showFullScreen: '',
        downloadLogs: '',
        runAttemptLabel: 'Run attempt %[1]s %[2]s',
        viewingOutOfDateRun: 'oh no, out of date since %[1]s give or take or so',
        viewMostRecentRun: '',
        status: {
          unknown: '',
          waiting: '',
          running: '',
          success: '',
          failure: '',
          cancelled: '',
          skipped: '',
          blocked: '',
        },
      },
      initialJobData: {...minimalInitialJobData, state: myJobState},
      initialArtifactData: minimalInitialArtifactData,
      locale: testLocale,
    },
  });
  return wrapper;
@@ -482,38 +467,95 @@ test('artifacts download links', async () => {
  const wrapper = mount(RepoActionView, {
    props: {
      actionsURL: 'https://example.com/example-org/example-repo/actions',
      initialJobData: minimalInitialJobData,
      initialArtifactData: minimalInitialArtifactData,
      runIndex: '10',
      runID: '1001',
      jobIndex: '2',
      attemptNumber: '1',
      locale: {
        approve: '',
        cancel: '',
        rerun: '',
        artifactsTitle: 'artifactTitleHere',
        areYouSure: '',
        confirmDeleteArtifact: '',
        rerun_all: '',
        showTimeStamps: '',
        showLogSeconds: '',
        showFullScreen: '',
        downloadLogs: '',
        status: {
          unknown: '',
          waiting: '',
          running: '',
          success: '',
          failure: '',
          cancelled: '',
          skipped: '',
          blocked: '',
        },
      },
      locale: testLocale,
    },
  });
  wrapper.vm.loadJob(); // simulate intermittent reload immediately so UI switches from minimalInitialJobData to the mock data from the test's fetch spy.
  await flushPromises();

  expect(wrapper.get('.job-artifacts .job-artifacts-title').text()).toEqual('artifactTitleHere');
  expect(wrapper.get('.job-artifacts .job-artifacts-item:nth-of-type(1) .job-artifacts-link').attributes('href')).toEqual('https://example.com/example-org/example-repo/actions/runs/1001/artifacts/artifactname1');
  expect(wrapper.get('.job-artifacts .job-artifacts-item:nth-of-type(2) .job-artifacts-link').attributes('href')).toEqual('https://example.com/example-org/example-repo/actions/runs/1001/artifacts/artifactname2');
});

test('initial load schedules refresh when job is not done', async () => {
  Object.defineProperty(document.documentElement, 'lang', {value: 'en'});
  vi.spyOn(global, 'fetch').mockImplementation((url, _opts) => {
    return Promise.resolve({
      ok: true,
      json: vi.fn().mockResolvedValue(
        url.endsWith('/artifacts') ? minimalInitialArtifactData : minimalInitialJobData,
      ),
    });
  });

  // Provide a job that is "done" so that the component doesn't start incremental refresh...
  {
    const doneInitialJobData = structuredClone(minimalInitialJobData);
    doneInitialJobData.state.run.done = true;
    const wrapper = mount(RepoActionView, {
      props: {
        jobIndex: '1',
        attemptNumber: '1',
        initialJobData: doneInitialJobData,
        initialArtifactData: minimalInitialArtifactData,
        locale: testLocale,
      },
    });
    await flushPromises();
    const container = wrapper.find('.action-view-container');
    expect(container.exists()).toBe(true);
    expect(container.classes()).not.toContain('interval-pending');
    wrapper.unmount();
  }

  // Provide a job that is *not* "done" so that the component does start incremental refresh...
  {
    const runningInitialJobData = structuredClone(minimalInitialJobData);
    runningInitialJobData.state.run.done = false;
    const wrapper = mount(RepoActionView, {
      props: {
        jobIndex: '1',
        attemptNumber: '1',
        initialJobData: runningInitialJobData,
        initialArtifactData: minimalInitialArtifactData,
        locale: testLocale,
      },
    });
    await flushPromises();
    const container = wrapper.find('.action-view-container');
    expect(container.exists()).toBe(true);
    expect(container.classes()).toContain('interval-pending');
    wrapper.unmount();
  }
});

test('initial load data is used without calling fetch()', async () => {
  Object.defineProperty(document.documentElement, 'lang', {value: 'en'});
  const fetchSpy = vi.spyOn(global, 'fetch').mockImplementation((url, _opts) => {
    return Promise.resolve({
      ok: true,
      json: vi.fn().mockResolvedValue(
        url.endsWith('/artifacts') ? minimalInitialArtifactData : minimalInitialJobData,
      ),
    });
  });

  mount(RepoActionView, {
    props: {
      jobIndex: '1',
      attemptNumber: '1',
      initialJobData: minimalInitialJobData,
      initialArtifactData: minimalInitialArtifactData,
      locale: testLocale,
    },
  });
  await flushPromises();
  expect(fetchSpy).not.toHaveBeenCalled();
});

@@ -14,6 +14,8 @@ const sfc = {
    ActionRunStatus,
  },
  props: {
    initialJobData: Object,
    initialArtifactData: Object,
    runIndex: String,
    runID: String,
    jobIndex: String,
@@ -96,10 +98,10 @@ const sfc = {
  },

  async mounted() {
    // load job data and then auto-reload periodically
    // need to await first loadJob so this.currentJobStepsStates is initialized and can be used in hashChangeListener
    await this.loadJob();
    this.intervalID = setInterval(this.loadJob, 1000);
    // Need to await first loadJob so this.currentJobStepsStates is initialized and can be used in hashChangeListener,
    // but with the initializing data being passed in this should end up as a synchronous invocation. loadJob is
    // responsible for setting up its refresh interval during this first invocation.
    await this.loadJob({initialJobData: this.initialJobData, initialArtifactData: this.initialArtifactData});
    document.body.addEventListener('click', this.closeDropdown);
    this.hashChangeListener();
    window.addEventListener('hashchange', this.hashChangeListener);
@@ -312,7 +314,8 @@ const sfc = {
      return await resp.json();
    },

    async loadJob() {
    async loadJob(initializationData) {
      const isInitializing = initializationData !== undefined;
      let myLoadingLogCursors = this.getLogCursors();
      if (this.loading) {
        // loadJob is already executing; but it's possible that our log cursor request has changed since it started. If
@@ -335,10 +338,18 @@ const sfc = {

        while (true) {
          try {
            [job, artifacts] = await Promise.all([
              this.fetchJob(myLoadingLogCursors),
              this.fetchArtifacts(), // refresh artifacts if upload-artifact step done
            ]);
            if (initializationData) {
              job = initializationData.initialJobData;
              artifacts = initializationData.initialArtifactData;
              // don't think it's possible that we loop retrying for 'needLoadingWithLogCursors' during initialization,
              // but just in case, we'll ensure initializationData can only be used once and go to the network on retry
              initializationData = undefined;
            } else {
              [job, artifacts] = await Promise.all([
                this.fetchJob(myLoadingLogCursors),
                this.fetchArtifacts(), // refresh artifacts if upload-artifact step done
              ]);
            }
          } catch (err) {
            if (err instanceof TypeError) return; // avoid network error while unloading page
            throw err;
@@ -374,9 +385,14 @@ const sfc = {
          this.appendLogs(logs.step, logs.lines, logs.started);
        }

        if (this.run.done && this.intervalID) {
          clearInterval(this.intervalID);
          this.intervalID = null;
        if (this.run.done) {
          if (this.intervalID) {
            clearInterval(this.intervalID);
            this.intervalID = null;
          }
        } else if (isInitializing) {
          // Begin refresh interval since we know this job isn't done.
          this.intervalID = setInterval(this.loadJob, 1000);
        }
      } finally {
        this.loading = false;
@@ -485,7 +501,12 @@ export function initRepositoryActionView() {
  const parentFullHeight = document.querySelector('body > div.full.height');
  if (parentFullHeight) parentFullHeight.style.paddingBottom = '0';

  const initialJobData = JSON.parse(el.getAttribute('data-initial-post-response'));
  const initialArtifactData = JSON.parse(el.getAttribute('data-initial-artifacts-response'));

  const view = createApp(sfc, {
    initialJobData,
    initialArtifactData,
    runIndex: el.getAttribute('data-run-index'),
    runID: el.getAttribute('data-run-id'),
    jobIndex: el.getAttribute('data-job-index'),
@@ -524,7 +545,7 @@ export function initRepositoryActionView() {
}
</script>
<template>
  <div class="ui container fluid padded action-view-container">
  <div class="ui container fluid padded action-view-container" :class="{ 'interval-pending': intervalID }">
    <div class="action-view-header job-out-of-date-warning" v-if="!currentingViewingMostRecentAttempt">
      <div class="ui warning message">
        <!-- eslint-disable-next-line vue/no-v-html -->