diff --git a/Makefile b/Makefile index e4ed9043c0fc..9482233d7773 100644 --- a/Makefile +++ b/Makefile @@ -169,7 +169,10 @@ hub: descriptions agent: descriptions # syz-agent uses codesearch clang tool which requires cgo. - CGO_ENABLED=1 GOOS=$(HOSTOS) GOARCH=$(HOSTARCH) $(HOSTGO) build $(GOHOSTFLAGS) -o ./bin/syz-agent github.com/google/syzkaller/syz-agent + CGO_ENABLED=1 GOOS=$(HOSTOS) GOARCH=$(HOSTARCH) $(HOSTGO) build $(GOHOSTFLAGS) -o ./bin/syz-agent github.com/google/syzkaller/syz-agent/agent + +lore-relay: descriptions + CGO_ENABLED=1 GOOS=$(HOSTOS) GOARCH=$(HOSTARCH) $(HOSTGO) build $(GOHOSTFLAGS) -o ./bin/syz-lore-relay github.com/google/syzkaller/syz-agent/lore-relay repro: descriptions GOOS=$(HOSTOS) GOARCH=$(HOSTARCH) $(HOSTGO) build $(GOHOSTFLAGS) -o ./bin/syz-repro github.com/google/syzkaller/tools/syz-repro diff --git a/dashboard/app/ai.go b/dashboard/app/ai.go index cc6096b77df9..915c073035a3 100644 --- a/dashboard/app/ai.go +++ b/dashboard/app/ai.go @@ -12,6 +12,7 @@ import ( "net/http" "slices" "sort" + "strconv" "strings" "time" @@ -20,6 +21,7 @@ import ( "github.com/google/syzkaller/dashboard/dashapi" "github.com/google/syzkaller/pkg/aflow/ai" "github.com/google/syzkaller/pkg/email" + "github.com/google/syzkaller/pkg/email/lore" "github.com/google/syzkaller/pkg/gerrit" "github.com/google/syzkaller/pkg/osutil" "github.com/google/syzkaller/pkg/report/crash" @@ -45,12 +47,23 @@ type uiAIJobPage struct { Trajectory []*uiAITrajectorySpan History []*uiJobReviewHistory TrajectoryJSON template.JS + CurrentStage string + NextStage string + Reportings []*uiJobReporting +} + +type uiJobReporting struct { + Reporting *aidb.JobReporting + Comments []*aidb.JobComment + Link string } type uiJobReviewHistory struct { Date time.Time User string Correct string + Source string + Stage string } type uiAIJob struct { @@ -143,6 +156,107 @@ func handleAIJobsPage(ctx context.Context, w http.ResponseWriter, r *http.Reques return serveTemplate(w, "ai_jobs.html", page) } 
+func getJobStageInfo(ctx context.Context, job *aidb.Job) (*aidb.JobReporting, *AIPatchStageConfig, error) { + reportings, err := aidb.LoadJobReportings(ctx, job.ID) + if err != nil { + return nil, nil, err + } + var latest *aidb.JobReporting + currentStage := "" + nsCfg := getNsConfig(ctx, job.Namespace) + + if len(reportings) > 0 && nsCfg.AI != nil && len(nsCfg.AI.Stages) > 0 { + stageMap := make(map[string]*aidb.JobReporting) + for _, r := range reportings { + stageMap[r.Stage] = r + } + for i := len(nsCfg.AI.Stages) - 1; i >= 0; i-- { + stageName := nsCfg.AI.Stages[i].Name + if r, ok := stageMap[stageName]; ok { + latest = r + currentStage = stageName + break + } + } + } + + // Fallback to timestamp if not found in stages or stages not configured. + if latest == nil && len(reportings) > 0 { + latest = reportings[0] + for _, r := range reportings { + if r.CreatedAt.After(latest.CreatedAt) { + latest = r + } + } + currentStage = latest.Stage + } + + var nextStageCfg *AIPatchStageConfig + if nsCfg.AI != nil && len(nsCfg.AI.Stages) > 0 { + nextStageCfg, _ = determineNextStage(ctx, nsCfg.AI, job, currentStage) + } + return latest, nextStageCfg, nil +} + +func handleAIJobPagePost(ctx context.Context, job *aidb.Job, r *http.Request, hdr *uiHeader) error { + correct := r.FormValue("correct") + if correct == "" { + return nil + } + if !hdr.AIActions { + return ErrAccess + } + if !job.Finished.Valid || job.Error != "" { + return fmt.Errorf("job is in wrong state to set correct status") + } + user := currentUser(ctx) + if user == nil { + return fmt.Errorf("user is not authenticated") + } + userEmail := user.Email + + switch correct { + case aiCorrectnessCorrect: + currentReporting, _, err := getJobStageInfo(ctx, job) + if err != nil { + return err + } + err = processUpstreamSubcommand(ctx, job, currentReporting, &dashapi.SendExternalCommandReq{ + Source: SourceWebUI, + Author: userEmail, + }) + if err != nil { + return err + } + job, err = aidb.LoadJob(ctx, job.ID) + if 
err != nil { + return err + } + if err := aiJobApplyLabels(ctx, job); err != nil { + return err + } + case aiCorrectnessIncorrect: + err := aidb.RejectReportCommand(ctx, aidb.RejectReportArgs{ + Job: job, + CommandSource: SourceWebUI, + CommandExtID: "", + User: userEmail, + Reason: "", + }) + if err != nil { + return err + } + job, err = aidb.LoadJob(ctx, job.ID) + if err != nil { + return err + } + if err := aiJobApplyLabels(ctx, job); err != nil { + return err + } + } + return nil +} + func handleAIJobPage(ctx context.Context, w http.ResponseWriter, r *http.Request) error { job, err := aidb.LoadJob(ctx, r.FormValue("id")) if err != nil { @@ -160,46 +274,34 @@ func handleAIJobPage(ctx context.Context, w http.ResponseWriter, r *http.Request if err != nil { return err } - if correct := r.FormValue("correct"); correct != "" { - if !hdr.AIActions { - return ErrAccess - } - if !job.Finished.Valid || job.Error != "" { - return fmt.Errorf("job is in wrong state to set correct status") - } - switch correct { - case aiCorrectnessCorrect: - job.Correct = spanner.NullBool{Bool: true, Valid: true} - case aiCorrectnessIncorrect: - job.Correct = spanner.NullBool{Bool: false, Valid: true} - default: - job.Correct = spanner.NullBool{} - } - userEmail := "" - if user := currentUser(ctx); user != nil { - userEmail = user.Email - } - if err := aidb.AddJournalEntry(ctx, &aidb.Journal{ - JobID: spanner.NullString{StringVal: job.ID, Valid: true}, - Date: timeNow(ctx), - User: userEmail, - Action: aidb.ActionJobReview, - Details: spanner.NullJSON{Value: aidb.JobReviewDetails{Correct: job.Correct.Bool}, Valid: true}, - }); err != nil { - return err - } - if err := aiJobUpdate(ctx, job); err != nil { - return err - } + + if err := handleAIJobPagePost(ctx, job, r, hdr); err != nil { + return err } + trajectory, err := aidb.LoadTrajectory(ctx, job.ID) if err != nil { return err } - history, err := aidb.LoadJobJournal(ctx, job.ID, aidb.ActionJobReview) + uiHistory, err := 
LoadUIJobReviewHistory(ctx, job.ID) + if err != nil { + return err + } + + currentReporting, nextStageCfg, err := getJobStageInfo(ctx, job) if err != nil { return err } + + currentStageStr := "" + if currentReporting != nil { + currentStageStr = currentReporting.Stage + } + nextStageStr := "" + if nextStageCfg != nil { + nextStageStr = nextStageCfg.Name + } + var args map[string]any if job.Args.Valid { args = job.Args.Value.(map[string]any) @@ -214,18 +316,92 @@ func handleAIJobPage(ctx context.Context, w http.ResponseWriter, r *http.Request } uiJob := makeUIAIJob(job) trajectoryJSON, _ := json.Marshal(makeUIAITrajectory(trajectory)) + uiReportings, err := loadJobReportingsWithComments(ctx, job.ID) + if err != nil { + return err + } + page := &uiAIJobPage{ Header: hdr, Job: uiJob, Jobs: []*uiAIJob{uiJob}, CrashReport: crashReport, Trajectory: makeUIAITrajectory(trajectory), - History: makeUIJobReviewHistory(history), + History: uiHistory, TrajectoryJSON: template.JS(trajectoryJSON), + CurrentStage: currentStageStr, + NextStage: nextStageStr, + Reportings: uiReportings, } return serveTemplate(w, "ai_job.html", page) } +func loadJobReportingsWithComments(ctx context.Context, jobID string) ([]*uiJobReporting, error) { + allReportings, err := aidb.LoadJobReportings(ctx, jobID) + if err != nil { + return nil, err + } + allComments, err := aidb.LoadJobComments(ctx, jobID) + if err != nil { + return nil, err + } + var uris []string + for _, c := range allComments { + uris = append(uris, c.BodyURI) + } + resolved, err := loadContent(ctx, uris) + if err != nil { + return nil, err + } + for _, c := range allComments { + if text, ok := resolved[c.BodyURI]; ok { + c.BodyURI = text + } + } + + var uiReportings []*uiJobReporting + for _, r := range allReportings { + var comments []*aidb.JobComment + for _, c := range allComments { + if c.ReportingID == r.ID { + comments = append(comments, c) + } + } + link := "" + if r.Source == dashapi.AIJobSourceLore && r.ExtID.Valid { + link 
= lore.LinkToMessage(r.ExtID.StringVal) + } + uiReportings = append(uiReportings, &uiJobReporting{ + Reporting: r, + Comments: comments, + Link: link, + }) + } + return uiReportings, nil +} + +func loadContent(ctx context.Context, uris []string) (map[string]string, error) { + res := make(map[string]string) + for _, uri := range uris { + if !strings.HasPrefix(uri, "text://") { + return nil, fmt.Errorf("unrecognized content prefix: %q", uri) + } + idStr := strings.TrimPrefix(uri, "text://") + id, err := strconv.ParseInt(idStr, 10, 64) + if err != nil { + return nil, fmt.Errorf("invalid content id %q: %w", idStr, err) + } + if id != 0 { + body, _, err := getText(ctx, textJobComment, id) + if err != nil { + return nil, fmt.Errorf("failed to fetch content for %v: %w", id, err) + } + res[uri] = string(body) + } + } + return res, nil +} + func filterJobsAccess(ctx context.Context, r *http.Request, jobs []*aidb.Job) ([]*aidb.Job, error) { if accessLevel(ctx, r) == AccessAdmin { return jobs, nil @@ -352,28 +528,56 @@ func makeUIAITrajectory(trajetory []*aidb.TrajectorySpan) []*uiAITrajectorySpan return res } -func makeUIJobReviewHistory(history []*aidb.Journal) []*uiJobReviewHistory { +func makeUIJobReviewHistory(history []*aidb.Journal, reportings []*aidb.JobReporting) []*uiJobReviewHistory { + stageMap := make(map[string]string) + for _, r := range reportings { + stageMap[r.ID] = r.Stage + } var res []*uiJobReviewHistory for _, h := range history { val := aiCorrectnessUnset - if h.Details.Valid { - if details, err := parseJSON[aidb.JobReviewDetails](h.Details); err == nil { - if details.Correct { - val = aiCorrectnessCorrect - } else { - val = aiCorrectnessIncorrect + switch h.Action { + case aidb.ActionApprove: + val = aiCorrectnessCorrect + case aidb.ActionReject: + val = aiCorrectnessIncorrect + case aidb.ActionJobReview: + // ActionJobReview is obsolete, we only keep it because there are entities in the DB. 
+ if h.Details.Valid { + if details, err := parseJSON[aidb.JobReviewDetails](h.Details); err == nil { + if details.Correct { + val = aiCorrectnessCorrect + } else { + val = aiCorrectnessIncorrect + } } } + default: + val = "?" } res = append(res, &uiJobReviewHistory{ Date: h.Date, User: h.User, Correct: val, + Source: h.Source.StringVal, + Stage: stageMap[h.ReportingID.StringVal], }) } return res } +func LoadUIJobReviewHistory(ctx context.Context, jobID string) ([]*uiJobReviewHistory, error) { + history, err := aidb.LoadJobJournal(ctx, jobID) + if err != nil { + return nil, err + } + reportings, err := aidb.LoadJobReportings(ctx, jobID) + if err != nil { + return nil, err + } + return makeUIJobReviewHistory(history, reportings), nil +} + func apiAIJobPoll(ctx context.Context, req *dashapi.AIJobPollReq) (any, error) { if len(req.Workflows) == 0 || req.CodeRevision == "" || req.AgentName == "" { return nil, fmt.Errorf("invalid request") @@ -478,25 +682,52 @@ func apiAIJobDone(ctx context.Context, req *dashapi.AIJobDoneReq) (any, error) { if job.Finished.Valid { return nil, fmt.Errorf("the job %v is already finished", req.ID) } - job.Finished = spanner.NullTime{Time: timeNow(ctx), Valid: true} - job.Error = req.Error[:min(len(req.Error), 4<<10)] - if len(req.Results) != 0 { - job.Results = spanner.NullJSON{Value: req.Results, Valid: true} + finished := timeNow(ctx) + errStr := req.Error[:min(len(req.Error), 4<<10)] + job, err = aidb.SetJobDone(ctx, req.ID, finished, errStr, req.Results) + if err != nil { + return nil, err } - if err = aiJobUpdate(ctx, job); err != nil { + if err = aiJobApplyLabels(ctx, job); err != nil { return nil, err } - if job.Type == ai.WorkflowPatching && job.BugID.Valid && job.Finished.Valid && job.Error == "" { - nsCfg := getNsConfig(ctx, job.Namespace) - if nsCfg.AI != nil && nsCfg.AI.UploadPatchesToGerrit { - if err := createGerritChange(ctx, job); err != nil { - log.Errorf(ctx, "failed to create gerrit change for job %v: %v", job.ID, err) - 
} + if !shouldReportJob(job) { + return nil, nil + } + nsCfg := getNsConfig(ctx, job.Namespace) + if nsCfg.AI == nil { + return nil, nil + } + if nsCfg.AI.UploadPatchesToGerrit { + if err := createGerritChange(ctx, job); err != nil { + log.Errorf(ctx, "failed to create gerrit change for job %v: %v", job.ID, err) } } + stageCfg, err := determineNextStage(ctx, nsCfg.AI, job, "") + if err != nil { + log.Errorf(ctx, "failed to determine next stage for job %v: %v", job.ID, err) + return nil, nil + } + if stageCfg == nil { + return nil, nil + } + reporting := &aidb.JobReporting{ + Stage: stageCfg.Name, + Source: stageCfg.ServingIntegration, + } + if err := aidb.AddJobReportingTransactional(ctx, job, reporting, stageCfg.NoParallelReports); err != nil { + log.Errorf(ctx, "failed to add initial job reporting for job %v: %v", job.ID, err) + } return nil, nil } +func shouldReportJob(job *aidb.Job) bool { + if job.Type != ai.WorkflowPatching { + return false + } + return job.BugID.Valid && job.Finished.Valid && job.Error == "" +} + func aiCheckClientWorkflow(ctx context.Context, workflow string) error { suffix := apiContext(ctx).client.AIWorkflowSuffix if !strings.HasSuffix(workflow, suffix) { @@ -505,10 +736,7 @@ func aiCheckClientWorkflow(ctx context.Context, workflow string) error { return nil } -func aiJobUpdate(ctx context.Context, job *aidb.Job) error { - if err := aidb.UpdateJob(ctx, job); err != nil { - return err - } +func aiJobApplyLabels(ctx context.Context, job *aidb.Job) error { if !job.BugID.Valid || !job.Finished.Valid || job.Error != "" { return nil } diff --git a/dashboard/app/ai_report.go b/dashboard/app/ai_report.go new file mode 100644 index 000000000000..34fba9bf2ed1 --- /dev/null +++ b/dashboard/app/ai_report.go @@ -0,0 +1,297 @@ +// Copyright 2026 syzkaller project authors. All rights reserved. +// Use of this source code is governed by Apache 2 LICENSE that can be found in the LICENSE file. 
+ +package main + +import ( + "context" + "errors" + "fmt" + "strings" + + "cloud.google.com/go/spanner" + "github.com/google/syzkaller/dashboard/app/aidb" + "github.com/google/syzkaller/dashboard/dashapi" + "github.com/google/syzkaller/pkg/aflow/ai" + "google.golang.org/appengine/v2/log" +) + +const SourceWebUI = "web ui" + +func apiAIReportCommand(ctx context.Context, req *dashapi.SendExternalCommandReq) (any, error) { + var resp *dashapi.SendExternalCommandResp + var err error + if req.Upstream != nil { + resp, err = handleUpstreamCommand(ctx, req) + } else if req.Reject != nil { + resp, err = handleRejectCommand(ctx, req) + } else if req.Comment != nil { + resp, err = handleCommentCommand(ctx, req) + } else { + return nil, fmt.Errorf("unknown command") + } + + if err != nil { + if errors.Is(err, dashapi.ErrReportNotFound) { + return &dashapi.SendExternalCommandResp{Error: dashapi.ErrReportNotFound.Error()}, nil + } + return &dashapi.SendExternalCommandResp{Error: err.Error()}, nil + } + return resp, nil +} + +func handleUpstreamCommand(ctx context.Context, req *dashapi.SendExternalCommandReq, +) (*dashapi.SendExternalCommandResp, error) { + reporting, job, err := lookupJobByExtReq(ctx, req) + if err != nil { + return nil, err + } + + err = processUpstreamSubcommand(ctx, job, reporting, req) + if err != nil { + return nil, err + } + + return &dashapi.SendExternalCommandResp{}, nil +} + +func processUpstreamSubcommand(ctx context.Context, job *aidb.Job, + currentReporting *aidb.JobReporting, req *dashapi.SendExternalCommandReq) error { + nsCfg := getNsConfig(ctx, job.Namespace) + if nsCfg.AI == nil || len(nsCfg.AI.Stages) == 0 { + return aidb.UpstreamReportCommand(ctx, aidb.UpstreamReportArgs{ + Job: job, + CommandSource: req.Source, + CommandExtID: req.MessageExtID, + User: req.Author, + }) + } + + currentStage := "" + if currentReporting != nil { + currentStage = currentReporting.Stage + } + + nextStageCfg, err := determineNextStage(ctx, nsCfg.AI, job, 
currentStage) + if err != nil { + return err + } + nextStage := nextStageCfg.Name + + return aidb.UpstreamReportCommand(ctx, aidb.UpstreamReportArgs{ + Job: job, + Reporting: &aidb.JobReporting{ + Stage: nextStage, + Source: nextStageCfg.ServingIntegration, + UpstreamedAt: spanner.NullTime{Time: aidb.TimeNow(ctx), Valid: true}, + }, + NoParallel: nextStageCfg.NoParallelReports, + CommandSource: req.Source, + CommandExtID: req.MessageExtID, + User: req.Author, + Reason: "", + }) +} + +func determineNextStage(ctx context.Context, cfg *AIConfig, job *aidb.Job, + currentStage string) (*AIPatchStageConfig, error) { + reportings, err := aidb.LoadJobReportings(ctx, job.ID) + if err != nil { + return nil, fmt.Errorf("failed to load job reportings: %w", err) + } + reported := make(map[string]bool) + for _, r := range reportings { + reported[r.Stage] = true + } + currentIndex := -1 + if currentStage == "" { + currentIndex = -1 + } else { + currentIndex = cfg.StageIndexByName(currentStage) + if currentIndex == -1 { + return nil, fmt.Errorf("current stage %s not found in config", currentStage) + } + } + + // Check if any stage after currentStage has already been reported. 
+ for i := currentIndex + 1; i < len(cfg.Stages); i++ { + if reported[cfg.Stages[i].Name] { + return nil, fmt.Errorf("cannot proceed to next stage, a later stage %s was already reported", cfg.Stages[i].Name) + } + } + + if currentIndex+1 >= len(cfg.Stages) { + return nil, fmt.Errorf("no valid next stage found, all stages reported") + } + + return &cfg.Stages[currentIndex+1], nil +} + +func handleRejectCommand(ctx context.Context, req *dashapi.SendExternalCommandReq, +) (*dashapi.SendExternalCommandResp, error) { + _, job, err := lookupJobByExtReq(ctx, req) + if err != nil { + return nil, err + } + + reason := "" + if req.Reject != nil { + reason = req.Reject.Reason + } + + err = aidb.RejectReportCommand(ctx, aidb.RejectReportArgs{ + Job: job, + CommandSource: req.Source, + CommandExtID: req.MessageExtID, + User: req.Author, + Reason: reason, + }) + if err != nil { + return nil, err + } + + return &dashapi.SendExternalCommandResp{}, nil +} + +func apiAIPollReport(ctx context.Context, req *dashapi.PollExternalReportReq) (any, error) { + reportings, err := aidb.LoadPendingJobReportingBySource(ctx, req.Source) + if err != nil { + return nil, fmt.Errorf("failed to load pending reportings: %w", err) + } + for _, r := range reportings { + job, err := aidb.LoadJob(ctx, r.JobID) + if err != nil { + return nil, fmt.Errorf("failed to load job %v: %w", r.JobID, err) + } + nsCfg := getNsConfig(ctx, job.Namespace) + if nsCfg.AI == nil { + log.Errorf(ctx, "ai is disabled for namespace %s, yet job %v has reportings", job.Namespace, job.ID) + continue + } + idx := nsCfg.AI.StageIndexByName(r.Stage) + if idx == -1 { + // TODO: this could only happen if the config changed between the reporting creation + // and now. In this case, we should probably just delete / reject this reporting. 
+ log.Errorf(ctx, "ai job reporting stage %s not found in config (id %v)", r.Stage, r.ID) + continue + } + stageCfg := &nsCfg.AI.Stages[idx] + if job.Type != ai.WorkflowPatching { + log.Errorf(ctx, "unsupported job type for external reporting: %s (job %v)", job.Type, job.ID) + return nil, fmt.Errorf("unsupported job type: %s", job.Type) + } + patchResult, err := makeNewReportResult(job) + if err != nil { + return nil, err + } + isLastStage := false + if len(nsCfg.AI.Stages) > 0 { + isLastStage = nsCfg.AI.Stages[len(nsCfg.AI.Stages)-1].Name == r.Stage + } + to := []string{stageCfg.MailingList} + var cc []string + if isLastStage { + to = append(to, patchResult.To...) + cc = append(cc, patchResult.Cc...) + } + return &dashapi.PollExternalReportResp{ + Result: &dashapi.ReportPollResult{ + ID: r.ID, + Moderation: !isLastStage, + To: to, + Cc: cc, + Patch: patchResult, + }, + }, nil + } + return &dashapi.PollExternalReportResp{}, nil +} + +func makeNewReportResult(job *aidb.Job) (*dashapi.NewReportResult, error) { + res, err := castJobResults[ai.PatchingOutputs](job) + if err != nil { + return nil, fmt.Errorf("failed to cast job results: %w", err) + } + var subject string + if lines := strings.Split(res.PatchDescription, "\n"); len(lines) > 0 && lines[0] != "" { + subject = lines[0] + } else { + return nil, fmt.Errorf("failed to extract subject from patch description") + } + var to, cc []string + for _, rec := range res.Recipients { + if rec.To { + to = append(to, rec.Email) + } else { + cc = append(cc, rec.Email) + } + } + return &dashapi.NewReportResult{ + Subject: subject, + Body: res.PatchDescription, + GitDiff: res.PatchDiff, + BaseCommit: res.KernelCommit, + BaseTree: res.KernelRepo, + Version: 1, // TODO: track and increase it. 
+ To: to, + Cc: cc, + }, nil +} + +func apiAIConfirmReport(ctx context.Context, req *dashapi.ConfirmPublishedReq) (any, error) { + if err := aidb.JobReportingPublished(ctx, req.ReportID, req.PublishedExtID); err != nil { + return nil, fmt.Errorf("failed to mark published: %w", err) + } + return nil, nil +} + +func lookupJobByExtReq(ctx context.Context, req *dashapi.SendExternalCommandReq) ( + *aidb.JobReporting, *aidb.Job, error) { + extID := req.RootExtID + if extID == "" { + return nil, nil, fmt.Errorf("RootExtID must be provided") + } + + reporting, err := aidb.LoadJobReportingByExtID(ctx, extID) + if err != nil { + return nil, nil, fmt.Errorf("failed to lookup job reporting: %w", err) + } + if reporting == nil { + return nil, nil, dashapi.ErrReportNotFound + } + + job, err := aidb.LoadJob(ctx, reporting.JobID) + if err != nil { + return nil, nil, fmt.Errorf("failed to load job: %w", err) + } + if job == nil { + return nil, nil, fmt.Errorf("job %v not found", reporting.ID) + } + return reporting, job, nil +} + +func handleCommentCommand(ctx context.Context, req *dashapi.SendExternalCommandReq, +) (*dashapi.SendExternalCommandResp, error) { + reporting, job, err := lookupJobByExtReq(ctx, req) + if err != nil { + return nil, err + } + + textID, err := putText(ctx, job.Namespace, textJobComment, []byte(req.Comment.Body)) + if err != nil { + return nil, fmt.Errorf("failed to store comment body: %w", err) + } + + err = aidb.SaveJobComment(ctx, &aidb.JobComment{ + ReportingID: reporting.ID, + ExtID: req.MessageExtID, + Author: req.Author, + BodyURI: fmt.Sprintf("text://%v", textID), + Date: aidb.TimeNow(ctx), + }) + if err != nil { + return nil, err + } + + return &dashapi.SendExternalCommandResp{}, nil +} diff --git a/dashboard/app/ai_report_lore_test.go b/dashboard/app/ai_report_lore_test.go new file mode 100644 index 000000000000..8d41c49e7f09 --- /dev/null +++ b/dashboard/app/ai_report_lore_test.go @@ -0,0 +1,363 @@ +// Copyright 2026 syzkaller project authors. 
All rights reserved. +// Use of this source code is governed by Apache 2 LICENSE that can be found in the LICENSE file. + +package main + +import ( + "context" + "fmt" + "testing" + "time" + + "github.com/google/syzkaller/dashboard/dashapi" + "github.com/google/syzkaller/pkg/aflow/ai" + "github.com/google/syzkaller/pkg/debugtracer" + "github.com/google/syzkaller/pkg/email/lore" + "github.com/google/syzkaller/pkg/email/sender" + lorerelay "github.com/google/syzkaller/pkg/lore-relay" + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" +) + +func TestAILoreIntegration(t *testing.T) { + c := NewSpannerCtx(t) + defer c.Close() + + c.SetAIConfig(&AIConfig{ + Stages: []AIPatchStageConfig{ + {Name: "moderation", ServingIntegration: "lore", MailingList: "moderation@test.com"}, + {Name: "public", ServingIntegration: "lore", MailingList: "public@test.com"}, + }, + }) + + repoDir := t.TempDir() + loreArchive := lore.NewTestLoreArchive(t, repoDir) + + now := time.Now() + + pollerCfg := lore.PollerConfig{ + RepoDir: t.TempDir(), + URL: loreArchive.Repo.Dir, + Tracer: &debugtracer.TestTracer{T: t}, + } + + poller, err := lore.NewPoller(pollerCfg) + require.NoError(t, err) + + mockSnd := &integrationMockSender{} + + relay := lorerelay.NewRelay(&lorerelay.Config{ + DocsLink: "http://docs.link", + LoreArchive: "archive@lore.com", + }, c.globalClient, poller, mockSnd) + + // 1. Create a bug and AI job. 
+ build := testBuild(1) + c.aiClient.UploadBuild(build) + crash := testCrashWithRepro(build, 1) + c.aiClient.ReportCrash(crash) + extID := c.aiClient.pollEmailExtID() + + _, err = c.agentClient.AIJobPoll(&dashapi.AIJobPollReq{ + AgentName: "test-agent", + CodeRevision: "test-rev", + Workflows: []dashapi.AIWorkflow{{Type: ai.WorkflowPatching, Name: "patching"}}, + }) + require.NoError(t, err) + jobID := c.createAIJob(extID, string(ai.WorkflowPatching), "") + + err = c.agentClient.AIJobDone(&dashapi.AIJobDoneReq{ + ID: jobID, + Results: map[string]any{ + "PatchDescription": "Test Description", + "PatchDiff": "diff", + "KernelRepo": "repo", + "KernelCommit": "commit", + "Recipients": []map[string]any{ + {"Name": "Maintainer", "Email": "maintainer@email.com", "To": true}, + {"Name": "Reviewer", "Email": "reviewer@email.com", "To": false}, + }, + }, + }) + require.NoError(t, err) + + // 2. Poll Dashboard - should report to moderation. + err = relay.PollDashboardOnce(context.Background()) + require.NoError(t, err) + + require.Len(t, mockSnd.sent, 1) + assert.Equal(t, []string{"moderation@test.com"}, mockSnd.sent[0].To) + assert.Equal(t, "[PATCH RFC] Test Description", mockSnd.sent[0].Subject) + assert.Equal(t, []string{"archive@lore.com"}, mockSnd.sent[0].Cc) + + body := string(mockSnd.sent[0].Body) + assert.Contains(t, body, "Final To: maintainer@email.com") + assert.Contains(t, body, "Final Cc: reviewer@email.com") + + // 3. Approval (#syz upstream). + loreArchive.SaveMessageAt(t, `From: user@email +Subject: Re: [PATCH RFC] Test Description +Message-ID: <reply-1> +In-Reply-To: <mock-1> + +#syz upstream +`, now.Add(time.Minute)) + + err = relay.PollLoreOnce(context.Background()) + require.NoError(t, err) + + // 4. Poll Dashboard Again - should report to public. + err = relay.PollDashboardOnce(context.Background()) + require.NoError(t, err) + + require.Len(t, mockSnd.sent, 2) // Moderation email + Public email.
+ assert.Equal(t, []string{"public@test.com", "maintainer@email.com"}, mockSnd.sent[1].To) + assert.Equal(t, "[PATCH] Test Description", mockSnd.sent[1].Subject) + assert.Equal(t, []string{"reviewer@email.com", "archive@lore.com"}, mockSnd.sent[1].Cc) + + bodyPublic := string(mockSnd.sent[1].Body) + assert.NotContains(t, bodyPublic, "Final To:") + + // 5. Duplicate Approval (#syz upstream) - should fail because already upstreamed. + loreArchive.SaveMessageAt(t, `From: user@email +Subject: Re: [PATCH RFC] Test Description +Message-ID: <reply-2> +In-Reply-To: <mock-1> + +#syz upstream +`, now.Add(time.Minute*2)) + + err = relay.PollLoreOnce(context.Background()) + require.NoError(t, err) + + require.Len(t, mockSnd.sent, 3) + assert.Equal(t, []string{"user@email"}, mockSnd.sent[2].To) + expectedBody := "> #syz upstream\n\nCommand failed:\n\nno valid next stage found, all stages reported\n\n" + assert.Equal(t, expectedBody, string(mockSnd.sent[2].Body)) + + // Poll Lore again - nothing new should be found. + err = relay.PollLoreOnce(context.Background()) + require.NoError(t, err) + + // Poll Dashboard again - still nothing.
+ err = relay.PollDashboardOnce(context.Background()) + require.NoError(t, err) + + require.Len(t, mockSnd.sent, 3) +} + +func TestAILoreIntegrationReject(t *testing.T) { + c := NewSpannerCtx(t) + defer c.Close() + + c.SetAIConfig(&AIConfig{ + Stages: []AIPatchStageConfig{ + {Name: "moderation", ServingIntegration: "lore", MailingList: "moderation@test.com"}, + {Name: "public", ServingIntegration: "lore", MailingList: "public@test.com"}, + }, + }) + + repoDir := t.TempDir() + loreArchive := lore.NewTestLoreArchive(t, repoDir) + + now := time.Now() + + pollerCfg := lore.PollerConfig{ + RepoDir: t.TempDir(), + URL: loreArchive.Repo.Dir, + Tracer: &debugtracer.TestTracer{T: t}, + } + + poller, err := lore.NewPoller(pollerCfg) + require.NoError(t, err) + + mockSnd := &integrationMockSender{} + + relay := lorerelay.NewRelay(&lorerelay.Config{ + DocsLink: "http://docs.link", + LoreArchive: "archive@lore.com", + }, c.globalClient, poller, mockSnd) + + // 1. Create a bug and AI job. + build := testBuild(1) + c.aiClient.UploadBuild(build) + crash := testCrashWithRepro(build, 1) + c.aiClient.ReportCrash(crash) + extID := c.aiClient.pollEmailExtID() + + _, err = c.agentClient.AIJobPoll(&dashapi.AIJobPollReq{ + AgentName: "test-agent", + CodeRevision: "test-rev", + Workflows: []dashapi.AIWorkflow{{Type: ai.WorkflowPatching, Name: "patching"}}, + }) + require.NoError(t, err) + jobID := c.createAIJob(extID, string(ai.WorkflowPatching), "") + + err = c.agentClient.AIJobDone(&dashapi.AIJobDoneReq{ + ID: jobID, + Results: map[string]any{ + "PatchDescription": "Test Description", + "PatchDiff": "diff", + "KernelRepo": "repo", + "KernelCommit": "commit", + }, + }) + require.NoError(t, err) + + // 2. Poll Dashboard - should report to moderation. 
+ err = relay.PollDashboardOnce(context.Background()) + require.NoError(t, err) + + require.Len(t, mockSnd.sent, 1) + assert.Equal(t, []string{"moderation@test.com"}, mockSnd.sent[0].To) + assert.Equal(t, "[PATCH RFC] Test Description", mockSnd.sent[0].Subject) + assert.Equal(t, []string{"archive@lore.com"}, mockSnd.sent[0].Cc) + + // 3. Reject (#syz reject). + loreArchive.SaveMessageAt(t, `From: user@email +Subject: Re: [PATCH RFC] Test Description +Message-ID: <reply-1> +In-Reply-To: <mock-1> + +#syz reject +`, now.Add(time.Minute)) + + err = relay.PollLoreOnce(context.Background()) + require.NoError(t, err) + + // 4. Poll Dashboard Again - should NOT report anywhere! + err = relay.PollDashboardOnce(context.Background()) + require.NoError(t, err) + + require.Len(t, mockSnd.sent, 1) +} + +func TestAILoreUnknownMessageID(t *testing.T) { + c := NewSpannerCtx(t) + defer c.Close() + + loreArchive := lore.NewTestLoreArchive(t, t.TempDir()) + + cfg := &lorerelay.Config{ + DashboardPollInterval: time.Hour, + LorePollInterval: time.Hour, + } + + mockSnd := &integrationMockSender{} + lorePoller, err := lore.NewPoller(lore.PollerConfig{ + RepoDir: t.TempDir(), + URL: loreArchive.Repo.Dir, + Tracer: &debugtracer.TestTracer{T: t}, + OwnEmails: []string{"own@email.com"}, + }) + require.NoError(t, err) + relay := lorerelay.NewRelay(cfg, c.globalClient, lorePoller, mockSnd) + + now := time.Now() + loreArchive.SaveMessageAt(t, `From: user@email +Subject: Re: [PATCH RFC] Test Description +Message-ID: <reply-1> +In-Reply-To: <unknown-id> + +#syz upstream +`, now) + + // We should stay silent.
+ err = relay.PollLoreOnce(context.Background()) + require.NoError(t, err) + require.Len(t, mockSnd.sent, 0) +} + +func TestAILoreIntegrationComment(t *testing.T) { + c := NewSpannerCtx(t) + defer c.Close() + + c.SetAIConfig(&AIConfig{ + Stages: []AIPatchStageConfig{ + {Name: "moderation", ServingIntegration: "lore", MailingList: "moderation@test.com"}, + }, + }) + + repoDir := t.TempDir() + loreArchive := lore.NewTestLoreArchive(t, repoDir) + + now := time.Now() + + pollerCfg := lore.PollerConfig{ + RepoDir: t.TempDir(), + URL: loreArchive.Repo.Dir, + Tracer: &debugtracer.TestTracer{T: t}, + } + + poller, err := lore.NewPoller(pollerCfg) + require.NoError(t, err) + + mockSnd := &integrationMockSender{} + + relay := lorerelay.NewRelay(&lorerelay.Config{ + DocsLink: "http://docs.link", + LoreArchive: "archive@lore.com", + }, c.globalClient, poller, mockSnd) + + // 1. Create a bug and AI job. + build := testBuild(1) + c.aiClient.UploadBuild(build) + crash := testCrashWithRepro(build, 1) + c.aiClient.ReportCrash(crash) + extID := c.aiClient.pollEmailExtID() + + _, err = c.agentClient.AIJobPoll(&dashapi.AIJobPollReq{ + AgentName: "test-agent", + CodeRevision: "test-rev", + Workflows: []dashapi.AIWorkflow{{Type: ai.WorkflowPatching, Name: "patching"}}, + }) + require.NoError(t, err) + jobID := c.createAIJob(extID, string(ai.WorkflowPatching), "") + + err = c.agentClient.AIJobDone(&dashapi.AIJobDoneReq{ + ID: jobID, + Results: map[string]any{ + "PatchDescription": "Test Description", + "PatchDiff": "diff", + "KernelRepo": "repo", + "KernelCommit": "commit", + }, + }) + require.NoError(t, err) + + // 2. Poll Dashboard - should report to moderation. + err = relay.PollDashboardOnce(context.Background()) + require.NoError(t, err) + require.Len(t, mockSnd.sent, 1) + + // 3. Send a plain comment. + loreArchive.SaveMessageAt(t, `From: reviewer@email +Subject: Re: [PATCH RFC] Test Description +Message-ID: <reply-1> +In-Reply-To: <mock-1> + +This is just a normal review comment with some context.
+`, now.Add(time.Minute)) + + err = relay.PollLoreOnce(context.Background()) + require.NoError(t, err) + + // Verify that NO error reply was sent, meaning sent length is still exactly 1! + require.Len(t, mockSnd.sent, 1) + + reportings, err := loadJobReportingsWithComments(c.ctx, jobID) + require.NoError(t, err) + require.Len(t, reportings, 1) + require.Len(t, reportings[0].Comments, 1) + assert.Equal(t, "reviewer@email", reportings[0].Comments[0].Author) + assert.Contains(t, reportings[0].Comments[0].BodyURI, "This is just a normal review comment with some context.") +} + +type integrationMockSender struct { + sent []*sender.Email +} + +func (m *integrationMockSender) Send(ctx context.Context, email *sender.Email) (string, error) { + m.sent = append(m.sent, email) + return fmt.Sprintf("<%d@mock-sender>", len(m.sent)), nil +} diff --git a/dashboard/app/ai_report_test.go b/dashboard/app/ai_report_test.go new file mode 100644 index 000000000000..79c12d5ac8f6 --- /dev/null +++ b/dashboard/app/ai_report_test.go @@ -0,0 +1,420 @@ +// Copyright 2026 syzkaller project authors. All rights reserved. +// Use of this source code is governed by Apache 2 LICENSE that can be found in the LICENSE file. + +package main + +import ( + "errors" + "testing" + "time" + + "github.com/google/syzkaller/dashboard/app/aidb" + "github.com/google/syzkaller/dashboard/dashapi" + "github.com/google/syzkaller/pkg/aflow/ai" + "github.com/stretchr/testify/require" +) + +func TestAIExternalReporting(t *testing.T) { + c := NewSpannerCtx(t) + defer c.Close() + + c.SetAIConfig(&AIConfig{ + Stages: []AIPatchStageConfig{ + {Name: "moderation", ServingIntegration: "lore", MailingList: "moderation@test.com"}, + {Name: "public", ServingIntegration: "lore", MailingList: "public@test.com"}, + }, + }) + + // Report a crash to create a bug. 
+ build := testBuild(1) + c.aiClient.UploadBuild(build) + crash := testCrashWithRepro(build, 1) + c.aiClient.ReportCrash(crash) + extID := c.aiClient.pollEmailExtID() + + // Register workflow and create a job. + _, err := c.agentClient.AIJobPoll(&dashapi.AIJobPollReq{ + AgentName: "test-agent", + CodeRevision: "test-rev", + Workflows: []dashapi.AIWorkflow{{Type: ai.WorkflowPatching, Name: "patching"}}, + }) + require.NoError(t, err) + jobID := c.createAIJob(extID, string(ai.WorkflowPatching), "") + + // Mark job as done with results. + err = c.agentClient.AIJobDone(&dashapi.AIJobDoneReq{ + ID: jobID, + Results: map[string]any{ + "PatchDescription": "Test Description", + "PatchDiff": "diff", + "KernelRepo": "repo", + "KernelCommit": "commit", + }, + }) + require.NoError(t, err) + + // Poll for pending reports and confirm published. + pollResp, err := c.globalClient.AIPollReport(&dashapi.PollExternalReportReq{ + Source: "lore", + }) + require.NoError(t, err) + require.NotNil(t, pollResp.Result) + require.True(t, pollResp.Result.Moderation) + + err = c.globalClient.AIConfirmReport(&dashapi.ConfirmPublishedReq{ + ReportID: pollResp.Result.ID, + PublishedExtID: "moderation-msg-id", + }) + require.NoError(t, err) + + // Upstream the result. + resp, err := c.globalClient.AIReportCommand(&dashapi.SendExternalCommandReq{ + RootExtID: "moderation-msg-id", + Upstream: &dashapi.UpstreamCommand{}, + Author: "test-user", + Source: "lore", + }) + require.NoError(t, err) + require.Empty(t, resp.Error) + uiHistory, err := LoadUIJobReviewHistory(c.ctx, jobID) + require.NoError(t, err) + require.Equal(t, []*uiJobReviewHistory{ + { + Date: c.mockedTime, + User: "test-user", + Correct: aiCorrectnessCorrect, + Source: "lore", + Stage: "public", + }, + }, uiHistory) + + // Verify Job.Correct = true. 
+ job, err := aidb.LoadJob(c.ctx, jobID) + require.NoError(t, err) + require.True(t, job.Correct.Valid) + require.True(t, job.Correct.Bool) + + t0 := c.mockedTime + c.advanceTime(time.Second) + + // "Report" to the public lists. + pollResp, err = c.globalClient.AIPollReport(&dashapi.PollExternalReportReq{ + Source: "lore", + }) + require.NoError(t, err) + require.NotNil(t, pollResp.Result) + require.False(t, pollResp.Result.Moderation) + require.Equal(t, &dashapi.NewReportResult{ + Subject: "Test Description", + Body: "Test Description", + Version: 1, + GitDiff: "diff", + BaseCommit: "commit", + BaseTree: "repo", + }, pollResp.Result.Patch) + require.Equal(t, []string{"public@test.com"}, pollResp.Result.To) + + err = c.globalClient.AIConfirmReport(&dashapi.ConfirmPublishedReq{ + ReportID: pollResp.Result.ID, + PublishedExtID: "msg-id-123", + }) + require.NoError(t, err) + + // Verify no more pending. + pollResp, err = c.globalClient.AIPollReport(&dashapi.PollExternalReportReq{ + Source: "lore", + }) + require.NoError(t, err) + require.Nil(t, pollResp.Result) + + // Reject the patch. + resp, err = c.globalClient.AIReportCommand(&dashapi.SendExternalCommandReq{ + RootExtID: "moderation-msg-id", + Reject: &dashapi.RejectCommand{Reason: "Bad patch"}, + Author: "test-user", + Source: "lore", + }) + require.NoError(t, err) + require.Empty(t, resp.Error) + uiHistory, err = LoadUIJobReviewHistory(c.ctx, jobID) + require.NoError(t, err) + require.Equal(t, []*uiJobReviewHistory{ + { + Date: c.mockedTime, + User: "test-user", + Correct: aiCorrectnessIncorrect, + Source: "lore", + Stage: "", // Rejections are not per-stage. + }, + { + Date: t0, + User: "test-user", + Correct: aiCorrectnessCorrect, + Source: "lore", + Stage: "public", + }, + }, uiHistory) + + // Verify Job.Correct = false. 
+ job, err = aidb.LoadJob(c.ctx, jobID) + require.NoError(t, err) + require.True(t, job.Correct.Valid) + require.False(t, job.Correct.Bool) +} + +func TestAIReportNotFound(t *testing.T) { + c := NewSpannerCtx(t) + defer c.Close() + + req := &dashapi.SendExternalCommandReq{ + RootExtID: "non-existent-id", + Upstream: &dashapi.UpstreamCommand{}, + } + _, err := c.globalClient.AIReportCommand(req) + require.Error(t, err) + require.True(t, errors.Is(err, dashapi.ErrReportNotFound), "expected ErrReportNotFound, got %+v", err) +} + +func TestAINoFailedJobReported(t *testing.T) { + c := NewSpannerCtx(t) + defer c.Close() + + c.SetAIConfig(&AIConfig{ + Stages: []AIPatchStageConfig{ + {Name: "public", ServingIntegration: "lore", MailingList: "public@test.com"}, + }, + }) + + // Report a crash to create a bug. + build := testBuild(1) + c.aiClient.UploadBuild(build) + crash := testCrashWithRepro(build, 1) + c.aiClient.ReportCrash(crash) + extID := c.aiClient.pollEmailExtID() + + // Register workflow. + _, err := c.agentClient.AIJobPoll(&dashapi.AIJobPollReq{ + AgentName: "test-agent", + CodeRevision: "test-rev", + Workflows: []dashapi.AIWorkflow{{Type: ai.WorkflowPatching, Name: "patching"}}, + }) + require.NoError(t, err) + jobID := c.createAIJob(extID, string(ai.WorkflowPatching), "") + + // Mark job as failed. + err = c.agentClient.AIJobDone(&dashapi.AIJobDoneReq{ + ID: jobID, + Error: "Something went wrong", + }) + require.NoError(t, err) + + // Nothing is reported. 
+ pollResp, err := c.globalClient.AIPollReport(&dashapi.PollExternalReportReq{ + Source: "lore", + }) + require.NoError(t, err) + require.Nil(t, pollResp.Result) +} + +func TestAINoParallelReports(t *testing.T) { + c := NewSpannerCtx(t) + defer c.Close() + + aiCfg := &AIConfig{ + Stages: []AIPatchStageConfig{ + {Name: "review", ServingIntegration: "lore", MailingList: "review@test.com", NoParallelReports: false}, + {Name: "lkml", ServingIntegration: "lore", MailingList: "lkml@test.com", NoParallelReports: true}, + }, + } + c.SetAIConfig(aiCfg) + + // Report a crash to create a bug. + build := testBuild(1) + build.Manager = "ains" + c.aiClient.UploadBuild(build) + crash := testCrashWithRepro(build, 1) + c.aiClient.ReportCrash(crash) + extID := c.aiClient.pollEmailExtID() + + // Register workflow. + _, err := c.agentClient.AIJobPoll(&dashapi.AIJobPollReq{ + AgentName: "test-agent", + CodeRevision: "test-rev", + Workflows: []dashapi.AIWorkflow{{Type: ai.WorkflowPatching, Name: "patching"}}, + }) + require.NoError(t, err) + + // Create two jobs for the same bug. + jobID1 := c.createAIJob(extID, string(ai.WorkflowPatching), "") + jobID2 := c.createAIJob(extID, string(ai.WorkflowPatching), "") + + err = c.agentClient.AIJobDone(&dashapi.AIJobDoneReq{ + ID: jobID1, + Results: map[string]any{ + "PatchDescription": "Job 1 Description", + "PatchDiff": "diff1", + "KernelRepo": "repo", + "KernelCommit": "commit1", + }, + }) + require.NoError(t, err) + + err = c.agentClient.AIJobDone(&dashapi.AIJobDoneReq{ + ID: jobID2, + Results: map[string]any{ + "PatchDescription": "Job 2 Description", + "PatchDiff": "diff2", + "KernelRepo": "repo", + "KernelCommit": "commit2", + }, + }) + require.NoError(t, err) + + // Poll for pending reports. + // Stage 0 ("review") allows parallel reports. 
+ pollResp, err := c.globalClient.AIPollReport(&dashapi.PollExternalReportReq{ + Source: "lore", + }) + require.NoError(t, err) + require.NotNil(t, pollResp.Result) + id1 := pollResp.Result.ID + + // Confirm first report. + err = c.globalClient.AIConfirmReport(&dashapi.ConfirmPublishedReq{ + ReportID: id1, + PublishedExtID: "msg-id-1", + }) + require.NoError(t, err) + + // Poll again. Should return the second report. + pollResp, err = c.globalClient.AIPollReport(&dashapi.PollExternalReportReq{ + Source: "lore", + }) + require.NoError(t, err) + require.NotNil(t, pollResp.Result) + id2 := pollResp.Result.ID + require.NotEqual(t, id1, id2) + + // Confirm second report. + err = c.globalClient.AIConfirmReport(&dashapi.ConfirmPublishedReq{ + ReportID: id2, + PublishedExtID: "msg-id-2", + }) + require.NoError(t, err) + + // Upstream job 1's result. + resp, err := c.globalClient.AIReportCommand(&dashapi.SendExternalCommandReq{ + RootExtID: "msg-id-1", + Upstream: &dashapi.UpstreamCommand{}, + }) + require.NoError(t, err) + require.Empty(t, resp.Error) + + // Second upstream should fail because that stage is exclusive. + resp, err = c.globalClient.AIReportCommand(&dashapi.SendExternalCommandReq{ + RootExtID: "msg-id-2", + Upstream: &dashapi.UpstreamCommand{}, + }) + require.NoError(t, err) + require.NotEmpty(t, resp.Error) + require.Contains(t, resp.Error, "already upstreamed") + + // Invalidate the first job using reject command. + resp, err = c.globalClient.AIReportCommand(&dashapi.SendExternalCommandReq{ + RootExtID: "msg-id-1", + Reject: &dashapi.RejectCommand{}, + }) + require.NoError(t, err) + require.Empty(t, resp.Error) + + // Now we can push the second job to the second stage. 
+ resp, err = c.globalClient.AIReportCommand(&dashapi.SendExternalCommandReq{ + RootExtID: "msg-id-2", + Upstream: &dashapi.UpstreamCommand{}, + }) + require.NoError(t, err) + require.Empty(t, resp.Error) +} + +func TestAIUpstreamTwice(t *testing.T) { + c := NewSpannerCtx(t) + defer c.Close() + + c.SetAIConfig(&AIConfig{ + Stages: []AIPatchStageConfig{ + {Name: "moderation", ServingIntegration: "lore", MailingList: "moderation@test.com"}, + {Name: "public", ServingIntegration: "lore", MailingList: "public@test.com"}, + }, + }) + + // Report a crash to create a bug. + build := testBuild(1) + c.aiClient.UploadBuild(build) + crash := testCrashWithRepro(build, 1) + c.aiClient.ReportCrash(crash) + extID := c.aiClient.pollEmailExtID() + + // Register workflow and create a job. + _, err := c.agentClient.AIJobPoll(&dashapi.AIJobPollReq{ + AgentName: "test-agent", + CodeRevision: "test-rev", + Workflows: []dashapi.AIWorkflow{{Type: ai.WorkflowPatching, Name: "patching"}}, + }) + require.NoError(t, err) + jobID := c.createAIJob(extID, string(ai.WorkflowPatching), "") + + // Mark job as done. + err = c.agentClient.AIJobDone(&dashapi.AIJobDoneReq{ + ID: jobID, + Results: map[string]any{ + "PatchDescription": "Test Description", + "PatchDiff": "diff", + }, + }) + require.NoError(t, err) + + // Poll and confirm report for "moderation" stage. + pollResp, err := c.globalClient.AIPollReport(&dashapi.PollExternalReportReq{ + Source: "lore", + }) + require.NoError(t, err) + require.NotNil(t, pollResp.Result) + require.Equal(t, "moderation@test.com", pollResp.Result.To[0]) + + err = c.globalClient.AIConfirmReport(&dashapi.ConfirmPublishedReq{ + ReportID: pollResp.Result.ID, + PublishedExtID: "msg-id-moderation", + }) + require.NoError(t, err) + + // Upstream the result (moves to "public"). 
+ resp, err := c.globalClient.AIReportCommand(&dashapi.SendExternalCommandReq{ + RootExtID: "msg-id-moderation", + Upstream: &dashapi.UpstreamCommand{}, + }) + require.NoError(t, err) + require.Empty(t, resp.Error) + + // Poll and confirm report for "public" stage. + pollResp, err = c.globalClient.AIPollReport(&dashapi.PollExternalReportReq{ + Source: "lore", + }) + require.NoError(t, err) + require.NotNil(t, pollResp.Result) + require.Equal(t, "public@test.com", pollResp.Result.To[0]) + + err = c.globalClient.AIConfirmReport(&dashapi.ConfirmPublishedReq{ + ReportID: pollResp.Result.ID, + PublishedExtID: "msg-id-public", + }) + require.NoError(t, err) + + // Try to upstream again. Should fail at determineNextStage level. + resp, err = c.globalClient.AIReportCommand(&dashapi.SendExternalCommandReq{ + RootExtID: "msg-id-moderation", + Upstream: &dashapi.UpstreamCommand{}, + }) + require.NoError(t, err) + require.NotEmpty(t, resp.Error) + require.Contains(t, resp.Error, "a later stage public was already reported") +} diff --git a/dashboard/app/ai_test.go b/dashboard/app/ai_test.go index efefc24672cb..e8eaa1489062 100644 --- a/dashboard/app/ai_test.go +++ b/dashboard/app/ai_test.go @@ -397,9 +397,8 @@ func TestAIAssessmentKCSAN(t *testing.T) { require.NoError(t, err) // Verify history via UI helper to also test parsing logic. 
- history, err := aidb.LoadJobJournal(c.ctx, resp.ID, aidb.ActionJobReview) + uiHistory, err := LoadUIJobReviewHistory(c.ctx, resp.ID) require.NoError(t, err) - uiHistory := makeUIJobReviewHistory(history) require.Len(t, uiHistory, 1) require.Equal(t, uiHistory[0].Correct, aiCorrectnessCorrect) require.NotEmpty(t, uiHistory[0].User) @@ -415,9 +414,8 @@ func TestAIAssessmentKCSAN(t *testing.T) { _, err = c.GET(fmt.Sprintf("/ai_job?id=%v&correct=%v", resp.ID, aiCorrectnessIncorrect)) require.NoError(t, err) - history, err = aidb.LoadJobJournal(c.ctx, resp.ID, aidb.ActionJobReview) + uiHistory, err = LoadUIJobReviewHistory(c.ctx, resp.ID) require.NoError(t, err) - uiHistory = makeUIJobReviewHistory(history) require.Len(t, uiHistory, 2) require.Equal(t, uiHistory[0].Correct, aiCorrectnessIncorrect) require.Equal(t, uiHistory[1].Correct, aiCorrectnessCorrect) diff --git a/dashboard/app/aidb/crud.go b/dashboard/app/aidb/crud.go index 8e095e7be921..ab7a3d33ae7c 100644 --- a/dashboard/app/aidb/crud.go +++ b/dashboard/app/aidb/crud.go @@ -18,6 +18,7 @@ import ( "github.com/google/syzkaller/pkg/aflow/trajectory" "github.com/google/uuid" "google.golang.org/appengine/v2" + "google.golang.org/grpc/codes" ) const ( @@ -27,6 +28,14 @@ const ( var ErrNotFound = errors.New("entity not found") +type AlreadyUpstreamedError struct { + Stage string +} + +func (e *AlreadyUpstreamedError) Error() string { + return fmt.Sprintf("already upstreamed at stage %s", e.Stage) +} + func init() { // This forces unmarshalling of JSON integers into json.Number rather than float64. 
spanner.UseNumberWithJSONDecoderEncoder(true) @@ -112,12 +121,12 @@ func CreateJob(ctx context.Context, job *Job) (string, error) { return job.ID, err } -func UpdateJob(ctx context.Context, job *Job) error { +func saveEntity[T any](ctx context.Context, table string, obj *T) error { client, err := dbClient(ctx) if err != nil { return err } - mut, err := spanner.UpdateStruct("Jobs", job) + mut, err := spanner.InsertOrUpdateStruct(table, obj) if err != nil { return err } @@ -125,6 +134,10 @@ func UpdateJob(ctx context.Context, job *Job) error { return err } +func UpdateJob(ctx context.Context, job *Job) error { + return saveEntity(ctx, "Jobs", job) +} + func startJob(ctx context.Context, req *dashapi.AIJobPollReq, job *Job) (*spanner.Mutation, error) { job.Started = spanner.NullTime{Time: TimeNow(ctx), Valid: true} job.CodeRevision = req.CodeRevision @@ -402,6 +415,10 @@ func dbClient(ctx context.Context) (*spanner.Client, error) { return client, nil } +func GetClientForTest(ctx context.Context) (*spanner.Client, error) { + return dbClient(ctx) +} + func CloseClient(ctx context.Context) { appID := appengine.AppID(ctx) if v, ok := clients.LoadAndDelete(appID); ok { @@ -429,6 +446,269 @@ func selectJournal() string { return selectAllFrom[Journal]("Journal") } +func selectJobReporting() string { + return selectAllFrom[JobReporting]("JobReporting") +} + +func AddJobReporting(ctx context.Context, entry *JobReporting) error { + return saveEntity(ctx, "JobReporting", entry) +} + +func checkNoParallelConflict(ctx context.Context, txn *spanner.ReadWriteTransaction, job *Job, stage string) error { + iterConflict := txn.Query(ctx, spanner.Statement{ + SQL: `SELECT Jobs.ID + FROM Jobs + JOIN JobReporting ON Jobs.ID = JobReporting.JobID + WHERE Jobs.BugID = @bugID + AND JobReporting.Stage = @stage + AND (Jobs.Correct IS NULL OR Jobs.Correct = true) + AND Jobs.ID != @currentJobID + LIMIT 1`, + Params: map[string]any{ + "bugID": job.BugID.StringVal, + "stage": stage, + 
"currentJobID": job.ID, + }, + }) + defer iterConflict.Stop() + var conflicts []string + if err := spanner.SelectAll(iterConflict, &conflicts); err != nil { + return err + } + if len(conflicts) > 0 { + return &AlreadyUpstreamedError{Stage: stage} + } + return nil +} + +func AddJobReportingTransactional(ctx context.Context, job *Job, entry *JobReporting, noParallel bool) error { + client, err := dbClient(ctx) + if err != nil { + return err + } + entry.ID = uuid.NewString() + entry.JobID = job.ID + entry.CreatedAt = TimeNow(ctx) + + _, err = client.ReadWriteTransaction(ctx, func(ctx context.Context, txn *spanner.ReadWriteTransaction) error { + if noParallel && job.BugID.Valid { + if err := checkNoParallelConflict(ctx, txn, job, entry.Stage); err != nil { + return err + } + } + + mut, err := spanner.InsertStruct("JobReporting", entry) + if err != nil { + return err + } + return txn.BufferWrite([]*spanner.Mutation{mut}) + }) + if err != nil { + if spanner.ErrCode(err) == codes.AlreadyExists { + return &AlreadyUpstreamedError{Stage: entry.Stage} + } + return err + } + return nil +} + +func UpstreamReportCommand(ctx context.Context, args UpstreamReportArgs) error { + client, err := dbClient(ctx) + if err != nil { + return err + } + if args.Reporting != nil { + args.Reporting.ID = uuid.NewString() + args.Reporting.JobID = args.Job.ID + args.Reporting.CreatedAt = TimeNow(ctx) + } + + _, err = client.ReadWriteTransaction(ctx, func(ctx context.Context, txn *spanner.ReadWriteTransaction) error { + var job Job + stmt := spanner.Statement{ + SQL: selectJobs() + ` WHERE ID = @id`, + Params: map[string]any{"id": args.Job.ID}, + } + iter := txn.Query(ctx, stmt) + row, err := iter.Next() + if err != nil { + iter.Stop() + return err + } + if err := row.ToStruct(&job); err != nil { + iter.Stop() + return err + } + iter.Stop() + + if args.NoParallel && job.BugID.Valid && args.Reporting != nil { + if err := checkNoParallelConflict(ctx, txn, &job, args.Reporting.Stage); err != nil { + 
return err + } + } + + journal := &Journal{ + ID: uuid.NewString(), + JobID: toNullString(args.Job.ID), + Date: TimeNow(ctx), + User: args.User, + Action: ActionApprove, + Source: toNullString(args.CommandSource), + SourceExtID: toNullString(args.CommandExtID), + } + if args.Reporting != nil { + journal.ReportingID = toNullString(args.Reporting.ID) + } + if args.Reason != "" { + journal.Details = spanner.NullJSON{Value: map[string]string{"reason": args.Reason}, Valid: true} + } + journalMut, err := spanner.InsertStruct("Journal", journal) + if err != nil { + return err + } + jobMut := spanner.Update("Jobs", + []string{"ID", "Correct"}, + []any{args.Job.ID, spanner.NullBool{Bool: true, Valid: true}}) + var mutations []*spanner.Mutation + mutations = append(mutations, jobMut, journalMut) + + if args.Reporting != nil { + reportingMut, err := spanner.InsertStruct("JobReporting", args.Reporting) + if err != nil { + return err + } + mutations = append(mutations, reportingMut) + } + + return txn.BufferWrite(mutations) + }) + if err != nil { + if spanner.ErrCode(err) == codes.AlreadyExists { + return nil // Idempotent no-op. 
+ } + return err + } + return nil +} + +func RejectReportCommand(ctx context.Context, args RejectReportArgs) error { + client, err := dbClient(ctx) + if err != nil { + return err + } + + _, err = client.ReadWriteTransaction(ctx, func(ctx context.Context, txn *spanner.ReadWriteTransaction) error { + journal := &Journal{ + ID: uuid.NewString(), + JobID: toNullString(args.Job.ID), + Date: TimeNow(ctx), + User: args.User, + Action: ActionReject, + Source: toNullString(args.CommandSource), + SourceExtID: toNullString(args.CommandExtID), + } + if args.Reason != "" { + journal.Details = spanner.NullJSON{Value: map[string]string{"reason": args.Reason}, Valid: true} + } + journalMut, err := spanner.InsertStruct("Journal", journal) + if err != nil { + return err + } + + jobMut := spanner.Update("Jobs", + []string{"ID", "Correct"}, + []any{args.Job.ID, spanner.NullBool{Bool: false, Valid: true}}) + return txn.BufferWrite([]*spanner.Mutation{jobMut, journalMut}) + }) + if err != nil { + if spanner.ErrCode(err) == codes.AlreadyExists { + return nil // Idempotent no-op. 
+ } + return err + } + return nil +} + +func LoadPendingJobReporting(ctx context.Context) ([]*JobReporting, error) { + return selectAll[JobReporting](ctx, spanner.Statement{ + SQL: selectJobReporting() + `WHERE ReportedAt IS NULL`, + }) +} + +func LoadPendingJobReportingBySource(ctx context.Context, source string) ([]*JobReporting, error) { + return selectAll[JobReporting](ctx, spanner.Statement{ + SQL: selectJobReporting() + `WHERE Source = @source AND ReportedAt IS NULL`, + Params: map[string]any{"source": source}, + }) +} + +func LoadJobReportings(ctx context.Context, jobID string) ([]*JobReporting, error) { + return selectAll[JobReporting](ctx, spanner.Statement{ + SQL: selectJobReporting() + `WHERE JobID = @jobID`, + Params: map[string]any{"jobID": jobID}, + }) +} + +func JobReportingPublished(ctx context.Context, id, extID string) error { + client, err := dbClient(ctx) + if err != nil { + return err + } + _, err = client.ReadWriteTransaction(ctx, func(ctx context.Context, tx *spanner.ReadWriteTransaction) error { + iter := tx.Query(ctx, spanner.Statement{ + SQL: selectJobReporting() + ` WHERE ID = @id`, + Params: map[string]any{"id": id}, + }) + defer iter.Stop() + var reportings []*JobReporting + if err := spanner.SelectAll(iter, &reportings); err != nil { + return err + } + if len(reportings) == 0 { + return ErrNotFound + } + r := reportings[0] + r.ReportedAt = spanner.NullTime{Time: TimeNow(ctx), Valid: true} + r.ExtID = spanner.NullString{StringVal: extID, Valid: extID != ""} + + mut, err := spanner.InsertOrUpdateStruct("JobReporting", r) + if err != nil { + return err + } + return tx.BufferWrite([]*spanner.Mutation{mut}) + }) + return err +} + +type UpstreamReportArgs struct { + Job *Job + Reporting *JobReporting + NoParallel bool + CommandSource string + CommandExtID string + Reason string + User string +} + +type RejectReportArgs struct { + Job *Job + CommandSource string + CommandExtID string + User string + Reason string +} + +func 
LoadJobReportingByExtID(ctx context.Context, extID string) (*JobReporting, error) { + res, err := selectOne[JobReporting](ctx, spanner.Statement{ + SQL: selectJobReporting() + `WHERE ExtID = @extID LIMIT 2`, + Params: map[string]any{"extID": extID}, + }) + if errors.Is(err, ErrNotFound) { + return nil, nil + } + return res, err +} + func AddJournalEntry(ctx context.Context, entry *Journal) error { entry.ID = uuid.NewString() client, err := dbClient(ctx) @@ -443,16 +723,59 @@ func AddJournalEntry(ctx context.Context, entry *Journal) error { return err } -func LoadJobJournal(ctx context.Context, jobID, action string) ([]*Journal, error) { +func LoadJobJournal(ctx context.Context, jobID string) ([]*Journal, error) { return selectAll[Journal](ctx, spanner.Statement{ - SQL: selectJournal() + `WHERE JobID = @jobID AND Action = @action ORDER BY Date DESC`, + SQL: selectJournal() + `WHERE JobID = @jobID ORDER BY Date DESC`, Params: map[string]any{ - "jobID": jobID, - "action": action, + "jobID": jobID, }, }) } +func SetJobDone(ctx context.Context, jobID string, finished time.Time, + errStr string, results map[string]any) (*Job, error) { + client, err := dbClient(ctx) + if err != nil { + return nil, err + } + var job Job + _, err = client.ReadWriteTransaction(ctx, func(ctx context.Context, txn *spanner.ReadWriteTransaction) error { + stmt := spanner.Statement{ + SQL: selectJobs() + ` WHERE ID = @id`, + Params: map[string]any{"id": jobID}, + } + iter := txn.Query(ctx, stmt) + row, err := iter.Next() + if err != nil { + iter.Stop() + return err + } + if err := row.ToStruct(&job); err != nil { + iter.Stop() + return err + } + iter.Stop() + + if job.Finished.Valid { + return fmt.Errorf("job %s is already finished", jobID) + } + + job.Finished = spanner.NullTime{Time: finished, Valid: true} + job.Error = errStr + job.Results = toNullJSON(results) + + mut, err := spanner.UpdateStruct("Jobs", &job) + if err != nil { + return err + } + return 
txn.BufferWrite([]*spanner.Mutation{mut}) + }) + if err != nil { + return nil, err + } + return &job, nil +} + func selectAllFrom[T any](table string) string { var fields []string for _, field := range reflect.VisibleFields(reflect.TypeFor[T]()) { @@ -488,3 +811,27 @@ func toNullInt64(v int) spanner.NullInt64 { } return spanner.NullInt64{Int64: int64(v), Valid: true} } + +func RunInTransaction(ctx context.Context, f func(ctx context.Context, txn *spanner.ReadWriteTransaction) error) error { + client, err := dbClient(ctx) + if err != nil { + return err + } + _, err = client.ReadWriteTransaction(ctx, f) + return err +} + +func SaveJobComment(ctx context.Context, entry *JobComment) error { + entry.ID = uuid.NewString() + return saveEntity(ctx, "JobComments", entry) +} + +func LoadJobComments(ctx context.Context, jobID string) ([]*JobComment, error) { + return selectAll[JobComment](ctx, spanner.Statement{ + SQL: `SELECT JobComments.* FROM JobComments JOIN JobReporting ` + + `ON JobComments.ReportingID = JobReporting.ID ` + + `WHERE JobReporting.JobID = @jobID ` + + `ORDER BY JobComments.Date ASC`, + Params: map[string]any{"jobID": jobID}, + }) +} diff --git a/dashboard/app/aidb/entities.go b/dashboard/app/aidb/entities.go index 4c62f9a42956..8003649e37a5 100644 --- a/dashboard/app/aidb/entities.go +++ b/dashboard/app/aidb/entities.go @@ -11,7 +11,9 @@ import ( ) const ( - ActionJobReview = "JobReview" + ActionJobReview = "JobReview" // Outdated. Use ActionApprove/ActionReject. 
+ ActionApprove = "Approve" + ActionReject = "Reject" ) const ( @@ -59,6 +61,8 @@ type Job struct { Results spanner.NullJSON Correct spanner.NullBool Aborted bool + ParentJobID spanner.NullString + Version spanner.NullInt64 } type TrajectorySpan struct { @@ -84,10 +88,33 @@ type TrajectorySpan struct { } type Journal struct { - ID string - JobID spanner.NullString - Date time.Time - User string - Action string - Details spanner.NullJSON + ID string + JobID spanner.NullString + Date time.Time + User string + Action string + Details spanner.NullJSON + SourceExtID spanner.NullString + Source spanner.NullString + ReportingID spanner.NullString +} + +type JobReporting struct { + ID string + JobID string + Stage string + Source string + ReportedAt spanner.NullTime + UpstreamedAt spanner.NullTime + ExtID spanner.NullString + CreatedAt time.Time +} + +type JobComment struct { + ID string + ReportingID string + ExtID string + Author string + BodyURI string + Date time.Time } diff --git a/dashboard/app/aidb/migrations/10_add_patch_integration.down.sql b/dashboard/app/aidb/migrations/10_add_patch_integration.down.sql new file mode 100644 index 000000000000..2c29b252a1b6 --- /dev/null +++ b/dashboard/app/aidb/migrations/10_add_patch_integration.down.sql @@ -0,0 +1,6 @@ +DROP INDEX JobReportingByExtID; +DROP INDEX JobReportingByJobStage; +DROP TABLE JobReporting; +ALTER TABLE Jobs DROP CONSTRAINT FK_Jobs_ParentJob; +ALTER TABLE Jobs DROP COLUMN Version; +ALTER TABLE Jobs DROP COLUMN ParentJobID; diff --git a/dashboard/app/aidb/migrations/10_add_patch_integration.up.sql b/dashboard/app/aidb/migrations/10_add_patch_integration.up.sql new file mode 100644 index 000000000000..6c2fa6715ea8 --- /dev/null +++ b/dashboard/app/aidb/migrations/10_add_patch_integration.up.sql @@ -0,0 +1,18 @@ +ALTER TABLE Jobs ADD COLUMN ParentJobID STRING(36); +ALTER TABLE Jobs ADD COLUMN Version INT64; +ALTER TABLE Jobs ADD CONSTRAINT FK_Jobs_ParentJob FOREIGN KEY (ParentJobID) REFERENCES Jobs (ID); + 
+CREATE TABLE JobReporting ( + ID STRING(36) NOT NULL, + JobID STRING(36) NOT NULL, + Stage STRING(255) NOT NULL, + Source STRING(255) NOT NULL, + ReportedAt TIMESTAMP, + UpstreamedAt TIMESTAMP, + ExtID STRING(255), + CreatedAt TIMESTAMP NOT NULL, + CONSTRAINT FK_JobReporting_Job FOREIGN KEY (JobID) REFERENCES Jobs (ID), +) PRIMARY KEY (ID); + +CREATE UNIQUE INDEX JobReportingByExtID ON JobReporting(ExtID) WHERE ExtID IS NOT NULL; +CREATE UNIQUE INDEX JobReportingByJobStage ON JobReporting(JobID, Stage); diff --git a/dashboard/app/aidb/migrations/11_extend_journal.down.sql b/dashboard/app/aidb/migrations/11_extend_journal.down.sql new file mode 100644 index 000000000000..a2941c7ab8f1 --- /dev/null +++ b/dashboard/app/aidb/migrations/11_extend_journal.down.sql @@ -0,0 +1,5 @@ +DROP INDEX idx_journal_msg_ext_id; +ALTER TABLE Journal DROP CONSTRAINT FK_Journal_Reporting; +ALTER TABLE Journal DROP COLUMN SourceExtID; +ALTER TABLE Journal DROP COLUMN Source; +ALTER TABLE Journal DROP COLUMN ReportingID; diff --git a/dashboard/app/aidb/migrations/11_extend_journal.up.sql b/dashboard/app/aidb/migrations/11_extend_journal.up.sql new file mode 100644 index 000000000000..b3aa21f35801 --- /dev/null +++ b/dashboard/app/aidb/migrations/11_extend_journal.up.sql @@ -0,0 +1,7 @@ +ALTER TABLE Journal ADD COLUMN SourceExtID STRING(1000); +ALTER TABLE Journal ADD COLUMN Source STRING(100); +ALTER TABLE Journal ADD COLUMN ReportingID STRING(36); + +CREATE UNIQUE INDEX idx_journal_msg_ext_id ON Journal(Source, SourceExtID) WHERE SourceExtID IS NOT NULL; + +ALTER TABLE Journal ADD CONSTRAINT FK_Journal_Reporting FOREIGN KEY (ReportingID) REFERENCES JobReporting (ID); diff --git a/dashboard/app/aidb/migrations/12_add_job_comments.down.sql b/dashboard/app/aidb/migrations/12_add_job_comments.down.sql new file mode 100644 index 000000000000..5f81e1d4c260 --- /dev/null +++ b/dashboard/app/aidb/migrations/12_add_job_comments.down.sql @@ -0,0 +1,3 @@ +DROP INDEX JobCommentsByReportingID; +DROP 
INDEX JobCommentsByExtID; +DROP TABLE JobComments; diff --git a/dashboard/app/aidb/migrations/12_add_job_comments.up.sql b/dashboard/app/aidb/migrations/12_add_job_comments.up.sql new file mode 100644 index 000000000000..001c1d03f920 --- /dev/null +++ b/dashboard/app/aidb/migrations/12_add_job_comments.up.sql @@ -0,0 +1,12 @@ +CREATE TABLE JobComments ( + ID STRING(36) NOT NULL, + ReportingID STRING(36) NOT NULL, + ExtID STRING(1000) NOT NULL, + Author STRING(1000) NOT NULL, + BodyURI STRING(MAX) NOT NULL, + Date TIMESTAMP NOT NULL, + CONSTRAINT FK_JobComments_Reporting FOREIGN KEY (ReportingID) REFERENCES JobReporting (ID), +) PRIMARY KEY (ID); + +CREATE UNIQUE INDEX JobCommentsByExtID ON JobComments(ExtID); +CREATE INDEX JobCommentsByReportingID ON JobComments(ReportingID); diff --git a/dashboard/app/api.go b/dashboard/app/api.go index 2170d1e471e7..ae33074ecf6f 100644 --- a/dashboard/app/api.go +++ b/dashboard/app/api.go @@ -60,6 +60,9 @@ var apiHandlers = map[string]APIHandler{ "save_discussion": globalHandler(apiSaveDiscussion), "create_upload_url": globalHandler(apiCreateUploadURL), "send_email": globalHandler(apiSendEmail), + "ai_report_command": globalHandler(apiAIReportCommand), + "ai_poll_report": globalHandler(apiAIPollReport), + "ai_confirm_report": globalHandler(apiAIConfirmReport), "ai_job_poll": globalHandler(apiAIJobPoll), "ai_job_done": globalHandler(apiAIJobDone), "ai_trajectory_log": globalHandler(apiAITrajectoryLog), diff --git a/dashboard/app/config.go b/dashboard/app/config.go index 47862c4a6394..ca7403ce7ae7 100644 --- a/dashboard/app/config.go +++ b/dashboard/app/config.go @@ -148,6 +148,27 @@ type Config struct { type AIConfig struct { // Whether to upload generated patches to gerrit. UploadPatchesToGerrit bool + Stages []AIPatchStageConfig +} + +// AIPatchStageConfig describes a single stage in the AI patch reporting pipeline. +type AIPatchStageConfig struct { + Name string // "moderation", "public" + ServingIntegration string // e.g. 
"lore" + MailingList string + NoParallelReports bool +} + +func (cfg *AIConfig) StageIndexByName(name string) int { + if cfg == nil { + return -1 + } + for i := range cfg.Stages { + if cfg.Stages[i].Name == name { + return i + } + } + return -1 } type APIClient struct { @@ -166,6 +187,11 @@ var ( "ai_job_done": true, "ai_trajectory_log": true, } + AIReportMethods = map[string]bool{ + "ai_report_command": true, + "ai_poll_report": true, + "ai_confirm_report": true, + } ) // ACLItem is an Access Control List item. @@ -643,6 +669,9 @@ func checkNamespace(ns string, cfg *Config, namespaces, clientNames map[string]b if cfg.Kcidb != nil { checkKcidb(ns, cfg.Kcidb) } + if cfg.AI != nil { + checkAIConfig(ns, cfg.AI) + } checkKernelRepos(ns, cfg, cfg.Repos) checkNamespaceReporting(ns, cfg) checkSubsystems(ns, cfg) @@ -658,6 +687,22 @@ func checkCoverageConfig(ns string, cfg *Config) { } } +func checkAIConfig(ns string, cfg *AIConfig) { + if cfg == nil { + return + } + stageNames := make(map[string]bool) + for _, stage := range cfg.Stages { + if stage.Name == "" { + panic(fmt.Sprintf("%v: AI stage name cannot be empty", ns)) + } + if stageNames[stage.Name] { + panic(fmt.Sprintf("%v: duplicate AI stage name %q", ns, stage.Name)) + } + stageNames[stage.Name] = true + } +} + func checkSubsystems(ns string, cfg *Config) { if cfg.Subsystems.Reminder == nil { // Nothing to validate. 
diff --git a/dashboard/app/entities_datastore.go b/dashboard/app/entities_datastore.go index f652e85f6ec5..345a28e81ee7 100644 --- a/dashboard/app/entities_datastore.go +++ b/dashboard/app/entities_datastore.go @@ -678,6 +678,7 @@ const ( textError = "Error" textReproLog = "ReproLog" textFsckLog = "FsckLog" + textJobComment = "JobComment" ) const ( diff --git a/dashboard/app/local_ui_test.go b/dashboard/app/local_ui_test.go index 0f92e08bed3d..c43c56608a62 100644 --- a/dashboard/app/local_ui_test.go +++ b/dashboard/app/local_ui_test.go @@ -98,8 +98,13 @@ var localUIConfig = &GlobalConfig{ "upstream": { DisplayTitle: "Linux", AccessLevel: AccessPublic, - AI: &AIConfig{}, - Key: password1, + AI: &AIConfig{ + Stages: []AIPatchStageConfig{ + {Name: "moderation", ServingIntegration: "lore", MailingList: "moderation@test.com"}, + {Name: "public", ServingIntegration: "lore", MailingList: "test@syzkaller.com"}, + }, + }, + Key: password1, Clients: map[string]APIClient{ localUIClient: {Key: localUIPassword}, }, @@ -133,9 +138,7 @@ const ( localUIGlobalPassword = "localuiglobalpasswordlocaluiglobalpasswordlocaluiglobalpassword" ) -func populateLocalUIDB(t *testing.T, c *Ctx) { - client := c.makeClient(localUIClient, localUIPassword, true) - globalClient := c.makeClient(localUIGlobalClient, localUIGlobalPassword, true) +func populateBuildsAndCrashes(t *testing.T, client *apiClient) { bugTitles := []string{ "KASAN: slab-use-after-free Write in nr_neigh_put", "KCSAN: data-race in mISDN_ioctl / mISDN_read", @@ -174,6 +177,13 @@ func populateLocalUIDB(t *testing.T, c *Ctx) { } } } +} + +func populateLocalUIDB(t *testing.T, c *Ctx) { + client := c.makeClient(localUIClient, localUIPassword, true) + globalClient := c.makeClient(localUIGlobalClient, localUIGlobalPassword, true) + + populateBuildsAndCrashes(t, client) c.advanceTime(24 * time.Hour) fixedBugs := []struct { @@ -218,6 +228,7 @@ func populateLocalUIDB(t *testing.T, c *Ctx) { }) } + var lastBugExtID string for i := 0; i < 
4; i++ { t.Logf("polling bugs iteration %v", i) respBugs, err := globalClient.ReportingPollBugs("email") @@ -229,6 +240,7 @@ func populateLocalUIDB(t *testing.T, c *Ctx) { } var fixCommits []dashapi.Commit for _, rep := range respBugs.Reports { + lastBugExtID = rep.ID isFixed := false for _, bug := range fixedBugs { if rep.Title == bug.Title { @@ -310,10 +322,13 @@ func populateLocalUIDB(t *testing.T, c *Ctx) { {Type: ai.WorkflowAssessmentKCSAN, Name: string(ai.WorkflowAssessmentKCSAN)}, }, }) + jobID1 := resp.ID + jobID2 := c.createAIJob(lastBugExtID, string(ai.WorkflowPatching), "") + seq := 1 ts := c.mockedTime globalClient.AITrajectoryLog(&dashapi.AITrajectoryReq{ - JobID: resp.ID, + JobID: jobID1, Span: &trajectory.Span{ Seq: seq, Nesting: 1, @@ -331,7 +346,7 @@ func populateLocalUIDB(t *testing.T, c *Ctx) { seq++ for llmCall := 1; llmCall <= 3; llmCall++ { globalClient.AITrajectoryLog(&dashapi.AITrajectoryReq{ - JobID: resp.ID, + JobID: jobID1, Span: &trajectory.Span{ Seq: seq, Nesting: 2, @@ -346,7 +361,7 @@ func populateLocalUIDB(t *testing.T, c *Ctx) { }) seq++ globalClient.AITrajectoryLog(&dashapi.AITrajectoryReq{ - JobID: resp.ID, + JobID: jobID1, Span: &trajectory.Span{ Seq: seq, Nesting: 2, @@ -358,7 +373,7 @@ func populateLocalUIDB(t *testing.T, c *Ctx) { }) seq++ globalClient.AITrajectoryLog(&dashapi.AITrajectoryReq{ - JobID: resp.ID, + JobID: jobID1, Span: &trajectory.Span{ Seq: seq, Nesting: 2, @@ -371,7 +386,7 @@ func populateLocalUIDB(t *testing.T, c *Ctx) { seq++ } globalClient.AITrajectoryLog(&dashapi.AITrajectoryReq{ - JobID: resp.ID, + JobID: jobID1, Span: &trajectory.Span{ Seq: agentSeq, Nesting: 1, @@ -383,7 +398,7 @@ func populateLocalUIDB(t *testing.T, c *Ctx) { }) } globalClient.AITrajectoryLog(&dashapi.AITrajectoryReq{ - JobID: resp.ID, + JobID: jobID1, Span: &trajectory.Span{ Seq: 0, Nesting: 0, @@ -393,14 +408,53 @@ func populateLocalUIDB(t *testing.T, c *Ctx) { Finished: ts, }, }) + globalClient.AIJobDone(&dashapi.AIJobDoneReq{ - ID: 
resp.ID, + ID: jobID1, Results: map[string]any{ "Benign": false, "Confident": true, "Explanation": "ISO C says data races result in undefined program behavior.", }, }) + + _, err = globalClient.AIJobPoll(&dashapi.AIJobPollReq{ + AgentName: "agent-local-ui", + CodeRevision: "xxx", + Workflows: []dashapi.AIWorkflow{ + {Type: ai.WorkflowPatching, Name: string(ai.WorkflowPatching)}, + }, + }) + require.NoError(t, err) + + globalClient.AIJobDone(&dashapi.AIJobDoneReq{ + ID: jobID2, + Results: map[string]any{ + "PatchDescription": "Test Patch Description", + "PatchDiff": "diff --git a/test b/test", + }, + }) + + pollExt, err := globalClient.AIPollReport(&dashapi.PollExternalReportReq{ + Source: dashapi.AIJobSourceLore, + }) + require.NoError(t, err) + require.NotNil(t, pollExt) + require.NotNil(t, pollExt.Result) + + _ = globalClient.AIConfirmReport(&dashapi.ConfirmPublishedReq{ + ReportID: pollExt.Result.ID, + PublishedExtID: "", + }) + _, _ = globalClient.AIReportCommand(&dashapi.SendExternalCommandReq{ + Source: dashapi.AIJobSourceLore, + RootExtID: "", + MessageExtID: "", + Author: "reviewer@example.com", + Comment: &dashapi.CommentCommand{ + Body: "This is a mock comment added via client API.", + }, + }) } // Advance the timer with random duration. Return the (copied) old time. diff --git a/dashboard/app/templates/ai_job.html b/dashboard/app/templates/ai_job.html index 21ef1fdd37ae..c3c6765be8f2 100644 --- a/dashboard/app/templates/ai_job.html +++ b/dashboard/app/templates/ai_job.html @@ -157,6 +157,11 @@ Agent: {{.Job.AgentName}}

{{end}} + {{if .CurrentStage}} + Current Stage: {{.CurrentStage}}
+ Next Stage: {{.NextStage}}

+ {{end}} + {{if .Header.AIActions}} {{if and (ne .Job.Correct "⏳") (ne .Job.Correct "💥")}}
@@ -175,6 +180,49 @@ {{end}} {{end}} + {{if .Reportings}} + + + + + + + + + + + + + {{range .Reportings}} + + + + + + + + {{end}} + +
Reporting Stages and Comments:
StageSourceReported AtExt IDComments
{{.Reporting.Stage}}{{.Reporting.Source}}{{if .Reporting.ReportedAt.Valid}}{{formatTime .Reporting.ReportedAt.Time}}{{else}}-{{end}}{{if .Link}}{{.Reporting.ExtID.StringVal}}{{else if .Reporting.ExtID.Valid}}{{.Reporting.ExtID.StringVal}}{{else}}-{{end}} + {{if .Comments}} +
+ {{len .Comments}} Comments +
    + {{range .Comments}} +
  • + {{.Author}} ({{formatTime .Date}}): +
    {{.BodyURI}}
    +
  • + {{end}} +
+
+ {{else}} + No comments + {{end}} +
+
+ {{end}} + {{if .History}} @@ -183,6 +231,7 @@ + @@ -191,6 +240,7 @@ + {{end}} diff --git a/dashboard/app/util_test.go b/dashboard/app/util_test.go index 6900d2340db4..11457493441e 100644 --- a/dashboard/app/util_test.go +++ b/dashboard/app/util_test.go @@ -21,13 +21,14 @@ import ( "reflect" "runtime" "slices" + "strconv" "strings" "sync" "sync/atomic" "testing" "time" - "cloud.google.com/go/spanner/admin/database/apiv1" + database "cloud.google.com/go/spanner/admin/database/apiv1" "cloud.google.com/go/spanner/admin/database/apiv1/databasepb" "github.com/google/go-cmp/cmp" "github.com/google/syzkaller/dashboard/api" @@ -162,12 +163,12 @@ func loadDDLStatements(wildcard string, sortOrder int) ([]string, error) { if len(files) == 0 { return nil, fmt.Errorf("loadDDLStatements: wildcard did not match any files: %q", wildcard) } - // We prefix DDL file names with sequence numbers. - slices.SortFunc(files, func(a, b string) int { - return strings.Compare(a, b) * sortOrder - }) + sortedFiles, err := sortMigrationFiles(files, sortOrder) + if err != nil { + return nil, err + } var all []string - for _, file := range files { + for _, file := range sortedFiles { data, err := os.ReadFile(file) if err != nil { return nil, err @@ -180,6 +181,42 @@ func loadDDLStatements(wildcard string, sortOrder int) ([]string, error) { return all, nil } +func sortMigrationFiles(files []string, sortOrder int) ([]string, error) { + type migrationFile struct { + num int + file string + } + var mFiles []migrationFile + seen := map[int]string{} + for _, file := range files { + basename := filepath.Base(file) + parts := strings.Split(basename, "_") + if len(parts) == 0 { + return nil, fmt.Errorf("invalid migration filename: %v", basename) + } + num, err := strconv.Atoi(parts[0]) + if err != nil { + return nil, fmt.Errorf("migration file %v must start with a number (%w)", file, err) + } + if old, ok := seen[num]; ok { + return nil, fmt.Errorf("duplicate migration number %v: %v and %v", num, old, 
file) + } + seen[num] = file + mFiles = append(mFiles, migrationFile{num: num, file: file}) + } + slices.SortFunc(mFiles, func(a, b migrationFile) int { + if a.num != b.num { + return (a.num - b.num) * sortOrder + } + return strings.Compare(a.file, b.file) * sortOrder + }) + var result []string + for _, f := range mFiles { + result = append(result, f.file) + } + return result, nil +} + func (ctx *Ctx) config() *GlobalConfig { return getConfig(ctx.ctx) } @@ -903,3 +940,18 @@ func replaceReporting(ctx context.Context, ns, name string, f func(Reporting) Re return &ret }) } + +// SetAIConfig patches the config for all namespaces to use the given AIConfig. +func (ctx *Ctx) SetAIConfig(aiCfg *AIConfig) { + ctx.transformContext = func(ctx context.Context) context.Context { + cfg := getConfig(ctx) + newCfg := *cfg + newCfg.Namespaces = make(map[string]*Config) + for k, v := range cfg.Namespaces { + nc := *v + nc.AI = aiCfg + newCfg.Namespaces[k] = &nc + } + return contextWithConfig(ctx, &newCfg) + } +} diff --git a/dashboard/dashapi/ai.go b/dashboard/dashapi/ai.go index 62844b9a1942..90acf5c78952 100644 --- a/dashboard/dashapi/ai.go +++ b/dashboard/dashapi/ai.go @@ -4,10 +4,14 @@ package dashapi import ( + "errors" + "github.com/google/syzkaller/pkg/aflow/ai" "github.com/google/syzkaller/pkg/aflow/trajectory" ) +const AIJobSourceLore = "lore" + type AIJobPollReq struct { AgentName string CodeRevision string // git commit of the syz-agent server @@ -52,3 +56,96 @@ func (dash *Dashboard) AIJobDone(req *AIJobDoneReq) error { func (dash *Dashboard) AITrajectoryLog(req *AITrajectoryReq) error { return dash.Query("ai_trajectory_log", req, nil) } + +// SendExternalCommandReq represents a request to report a patch action externally (upstream or reject). +type SendExternalCommandReq struct { + Source string + RootExtID string + MessageExtID string + Author string + // Only one must be set. 
+ Upstream *UpstreamCommand `json:",omitempty"` + Reject *RejectCommand `json:",omitempty"` + Comment *CommentCommand `json:",omitempty"` +} + +type UpstreamCommand struct { +} + +type RejectCommand struct { + Reason string +} + +type CommentCommand struct { + Body string +} + +type SendExternalCommandResp struct { + Error string +} + +// PollExternalReportReq represents a request to poll for pending reports to be sent externally. +type PollExternalReportReq struct { + Source string // e.g., "lore" +} + +type PollExternalReportResp struct { + Result *ReportPollResult +} + +type ReportPollResult struct { + ID string // JobReporting ID + Moderation bool + To []string + Cc []string + Patch *NewReportResult `json:",omitempty"` + Replies []*ReplyResult `json:",omitempty"` +} + +type NewReportResult struct { + Subject string + Body string + Version int + GitDiff string + To []string + Cc []string + BaseCommit string + BaseTree string +} + +type ReplyResult struct { + Quote string + Body string + ReplyExtID string +} + +// ConfirmPublishedReq represents a request to confirm that a report has been published externally. 
+type ConfirmPublishedReq struct { + ReportID string + PublishedExtID string +} + +var ErrReportNotFound = errors.New("report not found") + +func (dash *Dashboard) AIReportCommand(req *SendExternalCommandReq) (*SendExternalCommandResp, error) { + resp := new(SendExternalCommandResp) + if err := dash.Query("ai_report_command", req, resp); err != nil { + return nil, err + } + if resp.Error == ErrReportNotFound.Error() { + return nil, ErrReportNotFound + } + return resp, nil +} + +func (dash *Dashboard) AIPollReport(req *PollExternalReportReq) (*PollExternalReportResp, error) { + resp := new(PollExternalReportResp) + if err := dash.Query("ai_poll_report", req, resp); err != nil { + return nil, err + } + return resp, nil +} + +func (dash *Dashboard) AIConfirmReport(req *ConfirmPublishedReq) error { + return dash.Query("ai_confirm_report", req, nil) +} diff --git a/pkg/email/lore/parse_test.go b/pkg/email/lore/parse_test.go index 183cf2ac4aa0..557e07c0fce4 100644 --- a/pkg/email/lore/parse_test.go +++ b/pkg/email/lore/parse_test.go @@ -235,6 +235,8 @@ Bug report`, t.Fatal(err) } msg.RawCc = nil + msg.Body = "" + msg.Patch = "" emails = append(emails, msg) } diff --git a/pkg/email/lore/poller.go b/pkg/email/lore/poller.go new file mode 100644 index 000000000000..df306af1b73b --- /dev/null +++ b/pkg/email/lore/poller.go @@ -0,0 +1,188 @@ +// Copyright 2026 syzkaller project authors. All rights reserved. +// Use of this source code is governed by Apache 2 LICENSE that can be found in the LICENSE file. + +// The approach uses an in-memory graph of ancestors (Message-ID -> In-Reply-To) +// to resolve the root of any email thread. This is acceptable as long as the +// archive size is reasonable and fits in memory. 
+package lore + +import ( + "bytes" + "context" + "fmt" + "net/mail" + "time" + + "github.com/google/syzkaller/pkg/debugtracer" + "github.com/google/syzkaller/pkg/email" + "github.com/google/syzkaller/pkg/vcs" +) + +type PollerConfig struct { + RepoDir string + URL string + Tracer debugtracer.DebugTracer + Since time.Time // in case the last polled commit time is known + OwnEmails []string + now func() time.Time // for testing +} + +type PolledEmail struct { + Email *Email + RootMessageID string +} + +type Poller struct { + cfg PollerConfig + repo vcs.Repo + ancestors map[string]string // Message-ID -> In-Reply-To. + lastCommit string + lastCommitTime time.Time + initialized bool +} + +func NewPoller(cfg PollerConfig) (*Poller, error) { + if cfg.Tracer == nil { + cfg.Tracer = &debugtracer.NullTracer{} + } + if cfg.now == nil { + cfg.now = time.Now + } + if cfg.RepoDir == "" { + return nil, fmt.Errorf("RepoDir must be specified") + } + if cfg.URL == "" { + return nil, fmt.Errorf("URL must be specified") + } + return &Poller{ + cfg: cfg, + ancestors: make(map[string]string), + lastCommitTime: cfg.Since, + }, nil +} + +func (p *Poller) Poll(ctx context.Context, out chan<- *PolledEmail) error { + if !p.initialized { + if err := p.initialize(ctx); err != nil { + return err + } + p.initialized = true + } + _, err := p.repo.Poll(p.cfg.URL, "master") + if err != nil { + return fmt.Errorf("git poll failed: %w", err) + } + var messages []EmailReader + if p.lastCommit != "" { + messages, err = ReadArchive(p.repo, p.lastCommit, time.Time{}) + } else if !p.lastCommitTime.IsZero() { + messages, err = ReadArchive(p.repo, "", p.lastCommitTime) + } else { + since := p.cfg.now().Add(-24 * time.Hour) + messages, err = ReadArchive(p.repo, "", since) + } + if err != nil { + return fmt.Errorf("failed to read archive: %w", err) + } + for i := len(messages) - 1; i >= 0; i-- { + er := messages[i] + parsed, err := er.Parse(p.cfg.OwnEmails, nil) + if err != nil { + p.cfg.Tracer.Logf("failed 
to parse email %s: %v", er.Hash, err) + continue + } + // We cannot fully trust the date specified in the message itself, so let's sanitize it + // using the commit date. It will at least help us prevent weird client.lastReply() responses. + if parsed.Date.After(er.CommitDate) { + parsed.Date = er.CommitDate + } + if parsed.MessageID == "" { + p.cfg.Tracer.Logf("ignoring email without Message-ID %s", er.Hash) + continue + } + p.ancestors[parsed.MessageID] = parsed.InReplyTo + if err := p.push(ctx, parsed, parsed.MessageID, out); err != nil { + return err + } + p.lastCommit = er.Hash + } + return nil +} + +func (p *Poller) initialize(ctx context.Context) error { + p.repo = vcs.NewLKMLRepo(p.cfg.RepoDir) + p.cfg.Tracer.Logf("initialize: polling %s branch master", p.cfg.URL) + _, err := p.repo.Poll(p.cfg.URL, "master") + if err != nil { + return fmt.Errorf("initial git poll failed: %w", err) + } + messages, err := ReadArchive(p.repo, "", time.Time{}) + if err != nil { + return fmt.Errorf("failed to read archive for initialization: %w", err) + } + for _, er := range messages { + body, err := er.Read() + if err != nil { + return fmt.Errorf("failed to read email %s: %w", er.Hash, err) + } + msg, err := mail.ReadMessage(bytes.NewReader(body)) + if err != nil { + p.cfg.Tracer.Logf("failed to parse email headers %s: %v", er.Hash, err) + continue + } + msgID := msg.Header.Get("Message-ID") + if msgID == "" { + continue + } + inReplyTo := email.ExtractInReplyTo(msg.Header) + p.ancestors[msgID] = inReplyTo + } + return nil +} + +func (p *Poller) push(ctx context.Context, email *Email, msgID string, out chan<- *PolledEmail) error { + root := p.resolveRoot(msgID) + if root == "" { + return nil // Skip loops. 
+ } + select { + case out <- &PolledEmail{ + Email: email, + RootMessageID: root, + }: + return nil + case <-ctx.Done(): + return ctx.Err() + } +} + +func (p *Poller) resolveRoot(msgID string) string { + visited := make(map[string]bool) + current := msgID + for { + parent, ok := p.ancestors[current] + if !ok || parent == "" { + return current + } + if visited[parent] { + return "" // Loop detected. + } + visited[current] = true + current = parent + } +} + +func (p *Poller) Loop(ctx context.Context, pollPeriod time.Duration, out chan<- *PolledEmail) error { + ticker := time.NewTicker(pollPeriod) + defer ticker.Stop() + for { + if err := p.Poll(ctx, out); err != nil { + p.cfg.Tracer.Logf("poller error: %v", err) + } + select { + case <-ctx.Done(): + return ctx.Err() + case <-ticker.C: + } + } +} diff --git a/pkg/email/lore/poller_test.go b/pkg/email/lore/poller_test.go new file mode 100644 index 000000000000..f30c8065562b --- /dev/null +++ b/pkg/email/lore/poller_test.go @@ -0,0 +1,218 @@ +// Copyright 2026 syzkaller project authors. All rights reserved. +// Use of this source code is governed by Apache 2 LICENSE that can be found in the LICENSE file. + +package lore + +import ( + "context" + "fmt" + "testing" + "time" + + "github.com/stretchr/testify/assert" +) + +func TestPoller(t *testing.T) { + repoDir := t.TempDir() + loreArchive := NewTestLoreArchive(t, repoDir) + + output := make(chan *PolledEmail, 16) + now := time.Date(2026, 4, 6, 10, 0, 0, 0, time.UTC) + cfg := PollerConfig{ + RepoDir: t.TempDir(), + URL: loreArchive.Repo.Dir, + OwnEmails: []string{"bot@syzbot.com"}, + now: func() time.Time { return now }, + } + + poller, err := NewPoller(cfg) + assert.NoError(t, err) + + ctx, cancel := context.WithCancel(context.Background()) + defer cancel() + + // 1. Save some messages to build initial graph. 
+ t.Logf("saving initial messages") + dateOldEmail := now.Add(-48 * time.Hour).Format(time.RFC1123Z) + dateAEmail := now.Add(-2 * time.Hour).Format(time.RFC1123Z) + dateBEmail := now.Add(-1 * time.Hour).Format(time.RFC1123Z) + + loreArchive.SaveMessageAt(t, fmt.Sprintf(`From: someone@domain.com +Date: %s +Subject: Old Message +Message-ID: +Content-Type: text/plain + +`, dateOldEmail), now.Add(-48*time.Hour)) + + loreArchive.SaveMessageAt(t, fmt.Sprintf(`From: someone@domain.com +Date: %s +Subject: Root Message +Message-ID: +Content-Type: text/plain + +`, dateAEmail), now.Add(-2*time.Hour)) + + loreArchive.SaveMessageAt(t, fmt.Sprintf(`From: someone@domain.com +Date: %s +Subject: Reply to Root +Message-ID: +In-Reply-To: +Content-Type: text/plain + +`, dateBEmail), now.Add(-1*time.Hour)) + + // 2. Poll for the first time. + t.Logf("first poll (initialize)") + + err = poller.Poll(ctx, output) + assert.NoError(t, err) + + t.Logf("first poll (actual)") + err = poller.Poll(ctx, output) + assert.NoError(t, err) + + // Since it's the first poll and messages are within 24 hours (we didn't mock time, so they are fresh), + // they should be pushed to the channel. + + msg1 := <-output + parsed1 := msg1.Email + assert.Equal(t, "", parsed1.MessageID) + assert.Equal(t, "", msg1.RootMessageID) + + msg2 := <-output + parsed2 := msg2.Email + assert.Equal(t, "", parsed2.MessageID) + assert.Equal(t, "", msg2.RootMessageID) + + // Verify no more messages (specifically '') were pushed. + select { + case msg := <-output: + parsed := msg.Email + t.Errorf("unexpected message in channel: %s", parsed.MessageID) + default: + // OK + } + + // 3. Save a new message (reply to reply1). + t.Logf("saving new message") + dateCEmail := now.Format(time.RFC1123Z) + loreArchive.SaveMessageAt(t, fmt.Sprintf(`From: someone@domain.com +Date: %s +Subject: Reply to Reply +Message-ID: +In-Reply-To: +Content-Type: text/plain + +`, dateCEmail), now) + + // 4. Poll again. 
+ t.Logf("second poll") + err = poller.Poll(ctx, output) + assert.NoError(t, err) + + msg3 := <-output + parsed3 := msg3.Email + assert.Equal(t, "", parsed3.MessageID) + assert.Equal(t, "", msg3.RootMessageID) + + // 5. Save a message from own email. + t.Logf("saving own email message") + dateDEmail := now.Add(1 * time.Minute).Format(time.RFC1123Z) + loreArchive.SaveMessageAt(t, fmt.Sprintf(`From: bot@syzbot.com +Date: %s +Subject: Message from Bot +Message-ID: +In-Reply-To: +Content-Type: text/plain + +`, dateDEmail), now.Add(1*time.Minute)) + + err = poller.Poll(ctx, output) + assert.NoError(t, err) + + msg4 := <-output + assert.True(t, msg4.Email.OwnEmail) +} + +func TestPollerLoop(t *testing.T) { + repoDir := t.TempDir() + loreArchive := NewTestLoreArchive(t, repoDir) + + now := time.Date(2026, 4, 6, 10, 0, 0, 0, time.UTC) + dateStr := now.Format(time.RFC1123Z) + + loreArchive.SaveMessageAt(t, fmt.Sprintf(`From: someone@domain.com +Date: %s +Subject: Loop 1 +Message-ID: +In-Reply-To: +Content-Type: text/plain + +`, dateStr), now) + + loreArchive.SaveMessageAt(t, fmt.Sprintf(`From: someone@domain.com +Date: %s +Subject: Loop 2 +Message-ID: +In-Reply-To: +Content-Type: text/plain + +`, dateStr), now) + + output := make(chan *PolledEmail, 16) + cfg := PollerConfig{ + RepoDir: t.TempDir(), + URL: loreArchive.Repo.Dir, + now: func() time.Time { return now }, + } + + poller, err := NewPoller(cfg) + assert.NoError(t, err) + + ctx := context.Background() + err = poller.Poll(ctx, output) + assert.NoError(t, err) + + select { + case msg := <-output: + t.Errorf("unexpected message in channel: %s", msg.Email.MessageID) + default: + } +} + +func TestPollerDateSanitization(t *testing.T) { + repoDir := t.TempDir() + loreArchive := NewTestLoreArchive(t, repoDir) + + output := make(chan *PolledEmail, 16) + now := time.Date(2026, 4, 6, 10, 0, 0, 0, time.UTC) + cfg := PollerConfig{ + RepoDir: t.TempDir(), + URL: loreArchive.Repo.Dir, + OwnEmails: []string{"bot@syzbot.com"}, + 
now: func() time.Time { return now }, + } + + poller, err := NewPoller(cfg) + assert.NoError(t, err) + + ctx := context.Background() + + commitDate := now.Add(-1 * time.Hour) + loreArchive.SaveMessageAt(t, fmt.Sprintf(`From: someone@domain.com +Date: %s +Subject: Future Dated Message +Message-ID: +Content-Type: text/plain + +`, now.Format(time.RFC1123Z)), commitDate) + + err = poller.Poll(ctx, output) + assert.NoError(t, err) + + msg := <-output + assert.Equal(t, "", msg.Email.MessageID) + // The date should be set to commitDate. + assert.Equal(t, commitDate.UTC(), msg.Email.Date.UTC()) +} diff --git a/pkg/email/lore/read.go b/pkg/email/lore/read.go index 133061a28f94..1c56fbb0da04 100644 --- a/pkg/email/lore/read.go +++ b/pkg/email/lore/read.go @@ -54,10 +54,5 @@ func emailFromRaw(body []byte, emails, domains []string) (*Email, error) { if err != nil { return nil, err } - ret := &Email{Email: msg, HasPatch: msg.Patch != ""} - // Keep memory consumption low. - ret.Body = "" - ret.Patch = "" - // TODO: If emails/domains are nil, we also don't need to parse the body at all. - return ret, nil + return &Email{Email: msg, HasPatch: msg.Patch != ""}, nil } diff --git a/pkg/email/lore/test_util.go b/pkg/email/lore/test_util.go new file mode 100644 index 000000000000..f329f924bf82 --- /dev/null +++ b/pkg/email/lore/test_util.go @@ -0,0 +1,35 @@ +// Copyright 2026 syzkaller project authors. All rights reserved. +// Use of this source code is governed by Apache 2 LICENSE that can be found in the LICENSE file. 
+ +package lore + +import ( + "os" + "path/filepath" + "testing" + "time" + + "github.com/google/syzkaller/pkg/vcs" + "github.com/stretchr/testify/assert" +) + +type TestLoreArchive struct { + Repo *vcs.TestRepo +} + +func NewTestLoreArchive(t *testing.T, dir string) *TestLoreArchive { + return &TestLoreArchive{ + Repo: vcs.MakeTestRepo(t, dir), + } +} + +func (a *TestLoreArchive) SaveMessage(t *testing.T, raw string) { + a.SaveMessageAt(t, raw, time.Now()) +} + +func (a *TestLoreArchive) SaveMessageAt(t *testing.T, raw string, date time.Time) { + err := os.WriteFile(filepath.Join(a.Repo.Dir, "m"), []byte(raw), 0666) + assert.NoError(t, err) + a.Repo.Git("add", "m") + a.Repo.CommitChangeAt("message", date) +} diff --git a/pkg/email/parser.go b/pkg/email/parser.go index b7404988a238..667a00429690 100644 --- a/pkg/email/parser.go +++ b/pkg/email/parser.go @@ -58,6 +58,7 @@ const ( CmdSet CmdUnset CmdRegenerate + CmdReject cmdTest5 ) @@ -182,7 +183,7 @@ func Parse(r io.Reader, ownEmails, goodLists, domains []string) (*Email, error) email := &Email{ BugIDs: unique(bugIDs), MessageID: msg.Header.Get("Message-ID"), - InReplyTo: extractInReplyTo(msg.Header), + InReplyTo: ExtractInReplyTo(msg.Header), Date: date, Link: link, Author: author, @@ -358,6 +359,8 @@ func strToCmd(str string) Command { return CmdUnset case "regenerate": return CmdRegenerate + case "reject": + return CmdReject case "test_5_arg_cmd": return cmdTest5 } @@ -459,7 +462,7 @@ func parseBody(r io.Reader, headers mail.Header) ([]byte, [][]byte, error) { var extractMessageIDs = regexp.MustCompile(`<.+?>`) -func extractInReplyTo(header mail.Header) string { +func ExtractInReplyTo(header mail.Header) string { value := header.Get("In-Reply-To") // Normally there should be just one message, to which we reply. // However, there have been some cases when multiple addresses were mentioned. 
diff --git a/pkg/email/parser_test.go b/pkg/email/parser_test.go index 5af0cd9b8628..bb964a5c7457 100644 --- a/pkg/email/parser_test.go +++ b/pkg/email/parser_test.go @@ -406,6 +406,14 @@ baz Args: "abcd", }, }, + { + body: `#syz reject`, + cmd: &SingleCommand{ + Command: CmdReject, + Str: "reject", + Args: "", + }, + }, } type ParseTest struct { diff --git a/pkg/email/sender/dashapi.go b/pkg/email/sender/dashapi.go new file mode 100644 index 000000000000..a823758b0604 --- /dev/null +++ b/pkg/email/sender/dashapi.go @@ -0,0 +1,55 @@ +// Copyright 2026 syzkaller project authors. All rights reserved. +// Use of this source code is governed by Apache 2 LICENSE that can be found in the LICENSE file. + +package sender + +import ( + "context" + "net/mail" + + "github.com/google/syzkaller/dashboard/dashapi" + "github.com/google/syzkaller/pkg/email" +) + +// DashapiConfig holds configuration for the Dashapi sender. +type DashapiConfig struct { + Client string + Addr string + From mail.Address + ContextPrefix string + SubjectPrefix string +} + +type dashapiSender struct { + cfg DashapiConfig + dash *dashapi.Dashboard +} + +// NewDashapiSender creates a new Dashapi sender. +func NewDashapiSender(cfg DashapiConfig) (Sender, error) { + dash, err := dashapi.New(cfg.Client, cfg.Addr, "") + if err != nil { + return nil, err + } + return &dashapiSender{cfg: cfg, dash: dash}, nil +} + +// Send sends an email via Dashapi. 
+func (s *dashapiSender) Send(ctx context.Context, item *Email) (string, error) { + senderAddr := s.cfg.From.String() + if item.BugID != "" { + var err error + senderAddr, err = email.AddAddrContext(senderAddr, s.cfg.ContextPrefix+item.BugID) + if err != nil { + return "", err + } + } + return "", s.dash.SendEmail(&dashapi.SendEmailReq{ + Sender: senderAddr, + To: item.To, + Cc: item.Cc, + Subject: s.cfg.SubjectPrefix + item.Subject, + InReplyTo: item.InReplyTo, + Body: string(item.Body), + }) +} diff --git a/pkg/email/sender/sender.go b/pkg/email/sender/sender.go new file mode 100644 index 000000000000..2c66fca3e23d --- /dev/null +++ b/pkg/email/sender/sender.go @@ -0,0 +1,21 @@ +// Copyright 2026 syzkaller project authors. All rights reserved. +// Use of this source code is governed by Apache 2 LICENSE that can be found in the LICENSE file. + +package sender + +import "context" + +// Email represents an email to be sent. +type Email struct { + To []string + Cc []string + Subject string + InReplyTo string + Body []byte + BugID string +} + +// Sender defines the interface for sending emails. +type Sender interface { + Send(ctx context.Context, email *Email) (string, error) +} diff --git a/pkg/email/sender/smtp.go b/pkg/email/sender/smtp.go new file mode 100644 index 000000000000..07f6ec3f7896 --- /dev/null +++ b/pkg/email/sender/smtp.go @@ -0,0 +1,90 @@ +// Copyright 2026 syzkaller project authors. All rights reserved. +// Use of this source code is governed by Apache 2 LICENSE that can be found in the LICENSE file. 
+ +package sender + +import ( + "bytes" + "context" + "fmt" + "net/mail" + "net/smtp" + "strings" + + "github.com/google/uuid" +) + +type SMTPConfig struct { + Host string + Port int + User string + Password string + From mail.Address +} + +type smtpSender struct { + cfg SMTPConfig +} + +func NewSMTPSender(cfg SMTPConfig) Sender { + return &smtpSender{cfg: cfg} +} + +func (s *smtpSender) Send(ctx context.Context, item *Email) (string, error) { + msgID := fmt.Sprintf("<%s@%s>", uuid.NewString(), s.cfg.Host) + msg := s.rawEmail(item, msgID) + auth := smtp.PlainAuth("", s.cfg.User, s.cfg.Password, s.cfg.Host) + smtpAddr := fmt.Sprintf("%s:%d", s.cfg.Host, s.cfg.Port) + + // Create a slice of recipients (To + Cc) without duplicates. + recipients := s.unique(append(item.To, item.Cc...)) + + err := smtp.SendMail(smtpAddr, auth, s.cfg.From.Address, recipients, msg) + if err != nil { + return "", err + } + return msgID, nil +} + +func (s *smtpSender) rawEmail(item *Email, id string) []byte { + var msg bytes.Buffer + + fmt.Fprintf(&msg, "From: %s\r\n", s.cfg.From.String()) + fmt.Fprintf(&msg, "To: %s\r\n", strings.Join(item.To, ", ")) + if len(item.Cc) > 0 { + fmt.Fprintf(&msg, "Cc: %s\r\n", strings.Join(item.Cc, ", ")) + } + fmt.Fprintf(&msg, "Subject: %s\r\n", item.Subject) + if item.InReplyTo != "" { + inReplyTo := item.InReplyTo + if inReplyTo[0] != '<' { + inReplyTo = "<" + inReplyTo + ">" + } + fmt.Fprintf(&msg, "In-Reply-To: %s\r\n", inReplyTo) + } + if id != "" { + if id[0] != '<' { + id = "<" + id + ">" + } + fmt.Fprintf(&msg, "Message-ID: %s\r\n", id) + } + msg.WriteString("MIME-Version: 1.0\r\n") + msg.WriteString("Content-Type: text/plain; charset=UTF-8\r\n") + msg.WriteString("Content-Transfer-Encoding: 8bit\r\n") + msg.WriteString("\r\n") + msg.Write(item.Body) + return msg.Bytes() +} + +func (s *smtpSender) unique(list []string) []string { + var ret []string + seen := map[string]struct{}{} + for _, str := range list { + if _, ok := seen[str]; ok { + continue 
+ } + seen[str] = struct{}{} + ret = append(ret, str) + } + return ret +} diff --git a/syz-cluster/pkg/emailclient/smtp_sender_test.go b/pkg/email/sender/smtp_test.go similarity index 76% rename from syz-cluster/pkg/emailclient/smtp_sender_test.go rename to pkg/email/sender/smtp_test.go index 00b36588739c..d5be9859b336 100644 --- a/syz-cluster/pkg/emailclient/smtp_sender_test.go +++ b/pkg/email/sender/smtp_test.go @@ -1,10 +1,11 @@ -// Copyright 2025 syzkaller project authors. All rights reserved. +// Copyright 2026 syzkaller project authors. All rights reserved. // Use of this source code is governed by Apache 2 LICENSE that can be found in the LICENSE file. -package emailclient +package sender import ( "fmt" + "net/mail" "testing" "github.com/stretchr/testify/assert" @@ -25,7 +26,7 @@ func TestRawEmail(t *testing.T) { Body: []byte("Email body"), }, id: "", - result: "From: name \r\n" + + result: "From: \"name\" \r\n" + "To: 1@to.com, 2@to.com\r\n" + "Cc: 1@cc.com, 2@cc.com\r\n" + "Subject: subject\r\n" + @@ -38,9 +39,17 @@ func TestRawEmail(t *testing.T) { }, } + cfg := SMTPConfig{ + From: mail.Address{ + Name: "name", + Address: "a@b.com", + }, + } + s := &smtpSender{cfg: cfg} + for i, test := range tests { t.Run(fmt.Sprintf("%d", i), func(t *testing.T) { - ret := rawEmail(TestEmailConfig(), test.item, test.id) + ret := s.rawEmail(test.item, test.id) assert.Equal(t, test.result, string(ret)) }) } diff --git a/pkg/gcpsecret/secret.go b/pkg/gcpsecret/secret.go index a801615f2631..97f8186ec1b4 100644 --- a/pkg/gcpsecret/secret.go +++ b/pkg/gcpsecret/secret.go @@ -6,6 +6,8 @@ package gcpsecret import ( "context" "fmt" + "os" + "strings" "cloud.google.com/go/compute/metadata" secretmanager "cloud.google.com/go/secretmanager/apiv1" @@ -59,3 +61,26 @@ func ProjectName(ctx context.Context) (string, error) { } return projectID, nil } + +// Resolve returns the resolved value of the string, handling "env:" and "gcp-secret:" prefixes. 
+func Resolve(ctx context.Context, val string) (string, error) { + const gcpSecretPrefix = "gcp-secret:" + const envPrefix = "env:" + + if strings.HasPrefix(val, envPrefix) { + return os.Getenv(val[len(envPrefix):]), nil + } + if strings.HasPrefix(val, gcpSecretPrefix) { + secretName := val[len(gcpSecretPrefix):] + proj, err := ProjectName(ctx) + if err != nil { + return "", fmt.Errorf("failed to get GCP project: %w", err) + } + data, err := LatestGcpSecret(ctx, proj, secretName) + if err != nil { + return "", fmt.Errorf("failed to get GCP secret %s: %w", secretName, err) + } + return string(data), nil + } + return val, nil +} diff --git a/pkg/lore-relay/commands.go b/pkg/lore-relay/commands.go new file mode 100644 index 000000000000..28344c7afc02 --- /dev/null +++ b/pkg/lore-relay/commands.go @@ -0,0 +1,49 @@ +// Copyright 2026 syzkaller project authors. All rights reserved. +// Use of this source code is governed by Apache 2 LICENSE that can be found in the LICENSE file. + +package lorerelay + +import ( + "github.com/google/syzkaller/dashboard/dashapi" + "github.com/google/syzkaller/pkg/email" + "github.com/google/syzkaller/pkg/email/lore" +) + +// extractCommands converts parsed email commands into Dashboard API requests. 
+func extractCommands(polled *lore.PolledEmail) []*dashapi.SendExternalCommandReq { + var reqs []*dashapi.SendExternalCommandReq + + for _, cmd := range polled.Email.Commands { + req := &dashapi.SendExternalCommandReq{ + Source: dashapi.AIJobSourceLore, + RootExtID: polled.RootMessageID, + MessageExtID: polled.Email.MessageID, + Author: polled.Email.Author, + } + + switch cmd.Command { + case email.CmdUpstream: + req.Upstream = &dashapi.UpstreamCommand{} + reqs = append(reqs, req) + case email.CmdReject: + req.Reject = &dashapi.RejectCommand{ + Reason: polled.Email.Body, + } + reqs = append(reqs, req) + } + } + + if len(reqs) == 0 && polled.Email.Body != "" { + reqs = append(reqs, &dashapi.SendExternalCommandReq{ + Source: dashapi.AIJobSourceLore, + RootExtID: polled.RootMessageID, + MessageExtID: polled.Email.MessageID, + Author: polled.Email.Author, + Comment: &dashapi.CommentCommand{ + Body: polled.Email.Body, + }, + }) + } + + return reqs +} diff --git a/pkg/lore-relay/commands_test.go b/pkg/lore-relay/commands_test.go new file mode 100644 index 000000000000..0a5a0edb60a1 --- /dev/null +++ b/pkg/lore-relay/commands_test.go @@ -0,0 +1,78 @@ +// Copyright 2026 syzkaller project authors. All rights reserved. +// Use of this source code is governed by Apache 2 LICENSE that can be found in the LICENSE file. 
+ +package lorerelay + +import ( + "testing" + + "github.com/google/syzkaller/dashboard/dashapi" + "github.com/google/syzkaller/pkg/email" + "github.com/google/syzkaller/pkg/email/lore" + "github.com/stretchr/testify/assert" +) + +func TestMapCommands(t *testing.T) { + tests := []struct { + name string + polled *lore.PolledEmail + want []*dashapi.SendExternalCommandReq + }{ + { + name: "upstream", + polled: &lore.PolledEmail{ + RootMessageID: "", + Email: &lore.Email{ + Email: &email.Email{ + MessageID: "", + Author: "user@example.com", + Commands: []*email.SingleCommand{ + {Command: email.CmdUpstream}, + }, + }, + }, + }, + want: []*dashapi.SendExternalCommandReq{ + { + Source: dashapi.AIJobSourceLore, + RootExtID: "", + MessageExtID: "", + Author: "user@example.com", + Upstream: &dashapi.UpstreamCommand{}, + }, + }, + }, + { + name: "reject", + polled: &lore.PolledEmail{ + RootMessageID: "", + Email: &lore.Email{ + Email: &email.Email{ + MessageID: "", + Author: "user@example.com", + Body: "some reason", + Commands: []*email.SingleCommand{ + {Command: email.CmdReject}, + }, + }, + }, + }, + want: []*dashapi.SendExternalCommandReq{ + { + Source: dashapi.AIJobSourceLore, + RootExtID: "", + MessageExtID: "", + Author: "user@example.com", + Reject: &dashapi.RejectCommand{Reason: "some reason"}, + }, + }, + }, + } + + for _, tc := range tests { + t.Run(tc.name, func(t *testing.T) { + got := extractCommands(tc.polled) + assert.Equal(t, tc.want, got) + }) + } +} diff --git a/pkg/lore-relay/relay.go b/pkg/lore-relay/relay.go new file mode 100644 index 000000000000..3723c10ca4b8 --- /dev/null +++ b/pkg/lore-relay/relay.go @@ -0,0 +1,222 @@ +// Copyright 2026 syzkaller project authors. All rights reserved. +// Use of this source code is governed by Apache 2 LICENSE that can be found in the LICENSE file. 
+ +package lorerelay + +import ( + "context" + "errors" + "fmt" + "log" + "strings" + "time" + + "github.com/google/syzkaller/dashboard/dashapi" + "github.com/google/syzkaller/pkg/debugtracer" + "github.com/google/syzkaller/pkg/email" + "github.com/google/syzkaller/pkg/email/lore" + "github.com/google/syzkaller/pkg/email/sender" + "golang.org/x/sync/errgroup" +) + +// DashboardClient defines the subset of dashapi.Dashboard required by the relay. +type DashboardClient interface { + AIReportCommand(req *dashapi.SendExternalCommandReq) (*dashapi.SendExternalCommandResp, error) + AIPollReport(req *dashapi.PollExternalReportReq) (*dashapi.PollExternalReportResp, error) + AIConfirmReport(req *dashapi.ConfirmPublishedReq) error +} + +// Config holds configuration for the Lore Relay. +type Config struct { + // DashboardPollInterval is how often to poll the Dashboard for new reports. + DashboardPollInterval time.Duration `yaml:"dashboard_poll_interval"` + // LorePollInterval is how often to poll Lore archive. + LorePollInterval time.Duration `yaml:"lore_poll_interval"` + // DocsLink is the link to the documentation. + DocsLink string `yaml:"docs_link"` + // Tracer is used for debug logging. + Tracer debugtracer.DebugTracer `yaml:"-"` + // LoreArchive is an optional mailing list that will be added to Cc on all sent emails. + LoreArchive string `yaml:"lore_archive"` +} + +// Relay orchestrates the flow between Lore and Dashboard. +type Relay struct { + cfg *Config + dash DashboardClient + poller *lore.Poller + emailSender sender.Sender + emailChan chan *lore.PolledEmail + backoffs []time.Duration +} + +// NewRelay creates a new Relay instance. 
+func NewRelay(cfg *Config, dash DashboardClient, poller *lore.Poller, + emailSender sender.Sender) *Relay { + if cfg.Tracer == nil { + cfg.Tracer = &debugtracer.NullTracer{} + } + if cfg.DashboardPollInterval == 0 { + cfg.DashboardPollInterval = 30 * time.Second + } + if cfg.LorePollInterval == 0 { + cfg.LorePollInterval = 5 * time.Minute + } + emailChan := make(chan *lore.PolledEmail, 16) + return &Relay{ + cfg: cfg, + dash: dash, + poller: poller, + emailSender: emailSender, + emailChan: emailChan, + backoffs: []time.Duration{5 * time.Second, 30 * time.Second, 60 * time.Second}, + } +} + +// Run starts the relay loop. +func (r *Relay) Run(ctx context.Context) error { + g, ctx := errgroup.WithContext(ctx) + + g.Go(func() error { + r.cfg.Tracer.Logf("starting lore poller loop") + return r.poller.Loop(ctx, r.cfg.LorePollInterval, r.emailChan) + }) + g.Go(func() error { + r.cfg.Tracer.Logf("starting dashboard poller loop") + return r.pollDashboard(ctx) + }) + g.Go(func() error { + for { + select { + case <-ctx.Done(): + return ctx.Err() + case polled := <-r.emailChan: + if err := r.HandleIncomingEmail(ctx, polled); err != nil { + log.Printf("failed to handle incoming email: %v", err) + } + } + } + }) + + return g.Wait() +} + +func (r *Relay) pollDashboard(ctx context.Context) error { + ticker := time.NewTicker(r.cfg.DashboardPollInterval) + defer ticker.Stop() + for { + select { + case <-ctx.Done(): + return ctx.Err() + case <-ticker.C: + if err := r.PollDashboardOnce(ctx); err != nil { + log.Printf("failed to poll dashboard: %v", err) + } + } + } +} + +// PollDashboardOnce performs a single poll of the dashboard. +// Exported for testing. 
+func (r *Relay) PollDashboardOnce(ctx context.Context) error { + r.cfg.Tracer.Logf("polling dashboard for reports") + resp, err := r.dash.AIPollReport(&dashapi.PollExternalReportReq{Source: dashapi.AIJobSourceLore}) + if err != nil { + return err + } + if resp.Result == nil { + return nil + } + body, err := RenderBody(r.cfg, resp.Result) + if err != nil { + return err + } + subject := GenerateSubject(resp.Result) + cc := append([]string{}, resp.Result.Cc...) + if r.cfg.LoreArchive != "" { + cc = append(cc, r.cfg.LoreArchive) + } + email := &sender.Email{ + To: resp.Result.To, + Cc: cc, + Subject: subject, + Body: []byte(body), + } + r.cfg.Tracer.Logf("sending email: %s", subject) + msgID, err := r.emailSender.Send(ctx, email) + if err != nil { + return err + } + return r.dash.AIConfirmReport(&dashapi.ConfirmPublishedReq{ + ReportID: resp.Result.ID, + PublishedExtID: msgID, + }) +} + +// PollLoreOnce polls the lore archive once and processes all received emails. +func (r *Relay) PollLoreOnce(ctx context.Context) error { + if err := r.poller.Poll(ctx, r.emailChan); err != nil { + return err + } + for { + select { + case polled := <-r.emailChan: + if err := r.HandleIncomingEmail(ctx, polled); err != nil { + log.Printf("failed to handle incoming email: %v", err) + } + default: + return nil + } + } +} + +func (r *Relay) HandleIncomingEmail(ctx context.Context, polled *lore.PolledEmail) error { + r.cfg.Tracer.Logf("handling incoming email from %s", polled.Email.Author) + reqs := extractCommands(polled) + if len(reqs) == 0 { + return nil + } + if len(reqs) > 1 { + return r.replyError(ctx, polled, "multiple commands in a single message are not supported") + } + var resp *dashapi.SendExternalCommandResp + var err error + backoffs := r.backoffs + for i := 0; ; i++ { + resp, err = r.dash.AIReportCommand(reqs[0]) + if err == nil { + break + } + if errors.Is(err, dashapi.ErrReportNotFound) { + return nil // Stay silent. 
+ } + if i >= len(backoffs) { + return fmt.Errorf("API call failed after %d retries: %w", len(backoffs), err) + } + r.cfg.Tracer.Logf("API call failed: %v, retrying in %v", err, backoffs[i]) + select { + case <-time.After(backoffs[i]): + case <-ctx.Done(): + return ctx.Err() + } + } + if resp.Error != "" && reqs[0].Comment == nil { + return r.replyError(ctx, polled, resp.Error) + } + return nil +} + +func (r *Relay) replyError(ctx context.Context, polled *lore.PolledEmail, errorMsg string) error { + subj := polled.Email.Subject + if !strings.HasPrefix(strings.ToLower(subj), "re:") { + subj = "Re: " + subj + } + email := &sender.Email{ + To: []string{polled.Email.Author}, + Subject: subj, + InReplyTo: polled.Email.MessageID, + Body: []byte(email.FormReply(polled.Email.Email, fmt.Sprintf("Command failed:\n\n%s\n", errorMsg))), + } + _, err := r.emailSender.Send(ctx, email) + return err +} diff --git a/pkg/lore-relay/relay_test.go b/pkg/lore-relay/relay_test.go new file mode 100644 index 000000000000..1f2d2515d5fd --- /dev/null +++ b/pkg/lore-relay/relay_test.go @@ -0,0 +1,437 @@ +// Copyright 2026 syzkaller project authors. All rights reserved. +// Use of this source code is governed by Apache 2 LICENSE that can be found in the LICENSE file. 
+ +package lorerelay + +import ( + "context" + "fmt" + "testing" + "time" + + "github.com/google/syzkaller/dashboard/dashapi" + "github.com/google/syzkaller/pkg/debugtracer" + "github.com/google/syzkaller/pkg/email/lore" + "github.com/google/syzkaller/pkg/email/sender" + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" +) + +type mockSender struct { + sent []*sender.Email + id int +} + +func (m *mockSender) Send(ctx context.Context, email *sender.Email) (string, error) { + m.sent = append(m.sent, email) + m.id++ + return fmt.Sprintf("", m.id), nil +} + +func TestMainScenario(t *testing.T) { + repoDir := t.TempDir() + loreArchive := lore.NewTestLoreArchive(t, repoDir) + + now := time.Now() + + pollerCfg := lore.PollerConfig{ + RepoDir: t.TempDir(), + URL: loreArchive.Repo.Dir, + Tracer: &debugtracer.TestTracer{T: t}, + } + + poller, err := lore.NewPoller(pollerCfg) + require.NoError(t, err) + + mockDash := &mockDashboard{ + pollResp: &dashapi.PollExternalReportResp{ + Result: &dashapi.ReportPollResult{ + ID: "job1", + To: []string{"maintainer@email"}, + Cc: []string{"cc@email"}, + Patch: &dashapi.NewReportResult{ + Subject: "Fix bug", + Body: "Fix body", + To: []string{"intended_to@email"}, + Cc: []string{"intended_cc@email"}, + }, + Moderation: true, + }, + }, + } + mockSnd := &mockSender{} + relay := NewRelay(&Config{ + LoreArchive: "archive@lore.com", + }, mockDash, poller, mockSnd) + + // 1. Dashboard comes up with a patch. + err = relay.PollDashboardOnce(context.Background()) + require.NoError(t, err) + + require.Len(t, mockSnd.sent, 1) + assert.Equal(t, []string{"maintainer@email"}, mockSnd.sent[0].To) + assert.Equal(t, []string{"cc@email", "archive@lore.com"}, mockSnd.sent[0].Cc) + assert.Equal(t, "[PATCH RFC] Fix bug", mockSnd.sent[0].Subject) + + require.Len(t, mockDash.confirmed, 1) + assert.Equal(t, "job1", mockDash.confirmed[0].ReportID) + assert.Equal(t, "", mockDash.confirmed[0].PublishedExtID) + + // 2. 
User reply that's unrelated to it. + loreArchive.SaveMessageAt(t, `From: user@email +Subject: Re: [PATCH] Fix bug +Message-ID: +In-Reply-To: + +This looks interesting. +`, now.Add(time.Minute)) + + err = relay.PollLoreOnce(context.Background()) + require.NoError(t, err) + + // 3. #syz upstream to (2). + loreArchive.SaveMessageAt(t, `From: user@email +Subject: Re: [PATCH] Fix bug +Message-ID: +In-Reply-To: + +#syz upstream +`, now.Add(2*time.Minute)) + + err = relay.PollLoreOnce(context.Background()) + require.NoError(t, err) + + // 4. Verify dashboard receives command and root id is good. + assert.Equal(t, mockMainScenarioCommands()[:2], mockDash.commands) + + // 5. #syz reject to (2). + loreArchive.SaveMessageAt(t, `From: user@email +Subject: Re: [PATCH] Fix bug +Message-ID: +In-Reply-To: + +#syz reject +`, now.Add(3*time.Minute)) + + err = relay.PollLoreOnce(context.Background()) + require.NoError(t, err) + + // 6. Verify dashboard receives command and root it is good. + assert.Equal(t, mockMainScenarioCommands(), mockDash.commands) +} + +func TestRestartScenario(t *testing.T) { + repoDir := t.TempDir() + loreArchive := lore.NewTestLoreArchive(t, repoDir) + + now := time.Now() + + pollerRepoDir := t.TempDir() + createRelay := func(mockDash *mockDashboard) (*Relay, *lore.Poller) { + pollerCfg := lore.PollerConfig{ + RepoDir: pollerRepoDir, + URL: loreArchive.Repo.Dir, + Tracer: &debugtracer.TestTracer{T: t}, + } + poller, err := lore.NewPoller(pollerCfg) + require.NoError(t, err) + mockSnd := &mockSender{} + relay := NewRelay(&Config{}, mockDash, poller, mockSnd) + return relay, poller + } + + mockDash := &mockDashboard{ + pollResp: &dashapi.PollExternalReportResp{ + Result: &dashapi.ReportPollResult{ + ID: "job1", + To: []string{"maintainer@email"}, + Patch: &dashapi.NewReportResult{ + Subject: "Fix bug", + Body: "Fix body", + }, + }, + }, + } + + relay, _ := createRelay(mockDash) + + // 1. Dashboard comes up with a patch. 
+ err := relay.PollDashboardOnce(context.Background()) + require.NoError(t, err) + + // 2. User reply that's unrelated to it. + loreArchive.SaveMessageAt(t, `From: user@email +Subject: Re: [PATCH] Fix bug +Message-ID: +In-Reply-To: + +This looks interesting. +`, now.Add(time.Minute)) + + err = relay.PollLoreOnce(context.Background()) + require.NoError(t, err) + mockDash.commands = nil + + t.Logf("restarting relay") + relay, _ = createRelay(mockDash) + + err = relay.PollLoreOnce(context.Background()) + require.NoError(t, err) + require.Len(t, mockDash.commands, 1) + assert.Equal(t, "", mockDash.commands[0].MessageExtID) + mockDash.commands = nil + + // 3. #syz upstream to (2). + loreArchive.SaveMessageAt(t, `From: user@email +Subject: Re: [PATCH] Fix bug +Message-ID: +In-Reply-To: + +#syz upstream +`, now.Add(2*time.Minute)) + + err = relay.PollLoreOnce(context.Background()) + require.NoError(t, err) + + // 4. Verify that dashboard receives the command and root id is good. + assert.Equal(t, []*dashapi.SendExternalCommandReq{ + { + Source: dashapi.AIJobSourceLore, + RootExtID: "", + MessageExtID: "", + Author: "user@email", + Upstream: &dashapi.UpstreamCommand{}, + }, + }, mockDash.commands) + mockDash.commands = nil + + t.Logf("restarting relay") + relay, _ = createRelay(mockDash) + + err = relay.PollLoreOnce(context.Background()) + require.NoError(t, err) + + assert.Equal(t, mockMainScenarioCommands()[:2], mockDash.commands) + + // 5. #syz reject to (2). + loreArchive.SaveMessageAt(t, `From: user@email +Subject: Re: [PATCH] Fix bug +Message-ID: +In-Reply-To: + +#syz reject +`, now.Add(3*time.Minute)) + + err = relay.PollLoreOnce(context.Background()) + require.NoError(t, err) + + // 6. Verify that dashboard receives the command and root it is good. 
+ assert.Equal(t, mockMainScenarioCommands(), mockDash.commands) +} + +func TestErrorReply(t *testing.T) { + loreArchive := lore.NewTestLoreArchive(t, t.TempDir()) + + cfg := &Config{ + DashboardPollInterval: time.Hour, + LorePollInterval: time.Hour, + } + + mockDash := &mockDashboard{ + cmdResp: &dashapi.SendExternalCommandResp{Error: "invalid command syntax"}, + } + mockSnd := &mockSender{} + + lorePoller, err := lore.NewPoller(lore.PollerConfig{ + RepoDir: t.TempDir(), + URL: loreArchive.Repo.Dir, + Tracer: &debugtracer.TestTracer{T: t}, + OwnEmails: []string{"own@email.com"}, + }) + require.NoError(t, err) + relay := NewRelay(cfg, mockDash, lorePoller, mockSnd) + + now := time.Now() + loreArchive.SaveMessageAt(t, `From: user@email +Subject: [PATCH] Fix bug +Message-ID: + +#syz upstream +`, now) + + err = relay.PollLoreOnce(context.Background()) + require.NoError(t, err) + + require.Len(t, mockDash.commands, 1) + require.Len(t, mockSnd.sent, 1) + assert.Equal(t, []string{"user@email"}, mockSnd.sent[0].To) + assert.Equal(t, "Re: [PATCH] Fix bug", mockSnd.sent[0].Subject) + assert.Equal(t, "", mockSnd.sent[0].InReplyTo) + expectedBody := "> #syz upstream\n\nCommand failed:\n\ninvalid command syntax\n\n" + assert.Equal(t, expectedBody, string(mockSnd.sent[0].Body)) +} + +func TestErrorReplyComment(t *testing.T) { + loreArchive := lore.NewTestLoreArchive(t, t.TempDir()) + + cfg := &Config{ + DashboardPollInterval: time.Hour, + LorePollInterval: time.Hour, + } + + mockDash := &mockDashboard{ + cmdResp: &dashapi.SendExternalCommandResp{Error: "some internal error"}, + } + mockSnd := &mockSender{} + + lorePoller, err := lore.NewPoller(lore.PollerConfig{ + RepoDir: t.TempDir(), + URL: loreArchive.Repo.Dir, + Tracer: &debugtracer.TestTracer{T: t}, + OwnEmails: []string{"own@email.com"}, + }) + require.NoError(t, err) + relay := NewRelay(cfg, mockDash, lorePoller, mockSnd) + + now := time.Now() + loreArchive.SaveMessageAt(t, `From: user@email +Subject: [PATCH] Fix bug 
+Message-ID: + +This is just a normal comment. +`, now) + + err = relay.PollLoreOnce(context.Background()) + require.NoError(t, err) + + require.Len(t, mockDash.commands, 1) + // For comments, error reply must be completely suppressed! + require.Len(t, mockSnd.sent, 0) +} + +func TestMultipleCommandsReply(t *testing.T) { + loreArchive := lore.NewTestLoreArchive(t, t.TempDir()) + cfg := &Config{LorePollInterval: time.Hour} + mockDash := &mockDashboard{} + mockSnd := &mockSender{} + lorePoller, err := lore.NewPoller(lore.PollerConfig{ + RepoDir: t.TempDir(), + URL: loreArchive.Repo.Dir, + Tracer: &debugtracer.TestTracer{T: t}, + }) + require.NoError(t, err) + relay := NewRelay(cfg, mockDash, lorePoller, mockSnd) + + loreArchive.SaveMessageAt(t, `From: user@email +Subject: [PATCH] Fix bug +Message-ID: + +#syz upstream +#syz reject +`, time.Now()) + + err = relay.PollLoreOnce(context.Background()) + require.NoError(t, err) + + require.Len(t, mockSnd.sent, 1) + assert.Equal(t, []string{"user@email"}, mockSnd.sent[0].To) + expectedBody := "> #syz upstream\n> #syz reject\n\n" + + "Command failed:\n\nmultiple commands in a single message are not supported\n\n" + assert.Equal(t, expectedBody, string(mockSnd.sent[0].Body)) +} + +func TestBackoff(t *testing.T) { + loreArchive := lore.NewTestLoreArchive(t, t.TempDir()) + cfg := &Config{ + LorePollInterval: time.Hour, + Tracer: &debugtracer.TestTracer{T: t}, + } + + mockDash := &mockDashboard{ + cmdErr: fmt.Errorf("transient error"), + } + mockSnd := &mockSender{} + lorePoller, err := lore.NewPoller(lore.PollerConfig{ + RepoDir: t.TempDir(), + URL: loreArchive.Repo.Dir, + Tracer: &debugtracer.TestTracer{T: t}, + }) + require.NoError(t, err) + relay := NewRelay(cfg, mockDash, lorePoller, mockSnd) + relay.backoffs = []time.Duration{time.Millisecond, time.Millisecond, time.Millisecond} + + loreArchive.SaveMessageAt(t, `From: user@email +Subject: [PATCH] Fix bug +Message-ID: + +#syz upstream +`, time.Now()) + + err = 
relay.PollLoreOnce(context.Background()) + require.NoError(t, err) + + // 1 initial failure + 1 success on retry = 2 calls! + require.Len(t, mockDash.commands, 2) +} + +func (m *mockDashboard) AIPollReport(req *dashapi.PollExternalReportReq) (*dashapi.PollExternalReportResp, error) { + resp := m.pollResp + m.pollResp = nil + return resp, nil +} + +func (m *mockDashboard) AIConfirmReport(req *dashapi.ConfirmPublishedReq) error { + m.confirmed = append(m.confirmed, req) + return nil +} + +type mockDashboard struct { + commands []*dashapi.SendExternalCommandReq + pollResp *dashapi.PollExternalReportResp + confirmed []*dashapi.ConfirmPublishedReq + cmdResp *dashapi.SendExternalCommandResp + cmdErr error +} + +func (m *mockDashboard) AIReportCommand(req *dashapi.SendExternalCommandReq) (*dashapi.SendExternalCommandResp, error) { + m.commands = append(m.commands, req) + if m.cmdErr != nil { + err := m.cmdErr + m.cmdErr = nil // Use only once! + return nil, err + } + if m.cmdResp != nil { + return m.cmdResp, nil + } + return &dashapi.SendExternalCommandResp{}, nil +} + +func mockMainScenarioCommands() []*dashapi.SendExternalCommandReq { + return []*dashapi.SendExternalCommandReq{ + { + Source: dashapi.AIJobSourceLore, + RootExtID: "", + MessageExtID: "", + Author: "user@email", + Comment: &dashapi.CommentCommand{ + Body: "This looks interesting.\n", + }, + }, + { + Source: dashapi.AIJobSourceLore, + RootExtID: "", + MessageExtID: "", + Author: "user@email", + Upstream: &dashapi.UpstreamCommand{}, + }, + { + Source: dashapi.AIJobSourceLore, + RootExtID: "", + MessageExtID: "", + Author: "user@email", + Reject: &dashapi.RejectCommand{ + Reason: "#syz reject\n", + }, + }, + } +} diff --git a/pkg/lore-relay/templates.go b/pkg/lore-relay/templates.go new file mode 100644 index 000000000000..f1b2d027e755 --- /dev/null +++ b/pkg/lore-relay/templates.go @@ -0,0 +1,91 @@ +// Copyright 2026 syzkaller project authors. All rights reserved. 
+// Use of this source code is governed by Apache 2 LICENSE that can be found in the LICENSE file. + +package lorerelay + +import ( + "bytes" + "embed" + "fmt" + "strings" + "text/template" + + "github.com/google/syzkaller/dashboard/dashapi" +) + +//go:embed templates/*.txt +var templatesFS embed.FS + +// TemplateData holds data for rendering email templates. +type TemplateData struct { + Patch *dashapi.NewReportResult + Replies []*dashapi.ReplyResult + DocsLink string + Moderation bool +} + +func renderTemplate(name, tmplStr string, data TemplateData) (string, error) { + t, err := template.New(name).Funcs(template.FuncMap{ + "quote": quote, + }).Parse(tmplStr) + if err != nil { + return "", fmt.Errorf("failed to parse template: %w", err) + } + var buf bytes.Buffer + if err := t.Execute(&buf, data); err != nil { + return "", fmt.Errorf("failed to execute template: %w", err) + } + return buf.String(), nil +} + +// RenderBody renders the email body based on the poll result. +func RenderBody(cfg *Config, res *dashapi.ReportPollResult) (string, error) { + data := TemplateData{ + DocsLink: cfg.DocsLink, + Moderation: res.Moderation, + } + if res.Patch != nil { + data.Patch = res.Patch + tmpl, err := templatesFS.ReadFile("templates/new_patch.txt") + if err != nil { + return "", err + } + return renderTemplate("new_patch", string(tmpl), data) + } + + if len(res.Replies) > 0 { + data.Replies = res.Replies + tmpl, err := templatesFS.ReadFile("templates/replies.txt") + if err != nil { + return "", err + } + return renderTemplate("replies", string(tmpl), data) + } + return "", fmt.Errorf("empty report result") +} + +// GenerateSubject generates the email subject based on the poll result. 
+func GenerateSubject(res *dashapi.ReportPollResult) string { + if res.Patch == nil { + return "" + } + prefix := "PATCH" + if res.Moderation { + prefix += " RFC" + } + if res.Patch.Version > 1 { + prefix += fmt.Sprintf(" v%d", res.Patch.Version) + } + return fmt.Sprintf("[%s] %s", prefix, res.Patch.Subject) +} + +func quote(s string) string { + if s == "" { + return "" + } + lines := strings.Split(s, "\n") + for i, line := range lines { + lines[i] = "> " + line + } + return strings.Join(lines, "\n") + "\n" +} diff --git a/pkg/lore-relay/templates/new_patch.txt b/pkg/lore-relay/templates/new_patch.txt new file mode 100644 index 000000000000..eba21e3bc62c --- /dev/null +++ b/pkg/lore-relay/templates/new_patch.txt @@ -0,0 +1,15 @@ +{{.Patch.Body}} + +--- +{{.Patch.GitDiff}} + +base-commit: {{.Patch.BaseCommit}} +-- +{{ if .Moderation }}This is an AI-generated patch subject to moderation. +Reply with '#syz upstream' to send it to the mailing list. +Reply with '#syz reject' to reject it. + +{{if .Patch.To}}Final To: {{range $i, $e := .Patch.To}}{{if $i}}, {{end}}{{$e}}{{end}} +{{end}}{{if .Patch.Cc}}Final Cc: {{range $i, $e := .Patch.Cc}}{{if $i}}, {{end}}{{$e}}{{end}} +{{end}} +{{ end }}See {{.DocsLink}} for more information. diff --git a/pkg/lore-relay/templates/replies.txt b/pkg/lore-relay/templates/replies.txt new file mode 100644 index 000000000000..84f16988dc9f --- /dev/null +++ b/pkg/lore-relay/templates/replies.txt @@ -0,0 +1,6 @@ +{{ range .Replies }}{{ quote .Quote }}{{ .Body }} + +{{ end }}This reply was generated by an AI model. + +-- +See {{.DocsLink}} for more information. diff --git a/pkg/lore-relay/templates_test.go b/pkg/lore-relay/templates_test.go new file mode 100644 index 000000000000..3fa58ca6c72f --- /dev/null +++ b/pkg/lore-relay/templates_test.go @@ -0,0 +1,62 @@ +// Copyright 2026 syzkaller project authors. All rights reserved. +// Use of this source code is governed by Apache 2 LICENSE that can be found in the LICENSE file. 
+ +package lorerelay + +import ( + "encoding/json" + "flag" + "os" + "path/filepath" + "strings" + "testing" + + "github.com/google/syzkaller/dashboard/dashapi" + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" +) + +var flagWrite = flag.Bool("write_lore_tests", false, "overwrite out.txt files") + +func TestRender(t *testing.T) { + flag.Parse() + basePath := "testdata" + files, err := os.ReadDir(basePath) + if err != nil { + t.Fatal(err) + } + hasTests := false + for _, file := range files { + if !strings.HasSuffix(file.Name(), ".in.json") { + continue + } + hasTests = true + fullName := file.Name() + name := strings.TrimSuffix(fullName, ".in.json") + t.Run(name, func(t *testing.T) { + inPath := filepath.Join(basePath, fullName) + inputData, err := os.ReadFile(inPath) + require.NoError(t, err) + + var res dashapi.ReportPollResult + err = json.Unmarshal(inputData, &res) + require.NoError(t, err) + + output, err := RenderBody(&Config{DocsLink: "http://docs.link"}, &res) + require.NoError(t, err) + subject := GenerateSubject(&res) + fullOutput := "Subject: " + subject + "\n\n" + output + outPath := filepath.Join(basePath, name+".out.txt") + if *flagWrite { + err := os.WriteFile(outPath, []byte(fullOutput), 0644) + require.NoError(t, err) + } else { + expected, err := os.ReadFile(outPath) + require.NoError(t, err) + assert.Equal(t, string(expected), fullOutput) + } + }) + } + + require.True(t, hasTests, "no test cases found in %s", basePath) +} diff --git a/pkg/lore-relay/testdata/patch_moderation.in.json b/pkg/lore-relay/testdata/patch_moderation.in.json new file mode 100644 index 000000000000..a62b72abdc07 --- /dev/null +++ b/pkg/lore-relay/testdata/patch_moderation.in.json @@ -0,0 +1,12 @@ +{ + "Moderation": true, + "Patch": { + "Subject": "Fix a bug in subsystem", + "Body": "This patch fixes a bug by doing X and Y.", + "Version": 1, + "GitDiff": "diff --git a/file b/file\nindex 123..456 100644\n--- a/file\n+++ b/file\n@@ -1 +1 
@@\n-old\n+new", + "BaseCommit": "abcdef123456", + "To": ["maintainer@email.com", "maintainer2@email.com"], + "Cc": ["reviewer@email.com", "reviewer2@email.com"] + } +} \ No newline at end of file diff --git a/pkg/lore-relay/testdata/patch_moderation.out.txt b/pkg/lore-relay/testdata/patch_moderation.out.txt new file mode 100644 index 000000000000..fe4ba70193e7 --- /dev/null +++ b/pkg/lore-relay/testdata/patch_moderation.out.txt @@ -0,0 +1,23 @@ +Subject: [PATCH RFC] Fix a bug in subsystem + +This patch fixes a bug by doing X and Y. + +--- +diff --git a/file b/file +index 123..456 100644 +--- a/file ++++ b/file +@@ -1 +1 @@ +-old ++new + +base-commit: abcdef123456 +-- +This is an AI-generated patch subject to moderation. +Reply with '#syz upstream' to send it to the mailing list. +Reply with '#syz reject' to reject it. + +Final To: maintainer@email.com, maintainer2@email.com +Final Cc: reviewer@email.com, reviewer2@email.com + +See http://docs.link for more information. diff --git a/pkg/lore-relay/testdata/patch_v2.in.json b/pkg/lore-relay/testdata/patch_v2.in.json new file mode 100644 index 000000000000..153f7fcd0960 --- /dev/null +++ b/pkg/lore-relay/testdata/patch_v2.in.json @@ -0,0 +1,10 @@ +{ + "Moderation": false, + "Patch": { + "Subject": "Fix another bug", + "Body": "This is v2 of the patch with fixed z.", + "Version": 2, + "GitDiff": "diff --git a/file2 b/file2\n--- a/file2\n+++ b/file2\n@@ -1 +1 @@\n-old2\n+new2", + "BaseCommit": "123456abcdef" + } +} diff --git a/pkg/lore-relay/testdata/patch_v2.out.txt b/pkg/lore-relay/testdata/patch_v2.out.txt new file mode 100644 index 000000000000..31a4457c9211 --- /dev/null +++ b/pkg/lore-relay/testdata/patch_v2.out.txt @@ -0,0 +1,15 @@ +Subject: [PATCH v2] Fix another bug + +This is v2 of the patch with fixed z. + +--- +diff --git a/file2 b/file2 +--- a/file2 ++++ b/file2 +@@ -1 +1 @@ +-old2 ++new2 + +base-commit: 123456abcdef +-- +See http://docs.link for more information. 
diff --git a/pkg/lore-relay/testdata/reply_quote.in.json b/pkg/lore-relay/testdata/reply_quote.in.json new file mode 100644 index 000000000000..1706a1934d7a --- /dev/null +++ b/pkg/lore-relay/testdata/reply_quote.in.json @@ -0,0 +1,14 @@ +{ + "Replies": [ + { + "Quote": "What is the cause of this crash?", + "Body": "This crash is caused by a null pointer dereference in foo().", + "ReplyExtID": "reply123" + }, + { + "Quote": "How to fix it?", + "Body": "We need to add a null check.", + "ReplyExtID": "reply124" + } + ] +} diff --git a/pkg/lore-relay/testdata/reply_quote.out.txt b/pkg/lore-relay/testdata/reply_quote.out.txt new file mode 100644 index 000000000000..75a9f67f1c24 --- /dev/null +++ b/pkg/lore-relay/testdata/reply_quote.out.txt @@ -0,0 +1,12 @@ +Subject: + +> What is the cause of this crash? +This crash is caused by a null pointer dereference in foo(). + +> How to fix it? +We need to add a null check. + +This reply was generated by an AI model. + +-- +See http://docs.link for more information. 
diff --git a/pkg/vcs/git_test_util.go b/pkg/vcs/git_test_util.go index 3f2116a4b443..e9aff1e3ad0e 100644 --- a/pkg/vcs/git_test_util.go +++ b/pkg/vcs/git_test_util.go @@ -81,6 +81,19 @@ func (repo *TestRepo) CommitChange(description string) *Commit { return repo.CommitChangeset(description) } +func (repo *TestRepo) CommitChangeAt(description string, date time.Time) { + dateStr := date.Format(time.RFC3339) + cmd := osutil.Command("git", "commit", "--allow-empty", "-m", description) + cmd.Dir = repo.Dir + cmd.Env = append(os.Environ(), + "GIT_AUTHOR_DATE="+dateStr, + "GIT_COMMITTER_DATE="+dateStr, + ) + if _, err := osutil.Run(time.Minute, cmd); err != nil { + repo.t.Fatal(err) + } +} + type FileContent struct { File string Content string diff --git a/syz-agent/Makefile b/syz-agent/Makefile index 1f857b06bd63..5fdcc0c900c3 100644 --- a/syz-agent/Makefile +++ b/syz-agent/Makefile @@ -5,12 +5,19 @@ include ../tools/version.mk IMAGE_NAME ?= local/syz-agent IMAGE_TAG ?= latest +LORE_RELAY_IMAGE_NAME ?= local/syz-lore-relay +LORE_RELAY_IMAGE_TAG ?= latest -.PHONY: container k8s-minikube k8s-prod +.PHONY: all-containers agent-container lore-relay-container k8s-minikube k8s-prod -container: +agent-container: DOCKER_BUILDKIT=1 docker build --build-arg REV="$(REV)" --build-arg GITREVDATE="$(GITREVDATE)" -t $(IMAGE_NAME):$(IMAGE_TAG) -f ./Dockerfile ../ +lore-relay-container: + DOCKER_BUILDKIT=1 docker build -t $(LORE_RELAY_IMAGE_NAME):$(LORE_RELAY_IMAGE_TAG) -f ./lore-relay/Dockerfile ../ + +all-containers: agent-container lore-relay-container + k8s-minikube: @if [ -z "$(GOOGLE_API_KEY)" ]; then echo "GOOGLE_API_KEY is not set" && exit 1; fi @if [ -z "$(DASHBOARD_KEY)" ]; then echo "DASHBOARD_KEY is not set" && exit 1; fi diff --git a/syz-agent/agent.go b/syz-agent/agent/agent.go similarity index 100% rename from syz-agent/agent.go rename to syz-agent/agent/agent.go diff --git a/syz-agent/config.go b/syz-agent/agent/config.go similarity index 71% rename from 
syz-agent/config.go rename to syz-agent/agent/config.go index 5b3a07a890fe..17f89f113d89 100644 --- a/syz-agent/config.go +++ b/syz-agent/agent/config.go @@ -8,17 +8,11 @@ import ( "encoding/json" "fmt" "os" - "strings" "github.com/google/syzkaller/pkg/config" "github.com/google/syzkaller/pkg/gcpsecret" ) -const ( - gcpSecretPrefix = "gcp-secret:" - envPrefix = "env:" -) - type Config struct { HTTP string `json:"http"` MCP bool `json:"mcp"` // Start MCP server on the HTTP address, and don't connect to dashboard. @@ -47,19 +41,19 @@ func loadConfig(configFile string) (*Config, error) { SyzkallerRepo: "https://github.com/google/syzkaller.git", SyzkallerBranch: "master", CacheSize: 1 << 40, // 1TB - GeminiAPIKey: envPrefix + "GOOGLE_API_KEY", + GeminiAPIKey: "env:GOOGLE_API_KEY", } if err := config.LoadFile(configFile, cfg); err != nil { return nil, fmt.Errorf("failed to load config: %w", err) } - resolvedDashKey, err := resolvePrefix(cfg.DashboardKey) + resolvedDashKey, err := gcpsecret.Resolve(context.Background(), cfg.DashboardKey) if err != nil { return nil, fmt.Errorf("failed to resolve DashboardKey: %w", err) } cfg.DashboardKey = resolvedDashKey - resolvedGeminiKey, err := resolvePrefix(cfg.GeminiAPIKey) + resolvedGeminiKey, err := gcpsecret.Resolve(context.Background(), cfg.GeminiAPIKey) if err != nil { return nil, fmt.Errorf("failed to resolve GeminiAPIKey: %w", err) } @@ -70,22 +64,3 @@ func loadConfig(configFile string) (*Config, error) { return cfg, nil } - -func resolvePrefix(val string) (string, error) { - if strings.HasPrefix(val, envPrefix) { - return os.Getenv(val[len(envPrefix):]), nil - } - if strings.HasPrefix(val, gcpSecretPrefix) { - secretName := val[len(gcpSecretPrefix):] - proj, err := gcpsecret.ProjectName(context.Background()) - if err != nil { - return "", fmt.Errorf("failed to get GCP project: %w", err) - } - data, err := gcpsecret.LatestGcpSecret(context.Background(), proj, secretName) - if err != nil { - return "", fmt.Errorf("failed to 
get GCP secret %s: %w", secretName, err) - } - return string(data), nil - } - return val, nil -} diff --git a/syz-agent/config_test.go b/syz-agent/agent/config_test.go similarity index 100% rename from syz-agent/config_test.go rename to syz-agent/agent/config_test.go diff --git a/syz-agent/mcp.go b/syz-agent/agent/mcp.go similarity index 100% rename from syz-agent/mcp.go rename to syz-agent/agent/mcp.go diff --git a/syz-agent/k8s/common/lore-relay/kustomization.yaml b/syz-agent/k8s/common/lore-relay/kustomization.yaml new file mode 100644 index 000000000000..98b8cc8c8844 --- /dev/null +++ b/syz-agent/k8s/common/lore-relay/kustomization.yaml @@ -0,0 +1,7 @@ +# Copyright 2026 syzkaller project authors. All rights reserved. +# Use of this source code is governed by Apache 2 LICENSE that can be found in the LICENSE file. + +apiVersion: kustomize.config.k8s.io/v1beta1 +kind: Kustomization +resources: + - lore-relay.yaml diff --git a/syz-agent/k8s/common/lore-relay/lore-relay.yaml b/syz-agent/k8s/common/lore-relay/lore-relay.yaml new file mode 100644 index 000000000000..4945a08d9cd7 --- /dev/null +++ b/syz-agent/k8s/common/lore-relay/lore-relay.yaml @@ -0,0 +1,46 @@ +# Copyright 2026 syzkaller project authors. All rights reserved. +# Use of this source code is governed by Apache 2 LICENSE that can be found in the LICENSE file. 
+ +apiVersion: apps/v1 +kind: Deployment +metadata: + name: lore-relay + labels: + app: lore-relay +spec: + replicas: 1 + selector: + matchLabels: + app: lore-relay + template: + metadata: + labels: + app: lore-relay + spec: + serviceAccountName: lore-relay-ksa + containers: + - name: lore-relay + image: gcr.io/syzkaller/lore-relay:latest + args: ["-config=/config/lore-relay-config.yaml"] + env: + - name: GIT_DISCOVERY_ACROSS_FILESYSTEM + value: "1" + volumeMounts: + - name: config-volume + mountPath: /config + - name: lore-repo-volume + mountPath: /lore-repo + resources: + requests: + cpu: 2 + memory: 8Gi + limits: + cpu: 4 + memory: 16Gi + volumes: + - name: config-volume + configMap: + name: lore-relay-config + - name: lore-repo-volume + persistentVolumeClaim: + claimName: lore-relay-pvc diff --git a/syz-agent/k8s/overlays/prod/kustomization.yaml b/syz-agent/k8s/overlays/prod/kustomization.yaml index fd620d8e44fb..944ce4a63275 100644 --- a/syz-agent/k8s/overlays/prod/kustomization.yaml +++ b/syz-agent/k8s/overlays/prod/kustomization.yaml @@ -9,16 +9,23 @@ namePrefix: prod- resources: - ../../common + - ../../common/lore-relay images: - name: gcr.io/syzkaller/syz-agent newName: ${IMAGE_NAME} newTag: ${IMAGE_TAG} + - name: gcr.io/syzkaller/lore-relay + newName: ${LORE_RELAY_IMAGE_NAME} + newTag: ${LORE_RELAY_IMAGE_TAG} configMapGenerator: - name: syz-agent-config files: - config.json=agent-config.json + - name: lore-relay-config + files: + - lore-relay-config.yaml=lore-relay-config.yaml patches: - target: diff --git a/syz-agent/k8s/overlays/prod/lore-relay-config.yaml b/syz-agent/k8s/overlays/prod/lore-relay-config.yaml new file mode 100644 index 000000000000..9ad90db3d14b --- /dev/null +++ b/syz-agent/k8s/overlays/prod/lore-relay-config.yaml @@ -0,0 +1,17 @@ +# Copyright 2026 syzkaller project authors. All rights reserved. +# Use of this source code is governed by Apache 2 LICENSE that can be found in the LICENSE file. 
+ +dashboard_addr: "https://syzkaller.appspot.com" +dashboard_client: "lore-relay" +dashboard_key: "gcp-secret:lore-relay-dashboard-key" +lore_url: "https://lore.kernel.org/all/" +own_emails: + - "syzbot@syzkaller.appspotmail.com" + - "syzbot@kernel.org" +lore_archive: "syzbot@lists.linux.dev" +smtp: + host: "gcp-secret:lore-relay-smtp-host" + port: "gcp-secret:lore-relay-smtp-port" + user: "gcp-secret:lore-relay-smtp-user" + password: "gcp-secret:lore-relay-smtp-password" + from: "syzbot " diff --git a/syz-agent/lore-relay/Dockerfile b/syz-agent/lore-relay/Dockerfile new file mode 100644 index 000000000000..85e86a2a3d0e --- /dev/null +++ b/syz-agent/lore-relay/Dockerfile @@ -0,0 +1,22 @@ +# syntax=docker.io/docker/dockerfile:1.7-labs +# Copyright 2026 syzkaller project authors. All rights reserved. +# Use of this source code is governed by Apache 2 LICENSE that can be found in the LICENSE file. + +FROM gcr.io/syzkaller/env AS lore-relay-builder +WORKDIR /syzkaller + +# Prepare the dependencies. +COPY go.mod go.sum ./ +RUN go mod download +COPY . . + +RUN make lore-relay + +FROM alpine:latest + +RUN apk update && \ + apk add git + +WORKDIR /app +COPY --from=lore-relay-builder /syzkaller/bin/syz-lore-relay /app/syz-lore-relay +ENTRYPOINT ["/app/syz-lore-relay"] diff --git a/syz-agent/lore-relay/config.go b/syz-agent/lore-relay/config.go new file mode 100644 index 000000000000..da9504cf69f6 --- /dev/null +++ b/syz-agent/lore-relay/config.go @@ -0,0 +1,88 @@ +// Copyright 2026 syzkaller project authors. All rights reserved. +// Use of this source code is governed by Apache 2 LICENSE that can be found in the LICENSE file. 
+ +package main + +import ( + "context" + "fmt" + "net/mail" + "os" + "time" + + "github.com/google/syzkaller/pkg/gcpsecret" + "gopkg.in/yaml.v3" +) + +type Config struct { + DashboardAddr string `yaml:"dashboard_addr"` + DashboardClient string `yaml:"dashboard_client"` + DashboardKey string `yaml:"dashboard_key"` + LoreURL string `yaml:"lore_url"` + OwnEmails []string `yaml:"own_emails"` + DashboardPollInterval time.Duration `yaml:"dashboard_poll_interval"` + LorePollInterval time.Duration `yaml:"lore_poll_interval"` + DocsLink string `yaml:"docs_link"` + LoreArchive string `yaml:"lore_archive"` + SMTP SMTPConfig `yaml:"smtp"` +} + +type SMTPConfig struct { + Host string `yaml:"host"` + Port string `yaml:"port"` + User string `yaml:"user"` + Password string `yaml:"password"` + From string `yaml:"from"` +} + +func loadConfig(configFile string) (*Config, error) { + data, err := os.ReadFile(configFile) + if err != nil { + return nil, fmt.Errorf("failed to read config file: %w", err) + } + + cfg := &Config{ + DashboardPollInterval: 30 * time.Second, + LorePollInterval: 5 * time.Minute, + } + if err := yaml.Unmarshal(data, cfg); err != nil { + return nil, fmt.Errorf("failed to parse config file: %w", err) + } + resolvedDashKey, err := gcpsecret.Resolve(context.Background(), cfg.DashboardKey) + if err != nil { + return nil, fmt.Errorf("failed to resolve DashboardKey: %w", err) + } + cfg.DashboardKey = resolvedDashKey + + resolvedSMTPHost, err := gcpsecret.Resolve(context.Background(), cfg.SMTP.Host) + if err != nil { + return nil, fmt.Errorf("failed to resolve SMTP Host: %w", err) + } + cfg.SMTP.Host = resolvedSMTPHost + + resolvedSMTPUser, err := gcpsecret.Resolve(context.Background(), cfg.SMTP.User) + if err != nil { + return nil, fmt.Errorf("failed to resolve SMTP User: %w", err) + } + cfg.SMTP.User = resolvedSMTPUser + + resolvedSMTPPort, err := gcpsecret.Resolve(context.Background(), cfg.SMTP.Port) + if err != nil { + return nil, fmt.Errorf("failed to resolve SMTP 
Port: %w", err) + } + cfg.SMTP.Port = resolvedSMTPPort + resolvedSMTPPassword, err := gcpsecret.Resolve(context.Background(), cfg.SMTP.Password) + if err != nil { + return nil, fmt.Errorf("failed to resolve SMTP Password: %w", err) + } + cfg.SMTP.Password = resolvedSMTPPassword + return cfg, nil +} + +func (cfg *Config) ParseFrom() (mail.Address, error) { + addr, err := mail.ParseAddress(cfg.SMTP.From) + if err != nil { + return mail.Address{}, fmt.Errorf("failed to parse SMTP From address: %w", err) + } + return *addr, nil +} diff --git a/syz-agent/lore-relay/main.go b/syz-agent/lore-relay/main.go new file mode 100644 index 000000000000..55652ccbc7af --- /dev/null +++ b/syz-agent/lore-relay/main.go @@ -0,0 +1,93 @@ +// Copyright 2026 syzkaller project authors. All rights reserved. +// Use of this source code is governed by Apache 2 LICENSE that can be found in the LICENSE file. + +package main + +import ( + "context" + "flag" + "log" + "os" + "strconv" + + "github.com/google/syzkaller/dashboard/dashapi" + "github.com/google/syzkaller/pkg/debugtracer" + "github.com/google/syzkaller/pkg/email/lore" + "github.com/google/syzkaller/pkg/email/sender" + lorerelay "github.com/google/syzkaller/pkg/lore-relay" + "github.com/google/syzkaller/pkg/osutil" + "github.com/google/syzkaller/pkg/tool" +) + +func main() { + var ( + flagConfig = flag.String("config", "", "config file") + ) + defer tool.Init()() + flag.Parse() + if *flagConfig == "" { + log.Fatalf("config file is required") + } + + cfg, err := loadConfig(*flagConfig) + if err != nil { + log.Fatalf("failed to load config: %v", err) + } + dash, err := dashapi.New(cfg.DashboardClient, cfg.DashboardAddr, cfg.DashboardKey) + if err != nil { + log.Fatalf("failed to create dashboard client: %v", err) + } + + const repoDir = "/lore-repo/checkout" + poller, err := lore.NewPoller(lore.PollerConfig{ + RepoDir: repoDir, + URL: cfg.LoreURL, + OwnEmails: cfg.OwnEmails, + Tracer: &debugtracer.GenericTracer{TraceWriter: os.Stdout, 
WithTime: true}, + }) + if err != nil { + log.Fatalf("failed to create lore poller: %v", err) + } + + fromAddr, err := cfg.ParseFrom() + if err != nil { + log.Fatalf("failed to parse SMTP from address: %v", err) + } + smtpPort, err := strconv.Atoi(cfg.SMTP.Port) + if err != nil { + log.Fatalf("failed to parse SMTP port %q: %v", cfg.SMTP.Port, err) + } + emailSender := sender.NewSMTPSender(sender.SMTPConfig{ + Host: cfg.SMTP.Host, + Port: smtpPort, + User: cfg.SMTP.User, + Password: cfg.SMTP.Password, + From: fromAddr, + }) + + relayCfg := &lorerelay.Config{ + DashboardPollInterval: cfg.DashboardPollInterval, + LorePollInterval: cfg.LorePollInterval, + DocsLink: cfg.DocsLink, + LoreArchive: cfg.LoreArchive, + Tracer: &debugtracer.GenericTracer{TraceWriter: os.Stderr, WithTime: true}, + } + relay := lorerelay.NewRelay(relayCfg, dash, poller, emailSender) + + ctx := context.Background() + shutdownPending := make(chan struct{}) + osutil.HandleInterrupts(shutdownPending) + + ctx, cancel := context.WithCancel(ctx) + defer cancel() + go func() { + <-shutdownPending + cancel() + }() + + log.Printf("starting lore-relay") + if err := relay.Run(ctx); err != nil && err != context.Canceled { + log.Fatalf("relay failed: %v", err) + } + log.Printf("lore-relay stopped") +} diff --git a/syz-cluster/email-reporter/handler.go b/syz-cluster/email-reporter/handler.go index d0e60826ce5f..5dc5200854ad 100644 --- a/syz-cluster/email-reporter/handler.go +++ b/syz-cluster/email-reporter/handler.go @@ -5,12 +5,15 @@ package main import ( "context" + "errors" "fmt" "log" "strings" "time" "github.com/google/syzkaller/pkg/email" + "github.com/google/syzkaller/pkg/email/lore" + "github.com/google/syzkaller/pkg/email/sender" "github.com/google/syzkaller/syz-cluster/pkg/api" "github.com/google/syzkaller/syz-cluster/pkg/app" @@ -18,6 +21,11 @@ import ( "github.com/google/syzkaller/syz-cluster/pkg/report" ) +var ( + ErrOwnEmail = errors.New("email is from ourselves") + ErrUnknownReport = 
errors.New("cannot identify report") +) + type Handler struct { reporter string reporterClient *api.ReporterClient @@ -73,7 +81,7 @@ func (h *Handler) report(ctx context.Context, rep *api.SessionReport) error { // This should never be happening.. return fmt.Errorf("failed to render the template: %w", err) } - toSend := &emailclient.Email{ + toSend := &sender.Email{ Subject: "Re: " + rep.Series.Title, // TODO: use the original rather than the stripped title. To: rep.Series.Cc, Body: body, @@ -117,11 +125,11 @@ func (h *Handler) report(ctx context.Context, rep *api.SessionReport) error { func (h *Handler) IncomingEmail(ctx context.Context, msg *email.Email) error { if len(msg.BugIDs) == 0 { // Unrelated email. - return nil + return ErrUnknownReport } if msg.OwnEmail && !strings.HasPrefix(msg.Subject, email.ForwardedPrefix) { // We normally ignore our own emails, with the exception of the emails forwarded from the dashboard. - return nil + return ErrOwnEmail } reportID := msg.BugIDs[0] @@ -171,7 +179,7 @@ func (h *Handler) IncomingEmail(ctx context.Context, msg *email.Email) error { if reply == "" { return nil } - _, err := h.sender(ctx, &emailclient.Email{ + _, err := h.sender(ctx, &sender.Email{ To: []string{msg.Author}, Cc: msg.Cc, Subject: "Re: " + msg.Subject, @@ -180,3 +188,47 @@ func (h *Handler) IncomingEmail(ctx context.Context, msg *email.Email) error { }) return err } + +func (h *Handler) ProcessPolledEmail(ctx context.Context, polled *lore.PolledEmail) error { + parsed := polled.Email + var reportID string + if h.emailConfig.Dashapi != nil && h.emailConfig.Dashapi.ContextPrefix != "" { + for _, id := range parsed.BugIDs { + if strings.HasPrefix(id, h.emailConfig.Dashapi.ContextPrefix) { + reportID = strings.TrimPrefix(id, h.emailConfig.Dashapi.ContextPrefix) + break + } + } + } + if dashapi := h.emailConfig.Dashapi; dashapi != nil && dashapi.ContextPrefix != "" { + for i, id := range parsed.BugIDs { + if strings.HasPrefix(id, dashapi.ContextPrefix) { + 
parsed.BugIDs[i] = strings.TrimPrefix(id, dashapi.ContextPrefix) + } + } + } + // Record reply for idempotency. + res, err := h.reporterClient.RecordReply(ctx, &api.RecordReplyReq{ + MessageID: parsed.MessageID, + ReportID: reportID, + RootMessageID: polled.RootMessageID, + Reporter: h.reporter, + Time: parsed.Date, + }) + if err != nil { + return fmt.Errorf("email %q: failed to record reply: %w", parsed.MessageID, err) + } + if res.ReportID == "" { + if len(parsed.BugIDs) == 0 { + return ErrUnknownReport + } + } else if !res.New { + log.Printf("email %q: already seen, skipping", parsed.MessageID) + return nil + } else { + if len(parsed.BugIDs) == 0 { + parsed.BugIDs = []string{res.ReportID} + } + } + return h.IncomingEmail(ctx, parsed.Email) +} diff --git a/syz-cluster/email-reporter/handler_test.go b/syz-cluster/email-reporter/handler_test.go index 33144a86ca3e..f554ca16c1a2 100644 --- a/syz-cluster/email-reporter/handler_test.go +++ b/syz-cluster/email-reporter/handler_test.go @@ -8,6 +8,7 @@ import ( "testing" "github.com/google/syzkaller/pkg/email" + "github.com/google/syzkaller/pkg/email/sender" "github.com/google/syzkaller/syz-cluster/pkg/api" "github.com/google/syzkaller/syz-cluster/pkg/app" "github.com/google/syzkaller/syz-cluster/pkg/controller" @@ -18,8 +19,6 @@ import ( "github.com/stretchr/testify/require" ) -var testEmailConfig = emailclient.TestEmailConfig() - func TestModerationReportFlow(t *testing.T) { env, ctx := app.TestEnvironment(t) testSeries := controller.DummySeries() @@ -31,7 +30,8 @@ func TestModerationReportFlow(t *testing.T) { receivedEmail := emailServer.email() assert.NotNil(t, receivedEmail, "a moderation email must be sent") receivedEmail.Body = nil // for now don't validate the body - assert.Equal(t, &emailclient.Email{ + testEmailConfig := emailclient.TestEmailConfig() + assert.Equal(t, &sender.Email{ To: []string{testEmailConfig.ModerationList}, Cc: []string{testEmailConfig.ArchiveList}, Subject: "[moderation/CI] Re: " + 
testSeries.Title, @@ -57,9 +57,9 @@ func TestModerationReportFlow(t *testing.T) { receivedEmail = emailServer.email() assert.NotNil(t, receivedEmail, "an email must be sent upstream") receivedEmail.Body = nil - assert.Equal(t, &emailclient.Email{ + assert.Equal(t, &sender.Email{ To: testSeries.Cc, - Cc: append([]string{testEmailConfig.ArchiveList}, testEmailConfig.ReportCC...), + Cc: append([]string{emailclient.TestEmailConfig().ArchiveList}, emailclient.TestEmailConfig().ReportCC...), Subject: "[name] Re: " + testSeries.Title, InReplyTo: testSeries.ExtID, BugID: report.ID, @@ -118,7 +118,7 @@ func TestInvalidReply(t *testing.T) { }, }, }) - assert.NoError(t, err) + assert.ErrorIs(t, err, ErrUnknownReport) _, err = handler.PollAndReport(ctx) assert.NoError(t, err) // No email must be sent in reply. @@ -143,7 +143,7 @@ func TestInvalidReply(t *testing.T) { assert.NoError(t, err) reply := emailServer.email() assert.NotNil(t, reply) - assert.Equal(t, &emailclient.Email{ + assert.Equal(t, &sender.Email{ To: []string{"user@email.com"}, Cc: []string{"a@a.com", "b@b.com"}, Subject: "Re: Command", @@ -166,7 +166,7 @@ syzbot-ci does not support` + " `fix:` " + `command }, }, }) - assert.NoError(t, err) + assert.ErrorIs(t, err, ErrOwnEmail) _, err = handler.PollAndReport(ctx) assert.NoError(t, err) // No email must be sent in reply. 
@@ -232,6 +232,7 @@ func TestSyzTestFlow(t *testing.T) { require.NoError(t, err) reportReply := emailServer.email() + testEmailConfig := emailclient.TestEmailConfig() require.NotNil(t, reportReply, "an email must be sent with the test results") assert.Equal(t, "user-reply-msg-id", reportReply.InReplyTo) assert.Equal(t, []string{"user@email.com", "test-cc@email.com", "other@email.com"}, reportReply.To) @@ -301,7 +302,7 @@ func setupHandlerTest(t *testing.T, ctx context.Context, env *app.AppEnvironment reporter: api.LKMLReporter, reporterClient: reporterClient, apiClient: client, - emailConfig: testEmailConfig, + emailConfig: emailclient.TestEmailConfig(), sender: emailServer.send, } @@ -309,21 +310,21 @@ func setupHandlerTest(t *testing.T, ctx context.Context, env *app.AppEnvironment } type fakeSender struct { - ch chan *emailclient.Email + ch chan *sender.Email } func makeFakeSender() *fakeSender { return &fakeSender{ - ch: make(chan *emailclient.Email, 16), + ch: make(chan *sender.Email, 16), } } -func (f *fakeSender) send(ctx context.Context, e *emailclient.Email) (string, error) { +func (f *fakeSender) send(ctx context.Context, e *sender.Email) (string, error) { f.ch <- e return "email-id", nil } -func (f *fakeSender) email() *emailclient.Email { +func (f *fakeSender) email() *sender.Email { select { case e := <-f.ch: return e diff --git a/syz-cluster/email-reporter/main.go b/syz-cluster/email-reporter/main.go index b01e293db449..1e6514671897 100644 --- a/syz-cluster/email-reporter/main.go +++ b/syz-cluster/email-reporter/main.go @@ -7,9 +7,11 @@ package main import ( "context" - "log" + "errors" + "os" "time" + "github.com/google/syzkaller/pkg/debugtracer" "github.com/google/syzkaller/pkg/email/lore" "github.com/google/syzkaller/syz-cluster/pkg/api" "github.com/google/syzkaller/syz-cluster/pkg/app" @@ -49,29 +51,23 @@ func main() { emailConfig: cfg.EmailReporting, sender: sender, } - msgCh := make(chan *lore.Email, 16) + msgCh := make(chan *lore.PolledEmail, 
16) eg, loopCtx := errgroup.WithContext(ctx) if cfg.EmailReporting.LoreArchiveURL != "" { - fetcher := NewLKMLEmailStream("/lore-repo/checkout", reporterClient, cfg.EmailReporting, msgCh) - eg.Go(func() error { return fetcher.Loop(loopCtx, fetcherPollPeriod) }) - } - eg.Go(func() error { - for { - var newEmail *lore.Email - select { - case newEmail = <-msgCh: - case <-loopCtx.Done(): + poller, err := MakeLorePoller("/lore-repo/checkout", cfg.EmailReporting, msgCh) + if err != nil { + app.Fatalf("failed to create poller: %v", err) + } + eg.Go(func() error { + err := poller.Loop(loopCtx, fetcherPollPeriod, msgCh) + if err == context.Canceled { return nil } - log.Printf("received email %q", newEmail.MessageID) - err := handler.IncomingEmail(loopCtx, newEmail.Email) - if err != nil { - // Note that we just print an error and go on instead of retrying. - // Some retrying may be reasonable, but it also comes with a risk of flooding - // the mailing lists. - app.Errorf("email %q: failed to process: %v", newEmail.MessageID, err) - } - } + return err + }) + } + eg.Go(func() error { + return runConsumerLoop(loopCtx, msgCh, handler) }) eg.Go(func() error { handler.PollReportsLoop(loopCtx, senderPollPeriod) @@ -81,3 +77,33 @@ func main() { app.Errorf("failed: %s", err) } } + +func runConsumerLoop(ctx context.Context, msgCh <-chan *lore.PolledEmail, handler *Handler) error { + for { + select { + case polled := <-msgCh: + err := handler.ProcessPolledEmail(ctx, polled) + if err != nil && !errors.Is(err, ErrOwnEmail) && !errors.Is(err, ErrUnknownReport) { + app.Errorf("failed to process email: %v", err) + } + case <-ctx.Done(): + return nil + } + } +} + +func MakeLorePoller(repoDir string, emailCfg *app.EmailConfig, msgCh chan *lore.PolledEmail) (*lore.Poller, error) { + var ownEmails []string + if emailCfg.Dashapi != nil { + ownEmails = append(ownEmails, emailCfg.Dashapi.From) + } + if emailCfg.SMTP != nil { + ownEmails = append(ownEmails, emailCfg.SMTP.From) + } + return 
lore.NewPoller(lore.PollerConfig{ + RepoDir: repoDir, + URL: emailCfg.LoreArchiveURL, + OwnEmails: ownEmails, + Tracer: &debugtracer.GenericTracer{TraceWriter: os.Stdout, WithTime: true}, + }) +} diff --git a/syz-cluster/email-reporter/stream.go b/syz-cluster/email-reporter/stream.go deleted file mode 100644 index 7b14a33368a8..000000000000 --- a/syz-cluster/email-reporter/stream.go +++ /dev/null @@ -1,154 +0,0 @@ -// Copyright 2025 syzkaller project authors. All rights reserved. -// Use of this source code is governed by Apache 2 LICENSE that can be found in the LICENSE file. - -package main - -import ( - "context" - "fmt" - "log" - "strings" - "time" - - "github.com/google/syzkaller/pkg/email/lore" - "github.com/google/syzkaller/pkg/vcs" - "github.com/google/syzkaller/syz-cluster/pkg/api" - "github.com/google/syzkaller/syz-cluster/pkg/app" -) - -type LKMLEmailStream struct { - cfg *app.EmailConfig - ownEmails []string - reporterName string - repoFolder string - client *api.ReporterClient - newMessages chan *lore.Email - lastCommitDate time.Time - lastCommit string -} - -func NewLKMLEmailStream(repoFolder string, client *api.ReporterClient, - cfg *app.EmailConfig, writeTo chan *lore.Email) *LKMLEmailStream { - var ownEmails []string - if cfg.Dashapi != nil { - ownEmails = append(ownEmails, cfg.Dashapi.From) - } - if cfg.SMTP != nil { - ownEmails = append(ownEmails, cfg.SMTP.From) - } - return &LKMLEmailStream{ - cfg: cfg, - ownEmails: ownEmails, - reporterName: api.LKMLReporter, - repoFolder: repoFolder, - client: client, - newMessages: writeTo, - } -} - -const ( - // Don't consider older replies. 
- relevantPeriod = 7 * 24 * time.Hour -) - -func (s *LKMLEmailStream) Loop(ctx context.Context, pollPeriod time.Duration) error { - defer log.Printf("lore archive polling aborted") - log.Printf("lore archive %s polling started", s.cfg.LoreArchiveURL) - - last, err := s.client.LastReply(ctx, s.reporterName) - if err != nil { - return fmt.Errorf("failed to query the last reply: %w", err) - } - // We assume that the archive mostly consists of relevant emails, so after the restart - // we just start with the last saved message's date. - s.lastCommitDate = time.Now().Add(-relevantPeriod) - if last != nil && last.Time.After(s.lastCommitDate) { - s.lastCommitDate = last.Time - } - for { - err := s.fetchMessages(ctx) - if err != nil { - // Occasional errors are fine. - log.Printf("failed to poll the lore archive messages: %v", err) - } - select { - case <-ctx.Done(): - return nil - case <-time.After(pollPeriod): - } - } -} - -func (s *LKMLEmailStream) fetchMessages(ctx context.Context) error { - gitRepo := vcs.NewLKMLRepo(s.repoFolder) - _, err := gitRepo.Poll(s.cfg.LoreArchiveURL, "master") - if err != nil { - return err - } - var messages []lore.EmailReader - if s.lastCommit != "" { - // If it's not the first iteration, it's better to rely on the last commit hash. - messages, err = lore.ReadArchive(gitRepo, s.lastCommit, time.Time{}) - } else { - messages, err = lore.ReadArchive(gitRepo, "", s.lastCommitDate) - } - if err != nil { - return err - } - // From oldest to newest. - for i := len(messages) - 1; i >= 0; i-- { - msg := messages[i] - parsed, err := msg.Parse(s.ownEmails, nil) - if err != nil || parsed == nil { - log.Printf("failed to parse the email from hash %q: %v", msg.Hash, err) - continue - } - if msg.CommitDate.After(s.lastCommitDate) { - s.lastCommitDate = msg.CommitDate - } - s.lastCommit = msg.Hash - - // We cannot fully trust the date specified in the message itself, so let's sanitize it - // using the commit date. 
It will at least help us prevent weird client.lastReply() responses. - messageDate := parsed.Date - if messageDate.After(msg.CommitDate) { - messageDate = msg.CommitDate - } - resp, err := s.client.RecordReply(ctx, &api.RecordReplyReq{ - MessageID: parsed.MessageID, - ReportID: s.extractMessageID(parsed), - InReplyTo: parsed.InReplyTo, - Reporter: s.reporterName, - Time: messageDate, - }) - if err != nil || resp == nil { - // TODO: retry? - app.Errorf("failed to report email %q: %v", parsed.MessageID, err) - continue - } else if resp.ReportID != "" { - if !resp.New { - continue - } - parsed.BugIDs = []string{resp.ReportID} - } - select { - case s.newMessages <- parsed: - case <-ctx.Done(): - } - } - return nil -} - -// If the message was sent via the dashapi sender, the report ID wil be a part of the email address. -func (s *LKMLEmailStream) extractMessageID(msg *lore.Email) string { - if s.cfg.Dashapi == nil { - // The mode is not configured. - return "" - } - for _, id := range msg.BugIDs { - if strings.HasPrefix(id, s.cfg.Dashapi.ContextPrefix) { - return strings.TrimPrefix(id, s.cfg.Dashapi.ContextPrefix) - } - } - return "" -} diff --git a/syz-cluster/email-reporter/stream_test.go b/syz-cluster/email-reporter/stream_test.go index 6fa954175260..9c5f8935c20e 100644 --- a/syz-cluster/email-reporter/stream_test.go +++ b/syz-cluster/email-reporter/stream_test.go @@ -1,35 +1,32 @@ -// Copyright 2025 syzkaller project authors. All rights reserved. +// Copyright 2026 syzkaller project authors. All rights reserved. // Use of this source code is governed by Apache 2 LICENSE that can be found in the LICENSE file. 
package main import ( - "context" - "os" - "path/filepath" "testing" - "time" "github.com/google/syzkaller/pkg/email/lore" - "github.com/google/syzkaller/pkg/vcs" "github.com/google/syzkaller/syz-cluster/pkg/api" "github.com/google/syzkaller/syz-cluster/pkg/app" "github.com/google/syzkaller/syz-cluster/pkg/controller" - "github.com/google/syzkaller/syz-cluster/pkg/reporter" + "github.com/google/syzkaller/syz-cluster/pkg/emailclient" "github.com/stretchr/testify/assert" ) -func TestEmailStream(t *testing.T) { +func TestPollerIntegration(t *testing.T) { env, ctx := app.TestEnvironment(t) testSeries := controller.DummySeries() handler, reporterClient, emailServer, _ := setupHandlerTest(t, ctx, env, testSeries) + + // Send a report to get a report ID. report, err := handler.PollAndReport(ctx) assert.NoError(t, err) - _ = emailServer.email() // Consume the moderation email for the original bug report. + _ = emailServer.email() // Consume moderation email. - // Simulate our reply. err = reporterClient.ConfirmReport(ctx, report.ID) assert.NoError(t, err) + const messageID = "" _, err = reporterClient.RecordReply(ctx, &api.RecordReplyReq{ MessageID: messageID, @@ -38,181 +35,125 @@ func TestEmailStream(t *testing.T) { }) assert.NoError(t, err) - // Emulate the lore archive and set up the loop. - loreArchive := newLoreArchive(t) - writeTo := make(chan *lore.Email, 16) - emailCfg := &app.EmailConfig{ - LoreArchiveURL: loreArchive.remoteRef(), - SMTP: &app.SMTPConfig{ - From: `syzbot@syzkaller.appspotmail.com`, - }, - Dashapi: &app.DashapiConfig{ - From: "bot@syzbot.org", - ContextPrefix: "ci_", - }, - } - stream := NewLKMLEmailStream(t.TempDir(), reporterClient, emailCfg, writeTo) - cancel := startStreamLoop(t, ctx, stream) - - t.Logf("sending a direct reply") - loreArchive.saveMessage(t, `Date: Sun, 7 May 2017 19:54:00 -0700 + // Emulate lore archive. 
+ loreArchive := lore.NewTestLoreArchive(t, t.TempDir()) + writeTo := make(chan *lore.PolledEmail, 16) + emailCfg := emailclient.TestEmailConfig() + emailCfg.LoreArchiveURL = loreArchive.Repo.Dir + + poller, err := MakeLorePoller(t.TempDir(), emailCfg, writeTo) + assert.NoError(t, err) + + t.Logf("sending a reply") + loreArchive.SaveMessage(t, `Date: Sun, 7 May 2017 19:54:00 -0700 Subject: Reply to the Report Message-ID: In-Reply-To: `+messageID+` From: Someone Content-Type: text/plain - + `) - msg := <-writeTo - assert.Equal(t, "", msg.MessageID) - assert.Equal(t, []string{report.ID}, msg.BugIDs) - t.Logf("sending an indirect reply") - loreArchive.saveMessage(t, `Date: Sun, 7 May 2017 19:55:00 -0700 -Subject: Reply to the Reply -Message-ID: -In-Reply-To: -From: Someone Else -Content-Type: text/plain - -`) - msg = <-writeTo - assert.Equal(t, []string{report.ID}, msg.BugIDs) - - t.Logf("simulating a reply under a test job result") - submitResp, err := handler.apiClient.SubmitJob(ctx, &api.SubmitJobRequest{ - Type: api.JobPatchTest, - ReportID: report.ID, - Reporter: api.LKMLReporter, - User: "testuser@domain.com", - ExtID: "doesnt-matter", - Cc: []string{"testuser@domain.com", "stream-test-cc@domain.com"}, - PatchData: []byte("--- a\n+++ b\n"), - }) + err = poller.Poll(ctx, writeTo) assert.NoError(t, err) - _ = controller.FakeJobSession(t, env, handler.apiClient, submitResp.SessionID) - generator := reporter.NewGenerator(env) - err = generator.Process(ctx, 1) - assert.NoError(t, err) - patchTestReport, err := handler.PollAndReport(ctx) - assert.NoError(t, err) + polled := <-writeTo + assert.Equal(t, messageID, polled.RootMessageID) - reportReply := emailServer.email() - assert.NotNil(t, reportReply, "an email must be sent") - assert.Equal(t, []string{"testuser@domain.com", "stream-test-cc@domain.com"}, reportReply.To) + err = handler.ProcessPolledEmail(ctx, polled) + assert.NoError(t, err) - const patchTestMessageID = "" - _, err = reporterClient.RecordReply(ctx, 
&api.RecordReplyReq{ - MessageID: patchTestMessageID, - ReportID: patchTestReport.ID, - Reporter: api.LKMLReporter, + // Verify it was recorded by checking if we can record it again as new. + res, err := reporterClient.RecordReply(ctx, &api.RecordReplyReq{ + MessageID: "", + RootMessageID: polled.RootMessageID, + Reporter: api.LKMLReporter, }) assert.NoError(t, err) + assert.False(t, res.New) - // In replies under patch test reports we must recognize the original report ID. - loreArchive.saveMessage(t, `Date: Sun, 7 May 2017 19:55:00 -0700 -Subject: Reply to the Patch Test Reply -Message-ID: -In-Reply-To: `+patchTestMessageID+` -From: Someone Else + t.Run("ignore-own-email", func(t *testing.T) { + loreArchive.SaveMessage(t, `Date: Sun, 7 May 2017 19:54:00 -0700 +Subject: Reply from Bot +Message-ID: +In-Reply-To: `+messageID+` +From: Bot Content-Type: text/plain +#syz upstream `) - msg = <-writeTo - assert.Equal(t, []string{report.ID}, msg.BugIDs) + err = poller.Poll(ctx, writeTo) + assert.NoError(t, err) - t.Logf("sending an unrelated message") - loreArchive.saveMessage(t, `Date: Sun, 7 May 2017 19:56:00 -0700 -Subject: Reply to the Reply -Message-ID: -From: Someone Else -Content-Type: text/plain + polled := <-writeTo + err = handler.ProcessPolledEmail(ctx, polled) + assert.ErrorIs(t, err, ErrOwnEmail) -`) - msg = <-writeTo - assert.Len(t, msg.BugIDs, 0) + // Verify no email was sent in reply (it should be ignored). 
+ assert.Nil(t, emailServer.email()) + }) - t.Logf("identify by email context") - loreArchive.saveMessage(t, `Date: Sun, 7 May 2017 19:55:00 -0700 -Subject: New thread -Message-ID: -In-Reply-To: -From: Someone Else -Cc: + t.Run("indirect-reply", func(t *testing.T) { + loreArchive.SaveMessage(t, `Date: Sun, 7 May 2017 19:55:00 -0700 +Subject: Reply to the Reply +Message-ID: +In-Reply-To: +From: Someone Else Content-Type: text/plain `) - msg = <-writeTo - assert.Equal(t, []string{report.ID}, msg.BugIDs) + err = poller.Poll(ctx, writeTo) + assert.NoError(t, err) - t.Logf("own email (SMTP)") - loreArchive.saveMessage(t, `Date: Sun, 7 May 2017 19:55:00 -0700 -Subject: New thread -Message-ID: -In-Reply-To: -From: Ourselves <`+emailCfg.SMTP.From+`> -Content-Type: text/plain + polled := <-writeTo + assert.Equal(t, messageID, polled.RootMessageID) -`) - msg = <-writeTo - assert.True(t, msg.OwnEmail) + err = handler.ProcessPolledEmail(ctx, polled) + assert.NoError(t, err) - t.Logf("own email (dashapi)") - loreArchive.saveMessage(t, `Date: Sun, 7 May 2017 19:55:00 -0700 + // Verify it was recorded. + res, err := reporterClient.RecordReply(ctx, &api.RecordReplyReq{ + MessageID: "", + RootMessageID: polled.RootMessageID, + Reporter: api.LKMLReporter, + }) + assert.NoError(t, err) + assert.False(t, res.New) + }) + + t.Run("identify-by-email-context", func(t *testing.T) { + loreArchive.SaveMessage(t, `Date: Sun, 7 May 2017 19:55:00 -0700 Subject: New thread Message-ID: In-Reply-To: -From: Ourselves <`+emailCfg.Dashapi.From+`> +From: Someone Else +Cc: Content-Type: text/plain `) - msg = <-writeTo - assert.True(t, msg.OwnEmail) - - t.Logf("stopping the loop") - cancel() - - // Emulate service restart. - stream = NewLKMLEmailStream(t.TempDir(), reporterClient, emailCfg, writeTo) - cancel = startStreamLoop(t, ctx, stream) - defer cancel() - // Only the unrelated message is expected to pop up. 
- msg = <-writeTo - assert.Equal(t, "", msg.MessageID) -} + err = poller.Poll(ctx, writeTo) + assert.NoError(t, err) -func startStreamLoop(t *testing.T, ctx context.Context, stream *LKMLEmailStream) func() { - done := make(chan struct{}) - loopCtx, cancel := context.WithCancel(ctx) - go func() { - err := stream.Loop(loopCtx, time.Second/10) + polled := <-writeTo + err = handler.ProcessPolledEmail(ctx, polled) assert.NoError(t, err) - close(done) - }() - return func() { - cancel() - <-done - } -} -type loreArchive struct { - repo *vcs.TestRepo -} + assert.Equal(t, []string{report.ID}, polled.Email.BugIDs) + }) -func newLoreArchive(t *testing.T) *loreArchive { - return &loreArchive{ - repo: vcs.MakeTestRepo(t, t.TempDir()), - } -} + t.Run("unknown-report", func(t *testing.T) { + loreArchive.SaveMessage(t, `Date: Sun, 7 May 2017 19:56:00 -0700 +Subject: Unrelated Message +Message-ID: +From: Someone Else +Content-Type: text/plain -func (a *loreArchive) remoteRef() string { - return a.repo.Dir -} +`) + err = poller.Poll(ctx, writeTo) + assert.NoError(t, err) -func (a *loreArchive) saveMessage(t *testing.T, raw string) { - err := os.WriteFile(filepath.Join(a.repo.Dir, "m"), []byte(raw), 0666) - assert.NoError(t, err) - a.repo.Git("add", "m") - a.repo.CommitChange("some title") + polled := <-writeTo + err = handler.ProcessPolledEmail(ctx, polled) + assert.ErrorIs(t, err, ErrUnknownReport) + }) } diff --git a/syz-cluster/pkg/api/reporter.go b/syz-cluster/pkg/api/reporter.go index c01c2b4ae3b4..7b03b0f31790 100644 --- a/syz-cluster/pkg/api/reporter.go +++ b/syz-cluster/pkg/api/reporter.go @@ -53,10 +53,10 @@ func (client ReporterClient) InvalidateReport(ctx context.Context, id string) er type RecordReplyReq struct { MessageID string `json:"message_id"` ReportID string `json:"report_id"` - // If ReportID is not set, InReplyTo will help identify the original report. 
- InReplyTo string `json:"in_reply_to"` - Reporter string `json:"reporter"` - Time time.Time `json:"time"` + // If ReportID is not set, RootMessageID will help identify the original report. + RootMessageID string `json:"root_message_id"` + Reporter string `json:"reporter"` + Time time.Time `json:"time"` } type RecordReplyResp struct { diff --git a/syz-cluster/pkg/emailclient/dashapi_sender.go b/syz-cluster/pkg/emailclient/dashapi_sender.go deleted file mode 100644 index 7b69d7511610..000000000000 --- a/syz-cluster/pkg/emailclient/dashapi_sender.go +++ /dev/null @@ -1,40 +0,0 @@ -// Copyright 2025 syzkaller project authors. All rights reserved. -// Use of this source code is governed by Apache 2 LICENSE that can be found in the LICENSE file. - -package emailclient - -import ( - "context" - "github.com/google/syzkaller/dashboard/dashapi" - "github.com/google/syzkaller/pkg/email" - "github.com/google/syzkaller/syz-cluster/pkg/app" - "net/mail" -) - -func makeDashapiSender(cfg *app.EmailConfig) (Sender, error) { - dash, err := dashapi.New(cfg.Dashapi.Client, cfg.Dashapi.Addr, "") - if err != nil { - return nil, err - } - return func(_ context.Context, item *Email) (string, error) { - sender := (&mail.Address{ - Name: cfg.Name, - Address: cfg.Dashapi.From, - }).String() - if item.BugID != "" { - var err error - sender, err = email.AddAddrContext(sender, cfg.Dashapi.ContextPrefix+item.BugID) - if err != nil { - return "", err - } - } - return "", dash.SendEmail(&dashapi.SendEmailReq{ - Sender: sender, - To: item.To, - Cc: item.Cc, - Subject: cfg.SubjectPrefix + item.Subject, - InReplyTo: item.InReplyTo, - Body: string(item.Body), - }) - }, nil -} diff --git a/syz-cluster/pkg/emailclient/sender.go b/syz-cluster/pkg/emailclient/sender.go index 33a781d85954..8a28192c60bb 100644 --- a/syz-cluster/pkg/emailclient/sender.go +++ b/syz-cluster/pkg/emailclient/sender.go @@ -6,55 +6,121 @@ package emailclient import ( "context" "fmt" + "net/mail" + "strconv" + 
"github.com/google/syzkaller/pkg/email/sender" + "github.com/google/syzkaller/pkg/gcpsecret" "github.com/google/syzkaller/syz-cluster/pkg/app" ) -type Email struct { - To []string - Cc []string - Subject string - InReplyTo string - Body []byte - BugID string // In case it's to be included into Sender. -} - -func (item *Email) recipients() []string { - var ret []string - ret = append(ret, item.To...) - ret = append(ret, item.Cc...) - return unique(ret) -} - -type Sender func(context.Context, *Email) (string, error) +// Sender is the function type used by syz-cluster to send emails. +type Sender func(context.Context, *sender.Email) (string, error) +// MakeSender creates a Sender based on the configuration. func MakeSender(ctx context.Context, cfg *app.EmailConfig) (Sender, error) { switch cfg.Sender { case app.SenderSMTP: - sender, err := newSMTPSender(ctx, cfg) + s, err := newSMTPSender(ctx, cfg) if err != nil { return nil, err } - return sender.Send, nil + return s.Send, nil case app.SenderDashapi: - return makeDashapiSender(cfg) + s, err := sender.NewDashapiSender(sender.DashapiConfig{ + Client: cfg.Dashapi.Client, + Addr: cfg.Dashapi.Addr, + From: mail.Address{ + Name: cfg.Name, + Address: cfg.Dashapi.From, + }, + ContextPrefix: cfg.Dashapi.ContextPrefix, + SubjectPrefix: cfg.SubjectPrefix, + }) + if err != nil { + return nil, err + } + return s.Send, nil } return nil, fmt.Errorf("unsupported sender type: %q", cfg.Sender) } -func unique(list []string) []string { - var ret []string - seen := map[string]struct{}{} - for _, str := range list { - if _, ok := seen[str]; ok { - continue +func newSMTPSender(ctx context.Context, cfg *app.EmailConfig) (sender.Sender, error) { + project, err := gcpsecret.ProjectName(ctx) + if err != nil { + return nil, fmt.Errorf("failed to query project name: %w", err) + } + + creds, err := queryCredentials(ctx, project) + if err != nil { + return nil, err + } + + smtpCfg := sender.SMTPConfig{ + Host: creds.host, + Port: creds.port, + 
User: creds.user, + Password: creds.password, + From: mail.Address{ + Name: cfg.Name, + Address: cfg.SMTP.From, + }, + } + + return sender.NewSMTPSender(smtpCfg), nil +} + +const ( + SecretSMTPHost string = "smtp_host" + SecretSMTPPort string = "smtp_port" + SecretSMTPUser string = "smtp_user" + SecretSMTPPassword string = "smtp_password" +) + +type smtpCredentials struct { + host string + port int + user string + password string +} + +func queryCredentials(ctx context.Context, projectName string) (smtpCredentials, error) { + values := map[string]string{} + for _, key := range []string{ + SecretSMTPHost, SecretSMTPPort, SecretSMTPUser, SecretSMTPPassword, + } { + var err error + values[key], err = querySecret(ctx, projectName, key) + if err != nil { + return smtpCredentials{}, err } - seen[str] = struct{}{} - ret = append(ret, str) } - return ret + port, err := strconv.Atoi(values[SecretSMTPPort]) + if err != nil { + return smtpCredentials{}, fmt.Errorf("failed to parse SMTP port: not a valid integer") + } + return smtpCredentials{ + host: values[SecretSMTPHost], + port: port, + user: values[SecretSMTPUser], + password: values[SecretSMTPPassword], + }, nil +} + +func querySecret(ctx context.Context, projectName, key string) (string, error) { + const retries = 3 + var err error + for i := 0; i < retries; i++ { + var val []byte + val, err = gcpsecret.LatestGcpSecret(ctx, projectName, key) + if err == nil { + return string(val), nil + } + } + return "", fmt.Errorf("failed to query %v: %w", key, err) } +// TestEmailConfig returns a standard configuration for testing. 
func TestEmailConfig() *app.EmailConfig { return &app.EmailConfig{ Name: "name", @@ -66,5 +132,9 @@ func TestEmailConfig() *app.EmailConfig { SMTP: &app.SMTPConfig{ From: "a@b.com", }, + Dashapi: &app.DashapiConfig{ + From: "bot@syzbot.com", + ContextPrefix: "ci_", + }, } } diff --git a/syz-cluster/pkg/emailclient/smtp_sender.go b/syz-cluster/pkg/emailclient/smtp_sender.go deleted file mode 100644 index 7a0e3829d563..000000000000 --- a/syz-cluster/pkg/emailclient/smtp_sender.go +++ /dev/null @@ -1,126 +0,0 @@ -// Copyright 2025 syzkaller project authors. All rights reserved. -// Use of this source code is governed by Apache 2 LICENSE that can be found in the LICENSE file. - -package emailclient - -import ( - "bytes" - "context" - "fmt" - "net/smtp" - "strconv" - "strings" - - "github.com/google/syzkaller/pkg/gcpsecret" - "github.com/google/syzkaller/syz-cluster/pkg/app" - "github.com/google/uuid" -) - -type smtpSender struct { - cfg *app.EmailConfig - projectName string // needed for querying credentials -} - -func newSMTPSender(ctx context.Context, cfg *app.EmailConfig) (*smtpSender, error) { - project, err := gcpsecret.ProjectName(ctx) - if err != nil { - return nil, fmt.Errorf("failed to query project name: %w", err) - } - return &smtpSender{ - cfg: cfg, - projectName: project, - }, nil -} - -// Send constructs a raw email from EmailToSend and sends it over SMTP. 
-func (sender *smtpSender) Send(ctx context.Context, item *Email) (string, error) { - creds, err := sender.queryCredentials(ctx) - if err != nil { - return "", fmt.Errorf("failed to query credentials: %w", err) - } - msgID := fmt.Sprintf("<%s@%s>", uuid.NewString(), creds.host) - msg := rawEmail(sender.cfg, item, msgID) - auth := smtp.PlainAuth("", creds.host, creds.password, creds.host) - smtpAddr := fmt.Sprintf("%s:%d", creds.host, creds.port) - return msgID, smtp.SendMail(smtpAddr, auth, sender.cfg.SMTP.From, item.recipients(), msg) -} - -func rawEmail(cfg *app.EmailConfig, item *Email, id string) []byte { - var msg bytes.Buffer - - fmt.Fprintf(&msg, "From: %s <%s>\r\n", cfg.Name, cfg.SMTP.From) - fmt.Fprintf(&msg, "To: %s\r\n", strings.Join(item.To, ", ")) - if len(item.Cc) > 0 { - fmt.Fprintf(&msg, "Cc: %s\r\n", strings.Join(item.Cc, ", ")) - } - fmt.Fprintf(&msg, "Subject: %s\r\n", item.Subject) - if item.InReplyTo != "" { - inReplyTo := item.InReplyTo - if inReplyTo[0] != '<' { - inReplyTo = "<" + inReplyTo + ">" - } - fmt.Fprintf(&msg, "In-Reply-To: %s\r\n", inReplyTo) - } - if id != "" { - if id[0] != '<' { - id = "<" + id + ">" - } - fmt.Fprintf(&msg, "Message-ID: %s\r\n", id) - } - msg.WriteString("MIME-Version: 1.0\r\n") - msg.WriteString("Content-Type: text/plain; charset=UTF-8\r\n") - msg.WriteString("Content-Transfer-Encoding: 8bit\r\n") - msg.WriteString("\r\n") - msg.Write(item.Body) - return msg.Bytes() -} - -const ( - SecretSMTPHost string = "smtp_host" - SecretSMTPPort string = "smtp_port" - SecretSMTPUser string = "smtp_user" - SecretSMTPPassword string = "smtp_password" -) - -type smtpCredentials struct { - host string - port int - user string - password string -} - -func (sender *smtpSender) queryCredentials(ctx context.Context) (smtpCredentials, error) { - values := map[string]string{} - for _, key := range []string{ - SecretSMTPHost, SecretSMTPPort, SecretSMTPUser, SecretSMTPPassword, - } { - var err error - values[key], err = 
sender.querySecret(ctx, key) - if err != nil { - return smtpCredentials{}, err - } - } - port, err := strconv.Atoi(values[SecretSMTPPort]) - if err != nil { - return smtpCredentials{}, fmt.Errorf("failed to parse SMTP port: not a valid integer") - } - return smtpCredentials{ - host: values[SecretSMTPHost], - port: port, - user: values[SecretSMTPUser], - password: values[SecretSMTPPassword], - }, nil -} - -func (sender *smtpSender) querySecret(ctx context.Context, key string) (string, error) { - const retries = 3 - var err error - for i := 0; i < retries; i++ { - var val []byte - val, err := gcpsecret.LatestGcpSecret(ctx, sender.projectName, key) - if err == nil { - return string(val), nil - } - } - return "", fmt.Errorf("failed to query %v: %w", key, err) -} diff --git a/syz-cluster/pkg/reporter/api_test.go b/syz-cluster/pkg/reporter/api_test.go index dbc2517d491c..bf9820a89a53 100644 --- a/syz-cluster/pkg/reporter/api_test.go +++ b/syz-cluster/pkg/reporter/api_test.go @@ -150,10 +150,10 @@ func TestReplyReporting(t *testing.T) { // Direct reply to the report. resp, err := reportClient.RecordReply(ctx, &api.RecordReplyReq{ - MessageID: "direct-reply-id", - InReplyTo: reportMessageID, - Reporter: api.LKMLReporter, - Time: time.Now(), + MessageID: "direct-reply-id", + RootMessageID: reportMessageID, + Reporter: api.LKMLReporter, + Time: time.Now(), }) assert.NoError(t, err) assert.Equal(t, &api.RecordReplyResp{ @@ -161,34 +161,11 @@ func TestReplyReporting(t *testing.T) { ReportID: reportID, }, resp) - // Reply to the reply. 
- replyToReply := &api.RecordReplyReq{ - MessageID: "reply-to-reply-id", - InReplyTo: "direct-reply-id", - Reporter: api.LKMLReporter, - Time: time.Now(), - } - resp, err = reportClient.RecordReply(ctx, replyToReply) - assert.NoError(t, err) - assert.Equal(t, &api.RecordReplyResp{ - New: true, - ReportID: reportID, - }, resp) - - t.Run("dup-report", func(t *testing.T) { - resp, err := reportClient.RecordReply(ctx, replyToReply) - assert.NoError(t, err) - assert.Equal(t, &api.RecordReplyResp{ - New: false, - ReportID: reportID, - }, resp) - }) - t.Run("unknown-message", func(t *testing.T) { resp, err := reportClient.RecordReply(ctx, &api.RecordReplyReq{ - MessageID: "whatever", - InReplyTo: "unknown-id", - Reporter: api.LKMLReporter, + MessageID: "whatever", + RootMessageID: "unknown-id", + Reporter: api.LKMLReporter, }) assert.NoError(t, err) assert.Equal(t, &api.RecordReplyResp{ diff --git a/syz-cluster/pkg/service/discussion.go b/syz-cluster/pkg/service/discussion.go index 399dafa103a2..3890c1cd98d1 100644 --- a/syz-cluster/pkg/service/discussion.go +++ b/syz-cluster/pkg/service/discussion.go @@ -78,48 +78,14 @@ func (d *DiscussionService) identifyReport(ctx context.Context, req *api.RecordR if err != nil { return "", fmt.Errorf("failed to query the report: %w", err) } else if report != nil { - return d.findRootReportID(ctx, report.ID) + return report.ID, nil } return "", nil } - // Now try to find a matching reply. - reportID, err := d.reportReplyRepo.FindParentReportID(ctx, req.Reporter, req.InReplyTo) + // Now try to find a matching reply using RootMessageID. + reportID, err := d.reportReplyRepo.FindParentReportID(ctx, req.Reporter, req.RootMessageID) if err != nil { return "", fmt.Errorf("search among the replies failed: %w", err) } - if reportID != "" { - return d.findRootReportID(ctx, reportID) - } - return "", nil -} - -// Job results are reported with a separate reportID, and normally we are not -// accepting commands for them. 
findRootReportID follows the chain of reports -// until it finds the original bug report. -func (d *DiscussionService) findRootReportID(ctx context.Context, reportID string) (string, error) { - for { - report, err := d.reportRepo.GetByID(ctx, reportID) - if err != nil { - return "", fmt.Errorf("failed to get report %s: %w", reportID, err) - } - if report == nil { - return "", nil - } - session, err := d.sessionRepo.GetByID(ctx, report.SessionID) - if err != nil { - return "", fmt.Errorf("failed to get session %s: %w", report.SessionID, err) - } - if session == nil || !session.JobID.Valid { - break - } - job, err := d.jobRepo.GetByID(ctx, session.JobID.StringVal) - if err != nil { - return "", fmt.Errorf("failed to get job %s: %w", session.JobID.StringVal, err) - } - if job == nil { - break - } - reportID = job.ReportID - } return reportID, nil } diff --git a/syz-cluster/tools/send-test-email/main.go b/syz-cluster/tools/send-test-email/main.go index 9c489636a206..2143b7ab6bfc 100644 --- a/syz-cluster/tools/send-test-email/main.go +++ b/syz-cluster/tools/send-test-email/main.go @@ -6,6 +6,7 @@ package main import ( "context" + "github.com/google/syzkaller/pkg/email/sender" "github.com/google/syzkaller/syz-cluster/pkg/app" "github.com/google/syzkaller/syz-cluster/pkg/emailclient" ) @@ -20,11 +21,11 @@ func main() { if emailConfig == nil { app.Fatalf("reporting is not configured: %v", err) } - sender, err := emailclient.MakeSender(ctx, emailConfig) + emailSender, err := emailclient.MakeSender(ctx, emailConfig) if err != nil { app.Fatalf("failed to create a sender: %s", err) } - sender(ctx, &emailclient.Email{ + emailSender(ctx, &sender.Email{ Subject: "test email subject", To: []string{emailConfig.ModerationList}, Body: []byte("an test email sent from syz-cluster"), diff --git a/tools/syz-lore/query_lkml.go b/tools/syz-lore/query_lkml.go index df04953d353e..485485a399a1 100644 --- a/tools/syz-lore/query_lkml.go +++ b/tools/syz-lore/query_lkml.go @@ -132,6 +132,8 @@ 
func processArchives(paths, emails, domains []string) []*lore.Thread { skipped.Add(1) continue } + msg.Body = "" + msg.Patch = "" mu.Lock() repoEmails = append(repoEmails, msg) mu.Unlock()
Decision History:
Time User DecisionSource
{{formatTime .Date}} {{.User}} {{.Correct}}{{.Source}}