diff --git a/configs/config.hcl b/configs/config.hcl
index 1c343814e..c28515ba0 100644
--- a/configs/config.hcl
+++ b/configs/config.hcl
@@ -90,6 +90,14 @@ email {
from_address = "hermes@yourorganization.com"
}
+// feature_flags contains all available feature flags.
+feature_flags {
+ // api_v2 enables v2 of the API.
+ flag "api_v2" {
+ enabled = false
+ }
+}
+
// google_workspace configures Hermes to work with Google Workspace.
google_workspace {
// create_doc_shortcuts enables creating a shortcut in the shortcuts_folder
diff --git a/internal/api/documents.go b/internal/api/documents.go
index 991da736c..302af258d 100644
--- a/internal/api/documents.go
+++ b/internal/api/documents.go
@@ -717,7 +717,7 @@ Hermes
// Summary.
if req.Summary != nil {
- model.Summary = *req.Summary
+ model.Summary = req.Summary
}
// Title.
@@ -739,7 +739,11 @@ Hermes
}
w.WriteHeader(http.StatusOK)
- l.Info("patched document", "doc_id", docID)
+ l.Info("patched document",
+ "method", r.Method,
+ "path", r.URL.Path,
+ "doc_id", docID,
+ )
// Compare Algolia and database documents to find data inconsistencies.
// Get document object from Algolia.
diff --git a/internal/api/drafts.go b/internal/api/drafts.go
index 1c5ac4670..bf2140c7d 100644
--- a/internal/api/drafts.go
+++ b/internal/api/drafts.go
@@ -251,7 +251,7 @@ func DraftsHandler(
Name: req.Product,
},
Status: models.WIPDocumentStatus,
- Summary: req.Summary,
+ Summary: &req.Summary,
Title: req.Title,
}
if err := model.Create(db); err != nil {
@@ -1104,7 +1104,7 @@ func DraftsDocumentHandler(
// Summary.
if req.Summary != nil {
doc.Summary = *req.Summary
- model.Summary = *req.Summary
+ model.Summary = req.Summary
}
// Title.
@@ -1176,7 +1176,11 @@ func DraftsDocumentHandler(
fmt.Sprintf("[%s] %s", doc.DocNumber, doc.Title))
w.WriteHeader(http.StatusOK)
- l.Info("patched draft document", "doc_id", docId)
+ l.Info("patched draft document",
+ "method", r.Method,
+ "path", r.URL.Path,
+ "doc_id", docId,
+ )
// Compare Algolia and database documents to find data inconsistencies.
// Get document object from Algolia.
diff --git a/internal/api/helpers.go b/internal/api/helpers.go
index dd0d0fa68..119cc03bd 100644
--- a/internal/api/helpers.go
+++ b/internal/api/helpers.go
@@ -14,6 +14,7 @@ import (
"github.com/hashicorp/go-hclog"
"github.com/hashicorp/go-multierror"
"github.com/iancoleman/strcase"
+ "github.com/stretchr/testify/assert"
)
// contains returns true if a string is present in a slice of strings.
@@ -115,6 +116,12 @@ func respondError(
http.Error(w, userErrMsg, httpCode)
}
+// fakeT fulfills the assert.TestingT interface so we can use
+// assert.ElementsMatch.
+type fakeT struct{}
+
+func (t fakeT) Errorf(string, ...interface{}) {}
+
// compareAlgoliaAndDatabaseDocument compares data for a document stored in
// Algolia and the database to determine any inconsistencies, which are returned
// back as a (multierror) error.
@@ -224,7 +231,7 @@ func compareAlgoliaAndDatabaseDocument(
dbApprovedBy = append(dbApprovedBy, r.User.EmailAddress)
}
}
- if !reflect.DeepEqual(algoApprovedBy, dbApprovedBy) {
+ if !assert.ElementsMatch(fakeT{}, algoApprovedBy, dbApprovedBy) {
result = multierror.Append(result,
fmt.Errorf(
"approvedBy not equal, algolia=%v, db=%v",
@@ -242,7 +249,7 @@ func compareAlgoliaAndDatabaseDocument(
for _, a := range dbDoc.Approvers {
dbApprovers = append(dbApprovers, a.EmailAddress)
}
- if !reflect.DeepEqual(algoApprovers, dbApprovers) {
+ if !assert.ElementsMatch(fakeT{}, algoApprovers, dbApprovers) {
result = multierror.Append(result,
fmt.Errorf(
"approvers not equal, algolia=%v, db=%v",
@@ -263,7 +270,7 @@ func compareAlgoliaAndDatabaseDocument(
dbChangesRequestedBy = append(dbChangesRequestedBy, r.User.EmailAddress)
}
}
- if !reflect.DeepEqual(algoChangesRequestedBy, dbChangesRequestedBy) {
+ if !assert.ElementsMatch(fakeT{}, algoChangesRequestedBy, dbChangesRequestedBy) {
result = multierror.Append(result,
fmt.Errorf(
"changesRequestedBy not equal, algolia=%v, db=%v",
@@ -281,7 +288,7 @@ func compareAlgoliaAndDatabaseDocument(
for _, c := range dbDoc.Contributors {
dbContributors = append(dbContributors, c.EmailAddress)
}
- if !reflect.DeepEqual(algoContributors, dbContributors) {
+ if !assert.ElementsMatch(fakeT{}, algoContributors, dbContributors) {
result = multierror.Append(result,
fmt.Errorf(
"contributors not equal, algolia=%v, db=%v",
@@ -353,7 +360,7 @@ func compareAlgoliaAndDatabaseDocument(
)
}
- if !reflect.DeepEqual(algoCFVal, dbCFVal) {
+ if !assert.ElementsMatch(fakeT{}, algoCFVal, dbCFVal) {
result = multierror.Append(result,
fmt.Errorf(
"custom field %s not equal, algolia=%v, db=%v",
@@ -379,8 +386,24 @@ func compareAlgoliaAndDatabaseDocument(
"doc type %q not found", algoDocType))
}
- // Compare file revisions.
- // TODO: need to store this in the database first.
+ // Compare fileRevisions.
+ algoFileRevisions, err := getMapStringStringValue(algoDoc, "fileRevisions")
+ if err != nil {
+ result = multierror.Append(
+ result, fmt.Errorf("error getting fileRevisions value: %w", err))
+ } else {
+ dbFileRevisions := make(map[string]string)
+ for _, fr := range dbDoc.FileRevisions {
+ dbFileRevisions[fr.GoogleDriveFileRevisionID] = fr.Name
+ }
+ if !reflect.DeepEqual(algoFileRevisions, dbFileRevisions) {
+ result = multierror.Append(result,
+ fmt.Errorf(
+ "fileRevisions not equal, algolia=%v, db=%v",
+ algoFileRevisions, dbFileRevisions),
+ )
+ }
+ }
// Compare modifiedTime.
algoModifiedTime, err := getInt64Value(algoDoc, "modifiedTime")
@@ -473,7 +496,7 @@ func compareAlgoliaAndDatabaseDocument(
result, fmt.Errorf("error getting summary value: %w", err))
} else {
dbSummary := dbDoc.Summary
- if algoSummary != dbSummary {
+		if (dbSummary == nil && algoSummary != "") || (dbSummary != nil && algoSummary != *dbSummary) {
result = multierror.Append(result,
fmt.Errorf(
"summary not equal, algolia=%v, db=%v",
@@ -517,6 +540,30 @@ func getInt64Value(in map[string]any, key string) (int64, error) {
return result, nil
}
+func getMapStringStringValue(in map[string]any, key string) (
+ map[string]string, error,
+) {
+ result := make(map[string]string)
+
+ if v, ok := in[key]; ok {
+		if m, ok := v.(map[string]any); ok {
+			for vk, vv := range m {
+ if vv, ok := vv.(string); ok {
+ result[vk] = vv
+ } else {
+ return nil, fmt.Errorf(
+ "invalid type: map value element is not a string")
+ }
+ }
+ return result, nil
+ } else {
+ return nil, fmt.Errorf("invalid type: value is not a map")
+ }
+ }
+
+ return result, nil
+}
+
func getStringValue(in map[string]any, key string) (string, error) {
var result string
diff --git a/internal/api/helpers_test.go b/internal/api/helpers_test.go
index 66f032bf5..5f562e467 100644
--- a/internal/api/helpers_test.go
+++ b/internal/api/helpers_test.go
@@ -138,6 +138,10 @@ func TestCompareAlgoliaAndDatabaseDocument(t *testing.T) {
"createdTime": float64(time.Date(
2023, time.April, 5, 1, 0, 0, 0, time.UTC).Unix()),
"currentVersion": "1.2.3",
+ "fileRevisions": map[string]any{
+ "1": "FileRevision1",
+ "2": "FileRevision2",
+ },
"modifiedTime": float64(time.Date(
2023, time.April, 5, 23, 0, 0, 0, time.UTC).Unix()),
"owners": []any{
@@ -204,10 +208,20 @@ func TestCompareAlgoliaAndDatabaseDocument(t *testing.T) {
2023, time.April, 5, 1, 0, 0, 0, time.UTC),
DocumentModifiedAt: time.Date(
2023, time.April, 5, 23, 0, 0, 0, time.UTC),
+ FileRevisions: []models.DocumentFileRevision{
+ {
+ GoogleDriveFileRevisionID: "1",
+ Name: "FileRevision1",
+ },
+ {
+ GoogleDriveFileRevisionID: "2",
+ Name: "FileRevision2",
+ },
+ },
Owner: &models.User{
EmailAddress: "owner1@hashicorp.com",
},
- Summary: "Summary1",
+ Summary: &[]string{"Summary1"}[0],
Status: models.InReviewDocumentStatus,
},
dbDocReviews: models.DocumentReviews{
@@ -270,6 +284,124 @@ func TestCompareAlgoliaAndDatabaseDocument(t *testing.T) {
},
},
+ "good with different order of slice and map fields": {
+ algoDoc: map[string]any{
+ "appCreated": true,
+ "approvedBy": []any{
+ "approver2@hashicorp.com",
+ "approver1@hashicorp.com",
+ },
+ "approvers": []any{
+ "approver2@hashicorp.com",
+ "approver1@hashicorp.com",
+ },
+ "changesRequestedBy": []any{
+ "changerequester2@hashicorp.com",
+ "changerequester1@hashicorp.com",
+ },
+ "contributors": []any{
+ "contributor2@hashicorp.com",
+ "contributor1@hashicorp.com",
+ },
+ "createdTime": float64(time.Date(
+ 2023, time.April, 5, 1, 0, 0, 0, time.UTC).Unix()),
+ "docNumber": "ABC-123",
+ "docType": "RFC",
+ "fileRevisions": map[string]any{
+ "2": "FileRevision2",
+ "1": "FileRevision1",
+ },
+ "modifiedTime": float64(time.Date(
+ 2023, time.April, 5, 23, 0, 0, 0, time.UTC).Unix()),
+ "owners": []any{"owner1@hashicorp.com"},
+ "product": "Product1",
+ "stakeholders": []any{
+ "stakeholder2@hashicorp.com",
+ "stakeholder1@hashicorp.com",
+ },
+ },
+ dbDoc: models.Document{
+ DocumentNumber: 123,
+ DocumentType: models.DocumentType{
+ Name: "RFC",
+ },
+ Product: models.Product{
+ Name: "Product1",
+ Abbreviation: "ABC",
+ },
+ Approvers: []*models.User{
+ {
+ EmailAddress: "approver1@hashicorp.com",
+ },
+ {
+ EmailAddress: "approver2@hashicorp.com",
+ },
+ },
+ Contributors: []*models.User{
+ {
+ EmailAddress: "contributor1@hashicorp.com",
+ },
+ {
+ EmailAddress: "contributor2@hashicorp.com",
+ },
+ },
+ CustomFields: []*models.DocumentCustomField{
+ {
+ DocumentTypeCustomField: models.DocumentTypeCustomField{
+ Name: "Stakeholders",
+ DocumentType: models.DocumentType{
+ Name: "RFC",
+ },
+ },
+ Value: `["stakeholder1@hashicorp.com","stakeholder2@hashicorp.com"]`,
+ },
+ },
+ DocumentCreatedAt: time.Date(
+ 2023, time.April, 5, 1, 0, 0, 0, time.UTC),
+ DocumentModifiedAt: time.Date(
+ 2023, time.April, 5, 23, 0, 0, 0, time.UTC),
+ FileRevisions: []models.DocumentFileRevision{
+ {
+ GoogleDriveFileRevisionID: "1",
+ Name: "FileRevision1",
+ },
+ {
+ GoogleDriveFileRevisionID: "2",
+ Name: "FileRevision2",
+ },
+ },
+ Owner: &models.User{
+ EmailAddress: "owner1@hashicorp.com",
+ },
+ },
+ dbDocReviews: models.DocumentReviews{
+ {
+ Status: models.ApprovedDocumentReviewStatus,
+ User: models.User{
+ EmailAddress: "approver1@hashicorp.com",
+ },
+ },
+ {
+ Status: models.ApprovedDocumentReviewStatus,
+ User: models.User{
+ EmailAddress: "approver2@hashicorp.com",
+ },
+ },
+ {
+ Status: models.ChangesRequestedDocumentReviewStatus,
+ User: models.User{
+ EmailAddress: "changerequester1@hashicorp.com",
+ },
+ },
+ {
+ Status: models.ChangesRequestedDocumentReviewStatus,
+ User: models.User{
+ EmailAddress: "changerequester2@hashicorp.com",
+ },
+ },
+ },
+ },
+
"bad objectID": {
algoDoc: map[string]any{
"objectID": "GoogleFileID1",
@@ -839,7 +971,7 @@ func TestCompareAlgoliaAndDatabaseDocument(t *testing.T) {
Owner: &models.User{
EmailAddress: "owner1@hashicorp.com",
},
- Summary: "BadSummary1",
+ Summary: &[]string{"BadSummary1"}[0],
},
shouldErr: true,
errContains: "summary not equal",
diff --git a/internal/api/v2/analytics.go b/internal/api/v2/analytics.go
new file mode 100644
index 000000000..9d3d7546b
--- /dev/null
+++ b/internal/api/v2/analytics.go
@@ -0,0 +1,65 @@
+package api
+
+import (
+ "encoding/json"
+ "net/http"
+
+ "github.com/hashicorp-forge/hermes/internal/server"
+)
+
+type AnalyticsRequest struct {
+ DocumentID string `json:"document_id"`
+ ProductName string `json:"product_name"`
+}
+
+type AnalyticsResponse struct {
+ Recorded bool `json:"recorded"`
+}
+
+// AnalyticsHandler records user analytics events such as document views.
+func AnalyticsHandler(srv server.Server) http.Handler {
+ return http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
+ // Only allow POST requests.
+ if r.Method != http.MethodPost {
+ w.WriteHeader(http.StatusMethodNotAllowed)
+ return
+ }
+
+ decoder := json.NewDecoder(r.Body)
+ var req AnalyticsRequest
+ if err := decoder.Decode(&req); err != nil {
+ srv.Logger.Error("error decoding analytics request", "error", err)
+ http.Error(w, "Error decoding analytics request",
+ http.StatusBadRequest)
+ return
+ }
+
+ response := &AnalyticsResponse{
+ Recorded: false,
+ }
+
+ // Check if document id is set, product name is optional
+ if req.DocumentID != "" {
+ srv.Logger.Info(
+ "document view event",
+ "method", r.Method,
+ "path", r.URL.Path,
+ "document_id", req.DocumentID,
+ "product_name", req.ProductName,
+ )
+ response.Recorded = true
+ }
+
+ w.Header().Set("Content-Type", "application/json")
+ w.WriteHeader(http.StatusOK)
+
+ enc := json.NewEncoder(w)
+ err := enc.Encode(response)
+ if err != nil {
+ srv.Logger.Error("error encoding analytics response", "error", err)
+ http.Error(w, "Error encoding analytics response",
+ http.StatusInternalServerError)
+ return
+ }
+ })
+}
diff --git a/internal/api/v2/approvals.go b/internal/api/v2/approvals.go
new file mode 100644
index 000000000..a505c5046
--- /dev/null
+++ b/internal/api/v2/approvals.go
@@ -0,0 +1,651 @@
+package api
+
+import (
+ "fmt"
+ "net/http"
+
+ "github.com/hashicorp-forge/hermes/internal/helpers"
+ "github.com/hashicorp-forge/hermes/internal/server"
+ "github.com/hashicorp-forge/hermes/pkg/document"
+ hcd "github.com/hashicorp-forge/hermes/pkg/hashicorpdocs"
+ "github.com/hashicorp-forge/hermes/pkg/models"
+ "gorm.io/gorm"
+)
+
+func ApprovalsHandler(srv server.Server) http.Handler {
+ return http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
+ switch r.Method {
+ case "DELETE":
+ // Validate request.
+ docID, err := parseResourceIDFromURL(r.URL.Path, "approvals")
+ if err != nil {
+ srv.Logger.Error("error parsing document ID",
+ "error", err,
+ "method", r.Method,
+ "path", r.URL.Path,
+ )
+ http.Error(w, "Document ID not found", http.StatusNotFound)
+ return
+ }
+
+ // Check if document is locked.
+ locked, err := hcd.IsLocked(docID, srv.DB, srv.GWService, srv.Logger)
+ if err != nil {
+ srv.Logger.Error("error checking document locked status",
+ "error", err,
+ "path", r.URL.Path,
+ "method", r.Method,
+ "doc_id", docID,
+ )
+ http.Error(w, "Error getting document status", http.StatusNotFound)
+ return
+ }
+ // Don't continue if document is locked.
+ if locked {
+ http.Error(w, "Document is locked", http.StatusLocked)
+ return
+ }
+
+ // Get document from database.
+ model := models.Document{
+ GoogleFileID: docID,
+ }
+ if err := model.Get(srv.DB); err != nil {
+ srv.Logger.Error("error getting document from database",
+ "error", err,
+ "path", r.URL.Path,
+ "method", r.Method,
+ "doc_id", docID,
+ )
+ http.Error(w, "Error accessing document",
+ http.StatusInternalServerError)
+ return
+ }
+
+ // Get reviews for the document.
+ var reviews models.DocumentReviews
+ if err := reviews.Find(srv.DB, models.DocumentReview{
+ Document: models.Document{
+ GoogleFileID: docID,
+ },
+ }); err != nil {
+				srv.Logger.Error("error getting reviews for document",
+					"error", err,
+					"method", r.Method,
+					"path", r.URL.Path,
+					"doc_id", docID)
+				http.Error(w, "Error accessing document", http.StatusInternalServerError)
+				return
+ }
+
+ // Convert database model to a document.
+ doc, err := document.NewFromDatabaseModel(
+ model, reviews)
+ if err != nil {
+ srv.Logger.Error("error converting database model to document type",
+ "error", err,
+ "method", r.Method,
+ "path", r.URL.Path,
+ "doc_id", docID,
+ )
+ http.Error(w, "Error accessing document",
+ http.StatusInternalServerError)
+ return
+ }
+
+ // Authorize request.
+ userEmail := r.Context().Value("userEmail").(string)
+			if doc.Status != "In-Review" && doc.Status != "In Review" {
+ http.Error(w,
+ "Can only request changes of documents in the \"In-Review\" status",
+ http.StatusBadRequest)
+ return
+ }
+ if !contains(doc.Approvers, userEmail) {
+ http.Error(w, "Not authorized as a document approver",
+ http.StatusUnauthorized)
+ return
+ }
+ if contains(doc.ChangesRequestedBy, userEmail) {
+ http.Error(w, "Document already has changes requested by user",
+ http.StatusBadRequest)
+ return
+ }
+
+ // Add email to slice of users who have requested changes of the document.
+ doc.ChangesRequestedBy = append(doc.ChangesRequestedBy, userEmail)
+
+ // If user had previously approved, delete email from slice of users who
+ // have approved the document.
+ var newApprovedBy []string
+ for _, a := range doc.ApprovedBy {
+ if a != userEmail {
+ newApprovedBy = append(newApprovedBy, a)
+ }
+ }
+ doc.ApprovedBy = newApprovedBy
+
+ // Get latest Google Drive file revision.
+ latestRev, err := srv.GWService.GetLatestRevision(docID)
+ if err != nil {
+ srv.Logger.Error("error getting latest revision",
+ "error", err,
+ "method", r.Method,
+ "path", r.URL.Path,
+ "doc_id", docID)
+ http.Error(w, "Error requesting changes of document",
+ http.StatusInternalServerError)
+ return
+ }
+
+ // Mark latest revision to be kept forever.
+ _, err = srv.GWService.KeepRevisionForever(docID, latestRev.Id)
+ if err != nil {
+ srv.Logger.Error("error marking revision to keep forever",
+ "error", err,
+ "method", r.Method,
+ "path", r.URL.Path,
+ "doc_id", docID,
+ "rev_id", latestRev.Id)
+ http.Error(w, "Error updating document status",
+ http.StatusInternalServerError)
+ return
+ }
+
+ // Record file revision in the Algolia document object.
+ revisionName := fmt.Sprintf("Changes requested by %s", userEmail)
+ doc.SetFileRevision(latestRev.Id, revisionName)
+
+ // Create file revision in the database.
+ fr := models.DocumentFileRevision{
+ Document: models.Document{
+ GoogleFileID: docID,
+ },
+ GoogleDriveFileRevisionID: latestRev.Id,
+ Name: revisionName,
+ }
+ if err := fr.Create(srv.DB); err != nil {
+ srv.Logger.Error("error creating document file revision",
+ "error", err,
+ "method", r.Method,
+ "path", r.URL.Path,
+ "doc_id", docID,
+ "rev_id", latestRev.Id)
+ http.Error(w, "Error updating document status",
+ http.StatusInternalServerError)
+ return
+ }
+
+ // Update document reviews in the database.
+ if err := updateDocumentReviewsInDatabase(*doc, srv.DB); err != nil {
+ srv.Logger.Error("error updating document reviews in the database",
+ "error", err,
+ "doc_id", docID,
+ "method", r.Method,
+ "path", r.URL.Path,
+ )
+ http.Error(w, "Error updating document status",
+ http.StatusInternalServerError)
+ return
+ }
+
+ // Replace the doc header.
+ if err := doc.ReplaceHeader(
+ srv.Config.BaseURL, false, srv.GWService,
+ ); err != nil {
+ srv.Logger.Error("error replacing doc header",
+ "error", err,
+ "doc_id", docID,
+ "method", r.Method,
+ "path", r.URL.Path,
+ )
+ http.Error(w, "Error updating document status",
+ http.StatusInternalServerError)
+ return
+ }
+
+ // Write response.
+ w.WriteHeader(http.StatusOK)
+
+ // Log success.
+ srv.Logger.Info("changes requested successfully",
+ "doc_id", docID,
+ "method", r.Method,
+ "path", r.URL.Path,
+ )
+
+ // Request post-processing.
+ go func() {
+ // Convert document to Algolia object.
+ docObj, err := doc.ToAlgoliaObject(true)
+ if err != nil {
+ srv.Logger.Error("error converting document to Algolia object",
+ "error", err,
+ "method", r.Method,
+ "path", r.URL.Path,
+ "doc_id", docID,
+ )
+					// The 200 OK response was already written before this
+					// post-processing goroutine started; only log and abort.
+					return
+ }
+
+ // Save new modified doc object in Algolia.
+ res, err := srv.AlgoWrite.Docs.SaveObject(docObj)
+ if err != nil {
+ srv.Logger.Error("error saving approved document in Algolia",
+ "error", err,
+ "method", r.Method,
+ "path", r.URL.Path,
+ "doc_id", docID)
+					// The 200 OK response was already written before this
+					// post-processing goroutine started; only log and abort.
+					return
+ }
+ err = res.Wait()
+ if err != nil {
+ srv.Logger.Error("error saving patched document in Algolia",
+ "error", err,
+ "method", r.Method,
+ "path", r.URL.Path,
+ "doc_id", docID)
+					// The 200 OK response was already written before this
+					// post-processing goroutine started; only log and abort.
+					return
+ }
+
+ // Compare Algolia and database documents to find data inconsistencies.
+ // Get document object from Algolia.
+ var algoDoc map[string]any
+ err = srv.AlgoSearch.Docs.GetObject(docID, &algoDoc)
+ if err != nil {
+ srv.Logger.Error("error getting Algolia object for data comparison",
+ "error", err,
+ "method", r.Method,
+ "path", r.URL.Path,
+ "doc_id", docID,
+ )
+ return
+ }
+ // Get document from database.
+ dbDoc := models.Document{
+ GoogleFileID: docID,
+ }
+ if err := dbDoc.Get(srv.DB); err != nil {
+ srv.Logger.Error(
+ "error getting document from database for data comparison",
+ "error", err,
+ "path", r.URL.Path,
+ "method", r.Method,
+ "doc_id", docID,
+ )
+ return
+ }
+ // Get all reviews for the document.
+ var reviews models.DocumentReviews
+ if err := reviews.Find(srv.DB, models.DocumentReview{
+ Document: models.Document{
+ GoogleFileID: docID,
+ },
+ }); err != nil {
+ srv.Logger.Error(
+ "error getting all reviews for document for data comparison",
+ "error", err,
+ "method", r.Method,
+ "path", r.URL.Path,
+ "doc_id", docID,
+ )
+ return
+ }
+ if err := compareAlgoliaAndDatabaseDocument(
+ algoDoc, dbDoc, reviews, srv.Config.DocumentTypes.DocumentType,
+ ); err != nil {
+ srv.Logger.Warn(
+ "inconsistencies detected between Algolia and database docs",
+ "error", err,
+ "method", r.Method,
+ "path", r.URL.Path,
+ "doc_id", docID,
+ )
+ }
+ }()
+
+ case "POST":
+ // Validate request.
+ docID, err := parseResourceIDFromURL(r.URL.Path, "approvals")
+ if err != nil {
+ srv.Logger.Error("error parsing document ID from approvals path",
+ "error", err,
+ "method", r.Method,
+ "path", r.URL.Path,
+ )
+ http.Error(w, "Document ID not found", http.StatusNotFound)
+ return
+ }
+
+ // Check if document is locked.
+ locked, err := hcd.IsLocked(docID, srv.DB, srv.GWService, srv.Logger)
+ if err != nil {
+ srv.Logger.Error("error checking document locked status",
+ "error", err,
+ "path", r.URL.Path,
+ "method", r.Method,
+ "doc_id", docID,
+ )
+ http.Error(w, "Error getting document status", http.StatusNotFound)
+ return
+ }
+ // Don't continue if document is locked.
+ if locked {
+ http.Error(w, "Document is locked", http.StatusLocked)
+ return
+ }
+
+ // Get document from database.
+ model := models.Document{
+ GoogleFileID: docID,
+ }
+ if err := model.Get(srv.DB); err != nil {
+ srv.Logger.Error("error getting document from database",
+ "error", err,
+ "path", r.URL.Path,
+ "method", r.Method,
+ "doc_id", docID,
+ )
+ http.Error(w, "Error accessing document",
+ http.StatusInternalServerError)
+ return
+ }
+
+ // Get reviews for the document.
+ var reviews models.DocumentReviews
+ if err := reviews.Find(srv.DB, models.DocumentReview{
+ Document: models.Document{
+ GoogleFileID: docID,
+ },
+ }); err != nil {
+				srv.Logger.Error("error getting reviews for document",
+					"error", err,
+					"method", r.Method,
+					"path", r.URL.Path,
+					"doc_id", docID)
+				http.Error(w, "Error accessing document", http.StatusInternalServerError)
+				return
+ }
+
+ // Convert database model to a document.
+ doc, err := document.NewFromDatabaseModel(
+ model, reviews)
+ if err != nil {
+ srv.Logger.Error("error converting database model to document type",
+ "error", err,
+ "method", r.Method,
+ "path", r.URL.Path,
+ "doc_id", docID,
+ )
+ http.Error(w, "Error accessing document",
+ http.StatusInternalServerError)
+ return
+ }
+
+ // Authorize request.
+ userEmail := r.Context().Value("userEmail").(string)
+ if doc.Status != "In-Review" && doc.Status != "In Review" {
+ http.Error(w,
+ "Only documents in the \"In-Review\" status can be approved",
+ http.StatusBadRequest)
+ return
+ }
+ if !contains(doc.Approvers, userEmail) {
+ http.Error(w,
+ "Not authorized as a document approver",
+ http.StatusUnauthorized)
+ return
+ }
+ if contains(doc.ApprovedBy, userEmail) {
+ http.Error(w,
+ "Document already approved by user",
+ http.StatusBadRequest)
+ return
+ }
+
+ // Add email to slice of users who have approved the document.
+ doc.ApprovedBy = append(doc.ApprovedBy, userEmail)
+
+ // If the user had previously requested changes, delete email from slice
+ // of users who have requested changes of the document.
+ var newChangesRequestedBy []string
+ for _, a := range doc.ChangesRequestedBy {
+ if a != userEmail {
+ newChangesRequestedBy = append(newChangesRequestedBy, a)
+ }
+ }
+ doc.ChangesRequestedBy = newChangesRequestedBy
+
+ // Get latest Google Drive file revision.
+ latestRev, err := srv.GWService.GetLatestRevision(docID)
+ if err != nil {
+ srv.Logger.Error("error getting latest revision",
+ "error", err,
+ "method", r.Method,
+ "path", r.URL.Path,
+ "doc_id", docID)
+ http.Error(w, "Error creating review",
+ http.StatusInternalServerError)
+ return
+ }
+
+ // Mark latest revision to be kept forever.
+ _, err = srv.GWService.KeepRevisionForever(docID, latestRev.Id)
+ if err != nil {
+ srv.Logger.Error("error marking revision to keep forever",
+ "error", err,
+ "method", r.Method,
+ "path", r.URL.Path,
+ "doc_id", docID,
+ "rev_id", latestRev.Id)
+ http.Error(w, "Error creating review",
+ http.StatusInternalServerError)
+ return
+ }
+
+ // Record file revision in the Algolia document object.
+ revisionName := fmt.Sprintf("Approved by %s", userEmail)
+ doc.SetFileRevision(latestRev.Id, revisionName)
+
+ // Create file revision in the database.
+ fr := models.DocumentFileRevision{
+ Document: models.Document{
+ GoogleFileID: docID,
+ },
+ GoogleDriveFileRevisionID: latestRev.Id,
+ Name: revisionName,
+ }
+ if err := fr.Create(srv.DB); err != nil {
+ srv.Logger.Error("error creating document file revision",
+ "error", err,
+ "method", r.Method,
+ "path", r.URL.Path,
+ "doc_id", docID,
+ "rev_id", latestRev.Id)
+ http.Error(w, "Error updating document status",
+ http.StatusInternalServerError)
+ return
+ }
+
+ // Update document reviews in the database.
+ if err := updateDocumentReviewsInDatabase(*doc, srv.DB); err != nil {
+ srv.Logger.Error("error updating document reviews in the database",
+ "error", err,
+ "doc_id", docID,
+ "method", r.Method,
+ "path", r.URL.Path,
+ )
+ http.Error(w, "Error approving document",
+ http.StatusInternalServerError)
+ return
+ }
+
+ // Replace the doc header.
+ err = doc.ReplaceHeader(srv.Config.BaseURL, false, srv.GWService)
+ if err != nil {
+ srv.Logger.Error("error replacing doc header",
+ "error", err,
+ "doc_id", docID,
+ "method", r.Method,
+ "path", r.URL.Path,
+ )
+ http.Error(w, "Error approving document",
+ http.StatusInternalServerError)
+ return
+ }
+
+ // Write response.
+ w.WriteHeader(http.StatusOK)
+
+ // Log success.
+ srv.Logger.Info("approval created",
+ "doc_id", docID,
+ "method", r.Method,
+ "path", r.URL.Path,
+ )
+
+ // Request post-processing.
+ go func() {
+ // Convert document to Algolia object.
+ docObj, err := doc.ToAlgoliaObject(true)
+ if err != nil {
+ srv.Logger.Error("error converting document to Algolia object",
+ "error", err,
+ "method", r.Method,
+ "path", r.URL.Path,
+ "doc_id", docID,
+ )
+					// The 200 OK response was already written before this
+					// post-processing goroutine started; only log and abort.
+					return
+ }
+
+ // Save new modified doc object in Algolia.
+ res, err := srv.AlgoWrite.Docs.SaveObject(docObj)
+ if err != nil {
+ srv.Logger.Error("error saving approved document in Algolia",
+ "error", err,
+ "method", r.Method,
+ "path", r.URL.Path,
+ "doc_id", docID)
+					// The 200 OK response was already written before this
+					// post-processing goroutine started; only log and abort.
+					return
+ }
+ err = res.Wait()
+ if err != nil {
+ srv.Logger.Error("error saving approved document in Algolia",
+ "error", err,
+ "method", r.Method,
+ "path", r.URL.Path,
+ "doc_id", docID)
+					// The 200 OK response was already written before this
+					// post-processing goroutine started; only log and abort.
+					return
+ }
+
+ // Compare Algolia and database documents to find data inconsistencies.
+ // Get document object from Algolia.
+ var algoDoc map[string]any
+ err = srv.AlgoSearch.Docs.GetObject(docID, &algoDoc)
+ if err != nil {
+ srv.Logger.Error("error getting Algolia object for data comparison",
+ "error", err,
+ "method", r.Method,
+ "path", r.URL.Path,
+ "doc_id", docID,
+ )
+ return
+ }
+ // Get document from database.
+ dbDoc := models.Document{
+ GoogleFileID: docID,
+ }
+ if err := dbDoc.Get(srv.DB); err != nil {
+ srv.Logger.Error(
+ "error getting document from database for data comparison",
+ "error", err,
+ "path", r.URL.Path,
+ "method", r.Method,
+ "doc_id", docID,
+ )
+ return
+ }
+ // Get all reviews for the document.
+ var reviews models.DocumentReviews
+ if err := reviews.Find(srv.DB, models.DocumentReview{
+ Document: models.Document{
+ GoogleFileID: docID,
+ },
+ }); err != nil {
+ srv.Logger.Error(
+ "error getting all reviews for document for data comparison",
+ "error", err,
+ "method", r.Method,
+ "path", r.URL.Path,
+ "doc_id", docID,
+ )
+ return
+ }
+ if err := compareAlgoliaAndDatabaseDocument(
+ algoDoc, dbDoc, reviews, srv.Config.DocumentTypes.DocumentType,
+ ); err != nil {
+ srv.Logger.Warn(
+ "inconsistencies detected between Algolia and database docs",
+ "error", err,
+ "method", r.Method,
+ "path", r.URL.Path,
+ "doc_id", docID,
+ )
+ }
+ }()
+
+ default:
+ w.WriteHeader(http.StatusMethodNotAllowed)
+ return
+ }
+ })
+}
+
+// updateDocumentReviewsInDatabase takes a document and updates the associated
+// document reviews in the database.
+func updateDocumentReviewsInDatabase(doc document.Document, db *gorm.DB) error {
+ var docReviews []models.DocumentReview
+ for _, a := range doc.Approvers {
+ u := models.User{
+ EmailAddress: a,
+ }
+ if helpers.StringSliceContains(doc.ApprovedBy, a) {
+ docReviews = append(docReviews, models.DocumentReview{
+ Document: models.Document{
+ GoogleFileID: doc.ObjectID,
+ },
+ User: u,
+ Status: models.ApprovedDocumentReviewStatus,
+ })
+ } else if helpers.StringSliceContains(doc.ChangesRequestedBy, a) {
+ docReviews = append(docReviews, models.DocumentReview{
+ Document: models.Document{
+ GoogleFileID: doc.ObjectID,
+ },
+ User: u,
+ Status: models.ChangesRequestedDocumentReviewStatus,
+ })
+ }
+ }
+
+ // Upsert document reviews in database.
+ for _, dr := range docReviews {
+ if err := dr.Update(db); err != nil {
+ return fmt.Errorf("error upserting document review: %w", err)
+ }
+ }
+
+ return nil
+}
diff --git a/internal/api/v2/document_types.go b/internal/api/v2/document_types.go
new file mode 100644
index 000000000..751bc30e0
--- /dev/null
+++ b/internal/api/v2/document_types.go
@@ -0,0 +1,33 @@
+package api
+
+import (
+ "encoding/json"
+ "net/http"
+
+ "github.com/hashicorp-forge/hermes/internal/server"
+)
+
+func DocumentTypesHandler(srv server.Server) http.Handler {
+ return http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
+ switch r.Method {
+ case "GET":
+ w.Header().Set("Content-Type", "application/json")
+
+ enc := json.NewEncoder(w)
+ err := enc.Encode(srv.Config.DocumentTypes.DocumentType)
+ if err != nil {
+ srv.Logger.Error("error encoding document types",
+ "error", err,
+ "method", r.Method,
+ "path", r.URL.Path)
+ http.Error(w, "{\"error\": \"Error getting document types\"}",
+ http.StatusInternalServerError)
+ return
+ }
+
+ default:
+ w.WriteHeader(http.StatusMethodNotAllowed)
+ return
+ }
+ })
+}
diff --git a/internal/api/v2/documents.go b/internal/api/v2/documents.go
new file mode 100644
index 000000000..392035d1f
--- /dev/null
+++ b/internal/api/v2/documents.go
@@ -0,0 +1,965 @@
+package api
+
+import (
+ "encoding/json"
+ "errors"
+ "fmt"
+ "net/http"
+ "reflect"
+ "regexp"
+ "time"
+
+ "github.com/hashicorp-forge/hermes/internal/server"
+ "github.com/hashicorp-forge/hermes/pkg/document"
+ hcd "github.com/hashicorp-forge/hermes/pkg/hashicorpdocs"
+ "github.com/hashicorp-forge/hermes/pkg/models"
+ "gorm.io/gorm"
+)
+
+// DocumentPatchRequest contains a subset of documents fields that are allowed
+// to be updated with a PATCH request. All fields are pointers so that an
+// absent field (nil) can be distinguished from an explicit zero value: only
+// non-nil fields are applied by the PATCH handler.
+type DocumentPatchRequest struct {
+ Approvers *[]string `json:"approvers,omitempty"`
+ Contributors *[]string `json:"contributors,omitempty"`
+ CustomFields *[]document.CustomField `json:"customFields,omitempty"`
+ Status *string `json:"status,omitempty"`
+ Summary *string `json:"summary,omitempty"`
+ // Tags []string `json:"tags,omitempty"`
+ Title *string `json:"title,omitempty"`
+}
+
+// documentSubcollectionRequestType identifies which subcollection (the URL
+// segment after the document ID), if any, a documents/drafts request targets.
+type documentSubcollectionRequestType int
+
+const (
+ // unspecifiedDocumentSubcollectionRequestType is the zero value, returned
+ // when the URL path could not be classified.
+ unspecifiedDocumentSubcollectionRequestType documentSubcollectionRequestType = iota
+ // noSubcollectionRequestType is a request for the document resource itself.
+ noSubcollectionRequestType
+ // relatedResourcesDocumentSubcollectionRequestType targets
+ // .../related-resources.
+ relatedResourcesDocumentSubcollectionRequestType
+ // shareableDocumentSubcollectionRequestType targets .../shareable
+ // (valid for drafts only).
+ shareableDocumentSubcollectionRequestType
+)
+
+// DocumentHandler handles API v2 requests for a single published document.
+// GET returns the document as an Algolia-shaped object (recording a view when
+// the Add-To-Recently-Viewed header is set); PATCH applies owner-only partial
+// updates. Related-resources subcollection requests are dispatched to
+// documentsResourceRelatedResourcesHandler.
+func DocumentHandler(srv server.Server) http.Handler {
+ return http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
+ // Parse document ID and request type from the URL path.
+ docID, reqType, err := parseDocumentsURLPath(
+ r.URL.Path, "documents")
+ if err != nil {
+ srv.Logger.Error("error parsing documents URL path",
+ "error", err,
+ "path", r.URL.Path,
+ "method", r.Method,
+ )
+ http.Error(w, "Bad request", http.StatusBadRequest)
+ return
+ }
+
+ // Get document from database.
+ model := models.Document{
+ GoogleFileID: docID,
+ }
+ if err := model.Get(srv.DB); err != nil {
+ srv.Logger.Error("error getting document draft from database",
+ "error", err,
+ "path", r.URL.Path,
+ "method", r.Method,
+ "doc_id", docID,
+ )
+ http.Error(w, "Error requesting document draft",
+ http.StatusInternalServerError)
+ return
+ }
+
+ // Get reviews for the document.
+ var reviews models.DocumentReviews
+ if err := reviews.Find(srv.DB, models.DocumentReview{
+ Document: models.Document{
+ GoogleFileID: docID,
+ },
+ }); err != nil {
+ srv.Logger.Error("error getting reviews for document",
+ "error", err,
+ "method", r.Method,
+ "path", r.URL.Path,
+ "doc_id", docID,
+ )
+ // Fix: previously returned without writing any response, which left
+ // the client with an empty 200.
+ http.Error(w, "Error requesting document",
+ http.StatusInternalServerError)
+ return
+ }
+
+ // Convert database model to a document.
+ doc, err := document.NewFromDatabaseModel(
+ model, reviews)
+ if err != nil {
+ srv.Logger.Error("error converting database model to document type",
+ "error", err,
+ "method", r.Method,
+ "path", r.URL.Path,
+ "doc_id", docID,
+ )
+ http.Error(w, "Error accessing draft document",
+ http.StatusInternalServerError)
+ return
+ }
+
+ // Pass request off to associated subcollection (part of the URL after the
+ // document ID) handler, if appropriate.
+ switch reqType {
+ case relatedResourcesDocumentSubcollectionRequestType:
+ documentsResourceRelatedResourcesHandler(
+ w, r, docID, *doc, srv.Config, srv.Logger, srv.AlgoSearch, srv.DB)
+ return
+ case shareableDocumentSubcollectionRequestType:
+ // Shareable is only valid for drafts, not published documents.
+ srv.Logger.Warn("invalid shareable request for documents collection",
+ "path", r.URL.Path,
+ "method", r.Method,
+ )
+ http.Error(w, "Bad request", http.StatusBadRequest)
+ return
+ }
+
+ switch r.Method {
+ case "GET":
+ now := time.Now()
+
+ // Get file from Google Drive so we can return the latest modified time.
+ file, err := srv.GWService.GetFile(docID)
+ if err != nil {
+ srv.Logger.Error("error getting document file from Google",
+ "error", err,
+ "path", r.URL.Path,
+ "method", r.Method,
+ "doc_id", docID,
+ )
+ http.Error(w,
+ "Error requesting document", http.StatusInternalServerError)
+ return
+ }
+
+ // Parse and set modified time.
+ modifiedTime, err := time.Parse(time.RFC3339Nano, file.ModifiedTime)
+ if err != nil {
+ srv.Logger.Error("error parsing modified time",
+ "error", err,
+ "path", r.URL.Path,
+ "method", r.Method,
+ "doc_id", docID,
+ )
+ http.Error(w,
+ "Error requesting document", http.StatusInternalServerError)
+ return
+ }
+ doc.ModifiedTime = modifiedTime.Unix()
+
+ // Get owner photo by searching Google Workspace directory. A failure
+ // here is non-fatal: the document is returned without a photo.
+ if len(doc.Owners) > 0 {
+ people, err := srv.GWService.SearchPeople(doc.Owners[0], "photos")
+ if err != nil {
+ srv.Logger.Error("error searching directory for person",
+ "error", err,
+ "method", r.Method,
+ "path", r.URL.Path,
+ "person", doc.Owners[0],
+ )
+ }
+ if len(people) > 0 {
+ if len(people[0].Photos) > 0 {
+ doc.OwnerPhotos = []string{people[0].Photos[0].Url}
+ }
+ }
+ }
+
+ // Convert document to Algolia object because this is how it is expected
+ // by the frontend.
+ docObj, err := doc.ToAlgoliaObject(false)
+ if err != nil {
+ srv.Logger.Error("error converting document to Algolia object",
+ "error", err,
+ "method", r.Method,
+ "path", r.URL.Path,
+ "doc_id", docID,
+ )
+ http.Error(w, "Error getting document",
+ http.StatusInternalServerError)
+ return
+ }
+
+ // Write response.
+ w.Header().Set("Content-Type", "application/json")
+ w.WriteHeader(http.StatusOK)
+
+ enc := json.NewEncoder(w)
+ err = enc.Encode(docObj)
+ if err != nil {
+ // NOTE: a 200 has already been written above, so this http.Error
+ // cannot change the status; it is best-effort only.
+ srv.Logger.Error("error encoding document",
+ "error", err,
+ "doc_id", docID,
+ )
+ http.Error(w, "Error requesting document",
+ http.StatusInternalServerError)
+ return
+ }
+
+ // Update recently viewed documents if this is a document view event. The
+ // Add-To-Recently-Viewed header is set in the request from the frontend
+ // to differentiate between document views and requests to only retrieve
+ // document metadata.
+ if r.Header.Get("Add-To-Recently-Viewed") != "" {
+ // Get authenticated user's email address.
+ email := r.Context().Value("userEmail").(string)
+
+ if err := updateRecentlyViewedDocs(
+ email, docID, srv.DB, now,
+ ); err != nil {
+ // If we get an error, log it but don't return an error response
+ // because this would degrade UX.
+ // TODO: change this log back to an error when this handles incomplete
+ // data in the database.
+ srv.Logger.Warn("error updating recently viewed docs",
+ "error", err,
+ "doc_id", docID,
+ "method", r.Method,
+ "path", r.URL.Path,
+ )
+ return
+ }
+ }
+
+ srv.Logger.Info("retrieved document",
+ "doc_id", docID,
+ "method", r.Method,
+ "path", r.URL.Path,
+ )
+
+ // Capture request-scoped values before starting the goroutine: the
+ // *http.Request must not be used after the handler returns.
+ method := r.Method
+ path := r.URL.Path
+
+ // Request post-processing.
+ go func() {
+ // Compare Algolia and database documents to find data inconsistencies.
+ // Get document object from Algolia. Use a locally-scoped error to
+ // avoid racing on the handler's captured err variable.
+ var algoDoc map[string]any
+ if err := srv.AlgoSearch.Docs.GetObject(docID, &algoDoc); err != nil {
+ // Only warn because we might be in the process of saving the Algolia
+ // object for a new document.
+ srv.Logger.Warn("error getting Algolia object for data comparison",
+ "error", err,
+ "method", method,
+ "path", path,
+ "doc_id", docID,
+ )
+ return
+ }
+ // Get document from database.
+ dbDoc := models.Document{
+ GoogleFileID: docID,
+ }
+ if err := dbDoc.Get(srv.DB); err != nil {
+ srv.Logger.Error(
+ "error getting document from database for data comparison",
+ "error", err,
+ "path", path,
+ "method", method,
+ "doc_id", docID,
+ )
+ return
+ }
+ // Get all reviews for the document.
+ var reviews models.DocumentReviews
+ if err := reviews.Find(srv.DB, models.DocumentReview{
+ Document: models.Document{
+ GoogleFileID: docID,
+ },
+ }); err != nil {
+ srv.Logger.Error(
+ "error getting all reviews for document for data comparison",
+ "error", err,
+ "method", method,
+ "path", path,
+ "doc_id", docID,
+ )
+ return
+ }
+ if err := compareAlgoliaAndDatabaseDocument(
+ algoDoc, dbDoc, reviews, srv.Config.DocumentTypes.DocumentType,
+ ); err != nil {
+ srv.Logger.Warn(
+ "inconsistencies detected between Algolia and database docs",
+ "error", err,
+ "method", method,
+ "path", path,
+ "doc_id", docID,
+ )
+ }
+ }()
+
+ case "PATCH":
+ // Authorize request (only the owner can PATCH the doc). Guard against
+ // documents with no recorded owners to avoid an index-out-of-range
+ // panic.
+ userEmail := r.Context().Value("userEmail").(string)
+ if len(doc.Owners) == 0 || doc.Owners[0] != userEmail {
+ http.Error(w, "Not a document owner", http.StatusUnauthorized)
+ return
+ }
+
+ // Decode request. The request struct validates that the request only
+ // contains fields that are allowed to be patched.
+ var req DocumentPatchRequest
+ if err := decodeRequest(r, &req); err != nil {
+ srv.Logger.Error("error decoding document patch request", "error", err)
+ http.Error(w, fmt.Sprintf("Bad request: %q", err),
+ http.StatusBadRequest)
+ return
+ }
+
+ // Validate custom fields: each must exist for the document type and
+ // match its configured display name and type. (The stale nil err that
+ // was previously logged in these branches has been dropped.)
+ if req.CustomFields != nil {
+ for _, cf := range *req.CustomFields {
+ cef, ok := doc.CustomEditableFields[cf.Name]
+ if !ok {
+ srv.Logger.Error("custom field not found",
+ "method", r.Method,
+ "path", r.URL.Path,
+ "custom_field", cf.Name,
+ "doc_id", docID)
+ http.Error(w, "Bad request: invalid custom field",
+ http.StatusBadRequest)
+ return
+ }
+ if cf.DisplayName != cef.DisplayName {
+ srv.Logger.Error("invalid custom field display name",
+ "method", r.Method,
+ "path", r.URL.Path,
+ "custom_field", cf.Name,
+ "custom_field_display_name", cf.DisplayName,
+ "doc_id", docID)
+ http.Error(w, "Bad request: invalid custom field display name",
+ http.StatusBadRequest)
+ return
+ }
+ if cf.Type != cef.Type {
+ srv.Logger.Error("invalid custom field type",
+ "method", r.Method,
+ "path", r.URL.Path,
+ "custom_field", cf.Name,
+ "custom_field_type", cf.Type,
+ "doc_id", docID)
+ http.Error(w, "Bad request: invalid custom field type",
+ http.StatusBadRequest)
+ return
+ }
+ }
+ }
+
+ // Check if document is locked.
+ locked, err := hcd.IsLocked(docID, srv.DB, srv.GWService, srv.Logger)
+ if err != nil {
+ srv.Logger.Error("error checking document locked status",
+ "error", err,
+ "path", r.URL.Path,
+ "method", r.Method,
+ "doc_id", docID,
+ )
+ // NOTE(review): 404 for an internal failure looks odd — confirm
+ // whether 500 was intended here.
+ http.Error(w, "Error getting document status", http.StatusNotFound)
+ return
+ }
+ // Don't continue if document is locked.
+ if locked {
+ http.Error(w, "Document is locked", http.StatusLocked)
+ return
+ }
+
+ // Patch document (for Algolia).
+ // Approvers. Keep the pre-patch approver list so that newly added
+ // approvers can be detected (and emailed) below; comparing after the
+ // overwrite would never find any additions.
+ origApprovers := doc.Approvers
+ if req.Approvers != nil {
+ doc.Approvers = *req.Approvers
+ }
+ // Contributors.
+ if req.Contributors != nil {
+ doc.Contributors = *req.Contributors
+ }
+ // Custom fields.
+ if req.CustomFields != nil {
+ for _, cf := range *req.CustomFields {
+ switch cf.Type {
+ case "STRING":
+ if _, ok := cf.Value.(string); ok {
+ if err := doc.UpsertCustomField(cf); err != nil {
+ srv.Logger.Error("error upserting custom string field",
+ "error", err,
+ "method", r.Method,
+ "path", r.URL.Path,
+ "custom_field", cf.Name,
+ "doc_id", docID,
+ )
+ http.Error(w,
+ "Error patching document",
+ http.StatusInternalServerError)
+ return
+ }
+ }
+ case "PEOPLE":
+ if reflect.TypeOf(cf.Value).Kind() != reflect.Slice {
+ srv.Logger.Error("invalid value type for people custom field",
+ "method", r.Method,
+ "path", r.URL.Path,
+ "custom_field", cf.Name,
+ "doc_id", docID)
+ http.Error(w,
+ fmt.Sprintf(
+ "Bad request: invalid value type for custom field %q",
+ cf.Name,
+ ),
+ http.StatusBadRequest)
+ return
+ }
+ for _, v := range cf.Value.([]any) {
+ if _, ok := v.(string); !ok {
+ srv.Logger.Error("invalid value type for people custom field",
+ "method", r.Method,
+ "path", r.URL.Path,
+ "custom_field", cf.Name,
+ "doc_id", docID)
+ http.Error(w,
+ fmt.Sprintf(
+ "Bad request: invalid value type for custom field %q",
+ cf.Name,
+ ),
+ http.StatusBadRequest)
+ return
+ }
+ }
+ if err := doc.UpsertCustomField(cf); err != nil {
+ srv.Logger.Error("error upserting custom people field",
+ "error", err,
+ "method", r.Method,
+ "path", r.URL.Path,
+ "custom_field", cf.Name,
+ "doc_id", docID,
+ )
+ http.Error(w,
+ "Error patching document",
+ http.StatusInternalServerError)
+ return
+ }
+ default:
+ srv.Logger.Error("invalid custom field type",
+ "method", r.Method,
+ "path", r.URL.Path,
+ "custom_field", cf.Name,
+ "custom_field_type", cf.Type,
+ "doc_id", docID)
+ http.Error(w,
+ fmt.Sprintf(
+ "Bad request: invalid type for custom field %q",
+ cf.Name,
+ ),
+ http.StatusBadRequest)
+ return
+ }
+ }
+ }
+ // Status.
+ // TODO: validate status.
+ if req.Status != nil {
+ doc.Status = *req.Status
+ }
+ // Summary.
+ if req.Summary != nil {
+ doc.Summary = *req.Summary
+ }
+ // Title.
+ if req.Title != nil {
+ doc.Title = *req.Title
+ }
+
+ // Determine which approvers are new (present in the request but not on
+ // the stored document) so that only they receive a review request email.
+ var approversToEmail []string
+ if len(origApprovers) == 0 && req.Approvers != nil &&
+ len(*req.Approvers) != 0 {
+ // If there were no approvers of the document,
+ // email all approvers in the request.
+ approversToEmail = *req.Approvers
+ } else if req.Approvers != nil && len(*req.Approvers) != 0 {
+ // Only compare when there are stored approvers
+ // and approvers in the request.
+ approversToEmail = compareSlices(origApprovers, *req.Approvers)
+ }
+
+ // Send emails to new approvers.
+ if srv.Config.Email != nil && srv.Config.Email.Enabled {
+ if len(approversToEmail) > 0 {
+ // TODO: use a template for email content.
+ // Fix: the body now contains a verb for the document URL;
+ // previously Sprintf received three args for two %s verbs, which
+ // emitted a %!(EXTRA ...) artifact and omitted the link entirely.
+ rawBody := `
+
+
+Hi!
+
+Your review has been requested for a new document, <a href="%s">[%s] %s</a>.
+
+
+Cheers,
+Hermes
+
+
+`
+
+ docURL, err := getDocumentURL(srv.Config.BaseURL, docID)
+ if err != nil {
+ srv.Logger.Error("error getting document URL",
+ "error", err,
+ "doc_id", docID,
+ "method", r.Method,
+ "path", r.URL.Path,
+ )
+ http.Error(w, "Error patching review",
+ http.StatusInternalServerError)
+ return
+ }
+ body := fmt.Sprintf(rawBody, docURL, doc.DocNumber, doc.Title)
+
+ // TODO: use an asynchronous method for sending emails because we
+ // can't currently recover gracefully on a failure here.
+ for _, approverEmail := range approversToEmail {
+ _, err = srv.GWService.SendEmail(
+ []string{approverEmail},
+ srv.Config.Email.FromAddress,
+ fmt.Sprintf("Document review requested for %s", doc.DocNumber),
+ body,
+ )
+ if err != nil {
+ srv.Logger.Error("error sending email",
+ "error", err,
+ "doc_id", docID,
+ "method", r.Method,
+ "path", r.URL.Path,
+ )
+ http.Error(w, "Error patching review",
+ http.StatusInternalServerError)
+ return
+ }
+ }
+ srv.Logger.Info("approver emails sent",
+ "doc_id", docID,
+ "method", r.Method,
+ "path", r.URL.Path,
+ )
+ }
+ }
+
+ // Replace the doc header.
+ if err := doc.ReplaceHeader(
+ srv.Config.BaseURL, false, srv.GWService,
+ ); err != nil {
+ srv.Logger.Error("error replacing document header",
+ "error", err, "doc_id", docID)
+ http.Error(w, "Error patching document",
+ http.StatusInternalServerError)
+ return
+ }
+
+ // Rename file with new title.
+ // NOTE(review): the result of RenameFile is ignored; a rename failure is
+ // currently silent — confirm whether it should abort the patch.
+ srv.GWService.RenameFile(docID,
+ fmt.Sprintf("[%s] %s", doc.DocNumber, doc.Title))
+
+ // Get document record from database so we can modify it for updating.
+ model := models.Document{
+ GoogleFileID: docID,
+ }
+ if err := model.Get(srv.DB); err != nil {
+ srv.Logger.Error("error getting document from database",
+ "error", err,
+ "method", r.Method,
+ "path", r.URL.Path,
+ "doc_id", docID,
+ )
+ http.Error(w, "Error patching document",
+ http.StatusInternalServerError)
+ return
+ }
+
+ // Patch document (for database).
+ // Approvers.
+ if req.Approvers != nil {
+ var approvers []*models.User
+ for _, a := range doc.Approvers {
+ u := models.User{
+ EmailAddress: a,
+ }
+ approvers = append(approvers, &u)
+ }
+ model.Approvers = approvers
+ }
+
+ // Contributors.
+ if req.Contributors != nil {
+ var contributors []*models.User
+ for _, a := range doc.Contributors {
+ u := &models.User{
+ EmailAddress: a,
+ }
+ contributors = append(contributors, u)
+ }
+ model.Contributors = contributors
+ }
+
+ // Custom fields.
+ if req.CustomFields != nil {
+ for _, cf := range *req.CustomFields {
+ switch cf.Type {
+ case "STRING":
+ if v, ok := cf.Value.(string); ok {
+ model.CustomFields = models.UpsertStringDocumentCustomField(
+ model.CustomFields,
+ doc.DocType,
+ cf.DisplayName,
+ v,
+ )
+ } else {
+ srv.Logger.Error("invalid value type for string custom field",
+ "method", r.Method,
+ "path", r.URL.Path,
+ "custom_field", cf.Name,
+ "doc_id", docID)
+ http.Error(w, "Error patching document",
+ http.StatusInternalServerError)
+ return
+ }
+ case "PEOPLE":
+ if reflect.TypeOf(cf.Value).Kind() != reflect.Slice {
+ srv.Logger.Error("invalid value type for people custom field",
+ "method", r.Method,
+ "path", r.URL.Path,
+ "custom_field", cf.Name,
+ "doc_id", docID)
+ http.Error(w, "Error patching document",
+ http.StatusInternalServerError)
+ return
+ }
+ cfVal := []string{}
+ for _, v := range cf.Value.([]any) {
+ if v, ok := v.(string); ok {
+ cfVal = append(cfVal, v)
+ } else {
+ srv.Logger.Error("invalid value type for people custom field",
+ "method", r.Method,
+ "path", r.URL.Path,
+ "custom_field", cf.Name,
+ "doc_id", docID)
+ http.Error(w, "Error patching document",
+ http.StatusInternalServerError)
+ return
+ }
+ }
+
+ model.CustomFields, err = models.
+ UpsertStringSliceDocumentCustomField(
+ model.CustomFields,
+ doc.DocType,
+ cf.DisplayName,
+ cfVal,
+ )
+ if err != nil {
+ // Fix: this is an upsert failure, not a value-type error.
+ srv.Logger.Error("error upserting people custom field",
+ "error", err,
+ "method", r.Method,
+ "path", r.URL.Path,
+ "custom_field", cf.Name,
+ "doc_id", docID)
+ http.Error(w, "Error patching document",
+ http.StatusInternalServerError)
+ return
+ }
+ default:
+ srv.Logger.Error("invalid custom field type",
+ "method", r.Method,
+ "path", r.URL.Path,
+ "custom_field", cf.Name,
+ "custom_field_type", cf.Type,
+ "doc_id", docID)
+ http.Error(w,
+ fmt.Sprintf(
+ "Bad request: invalid type for custom field %q",
+ cf.Name,
+ ),
+ http.StatusBadRequest)
+ return
+ }
+ }
+ }
+ // Make sure all custom fields have the document ID.
+ // NOTE(review): assumes model.CustomFields holds pointers; if it is a
+ // slice of values this assignment mutates a copy — confirm model type.
+ for _, cf := range model.CustomFields {
+ cf.DocumentID = model.ID
+ }
+
+ // Document modified time.
+ model.DocumentModifiedAt = time.Unix(doc.ModifiedTime, 0)
+
+ // Summary.
+ if req.Summary != nil {
+ model.Summary = req.Summary
+ }
+
+ // Title.
+ if req.Title != nil {
+ model.Title = *req.Title
+ }
+
+ // Update document in the database.
+ if err := model.Upsert(srv.DB); err != nil {
+ srv.Logger.Error("error updating document",
+ "error", err,
+ "method", r.Method,
+ "path", r.URL.Path,
+ "doc_id", docID,
+ )
+ http.Error(w, "Error patching document",
+ http.StatusInternalServerError)
+ return
+ }
+
+ w.WriteHeader(http.StatusOK)
+ srv.Logger.Info("patched document",
+ "doc_id", docID,
+ "method", r.Method,
+ "path", r.URL.Path,
+ )
+
+ // Capture request-scoped values before starting the goroutine: the
+ // *http.Request must not be used after the handler returns.
+ method := r.Method
+ path := r.URL.Path
+
+ // Request post-processing.
+ go func() {
+ // Convert document to Algolia object.
+ docObj, err := doc.ToAlgoliaObject(true)
+ if err != nil {
+ srv.Logger.Error("error converting document to Algolia object",
+ "error", err,
+ "method", method,
+ "path", path,
+ "doc_id", docID,
+ )
+ return
+ }
+
+ // Save new modified doc object in Algolia.
+ res, err := srv.AlgoWrite.Docs.SaveObject(docObj)
+ if err != nil {
+ srv.Logger.Error("error saving patched document in Algolia",
+ "error", err,
+ "method", method,
+ "path", path,
+ "doc_id", docID)
+ return
+ }
+ err = res.Wait()
+ if err != nil {
+ srv.Logger.Error("error saving patched document in Algolia",
+ "error", err,
+ "method", method,
+ "path", path,
+ "doc_id", docID)
+ return
+ }
+
+ // Compare Algolia and database documents to find data inconsistencies.
+ // Get document object from Algolia.
+ var algoDoc map[string]any
+ if err := srv.AlgoSearch.Docs.GetObject(docID, &algoDoc); err != nil {
+ srv.Logger.Error("error getting Algolia object for data comparison",
+ "error", err,
+ "method", method,
+ "path", path,
+ "doc_id", docID,
+ )
+ return
+ }
+ // Get document from database.
+ dbDoc := models.Document{
+ GoogleFileID: docID,
+ }
+ if err := dbDoc.Get(srv.DB); err != nil {
+ srv.Logger.Error(
+ "error getting document from database for data comparison",
+ "error", err,
+ "path", path,
+ "method", method,
+ "doc_id", docID,
+ )
+ return
+ }
+ // Get all reviews for the document.
+ var reviews models.DocumentReviews
+ if err := reviews.Find(srv.DB, models.DocumentReview{
+ Document: models.Document{
+ GoogleFileID: docID,
+ },
+ }); err != nil {
+ srv.Logger.Error(
+ "error getting all reviews for document for data comparison",
+ "error", err,
+ "method", method,
+ "path", path,
+ "doc_id", docID,
+ )
+ return
+ }
+ if err := compareAlgoliaAndDatabaseDocument(
+ algoDoc, dbDoc, reviews, srv.Config.DocumentTypes.DocumentType,
+ ); err != nil {
+ srv.Logger.Warn(
+ "inconsistencies detected between Algolia and database docs",
+ "error", err,
+ "method", method,
+ "path", path,
+ "doc_id", docID,
+ )
+ }
+ }()
+
+ default:
+ w.WriteHeader(http.StatusMethodNotAllowed)
+ return
+ }
+ })
+}
+
+// updateRecentlyViewedDocs updates the recently viewed docs for a user with the
+// provided email address, using the document file ID and viewed at time for a
+// document view event. The user's recently-viewed list is capped at the five
+// newest documents.
+func updateRecentlyViewedDocs(
+ email, docID string, db *gorm.DB, viewedAt time.Time) error {
+ // Get user (if exists). A record-not-found error is tolerated: the Upsert
+ // below will create the user record.
+ u := models.User{
+ EmailAddress: email,
+ }
+ if err := u.Get(db); err != nil && !errors.Is(
+ err, gorm.ErrRecordNotFound) {
+ return fmt.Errorf("error getting user in database: %w", err)
+ }
+
+ // Get viewed document in database.
+ doc := models.Document{
+ GoogleFileID: docID,
+ }
+ if err := doc.Get(db); err != nil {
+ return fmt.Errorf("error getting viewed document: %w", err)
+ }
+
+ // Find recently viewed documents, newest first.
+ var rvd []models.RecentlyViewedDoc
+ if err := db.Where(&models.RecentlyViewedDoc{UserID: int(u.ID)}).
+ Order("viewed_at desc").
+ Find(&rvd).Error; err != nil {
+ return fmt.Errorf("error finding recently viewed docs for user: %w", err)
+ }
+
+ // Prepend viewed document to recently viewed documents.
+ // NOTE(review): if this document is already in rvd it appears twice here;
+ // presumably the association upsert below deduplicates — confirm.
+ rvd = append(
+ []models.RecentlyViewedDoc{{
+ DocumentID: int(doc.ID),
+ UserID: int(u.ID),
+ }},
+ rvd...)
+
+ // Get document records for recently viewed docs.
+ docs := []models.Document{}
+ for _, d := range rvd {
+ dd := models.Document{
+ Model: gorm.Model{
+ ID: uint(d.DocumentID),
+ },
+ }
+ if err := dd.Get(db); err != nil {
+ return fmt.Errorf("error getting document: %w", err)
+ }
+ docs = append(docs, dd)
+ }
+
+ // Trim recently viewed documents to a length of 5.
+ if len(docs) > 5 {
+ docs = docs[:5]
+ }
+
+ // Update user. Replacing the RecentlyViewedDocs association is what drops
+ // entries beyond the newest five.
+ u.RecentlyViewedDocs = docs
+ if err := u.Upsert(db); err != nil {
+ return fmt.Errorf("error upserting user: %w", err)
+ }
+
+ // Update ViewedAt time for this document.
+ // NOTE(review): relies on GORM locating the row via the struct's primary
+ // key fields (no explicit Where) — confirm UserID/DocumentID are keys.
+ viewedDoc := models.RecentlyViewedDoc{
+ UserID: int(u.ID),
+ DocumentID: int(doc.ID),
+ ViewedAt: viewedAt,
+ }
+ if err := db.Updates(&viewedDoc).Error; err != nil {
+ return fmt.Errorf(
+ "error updating recently viewed document in database: %w", err)
+ }
+
+ return nil
+}
+
+// parseDocumentsURLPath parses the document ID and subcollection request type
+// from a documents/drafts API URL path. collection is the collection segment
+// of the URL ("documents" or "drafts"). Document IDs may contain
+// alphanumerics, underscores, and hyphens; trailing slashes do not match.
+func parseDocumentsURLPath(path, collection string) (
+ docID string,
+ reqType documentSubcollectionRequestType,
+ err error,
+) {
+ // NOTE(review): these regexps are recompiled on every call; hoisting them
+ // to package-level vars would avoid the cost in this hot handler path.
+ noSubcollectionRE := regexp.MustCompile(
+ fmt.Sprintf(
+ `^\/api\/v2\/%s\/([0-9A-Za-z_\-]+)$`,
+ collection))
+ relatedResourcesSubcollectionRE := regexp.MustCompile(
+ fmt.Sprintf(
+ `^\/api\/v2\/%s\/([0-9A-Za-z_\-]+)\/related-resources$`,
+ collection))
+ // shareable isn't really a subcollection, but we'll go with it.
+ shareableRE := regexp.MustCompile(
+ fmt.Sprintf(
+ `^\/api\/v2\/%s\/([0-9A-Za-z_\-]+)\/shareable$`,
+ collection))
+
+ switch {
+ case noSubcollectionRE.MatchString(path):
+ matches := noSubcollectionRE.FindStringSubmatch(path)
+ if len(matches) != 2 {
+ return "", unspecifiedDocumentSubcollectionRequestType, fmt.Errorf(
+ "wrong number of string submatches for resource URL path")
+ }
+ return matches[1], noSubcollectionRequestType, nil
+
+ case relatedResourcesSubcollectionRE.MatchString(path):
+ matches := relatedResourcesSubcollectionRE.
+ FindStringSubmatch(path)
+ if len(matches) != 2 {
+ return "",
+ relatedResourcesDocumentSubcollectionRequestType,
+ fmt.Errorf(
+ "wrong number of string submatches for related resources subcollection URL path")
+ }
+ return matches[1], relatedResourcesDocumentSubcollectionRequestType, nil
+
+ case shareableRE.MatchString(path):
+ matches := shareableRE.
+ FindStringSubmatch(path)
+ if len(matches) != 2 {
+ return "",
+ shareableDocumentSubcollectionRequestType,
+ fmt.Errorf(
+ "wrong number of string submatches for shareable subcollection URL path")
+ }
+ return matches[1], shareableDocumentSubcollectionRequestType, nil
+
+ default:
+ // No pattern matched (bad collection, missing ID, or trailing slash).
+ return "",
+ unspecifiedDocumentSubcollectionRequestType,
+ fmt.Errorf("path did not match any URL strings")
+ }
+}
diff --git a/internal/api/v2/documents_related_resources.go b/internal/api/v2/documents_related_resources.go
new file mode 100644
index 000000000..30a63d86b
--- /dev/null
+++ b/internal/api/v2/documents_related_resources.go
@@ -0,0 +1,253 @@
+package api
+
+import (
+ "encoding/json"
+ "net/http"
+
+ "github.com/hashicorp-forge/hermes/internal/config"
+ "github.com/hashicorp-forge/hermes/pkg/algolia"
+ "github.com/hashicorp-forge/hermes/pkg/document"
+ "github.com/hashicorp-forge/hermes/pkg/models"
+ "github.com/hashicorp/go-hclog"
+ "gorm.io/gorm"
+)
+
+// relatedResourcesPutRequest is the body of a POST/PUT request that replaces
+// the full set of a document's related resources.
+type relatedResourcesPutRequest struct {
+ ExternalLinks []externalLinkRelatedResourcePutRequest `json:"externalLinks,omitempty"`
+ HermesDocuments []hermesDocumentRelatedResourcePutRequest `json:"hermesDocuments,omitempty"`
+}
+
+// externalLinkRelatedResourcePutRequest is an arbitrary external URL related
+// resource with a display name and sort position.
+type externalLinkRelatedResourcePutRequest struct {
+ Name string `json:"name"`
+ URL string `json:"url"`
+ SortOrder int `json:"sortOrder"`
+}
+
+// hermesDocumentRelatedResourcePutRequest references another Hermes document
+// by its Google file ID.
+type hermesDocumentRelatedResourcePutRequest struct {
+ GoogleFileID string `json:"googleFileID"`
+ SortOrder int `json:"sortOrder"`
+}
+
+// relatedResourcesGetResponse is the GET response payload: both kinds of
+// related resources, resolved for display.
+type relatedResourcesGetResponse struct {
+ ExternalLinks []externalLinkRelatedResourceGetResponse `json:"externalLinks,omitempty"`
+ HermesDocuments []hermesDocumentRelatedResourceGetResponse `json:"hermesDocuments,omitempty"`
+}
+
+// externalLinkRelatedResourceGetResponse mirrors the PUT request shape.
+type externalLinkRelatedResourceGetResponse struct {
+ Name string `json:"name"`
+ URL string `json:"url"`
+ SortOrder int `json:"sortOrder"`
+}
+
+// hermesDocumentRelatedResourceGetResponse is a referenced Hermes document
+// enriched with display metadata fetched from Algolia.
+type hermesDocumentRelatedResourceGetResponse struct {
+ GoogleFileID string `json:"googleFileID"`
+ Title string `json:"title"`
+ DocumentType string `json:"documentType"`
+ DocumentNumber string `json:"documentNumber"`
+ SortOrder int `json:"sortOrder"`
+}
+
+// documentsResourceRelatedResourcesHandler serves the related-resources
+// subcollection of a document or draft: GET returns the document's
+// external-link and Hermes-document related resources; POST/PUT (owner only)
+// replaces the full set. All other methods receive a 405.
+func documentsResourceRelatedResourcesHandler(
+ w http.ResponseWriter,
+ r *http.Request,
+ docID string,
+ doc document.Document,
+ cfg *config.Config,
+ l hclog.Logger,
+ algoRead *algolia.Client,
+ db *gorm.DB,
+) {
+ switch r.Method {
+ case "GET":
+ d := models.Document{
+ GoogleFileID: docID,
+ }
+ if err := d.Get(db); err != nil {
+ l.Error("error getting document from database",
+ "error", err,
+ "path", r.URL.Path,
+ "method", r.Method,
+ "doc_id", docID,
+ )
+ http.Error(w, "Error accessing document resources",
+ http.StatusInternalServerError)
+ return
+ }
+
+ // Get typed related resources.
+ elrrs, hdrrs, err := d.GetRelatedResources(db)
+ if err != nil {
+ l.Error("error getting related resources",
+ "error", err,
+ "path", r.URL.Path,
+ "method", r.Method,
+ "doc_id", docID,
+ )
+ http.Error(w, "Error accessing document",
+ http.StatusInternalServerError)
+ return
+ }
+
+ // Build response. Slices are initialized non-nil so they encode as []
+ // rather than null when empty.
+ resp := relatedResourcesGetResponse{
+ ExternalLinks: []externalLinkRelatedResourceGetResponse{},
+ HermesDocuments: []hermesDocumentRelatedResourceGetResponse{},
+ }
+ // Add external link related resources.
+ for _, elrr := range elrrs {
+ if err := elrr.Get(db); err != nil {
+ l.Error("error getting external link related resource from database",
+ "error", err,
+ "path", r.URL.Path,
+ "method", r.Method,
+ "doc_id", docID,
+ )
+ http.Error(w, "Error accessing document",
+ http.StatusInternalServerError)
+ return
+ }
+
+ resp.ExternalLinks = append(resp.ExternalLinks,
+ externalLinkRelatedResourceGetResponse{
+ Name: elrr.Name,
+ URL: elrr.URL,
+ SortOrder: elrr.RelatedResource.SortOrder,
+ })
+ }
+ // Add Hermes document related resources.
+ // NOTE(review): one Algolia round trip per related document (N+1);
+ // consider a batched lookup if documents accrue many related resources.
+ for _, hdrr := range hdrrs {
+ // Get document object from Algolia.
+ var algoObj map[string]any
+ err = algoRead.Docs.GetObject(hdrr.Document.GoogleFileID, &algoObj)
+ if err != nil {
+ l.Error("error getting related resource document from Algolia",
+ "error", err,
+ "path", r.URL.Path,
+ "method", r.Method,
+ "doc_id", docID,
+ "target_doc_id", hdrr.Document.GoogleFileID,
+ )
+ http.Error(w, "Error accessing document",
+ http.StatusInternalServerError)
+ return
+ }
+
+ // Convert Algolia object to a document.
+ doc, err := document.NewFromAlgoliaObject(
+ algoObj, cfg.DocumentTypes.DocumentType)
+ if err != nil {
+ l.Error("error converting Algolia object to document type",
+ "error", err,
+ "doc_id", docID,
+ )
+ http.Error(w, "Error accessing draft document",
+ http.StatusInternalServerError)
+ return
+ }
+
+ resp.HermesDocuments = append(
+ resp.HermesDocuments,
+ hermesDocumentRelatedResourceGetResponse{
+ GoogleFileID: hdrr.Document.GoogleFileID,
+ Title: doc.Title,
+ DocumentType: doc.DocType,
+ DocumentNumber: doc.DocNumber,
+ SortOrder: hdrr.RelatedResource.SortOrder,
+ })
+ }
+
+ // Write response. Note the 200 is written before encoding, so the
+ // http.Error below is best-effort if encoding fails mid-stream.
+ w.Header().Set("Content-Type", "application/json")
+ w.WriteHeader(http.StatusOK)
+ enc := json.NewEncoder(w)
+ err = enc.Encode(resp)
+ if err != nil {
+ l.Error("error encoding response",
+ "error", err,
+ "doc_id", docID,
+ )
+ http.Error(w, "Error accessing document", http.StatusInternalServerError)
+ return
+ }
+
+ case "POST":
+ // POST is treated identically to PUT: a full replacement.
+ fallthrough
+ case "PUT":
+ // Authorize request (only the document owner can replace related
+ // resources).
+ // NOTE(review): assumes doc.Owners is non-empty; an ownerless document
+ // would panic here — confirm upstream guarantees.
+ userEmail := r.Context().Value("userEmail").(string)
+ if doc.Owners[0] != userEmail {
+ http.Error(w, "Not a document owner", http.StatusUnauthorized)
+ return
+ }
+
+ // Decode request.
+ var req relatedResourcesPutRequest
+ if err := decodeRequest(r, &req); err != nil {
+ l.Error("error decoding request",
+ "error", err,
+ "path", r.URL.Path,
+ "method", r.Method,
+ "doc_id", docID,
+ )
+ http.Error(w, "Bad request", http.StatusBadRequest)
+ return
+ }
+
+ // Build external link related resources for database model.
+ elrrs := []models.DocumentRelatedResourceExternalLink{}
+ for _, elrr := range req.ExternalLinks {
+ elrrs = append(elrrs, models.DocumentRelatedResourceExternalLink{
+ RelatedResource: models.DocumentRelatedResource{
+ Document: models.Document{
+ GoogleFileID: docID,
+ },
+ SortOrder: elrr.SortOrder,
+ },
+ Name: elrr.Name,
+ URL: elrr.URL,
+ })
+ }
+
+ // Build Hermes document related resources for database model.
+ hdrrs := []models.DocumentRelatedResourceHermesDocument{}
+ for _, hdrr := range req.HermesDocuments {
+ hdrrs = append(hdrrs, models.DocumentRelatedResourceHermesDocument{
+ RelatedResource: models.DocumentRelatedResource{
+ Document: models.Document{
+ GoogleFileID: docID,
+ },
+ SortOrder: hdrr.SortOrder,
+ },
+ Document: models.Document{
+ GoogleFileID: hdrr.GoogleFileID,
+ },
+ })
+ }
+
+ // Replace related resources for document.
+ doc := models.Document{
+ GoogleFileID: docID,
+ }
+ if err := doc.ReplaceRelatedResources(db, elrrs, hdrrs); err != nil {
+ l.Error("error replacing related resources for document",
+ "error", err,
+ "path", r.URL.Path,
+ "method", r.Method,
+ "doc_id", docID,
+ )
+ http.Error(w, "Error accessing document", http.StatusInternalServerError)
+ return
+ }
+
+ // Success: falls through to an implicit 200 with no body.
+ l.Info("replaced related resources for document",
+ "path", r.URL.Path,
+ "method", r.Method,
+ "doc_id", docID,
+ )
+
+ default:
+ w.WriteHeader(http.StatusMethodNotAllowed)
+ return
+ }
+}
diff --git a/internal/api/v2/documents_test.go b/internal/api/v2/documents_test.go
new file mode 100644
index 000000000..62cc85d9d
--- /dev/null
+++ b/internal/api/v2/documents_test.go
@@ -0,0 +1,62 @@
+package api
+
+import (
+ "testing"
+
+ "github.com/stretchr/testify/assert"
+)
+
+// TestParseDocumentIDFromURLPath verifies that parseDocumentsURLPath extracts
+// the document ID and subcollection request type from documents/drafts
+// collection URL paths, and that malformed paths yield an error.
+func TestParseDocumentIDFromURLPath(t *testing.T) {
+	tests := map[string]struct {
+		path        string
+		collection  string
+		wantReqType documentSubcollectionRequestType
+		wantDocID   string
+		shouldErr   bool
+	}{
+		"good documents collection URL with related resources": {
+			path:        "/api/v2/documents/doc123/related-resources",
+			collection:  "documents",
+			wantReqType: relatedResourcesDocumentSubcollectionRequestType,
+			wantDocID:   "doc123",
+		},
+		"good drafts collection URL with related resources": {
+			path:        "/api/v2/drafts/doc123/related-resources",
+			collection:  "drafts",
+			wantReqType: relatedResourcesDocumentSubcollectionRequestType,
+			wantDocID:   "doc123",
+		},
+		"good drafts collection URL with shareable": {
+			path:        "/api/v2/drafts/doc123/shareable",
+			collection:  "drafts",
+			wantReqType: shareableDocumentSubcollectionRequestType,
+			wantDocID:   "doc123",
+		},
+		"extra frontslash after related-resources": {
+			path:       "/api/v2/documents/doc123/related-resources/",
+			collection: "documents",
+			shouldErr:  true,
+		},
+		"no document resource ID": {
+			path:       "/api/v2/documents/",
+			collection: "documents",
+			shouldErr:  true,
+		},
+	}
+
+	for name, tc := range tests {
+		tc := tc
+		t.Run(name, func(t *testing.T) {
+			docID, reqType, err := parseDocumentsURLPath(tc.path, tc.collection)
+
+			if tc.shouldErr {
+				assert.Error(t, err)
+				return
+			}
+			assert.NoError(t, err)
+			assert.Equal(t, tc.wantDocID, docID)
+			assert.Equal(t, tc.wantReqType, reqType)
+		})
+	}
+}
diff --git a/internal/api/v2/drafts.go b/internal/api/v2/drafts.go
new file mode 100644
index 000000000..7d255b76e
--- /dev/null
+++ b/internal/api/v2/drafts.go
@@ -0,0 +1,1434 @@
+package api
+
+import (
+ "encoding/json"
+ "fmt"
+ "net/http"
+ "reflect"
+ "strconv"
+ "strings"
+ "time"
+
+ "github.com/algolia/algoliasearch-client-go/v3/algolia/opt"
+ "github.com/algolia/algoliasearch-client-go/v3/algolia/search"
+ "github.com/hashicorp-forge/hermes/internal/config"
+ "github.com/hashicorp-forge/hermes/internal/server"
+ "github.com/hashicorp-forge/hermes/pkg/document"
+ gw "github.com/hashicorp-forge/hermes/pkg/googleworkspace"
+ hcd "github.com/hashicorp-forge/hermes/pkg/hashicorpdocs"
+ "github.com/hashicorp-forge/hermes/pkg/models"
+)
+
+// DraftsRequest is the request body for creating a new draft document via a
+// POST to the drafts collection.
+type DraftsRequest struct {
+ // Approvers are the email addresses of the requested approvers.
+ Approvers []string `json:"approvers,omitempty"`
+ // Contributors are the email addresses of contributors; the draft file is
+ // shared with each of them.
+ Contributors []string `json:"contributors,omitempty"`
+ // DocType is the document type; it must match a configured document type.
+ DocType string `json:"docType,omitempty"`
+ // Product is the product name associated with the draft.
+ Product string `json:"product,omitempty"`
+ // ProductAbbreviation is used to build the draft title and doc number;
+ // the handler falls back to "TODO" when it is empty.
+ ProductAbbreviation string `json:"productAbbreviation,omitempty"`
+ // Summary is a short summary of the document.
+ Summary string `json:"summary,omitempty"`
+ // Tags are document tags. NOTE(review): tags are currently not applied
+ // when building the draft (the assignment is commented out in the
+ // handler) — confirm whether this field is still supported.
+ Tags []string `json:"tags,omitempty"`
+ // Title is the document title. Required; requests without it are
+ // rejected with a 400.
+ Title string `json:"title"`
+}
+
+// DraftsPatchRequest contains a subset of drafts fields that are allowed to
+// be updated with a PATCH request. All fields are pointers so that an absent
+// field can be distinguished from an explicit zero value.
+type DraftsPatchRequest struct {
+ // Approvers replaces the document's requested approvers.
+ Approvers *[]string `json:"approvers,omitempty"`
+ // Contributors replaces the document's contributors; file sharing is
+ // updated to match.
+ Contributors *[]string `json:"contributors,omitempty"`
+ // CustomFields replaces values of doc-type-specific custom fields; each
+ // entry is validated against the document's editable custom fields.
+ CustomFields *[]document.CustomField `json:"customFields,omitempty"`
+ // Product replaces the product; it must name an existing product.
+ Product *string `json:"product,omitempty"`
+ // Summary replaces the document summary.
+ Summary *string `json:"summary,omitempty"`
+ // Tags are intentionally not patchable at this time.
+ // Tags []string `json:"tags,omitempty"`
+ // Title replaces the document title.
+ Title *string `json:"title,omitempty"`
+}
+
+// DraftsResponse is the JSON response body for draft create and delete
+// requests.
+type DraftsResponse struct {
+ // ID is the Google file ID of the draft document.
+ ID string `json:"id"`
+}
+
+// DraftsHandler handles requests for the drafts collection.
+//
+//   - POST creates a new draft from the configured doc-type template, shares
+//     it with the owner and contributors, records it in the database, and
+//     asynchronously indexes it in Algolia.
+//   - GET lists the authenticated user's drafts (as owner or contributor)
+//     via Algolia search.
+func DraftsHandler(srv server.Server) http.Handler {
+	return http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
+		// errResp logs an error and writes the error response in one step.
+		errResp := func(httpCode int, userErrMsg, logErrMsg string, err error) {
+			srv.Logger.Error(logErrMsg,
+				"method", r.Method,
+				"path", r.URL.Path,
+				"error", err,
+			)
+			http.Error(w, userErrMsg, httpCode)
+		}
+
+		// Authorize request.
+		userEmail := r.Context().Value("userEmail").(string)
+		if userEmail == "" {
+			errResp(
+				http.StatusUnauthorized,
+				"No authorization information for request",
+				"no user email found in request context",
+				nil,
+			)
+			return
+		}
+
+		switch r.Method {
+		case "POST":
+			// Decode request.
+			var req DraftsRequest
+			if err := decodeRequest(r, &req); err != nil {
+				srv.Logger.Error("error decoding drafts request",
+					"error", err,
+					"method", r.Method,
+					"path", r.URL.Path,
+				)
+				http.Error(w, fmt.Sprintf("Bad request: %q", err),
+					http.StatusBadRequest)
+				return
+			}
+
+			// Validate document type.
+			if !validateDocType(srv.Config.DocumentTypes.DocumentType, req.DocType) {
+				srv.Logger.Error("invalid document type",
+					"method", r.Method,
+					"path", r.URL.Path,
+					"doc_type", req.DocType,
+				)
+				http.Error(
+					w, "Bad request: invalid document type", http.StatusBadRequest)
+				return
+			}
+
+			// A title is required.
+			if req.Title == "" {
+				http.Error(w, "Bad request: title is required", http.StatusBadRequest)
+				return
+			}
+
+			// Get doc type template.
+			template := getDocTypeTemplate(
+				srv.Config.DocumentTypes.DocumentType, req.DocType)
+			if template == "" {
+				srv.Logger.Error("Bad request: no template configured for doc type",
+					"method", r.Method,
+					"path", r.URL.Path,
+					"doc_type", req.DocType,
+				)
+				http.Error(w,
+					"Bad request: no template configured for doc type",
+					http.StatusBadRequest)
+				return
+			}
+
+			// Build title. The doc number is unknown until the document is
+			// published, so "???" is used as a placeholder.
+			if req.ProductAbbreviation == "" {
+				req.ProductAbbreviation = "TODO"
+			}
+			title := fmt.Sprintf("[%s-???] %s", req.ProductAbbreviation, req.Title)
+
+			// Copy template to new draft file.
+			f, err := srv.GWService.CopyFile(
+				template, title, srv.Config.GoogleWorkspace.DraftsFolder)
+			if err != nil {
+				srv.Logger.Error("error creating draft",
+					"error", err,
+					"method", r.Method,
+					"path", r.URL.Path,
+					"template", template,
+					"drafts_folder", srv.Config.GoogleWorkspace.DraftsFolder,
+				)
+				http.Error(w, "Error creating document draft",
+					http.StatusInternalServerError)
+				return
+			}
+
+			// Parse the created time once; it is reused for both the display
+			// date and the database record.
+			ct, err := time.Parse(time.RFC3339Nano, f.CreatedTime)
+			if err != nil {
+				srv.Logger.Error("error parsing draft created time",
+					"error", err,
+					"method", r.Method,
+					"path", r.URL.Path,
+					"doc_id", f.Id,
+				)
+				http.Error(w, "Error creating document draft",
+					http.StatusInternalServerError)
+				return
+			}
+			cd := ct.Format("Jan 2, 2006")
+
+			// Get owner photo by searching Google Workspace directory. A
+			// lookup failure is non-fatal; the draft is created without a
+			// photo.
+			op := []string{}
+			people, err := srv.GWService.SearchPeople(userEmail, "photos")
+			if err != nil {
+				srv.Logger.Error(
+					"error searching directory for person",
+					"error", err,
+					"method", r.Method,
+					"path", r.URL.Path,
+					"person", userEmail,
+				)
+			}
+			if len(people) > 0 {
+				if len(people[0].Photos) > 0 {
+					op = append(op, people[0].Photos[0].Url)
+				}
+			}
+
+			// Create tag
+			// Note: The o_id tag may be empty for environments such as development.
+			// For environments like pre-prod and prod, it will be set as
+			// Okta authentication is enforced before this handler is called for
+			// those environments. Maybe, if id isn't set we use
+			// owner emails in the future?
+			id := r.Header.Get("x-amzn-oidc-identity")
+			metaTags := []string{
+				"o_id:" + id,
+			}
+
+			// Build document.
+			doc := &document.Document{
+				ObjectID:     f.Id,
+				Title:        req.Title,
+				AppCreated:   true,
+				Contributors: req.Contributors,
+				Created:      cd,
+				CreatedTime:  ct.Unix(),
+				DocNumber:    fmt.Sprintf("%s-???", req.ProductAbbreviation),
+				DocType:      req.DocType,
+				MetaTags:     metaTags,
+				ModifiedTime: ct.Unix(),
+				Owners:       []string{userEmail},
+				OwnerPhotos:  op,
+				Product:      req.Product,
+				Status:       "WIP",
+				Summary:      req.Summary,
+				// Tags: req.Tags,
+			}
+
+			// Replace the doc header.
+			if err = doc.ReplaceHeader(
+				srv.Config.BaseURL, true, srv.GWService,
+			); err != nil {
+				srv.Logger.Error("error replacing draft doc header",
+					"error", err,
+					"method", r.Method,
+					"path", r.URL.Path,
+					"doc_id", f.Id,
+				)
+				http.Error(w, "Error creating document draft",
+					http.StatusInternalServerError)
+				return
+			}
+
+			// Create document in the database.
+			var approvers []*models.User
+			for _, a := range req.Approvers {
+				approvers = append(approvers, &models.User{
+					EmailAddress: a,
+				})
+			}
+			var contributors []*models.User
+			for _, c := range req.Contributors {
+				contributors = append(contributors, &models.User{
+					EmailAddress: c,
+				})
+			}
+			model := models.Document{
+				GoogleFileID:       f.Id,
+				Approvers:          approvers,
+				Contributors:       contributors,
+				DocumentCreatedAt:  ct,
+				DocumentModifiedAt: ct,
+				DocumentType: models.DocumentType{
+					Name: req.DocType,
+				},
+				Owner: &models.User{
+					EmailAddress: userEmail,
+				},
+				Product: models.Product{
+					Name: req.Product,
+				},
+				Status:  models.WIPDocumentStatus,
+				Summary: &req.Summary,
+				Title:   req.Title,
+			}
+			if err := model.Create(srv.DB); err != nil {
+				srv.Logger.Error("error creating document in database",
+					"error", err,
+					"method", r.Method,
+					"path", r.URL.Path,
+					"doc_id", f.Id,
+				)
+				http.Error(w, "Error creating document draft",
+					http.StatusInternalServerError)
+				return
+			}
+
+			// Share file with the owner
+			if err := srv.GWService.ShareFile(f.Id, userEmail, "writer"); err != nil {
+				srv.Logger.Error("error sharing file with the owner",
+					"error", err,
+					"method", r.Method,
+					"path", r.URL.Path,
+					"doc_id", f.Id,
+				)
+				http.Error(w, "Error creating document draft",
+					http.StatusInternalServerError)
+				return
+			}
+
+			// Share file with contributors.
+			// Google Drive API limitation is that you can only share files with one
+			// user at a time.
+			for _, c := range req.Contributors {
+				if err := srv.GWService.ShareFile(f.Id, c, "writer"); err != nil {
+					srv.Logger.Error("error sharing file with the contributor",
+						"error", err,
+						"method", r.Method,
+						"path", r.URL.Path,
+						"doc_id", f.Id,
+						"contributor", c,
+					)
+					http.Error(w, "Error creating document draft",
+						http.StatusInternalServerError)
+					return
+				}
+			}
+
+			// TODO: Delete draft file in the case of an error.
+
+			// Write response.
+			w.Header().Set("Content-Type", "application/json")
+			w.WriteHeader(http.StatusOK)
+
+			resp := &DraftsResponse{
+				ID: f.Id,
+			}
+
+			enc := json.NewEncoder(w)
+			err = enc.Encode(resp)
+			if err != nil {
+				srv.Logger.Error("error encoding drafts response",
+					"error", err,
+					"method", r.Method,
+					"path", r.URL.Path,
+					"doc_id", f.Id,
+				)
+				http.Error(w, "Error creating document draft",
+					http.StatusInternalServerError)
+				return
+			}
+
+			srv.Logger.Info("created draft",
+				"method", r.Method,
+				"path", r.URL.Path,
+				"doc_id", f.Id,
+			)
+
+			// Request post-processing. This runs after the response has been
+			// written and the handler may have returned, so it must only log
+			// failures; the ResponseWriter must not be touched from this
+			// goroutine.
+			go func() {
+				// Save document object in Algolia.
+				res, err := srv.AlgoWrite.Drafts.SaveObject(doc)
+				if err != nil {
+					srv.Logger.Error("error saving draft doc in Algolia",
+						"error", err,
+						"method", r.Method,
+						"path", r.URL.Path,
+						"doc_id", f.Id,
+					)
+					return
+				}
+				if err := res.Wait(); err != nil {
+					srv.Logger.Error("error saving draft doc in Algolia",
+						"error", err,
+						"method", r.Method,
+						"path", r.URL.Path,
+						"doc_id", f.Id,
+					)
+					return
+				}
+
+				// Compare Algolia and database documents to find data inconsistencies.
+				// Get document object from Algolia.
+				var algoDoc map[string]any
+				err = srv.AlgoSearch.Drafts.GetObject(f.Id, &algoDoc)
+				if err != nil {
+					srv.Logger.Error("error getting Algolia object for data comparison",
+						"error", err,
+						"method", r.Method,
+						"path", r.URL.Path,
+						"doc_id", f.Id,
+					)
+					return
+				}
+				// Get document from database.
+				dbDoc := models.Document{
+					GoogleFileID: f.Id,
+				}
+				if err := dbDoc.Get(srv.DB); err != nil {
+					srv.Logger.Error(
+						"error getting document from database for data comparison",
+						"error", err,
+						"method", r.Method,
+						"path", r.URL.Path,
+						"doc_id", f.Id,
+					)
+					return
+				}
+				// Get all reviews for the document.
+				var reviews models.DocumentReviews
+				if err := reviews.Find(srv.DB, models.DocumentReview{
+					Document: models.Document{
+						GoogleFileID: f.Id,
+					},
+				}); err != nil {
+					srv.Logger.Error(
+						"error getting all reviews for document for data comparison",
+						"error", err,
+						"method", r.Method,
+						"path", r.URL.Path,
+						"doc_id", f.Id,
+					)
+					return
+				}
+				if err := compareAlgoliaAndDatabaseDocument(
+					algoDoc, dbDoc, reviews, srv.Config.DocumentTypes.DocumentType,
+				); err != nil {
+					srv.Logger.Warn(
+						"inconsistencies detected between Algolia and database docs",
+						"error", err,
+						"method", r.Method,
+						"path", r.URL.Path,
+						"doc_id", f.Id,
+					)
+				}
+			}()
+
+		case "GET":
+			// Get OIDC ID
+			id := r.Header.Get("x-amzn-oidc-identity")
+
+			// Parse query
+			q := r.URL.Query()
+			facetFiltersStr := q.Get("facetFilters")
+			facetsStr := q.Get("facets")
+			hitsPerPageStr := q.Get("hitsPerPage")
+			maxValuesPerFacetStr := q.Get("maxValuesPerFacet")
+			pageStr := q.Get("page")
+
+			facetFilters := strings.Split(facetFiltersStr, ",")
+			facets := strings.Split(facetsStr, ",")
+			hitsPerPage, err := strconv.Atoi(hitsPerPageStr)
+			if err != nil {
+				srv.Logger.Error("error converting to int",
+					"error", err,
+					"method", r.Method,
+					"path", r.URL.Path,
+					"hits_per_page", hitsPerPageStr,
+				)
+				http.Error(w, "Error retrieving document drafts",
+					http.StatusInternalServerError)
+				return
+			}
+			maxValuesPerFacet, err := strconv.Atoi(maxValuesPerFacetStr)
+			if err != nil {
+				srv.Logger.Error("error converting to int",
+					"error", err,
+					"method", r.Method,
+					"path", r.URL.Path,
+					"max_values_per_facet", maxValuesPerFacetStr,
+				)
+				http.Error(w, "Error retrieving document drafts",
+					http.StatusInternalServerError)
+				return
+			}
+			page, err := strconv.Atoi(pageStr)
+			if err != nil {
+				srv.Logger.Error("error converting to int",
+					"error", err,
+					"method", r.Method,
+					"path", r.URL.Path,
+					"page", pageStr,
+				)
+				http.Error(w, "Error retrieving document drafts",
+					http.StatusInternalServerError)
+				return
+			}
+
+			// Build params
+			params := []interface{}{
+				opt.Facets(facets...),
+				// FacetFilters are supplied as follows:
+				// ['attribute1:value', 'attribute2:value'], 'owners:owner_email_value'
+				opt.FacetFilterAnd(
+					facetFilters,
+					opt.FacetFilterOr("owners:"+userEmail, "contributors:"+userEmail),
+				),
+				opt.HitsPerPage(hitsPerPage),
+				opt.MaxValuesPerFacet(maxValuesPerFacet),
+				opt.Page(page),
+			}
+
+			// Retrieve all documents
+			var resp search.QueryRes
+			sortBy := q.Get("sortBy")
+			if sortBy == "dateAsc" {
+				resp, err = srv.AlgoSearch.DraftsCreatedTimeAsc.Search("", params...)
+			} else {
+				resp, err = srv.AlgoSearch.DraftsCreatedTimeDesc.Search("", params...)
+			}
+			if err != nil {
+				srv.Logger.Error("error retrieving document drafts from Algolia",
+					"error", err,
+					"method", r.Method,
+					"path", r.URL.Path,
+				)
+				http.Error(w, "Error retrieving document drafts",
+					http.StatusInternalServerError)
+				return
+			}
+
+			// Write response.
+			w.Header().Set("Content-Type", "application/json")
+			w.WriteHeader(http.StatusOK)
+
+			enc := json.NewEncoder(w)
+			err = enc.Encode(resp)
+			if err != nil {
+				srv.Logger.Error("error encoding document drafts",
+					"error", err,
+					"method", r.Method,
+					"path", r.URL.Path,
+				)
+				http.Error(w, "Error requesting document draft",
+					http.StatusInternalServerError)
+				return
+			}
+
+			srv.Logger.Info("retrieved document drafts",
+				"method", r.Method,
+				"path", r.URL.Path,
+				"o_id", id,
+			)
+
+		default:
+			w.WriteHeader(http.StatusMethodNotAllowed)
+			return
+		}
+	})
+}
+
+func DraftsDocumentHandler(srv server.Server) http.Handler {
+ return http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
+ // Parse document ID and request type from the URL path.
+ docID, reqType, err := parseDocumentsURLPath(
+ r.URL.Path, "drafts")
+ if err != nil {
+ srv.Logger.Error("error parsing drafts URL path",
+ "error", err,
+ "path", r.URL.Path,
+ "method", r.Method,
+ )
+ http.Error(w, "Bad request", http.StatusBadRequest)
+ return
+ }
+
+ // Get document from database.
+ model := models.Document{
+ GoogleFileID: docID,
+ }
+ if err := model.Get(srv.DB); err != nil {
+ srv.Logger.Error("error getting document draft from database",
+ "error", err,
+ "path", r.URL.Path,
+ "method", r.Method,
+ "doc_id", docID,
+ )
+ http.Error(w, "Error requesting document draft",
+ http.StatusInternalServerError)
+ return
+ }
+
+ // Get reviews for the document.
+ var reviews models.DocumentReviews
+ if err := reviews.Find(srv.DB, models.DocumentReview{
+ Document: models.Document{
+ GoogleFileID: docID,
+ },
+ }); err != nil {
+ srv.Logger.Error("error getting reviews for document",
+ "error", err,
+ "method", r.Method,
+ "path", r.URL.Path,
+ "doc_id", docID,
+ )
+ return
+ }
+
+ // Convert database model to a document.
+ doc, err := document.NewFromDatabaseModel(
+ model, reviews)
+ if err != nil {
+ srv.Logger.Error("error converting database model to document type",
+ "error", err,
+ "method", r.Method,
+ "path", r.URL.Path,
+ "doc_id", docID,
+ )
+ http.Error(w, "Error accessing draft document",
+ http.StatusInternalServerError)
+ return
+ }
+
+ // Authorize request (only allow owners or contributors to get past this
+ // point in the handler). We further authorize some methods later that
+ // require owner access only.
+ userEmail := r.Context().Value("userEmail").(string)
+ var isOwner, isContributor bool
+ if doc.Owners[0] == userEmail {
+ isOwner = true
+ }
+ if contains(doc.Contributors, userEmail) {
+ isContributor = true
+ }
+ if !isOwner && !isContributor && !model.ShareableAsDraft {
+ http.Error(w,
+ "Only owners or contributors can access a non-shared draft document",
+ http.StatusUnauthorized)
+ return
+ }
+
+ // Pass request off to associated subcollection (part of the URL after the
+ // draft document ID) handler, if appropriate.
+ switch reqType {
+ case relatedResourcesDocumentSubcollectionRequestType:
+ documentsResourceRelatedResourcesHandler(
+ w, r, docID, *doc, srv.Config, srv.Logger, srv.AlgoSearch, srv.DB)
+ return
+ case shareableDocumentSubcollectionRequestType:
+ draftsShareableHandler(w, r, docID, *doc, *srv.Config, srv.Logger,
+ srv.AlgoSearch, srv.GWService, srv.DB)
+ return
+ }
+
+ switch r.Method {
+ case "GET":
+ now := time.Now()
+
+ // Get file from Google Drive so we can return the latest modified time.
+ file, err := srv.GWService.GetFile(docID)
+ if err != nil {
+ srv.Logger.Error("error getting document file from Google",
+ "error", err,
+ "path", r.URL.Path,
+ "method", r.Method,
+ "doc_id", docID,
+ )
+ http.Error(w,
+ "Error requesting document draft", http.StatusInternalServerError)
+ return
+ }
+
+ // Parse and set modified time.
+ modifiedTime, err := time.Parse(time.RFC3339Nano, file.ModifiedTime)
+ if err != nil {
+ srv.Logger.Error("error parsing modified time",
+ "error", err,
+ "path", r.URL.Path,
+ "method", r.Method,
+ "doc_id", docID,
+ )
+ http.Error(w,
+ "Error requesting document draft", http.StatusInternalServerError)
+ return
+ }
+ doc.ModifiedTime = modifiedTime.Unix()
+
+ // Get owner photo by searching Google Workspace directory.
+ ppl, err := srv.GWService.SearchPeople(userEmail, "photos")
+ if err != nil {
+ srv.Logger.Error(
+ "error searching directory for owner",
+ "error", err,
+ "path", r.URL.Path,
+ "method", r.Method,
+ "doc_id", docID,
+ )
+ }
+ if len(ppl) > 0 && len(ppl[0].Photos) > 0 {
+ doc.OwnerPhotos = []string{ppl[0].Photos[0].Url}
+ }
+
+ // Convert document to Algolia object because this is how it is expected
+ // by the frontend.
+ docObj, err := doc.ToAlgoliaObject(false)
+ if err != nil {
+ srv.Logger.Error("error converting document to Algolia object",
+ "error", err,
+ "method", r.Method,
+ "path", r.URL.Path,
+ "doc_id", docID,
+ )
+ http.Error(w, "Error getting document draft",
+ http.StatusInternalServerError)
+ return
+ }
+
+ // Write response.
+ w.Header().Set("Content-Type", "application/json")
+ w.WriteHeader(http.StatusOK)
+
+ enc := json.NewEncoder(w)
+ err = enc.Encode(docObj)
+ if err != nil {
+ srv.Logger.Error("error encoding document draft",
+ "error", err,
+ "method", r.Method,
+ "path", r.URL.Path,
+ "doc_id", docID,
+ )
+ http.Error(w, "Error requesting document draft",
+ http.StatusInternalServerError)
+ return
+ }
+
+ // Update recently viewed documents if this is a document view event. The
+ // Add-To-Recently-Viewed header is set in the request from the frontend
+ // to differentiate between document views and requests to only retrieve
+ // document metadata.
+ if r.Header.Get("Add-To-Recently-Viewed") != "" {
+ if err := updateRecentlyViewedDocs(
+ userEmail, docID, srv.DB, now,
+ ); err != nil {
+ // If we get an error, log it but don't return an error response
+ // because this would degrade UX.
+ // TODO: change this log back to an error when this handles incomplete
+ // data in the database.
+ srv.Logger.Warn("error updating recently viewed docs",
+ "error", err,
+ "path", r.URL.Path,
+ "method", r.Method,
+ "doc_id", docID,
+ )
+ return
+ }
+ }
+
+ srv.Logger.Info("retrieved document draft",
+ "method", r.Method,
+ "path", r.URL.Path,
+ "doc_id", docID,
+ )
+
+ // Request post-processing.
+ go func() {
+ // Compare Algolia and database documents to find data inconsistencies.
+ // Get document object from Algolia.
+ var algoDoc map[string]any
+ err = srv.AlgoSearch.Drafts.GetObject(docID, &algoDoc)
+ if err != nil {
+ // Only warn because we might be in the process of saving the Algolia
+ // object for a new draft.
+ srv.Logger.Warn("error getting Algolia object for data comparison",
+ "error", err,
+ "method", r.Method,
+ "path", r.URL.Path,
+ "doc_id", docID,
+ )
+ return
+ }
+ // Get document from database.
+ dbDoc := models.Document{
+ GoogleFileID: docID,
+ }
+ if err := dbDoc.Get(srv.DB); err != nil {
+ srv.Logger.Error(
+ "error getting document from database for data comparison",
+ "error", err,
+ "path", r.URL.Path,
+ "method", r.Method,
+ "doc_id", docID,
+ )
+ return
+ }
+ // Get all reviews for the document.
+ var reviews models.DocumentReviews
+ if err := reviews.Find(srv.DB, models.DocumentReview{
+ Document: models.Document{
+ GoogleFileID: docID,
+ },
+ }); err != nil {
+ srv.Logger.Error(
+ "error getting all reviews for document for data comparison",
+ "error", err,
+ "method", r.Method,
+ "path", r.URL.Path,
+ "doc_id", docID,
+ )
+ return
+ }
+ if err := compareAlgoliaAndDatabaseDocument(
+ algoDoc, dbDoc, reviews, srv.Config.DocumentTypes.DocumentType,
+ ); err != nil {
+ srv.Logger.Warn(
+ "inconsistencies detected between Algolia and database docs",
+ "error", err,
+ "method", r.Method,
+ "path", r.URL.Path,
+ "doc_id", docID,
+ )
+ }
+ }()
+
+ case "DELETE":
+ // Authorize request.
+ if !isOwner {
+ http.Error(w,
+ "Only owners can delete a draft document",
+ http.StatusUnauthorized)
+ return
+ }
+
+ // Delete document in Google Drive.
+ err = srv.GWService.DeleteFile(docID)
+ if err != nil {
+ srv.Logger.Error(
+ "error deleting document",
+ "error", err,
+ "method", r.Method,
+ "path", r.URL.Path,
+ "doc_id", docID,
+ )
+ http.Error(w, "Error deleting document draft",
+ http.StatusInternalServerError)
+ return
+ }
+
+ // Delete object in Algolia.
+ res, err := srv.AlgoWrite.Drafts.DeleteObject(docID)
+ if err != nil {
+ srv.Logger.Error(
+ "error deleting document draft from Algolia",
+ "error", err,
+ "method", r.Method,
+ "path", r.URL.Path,
+ "doc_id", docID,
+ )
+ http.Error(w, "Error deleting document draft",
+ http.StatusInternalServerError)
+ return
+ }
+ err = res.Wait()
+ if err != nil {
+ srv.Logger.Error(
+ "error deleting document draft from Algolia",
+ "error", err,
+ "method", r.Method,
+ "path", r.URL.Path,
+ "doc_id", docID,
+ )
+ http.Error(w, "Error deleting document draft",
+ http.StatusInternalServerError)
+ return
+ }
+
+ // Delete document in the database.
+ d := models.Document{
+ GoogleFileID: docID,
+ }
+ if err := d.Delete(srv.DB); err != nil {
+ srv.Logger.Error(
+ "error deleting document draft in database",
+ "error", err,
+ "method", r.Method,
+ "path", r.URL.Path,
+ "doc_id", docID,
+ )
+ http.Error(w, "Error deleting document draft",
+ http.StatusInternalServerError)
+ return
+ }
+
+ resp := &DraftsResponse{
+ ID: docID,
+ }
+
+ // Write response.
+ w.Header().Set("Content-Type", "application/json")
+ w.WriteHeader(http.StatusOK)
+
+ enc := json.NewEncoder(w)
+ err = enc.Encode(resp)
+ if err != nil {
+ srv.Logger.Error(
+ "error encoding response",
+ "error", err,
+ "method", r.Method,
+ "path", r.URL.Path,
+ "doc_id", docID,
+ )
+ http.Error(w, "Error deleting document draft",
+ http.StatusInternalServerError)
+ return
+ }
+
+ case "PATCH":
+ // Decode request. The request struct validates that the request only
+ // contains fields that are allowed to be patched.
+ var req DraftsPatchRequest
+ if err := decodeRequest(r, &req); err != nil {
+ srv.Logger.Error("error decoding draft patch request", "error", err)
+ http.Error(w, fmt.Sprintf("Bad request: %q", err),
+ http.StatusBadRequest)
+ return
+ }
+
+ // Validate product if it is in the patch request.
+ var productAbbreviation string
+ if req.Product != nil && *req.Product != "" {
+ p := models.Product{Name: *req.Product}
+ if err := p.Get(srv.DB); err != nil {
+ srv.Logger.Error("error getting product",
+ "error", err,
+ "method", r.Method,
+ "path", r.URL.Path,
+ "product", req.Product,
+ "doc_id", docID)
+ http.Error(w, "Bad request: invalid product",
+ http.StatusBadRequest)
+ return
+ }
+
+ // Set product abbreviation because we use this later to update the
+ // doc number in the Algolia object.
+ productAbbreviation = p.Abbreviation
+ }
+
+ // Validate custom fields.
+ if req.CustomFields != nil {
+ for _, cf := range *req.CustomFields {
+ cef, ok := doc.CustomEditableFields[cf.Name]
+ if !ok {
+ srv.Logger.Error("custom field not found",
+ "error", err,
+ "method", r.Method,
+ "path", r.URL.Path,
+ "custom_field", cf.Name,
+ "doc_id", docID)
+ http.Error(w, "Bad request: invalid custom field",
+ http.StatusBadRequest)
+ return
+ }
+ if cf.DisplayName != cef.DisplayName {
+ srv.Logger.Error("invalid custom field display name",
+ "error", err,
+ "method", r.Method,
+ "path", r.URL.Path,
+ "custom_field", cf.Name,
+ "custom_field_display_name", cf.DisplayName,
+ "doc_id", docID)
+ http.Error(w, "Bad request: invalid custom field display name",
+ http.StatusBadRequest)
+ return
+ }
+ if cf.Type != cef.Type {
+ srv.Logger.Error("invalid custom field type",
+ "error", err,
+ "method", r.Method,
+ "path", r.URL.Path,
+ "custom_field", cf.Name,
+ "custom_field_type", cf.Type,
+ "doc_id", docID)
+ http.Error(w, "Bad request: invalid custom field type",
+ http.StatusBadRequest)
+ return
+ }
+ }
+ }
+
+ // Check if document is locked.
+ locked, err := hcd.IsLocked(docID, srv.DB, srv.GWService, srv.Logger)
+ if err != nil {
+ srv.Logger.Error("error checking document locked status",
+ "error", err,
+ "method", r.Method,
+ "path", r.URL.Path,
+ "doc_id", docID,
+ )
+ http.Error(w, "Error getting document status", http.StatusNotFound)
+ return
+ }
+ // Don't continue if document is locked.
+ if locked {
+ http.Error(w, "Document is locked", http.StatusLocked)
+ return
+ }
+
+ // Compare contributors in request and stored object in Algolia
+ // before we save the patched objected
+ // Find out contributors to share the document with
+ var contributorsToAddSharing []string
+ var contributorsToRemoveSharing []string
+ if req.Contributors != nil {
+ if len(doc.Contributors) == 0 && len(*req.Contributors) != 0 {
+ // If there are no contributors of the document
+ // add the contributors in the request
+ contributorsToAddSharing = *req.Contributors
+ } else if len(*req.Contributors) != 0 {
+ // Only compare when there are stored contributors
+ // and contributors in the request
+ contributorsToAddSharing = compareSlices(
+ doc.Contributors, *req.Contributors)
+ }
+ // Find out contributors to remove from sharing the document
+ // var contributorsToRemoveSharing []string
+ // TODO: figure out how we want to handle user removing all contributors
+ // from the sidebar select
+ if len(doc.Contributors) != 0 && len(*req.Contributors) != 0 {
+ // Compare contributors when there are stored contributors
+ // and there are contributors in the request
+ contributorsToRemoveSharing = compareSlices(
+ *req.Contributors, doc.Contributors)
+ }
+ }
+
+ // Share file with contributors.
+ // Google Drive API limitation is that you can only share files with one
+ // user at a time.
+ for _, c := range contributorsToAddSharing {
+ if err := srv.GWService.ShareFile(docID, c, "writer"); err != nil {
+ srv.Logger.Error("error sharing file with the contributor",
+ "error", err,
+ "method", r.Method,
+ "path", r.URL.Path,
+ "doc_id", docID,
+ "contributor", c)
+ http.Error(w, "Error patching document draft",
+ http.StatusInternalServerError)
+ return
+ }
+ }
+ if len(contributorsToAddSharing) > 0 {
+ srv.Logger.Info("shared document with contributors",
+ "method", r.Method,
+ "path", r.URL.Path,
+ "contributors_count", len(contributorsToAddSharing),
+ )
+ }
+
+ // Remove contributors from file.
+ // This unfortunately needs to be done one user at a time
+ for _, c := range contributorsToRemoveSharing {
+ // Only remove contributor if the email
+ // associated with the permission doesn't
+ // match owner email(s).
+ if !contains(doc.Owners, c) {
+ if err := removeSharing(srv.GWService, docID, c); err != nil {
+ srv.Logger.Error("error removing contributor from file",
+ "error", err,
+ "method", r.Method,
+ "path", r.URL.Path,
+ "doc_id", docID,
+ "contributor", c)
+ http.Error(w, "Error patching document draft",
+ http.StatusInternalServerError)
+ return
+ }
+ }
+ }
+ if len(contributorsToRemoveSharing) > 0 {
+ srv.Logger.Info("removed contributors from document",
+ "method", r.Method,
+ "path", r.URL.Path,
+ "contributors_count", len(contributorsToRemoveSharing),
+ )
+ }
+
+ // Approvers.
+ if req.Approvers != nil {
+ doc.Approvers = *req.Approvers
+
+ var approvers []*models.User
+ for _, a := range doc.Approvers {
+ u := models.User{
+ EmailAddress: a,
+ }
+ approvers = append(approvers, &u)
+ }
+ model.Approvers = approvers
+ }
+
+ // Contributors.
+ if req.Contributors != nil {
+ doc.Contributors = *req.Contributors
+
+ var contributors []*models.User
+ for _, a := range doc.Contributors {
+ u := &models.User{
+ EmailAddress: a,
+ }
+ contributors = append(contributors, u)
+ }
+ model.Contributors = contributors
+ }
+
+ // Custom fields.
+ if req.CustomFields != nil {
+ for _, cf := range *req.CustomFields {
+ switch cf.Type {
+ case "STRING":
+ if v, ok := cf.Value.(string); ok {
+ if err := doc.UpsertCustomField(cf); err != nil {
+ srv.Logger.Error("error upserting custom string field",
+ "error", err,
+ "method", r.Method,
+ "path", r.URL.Path,
+ "custom_field", cf.Name,
+ "doc_id", docID,
+ )
+ http.Error(w,
+ "Error patching document",
+ http.StatusInternalServerError)
+ return
+ }
+
+ model.CustomFields = models.UpsertStringDocumentCustomField(
+ model.CustomFields,
+ doc.DocType,
+ cf.DisplayName,
+ v,
+ )
+ } else {
+ srv.Logger.Error("invalid value type for string custom field",
+ "error", err,
+ "method", r.Method,
+ "path", r.URL.Path,
+ "custom_field", cf.Name,
+ "doc_id", docID)
+ http.Error(w,
+ fmt.Sprintf(
+ "Bad request: invalid value type for custom field %q",
+ cf.Name,
+ ),
+ http.StatusBadRequest)
+ return
+ }
+ case "PEOPLE":
+ if reflect.TypeOf(cf.Value).Kind() != reflect.Slice {
+ srv.Logger.Error("invalid value type for people custom field",
+ "error", err,
+ "method", r.Method,
+ "path", r.URL.Path,
+ "custom_field", cf.Name,
+ "doc_id", docID)
+ http.Error(w,
+ fmt.Sprintf(
+ "Bad request: invalid value type for custom field %q",
+ cf.Name,
+ ),
+ http.StatusBadRequest)
+ return
+ }
+ cfVal := []string{}
+ for _, v := range cf.Value.([]any) {
+ if v, ok := v.(string); ok {
+ cfVal = append(cfVal, v)
+ } else {
+ srv.Logger.Error("invalid value type for people custom field",
+ "error", err,
+ "method", r.Method,
+ "path", r.URL.Path,
+ "custom_field", cf.Name,
+ "doc_id", docID)
+ http.Error(w,
+ fmt.Sprintf(
+ "Bad request: invalid value type for custom field %q",
+ cf.Name,
+ ),
+ http.StatusBadRequest)
+ return
+ }
+ }
+
+ if err := doc.UpsertCustomField(cf); err != nil {
+ srv.Logger.Error("error upserting custom people field",
+ "error", err,
+ "method", r.Method,
+ "path", r.URL.Path,
+ "custom_field", cf.Name,
+ "doc_id", docID,
+ )
+ http.Error(w,
+ "Error patching document",
+ http.StatusInternalServerError)
+ return
+ }
+
+ model.CustomFields, err = models.
+ UpsertStringSliceDocumentCustomField(
+ model.CustomFields,
+ doc.DocType,
+ cf.DisplayName,
+ cfVal,
+ )
+ if err != nil {
+ srv.Logger.Error("invalid value type for people custom field",
+ "error", err,
+ "method", r.Method,
+ "path", r.URL.Path,
+ "custom_field", cf.Name,
+ "doc_id", docID)
+ http.Error(w,
+ fmt.Sprintf(
+ "Bad request: invalid value type for custom field %q",
+ cf.Name,
+ ),
+ http.StatusBadRequest)
+ return
+ }
+ default:
+ srv.Logger.Error("invalid custom field type",
+ "error", err,
+ "method", r.Method,
+ "path", r.URL.Path,
+ "custom_field", cf.Name,
+ "custom_field_type", cf.Type,
+ "doc_id", docID)
+ http.Error(w,
+ fmt.Sprintf(
+ "Bad request: invalid type for custom field %q",
+ cf.Name,
+ ),
+ http.StatusBadRequest)
+ return
+ }
+ }
+ }
+
+ // Make sure all custom fields in the database model have the document ID.
+ for _, cf := range model.CustomFields {
+ cf.DocumentID = model.ID
+ }
+
+ // Document modified time.
+ model.DocumentModifiedAt = time.Unix(doc.ModifiedTime, 0)
+
+ // Product.
+ if req.Product != nil {
+ doc.Product = *req.Product
+ model.Product = models.Product{Name: *req.Product}
+
+ // Update doc number in document.
+ doc.DocNumber = fmt.Sprintf("%s-???", productAbbreviation)
+ }
+
+ // Summary.
+ if req.Summary != nil {
+ doc.Summary = *req.Summary
+ // model.Summary = *req.Summary
+ model.Summary = req.Summary
+ }
+
+ // Title.
+ if req.Title != nil {
+ doc.Title = *req.Title
+ model.Title = *req.Title
+ }
+
+ // Update document in the database.
+ if err := model.Upsert(srv.DB); err != nil {
+ srv.Logger.Error("error updating document in the database",
+ "error", err,
+ "method", r.Method,
+ "path", r.URL.Path,
+ "doc_id", docID,
+ )
+ http.Error(w, "Error updating document draft",
+ http.StatusInternalServerError)
+ return
+ }
+ // }
+
+ // Replace the doc header.
+ if err := doc.ReplaceHeader(
+ srv.Config.BaseURL, true, srv.GWService,
+ ); err != nil {
+ srv.Logger.Error("error replacing draft doc header",
+ "error", err,
+ "method", r.Method,
+ "path", r.URL.Path,
+ "doc_id", docID,
+ )
+ http.Error(w, "Error replacing header of document draft",
+ http.StatusInternalServerError)
+ return
+ }
+
+ // Rename file with new title.
+ srv.GWService.RenameFile(docID,
+ fmt.Sprintf("[%s] %s", doc.DocNumber, doc.Title))
+
+ w.WriteHeader(http.StatusOK)
+
+ srv.Logger.Info("patched draft document",
+ "method", r.Method,
+ "path", r.URL.Path,
+ "doc_id", docID,
+ )
+
+ // Request post-processing.
+ go func() {
+ // Convert document to Algolia object.
+ docObj, err := doc.ToAlgoliaObject(true)
+ if err != nil {
+ srv.Logger.Error("error converting document to Algolia object",
+ "error", err,
+ "method", r.Method,
+ "path", r.URL.Path,
+ "doc_id", docID,
+ )
+ return
+ }
+
+ // Save new modified draft doc object in Algolia.
+ res, err := srv.AlgoWrite.Drafts.SaveObject(docObj)
+ if err != nil {
+ srv.Logger.Error("error saving patched draft doc in Algolia",
+ "error", err,
+ "method", r.Method,
+ "path", r.URL.Path,
+ "doc_id", docID,
+ )
+ return
+ }
+ err = res.Wait()
+ if err != nil {
+ srv.Logger.Error("error saving patched draft doc in Algolia",
+ "error", err,
+ "method", r.Method,
+ "path", r.URL.Path,
+ "doc_id", docID,
+ )
+ return
+ }
+
+ // Compare Algolia and database documents to find data inconsistencies.
+ // Get document object from Algolia.
+ var algoDoc map[string]any
+ err = srv.AlgoSearch.Drafts.GetObject(docID, &algoDoc)
+ if err != nil {
+ srv.Logger.Error("error getting Algolia object for data comparison",
+ "error", err,
+ "method", r.Method,
+ "path", r.URL.Path,
+ "doc_id", docID,
+ )
+ return
+ }
+ // Get document from database.
+ dbDoc := models.Document{
+ GoogleFileID: docID,
+ }
+ if err := dbDoc.Get(srv.DB); err != nil {
+ srv.Logger.Error(
+ "error getting document from database for data comparison",
+ "error", err,
+ "path", r.URL.Path,
+ "method", r.Method,
+ "doc_id", docID,
+ )
+ return
+ }
+ // Get all reviews for the document.
+ var reviews models.DocumentReviews
+ if err := reviews.Find(srv.DB, models.DocumentReview{
+ Document: models.Document{
+ GoogleFileID: docID,
+ },
+ }); err != nil {
+ srv.Logger.Error(
+ "error getting all reviews for document for data comparison",
+ "error", err,
+ "method", r.Method,
+ "path", r.URL.Path,
+ "doc_id", docID,
+ )
+ return
+ }
+ if err := compareAlgoliaAndDatabaseDocument(
+ algoDoc, dbDoc, reviews, srv.Config.DocumentTypes.DocumentType,
+ ); err != nil {
+ srv.Logger.Warn(
+ "inconsistencies detected between Algolia and database docs",
+ "error", err,
+ "method", r.Method,
+ "path", r.URL.Path,
+ "doc_id", docID,
+ )
+ }
+ }()
+
+ default:
+ w.WriteHeader(http.StatusMethodNotAllowed)
+ return
+ }
+ })
+}
+
+// getDocTypeTemplate returns the file ID of the template for a specified
+// document type or an empty string if not found.
+func getDocTypeTemplate(
+ docTypes []*config.DocumentType,
+ docType string,
+) string {
+ template := ""
+
+ for _, t := range docTypes {
+ if t.Name == docType {
+ template = t.Template
+ break
+ }
+ }
+
+ return template
+}
+
+// validateDocType returns true if the name (docType) is contained in a
+// slice of configured document types.
+func validateDocType(
+ docTypes []*config.DocumentType,
+ docType string,
+) bool {
+ for _, t := range docTypes {
+ if t.Name == docType {
+ return true
+ }
+ }
+
+ return false
+}
+
+// removeSharing lists permissions for a document and then
+// deletes the permission for the supplied user email
+func removeSharing(s *gw.Service, docID, email string) error {
+ permissions, err := s.ListPermissions(docID)
+ if err != nil {
+ return err
+ }
+ for _, p := range permissions {
+ if p.EmailAddress == email {
+ return s.DeletePermission(docID, p.Id)
+ }
+ }
+ return nil
+}
diff --git a/internal/api/v2/drafts_shareable.go b/internal/api/v2/drafts_shareable.go
new file mode 100644
index 000000000..efb30571f
--- /dev/null
+++ b/internal/api/v2/drafts_shareable.go
@@ -0,0 +1,192 @@
+package api
+
+import (
+ "encoding/json"
+ "net/http"
+
+ "github.com/hashicorp-forge/hermes/internal/config"
+ "github.com/hashicorp-forge/hermes/pkg/algolia"
+ "github.com/hashicorp-forge/hermes/pkg/document"
+ gw "github.com/hashicorp-forge/hermes/pkg/googleworkspace"
+ "github.com/hashicorp-forge/hermes/pkg/models"
+ "github.com/hashicorp/go-hclog"
+ "gorm.io/gorm"
+)
+
+type draftsShareablePutRequest struct {
+ IsShareable *bool `json:"isShareable"`
+}
+
+type draftsShareableGetResponse struct {
+ IsShareable bool `json:"isShareable"`
+}
+
+func draftsShareableHandler(
+ w http.ResponseWriter,
+ r *http.Request,
+ docID string,
+ doc document.Document,
+ cfg config.Config,
+ l hclog.Logger,
+ algoRead *algolia.Client,
+ goog *gw.Service,
+ db *gorm.DB,
+) {
+ switch r.Method {
+ case "GET":
+ // Get document from database.
+ d := models.Document{
+ GoogleFileID: docID,
+ }
+ if err := d.Get(db); err != nil {
+ l.Error("error getting document from database",
+ "error", err,
+ "path", r.URL.Path,
+ "method", r.Method,
+ "doc_id", docID,
+ )
+ http.Error(w, "Error accessing document",
+ http.StatusInternalServerError)
+ return
+ }
+
+ resp := draftsShareableGetResponse{
+ IsShareable: d.ShareableAsDraft,
+ }
+
+ // Write response.
+ w.Header().Set("Content-Type", "application/json")
+ w.WriteHeader(http.StatusOK)
+ enc := json.NewEncoder(w)
+ if err := enc.Encode(resp); err != nil {
+ l.Error("error encoding response",
+ "error", err,
+ "doc_id", docID,
+ )
+ http.Error(w, "Error building response", http.StatusInternalServerError)
+ return
+ }
+
+ case "PUT":
+ // Authorize request (only the document owner is authorized).
+ userEmail := r.Context().Value("userEmail").(string)
+ if doc.Owners[0] != userEmail {
+ http.Error(w, "Only the document owner can change shareable settings",
+ http.StatusForbidden)
+ return
+ }
+
+ // Decode request.
+ var req draftsShareablePutRequest
+ if err := decodeRequest(r, &req); err != nil {
+ l.Error("error decoding request",
+ "error", err,
+ "path", r.URL.Path,
+ "method", r.Method,
+ "doc_id", docID,
+ )
+ http.Error(w, "Bad request", http.StatusBadRequest)
+ return
+ }
+
+ // Validate request.
+ if req.IsShareable == nil {
+ l.Warn("bad request: missing required 'isShareable' field",
+ "path", r.URL.Path,
+ "method", r.Method,
+ "doc_id", docID,
+ )
+ http.Error(w,
+ "Bad request: missing required 'isShareable' field",
+ http.StatusBadRequest)
+ return
+ }
+
+ // Get document from database.
+ doc := models.Document{
+ GoogleFileID: docID,
+ }
+ if err := doc.Get(db); err != nil {
+ l.Error("error getting document from database",
+ "error", err,
+ "path", r.URL.Path,
+ "method", r.Method,
+ "doc_id", docID,
+ )
+ http.Error(w, "Error accessing document",
+ http.StatusInternalServerError)
+ return
+ }
+
+ // Find out if the draft is already shared with the domain.
+ perms, err := goog.ListPermissions(docID)
+ if err != nil {
+ l.Error("error listing Google Drive permissions",
+ "error", err,
+ "path", r.URL.Path,
+ "method", r.Method,
+ "doc_id", docID,
+ )
+ http.Error(w,
+ "Error updating document permissions",
+ http.StatusInternalServerError)
+ return
+ }
+ alreadySharedPermIDs := []string{}
+ for _, p := range perms {
+ isInherited := false
+ for _, pd := range p.PermissionDetails {
+ if pd.Inherited {
+ isInherited = true
+ }
+ }
+ if p.Domain == cfg.GoogleWorkspace.Domain &&
+ p.Role == "commenter" &&
+ !isInherited {
+ alreadySharedPermIDs = append(alreadySharedPermIDs, p.Id)
+ }
+ }
+
+ // Update file permissions, if necessary.
+ if *req.IsShareable {
+ if len(alreadySharedPermIDs) == 0 {
+ // File is not already shared with domain, so share it.
+ goog.ShareFileWithDomain(docID, cfg.GoogleWorkspace.Domain, "commenter")
+ }
+ } else {
+ for _, id := range alreadySharedPermIDs {
+ // File is already shared with domain, so remove the permission.
+ goog.DeletePermission(docID, id)
+ }
+ }
+
+ // Update ShareableAsDraft for document in the database.
+ if err := db.Model(&doc).
+ // We need to update using Select because ShareableAsDraft is a
+ // boolean.
+ Select("ShareableAsDraft").
+ Updates(models.Document{ShareableAsDraft: *req.IsShareable}).
+ Error; err != nil {
+ l.Error("error updating ShareableAsDraft in the database",
+ "error", err,
+ "path", r.URL.Path,
+ "method", r.Method,
+ "doc_id", docID,
+ )
+ http.Error(w, "Error updating document draft",
+ http.StatusInternalServerError)
+ return
+ }
+
+ l.Info("updated ShareableAsDraft for document",
+ "path", r.URL.Path,
+ "method", r.Method,
+ "doc_id", docID,
+ "shareable_as_draft", doc.ShareableAsDraft,
+ )
+
+ default:
+ w.WriteHeader(http.StatusMethodNotAllowed)
+ return
+ }
+}
diff --git a/internal/api/v2/helpers.go b/internal/api/v2/helpers.go
new file mode 100644
index 000000000..6c47d0173
--- /dev/null
+++ b/internal/api/v2/helpers.go
@@ -0,0 +1,604 @@
+package api
+
+import (
+ "encoding/json"
+ "fmt"
+ "io"
+ "net/http"
+ "reflect"
+ "regexp"
+ "strings"
+
+ "github.com/hashicorp-forge/hermes/internal/config"
+ "github.com/hashicorp-forge/hermes/pkg/models"
+ "github.com/hashicorp/go-hclog"
+ "github.com/hashicorp/go-multierror"
+ "github.com/iancoleman/strcase"
+ "github.com/stretchr/testify/assert"
+)
+
+// contains returns true if a string is present in a slice of strings.
+func contains(values []string, s string) bool {
+ for _, v := range values {
+ if s == v {
+ return true
+ }
+ }
+ return false
+}
+
+// compareSlices compares the first slice with the second
+// and returns the elements that exist in the second slice
+// that don't exist in the first
+func compareSlices(a, b []string) []string {
+ // Create a map with the length of slice "a"
+ tempA := make(map[string]bool, len(a))
+ for _, j := range a {
+ tempA[j] = true
+ }
+
+ diffElems := []string{}
+ for _, k := range b {
+ // If elements in slice "b" are
+ // not present in slice "a" then
+ // append to diffElems slice
+ if !tempA[k] {
+ diffElems = append(diffElems, k)
+ }
+ }
+
+ return diffElems
+}
+
+// decodeRequest decodes the JSON contents of a HTTP request body to a request
+// struct. An error is returned if the request contains fields that do not exist
+// in the request struct.
+func decodeRequest(r *http.Request, reqStruct interface{}) error {
+ dec := json.NewDecoder(r.Body)
+ dec.DisallowUnknownFields()
+ for {
+ if err := dec.Decode(&reqStruct); err == io.EOF {
+ break
+ } else if err != nil {
+ return err
+ }
+ }
+
+ return nil
+}
+
+// parseResourceIDFromURL parses a URL path with the format
+// "/api/v2/{apiPath}/{resourceID}" and returns the resource ID.
+func parseResourceIDFromURL(url, apiPath string) (string, error) {
+ // Remove API path from URL.
+ url = strings.TrimPrefix(url, fmt.Sprintf("/api/v2/%s", apiPath))
+
+ // Remove empty entries and validate path.
+ urlPath := strings.Split(url, "/")
+ var resultPath []string
+ for _, v := range urlPath {
+ // Only append non-empty values, this removes any empty strings in the
+ // slice.
+ if v != "" {
+ resultPath = append(resultPath, v)
+ }
+ }
+ resultPathLen := len(resultPath)
+ // Only allow 1 value to be set in the resultPath slice. For example, if the
+ // urlPath is set to "/{document_id}" then the resultPath slice would be
+ // ["{document_id}"].
+ if resultPathLen > 1 {
+ return "", fmt.Errorf("invalid URL path")
+ }
+ // If there are no entries in the resultPath slice, then there was no resource
+ // ID set in the URL path. Return an empty string.
+ if resultPathLen == 0 {
+ return "", fmt.Errorf("no document ID set in url path")
+ }
+
+ // Return resource ID.
+ return resultPath[0], nil
+}
+
+// respondError responds to an HTTP request and logs an error.
+func respondError(
+ w http.ResponseWriter, r *http.Request, l hclog.Logger,
+ httpCode int, userErrMsg, logErrMsg string, err error,
+ extraArgs ...interface{},
+) {
+ l.Error(logErrMsg,
+ append([]interface{}{
+ "error", err,
+ "method", r.Method,
+ "path", r.URL.Path,
+ }, extraArgs...)...,
+ )
+ http.Error(w, userErrMsg, httpCode)
+}
+
+// fakeT fulfills the assert.TestingT interface so we can use
+// assert.ElementsMatch.
+type fakeT struct{}
+
+func (t fakeT) Errorf(string, ...interface{}) {}
+
+// compareAlgoliaAndDatabaseDocument compares data for a document stored in
+// Algolia and the database to determine any inconsistencies, which are returned
+// back as a (multierror) error.
+func compareAlgoliaAndDatabaseDocument(
+ algoDoc map[string]any,
+ dbDoc models.Document,
+ dbDocReviews models.DocumentReviews,
+ docTypes []*config.DocumentType,
+) error {
+
+ var result *multierror.Error
+
+ // Compare objectID.
+ algoGoogleFileID, err := getStringValue(algoDoc, "objectID")
+ if err != nil {
+ result = multierror.Append(
+ result, fmt.Errorf("error getting objectID value: %w", err))
+ }
+ if algoGoogleFileID != dbDoc.GoogleFileID {
+ result = multierror.Append(result,
+ fmt.Errorf(
+ "objectID not equal, algolia=%v, db=%v",
+ algoGoogleFileID, dbDoc.GoogleFileID),
+ )
+ }
+
+ // Compare title.
+ algoTitle, err := getStringValue(algoDoc, "title")
+ if err != nil {
+ result = multierror.Append(
+ result, fmt.Errorf("error getting title value: %w", err))
+ } else {
+ if algoTitle != dbDoc.Title {
+ result = multierror.Append(result,
+ fmt.Errorf(
+ "title not equal, algolia=%v, db=%v",
+ algoTitle, dbDoc.Title),
+ )
+ }
+ }
+
+ // Compare docType.
+ algoDocType, err := getStringValue(algoDoc, "docType")
+ if err != nil {
+ result = multierror.Append(
+ result, fmt.Errorf("error getting docType value: %w", err))
+ } else {
+ dbDocType := dbDoc.DocumentType.Name
+ if algoDocType != dbDocType {
+ result = multierror.Append(result,
+ fmt.Errorf(
+ "docType not equal, algolia=%v, db=%v",
+ algoDocType, dbDocType),
+ )
+ }
+ }
+
+ // Compare docNumber.
+ algoDocNumber, err := getStringValue(algoDoc, "docNumber")
+ if err != nil {
+ result = multierror.Append(
+ result, fmt.Errorf("error getting docNumber value: %w", err))
+ } else {
+ // Replace "-???" (how draft doc numbers are defined in Algolia) with a
+ // zero.
+ re := regexp.MustCompile(`-\?\?\?$`)
+ algoDocNumber = re.ReplaceAllString(algoDocNumber, "-000")
+
+ // Note that we pad the database document number to three digits here like
+ // we do when assigning a document number when a doc review is requested.
+ dbDocNumber := fmt.Sprintf(
+ "%s-%03d", dbDoc.Product.Abbreviation, dbDoc.DocumentNumber)
+ if algoDocNumber != dbDocNumber {
+ result = multierror.Append(result,
+ fmt.Errorf(
+ "docNumber not equal, algolia=%v, db=%v",
+ algoDocNumber, dbDocNumber),
+ )
+ }
+ }
+
+ // Compare appCreated.
+ algoAppCreated, err := getBooleanValue(algoDoc, "appCreated")
+ if err != nil {
+ result = multierror.Append(
+ result, fmt.Errorf("error getting appCreated value: %w", err))
+ } else {
+ dbAppCreated := !dbDoc.Imported
+ if algoAppCreated != dbAppCreated {
+ result = multierror.Append(result,
+ fmt.Errorf(
+ "appCreated not equal, algolia=%v, db=%v",
+ algoAppCreated, dbAppCreated),
+ )
+ }
+ }
+
+ // Compare approvedBy.
+ algoApprovedBy, err := getStringSliceValue(algoDoc, "approvedBy")
+ if err != nil {
+ result = multierror.Append(
+ result, fmt.Errorf("error getting approvedBy value: %w", err))
+ }
+ dbApprovedBy := []string{}
+ for _, r := range dbDocReviews {
+ if r.Status == models.ApprovedDocumentReviewStatus {
+ dbApprovedBy = append(dbApprovedBy, r.User.EmailAddress)
+ }
+ }
+ if !assert.ElementsMatch(fakeT{}, algoApprovedBy, dbApprovedBy) {
+ result = multierror.Append(result,
+ fmt.Errorf(
+ "approvedBy not equal, algolia=%v, db=%v",
+ algoApprovedBy, dbApprovedBy),
+ )
+ }
+
+ // Compare approvers.
+ algoApprovers, err := getStringSliceValue(algoDoc, "approvers")
+ if err != nil {
+ result = multierror.Append(
+ result, fmt.Errorf("error getting approvers value: %w", err))
+ }
+ dbApprovers := []string{}
+ for _, a := range dbDoc.Approvers {
+ dbApprovers = append(dbApprovers, a.EmailAddress)
+ }
+ if !assert.ElementsMatch(fakeT{}, algoApprovers, dbApprovers) {
+ result = multierror.Append(result,
+ fmt.Errorf(
+ "approvers not equal, algolia=%v, db=%v",
+ algoApprovers, dbApprovers),
+ )
+ }
+
+ // Compare changesRequestedBy.
+ algoChangesRequestedBy, err := getStringSliceValue(
+ algoDoc, "changesRequestedBy")
+ if err != nil {
+ result = multierror.Append(
+ result, fmt.Errorf("error getting changesRequestedBy value: %w", err))
+ }
+ dbChangesRequestedBy := []string{}
+ for _, r := range dbDocReviews {
+ if r.Status == models.ChangesRequestedDocumentReviewStatus {
+ dbChangesRequestedBy = append(dbChangesRequestedBy, r.User.EmailAddress)
+ }
+ }
+ if !assert.ElementsMatch(
+ fakeT{}, algoChangesRequestedBy, dbChangesRequestedBy,
+ ) {
+ result = multierror.Append(result,
+ fmt.Errorf(
+ "changesRequestedBy not equal, algolia=%v, db=%v",
+ algoChangesRequestedBy, dbChangesRequestedBy),
+ )
+ }
+
+ // Compare contributors.
+ algoContributors, err := getStringSliceValue(algoDoc, "contributors")
+ if err != nil {
+ result = multierror.Append(
+ result, fmt.Errorf("error getting contributors value: %w", err))
+ }
+ dbContributors := []string{}
+ for _, c := range dbDoc.Contributors {
+ dbContributors = append(dbContributors, c.EmailAddress)
+ }
+ if !assert.ElementsMatch(fakeT{}, algoContributors, dbContributors) {
+ result = multierror.Append(result,
+ fmt.Errorf(
+ "contributors not equal, algolia=%v, db=%v",
+ algoContributors, dbContributors),
+ )
+ }
+
+ // Compare createdTime.
+ algoCreatedTime, err := getInt64Value(algoDoc, "createdTime")
+ if err != nil {
+ result = multierror.Append(
+ result, fmt.Errorf("error getting createdTime value: %w", err))
+ } else {
+ dbCreatedTime := dbDoc.DocumentCreatedAt.Unix()
+ if algoCreatedTime != dbCreatedTime {
+ result = multierror.Append(result,
+ fmt.Errorf(
+ "createdTime not equal, algolia=%v, db=%v",
+ algoCreatedTime, dbCreatedTime),
+ )
+ }
+ }
+
+ // Compare custom fields.
+ foundDocType := false
+ for _, dt := range docTypes {
+ if dt.Name == algoDocType {
+ foundDocType = true
+ for _, cf := range dt.CustomFields {
+ algoCFName := strcase.ToLowerCamel(cf.Name)
+
+ switch cf.Type {
+ case "string":
+ algoCFVal, err := getStringValue(algoDoc, algoCFName)
+ if err != nil {
+ result = multierror.Append(
+ result, fmt.Errorf(
+ "error getting custom field (%s) value: %w", algoCFName, err))
+ } else {
+ for _, c := range dbDoc.CustomFields {
+ if c.DocumentTypeCustomField.Name == cf.Name {
+ if algoCFVal != c.Value {
+ result = multierror.Append(result,
+ fmt.Errorf(
+ "custom field %s not equal, algolia=%v, db=%v",
+ algoCFName, algoCFVal, c.Value),
+ )
+ }
+ break
+ }
+ }
+ }
+ case "people":
+ algoCFVal, err := getStringSliceValue(algoDoc, algoCFName)
+ if err != nil {
+ result = multierror.Append(
+ result, fmt.Errorf(
+ "error getting custom field (%s) value: %w", algoCFName, err))
+ } else {
+ for _, c := range dbDoc.CustomFields {
+ if c.DocumentTypeCustomField.Name == cf.Name {
+ // Unmarshal person custom field value to string slice.
+ var dbCFVal []string
+ if err := json.Unmarshal(
+ []byte(c.Value), &dbCFVal,
+ ); err != nil {
+ result = multierror.Append(result,
+ fmt.Errorf(
+ "error unmarshaling custom field %s to string slice",
+ algoCFName),
+ )
+ }
+
+ if !assert.ElementsMatch(fakeT{}, algoCFVal, dbCFVal) {
+ result = multierror.Append(result,
+ fmt.Errorf(
+ "custom field %s not equal, algolia=%v, db=%v",
+ algoCFName, algoCFVal, dbCFVal),
+ )
+ }
+ break
+ }
+ }
+ }
+ default:
+ result = multierror.Append(result,
+ fmt.Errorf(
+ "unknown type for custom field key %q: %s", cf.Name, cf.Type))
+ }
+ }
+ break
+ }
+ }
+ if !foundDocType {
+ result = multierror.Append(result,
+ fmt.Errorf(
+ "doc type %q not found", algoDocType))
+ }
+
+ // Compare fileRevisions.
+ algoFileRevisions, err := getMapStringStringValue(algoDoc, "fileRevisions")
+ if err != nil {
+ result = multierror.Append(
+ result, fmt.Errorf("error getting fileRevisions value: %w", err))
+ } else {
+ dbFileRevisions := make(map[string]string)
+ for _, fr := range dbDoc.FileRevisions {
+ dbFileRevisions[fr.GoogleDriveFileRevisionID] = fr.Name
+ }
+ if !reflect.DeepEqual(algoFileRevisions, dbFileRevisions) {
+ result = multierror.Append(result,
+ fmt.Errorf(
+ "fileRevisions not equal, algolia=%v, db=%v",
+ algoFileRevisions, dbFileRevisions),
+ )
+ }
+ }
+
+ // Compare modifiedTime.
+ algoModifiedTime, err := getInt64Value(algoDoc, "modifiedTime")
+ if err != nil {
+ result = multierror.Append(
+ result, fmt.Errorf("error getting modifiedTime value: %w", err))
+ } else {
+ dbModifiedTime := dbDoc.DocumentModifiedAt.Unix()
+ if algoModifiedTime != dbModifiedTime {
+ result = multierror.Append(result,
+ fmt.Errorf(
+ "modifiedTime not equal, algolia=%v, db=%v",
+ algoModifiedTime, dbModifiedTime),
+ )
+ }
+ }
+
+ // Compare owner.
+ // NOTE: this does not address multiple owners, which can exist for Algolia
+ // document objects (documents in the database currently only have one owner).
+ algoOwners, err := getStringSliceValue(algoDoc, "owners")
+ if err != nil {
+ result = multierror.Append(
+ result, fmt.Errorf("error getting owners value: %w", err))
+ } else {
+ var dbOwner string
+ if dbDoc.Owner != nil {
+ dbOwner = dbDoc.Owner.EmailAddress
+ }
+ if len(algoOwners) > 0 {
+ if algoOwners[0] != dbOwner {
+ result = multierror.Append(result,
+ fmt.Errorf(
+ "owners not equal, algolia=%#v, db=%#v",
+ algoOwners, dbOwner),
+ )
+ }
+ } else {
+ result = multierror.Append(
+ result, fmt.Errorf("owners in Algolia was length %d", len(algoOwners)))
+ }
+ }
+
+ // Compare product.
+ algoProduct, err := getStringValue(algoDoc, "product")
+ if err != nil {
+ result = multierror.Append(
+ result, fmt.Errorf("error getting product value: %w", err))
+ } else {
+ dbProduct := dbDoc.Product.Name
+ if algoProduct != dbProduct {
+ result = multierror.Append(result,
+ fmt.Errorf(
+ "product not equal, algolia=%v, db=%v",
+ algoProduct, dbProduct),
+ )
+ }
+ }
+
+ // Compare status.
+ algoStatus, err := getStringValue(algoDoc, "status")
+ if err != nil {
+ result = multierror.Append(
+ result, fmt.Errorf("error getting status value: %w", err))
+ } else {
+ var dbStatus string
+ switch dbDoc.Status {
+ case models.WIPDocumentStatus:
+ dbStatus = "WIP"
+ case models.InReviewDocumentStatus:
+ dbStatus = "In-Review"
+ case models.ApprovedDocumentStatus:
+ dbStatus = "Approved"
+ case models.ObsoleteDocumentStatus:
+ dbStatus = "Obsolete"
+ }
+ if algoStatus != dbStatus {
+ result = multierror.Append(result,
+ fmt.Errorf(
+ "status not equal, algolia=%v, db=%v",
+ algoStatus, dbStatus),
+ )
+ }
+ }
+
+ // Compare summary.
+ algoSummary, err := getStringValue(algoDoc, "summary")
+ if err != nil {
+ result = multierror.Append(
+ result, fmt.Errorf("error getting summary value: %w", err))
+ } else {
+ dbSummary := dbDoc.Summary
+ if dbSummary != nil && algoSummary != *dbSummary {
+ result = multierror.Append(result,
+ fmt.Errorf(
+ "summary not equal, algolia=%v, db=%v",
+ algoSummary, *dbSummary),
+ )
+ }
+ }
+
+ return result.ErrorOrNil()
+}
+
+func getBooleanValue(in map[string]any, key string) (bool, error) {
+ var result bool
+
+ if v, ok := in[key]; ok {
+ if vv, ok := v.(bool); ok {
+ return vv, nil
+ } else {
+ return false, fmt.Errorf(
+ "invalid type: value is not a boolean, type: %T", v)
+ }
+ }
+
+ return result, nil
+}
+
+func getInt64Value(in map[string]any, key string) (int64, error) {
+ var result int64
+
+ if v, ok := in[key]; ok {
+ // These interface{} values are inferred as float64 and need to be converted
+ // to int64.
+ if vv, ok := v.(float64); ok {
+ return int64(vv), nil
+ } else {
+ return 0, fmt.Errorf(
+ "invalid type: value is not a float64 (expected), type: %T", v)
+ }
+ }
+
+ return result, nil
+}
+
+func getMapStringStringValue(in map[string]any, key string) (
+ map[string]string, error,
+) {
+ result := make(map[string]string)
+
+ if v, ok := in[key]; ok {
+ if reflect.TypeOf(v).Kind() == reflect.Map {
+ for vk, vv := range v.(map[string]any) {
+ if vv, ok := vv.(string); ok {
+ result[vk] = vv
+ } else {
+ return nil, fmt.Errorf(
+ "invalid type: map value element is not a string")
+ }
+ }
+ return result, nil
+ } else {
+ return nil, fmt.Errorf("invalid type: value is not a map")
+ }
+ }
+
+ return result, nil
+}
+
+func getStringValue(in map[string]any, key string) (string, error) {
+ var result string
+
+ if v, ok := in[key]; ok {
+ if vv, ok := v.(string); ok {
+ return vv, nil
+ } else {
+ return "", fmt.Errorf("invalid type: value is not a string, type: %T", v)
+ }
+ }
+
+ return result, nil
+}
+
+func getStringSliceValue(in map[string]any, key string) ([]string, error) {
+ result := []string{}
+
+ if v, ok := in[key]; ok {
+ if reflect.TypeOf(v).Kind() == reflect.Slice {
+ for _, vv := range v.([]any) {
+ if vv, ok := vv.(string); ok {
+ result = append(result, vv)
+ } else {
+ return nil, fmt.Errorf("invalid type: slice element is not a string")
+ }
+ }
+ return result, nil
+ } else {
+ return nil, fmt.Errorf("invalid type: value is not a slice")
+ }
+ }
+
+ return result, nil
+}
diff --git a/internal/api/v2/helpers_test.go b/internal/api/v2/helpers_test.go
new file mode 100644
index 000000000..9b0114500
--- /dev/null
+++ b/internal/api/v2/helpers_test.go
@@ -0,0 +1,1014 @@
+package api
+
+import (
+ "reflect"
+ "testing"
+ "time"
+
+ "github.com/hashicorp-forge/hermes/internal/config"
+ "github.com/hashicorp-forge/hermes/pkg/models"
+ "github.com/stretchr/testify/assert"
+ "github.com/stretchr/testify/require"
+)
+
+func TestParseResourceIDFromURL(t *testing.T) {
+	cases := map[string]struct {
+		url     string
+		apiPath string
+
+		want      string
+		shouldErr bool
+	}{
+		"good": {
+			url:     "/api/v2/drafts/myID",
+			apiPath: "drafts",
+
+			want: "myID",
+		},
+		"extra path after resource ID": {
+			url:     "/api/v2/drafts/myID/something",
+			apiPath: "drafts",
+
+			shouldErr: true,
+		},
+		"no resource ID": {
+			url:     "/api/v2/drafts",
+			apiPath: "drafts",
+
+			shouldErr: true,
+		},
+	}
+
+	for name, c := range cases {
+		t.Run(name, func(t *testing.T) {
+			if got, err := parseResourceIDFromURL(c.url, c.apiPath); err != nil {
+				if !c.shouldErr { // unexpected failure
+					t.Error(err)
+				}
+			} else { // NOTE(review): a nil error with shouldErr set is not flagged here
+				if got != c.want {
+					t.Errorf("got %q, want %q", got, c.want)
+				}
+			}
+		})
+	}
+}
+
+func TestCompareSlices(t *testing.T) {
+	cases := map[string]struct {
+		firstSlice  []string
+		secondSlice []string
+
+		want []string
+	}{
+		"second slice has an element that first slice doesn't": {
+			firstSlice:  []string{"a", "b", "c"},
+			secondSlice: []string{"a", "d"},
+
+			want: []string{"d"},
+		},
+		"empty slices": {
+			firstSlice:  []string{},
+			secondSlice: []string{},
+
+			want: []string{},
+		},
+		"identical slices": {
+			firstSlice:  []string{"a", "b", "c"},
+			secondSlice: []string{"a", "b", "c"},
+
+			want: []string{},
+		},
+		"first slice has elements and second slice is empty": {
+			firstSlice:  []string{"a", "b", "c"},
+			secondSlice: []string{},
+
+			want: []string{},
+		},
+		"first slice is empty and second slice has elements": {
+			firstSlice:  []string{},
+			secondSlice: []string{"a", "b", "c"},
+
+			want: []string{"a", "b", "c"},
+		},
+	}
+
+	for name, c := range cases {
+		t.Run(name, func(t *testing.T) {
+			got := compareSlices(c.firstSlice, c.secondSlice) // want = elements of secondSlice missing from firstSlice
+			if !reflect.DeepEqual(got, c.want) {
+				t.Errorf("got %q, want %q", got, c.want)
+			}
+		})
+	}
+}
+
+func TestCompareAlgoliaAndDatabaseDocument(t *testing.T) {
+ cases := map[string]struct {
+ algoDoc map[string]any
+ dbDoc models.Document
+ dbDocReviews models.DocumentReviews
+
+ shouldErr bool
+ errContains string
+ }{
+ "good": {
+ algoDoc: map[string]any{
+ "objectID": "GoogleFileID1",
+ "title": "Title1",
+ "docType": "RFC",
+ "docNumber": "ABC-123",
+ "appCreated": true,
+ "approvedBy": []any{
+ "approver1@hashicorp.com",
+ "approver2@hashicorp.com",
+ },
+ "approvers": []any{
+ "approver1@hashicorp.com",
+ "approver2@hashicorp.com",
+ },
+ "changesRequestedBy": []any{
+ "changerequester1@hashicorp.com",
+ "changerequester2@hashicorp.com",
+ },
+ "contributors": []any{
+ "contributor1@hashicorp.com",
+ "contributor2@hashicorp.com",
+ },
+ "createdTime": float64(time.Date(
+ 2023, time.April, 5, 1, 0, 0, 0, time.UTC).Unix()),
+ "currentVersion": "1.2.3",
+ "fileRevisions": map[string]any{
+ "1": "FileRevision1",
+ "2": "FileRevision2",
+ },
+ "modifiedTime": float64(time.Date(
+ 2023, time.April, 5, 23, 0, 0, 0, time.UTC).Unix()),
+ "owners": []any{
+ "owner1@hashicorp.com",
+ "owner2@hashicorp.com",
+ },
+ "product": "Product1",
+ "stakeholders": []any{
+ "stakeholder1@hashicorp.com",
+ "stakeholder2@hashicorp.com",
+ },
+ "summary": "Summary1",
+ "status": "In-Review",
+ },
+ dbDoc: models.Document{
+ GoogleFileID: "GoogleFileID1",
+ Title: "Title1",
+ DocumentType: models.DocumentType{
+ Name: "RFC",
+ },
+ DocumentNumber: 123,
+ Product: models.Product{
+ Name: "Product1",
+ Abbreviation: "ABC",
+ },
+ Imported: false,
+ Approvers: []*models.User{
+ {
+ EmailAddress: "approver1@hashicorp.com",
+ },
+ {
+ EmailAddress: "approver2@hashicorp.com",
+ },
+ },
+ Contributors: []*models.User{
+ {
+ EmailAddress: "contributor1@hashicorp.com",
+ },
+ {
+ EmailAddress: "contributor2@hashicorp.com",
+ },
+ },
+ CustomFields: []*models.DocumentCustomField{
+ {
+ DocumentTypeCustomField: models.DocumentTypeCustomField{
+ Name: "Current Version",
+ DocumentType: models.DocumentType{
+ Name: "RFC",
+ },
+ },
+ Value: "1.2.3",
+ },
+ {
+ DocumentTypeCustomField: models.DocumentTypeCustomField{
+ Name: "Stakeholders",
+ DocumentType: models.DocumentType{
+ Name: "RFC",
+ },
+ },
+ Value: `["stakeholder1@hashicorp.com","stakeholder2@hashicorp.com"]`,
+ },
+ },
+ DocumentCreatedAt: time.Date(
+ 2023, time.April, 5, 1, 0, 0, 0, time.UTC),
+ DocumentModifiedAt: time.Date(
+ 2023, time.April, 5, 23, 0, 0, 0, time.UTC),
+ FileRevisions: []models.DocumentFileRevision{
+ {
+ GoogleDriveFileRevisionID: "1",
+ Name: "FileRevision1",
+ },
+ {
+ GoogleDriveFileRevisionID: "2",
+ Name: "FileRevision2",
+ },
+ },
+ Owner: &models.User{
+ EmailAddress: "owner1@hashicorp.com",
+ },
+ Summary: &[]string{"Summary1"}[0],
+ Status: models.InReviewDocumentStatus,
+ },
+ dbDocReviews: models.DocumentReviews{
+ {
+ Status: models.ApprovedDocumentReviewStatus,
+ User: models.User{
+ EmailAddress: "approver1@hashicorp.com",
+ },
+ },
+ {
+ Status: models.ApprovedDocumentReviewStatus,
+ User: models.User{
+ EmailAddress: "approver2@hashicorp.com",
+ },
+ },
+ {
+ Status: models.ChangesRequestedDocumentReviewStatus,
+ User: models.User{
+ EmailAddress: "changerequester1@hashicorp.com",
+ },
+ },
+ {
+ Status: models.ChangesRequestedDocumentReviewStatus,
+ User: models.User{
+ EmailAddress: "changerequester2@hashicorp.com",
+ },
+ },
+ },
+ shouldErr: false,
+ },
+
+ "good draft doc number (test 'ABC-???')": {
+ algoDoc: map[string]any{
+ "appCreated": true,
+ "docNumber": "ABC-???",
+ "docType": "RFC",
+ "createdTime": float64(time.Date(
+ 2023, time.April, 5, 1, 0, 0, 0, time.UTC).Unix()),
+ "modifiedTime": float64(time.Date(
+ 2023, time.April, 5, 23, 0, 0, 0, time.UTC).Unix()),
+ "owners": []any{"owner1@hashicorp.com"},
+ "product": "Product1",
+ },
+ dbDoc: models.Document{
+ DocumentNumber: 0,
+ DocumentType: models.DocumentType{
+ Name: "RFC",
+ },
+ Product: models.Product{
+ Name: "Product1",
+ Abbreviation: "ABC",
+ },
+ DocumentCreatedAt: time.Date(
+ 2023, time.April, 5, 1, 0, 0, 0, time.UTC),
+ DocumentModifiedAt: time.Date(
+ 2023, time.April, 5, 23, 0, 0, 0, time.UTC),
+ Owner: &models.User{
+ EmailAddress: "owner1@hashicorp.com",
+ },
+ },
+ },
+
+ "good with different order of slice and map fields": {
+ algoDoc: map[string]any{
+ "appCreated": true,
+ "approvedBy": []any{
+ "approver2@hashicorp.com",
+ "approver1@hashicorp.com",
+ },
+ "approvers": []any{
+ "approver2@hashicorp.com",
+ "approver1@hashicorp.com",
+ },
+ "changesRequestedBy": []any{
+ "changerequester2@hashicorp.com",
+ "changerequester1@hashicorp.com",
+ },
+ "contributors": []any{
+ "contributor2@hashicorp.com",
+ "contributor1@hashicorp.com",
+ },
+ "createdTime": float64(time.Date(
+ 2023, time.April, 5, 1, 0, 0, 0, time.UTC).Unix()),
+ "docNumber": "ABC-123",
+ "docType": "RFC",
+ "fileRevisions": map[string]any{
+ "2": "FileRevision2",
+ "1": "FileRevision1",
+ },
+ "modifiedTime": float64(time.Date(
+ 2023, time.April, 5, 23, 0, 0, 0, time.UTC).Unix()),
+ "owners": []any{"owner1@hashicorp.com"},
+ "product": "Product1",
+ "stakeholders": []any{
+ "stakeholder2@hashicorp.com",
+ "stakeholder1@hashicorp.com",
+ },
+ },
+ dbDoc: models.Document{
+ DocumentNumber: 123,
+ DocumentType: models.DocumentType{
+ Name: "RFC",
+ },
+ Product: models.Product{
+ Name: "Product1",
+ Abbreviation: "ABC",
+ },
+ Approvers: []*models.User{
+ {
+ EmailAddress: "approver1@hashicorp.com",
+ },
+ {
+ EmailAddress: "approver2@hashicorp.com",
+ },
+ },
+ Contributors: []*models.User{
+ {
+ EmailAddress: "contributor1@hashicorp.com",
+ },
+ {
+ EmailAddress: "contributor2@hashicorp.com",
+ },
+ },
+ CustomFields: []*models.DocumentCustomField{
+ {
+ DocumentTypeCustomField: models.DocumentTypeCustomField{
+ Name: "Stakeholders",
+ DocumentType: models.DocumentType{
+ Name: "RFC",
+ },
+ },
+ Value: `["stakeholder1@hashicorp.com","stakeholder2@hashicorp.com"]`,
+ },
+ },
+ DocumentCreatedAt: time.Date(
+ 2023, time.April, 5, 1, 0, 0, 0, time.UTC),
+ DocumentModifiedAt: time.Date(
+ 2023, time.April, 5, 23, 0, 0, 0, time.UTC),
+ FileRevisions: []models.DocumentFileRevision{
+ {
+ GoogleDriveFileRevisionID: "1",
+ Name: "FileRevision1",
+ },
+ {
+ GoogleDriveFileRevisionID: "2",
+ Name: "FileRevision2",
+ },
+ },
+ Owner: &models.User{
+ EmailAddress: "owner1@hashicorp.com",
+ },
+ },
+ dbDocReviews: models.DocumentReviews{
+ {
+ Status: models.ApprovedDocumentReviewStatus,
+ User: models.User{
+ EmailAddress: "approver1@hashicorp.com",
+ },
+ },
+ {
+ Status: models.ApprovedDocumentReviewStatus,
+ User: models.User{
+ EmailAddress: "approver2@hashicorp.com",
+ },
+ },
+ {
+ Status: models.ChangesRequestedDocumentReviewStatus,
+ User: models.User{
+ EmailAddress: "changerequester1@hashicorp.com",
+ },
+ },
+ {
+ Status: models.ChangesRequestedDocumentReviewStatus,
+ User: models.User{
+ EmailAddress: "changerequester2@hashicorp.com",
+ },
+ },
+ },
+ },
+
+ "bad objectID": {
+ algoDoc: map[string]any{
+ "objectID": "GoogleFileID1",
+ "appCreated": true,
+ "docNumber": "ABC-123",
+ "createdTime": float64(time.Date(
+ 2023, time.April, 5, 1, 0, 0, 0, time.UTC).Unix()),
+ "modifiedTime": float64(time.Date(
+ 2023, time.April, 5, 23, 0, 0, 0, time.UTC).Unix()),
+ "owners": []any{"owner1@hashicorp.com"},
+ "product": "Product1",
+ },
+ dbDoc: models.Document{
+ GoogleFileID: "BadGoogleFileID",
+ DocumentNumber: 123,
+ Product: models.Product{
+ Name: "Product1",
+ Abbreviation: "ABC",
+ },
+ DocumentCreatedAt: time.Date(
+ 2023, time.April, 5, 1, 0, 0, 0, time.UTC),
+ DocumentModifiedAt: time.Date(
+ 2023, time.April, 5, 23, 0, 0, 0, time.UTC),
+ Owner: &models.User{
+ EmailAddress: "owner1@hashicorp.com",
+ },
+ },
+ shouldErr: true,
+ errContains: "objectID not equal",
+ },
+
+ "bad title": {
+ algoDoc: map[string]any{
+ "title": "Title1",
+ "appCreated": true,
+ "docNumber": "ABC-123",
+ "createdTime": float64(time.Date(
+ 2023, time.April, 5, 1, 0, 0, 0, time.UTC).Unix()),
+ "modifiedTime": float64(time.Date(
+ 2023, time.April, 5, 23, 0, 0, 0, time.UTC).Unix()),
+ "owners": []any{"owner1@hashicorp.com"},
+ "product": "Product1",
+ },
+ dbDoc: models.Document{
+ Title: "BadTitle",
+ DocumentNumber: 123,
+ Product: models.Product{
+ Name: "Product1",
+ Abbreviation: "ABC",
+ },
+ DocumentCreatedAt: time.Date(
+ 2023, time.April, 5, 1, 0, 0, 0, time.UTC),
+ DocumentModifiedAt: time.Date(
+ 2023, time.April, 5, 23, 0, 0, 0, time.UTC),
+ Owner: &models.User{
+ EmailAddress: "owner1@hashicorp.com",
+ },
+ },
+ shouldErr: true,
+ errContains: "title not equal",
+ },
+
+ "bad docType": {
+ algoDoc: map[string]any{
+ "docType": "DocType1",
+ "appCreated": true,
+ "docNumber": "ABC-123",
+ "createdTime": float64(time.Date(
+ 2023, time.April, 5, 1, 0, 0, 0, time.UTC).Unix()),
+ "modifiedTime": float64(time.Date(
+ 2023, time.April, 5, 23, 0, 0, 0, time.UTC).Unix()),
+ "owners": []any{"owner1@hashicorp.com"},
+ "product": "Product1",
+ },
+ dbDoc: models.Document{
+ DocumentType: models.DocumentType{
+ Name: "BadDocType",
+ },
+ DocumentNumber: 123,
+ Product: models.Product{
+ Name: "Product1",
+ Abbreviation: "ABC",
+ },
+ DocumentCreatedAt: time.Date(
+ 2023, time.April, 5, 1, 0, 0, 0, time.UTC),
+ DocumentModifiedAt: time.Date(
+ 2023, time.April, 5, 23, 0, 0, 0, time.UTC),
+ Owner: &models.User{
+ EmailAddress: "owner1@hashicorp.com",
+ },
+ },
+ shouldErr: true,
+ errContains: "docType not equal",
+ },
+
+ "bad appCreated": {
+ algoDoc: map[string]any{
+ "appCreated": false,
+ "docNumber": "ABC-123",
+ "createdTime": float64(time.Date(
+ 2023, time.April, 5, 1, 0, 0, 0, time.UTC).Unix()),
+ "modifiedTime": float64(time.Date(
+ 2023, time.April, 5, 23, 0, 0, 0, time.UTC).Unix()),
+ "owners": []any{"owner1@hashicorp.com"},
+ "product": "Product1",
+ },
+ dbDoc: models.Document{
+ DocumentNumber: 123,
+ Product: models.Product{
+ Name: "Product1",
+ Abbreviation: "ABC",
+ },
+ DocumentCreatedAt: time.Date(
+ 2023, time.April, 5, 1, 0, 0, 0, time.UTC),
+ DocumentModifiedAt: time.Date(
+ 2023, time.April, 5, 23, 0, 0, 0, time.UTC),
+ Owner: &models.User{
+ EmailAddress: "owner1@hashicorp.com",
+ },
+ },
+ shouldErr: true,
+ errContains: "appCreated not equal",
+ },
+
+ "bad approvedBy": {
+ algoDoc: map[string]any{
+ "approvedBy": []any{
+ "approver1@hashicorp.com",
+ "approver2@hashicorp.com",
+ },
+ "appCreated": true,
+ "docNumber": "ABC-123",
+ "createdTime": float64(time.Date(
+ 2023, time.April, 5, 1, 0, 0, 0, time.UTC).Unix()),
+ "modifiedTime": float64(time.Date(
+ 2023, time.April, 5, 23, 0, 0, 0, time.UTC).Unix()),
+ "owners": []any{"owner1@hashicorp.com"},
+ "product": "Product1",
+ },
+ dbDoc: models.Document{
+ GoogleFileID: "BadGoogleFileID",
+ DocumentNumber: 123,
+ Product: models.Product{
+ Name: "Product1",
+ Abbreviation: "ABC",
+ },
+ DocumentCreatedAt: time.Date(
+ 2023, time.April, 5, 1, 0, 0, 0, time.UTC),
+ DocumentModifiedAt: time.Date(
+ 2023, time.April, 5, 23, 0, 0, 0, time.UTC),
+ Owner: &models.User{
+ EmailAddress: "owner1@hashicorp.com",
+ },
+ },
+ dbDocReviews: models.DocumentReviews{
+ {
+ Status: models.ApprovedDocumentReviewStatus,
+ User: models.User{
+ EmailAddress: "badapprover1@hashicorp.com",
+ },
+ },
+ {
+ Status: models.ApprovedDocumentReviewStatus,
+ User: models.User{
+ EmailAddress: "badapprover2@hashicorp.com",
+ },
+ },
+ },
+ shouldErr: true,
+ errContains: "approvedBy not equal",
+ },
+
+ "bad approvers": {
+ algoDoc: map[string]any{
+ "appCreated": true,
+ "approvers": []any{
+ "approver1@hashicorp.com",
+ "approver2@hashicorp.com",
+ },
+ "docNumber": "ABC-123",
+ "createdTime": float64(time.Date(
+ 2023, time.April, 5, 1, 0, 0, 0, time.UTC).Unix()),
+ "modifiedTime": float64(time.Date(
+ 2023, time.April, 5, 23, 0, 0, 0, time.UTC).Unix()),
+ "owners": []any{"owner1@hashicorp.com"},
+ "product": "Product1",
+ },
+ dbDoc: models.Document{
+ Title: "BadTitle",
+ DocumentNumber: 123,
+ Product: models.Product{
+ Name: "Product1",
+ Abbreviation: "ABC",
+ },
+ DocumentCreatedAt: time.Date(
+ 2023, time.April, 5, 1, 0, 0, 0, time.UTC),
+ DocumentModifiedAt: time.Date(
+ 2023, time.April, 5, 23, 0, 0, 0, time.UTC),
+ Owner: &models.User{
+ EmailAddress: "owner1@hashicorp.com",
+ },
+ Approvers: []*models.User{
+ {
+ EmailAddress: "badapprover1@hashicorp.com",
+ },
+ {
+ EmailAddress: "badapprover2@hashicorp.com",
+ },
+ },
+ },
+ shouldErr: true,
+ errContains: "approvers not equal",
+ },
+
+ "bad changesRequestedBy": {
+ algoDoc: map[string]any{
+ "appCreated": true,
+ "changesRequestedBy": []any{
+ "changerequester1@hashicorp.com",
+ "changerequester2@hashicorp.com",
+ },
+ "docNumber": "ABC-123",
+ "createdTime": float64(time.Date(
+ 2023, time.April, 5, 1, 0, 0, 0, time.UTC).Unix()),
+ "modifiedTime": float64(time.Date(
+ 2023, time.April, 5, 23, 0, 0, 0, time.UTC).Unix()),
+ "owners": []any{"owner1@hashicorp.com"},
+ "product": "Product1",
+ },
+ dbDoc: models.Document{
+ GoogleFileID: "BadGoogleFileID",
+ DocumentNumber: 123,
+ Product: models.Product{
+ Name: "Product1",
+ Abbreviation: "ABC",
+ },
+ DocumentCreatedAt: time.Date(
+ 2023, time.April, 5, 1, 0, 0, 0, time.UTC),
+ DocumentModifiedAt: time.Date(
+ 2023, time.April, 5, 23, 0, 0, 0, time.UTC),
+ Owner: &models.User{
+ EmailAddress: "owner1@hashicorp.com",
+ },
+ },
+ dbDocReviews: models.DocumentReviews{
+ {
+ Status: models.ChangesRequestedDocumentReviewStatus,
+ User: models.User{
+ EmailAddress: "badchangerequester1@hashicorp.com",
+ },
+ },
+ {
+ Status: models.ChangesRequestedDocumentReviewStatus,
+ User: models.User{
+ EmailAddress: "badchangerequester2@hashicorp.com",
+ },
+ },
+ },
+ shouldErr: true,
+ errContains: "changesRequestedBy not equal",
+ },
+
+ "bad contributors": {
+ algoDoc: map[string]any{
+ "appCreated": true,
+ "contributors": []any{
+ "contributor1@hashicorp.com",
+ "contributor2@hashicorp.com",
+ },
+ "docNumber": "ABC-123",
+ "createdTime": float64(time.Date(
+ 2023, time.April, 5, 1, 0, 0, 0, time.UTC).Unix()),
+ "modifiedTime": float64(time.Date(
+ 2023, time.April, 5, 23, 0, 0, 0, time.UTC).Unix()),
+ "owners": []any{"owner1@hashicorp.com"},
+ "product": "Product1",
+ },
+ dbDoc: models.Document{
+ Title: "BadTitle",
+ DocumentNumber: 123,
+ Product: models.Product{
+ Name: "Product1",
+ Abbreviation: "ABC",
+ },
+ DocumentCreatedAt: time.Date(
+ 2023, time.April, 5, 1, 0, 0, 0, time.UTC),
+ DocumentModifiedAt: time.Date(
+ 2023, time.April, 5, 23, 0, 0, 0, time.UTC),
+ Owner: &models.User{
+ EmailAddress: "owner1@hashicorp.com",
+ },
+ Contributors: []*models.User{
+ {
+ EmailAddress: "badcontributor1@hashicorp.com",
+ },
+ {
+ EmailAddress: "badcontributor2@hashicorp.com",
+ },
+ },
+ },
+ shouldErr: true,
+ errContains: "contributors not equal",
+ },
+
+ "bad createdTime": {
+ algoDoc: map[string]any{
+ "appCreated": true,
+ "docNumber": "ABC-123",
+ "createdTime": float64(time.Date(
+ 2023, time.April, 5, 1, 0, 0, 0, time.UTC).Unix()),
+ "modifiedTime": float64(time.Date(
+ 2023, time.April, 5, 23, 0, 0, 0, time.UTC).Unix()),
+ "owners": []any{"owner1@hashicorp.com"},
+ "product": "Product1",
+ },
+ dbDoc: models.Document{
+ DocumentNumber: 123,
+ Product: models.Product{
+ Name: "Product1",
+ Abbreviation: "ABC",
+ },
+ DocumentCreatedAt: time.Date(
+ 2013, time.April, 5, 1, 0, 0, 0, time.UTC),
+ DocumentModifiedAt: time.Date(
+ 2023, time.April, 5, 23, 0, 0, 0, time.UTC),
+ Owner: &models.User{
+ EmailAddress: "owner1@hashicorp.com",
+ },
+ },
+ shouldErr: true,
+ errContains: "createdTime not equal",
+ },
+
+ "bad string custom field currentVersion": {
+ algoDoc: map[string]any{
+ "appCreated": true,
+ "docNumber": "ABC-123",
+ "createdTime": float64(time.Date(
+ 2023, time.April, 5, 1, 0, 0, 0, time.UTC).Unix()),
+ "currentVersion": "1",
+ "docType": "RFC",
+ "modifiedTime": float64(time.Date(
+ 2023, time.April, 5, 23, 0, 0, 0, time.UTC).Unix()),
+ "owners": []any{"owner1@hashicorp.com"},
+ "product": "Product1",
+ },
+ dbDoc: models.Document{
+ DocumentType: models.DocumentType{
+ Name: "RFC",
+ },
+ DocumentNumber: 123,
+ Product: models.Product{
+ Name: "Product1",
+ Abbreviation: "ABC",
+ },
+ CustomFields: []*models.DocumentCustomField{
+ {
+ DocumentTypeCustomField: models.DocumentTypeCustomField{
+ Name: "Current Version",
+ DocumentType: models.DocumentType{
+ Name: "RFC",
+ },
+ },
+ Value: "2",
+ },
+ },
+ DocumentCreatedAt: time.Date(
+ 2023, time.April, 5, 1, 0, 0, 0, time.UTC),
+ DocumentModifiedAt: time.Date(
+ 2023, time.April, 5, 23, 0, 0, 0, time.UTC),
+ Owner: &models.User{
+ EmailAddress: "owner1@hashicorp.com",
+ },
+ },
+ shouldErr: true,
+ errContains: "custom field currentVersion not equal",
+ },
+
+ "bad people custom field stakeholders": {
+ algoDoc: map[string]any{
+ "appCreated": true,
+ "docNumber": "ABC-123",
+ "createdTime": float64(time.Date(
+ 2023, time.April, 5, 1, 0, 0, 0, time.UTC).Unix()),
+ "docType": "RFC",
+ "modifiedTime": float64(time.Date(
+ 2023, time.April, 5, 23, 0, 0, 0, time.UTC).Unix()),
+ "owners": []any{"owner1@hashicorp.com"},
+ "product": "Product1",
+ "stakeholders": []any{
+ "stakeholder1@hashicorp.com",
+ "stakeholder2@hashicorp.com",
+ },
+ },
+ dbDoc: models.Document{
+ DocumentType: models.DocumentType{
+ Name: "RFC",
+ },
+ DocumentNumber: 123,
+ Product: models.Product{
+ Name: "Product1",
+ Abbreviation: "ABC",
+ },
+ CustomFields: []*models.DocumentCustomField{
+ {
+ DocumentTypeCustomField: models.DocumentTypeCustomField{
+ Name: "Stakeholders",
+ DocumentType: models.DocumentType{
+ Name: "RFC",
+ },
+ },
+ Value: `["stakeholder1@hashicorp.com","badstakeholder2@hashicorp.com"]`,
+ },
+ },
+ DocumentCreatedAt: time.Date(
+ 2023, time.April, 5, 1, 0, 0, 0, time.UTC),
+ DocumentModifiedAt: time.Date(
+ 2023, time.April, 5, 23, 0, 0, 0, time.UTC),
+ Owner: &models.User{
+ EmailAddress: "owner1@hashicorp.com",
+ },
+ },
+ shouldErr: true,
+ errContains: "custom field stakeholders not equal",
+ },
+
+ "bad modifiedTime": {
+ algoDoc: map[string]any{
+ "appCreated": true,
+ "docNumber": "ABC-123",
+ "createdTime": float64(time.Date(
+ 2023, time.April, 5, 1, 0, 0, 0, time.UTC).Unix()),
+ "modifiedTime": float64(time.Date(
+ 2023, time.April, 5, 23, 0, 0, 0, time.UTC).Unix()),
+ "owners": []any{"owner1@hashicorp.com"},
+ "product": "Product1",
+ },
+ dbDoc: models.Document{
+ DocumentNumber: 123,
+ Product: models.Product{
+ Name: "Product1",
+ Abbreviation: "ABC",
+ },
+ DocumentCreatedAt: time.Date(
+ 2023, time.April, 5, 1, 0, 0, 0, time.UTC),
+ DocumentModifiedAt: time.Date(
+ 2013, time.April, 5, 23, 0, 0, 0, time.UTC),
+ Owner: &models.User{
+ EmailAddress: "owner1@hashicorp.com",
+ },
+ },
+ shouldErr: true,
+ errContains: "modifiedTime not equal",
+ },
+
+ "bad owners": {
+ algoDoc: map[string]any{
+ "appCreated": true,
+ "docNumber": "ABC-123",
+ "createdTime": float64(time.Date(
+ 2023, time.April, 5, 1, 0, 0, 0, time.UTC).Unix()),
+ "modifiedTime": float64(time.Date(
+ 2023, time.April, 5, 23, 0, 0, 0, time.UTC).Unix()),
+ "owners": []any{"owner1@hashicorp.com"},
+ "product": "Product1",
+ },
+ dbDoc: models.Document{
+ DocumentNumber: 123,
+ Product: models.Product{
+ Name: "Product1",
+ Abbreviation: "ABC",
+ },
+ DocumentCreatedAt: time.Date(
+ 2023, time.April, 5, 1, 0, 0, 0, time.UTC),
+ DocumentModifiedAt: time.Date(
+ 2023, time.April, 5, 23, 0, 0, 0, time.UTC),
+ Owner: &models.User{
+ EmailAddress: "badowner1@hashicorp.com",
+ },
+ },
+ shouldErr: true,
+ errContains: "owners not equal",
+ },
+
+ "bad product": {
+ algoDoc: map[string]any{
+ "appCreated": true,
+ "docNumber": "ABC-123",
+ "createdTime": float64(time.Date(
+ 2023, time.April, 5, 1, 0, 0, 0, time.UTC).Unix()),
+ "modifiedTime": float64(time.Date(
+ 2023, time.April, 5, 23, 0, 0, 0, time.UTC).Unix()),
+ "owners": []any{"owner1@hashicorp.com"},
+ "product": "Product1",
+ },
+ dbDoc: models.Document{
+ DocumentNumber: 123,
+ Product: models.Product{
+ Name: "BadProduct1",
+ Abbreviation: "ABC",
+ },
+ DocumentCreatedAt: time.Date(
+ 2023, time.April, 5, 1, 0, 0, 0, time.UTC),
+ DocumentModifiedAt: time.Date(
+ 2023, time.April, 5, 23, 0, 0, 0, time.UTC),
+ Owner: &models.User{
+ EmailAddress: "owner1@hashicorp.com",
+ },
+ },
+ shouldErr: true,
+ errContains: "product not equal",
+ },
+
+ "bad status": {
+ algoDoc: map[string]any{
+ "appCreated": true,
+ "docNumber": "ABC-123",
+ "createdTime": float64(time.Date(
+ 2023, time.April, 5, 1, 0, 0, 0, time.UTC).Unix()),
+ "modifiedTime": float64(time.Date(
+ 2023, time.April, 5, 23, 0, 0, 0, time.UTC).Unix()),
+ "owners": []any{"owner1@hashicorp.com"},
+ "product": "Product1",
+ "status": "Approved",
+ },
+ dbDoc: models.Document{
+ DocumentNumber: 123,
+ Product: models.Product{
+ Name: "Product1",
+ Abbreviation: "ABC",
+ },
+ DocumentCreatedAt: time.Date(
+ 2023, time.April, 5, 1, 0, 0, 0, time.UTC),
+ DocumentModifiedAt: time.Date(
+ 2023, time.April, 5, 23, 0, 0, 0, time.UTC),
+ Owner: &models.User{
+ EmailAddress: "owner1@hashicorp.com",
+ },
+ Status: models.InReviewDocumentStatus,
+ },
+ shouldErr: true,
+ errContains: "status not equal",
+ },
+
+ "bad summary": {
+ algoDoc: map[string]any{
+ "appCreated": true,
+ "docNumber": "ABC-123",
+ "createdTime": float64(time.Date(
+ 2023, time.April, 5, 1, 0, 0, 0, time.UTC).Unix()),
+ "modifiedTime": float64(time.Date(
+ 2023, time.April, 5, 23, 0, 0, 0, time.UTC).Unix()),
+ "owners": []any{"owner1@hashicorp.com"},
+ "product": "Product1",
+ "summary": "Summary1",
+ },
+ dbDoc: models.Document{
+ DocumentNumber: 123,
+ Product: models.Product{
+ Name: "Product1",
+ Abbreviation: "ABC",
+ },
+ DocumentCreatedAt: time.Date(
+ 2023, time.April, 5, 1, 0, 0, 0, time.UTC),
+ DocumentModifiedAt: time.Date(
+ 2023, time.April, 5, 23, 0, 0, 0, time.UTC),
+ Owner: &models.User{
+ EmailAddress: "owner1@hashicorp.com",
+ },
+ Summary: &[]string{"BadSummary1"}[0],
+ },
+ shouldErr: true,
+ errContains: "summary not equal",
+ },
+ }
+
+	for name, c := range cases {
+		t.Run(name, func(t *testing.T) {
+			assert, require := assert.New(t), require.New(t)
+
+			// Define minimum document types configuration for tests.
+			docTypes := []*config.DocumentType{
+				{
+					Name: "RFC",
+					CustomFields: []*config.DocumentTypeCustomField{
+						{
+							Name: "Current Version",
+							Type: "string",
+						},
+						{
+							Name: "Stakeholders",
+							Type: "people",
+						},
+					},
+				},
+			}
+
+			err := compareAlgoliaAndDatabaseDocument(
+				c.algoDoc, c.dbDoc, c.dbDocReviews, docTypes,
+			)
+			if c.shouldErr {
+				// Assert unconditionally: a nil error with shouldErr set previously passed silently.
+				require.Error(err)
+				assert.ErrorContains(err, c.errContains)
+			} else {
+				require.NoError(err)
+			}
+		})
+	}
+}
diff --git a/internal/api/v2/me.go b/internal/api/v2/me.go
new file mode 100644
index 000000000..71d3ac11d
--- /dev/null
+++ b/internal/api/v2/me.go
@@ -0,0 +1,197 @@
+package api
+
+import (
+ "encoding/json"
+ "errors"
+ "fmt"
+ "net/http"
+
+ "github.com/hashicorp-forge/hermes/internal/server"
+ gw "github.com/hashicorp-forge/hermes/pkg/googleworkspace"
+ "google.golang.org/api/people/v1"
+)
+
+// MeGetResponse mimics the response from Google's `userinfo/me` API
+// (https://www.googleapis.com/userinfo/v2/me).
+type MeGetResponse struct {
+	ID            string `json:"id"`             // source ID from the People API email metadata
+	Email         string `json:"email"`          // primary, verified email of the authenticated user
+	VerifiedEmail bool   `json:"verified_email"` // always true when set by MeHandler (unverified is rejected)
+	Name          string `json:"name"`           // full display name
+	GivenName     string `json:"given_name"`
+	FamilyName    string `json:"family_name"`
+	Picture       string `json:"picture"` // profile photo URL; empty when no photos in result
+	Locale        string `json:"locale,omitempty"` // not populated by MeHandler in this file
+	HD            string `json:"hd,omitempty"`     // not populated by MeHandler in this file
+}
+
+func MeHandler(srv server.Server) http.Handler {
+	return http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
+		errResp := func(httpCode int, userErrMsg, logErrMsg string, err error) {
+			srv.Logger.Error(logErrMsg,
+				"method", r.Method,
+				"path", r.URL.Path,
+				"error", err,
+			)
+			http.Error(w, userErrMsg, httpCode)
+		}
+
+		// Authorize request.
+		userEmail := r.Context().Value("userEmail").(string) // presumably set by auth middleware; panics if absent — TODO confirm
+		if userEmail == "" {
+			errResp(
+				http.StatusUnauthorized,
+				"No authorization information for request",
+				"no user email found in request context",
+				nil,
+			)
+			return
+		}
+
+		switch r.Method {
+		// The HEAD method is used to determine if the user is currently
+		// authenticated.
+		case "HEAD":
+			w.WriteHeader(http.StatusOK)
+			return
+
+		case "GET":
+			errResp := func( // shadows outer errResp to support extra log key/value args
+				httpCode int, userErrMsg, logErrMsg string, err error,
+				extraArgs ...interface{}) {
+				srv.Logger.Error(logErrMsg,
+					append([]interface{}{
+						"error", err,
+						"method", r.Method,
+						"path", r.URL.Path,
+					}, extraArgs...)...,
+				)
+				http.Error(w, userErrMsg, httpCode)
+			}
+
+			ppl, err := srv.GWService.SearchPeople(
+				userEmail, "emailAddresses,names,photos")
+			if err != nil {
+				errResp(
+					http.StatusInternalServerError,
+					"Error getting user information",
+					"error searching people directory",
+					err,
+				)
+				return
+			}
+
+			// Verify that the result only contains one person.
+			if len(ppl) != 1 {
+				errResp(
+					http.StatusInternalServerError,
+					"Error getting user information",
+					fmt.Sprintf(
+						"wrong number of people in search result: %d", len(ppl)),
+					nil,
+					"user_email", userEmail,
+				)
+				return
+			}
+			p := ppl[0]
+
+			// Make sure that the result's email address is the same as the
+			// authenticated user, is the primary email address, and is verified.
+			if len(p.EmailAddresses) == 0 ||
+				p.EmailAddresses[0].Value != userEmail ||
+				!p.EmailAddresses[0].Metadata.Primary ||
+				!p.EmailAddresses[0].Metadata.Verified {
+				errResp(
+					http.StatusInternalServerError,
+					"Error getting user information",
+					"wrong user in search result",
+					err, // NOTE(review): err is always nil here — consider passing nil explicitly
+				)
+				return
+			}
+
+			// Replace the names in the People API result with data from the Admin
+			// Directory API.
+			// TODO: remove this when the bug in the People API is fixed:
+			// https://issuetracker.google.com/issues/196235775
+			if err := replaceNamesWithAdminAPIResponse(
+				p, srv.GWService,
+			); err != nil {
+				errResp(
+					http.StatusInternalServerError,
+					"Error getting user information",
+					"error replacing names with Admin API response",
+					err,
+				)
+				return
+			}
+
+			// Verify other required values are set.
+			if len(p.Names) == 0 {
+				errResp(
+					http.StatusInternalServerError,
+					"Error getting user information",
+					"no names in result",
+					err, // NOTE(review): err is always nil here — consider passing nil explicitly
+				)
+				return
+			}
+
+			// Write response.
+			resp := MeGetResponse{
+				ID:            p.EmailAddresses[0].Metadata.Source.Id, // assumes Metadata.Source is non-nil — TODO confirm
+				Email:         p.EmailAddresses[0].Value,
+				VerifiedEmail: p.EmailAddresses[0].Metadata.Verified,
+				Name:          p.Names[0].DisplayName,
+				GivenName:     p.Names[0].GivenName,
+				FamilyName:    p.Names[0].FamilyName,
+			}
+			if len(p.Photos) > 0 {
+				resp.Picture = p.Photos[0].Url
+			}
+			w.Header().Set("Content-Type", "application/json")
+			w.WriteHeader(http.StatusOK)
+			enc := json.NewEncoder(w)
+			err = enc.Encode(resp)
+			if err != nil {
+				errResp(
+					http.StatusInternalServerError,
+					"Error getting user information",
+					"error encoding response",
+					err,
+				)
+				return
+			}
+
+		default:
+			w.WriteHeader(http.StatusMethodNotAllowed)
+			return
+		}
+	})
+}
+
+// Replace the names in the People API result with data from the Admin Directory
+// API.
+// TODO: remove this when the bug in the People API is fixed:
+// https://issuetracker.google.com/issues/196235775
+func replaceNamesWithAdminAPIResponse(
+	p *people.Person, s *gw.Service,
+) error {
+	if len(p.EmailAddresses) == 0 {
+		return errors.New("email address not found")
+	}
+	u, err := s.GetUser(p.EmailAddresses[0].Value)
+	if err != nil {
+		return fmt.Errorf("error getting user: %w", err)
+	}
+
+	p.Names = []*people.Name{ // overwrites in place: mutates the caller's Person
+		{
+			DisplayName: u.Name.FullName, // assumes u.Name is non-nil — TODO confirm
+			FamilyName:  u.Name.FamilyName,
+			GivenName:   u.Name.GivenName,
+		},
+	}
+
+	return nil
+}
diff --git a/internal/api/v2/me_recently_viewed_docs.go b/internal/api/v2/me_recently_viewed_docs.go
new file mode 100644
index 000000000..7fa290ce6
--- /dev/null
+++ b/internal/api/v2/me_recently_viewed_docs.go
@@ -0,0 +1,123 @@
+package api
+
+import (
+ "encoding/json"
+ "net/http"
+
+ "github.com/hashicorp-forge/hermes/internal/server"
+ "github.com/hashicorp-forge/hermes/pkg/models"
+ "gorm.io/gorm"
+)
+
+type recentlyViewedDoc struct {
+	ID      string `json:"id"`      // Google file ID of the document
+	IsDraft bool   `json:"isDraft"` // true when the doc is WIP and not imported
+}
+
+func MeRecentlyViewedDocsHandler(srv server.Server) http.Handler {
+	return http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
+		errResp := func(
+			httpCode int, userErrMsg, logErrMsg string, err error,
+			extraArgs ...interface{}) {
+			respondError(w, r, srv.Logger, httpCode, userErrMsg, logErrMsg, err,
+				extraArgs...)
+		}
+
+		// Authorize request.
+		userEmail := r.Context().Value("userEmail").(string)
+		if userEmail == "" {
+			errResp(
+				http.StatusUnauthorized,
+				"No authorization information for request",
+				"no user email found in request context",
+				nil,
+			)
+			return
+		}
+
+		switch r.Method {
+		case "GET":
+			// Find or create user.
+			u := models.User{
+				EmailAddress: userEmail,
+			}
+			if err := u.FirstOrCreate(srv.DB); err != nil {
+				errResp(
+					http.StatusInternalServerError,
+					"Error authorizing the request",
+					"error finding or creating user",
+					err,
+				)
+				return
+			}
+
+			// Get recently viewed documents for the user.
+			var rvds []models.RecentlyViewedDoc
+			if err := srv.DB.Where(&models.RecentlyViewedDoc{UserID: int(u.ID)}).
+				Order("viewed_at desc").
+				Find(&rvds).Error; err != nil {
+
+				errResp(
+					http.StatusInternalServerError,
+					"Error finding recently viewed documents",
+					"error finding recently viewed documents in database",
+					err,
+				)
+				return
+			}
+
+			// Build response.
+			res := []recentlyViewedDoc{} // non-nil so an empty result encodes as [] instead of null
+			for _, d := range rvds {
+				// Get document in database.
+				doc := models.Document{
+					Model: gorm.Model{
+						ID: uint(d.DocumentID),
+					},
+				}
+				if err := doc.Get(srv.DB); err != nil {
+					// If we get an error, log it but don't return an error response
+					// because this would degrade UX.
+					// TODO: change this log back to an error when this handles incomplete
+					// data in the database.
+					srv.Logger.Warn("error getting document in database",
+						"error", err,
+						"method", r.Method,
+						"path", r.URL.Path,
+						"document_db_id", d.DocumentID,
+					)
+					continue
+				}
+
+				isDraft := false
+				// The document is a draft if it's in WIP status and wasn't imported.
+				if doc.Status == models.WIPDocumentStatus && !doc.Imported {
+					isDraft = true
+				}
+
+				res = append(res, recentlyViewedDoc{
+					ID:      doc.GoogleFileID,
+					IsDraft: isDraft,
+				})
+			}
+
+			// Write response.
+			w.Header().Set("Content-Type", "application/json")
+			w.WriteHeader(http.StatusOK)
+			enc := json.NewEncoder(w)
+			if err := enc.Encode(res); err != nil {
+				errResp(
+					http.StatusInternalServerError,
+					"Error finding recently viewed documents",
+					"error encoding response to JSON",
+					err,
+				)
+				return
+			}
+
+		default:
+			w.WriteHeader(http.StatusMethodNotAllowed)
+			return
+		}
+	})
+}
diff --git a/internal/api/v2/me_subscriptions.go b/internal/api/v2/me_subscriptions.go
new file mode 100644
index 000000000..aad7fd5e6
--- /dev/null
+++ b/internal/api/v2/me_subscriptions.go
@@ -0,0 +1,127 @@
+package api
+
+import (
+ "encoding/json"
+ "net/http"
+
+ "github.com/hashicorp-forge/hermes/internal/server"
+ "github.com/hashicorp-forge/hermes/pkg/models"
+)
+
+// MeSubscriptionsPostRequest is the request body for a POST to the
+// "/api/v2/me/subscriptions" endpoint. Subscriptions is the complete list of
+// product names the user should be subscribed to (it replaces, not appends).
+type MeSubscriptionsPostRequest struct {
+	Subscriptions []string `json:"subscriptions"`
+}
+
+// MeSubscriptionsHandler handles requests to the "/api/v2/me/subscriptions"
+// endpoint. GET returns the names of the products the authenticated user is
+// subscribed to; POST replaces the user's subscriptions with the provided
+// list. The user is identified by the "userEmail" request-context value set
+// by the authentication middleware.
+func MeSubscriptionsHandler(srv server.Server) http.Handler {
+	return http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
+		// errResp logs an error with request metadata and writes the
+		// user-facing error response.
+		errResp := func(httpCode int, userErrMsg, logErrMsg string, err error) {
+			srv.Logger.Error(logErrMsg,
+				"method", r.Method,
+				"path", r.URL.Path,
+				"error", err,
+			)
+			http.Error(w, userErrMsg, httpCode)
+		}
+
+		// Authorize request. Use the comma-ok form so a missing or non-string
+		// context value is rejected instead of panicking on the assertion.
+		userEmail, ok := r.Context().Value("userEmail").(string)
+		if !ok || userEmail == "" {
+			errResp(
+				http.StatusUnauthorized,
+				"No authorization information for request",
+				"no user email found in request context",
+				nil,
+			)
+			return
+		}
+
+		switch r.Method {
+		case "GET":
+			// Find or create user.
+			u := models.User{
+				EmailAddress: userEmail,
+			}
+			if err := u.FirstOrCreate(srv.DB); err != nil {
+				errResp(
+					http.StatusInternalServerError,
+					"Error authorizing the request",
+					"error finding or creating user",
+					err,
+				)
+				return
+			}
+
+			// Build response of product subscriptions. Use a non-nil slice so
+			// a user with no subscriptions is encoded as "[]", not "null".
+			products := make([]string, 0, len(u.ProductSubscriptions))
+			for _, p := range u.ProductSubscriptions {
+				products = append(products, p.Name)
+			}
+
+			// Write response. The 200 status is committed by the encoder's
+			// first write, so an encoding error can still send an error
+			// status instead of logging a superfluous WriteHeader call.
+			w.Header().Set("Content-Type", "application/json")
+			enc := json.NewEncoder(w)
+			if err := enc.Encode(products); err != nil {
+				errResp(
+					http.StatusInternalServerError,
+					"Error finding product subscriptions",
+					"error encoding products to JSON",
+					err,
+				)
+				return
+			}
+
+		case "POST":
+			// Decode request.
+			var req MeSubscriptionsPostRequest
+			if err := decodeRequest(r, &req); err != nil {
+				errResp(
+					http.StatusBadRequest,
+					"Bad request",
+					"error decoding request",
+					err,
+				)
+				return
+			}
+
+			// Find or create user.
+			u := models.User{
+				EmailAddress: userEmail,
+			}
+			if err := u.FirstOrCreate(srv.DB); err != nil {
+				errResp(
+					http.StatusInternalServerError,
+					"Error authorizing the request",
+					"error finding or creating user",
+					err,
+				)
+				return
+			}
+
+			// Build user product subscriptions. Kept nil when the request
+			// list is empty to preserve the existing upsert semantics.
+			var subs []models.Product
+			for _, p := range req.Subscriptions {
+				subs = append(subs, models.Product{Name: p})
+			}
+			u.ProductSubscriptions = subs
+
+			// Upsert user.
+			if err := u.Upsert(srv.DB); err != nil {
+				errResp(
+					http.StatusInternalServerError,
+					"Error updating user subscriptions",
+					"error upserting user",
+					err,
+				)
+				return
+			}
+
+			// Write response.
+			w.WriteHeader(http.StatusOK)
+
+		default:
+			w.WriteHeader(http.StatusMethodNotAllowed)
+			return
+		}
+	})
+}
diff --git a/internal/api/v2/people.go b/internal/api/v2/people.go
new file mode 100644
index 000000000..c62cc6df1
--- /dev/null
+++ b/internal/api/v2/people.go
@@ -0,0 +1,106 @@
+package api
+
+import (
+ "encoding/json"
+ "fmt"
+ "net/http"
+ "strings"
+
+ "github.com/hashicorp-forge/hermes/internal/server"
+ "google.golang.org/api/people/v1"
+)
+
+// PeopleDataRequest contains the fields that are allowed in a POST request
+// to the people-data endpoint.
+type PeopleDataRequest struct {
+	// Query is the search string matched against the Google Workspace
+	// directory.
+	Query string `json:"query,omitempty"`
+}
+
+// PeopleDataHandler returns people-related data from the Google People API
+// to the Hermes frontend. POST searches the directory with a free-form
+// query; GET looks up an exact, comma-separated list of email addresses
+// supplied via the "emails" query parameter.
+func PeopleDataHandler(srv server.Server) http.Handler {
+	return http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
+		switch r.Method {
+		// Using POST method to avoid logging the query in browser history
+		// and server logs.
+		case "POST":
+			// Decode request. Pass the struct address directly; the previous
+			// code decoded into a **PeopleDataRequest (pointer to pointer).
+			var req PeopleDataRequest
+			if err := decodeRequest(r, &req); err != nil {
+				srv.Logger.Error("error decoding people request", "error", err)
+				http.Error(w, fmt.Sprintf("Bad request: %q", err),
+					http.StatusBadRequest)
+				return
+			}
+
+			users, err := srv.GWService.People.SearchDirectoryPeople().
+				Query(req.Query).
+				// Only query for photos and email addresses. This may be
+				// expanded based on use case in the future.
+				ReadMask("photos,emailAddresses").
+				Sources("DIRECTORY_SOURCE_TYPE_DOMAIN_PROFILE").
+				Do()
+			if err != nil {
+				srv.Logger.Error("error searching people directory", "error", err)
+				http.Error(w, fmt.Sprintf("Error searching people directory: %q", err),
+					http.StatusInternalServerError)
+				return
+			}
+
+			// Write response.
+			w.Header().Set("Content-Type", "application/json")
+			w.WriteHeader(http.StatusOK)
+			enc := json.NewEncoder(w)
+			if err := enc.Encode(users.People); err != nil {
+				srv.Logger.Error("error encoding people response", "error", err)
+				http.Error(w, "Error searching people directory",
+					http.StatusInternalServerError)
+				return
+			}
+
+		case "GET":
+			// Validate the "emails" query parameter with an early return
+			// (guard clause) instead of the previous else-block.
+			query := r.URL.Query()
+			if len(query["emails"]) != 1 {
+				srv.Logger.Error(
+					"attempted to get users without providing any email addresses")
+				http.Error(w,
+					"Attempted to get users without providing a single value for the emails query parameter.",
+					http.StatusBadRequest)
+				return
+			}
+
+			// Look up each email address in the directory. Named "persons"
+			// so the slice does not shadow the imported people package.
+			emails := strings.Split(query["emails"][0], ",")
+			var persons []*people.Person
+			for _, email := range emails {
+				result, err := srv.GWService.People.SearchDirectoryPeople().
+					Query(email).
+					ReadMask("photos,emailAddresses").
+					Sources("DIRECTORY_SOURCE_TYPE_DOMAIN_PROFILE").
+					Do()
+
+				if err == nil && len(result.People) > 0 {
+					persons = append(persons, result.People[0])
+				} else {
+					// Log and skip misses so one bad address does not fail
+					// the whole request.
+					srv.Logger.Warn("Email lookup miss", "error", err)
+				}
+			}
+
+			// Write response.
+			w.Header().Set("Content-Type", "application/json")
+			w.WriteHeader(http.StatusOK)
+			enc := json.NewEncoder(w)
+			if err := enc.Encode(persons); err != nil {
+				srv.Logger.Error("error encoding people response", "error", err)
+				http.Error(w, "Error getting people responses",
+					http.StatusInternalServerError)
+				return
+			}
+
+		default:
+			w.WriteHeader(http.StatusMethodNotAllowed)
+			return
+		}
+	})
+}
diff --git a/internal/api/v2/products.go b/internal/api/v2/products.go
new file mode 100644
index 000000000..ea5a33f00
--- /dev/null
+++ b/internal/api/v2/products.go
@@ -0,0 +1,60 @@
+package api
+
+import (
+ "encoding/json"
+ "net/http"
+
+ "github.com/hashicorp-forge/hermes/internal/server"
+ "github.com/hashicorp-forge/hermes/internal/structs"
+ "github.com/hashicorp-forge/hermes/pkg/algolia"
+)
+
+// ProductsHandler returns the product mappings to the Hermes frontend.
+// Only GET requests are allowed; the data comes from the "products" object
+// in Algolia's internal index.
+func ProductsHandler(srv server.Server) http.Handler {
+	return http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
+		// Only allow GET requests.
+		if r.Method != http.MethodGet {
+			w.WriteHeader(http.StatusMethodNotAllowed)
+			return
+		}
+
+		// Get products and associated data from Algolia.
+		products, err := getProductsData(srv.AlgoSearch)
+		if err != nil {
+			srv.Logger.Error("error getting products from algolia", "error", err)
+			http.Error(w, "Error getting product mappings",
+				http.StatusInternalServerError)
+			return
+		}
+
+		// Write response. The 200 status is committed by the encoder's first
+		// write, so an encoding error can still send an error status instead
+		// of triggering a superfluous WriteHeader call.
+		w.Header().Set("Content-Type", "application/json")
+		enc := json.NewEncoder(w)
+		if err := enc.Encode(products); err != nil {
+			srv.Logger.Error("error encoding products response", "error", err)
+			http.Error(w, "Error getting products",
+				http.StatusInternalServerError)
+			return
+		}
+	})
+}
+
+// getProductsData fetches the product/area names and their associated data
+// from the "products" object in Algolia's internal index.
+func getProductsData(a *algolia.Client) (
+	map[string]structs.ProductData, error,
+) {
+	products := structs.Products{
+		ObjectID: "products",
+		Data:     make(map[string]structs.ProductData),
+	}
+
+	if err := a.Internal.GetObject("products", &products); err != nil {
+		return nil, err
+	}
+
+	return products.Data, nil
+}
diff --git a/internal/api/v2/reviews.go b/internal/api/v2/reviews.go
new file mode 100644
index 000000000..7f056ba31
--- /dev/null
+++ b/internal/api/v2/reviews.go
@@ -0,0 +1,823 @@
+package api
+
+import (
+ "fmt"
+ "net/http"
+ "net/url"
+ "path"
+ "strings"
+ "time"
+
+ "github.com/hashicorp-forge/hermes/internal/config"
+ "github.com/hashicorp-forge/hermes/internal/email"
+ "github.com/hashicorp-forge/hermes/internal/server"
+ "github.com/hashicorp-forge/hermes/pkg/document"
+ gw "github.com/hashicorp-forge/hermes/pkg/googleworkspace"
+ hcd "github.com/hashicorp-forge/hermes/pkg/hashicorpdocs"
+ "github.com/hashicorp-forge/hermes/pkg/links"
+ "github.com/hashicorp-forge/hermes/pkg/models"
+ "github.com/hashicorp/go-multierror"
+ "google.golang.org/api/drive/v3"
+)
+
+// ReviewsHandler handles requests to the "/api/v2/reviews/{document_id}"
+// endpoint. A POST publishes a draft document for review: it assigns the
+// next document number, rewrites the doc header, pins a Drive file revision,
+// moves the file to the published docs folder, creates a shortcut and
+// go-link, updates the database record, and emails approvers. Side effects
+// that cannot happen atomically are recorded in revertFuncs so they can be
+// undone if a later step fails.
+func ReviewsHandler(srv server.Server) http.Handler {
+	return http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
+		switch r.Method {
+		case "POST":
+			// revertFuncs is a slice of functions to execute in the event of an
+			// error that requires reverting previous logic.
+			var revertFuncs []func() error
+
+			// Validate request.
+			docID, err := parseResourceIDFromURL(r.URL.Path, "reviews")
+			if err != nil {
+				srv.Logger.Error("error parsing document ID from reviews path",
+					"error", err,
+					"method", r.Method,
+					"path", r.URL.Path,
+				)
+				http.Error(w, "Document ID not found", http.StatusNotFound)
+				return
+			}
+
+			// revertAndLog executes all accumulated revert functions, logging
+			// any errors encountered while reverting. Every error path below
+			// calls it once the first revertible action (the database
+			// transaction) has been recorded.
+			revertAndLog := func() {
+				if err := revertReviewsPost(revertFuncs); err != nil {
+					srv.Logger.Error("error reverting review creation",
+						"error", err,
+						"doc_id", docID,
+						"method", r.Method,
+						"path", r.URL.Path)
+				}
+			}
+
+			// Check if document is locked.
+			locked, err := hcd.IsLocked(docID, srv.DB, srv.GWService, srv.Logger)
+			if err != nil {
+				srv.Logger.Error("error checking document locked status",
+					"error", err,
+					"path", r.URL.Path,
+					"method", r.Method,
+					"doc_id", docID,
+				)
+				http.Error(w, "Error getting document status", http.StatusNotFound)
+				return
+			}
+			// Don't continue if document is locked.
+			if locked {
+				http.Error(w, "Document is locked", http.StatusLocked)
+				return
+			}
+
+			// Begin database transaction.
+			tx := srv.DB.Begin()
+			revertFuncs = append(revertFuncs, func() error {
+				// Rollback database transaction.
+				if err = tx.Rollback().Error; err != nil {
+					return fmt.Errorf("error rolling back database transaction: %w", err)
+				}
+
+				return nil
+			})
+
+			// Get document from database.
+			model := models.Document{
+				GoogleFileID: docID,
+			}
+			if err := model.Get(tx); err != nil {
+				srv.Logger.Error("error getting document from database",
+					"error", err,
+					"path", r.URL.Path,
+					"method", r.Method,
+					"doc_id", docID,
+				)
+				http.Error(w, "Error accessing document",
+					http.StatusInternalServerError)
+				// Roll back the transaction; this path previously returned
+				// without ever rolling back, leaking the transaction.
+				revertAndLog()
+				return
+			}
+
+			// Get reviews for the document.
+			var reviews models.DocumentReviews
+			if err := reviews.Find(tx, models.DocumentReview{
+				Document: models.Document{
+					GoogleFileID: docID,
+				},
+			}); err != nil {
+				srv.Logger.Error("error getting reviews for document",
+					"error", err,
+					"method", r.Method,
+					"path", r.URL.Path,
+					"doc_id", docID,
+				)
+				// This path previously returned without writing any response,
+				// leaving the client with an implicit 200.
+				http.Error(w, "Error accessing document",
+					http.StatusInternalServerError)
+				revertAndLog()
+				return
+			}
+
+			// Convert database model to a document.
+			doc, err := document.NewFromDatabaseModel(
+				model, reviews)
+			if err != nil {
+				srv.Logger.Error("error converting database model to document type",
+					"error", err,
+					"method", r.Method,
+					"path", r.URL.Path,
+					"doc_id", docID,
+				)
+				http.Error(w, "Error accessing document",
+					http.StatusInternalServerError)
+				revertAndLog()
+				return
+			}
+
+			// Get latest product number.
+			latestNum, err := models.GetLatestProductNumber(
+				tx, doc.DocType, doc.Product)
+			if err != nil {
+				srv.Logger.Error("error getting product document number",
+					"error", err,
+					"doc_id", docID,
+					"method", r.Method,
+					"path", r.URL.Path,
+				)
+				http.Error(w, "Error creating review",
+					http.StatusInternalServerError)
+				revertAndLog()
+				return
+			}
+
+			// Get product from database so we can get the product abbreviation.
+			product := models.Product{
+				Name: doc.Product,
+			}
+			if err := product.Get(tx); err != nil {
+				srv.Logger.Error("error getting product",
+					"error", err,
+					"doc_id", docID,
+					"method", r.Method,
+					"path", r.URL.Path,
+				)
+				http.Error(w, "Error creating review",
+					http.StatusInternalServerError)
+				revertAndLog()
+				return
+			}
+
+			// Set the document number.
+			nextDocNum := latestNum + 1
+			doc.DocNumber = fmt.Sprintf("%s-%03d",
+				product.Abbreviation,
+				nextDocNum)
+
+			// Change document status to "In-Review".
+			doc.Status = "In-Review"
+
+			// Replace the doc header.
+			err = doc.ReplaceHeader(srv.Config.BaseURL, false, srv.GWService)
+			revertFuncs = append(revertFuncs, func() error {
+				// Change back document number to "ABC-???" and status to "WIP".
+				doc.DocNumber = fmt.Sprintf("%s-???", product.Abbreviation)
+				doc.Status = "WIP"
+
+				if err = doc.ReplaceHeader(
+					srv.Config.BaseURL, false, srv.GWService,
+				); err != nil {
+					return fmt.Errorf("error replacing doc header: %w", err)
+				}
+
+				return nil
+			})
+			if err != nil {
+				srv.Logger.Error("error replacing doc header",
+					"error", err, "doc_id", docID)
+				http.Error(w, "Error creating review",
+					http.StatusInternalServerError)
+				revertAndLog()
+				return
+			}
+			srv.Logger.Info("doc header replaced",
+				"doc_id", docID,
+				"method", r.Method,
+				"path", r.URL.Path,
+			)
+
+			// Get file from Google Drive so we can get the latest modified time.
+			file, err := srv.GWService.GetFile(docID)
+			if err != nil {
+				srv.Logger.Error("error getting document file from Google",
+					"error", err,
+					"path", r.URL.Path,
+					"method", r.Method,
+					"doc_id", docID,
+				)
+				http.Error(w, "Error creating review", http.StatusInternalServerError)
+				revertAndLog()
+				return
+			}
+
+			// Parse and set modified time.
+			modifiedTime, err := time.Parse(time.RFC3339Nano, file.ModifiedTime)
+			if err != nil {
+				srv.Logger.Error("error parsing modified time",
+					"error", err,
+					"path", r.URL.Path,
+					"method", r.Method,
+					"doc_id", docID,
+				)
+				http.Error(w, "Error creating review", http.StatusInternalServerError)
+				revertAndLog()
+				return
+			}
+			doc.ModifiedTime = modifiedTime.Unix()
+
+			// Get latest Google Drive file revision.
+			latestRev, err := srv.GWService.GetLatestRevision(docID)
+			if err != nil {
+				srv.Logger.Error("error getting latest revision",
+					"error", err,
+					"method", r.Method,
+					"path", r.URL.Path,
+					"doc_id", docID)
+				http.Error(w, "Error creating review",
+					http.StatusInternalServerError)
+				revertAndLog()
+				return
+			}
+
+			// Mark latest revision to be kept forever.
+			_, err = srv.GWService.KeepRevisionForever(docID, latestRev.Id)
+			revertFuncs = append(revertFuncs, func() error {
+				// Mark latest revision to not be kept forever.
+				if err = srv.GWService.UpdateKeepRevisionForever(
+					docID, latestRev.Id, false,
+				); err != nil {
+					return fmt.Errorf(
+						"error marking revision to not be kept forever: %w", err)
+				}
+
+				return nil
+			})
+			if err != nil {
+				srv.Logger.Error("error marking revision to keep forever",
+					"error", err,
+					"method", r.Method,
+					"path", r.URL.Path,
+					"doc_id", docID,
+					"rev_id", latestRev.Id)
+				http.Error(w, "Error creating review",
+					http.StatusInternalServerError)
+				revertAndLog()
+				return
+			}
+			srv.Logger.Info("doc revision set to be kept forever",
+				"doc_id", docID,
+				"method", r.Method,
+				"path", r.URL.Path,
+			)
+
+			// Record file revision in the Algolia document object.
+			revisionName := "Requested review"
+			doc.SetFileRevision(latestRev.Id, revisionName)
+
+			// Create file revision in the database.
+			fr := models.DocumentFileRevision{
+				Document: models.Document{
+					GoogleFileID: docID,
+				},
+				GoogleDriveFileRevisionID: latestRev.Id,
+				Name:                      revisionName,
+			}
+			if err := fr.Create(tx); err != nil {
+				srv.Logger.Error("error creating document file revision",
+					"error", err,
+					"method", r.Method,
+					"path", r.URL.Path,
+					"doc_id", docID,
+					"rev_id", latestRev.Id)
+				http.Error(w, "Error creating review",
+					http.StatusInternalServerError)
+				revertAndLog()
+				return
+			}
+
+			// Move document to published docs location in Google Drive.
+			_, err = srv.GWService.MoveFile(
+				docID, srv.Config.GoogleWorkspace.DocsFolder)
+			revertFuncs = append(revertFuncs, func() error {
+				// Move document back to drafts folder in Google Drive.
+				if _, err := srv.GWService.MoveFile(
+					doc.ObjectID, srv.Config.GoogleWorkspace.DraftsFolder); err != nil {
+					return fmt.Errorf("error moving doc back to drafts folder: %w", err)
+				}
+
+				return nil
+			})
+			if err != nil {
+				srv.Logger.Error("error moving file to docs folder",
+					"error", err,
+					"doc_id", docID,
+					"method", r.Method,
+					"path", r.URL.Path)
+				http.Error(w, "Error creating review",
+					http.StatusInternalServerError)
+				revertAndLog()
+				return
+			}
+			srv.Logger.Info("doc moved to published document folder",
+				"doc_id", docID,
+				"method", r.Method,
+				"path", r.URL.Path,
+			)
+
+			// Create shortcut in hierarchical folder structure.
+			shortcut, err := createShortcut(srv.Config, *doc, srv.GWService)
+			revertFuncs = append(revertFuncs, func() error {
+				// shortcut is nil when createShortcut itself failed; there is
+				// nothing to delete then (previously this dereferenced a nil
+				// pointer and panicked during the revert).
+				if shortcut == nil {
+					return nil
+				}
+				if err := srv.GWService.DeleteFile(shortcut.Id); err != nil {
+					return fmt.Errorf("error deleting shortcut: %w", err)
+				}
+
+				return nil
+			})
+			if err != nil {
+				srv.Logger.Error("error creating shortcut",
+					"error", err,
+					"doc_id", docID,
+					"method", r.Method,
+					"path", r.URL.Path)
+				http.Error(w, "Error creating review",
+					http.StatusInternalServerError)
+				revertAndLog()
+				return
+			}
+			srv.Logger.Info("doc shortcut created",
+				"doc_id", docID,
+				"method", r.Method,
+				"path", r.URL.Path,
+			)
+
+			// Create go-link.
+			// TODO: use database for this instead of Algolia.
+			err = links.SaveDocumentRedirectDetails(
+				srv.AlgoWrite, docID, doc.DocType, doc.DocNumber)
+			revertFuncs = append(revertFuncs, func() error {
+				if err := links.DeleteDocumentRedirectDetails(
+					srv.AlgoWrite, doc.ObjectID, doc.DocType, doc.DocNumber,
+				); err != nil {
+					return fmt.Errorf("error deleting go-link: %w", err)
+				}
+
+				return nil
+			})
+			if err != nil {
+				srv.Logger.Error("error creating go-link",
+					"error", err,
+					"doc_id", docID,
+					"method", r.Method,
+					"path", r.URL.Path)
+				http.Error(w, "Error creating review",
+					http.StatusInternalServerError)
+				revertAndLog()
+				return
+			}
+			srv.Logger.Info("doc redirect details saved",
+				"doc_id", docID,
+				"method", r.Method,
+				"path", r.URL.Path,
+			)
+
+			// Update document in the database.
+			d := models.Document{
+				GoogleFileID: docID,
+			}
+			if err := d.Get(tx); err != nil {
+				srv.Logger.Error("error getting document in database",
+					"error", err,
+					"doc_id", docID,
+					"method", r.Method,
+					"path", r.URL.Path)
+				http.Error(w, "Error creating review",
+					http.StatusInternalServerError)
+				revertAndLog()
+				return
+			}
+			d.Status = models.InReviewDocumentStatus
+			d.DocumentNumber = nextDocNum
+			d.DocumentModifiedAt = modifiedTime
+			if err := d.Upsert(tx); err != nil {
+				srv.Logger.Error("error upserting document in database",
+					"error", err,
+					"doc_id", docID,
+					"method", r.Method,
+					"path", r.URL.Path)
+				http.Error(w, "Error creating review",
+					http.StatusInternalServerError)
+				revertAndLog()
+				return
+			}
+
+			// Get document URL.
+			docURL, err := getDocumentURL(srv.Config.BaseURL, docID)
+			if err != nil {
+				srv.Logger.Error("error getting document URL",
+					"error", err,
+					"doc_id", docID,
+					"method", r.Method,
+					"path", r.URL.Path,
+				)
+				http.Error(w, "Error creating review",
+					http.StatusInternalServerError)
+				revertAndLog()
+				return
+			}
+
+			// Send emails to approvers, if enabled. (Ranging over an empty
+			// Approvers slice is a no-op, so no length check is needed.)
+			if srv.Config.Email != nil && srv.Config.Email.Enabled {
+				// TODO: use an asynchronous method for sending emails because we
+				// can't currently recover gracefully from a failure here.
+				for _, approverEmail := range doc.Approvers {
+					err := email.SendReviewRequestedEmail(
+						email.ReviewRequestedEmailData{
+							BaseURL:           srv.Config.BaseURL,
+							DocumentOwner:     doc.Owners[0],
+							DocumentShortName: doc.DocNumber,
+							DocumentTitle:     doc.Title,
+							DocumentURL:       docURL,
+						},
+						[]string{approverEmail},
+						srv.Config.Email.FromAddress,
+						srv.GWService,
+					)
+					if err != nil {
+						srv.Logger.Error("error sending approver email",
+							"error", err,
+							"doc_id", docID,
+							"method", r.Method,
+							"path", r.URL.Path,
+						)
+						http.Error(w, "Error creating review",
+							http.StatusInternalServerError)
+						revertAndLog()
+						return
+					}
+					srv.Logger.Info("doc approver email sent",
+						"doc_id", docID,
+						"method", r.Method,
+						"path", r.URL.Path,
+					)
+				}
+			}
+
+			// Commit the database transaction.
+			if err := tx.Commit().Error; err != nil {
+				srv.Logger.Error("error committing database transaction",
+					"error", err,
+					"doc_id", docID,
+					"method", r.Method,
+					"path", r.URL.Path,
+				)
+				http.Error(w, "Error creating review",
+					http.StatusInternalServerError)
+				revertAndLog()
+				return
+			}
+
+			// Write response.
+			w.WriteHeader(http.StatusOK)
+
+			// Log success.
+			srv.Logger.Info("review created",
+				"doc_id", docID,
+				"method", r.Method,
+				"path", r.URL.Path,
+			)
+
+			// Capture request values for the post-processing goroutine: the
+			// request object must not be read after the handler returns.
+			reqMethod := r.Method
+			reqPath := r.URL.Path
+
+			// Request post-processing.
+			go func() {
+				// Convert document to Algolia object.
+				docObj, err := doc.ToAlgoliaObject(true)
+				if err != nil {
+					srv.Logger.Error("error converting document to Algolia object",
+						"error", err,
+						"method", reqMethod,
+						"path", reqPath,
+						"doc_id", docID,
+					)
+					return
+				}
+
+				// Save document object in Algolia.
+				res, err := srv.AlgoWrite.Docs.SaveObject(docObj)
+				if err != nil {
+					srv.Logger.Error("error saving document in Algolia",
+						"error", err,
+						"method", reqMethod,
+						"path", reqPath,
+						"doc_id", docID,
+					)
+					return
+				}
+				if err := res.Wait(); err != nil {
+					srv.Logger.Error("error saving document in Algolia",
+						"error", err,
+						"method", reqMethod,
+						"path", reqPath,
+						"doc_id", docID,
+					)
+					return
+				}
+
+				// Delete document object from drafts Algolia index.
+				delRes, err := srv.AlgoWrite.Drafts.DeleteObject(docID)
+				if err != nil {
+					srv.Logger.Error("error deleting draft in Algolia",
+						"error", err,
+						"method", reqMethod,
+						"path", reqPath,
+						"doc_id", docID,
+					)
+					return
+				}
+				if err := delRes.Wait(); err != nil {
+					srv.Logger.Error("error deleting draft in Algolia",
+						"error", err,
+						"method", reqMethod,
+						"path", reqPath,
+						"doc_id", docID,
+					)
+					return
+				}
+
+				// Send emails to product subscribers, if enabled.
+				if srv.Config.Email != nil && srv.Config.Email.Enabled {
+					p := models.Product{
+						Name: doc.Product,
+					}
+					if err := p.Get(srv.DB); err != nil {
+						srv.Logger.Error("error getting product from database",
+							"error", err,
+							"doc_id", docID,
+							"method", reqMethod,
+							"path", reqPath,
+						)
+						return
+					}
+
+					// TODO: use an asynchronous method for sending emails because
+					// we can't currently recover gracefully from a failure here.
+					for _, subscriber := range p.UserSubscribers {
+						err := email.SendSubscriberDocumentPublishedEmail(
+							email.SubscriberDocumentPublishedEmailData{
+								BaseURL:           srv.Config.BaseURL,
+								DocumentOwner:     doc.Owners[0],
+								DocumentShortName: doc.DocNumber,
+								DocumentTitle:     doc.Title,
+								DocumentType:      doc.DocType,
+								DocumentURL:       docURL,
+								Product:           doc.Product,
+							},
+							[]string{subscriber.EmailAddress},
+							srv.Config.Email.FromAddress,
+							srv.GWService,
+						)
+						if err != nil {
+							srv.Logger.Error("error sending subscriber email",
+								"error", err,
+								"method", reqMethod,
+								"path", reqPath,
+								"doc_id", docID,
+							)
+						} else {
+							srv.Logger.Info("doc subscriber email sent",
+								"doc_id", docID,
+								"method", reqMethod,
+								"path", reqPath,
+							)
+						}
+					}
+				}
+
+				// Compare Algolia and database documents to find data
+				// inconsistencies.
+				// Get document object from Algolia.
+				var algoDoc map[string]any
+				if err := srv.AlgoSearch.Docs.GetObject(docID, &algoDoc); err != nil {
+					srv.Logger.Error("error getting Algolia object for data comparison",
+						"error", err,
+						"method", reqMethod,
+						"path", reqPath,
+						"doc_id", docID,
+					)
+					return
+				}
+				// Get document from database.
+				dbDoc := models.Document{
+					GoogleFileID: docID,
+				}
+				if err := dbDoc.Get(srv.DB); err != nil {
+					srv.Logger.Error(
+						"error getting document from database for data comparison",
+						"error", err,
+						"path", reqPath,
+						"method", reqMethod,
+						"doc_id", docID,
+					)
+					return
+				}
+				// Get all reviews for the document.
+				var reviews models.DocumentReviews
+				if err := reviews.Find(srv.DB, models.DocumentReview{
+					Document: models.Document{
+						GoogleFileID: docID,
+					},
+				}); err != nil {
+					srv.Logger.Error(
+						"error getting all reviews for document for data comparison",
+						"error", err,
+						"method", reqMethod,
+						"path", reqPath,
+						"doc_id", docID,
+					)
+					return
+				}
+				if err := compareAlgoliaAndDatabaseDocument(
+					algoDoc, dbDoc, reviews, srv.Config.DocumentTypes.DocumentType,
+				); err != nil {
+					srv.Logger.Warn(
+						"inconsistencies detected between Algolia and database docs",
+						"error", err,
+						"method", reqMethod,
+						"path", reqPath,
+						"doc_id", docID,
+					)
+				}
+			}()
+
+		default:
+			w.WriteHeader(http.StatusMethodNotAllowed)
+			return
+		}
+	})
+}
+
+// createShortcut creates a shortcut to doc in the hierarchical folder
+// structure ("Shortcuts Folder/RFC/MyProduct/") under the configured
+// shortcuts folder, creating any missing intermediate folders on the way.
+func createShortcut(
+	cfg *config.Config,
+	doc document.Document,
+	s *gw.Service) (shortcut *drive.File, retErr error) {
+
+	// ensureSubfolder returns the subfolder of parent named name, creating
+	// it when it does not already exist. The label is used only to build the
+	// same error messages as before.
+	ensureSubfolder := func(parent, name, label string) (*drive.File, error) {
+		folder, err := s.GetSubfolder(parent, name)
+		if err != nil {
+			return nil, fmt.Errorf("error getting %s subfolder: %w", label, err)
+		}
+		if folder == nil {
+			folder, err = s.CreateFolder(name, parent)
+			if err != nil {
+				return nil, fmt.Errorf("error creating %s subfolder: %w", label, err)
+			}
+		}
+		return folder, nil
+	}
+
+	// Folder for the doc type, then for doc type + product.
+	docTypeFolder, err := ensureSubfolder(
+		cfg.GoogleWorkspace.ShortcutsFolder, doc.DocType, "doc type")
+	if err != nil {
+		return nil, err
+	}
+	productFolder, err := ensureSubfolder(
+		docTypeFolder.Id, doc.Product, "product")
+	if err != nil {
+		return nil, err
+	}
+
+	// Create the shortcut itself in the product folder.
+	shortcut, retErr = s.CreateShortcut(doc.ObjectID, productFolder.Id)
+	if retErr != nil {
+		return nil, fmt.Errorf("error creating shortcut: %w", retErr)
+	}
+	return shortcut, nil
+}
+
+// getDocumentURL returns the Hermes document URL for the document with the
+// provided ID, built from the configured base URL.
+func getDocumentURL(baseURL, docID string) (string, error) {
+	u, err := url.Parse(baseURL)
+	if err != nil {
+		return "", fmt.Errorf("error parsing base URL: %w", err)
+	}
+
+	// Append the "document/{docID}" path and drop any trailing slash.
+	u.Path = path.Join(u.Path, "document", docID)
+	return strings.TrimRight(u.String(), "/"), nil
+}
+
+// revertReviewsPost attempts to revert the actions that occur when a review
+// is created, running every provided function and collecting any errors. It
+// is to be used in the case of an error during the review-creation process.
+func revertReviewsPost(funcs []func() error) error {
+	var errs *multierror.Error
+
+	for _, revert := range funcs {
+		if err := revert(); err != nil {
+			errs = multierror.Append(errs, err)
+		}
+	}
+
+	return errs.ErrorOrNil()
+}
diff --git a/internal/cmd/commands/server/server.go b/internal/cmd/commands/server/server.go
index 691de62ce..906e20d7e 100644
--- a/internal/cmd/commands/server/server.go
+++ b/internal/cmd/commands/server/server.go
@@ -11,6 +11,7 @@ import (
"time"
"github.com/hashicorp-forge/hermes/internal/api"
+ apiv2 "github.com/hashicorp-forge/hermes/internal/api/v2"
"github.com/hashicorp-forge/hermes/internal/auth"
"github.com/hashicorp-forge/hermes/internal/cmd/base"
"github.com/hashicorp-forge/hermes/internal/config"
@@ -18,6 +19,7 @@ import (
"github.com/hashicorp-forge/hermes/internal/db"
"github.com/hashicorp-forge/hermes/internal/pkg/doctypes"
"github.com/hashicorp-forge/hermes/internal/pub"
+ "github.com/hashicorp-forge/hermes/internal/server"
"github.com/hashicorp-forge/hermes/internal/structs"
"github.com/hashicorp-forge/hermes/pkg/algolia"
gw "github.com/hashicorp-forge/hermes/pkg/googleworkspace"
@@ -308,12 +310,22 @@ func (c *Command) Run(args []string) int {
mux = http.NewServeMux()
}
+ srv := server.Server{
+ AlgoSearch: algoSearch,
+ AlgoWrite: algoWrite,
+ Config: cfg,
+ DB: db,
+ GWService: goog,
+ Logger: c.Log,
+ }
+
// Define handlers for authenticated endpoints.
- // TODO: stop passing around all these arguments to handlers and use a struct
- // with (functional) options.
authenticatedEndpoints := []endpoint{
+ // Algolia proxy.
{"/1/indexes/",
algolia.AlgoliaProxyHandler(algoSearch, cfg.Algolia, c.Log)},
+
+ // API v1.
{"/api/v1/approvals/",
api.ApprovalHandler(cfg, c.Log, algoSearch, algoWrite, goog, db)},
{"/api/v1/document-types", api.DocumentTypesHandler(*cfg, c.Log)},
@@ -333,6 +345,20 @@ func (c *Command) Run(args []string) int {
{"/api/v1/reviews/",
api.ReviewHandler(cfg, c.Log, algoSearch, algoWrite, goog, db)},
{"/api/v1/web/analytics", api.AnalyticsHandler(c.Log)},
+
+ // API v2.
+ {"/api/v2/approvals/", apiv2.ApprovalsHandler(srv)},
+ {"/api/v2/document-types", apiv2.DocumentTypesHandler(srv)},
+ {"/api/v2/documents/", apiv2.DocumentHandler(srv)},
+ {"/api/v2/drafts", apiv2.DraftsHandler(srv)},
+ {"/api/v2/drafts/", apiv2.DraftsDocumentHandler(srv)},
+ {"/api/v2/me", apiv2.MeHandler(srv)},
+ {"/api/v2/me/recently-viewed-docs", apiv2.MeRecentlyViewedDocsHandler(srv)},
+ {"/api/v2/me/subscriptions", apiv2.MeSubscriptionsHandler(srv)},
+ {"/api/v2/people", apiv2.PeopleDataHandler(srv)},
+ {"/api/v2/products", apiv2.ProductsHandler(srv)},
+ {"/api/v2/reviews/", apiv2.ReviewsHandler(srv)},
+ {"/api/v2/web/analytics", apiv2.AnalyticsHandler(srv)},
}
// Define handlers for unauthenticated endpoints.
@@ -345,6 +371,7 @@ func (c *Command) Run(args []string) int {
webEndpoints := []endpoint{
{"/", web.Handler()},
{"/api/v1/web/config", web.ConfigHandler(cfg, algoSearch, c.Log)},
+ {"/api/v2/web/config", web.ConfigHandler(cfg, algoSearch, c.Log)},
{"/l/", links.RedirectHandler(algoSearch, cfg.Algolia, c.Log)},
}
diff --git a/internal/server/server.go b/internal/server/server.go
new file mode 100644
index 000000000..eb4d4eebe
--- /dev/null
+++ b/internal/server/server.go
@@ -0,0 +1,30 @@
+package server
+
+import (
+ "github.com/hashicorp-forge/hermes/internal/config"
+ "github.com/hashicorp-forge/hermes/pkg/algolia"
+ gw "github.com/hashicorp-forge/hermes/pkg/googleworkspace"
+ "github.com/hashicorp/go-hclog"
+ "gorm.io/gorm"
+)
+
// Server contains the server configuration and the shared dependencies
// (clients, database, logger) that are passed to the API v2 handlers.
type Server struct {
	// AlgoSearch is the Algolia search client for the server.
	AlgoSearch *algolia.Client

	// AlgoWrite is the Algolia write client for the server.
	AlgoWrite *algolia.Client

	// Config is the config for the server.
	Config *config.Config

	// DB is the database for the server.
	DB *gorm.DB

	// GWService is the Google Workspace service for the server.
	GWService *gw.Service

	// Logger is the logger for the server.
	Logger hclog.Logger
}
diff --git a/pkg/document/document.go b/pkg/document/document.go
index f9e75b045..d890d1536 100644
--- a/pkg/document/document.go
+++ b/pkg/document/document.go
@@ -6,6 +6,7 @@ import (
"reflect"
"github.com/hashicorp-forge/hermes/internal/config"
+ "github.com/hashicorp-forge/hermes/pkg/models"
"github.com/iancoleman/strcase"
"github.com/mitchellh/mapstructure"
)
@@ -49,7 +50,8 @@ type Document struct {
// Content is the plaintext content of the document.
Content string `json:"content,omitempty"`
- // Created is the UTC time of document creation, in a RFC 3339 string format.
+ // Created is the UTC time of document creation, in a "Jan 2, 2006" string
+ // format.
Created string `json:"created,omitempty"`
// CreatedTime is the time of document creation, in Unix time.
@@ -225,6 +227,149 @@ func NewFromAlgoliaObject(
return doc, nil
}
+// NewFromDatabaseModel creates a document from a document database model.
+func NewFromDatabaseModel(
+ model models.Document, reviews models.DocumentReviews,
+) (*Document, error) {
+ doc := &Document{}
+
+ // ObjectID.
+ doc.ObjectID = model.GoogleFileID
+
+ // Title.
+ doc.Title = model.Title
+
+ // DocType.
+ doc.DocType = model.DocumentType.Name
+
+ // DocNumber.
+ doc.DocNumber = fmt.Sprintf(
+ "%s-%03d", model.Product.Abbreviation, model.DocumentNumber)
+ if model.DocumentNumber == 0 {
+ doc.DocNumber = fmt.Sprintf("%s-???", model.Product.Abbreviation)
+ }
+
+ // AppCreated.
+ doc.AppCreated = !model.Imported
+
+ // ApprovedBy, Approvers, ChangesRequestedBy.
+ var approvedBy, approvers, changesRequestedBy []string
+ for _, r := range reviews {
+ approvers = append(approvers, r.User.EmailAddress)
+
+ switch r.Status {
+ case models.ApprovedDocumentReviewStatus:
+ approvedBy = append(approvedBy, r.User.EmailAddress)
+ case models.ChangesRequestedDocumentReviewStatus:
+ changesRequestedBy = append(changesRequestedBy, r.User.EmailAddress)
+ }
+ }
+ doc.ApprovedBy = approvedBy
+ doc.Approvers = approvers
+ doc.ChangesRequestedBy = changesRequestedBy
+
+ // Contributors.
+ contributors := []string{}
+ for _, c := range model.Contributors {
+ contributors = append(contributors, c.EmailAddress)
+ }
+ doc.Contributors = contributors
+
+ // Created.
+ doc.Created = model.CreatedAt.Format("Jan 2, 2006")
+
+ // CreatedTime.
+ doc.CreatedTime = model.DocumentCreatedAt.Unix()
+
+ // CustomEditableFields.
+ customEditableFields := make(map[string]CustomDocTypeField)
+ for _, c := range model.DocumentType.CustomFields {
+ var cType string
+ switch c.Type {
+ case models.PeopleDocumentTypeCustomFieldType:
+ cType = "PEOPLE"
+ case models.PersonDocumentTypeCustomFieldType:
+ cType = "PERSON"
+ case models.StringDocumentTypeCustomFieldType:
+ cType = "STRING"
+ }
+ customEditableFields[strcase.ToLowerCamel(c.Name)] = CustomDocTypeField{
+ DisplayName: c.Name,
+ Type: cType,
+ }
+ }
+ doc.CustomEditableFields = customEditableFields
+
+ // CustomFields.
+ var customFields []CustomField
+ for _, c := range model.CustomFields {
+ cf := CustomField{
+ Name: strcase.ToLowerCamel(c.DocumentTypeCustomField.Name),
+ DisplayName: c.DocumentTypeCustomField.Name,
+ }
+ switch c.DocumentTypeCustomField.Type {
+ case models.PeopleDocumentTypeCustomFieldType:
+ cf.Type = "PEOPLE"
+ var val []string
+ if err := json.Unmarshal([]byte(c.Value), &val); err != nil {
+ return nil, fmt.Errorf("error unmarshaling value for field %q: %w",
+ c.DocumentTypeCustomField.Name, err)
+ }
+ cf.Value = val
+ case models.PersonDocumentTypeCustomFieldType:
+ cf.Type = "PERSON"
+ cf.Value = c.Value
+ case models.StringDocumentTypeCustomFieldType:
+ cf.Type = "STRING"
+ cf.Value = c.Value
+ }
+ customFields = append(customFields, cf)
+ }
+ doc.CustomFields = customFields
+
+ // FileRevisions.
+ fileRevisions := make(map[string]string)
+ for _, fr := range model.FileRevisions {
+ fileRevisions[fr.GoogleDriveFileRevisionID] = fr.Name
+ }
+ doc.FileRevisions = fileRevisions
+
+ // Locked is true if the document is locked for editing.
+ doc.Locked = model.Locked
+
+ // ModifiedTime.
+ doc.ModifiedTime = model.DocumentModifiedAt.Unix()
+
+ // Owners.
+ doc.Owners = []string{model.Owner.EmailAddress}
+
+ // Note: OwnerPhotos is not stored in the database.
+
+ // Product.
+ doc.Product = model.Product.Name
+
+ // Summary.
+ doc.Summary = *model.Summary
+
+ // Status.
+ var status string
+ switch model.Status {
+ case models.ApprovedDocumentStatus:
+ status = "Approved"
+ case models.InReviewDocumentStatus:
+ status = "In-Review"
+ case models.ObsoleteDocumentStatus:
+ status = "Obsolete"
+ case models.WIPDocumentStatus:
+ status = "WIP"
+ }
+ doc.Status = status
+
+ // Note: ThumbnailLink is not stored in the database.
+
+ return doc, nil
+}
+
// ToAlgoliaObject converts a document to a document Algolia object.
func (d Document) ToAlgoliaObject(
removeCustomEditableFields bool) (map[string]any, error) {
diff --git a/pkg/googleworkspace/drive_helpers.go b/pkg/googleworkspace/drive_helpers.go
index 6f7851c2e..b3b366b28 100644
--- a/pkg/googleworkspace/drive_helpers.go
+++ b/pkg/googleworkspace/drive_helpers.go
@@ -229,6 +229,19 @@ func (s *Service) KeepRevisionForever(
return resp, nil
}
// UpdateKeepRevisionForever updates whether a Google Drive file revision is
// kept forever.
func (s *Service) UpdateKeepRevisionForever(
	fileID, revisionID string, keepForever bool) error {

	_, err := s.Drive.Revisions.Update(fileID, revisionID, &drive.Revision{
		KeepForever: keepForever,
	}).
		Fields("keepForever").
		Do()

	return err
}
+
// ListFiles lists files in a Google Drive folder using the provided query.
func (s *Service) ListFiles(folderID, query string) ([]*drive.File, error) {
var files []*drive.File
diff --git a/pkg/models/document.go b/pkg/models/document.go
index d3f186c66..e221b166f 100644
--- a/pkg/models/document.go
+++ b/pkg/models/document.go
@@ -42,6 +42,9 @@ type Document struct {
DocumentType DocumentType
DocumentTypeID uint
+ // DocumentFileRevision are the file revisions for the document.
+ FileRevisions []DocumentFileRevision
+
// Imported is true if the document was not created through the application.
Imported bool
@@ -67,7 +70,7 @@ type Document struct {
ShareableAsDraft bool
// Summary is a summary of the document.
- Summary string
+ Summary *string
// Title is the title of the document. It only contains the title, and not the
// product abbreviation, document number, or document type.
@@ -447,6 +450,7 @@ func (d *Document) Upsert(db *gorm.DB) error {
if err := tx.
Model(&d).
Where(Document{GoogleFileID: d.GoogleFileID}).
+ Select("*").
Omit(clause.Associations). // We manage associations in the BeforeSave hook.
Assign(*d).
FirstOrCreate(&d).
diff --git a/pkg/models/document_file_revision.go b/pkg/models/document_file_revision.go
new file mode 100644
index 000000000..36c1fd32c
--- /dev/null
+++ b/pkg/models/document_file_revision.go
@@ -0,0 +1,108 @@
+package models
+
+import (
+ "fmt"
+ "time"
+
+ validation "github.com/go-ozzo/ozzo-validation/v4"
+ "gorm.io/gorm"
+ "gorm.io/gorm/clause"
+)
+
// DocumentFileRevision is a model for a document's Google Drive file revisions.
//
// DocumentID, GoogleDriveFileRevisionID, and Name together form the composite
// primary key, so a document may have many revisions and a revision ID may map
// to more than one name over time.
type DocumentFileRevision struct {
	CreatedAt time.Time
	UpdatedAt time.Time
	DeletedAt gorm.DeletedAt `gorm:"index"`

	// Document is the document that the file revision belongs to.
	Document Document
	DocumentID uint `gorm:"primaryKey"`

	// GoogleDriveFileRevisionID is the ID of the Google Drive file revision.
	GoogleDriveFileRevisionID string `gorm:"primaryKey"`

	// Name is the name of the document file revision.
	Name string `gorm:"primaryKey"`
}

// DocumentFileRevisions is a slice of document file revisions.
type DocumentFileRevisions []DocumentFileRevision
+
// Create creates a file revision for a document.
// Required fields in the receiver:
//   - Document ID or Google File ID
//   - Google Drive file revision ID
//   - Name of file revision
func (fr *DocumentFileRevision) Create(db *gorm.DB) error {
	// Preload Document: if the caller only supplied the document's Google file
	// ID, look up the document record to resolve its primary key.
	if fr.DocumentID == 0 {
		if err := fr.Document.Get(db); err != nil {
			return fmt.Errorf("error preloading Document: %w", err)
		}
		fr.DocumentID = fr.Document.ID
	}

	// Validate fields.
	if err := validation.ValidateStruct(fr,
		validation.Field(&fr.DocumentID, validation.Required),
		validation.Field(&fr.GoogleDriveFileRevisionID, validation.Required),
		validation.Field(&fr.Name, validation.Required),
	); err != nil {
		return err
	}

	// Omit the Document association so GORM doesn't try to upsert the related
	// document record.
	return db.
		Omit("Document").
		Create(&fr).
		Error
}
+
// Find finds all file revisions for a provided document, and assigns them to
// the receiver.
// Required fields in the provided document:
//   - ID or Google File ID
func (frs *DocumentFileRevisions) Find(db *gorm.DB, doc Document) error {
	// Preload Document: resolve the document's primary key if the caller only
	// supplied its Google file ID.
	if doc.ID == 0 {
		if err := doc.Get(db); err != nil {
			return fmt.Errorf("error preloading document: %w", err)
		}
	}

	// Validate fields.
	if err := validation.ValidateStruct(&doc,
		validation.Field(&doc.ID, validation.Required),
	); err != nil {
		return err
	}

	// Find all revisions keyed by the document's primary key, preloading the
	// associated Document on each result.
	return db.
		Where(DocumentFileRevision{
			DocumentID: doc.ID,
		}).
		Preload(clause.Associations).
		Find(&frs).
		Error
}
+
// Get gets a file revision from the database and assigns it to the receiver.
// Required fields in the receiver:
//   - Document ID or Google File ID
//   - Google Drive file revision ID
//   - Name of file revision
func (fr *DocumentFileRevision) Get(db *gorm.DB) error {
	// Preload Document: resolve the document's primary key if the caller only
	// supplied its Google file ID.
	if fr.DocumentID == 0 {
		if err := fr.Document.Get(db); err != nil {
			return fmt.Errorf("error preloading Document: %w", err)
		}
		fr.DocumentID = fr.Document.ID
	}

	// Validate fields.
	if err := validation.ValidateStruct(fr,
		validation.Field(&fr.DocumentID, validation.Required),
		validation.Field(&fr.GoogleDriveFileRevisionID, validation.Required),
		validation.Field(&fr.Name, validation.Required),
	); err != nil {
		return err
	}

	// NOTE(review): no explicit Where here — this relies on the receiver's
	// populated primary key fields being used as the query conditions by
	// First; confirm against GORM's inline-condition semantics.
	return db.
		Preload(clause.Associations).
		First(&fr).
		Error
}
diff --git a/pkg/models/document_file_revision_test.go b/pkg/models/document_file_revision_test.go
new file mode 100644
index 000000000..6ffa47159
--- /dev/null
+++ b/pkg/models/document_file_revision_test.go
@@ -0,0 +1,111 @@
+package models
+
+import (
+ "os"
+ "testing"
+
+ "github.com/stretchr/testify/assert"
+ "github.com/stretchr/testify/require"
+)
+
+func TestDocumentFileRevisionwModel(t *testing.T) {
+ dsn := os.Getenv("HERMES_TEST_POSTGRESQL_DSN")
+ if dsn == "" {
+ t.Skip("HERMES_TEST_POSTGRESQL_DSN environment variable isn't set")
+ }
+
+ t.Run("Create and Find", func(t *testing.T) {
+ db, tearDownTest := setupTest(t, dsn)
+ defer tearDownTest(t)
+
+ t.Run("Create a document type", func(t *testing.T) {
+ _, require := assert.New(t), require.New(t)
+ dt := DocumentType{
+ Name: "DT1",
+ LongName: "DocumentType1",
+ }
+ err := dt.FirstOrCreate(db)
+ require.NoError(err)
+ })
+
+ t.Run("Create a product", func(t *testing.T) {
+ _, require := assert.New(t), require.New(t)
+ p := Product{
+ Name: "Product1",
+ Abbreviation: "P1",
+ }
+ err := p.FirstOrCreate(db)
+ require.NoError(err)
+ })
+
+ t.Run("Create a document", func(t *testing.T) {
+ assert, require := assert.New(t), require.New(t)
+ d := Document{
+ GoogleFileID: "GoogleFileID1",
+ DocumentType: DocumentType{
+ Name: "DT1",
+ },
+ Product: Product{
+ Name: "Product1",
+ },
+ }
+ err := d.Create(db)
+ require.NoError(err)
+ assert.EqualValues(1, d.ID)
+ })
+
+ t.Run("Create a file revision", func(t *testing.T) {
+ _, require := assert.New(t), require.New(t)
+ fr := DocumentFileRevision{
+ Document: Document{
+ GoogleFileID: "GoogleFileID1",
+ },
+ GoogleDriveFileRevisionID: "GoogleDriveFileRevisionID1",
+ Name: "Name1",
+ }
+ err := fr.Create(db)
+ require.NoError(err)
+ })
+
+ t.Run("Find file revisions for the document", func(t *testing.T) {
+ assert, require := assert.New(t), require.New(t)
+ var frs DocumentFileRevisions
+ err := frs.Find(db, Document{GoogleFileID: "GoogleFileID1"})
+ require.NoError(err)
+ require.Len(frs, 1)
+ assert.EqualValues(1, frs[0].DocumentID)
+ assert.Equal(
+ "GoogleDriveFileRevisionID1", frs[0].GoogleDriveFileRevisionID)
+ assert.Equal("Name1", frs[0].Name)
+ })
+
+ t.Run("Create a second file revision", func(t *testing.T) {
+ _, require := assert.New(t), require.New(t)
+ fr := DocumentFileRevision{
+ Document: Document{
+ GoogleFileID: "GoogleFileID1",
+ },
+ GoogleDriveFileRevisionID: "GoogleDriveFileRevisionID2",
+ Name: "Name2",
+ }
+ err := fr.Create(db)
+ require.NoError(err)
+ })
+
+ t.Run("Find file revisions for the document", func(t *testing.T) {
+ assert, require := assert.New(t), require.New(t)
+ var frs DocumentFileRevisions
+ err := frs.Find(db, Document{GoogleFileID: "GoogleFileID1"})
+ require.NoError(err)
+ require.Len(frs, 2)
+ assert.EqualValues(1, frs[0].DocumentID)
+ assert.Equal(
+ "GoogleDriveFileRevisionID1", frs[0].GoogleDriveFileRevisionID)
+ assert.Equal("Name1", frs[0].Name)
+ assert.EqualValues(1, frs[1].DocumentID)
+ assert.Equal(
+ "GoogleDriveFileRevisionID2", frs[1].GoogleDriveFileRevisionID)
+ assert.Equal("Name2", frs[1].Name)
+ })
+ })
+}
diff --git a/pkg/models/document_test.go b/pkg/models/document_test.go
index aa24fb385..f7b26d09d 100644
--- a/pkg/models/document_test.go
+++ b/pkg/models/document_test.go
@@ -100,7 +100,7 @@ func TestDocumentModel(t *testing.T) {
Name: "Product1",
},
Status: InReviewDocumentStatus,
- Summary: "test summary",
+ Summary: &[]string{"test summary"}[0],
Title: "test title",
}
err = d.Create(db)
@@ -164,7 +164,7 @@ func TestDocumentModel(t *testing.T) {
assert.Equal(InReviewDocumentStatus, d.Status)
// Summary.
- assert.Equal("test summary", d.Summary)
+ assert.Equal("test summary", *d.Summary)
// Title.
assert.Equal("test title", d.Title)
@@ -592,13 +592,13 @@ func TestDocumentModel(t *testing.T) {
Name: "Product1",
Abbreviation: "P1",
},
- Summary: "summary1",
+ Summary: &[]string{"summary1"}[0],
}
err := d.Upsert(db)
require.NoError(err)
assert.EqualValues(1, d.ID)
assert.Equal("fileID1", d.GoogleFileID)
- assert.Equal("summary1", d.Summary)
+ assert.Equal("summary1", *d.Summary)
})
t.Run("Get the document", func(t *testing.T) {
@@ -610,20 +610,58 @@ func TestDocumentModel(t *testing.T) {
require.NoError(err)
assert.EqualValues(1, d.ID)
assert.Equal("fileID1", d.GoogleFileID)
- assert.Equal("summary1", d.Summary)
+ assert.Equal("summary1", *d.Summary)
})
t.Run("Update the Summary field by Upsert", func(t *testing.T) {
assert, require := assert.New(t), require.New(t)
d := Document{
GoogleFileID: "fileID1",
- Summary: "summary2",
+ Summary: &[]string{"summary2"}[0],
}
err := d.Upsert(db)
require.NoError(err)
assert.EqualValues(1, d.ID)
assert.Equal("fileID1", d.GoogleFileID)
- assert.Equal("summary2", d.Summary)
+ assert.Equal("summary2", *d.Summary)
+ })
+
+ t.Run("Get the document", func(t *testing.T) {
+ assert, require := assert.New(t), require.New(t)
+ d := Document{
+ GoogleFileID: "fileID1",
+ }
+ err := d.Get(db)
+ require.NoError(err)
+ assert.EqualValues(1, d.ID)
+ assert.Equal("fileID1", d.GoogleFileID)
+ assert.Equal("summary2", *d.Summary)
+ })
+
+ t.Run("Update the Summary field to an empty string by Upsert",
+ func(t *testing.T) {
+ assert, require := assert.New(t), require.New(t)
+ d := Document{
+ GoogleFileID: "fileID1",
+ Summary: &[]string{""}[0],
+ }
+ err := d.Upsert(db)
+ require.NoError(err)
+ assert.EqualValues(1, d.ID)
+ assert.Equal("fileID1", d.GoogleFileID)
+ assert.Equal("", *d.Summary)
+ })
+
+ t.Run("Get the document", func(t *testing.T) {
+ assert, require := assert.New(t), require.New(t)
+ d := Document{
+ GoogleFileID: "fileID1",
+ }
+ err := d.Get(db)
+ require.NoError(err)
+ assert.EqualValues(1, d.ID)
+ assert.Equal("fileID1", d.GoogleFileID)
+ assert.Equal("", *d.Summary)
})
})
diff --git a/pkg/models/gorm.go b/pkg/models/gorm.go
index a3498b0d0..41e71864c 100644
--- a/pkg/models/gorm.go
+++ b/pkg/models/gorm.go
@@ -5,6 +5,7 @@ func ModelsToAutoMigrate() []interface{} {
&DocumentType{},
&Document{},
&DocumentCustomField{},
+ &DocumentFileRevision{},
&DocumentRelatedResource{},
&DocumentRelatedResourceExternalLink{},
&DocumentRelatedResourceHermesDocument{},
diff --git a/web/app/authenticators/torii.ts b/web/app/authenticators/torii.ts
index 1517e1790..194cd4c4b 100644
--- a/web/app/authenticators/torii.ts
+++ b/web/app/authenticators/torii.ts
@@ -1,9 +1,11 @@
// @ts-ignore -- TODO: Add Types
import Torii from "ember-simple-auth/authenticators/torii";
import { inject as service } from "@ember/service";
+import ConfigService from "hermes/services/config";
import FetchService from "hermes/services/fetch";
export default class ToriiAuthenticator extends Torii {
+ @service("config") declare configSvc: ConfigService;
@service("fetch") declare fetchSvc: FetchService;
// Appears unused, but necessary for the session service
@@ -16,7 +18,7 @@ export default class ToriiAuthenticator extends Torii {
* in the session being invalidated or remaining unauthenticated.
*/
return this.fetchSvc
- .fetch("/api/v1/me", {
+ .fetch(`/api/${this.configSvc.config.api_version}/me`, {
method: "HEAD",
headers: {
"Hermes-Google-Access-Token": data.access_token,
diff --git a/web/app/components/document/index.ts b/web/app/components/document/index.ts
index 189e2067d..53cb616d7 100644
--- a/web/app/components/document/index.ts
+++ b/web/app/components/document/index.ts
@@ -3,6 +3,7 @@ import { inject as service } from "@ember/service";
import { dropTask } from "ember-concurrency";
import { HermesDocument } from "hermes/types/document";
import { AuthenticatedUser } from "hermes/services/authenticated-user";
+import ConfigService from "hermes/services/config";
import FetchService from "hermes/services/fetch";
import RouterService from "@ember/routing/router-service";
import FlashMessageService from "ember-cli-flash/services/flash-messages";
@@ -21,6 +22,7 @@ interface DocumentIndexComponentSignature {
export default class DocumentIndexComponent extends Component {
@service declare authenticatedUser: AuthenticatedUser;
+ @service("config") declare configSvc: ConfigService;
@service("fetch") declare fetchSvc: FetchService;
@service declare router: RouterService;
@service declare flashMessages: FlashMessageService;
@@ -35,10 +37,13 @@ export default class DocumentIndexComponent extends Component {
try {
- let fetchResponse = await this.fetchSvc.fetch("/api/v1/drafts/" + docID, {
- method: "DELETE",
- headers: { "Content-Type": "application/json" },
- });
+ let fetchResponse = await this.fetchSvc.fetch(
+ `/api/${this.configSvc.config.api_version}/drafts/` + docID,
+ {
+ method: "DELETE",
+ headers: { "Content-Type": "application/json" },
+ },
+ );
if (!fetchResponse?.ok) {
this.showError(fetchResponse?.statusText);
diff --git a/web/app/components/document/sidebar.ts b/web/app/components/document/sidebar.ts
index 18e603303..c6dc6fef9 100644
--- a/web/app/components/document/sidebar.ts
+++ b/web/app/components/document/sidebar.ts
@@ -544,13 +544,16 @@ export default class DocumentSidebarComponent extends Component {
try {
const response = await this.fetchSvc
- .fetch(`/api/v1/drafts/${this.docID}/shareable`)
+ .fetch(
+ `/api/${this.configSvc.config.api_version}/drafts/${this.docID}/shareable`,
+ )
.then((response) => response?.json());
if (response?.isShareable) {
this._docIsShareable = true;
@@ -799,10 +813,13 @@ export default class DocumentSidebarComponent extends Component {
try {
- await this.fetchSvc.fetch(`/api/v1/approvals/${this.docID}`, {
- method: "POST",
- headers: { "Content-Type": "application/json" },
- });
+ await this.fetchSvc.fetch(
+ `/api/${this.configSvc.config.api_version}/approvals/${this.docID}`,
+ {
+ method: "POST",
+ headers: { "Content-Type": "application/json" },
+ },
+ );
this.showFlashSuccess("Done!", "Document approved");
} catch (error: unknown) {
this.maybeShowFlashError(error as Error, "Unable to approve");
@@ -814,10 +831,13 @@ export default class DocumentSidebarComponent extends Component {
try {
- await this.fetchSvc.fetch(`/api/v1/approvals/${this.docID}`, {
- method: "DELETE",
- headers: { "Content-Type": "application/json" },
- });
+ await this.fetchSvc.fetch(
+ `/api/${this.configSvc.config.api_version}/approvals/${this.docID}`,
+ {
+ method: "DELETE",
+ headers: { "Content-Type": "application/json" },
+ },
+ );
// Add a notification for the user
let msg = "Requested changes for document";
// FRDs are a special case that can be approved or not approved.
diff --git a/web/app/components/document/sidebar/related-resources.ts b/web/app/components/document/sidebar/related-resources.ts
index b9324f55d..06f0d1fd4 100644
--- a/web/app/components/document/sidebar/related-resources.ts
+++ b/web/app/components/document/sidebar/related-resources.ts
@@ -218,9 +218,9 @@ export default class DocumentSidebarRelatedResourcesComponent extends Component<
try {
const resources = await this.fetchSvc
.fetch(
- `/api/v1/${this.args.documentIsDraft ? "drafts" : "documents"}/${
- this.args.objectID
- }/related-resources`,
+ `/api/${this.configSvc.config.api_version}/${
+ this.args.documentIsDraft ? "drafts" : "documents"
+ }/${this.args.objectID}/related-resources`,
)
.then((response) => response?.json());
@@ -315,9 +315,9 @@ export default class DocumentSidebarRelatedResourcesComponent extends Component<
try {
await this.fetchSvc.fetch(
- `/api/v1/${this.args.documentIsDraft ? "drafts" : "documents"}/${
- this.args.objectID
- }/related-resources`,
+ `/api/${this.configSvc.config.api_version}/${
+ this.args.documentIsDraft ? "drafts" : "documents"
+ }/${this.args.objectID}/related-resources`,
{
method: "PUT",
body: JSON.stringify(this.formattedRelatedResources),
diff --git a/web/app/components/inputs/people-select.ts b/web/app/components/inputs/people-select.ts
index 2c54abfa0..bb8d0604b 100644
--- a/web/app/components/inputs/people-select.ts
+++ b/web/app/components/inputs/people-select.ts
@@ -3,6 +3,7 @@ import { tracked } from "@glimmer/tracking";
import { inject as service } from "@ember/service";
import { restartableTask, timeout } from "ember-concurrency";
import { action } from "@ember/object";
+import ConfigService from "hermes/services/config";
import FetchService from "hermes/services/fetch";
import { HermesUser } from "hermes/types/document";
import Ember from "ember";
@@ -27,6 +28,7 @@ const MAX_RETRIES = 3;
const INITIAL_RETRY_DELAY = Ember.testing ? 0 : 500;
export default class InputsPeopleSelectComponent extends Component {
+ @service("config") declare configSvc: ConfigService;
@service("fetch") declare fetchSvc: FetchService;
/**
@@ -65,13 +67,16 @@ export default class InputsPeopleSelectComponent extends Component {
+ @service("config") declare configSvc: ConfigService;
@service("fetch") declare fetchSvc: FetchService;
@service declare authenticatedUser: AuthenticatedUserService;
@service declare flashMessages: FlashService;
@@ -195,7 +197,7 @@ export default class NewDocFormComponent extends Component response?.json())
.then((json) => {
this.config.setConfig(json);
diff --git a/web/app/routes/authenticated/document.ts b/web/app/routes/authenticated/document.ts
index d63c09a00..e9157ebde 100644
--- a/web/app/routes/authenticated/document.ts
+++ b/web/app/routes/authenticated/document.ts
@@ -2,6 +2,7 @@ import Route from "@ember/routing/route";
import { inject as service } from "@ember/service";
import htmlElement from "hermes/utils/html-element";
import { schedule } from "@ember/runloop";
+import ConfigService from "hermes/services/config";
import FetchService from "hermes/services/fetch";
import FlashMessageService from "ember-cli-flash/services/flash-messages";
import RouterService from "@ember/routing/router-service";
@@ -31,6 +32,7 @@ interface DocumentRouteModel {
}
export default class AuthenticatedDocumentRoute extends Route {
+ @service("config") declare configSvc: ConfigService;
@service("fetch") declare fetchSvc: FetchService;
@service("recently-viewed-docs")
declare recentDocs: RecentlyViewedDocsService;
@@ -60,7 +62,7 @@ export default class AuthenticatedDocumentRoute extends Route {
async docType(doc: HermesDocument) {
const docTypes = (await this.fetchSvc
- .fetch("/api/v1/document-types")
+ .fetch(`/api/${this.configSvc.config.api_version}/document-types`)
.then((r) => r?.json())) as HermesDocumentType[];
assert("docTypes must exist", docTypes);
@@ -82,14 +84,18 @@ export default class AuthenticatedDocumentRoute extends Route {
if (params.draft) {
try {
doc = await this.fetchSvc
- .fetch("/api/v1/drafts/" + params.document_id, {
- method: "GET",
- headers: {
- // We set this header to differentiate between document views and
- // requests to only retrieve document metadata.
- "Add-To-Recently-Viewed": "true",
+ .fetch(
+ `/api/${this.configSvc.config.api_version}/drafts/` +
+ params.document_id,
+ {
+ method: "GET",
+ headers: {
+ // We set this header to differentiate between document views and
+ // requests to only retrieve document metadata.
+ "Add-To-Recently-Viewed": "true",
+ },
},
- })
+ )
.then((r) => r?.json());
(doc as HermesDocument).isDraft = params.draft;
draftFetched = true;
@@ -108,14 +114,18 @@ export default class AuthenticatedDocumentRoute extends Route {
if (!draftFetched) {
try {
doc = await this.fetchSvc
- .fetch("/api/v1/documents/" + params.document_id, {
- method: "GET",
- headers: {
- // We set this header to differentiate between document views and
- // requests to only retrieve document metadata.
- "Add-To-Recently-Viewed": "true",
+ .fetch(
+ `/api/${this.configSvc.config.api_version}/documents/` +
+ params.document_id,
+ {
+ method: "GET",
+ headers: {
+ // We set this header to differentiate between document views and
+ // requests to only retrieve document metadata.
+ "Add-To-Recently-Viewed": "true",
+ },
},
- })
+ )
.then((r) => r?.json());
(doc as HermesDocument).isDraft = false;
@@ -134,7 +144,11 @@ export default class AuthenticatedDocumentRoute extends Route {
// Preload avatars for all approvers in the Algolia index.
if (typedDoc.contributors?.length) {
const contributors = await this.fetchSvc
- .fetch(`/api/v1/people?emails=${typedDoc.contributors.join(",")}`)
+ .fetch(
+ `/api/${
+ this.configSvc.config.api_version
+ }/people?emails=${typedDoc.contributors.join(",")}`,
+ )
.then((r) => r?.json());
if (contributors) {
@@ -145,7 +159,11 @@ export default class AuthenticatedDocumentRoute extends Route {
}
if (typedDoc.approvers?.length) {
const approvers = await this.fetchSvc
- .fetch(`/api/v1/people?emails=${typedDoc.approvers.join(",")}`)
+ .fetch(
+ `/api/${
+ this.configSvc.config.api_version
+ }/people?emails=${typedDoc.approvers.join(",")}`,
+ )
.then((r) => r?.json());
if (approvers) {
@@ -171,14 +189,17 @@ export default class AuthenticatedDocumentRoute extends Route {
/**
* Record the document view with the analytics backend.
*/
- void this.fetchSvc.fetch("/api/v1/web/analytics", {
- method: "POST",
- headers: { "Content-Type": "application/json" },
- body: JSON.stringify({
- document_id: model.doc.objectID,
- product_name: model.doc.product,
- }),
- });
+ void this.fetchSvc.fetch(
+ `/api/${this.configSvc.config.api_version}/web/analytics`,
+ {
+ method: "POST",
+ headers: { "Content-Type": "application/json" },
+ body: JSON.stringify({
+ document_id: model.doc.objectID,
+ product_name: model.doc.product,
+ }),
+ },
+ );
/**
* Once the model has resolved, check if the document is loading from
diff --git a/web/app/routes/authenticated/drafts.ts b/web/app/routes/authenticated/drafts.ts
index 352276e97..3ed6e74fa 100644
--- a/web/app/routes/authenticated/drafts.ts
+++ b/web/app/routes/authenticated/drafts.ts
@@ -7,6 +7,7 @@ import AlgoliaService, {
HITS_PER_PAGE,
MAX_VALUES_PER_FACET,
} from "hermes/services/algolia";
+import ConfigService from "hermes/services/config";
import { DocumentsRouteParams } from "hermes/types/document-routes";
import { FacetRecords } from "hermes/types/facets";
import AuthenticatedUserService from "hermes/services/authenticated-user";
@@ -24,6 +25,7 @@ interface DraftResponseJSON {
}
export default class AuthenticatedDraftsRoute extends Route {
+ @service("config") declare configSvc: ConfigService;
@service("fetch") declare fetchSvc: FetchService;
@service declare algolia: AlgoliaService;
@service declare activeFilters: ActiveFiltersService;
@@ -55,7 +57,7 @@ export default class AuthenticatedDraftsRoute extends Route {
*/
private createDraftURLSearchParams(
params: AlgoliaSearchParams,
- ownerFacetOnly: boolean
+ ownerFacetOnly: boolean,
): URLSearchParams {
/**
* In the case of facets, we want to filter by just the owner facet.
@@ -76,7 +78,7 @@ export default class AuthenticatedDraftsRoute extends Route {
ownerEmail: this.authenticatedUser.info.email,
})
.map(([key, val]) => `${key}=${val}`)
- .join("&")
+ .join("&"),
);
}
@@ -86,20 +88,20 @@ export default class AuthenticatedDraftsRoute extends Route {
private getDraftResults = task(
async (
params: AlgoliaSearchParams,
- ownerFacetOnly = false
+ ownerFacetOnly = false,
): Promise => {
try {
let response = await this.fetchSvc
.fetch(
- "/api/v1/drafts?" +
- this.createDraftURLSearchParams(params, ownerFacetOnly)
+ `/api/${this.configSvc.config.api_version}/drafts?` +
+ this.createDraftURLSearchParams(params, ownerFacetOnly),
)
.then((response) => response?.json());
return response;
} catch (e: unknown) {
console.error(e);
}
- }
+ },
);
/**
* Gets facets for the drafts page. Scoped to the current user.
@@ -116,7 +118,7 @@ export default class AuthenticatedDraftsRoute extends Route {
* Map the facets to a new object with additional nested properties
*/
let facets: FacetRecords = this.algolia.mapStatefulFacetKeys(
- algoliaFacets.facets
+ algoliaFacets.facets,
);
Object.entries(facets).forEach(([name, facet]) => {
@@ -130,7 +132,7 @@ export default class AuthenticatedDraftsRoute extends Route {
} catch (e) {
console.error(e);
}
- }
+ },
);
async model(params: DocumentsRouteParams) {
diff --git a/web/app/routes/authenticated/new/index.ts b/web/app/routes/authenticated/new/index.ts
index 46f2f6548..36812e4fb 100644
--- a/web/app/routes/authenticated/new/index.ts
+++ b/web/app/routes/authenticated/new/index.ts
@@ -1,14 +1,16 @@
import Route from "@ember/routing/route";
import { inject as service } from "@ember/service";
+import ConfigService from "hermes/services/config";
import FetchService from "hermes/services/fetch";
import { HermesDocumentType } from "hermes/types/document-type";
export default class AuthenticatedNewIndexRoute extends Route {
+ @service("config") declare configSvc: ConfigService;
@service("fetch") declare fetchSvc: FetchService;
async model() {
return (await this.fetchSvc
- .fetch("/api/v1/document-types")
+ .fetch(`/api/${this.configSvc.config.api_version}/document-types`)
.then((r) => r?.json())) as HermesDocumentType[];
}
}
diff --git a/web/app/routes/authenticated/settings.ts b/web/app/routes/authenticated/settings.ts
index ed7a82c52..b974870ad 100644
--- a/web/app/routes/authenticated/settings.ts
+++ b/web/app/routes/authenticated/settings.ts
@@ -14,7 +14,7 @@ export default class SettingsRoute extends Route {
async model(): Promise {
const allProducts = await this.fetchSvc
- .fetch("/api/v1/products")
+ .fetch(`/api/${this.configSvc.config.api_version}/products`)
.then((resp) => {
return resp?.json();
})
diff --git a/web/app/services/_session.ts b/web/app/services/_session.ts
index 8580f4059..9deebda74 100644
--- a/web/app/services/_session.ts
+++ b/web/app/services/_session.ts
@@ -77,7 +77,11 @@ export default class SessionService extends EmberSimpleAuthSessionService {
// Make a HEAD request to the back end.
// On 401, the fetch service will set `this.pollResponseIs401` true.
- await this.fetch.fetch("/api/v1/me", { method: "HEAD" }, true);
+ await this.fetch.fetch(
+ `/api/${this.configSvc.config.api_version}/me`,
+ { method: "HEAD" },
+ true,
+ );
if (this.isUsingOkta) {
this.tokenIsValid = !this.pollResponseIs401;
@@ -105,7 +109,7 @@ export default class SessionService extends EmberSimpleAuthSessionService {
"warning",
() => {
this.preventReauthMessage = true;
- }
+ },
);
}
@@ -123,7 +127,7 @@ export default class SessionService extends EmberSimpleAuthSessionService {
title: string,
message: string,
type: "warning" | "critical",
- onDestroy?: () => void
+ onDestroy?: () => void,
) {
const buttonIcon = this.isUsingOkta ? "okta" : "google";
@@ -218,7 +222,7 @@ export default class SessionService extends EmberSimpleAuthSessionService {
transition = this.router.transitionTo(redirectTarget);
} else {
transition = this.router.transitionTo(
- `authenticated.${routeAfterAuthentication}`
+ `authenticated.${routeAfterAuthentication}`,
);
}
transition.followRedirects().then(() => {
diff --git a/web/app/services/authenticated-user.ts b/web/app/services/authenticated-user.ts
index 2ea2cdbec..19e13c36e 100644
--- a/web/app/services/authenticated-user.ts
+++ b/web/app/services/authenticated-user.ts
@@ -4,6 +4,7 @@ import { inject as service } from "@ember/service";
import Store from "@ember-data/store";
import { assert } from "@ember/debug";
import { task } from "ember-concurrency";
+import ConfigService from "hermes/services/config";
import FetchService from "hermes/services/fetch";
import SessionService from "./session";
@@ -26,6 +27,7 @@ enum SubscriptionType {
}
export default class AuthenticatedUserService extends Service {
+ @service("config") declare configSvc: ConfigService;
@service("fetch") declare fetchSvc: FetchService;
@service declare session: SessionService;
@service declare store: Store;
@@ -46,7 +48,7 @@ export default class AuthenticatedUserService extends Service {
private get subscriptionsPostBody(): string {
assert("subscriptions must be defined", this.subscriptions);
let subscriptions = this.subscriptions.map(
- (subscription: Subscription) => subscription.productArea
+ (subscription: Subscription) => subscription.productArea,
);
return JSON.stringify({ subscriptions });
}
@@ -69,7 +71,7 @@ export default class AuthenticatedUserService extends Service {
loadInfo = task(async () => {
try {
this._info = await this.fetchSvc
- .fetch("/api/v1/me")
+ .fetch(`/api/${this.configSvc.config.api_version}/me`)
.then((response) => response?.json());
} catch (e: unknown) {
console.error("Error getting user information: ", e);
@@ -84,7 +86,7 @@ export default class AuthenticatedUserService extends Service {
fetchSubscriptions = task(async () => {
try {
let subscriptions = await this.fetchSvc
- .fetch("/api/v1/me/subscriptions", {
+ .fetch(`/api/${this.configSvc.config.api_version}/me/subscriptions`, {
method: "GET",
})
.then((response) => response?.json());
@@ -113,11 +115,11 @@ export default class AuthenticatedUserService extends Service {
addSubscription = task(
async (
productArea: string,
- subscriptionType = SubscriptionType.Instant
+ subscriptionType = SubscriptionType.Instant,
) => {
assert(
"removeSubscription expects a valid subscriptions array",
- this.subscriptions
+ this.subscriptions,
);
let cached = this.subscriptions;
@@ -128,17 +130,20 @@ export default class AuthenticatedUserService extends Service {
});
try {
- await this.fetchSvc.fetch(`/api/v1/me/subscriptions`, {
- method: "POST",
- headers: this.subscriptionsPostHeaders,
- body: this.subscriptionsPostBody,
- });
+ await this.fetchSvc.fetch(
+ `/api/${this.configSvc.config.api_version}/me/subscriptions`,
+ {
+ method: "POST",
+ headers: this.subscriptionsPostHeaders,
+ body: this.subscriptionsPostBody,
+ },
+ );
} catch (e: unknown) {
console.error("Error updating subscriptions: ", e);
this.subscriptions = cached;
throw e;
}
- }
+ },
);
/**
@@ -147,36 +152,39 @@ export default class AuthenticatedUserService extends Service {
removeSubscription = task(
async (
productArea: string,
- subscriptionType = SubscriptionType.Instant
+ subscriptionType = SubscriptionType.Instant,
) => {
assert(
"removeSubscription expects a subscriptions array",
- this.subscriptions
+ this.subscriptions,
);
let cached = this.subscriptions;
let subscriptionToRemove = this.subscriptions.find(
- (subscription) => subscription.productArea === productArea
+ (subscription) => subscription.productArea === productArea,
);
assert(
"removeSubscription expects a valid productArea",
- subscriptionToRemove
+ subscriptionToRemove,
);
this.subscriptions.removeObject(subscriptionToRemove);
try {
- await this.fetchSvc.fetch("/api/v1/me/subscriptions", {
- method: "POST",
- headers: this.subscriptionsPostHeaders,
- body: this.subscriptionsPostBody,
- });
+ await this.fetchSvc.fetch(
+ `/api/${this.configSvc.config.api_version}/me/subscriptions`,
+ {
+ method: "POST",
+ headers: this.subscriptionsPostHeaders,
+ body: this.subscriptionsPostBody,
+ },
+ );
} catch (e: unknown) {
console.error("Error updating subscriptions: ", e);
this.subscriptions = cached;
throw e;
}
- }
+ },
);
}
diff --git a/web/app/services/config.ts b/web/app/services/config.ts
index 47a36594e..4fb29fca5 100644
--- a/web/app/services/config.ts
+++ b/web/app/services/config.ts
@@ -9,6 +9,7 @@ export default class ConfigService extends Service {
algolia_docs_index_name: config.algolia.docsIndexName,
algolia_drafts_index_name: config.algolia.draftsIndexName,
algolia_internal_index_name: config.algolia.internalIndexName,
+ api_version: "v1",
feature_flags: config.featureFlags,
google_doc_folders: config.google.docFolders ?? "",
short_link_base_url: config.shortLinkBaseURL,
@@ -21,6 +22,12 @@ export default class ConfigService extends Service {
setConfig(param) {
this.set("config", param);
+
+ // Set API version.
+ this.config["api_version"] = "v1";
+ if (this.config.feature_flags["api_v2"]) {
+ this.config["api_version"] = "v2";
+ }
}
}
diff --git a/web/app/services/product-areas.ts b/web/app/services/product-areas.ts
index 7730ad915..7dc3e3490 100644
--- a/web/app/services/product-areas.ts
+++ b/web/app/services/product-areas.ts
@@ -1,6 +1,7 @@
import Service, { inject as service } from "@ember/service";
import { tracked } from "@glimmer/tracking";
import { task } from "ember-concurrency";
+import ConfigService from "hermes/services/config";
import FetchService from "./fetch";
export type ProductArea = {
@@ -8,6 +9,7 @@ export type ProductArea = {
};
export default class ProductAreasService extends Service {
+ @service("config") declare configSvc: ConfigService;
@service("fetch") declare fetchSvc: FetchService;
@tracked index: Record | null = null;
@@ -15,7 +17,7 @@ export default class ProductAreasService extends Service {
fetch = task(async () => {
try {
this.index = await this.fetchSvc
- .fetch("/api/v1/products")
+ .fetch(`/api/${this.configSvc.config.api_version}/products`)
.then((resp) => resp?.json());
} catch (err) {
this.index = null;
diff --git a/web/app/services/recently-viewed-docs.ts b/web/app/services/recently-viewed-docs.ts
index 83542f212..86aa7d2ba 100644
--- a/web/app/services/recently-viewed-docs.ts
+++ b/web/app/services/recently-viewed-docs.ts
@@ -3,6 +3,7 @@ import { inject as service } from "@ember/service";
import { keepLatestTask } from "ember-concurrency";
import FetchService from "./fetch";
import { tracked } from "@glimmer/tracking";
+import ConfigService from "hermes/services/config";
import { HermesDocument } from "hermes/types/document";
import { assert } from "@ember/debug";
@@ -20,6 +21,7 @@ export type RecentlyViewedDoc = {
};
export default class RecentlyViewedDocsService extends Service {
+ @service("config") declare configSvc: ConfigService;
@service("fetch") declare fetchSvc: FetchService;
@service declare session: any;
@@ -47,7 +49,7 @@ export default class RecentlyViewedDocsService extends Service {
* Fetch the file IDs from the backend.
*/
let fetchResponse = await this.fetchSvc.fetch(
- "/api/v1/me/recently-viewed-docs"
+ `/api/${this.configSvc.config.api_version}/me/recently-viewed-docs`,
);
this.index = (await fetchResponse?.json()) || [];
@@ -68,13 +70,15 @@ export default class RecentlyViewedDocsService extends Service {
(this.index as IndexedDoc[]).map(async ({ id, isDraft }) => {
let endpoint = isDraft ? "drafts" : "documents";
let doc = await this.fetchSvc
- .fetch(`/api/v1/${endpoint}/${id}`)
+ .fetch(
+ `/api/${this.configSvc.config.api_version}/${endpoint}/${id}`,
+ )
.then((resp) => resp?.json());
doc.isDraft = isDraft;
return { doc, isDraft };
- })
+ }),
);
/**
* Set up an empty array to hold the documents.