-
Notifications
You must be signed in to change notification settings - Fork 173
refactor(docs): update doc generation #2626
New issue
Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.
By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.
Already on GitHub? Sign in to your account
Changes from 5 commits
a25eef5
782680b
59a8840
614b3e1
474026a
0cc5907
2ba3d85
460b027
8ac9b31
File filter
Filter by extension
Conversations
Jump to
Diff view
Diff view
There are no files selected for viewing
| Original file line number | Diff line number | Diff line change |
|---|---|---|
|
|
@@ -7,6 +7,21 @@ | |
| set -e | ||
|
|
||
| echo "Generating docs" | ||
|
|
||
| #generate custom swag tags | ||
| go run docs/annotate_dtos/main.go | ||
|
|
||
| #generate v2 openapi specs | ||
| swag init --generatedTime --parseDependency --parseDependencyLevel 3 --parseInternal -g handlers/main.go -d api/ api/* | ||
| swag fmt -d ./api | ||
| go run v3gen/main.go | ||
|
|
||
| # fix openapi2 specs (structural fixes, add proper produce/consume tags, replace x-nullable..) | ||
| bash docs/fix_openapi_spec.sh | ||
|
|
||
| #generate v3 specs | ||
| api-spec-converter --from=swagger_2 --to=openapi_3 -s yaml ./docs/swagger.yaml > ./docs/v3/openapi3.yaml | ||
|
Collaborator
There was a problem hiding this comment. Choose a reason for hiding this comment. The reason will be displayed to describe this comment to others. Learn more. Looks good. Please add a small required-tools preflight (e.g. `command -v` checks for the new tools) so missing dependencies fail with a clear message.
Collaborator
Author
There was a problem hiding this comment. Choose a reason for hiding this comment. The reason will be displayed to describe this comment to others. Learn more. Same thing here. Forcing people to install a tool they'll only use to generate docs — and making sure everyone is on the same version — is a problem we struggled with for a long time, and it makes no sense to keep supporting it.
||
| api-spec-converter --from=swagger_2 --to=openapi_3 ./docs/swagger.json > ./docs/v3/openapi3.json | ||
|
|
||
| # add region descriptions and EU server (swag only supports a single host) | ||
| yq -i '.servers[0].description = "US Region" | .servers += [{"url": "https://eu.getconvoy.cloud/api", "description": "EU Region"}]' ./docs/v3/openapi3.yaml | ||
| jq '.servers[0].description = "US Region" | .servers += [{"url": "https://eu.getconvoy.cloud/api", "description": "EU Region"}]' ./docs/v3/openapi3.json > ./docs/v3/openapi3.json.tmp && mv ./docs/v3/openapi3.json.tmp ./docs/v3/openapi3.json | ||
| Original file line number | Diff line number | Diff line change |
|---|---|---|
| @@ -0,0 +1,208 @@ | ||
| // tools/annotate_dtos/main.go | ||
| // | ||
| //nolint:all | ||
| package main | ||
|
|
||
| import ( | ||
| "fmt" | ||
| "go/ast" | ||
| "go/parser" | ||
| "go/token" | ||
| "io/ioutil" | ||
| "os" | ||
| "path/filepath" | ||
| "strings" | ||
| ) | ||
|
|
||
| func main() { | ||
| dryRun := len(os.Args) > 1 && os.Args[1] == "--dry-run" | ||
|
|
||
| dtoDir := "./api/models" | ||
| if len(os.Args) > 2 { | ||
|
jirevwe marked this conversation as resolved.
Outdated
Collaborator
There was a problem hiding this comment. Choose a reason for hiding this commentThe reason will be displayed to describe this comment to others. Learn more. Still open. Custom directory parsing still uses
Collaborator
Author
There was a problem hiding this comment. Choose a reason for hiding this commentThe reason will be displayed to describe this comment to others. Learn more. It will never be used like that. We generate docs for the whole project, not per folder. |
||
| dtoDir = os.Args[2] | ||
| } | ||
|
|
||
| fmt.Printf("Using AST to add nullable extensions in: %s\n", dtoDir) | ||
|
|
||
| err := filepath.Walk(dtoDir, func(path string, info os.FileInfo, err error) error { | ||
| if err != nil { | ||
| return err | ||
| } | ||
|
|
||
| if !info.IsDir() && strings.HasSuffix(path, ".go") { | ||
| processFileWithAST(path, dryRun) | ||
| } | ||
| return nil | ||
| }) | ||
|
|
||
| if err != nil { | ||
| fmt.Printf("Error: %v\n", err) | ||
| } | ||
| } | ||
|
|
||
| func processFileWithAST(filePath string, dryRun bool) { | ||
| content, err := ioutil.ReadFile(filePath) | ||
| if err != nil { | ||
| fmt.Printf("Error reading %s: %v\n", filePath, err) | ||
| return | ||
| } | ||
|
|
||
| // Check if file contains Response structs | ||
| if !strings.Contains(string(content), "Response") { | ||
| return | ||
| } | ||
|
|
||
| fset := token.NewFileSet() | ||
| node, err := parser.ParseFile(fset, filePath, content, parser.ParseComments) | ||
| if err != nil { | ||
| fmt.Printf("Error parsing %s: %v\n", filePath, err) | ||
| return | ||
| } | ||
|
|
||
| var fieldsToFix []fieldInfo | ||
|
|
||
| ast.Inspect(node, func(n ast.Node) bool { | ||
| typeSpec, ok := n.(*ast.TypeSpec) | ||
| if !ok { | ||
| return true | ||
| } | ||
|
|
||
| structType, ok := typeSpec.Type.(*ast.StructType) | ||
| if !ok { | ||
| return true | ||
| } | ||
|
|
||
| // Only process Response structs | ||
| if !strings.Contains(typeSpec.Name.Name, "Response") { | ||
| return true | ||
| } | ||
|
|
||
| for _, field := range structType.Fields.List { | ||
| if field.Tag == nil || len(field.Names) == 0 { | ||
| continue | ||
| } | ||
|
|
||
| tagValue := field.Tag.Value | ||
| // Remove backticks | ||
| tagValue = strings.Trim(tagValue, "`") | ||
|
|
||
| // Skip if already has extensions:"x-nullable" | ||
| if strings.Contains(tagValue, `extensions:"x-nullable"`) { | ||
| continue | ||
| } | ||
|
|
||
| // Check if field type is nullable | ||
| fieldType := getFieldTypeString(field.Type) | ||
| if isNullableTypeAST(fieldType) { | ||
| pos := fset.Position(field.Pos()) | ||
| fieldsToFix = append(fieldsToFix, fieldInfo{ | ||
| name: field.Names[0].Name, | ||
| typeStr: fieldType, | ||
| tag: tagValue, | ||
| line: pos.Line, | ||
| filename: filePath, | ||
| }) | ||
| } | ||
| } | ||
| return true | ||
| }) | ||
|
|
||
| if len(fieldsToFix) == 0 { | ||
| return | ||
| } | ||
|
|
||
| if dryRun { | ||
| fmt.Printf("[DRY] %s: %d fields need extensions:\"x-nullable\"\n", | ||
| filepath.Base(filePath), len(fieldsToFix)) | ||
| for _, field := range fieldsToFix { | ||
| fmt.Printf(" - %s (%s)\n", field.name, field.typeStr) | ||
| } | ||
| return | ||
| } | ||
|
|
||
| // Actually fix the file | ||
| lines := strings.Split(string(content), "\n") | ||
| modified := false | ||
|
|
||
| for _, field := range fieldsToFix { | ||
| lineIndex := field.line - 1 | ||
| if lineIndex >= len(lines) { | ||
| continue | ||
| } | ||
|
|
||
| line := lines[lineIndex] | ||
|
|
||
| // Find the tag and add extensions:"x-nullable" | ||
| tagStart := strings.Index(line, "`") | ||
| tagEnd := strings.LastIndex(line, "`") | ||
| if tagStart == -1 || tagEnd == -1 { | ||
| continue | ||
| } | ||
|
|
||
| tagContent := line[tagStart+1 : tagEnd] | ||
| newTagContent := tagContent + ` extensions:"x-nullable"` | ||
| newLine := line[:tagStart+1] + newTagContent + line[tagEnd:] | ||
|
|
||
| lines[lineIndex] = newLine | ||
| modified = true | ||
|
|
||
| fmt.Printf("✓ %s: %s\n", filepath.Base(filePath), field.name) | ||
| } | ||
|
|
||
| if modified { | ||
| output := strings.Join(lines, "\n") | ||
| if err := ioutil.WriteFile(filePath, []byte(output), 0644); err != nil { | ||
| fmt.Printf("Error writing %s: %v\n", filePath, err) | ||
| } | ||
| } | ||
| } | ||
|
|
||
// fieldInfo records one struct field that needs the x-nullable extension
// added to its tag, located by file path and 1-based source line.
type fieldInfo struct {
	name     string // field identifier (first name of the field list)
	typeStr  string // rendered type, e.g. "*string" or "null.String"
	tag      string // existing tag content with backticks stripped
	line     int    // 1-based line of the field name within filename
	filename string // path of the file the field was found in
}
|
|
||
| func getFieldTypeString(expr ast.Expr) string { | ||
| switch t := expr.(type) { | ||
| case *ast.Ident: | ||
| return t.Name | ||
| case *ast.SelectorExpr: | ||
| xIdent, ok := t.X.(*ast.Ident) | ||
| if !ok { | ||
| return "" | ||
| } | ||
| return fmt.Sprintf("%s.%s", xIdent.Name, t.Sel.Name) | ||
| case *ast.StarExpr: | ||
| return "*" + getFieldTypeString(t.X) | ||
| case *ast.ArrayType: | ||
| return "[]" + getFieldTypeString(t.Elt) | ||
| default: | ||
| return "" | ||
| } | ||
| } | ||
|
|
||
// isNullableTypeAST reports whether a rendered type string represents a
// JSON-nullable value: guregu/null wrappers, database/sql Null* wrappers,
// and pointers — except pointers to slices and maps, which JSON already
// treats as nullable containers.
func isNullableTypeAST(typeStr string) bool {
	// Wrapper types from the null and database/sql packages.
	for _, prefix := range []string{"null.", "sql.Null"} {
		if strings.HasPrefix(typeStr, prefix) {
			return true
		}
	}

	// Anything that is not a pointer beyond this point is non-nullable.
	if !strings.HasPrefix(typeStr, "*") {
		return false
	}

	// Pointers to slices/maps are deliberately excluded for JSON.
	switch {
	case strings.HasPrefix(typeStr, "*[]"), strings.HasPrefix(typeStr, "*map["):
		return false
	default:
		return true
	}
}
There was a problem hiding this comment.
Choose a reason for hiding this comment
The reason will be displayed to describe this comment to others. Learn more.
This hook now requires `jq`, `yq`, and `api-spec-converter`. Please add `command -v` checks with install hints so failures are clear for non-mise setups.
Choose a reason for hiding this comment
The reason will be displayed to describe this comment to others. Learn more.
We should not require anyone to install these on their machines imo. That's why the dependency manager exists. These tools will never run in CI or in prod, it will always run locally.