From 473cee2941e09d74fe9e4a53d621d8931ceb94c7 Mon Sep 17 00:00:00 2001 From: Frederic BIDON Date: Mon, 23 Mar 2026 18:12:50 +0100 Subject: [PATCH 1/2] chore: package disentanglement This PR restructures the codescan package in order to: * get better control over the exposed API vs internal (currently: Run(), Options) * organize the functionality into smaller, more easily identifiable and testable units * establish a clear chain of responsibility between the various components How this has been tested: * all the original tests remain the same with the same assertions and have been reshuffled across packages * all tests that produce a spec or partial spec output on master had their output captured as a "golden" JSON file: all reshuffled tests compare their output against this golden copy. * added more tests to the baseline, producing golden files with extended coverage (~85%) * added more tests to the baseline, exercising reachable error paths * added a regression test harness with full golden comparison Signed-off-by: Frederic BIDON --- .gitignore | 1 + .mockery.yml | 21 + api.go | 31 + api_test.go | 91 + application.go | 833 ------ assertions.go | 37 - enum.go | 35 - enum_test.go | 39 - errors.go | 12 +- fixtures/enhancements/alias-expand/api.go | 108 + fixtures/enhancements/alias-response/api.go | 34 + .../enhancements/all-http-methods/handlers.go | 70 + fixtures/enhancements/allof-edges/types.go | 74 + .../enhancements/defaults-examples/types.go | 42 + fixtures/enhancements/embedded-types/types.go | 70 + fixtures/enhancements/enum-docs/types.go | 52 + .../enhancements/interface-methods/types.go | 121 + .../enhancements/malformed/bad-contact/doc.go | 15 + .../malformed/bad-response-tag/handlers.go | 14 + .../malformed/bad-sec-defs/doc.go | 17 + .../enhancements/malformed/default-int/api.go | 24 + .../malformed/duplicate-body-tag/handlers.go | 14 + .../enhancements/malformed/example-int/api.go | 23 + .../malformed/info-bad-ext-key/doc.go | 17 + 
.../malformed/meta-bad-ext-key/doc.go | 17 + fixtures/enhancements/named-basic/types.go | 40 + .../enhancements/named-struct-tags/types.go | 39 + .../enhancements/ref-alias-chain/types.go | 54 + fixtures/enhancements/response-edges/api.go | 63 + fixtures/enhancements/strfmt-arrays/types.go | 47 + fixtures/enhancements/text-marshal/types.go | 54 + .../enhancements/top-level-kinds/types.go | 45 + .../enhancements/unknown-annotation/types.go | 13 + fixtures/goparsing/petstore/enums/priority.go | 29 + fixtures/goparsing/spec/api.go | 1 + .../integration/golden/api_spec_go111.json | 134 + .../golden/api_spec_go111_ref.json | 137 + .../golden/api_spec_go111_transparent.json | 135 + .../integration/golden/bookings_spec.json | 113 + .../integration/golden/bugs_3125_schema.json | 22 + .../golden/classification_operations.json | 206 ++ .../golden/classification_params.json | 677 +++++ .../golden/classification_params_file.json | 677 +++++ .../classification_params_issue2007.json | 17 + .../classification_params_issue2011.json | 13 + .../golden/classification_responses.json | 251 ++ .../classification_responses_issue2007.json | 11 + .../classification_responses_issue2011.json | 6 + .../golden/classification_routes.json | 332 ++ .../golden/classification_routes_body.json | 535 ++++ .../classification_schema_AllOfModel.json | 49 + .../classification_schema_NamedWithType.json | 13 + .../golden/classification_schema_NoModel.json | 261 ++ .../classification_schema_NullString.json | 6 + ...ation_schema_interface_discriminators.json | 229 ++ ...fication_schema_struct_discriminators.json | 84 + .../golden/enhancements_alias_expand.json | 187 ++ .../golden/enhancements_alias_ref.json | 99 + .../enhancements_alias_response_ref.json | 51 + .../golden/enhancements_all_http_methods.json | 91 + .../golden/enhancements_allof_edges.json | 100 + .../enhancements_defaults_examples.json | 80 + .../golden/enhancements_embedded_types.json | 112 + .../golden/enhancements_enum_docs.json | 42 + 
.../golden/enhancements_input_overlay.json | 32 + .../enhancements_interface_methods.json | 119 + ...ancements_interface_methods_xnullable.json | 121 + .../golden/enhancements_named_basic.json | 33 + .../enhancements_named_struct_tags.json | 43 + .../enhancements_pointers_no_xnullable.json | 60 + .../enhancements_pointers_xnullable.json | 62 + .../golden/enhancements_ref_alias_chain.json | 68 + .../golden/enhancements_response_edges.json | 60 + .../golden/enhancements_strfmt_arrays.json | 47 + .../golden/enhancements_text_marshal.json | 31 + .../golden/enhancements_top_level_kinds.json | 59 + .../golden/go118_params_issue2011.json | 13 + .../golden/go118_responses_issue2011.json | 6 + .../golden/go118_schema_Interfaced.json | 12 + .../golden/go118_schema_NamedWithType.json | 13 + .../golden/go118_schema_aliased.json | 8 + .../golden/go118_schema_transportErr.json | 20 + .../integration/golden/go119_operations.json | 58 + .../golden/go123_aliased_spec.json | 318 ++ .../golden/go123_special_spec.json | 297 ++ .../golden/petstore_schema_Order.json | 61 + .../golden/petstore_schema_Pet.json | 63 + .../golden/petstore_schema_Tag.json | 26 + .../integration/golden/petstore_spec.json | 556 ++++ .../integration/golden/product_responses.json | 11 + .../golden/transparentalias_params.json | 40 + .../golden/transparentalias_responses.json | 15 + internal/builders/items/errors.go | 15 + internal/builders/items/taggers.go | 77 + internal/builders/items/typable.go | 59 + internal/builders/items/validations.go | 36 + internal/builders/operations/errors.go | 9 + internal/builders/operations/operations.go | 97 + .../operations/operations_go119_test.go | 14 +- .../builders/operations/operations_test.go | 19 +- internal/builders/parameters/errors.go | 9 + internal/builders/parameters/parameters.go | 468 +++ .../builders/parameters/parameters_test.go | 85 +- internal/builders/parameters/taggers.go | 62 + internal/builders/parameters/typable.go | 101 + 
internal/builders/resolvers/assertions.go | 102 + internal/builders/resolvers/errors.go | 9 + internal/builders/resolvers/resolvers.go | 195 ++ internal/builders/resolvers/resolvers_test.go | 34 + internal/builders/responses/errors.go | 9 + internal/builders/responses/responses.go | 415 +++ .../builders/responses/responses_test.go | 87 +- internal/builders/responses/taggers.go | 43 + internal/builders/responses/typable.go | 132 + internal/builders/routes/errors.go | 9 + internal/builders/routes/routes.go | 64 + .../builders/routes/routes_test.go | 50 +- internal/builders/routes/setters.go | 48 + internal/builders/routes/taggers.go | 19 + internal/builders/schema/errors.go | 9 + internal/builders/schema/schema.go | 1443 +++++++++ internal/builders/schema/schema_go118_test.go | 109 + internal/builders/schema/schema_test.go | 1506 ++++++++++ internal/builders/schema/taggers.go | 120 + internal/builders/schema/typable.go | 132 + spec.go => internal/builders/spec/spec.go | 151 +- internal/ifaces/ifaces.go | 105 + .../integration/coverage_enhancements_test.go | 291 ++ .../integration/coverage_malformed_test.go | 81 + internal/integration/doc.go | 2 + internal/integration/go_swagger_spec_test.go | 54 + internal/integration/helpers_test.go | 63 + .../integration/petstore_test.go | 181 +- internal/integration/schema_aliased_test.go | 551 ++++ internal/integration/schema_special_test.go | 451 +++ internal/logger/debug.go | 18 + internal/parsers/enum.go | 149 + internal/parsers/enum_test.go | 145 + internal/parsers/errors.go | 9 + internal/parsers/extensions.go | 242 ++ internal/parsers/extensions_test.go | 196 ++ internal/parsers/lines.go | 43 + internal/parsers/lines_test.go | 66 + internal/parsers/matchers.go | 180 ++ internal/parsers/matchers_test.go | 299 ++ meta.go => internal/parsers/meta.go | 59 +- internal/parsers/meta_test.go | 270 ++ internal/parsers/parsed_path_content.go | 66 + internal/parsers/parsed_path_content_test.go | 144 + internal/parsers/parsers.go | 134 
+ .../parsers/parsers_helpers.go | 2 +- internal/parsers/parsers_helpers_test.go | 73 + internal/parsers/parsers_test.go | 99 + regexprs.go => internal/parsers/regexprs.go | 13 +- .../parsers/regexprs_test.go | 32 +- internal/parsers/responses.go | 221 ++ internal/parsers/responses_test.go | 264 ++ .../parsers/route_params.go | 50 +- internal/parsers/route_params_test.go | 250 ++ internal/parsers/sectioned_parser.go | 286 ++ .../parsers/sectioned_parser_go119_test.go | 6 +- .../parsers/sectioned_parser_test.go | 197 +- internal/parsers/security.go | 103 + internal/parsers/security_test.go | 84 + internal/parsers/tag_parsers.go | 86 + internal/parsers/validations.go | 610 ++++ internal/parsers/validations_test.go | 608 ++++ internal/parsers/yaml_parser.go | 106 + internal/parsers/yaml_parser_test.go | 138 + internal/parsers/yaml_spec_parser.go | 199 ++ .../parsers/yaml_spec_parser_test.go | 188 +- internal/scanner/declaration.go | 145 + internal/scanner/declaration_test.go | 272 ++ internal/scanner/errors.go | 9 + internal/scanner/export_test.go | 14 + internal/scanner/index.go | 398 +++ internal/scanner/index_test.go | 551 ++++ internal/scanner/options.go | 22 + internal/scanner/scan_context.go | 399 +++ internal/scanner/scan_context_test.go | 616 ++++ internal/scantest/classification/verify.go | 35 + internal/scantest/doc.go | 2 + internal/scantest/golden.go | 59 + internal/scantest/load.go | 89 + internal/scantest/mocks/doc.go | 4 + internal/scantest/mocks/mocks.go | 1752 +++++++++++ internal/scantest/property.go | 65 + meta_test.go | 183 -- operations.go | 128 - parameters.go | 627 ---- parser.go | 1808 ----------- responses.go | 548 ---- resume | 1 - routes.go | 95 - schema.go | 1754 ----------- schema_go118_test.go | 152 - schema_test.go | 2661 ----------------- taggers.go | 136 - 198 files changed, 25220 insertions(+), 9575 deletions(-) create mode 100644 .mockery.yml create mode 100644 api.go create mode 100644 api_test.go delete mode 100644 application.go 
delete mode 100644 assertions.go delete mode 100644 enum.go delete mode 100644 enum_test.go create mode 100644 fixtures/enhancements/alias-expand/api.go create mode 100644 fixtures/enhancements/alias-response/api.go create mode 100644 fixtures/enhancements/all-http-methods/handlers.go create mode 100644 fixtures/enhancements/allof-edges/types.go create mode 100644 fixtures/enhancements/defaults-examples/types.go create mode 100644 fixtures/enhancements/embedded-types/types.go create mode 100644 fixtures/enhancements/enum-docs/types.go create mode 100644 fixtures/enhancements/interface-methods/types.go create mode 100644 fixtures/enhancements/malformed/bad-contact/doc.go create mode 100644 fixtures/enhancements/malformed/bad-response-tag/handlers.go create mode 100644 fixtures/enhancements/malformed/bad-sec-defs/doc.go create mode 100644 fixtures/enhancements/malformed/default-int/api.go create mode 100644 fixtures/enhancements/malformed/duplicate-body-tag/handlers.go create mode 100644 fixtures/enhancements/malformed/example-int/api.go create mode 100644 fixtures/enhancements/malformed/info-bad-ext-key/doc.go create mode 100644 fixtures/enhancements/malformed/meta-bad-ext-key/doc.go create mode 100644 fixtures/enhancements/named-basic/types.go create mode 100644 fixtures/enhancements/named-struct-tags/types.go create mode 100644 fixtures/enhancements/ref-alias-chain/types.go create mode 100644 fixtures/enhancements/response-edges/api.go create mode 100644 fixtures/enhancements/strfmt-arrays/types.go create mode 100644 fixtures/enhancements/text-marshal/types.go create mode 100644 fixtures/enhancements/top-level-kinds/types.go create mode 100644 fixtures/enhancements/unknown-annotation/types.go create mode 100644 fixtures/goparsing/petstore/enums/priority.go create mode 100644 fixtures/integration/golden/api_spec_go111.json create mode 100644 fixtures/integration/golden/api_spec_go111_ref.json create mode 100644 
fixtures/integration/golden/api_spec_go111_transparent.json create mode 100644 fixtures/integration/golden/bookings_spec.json create mode 100644 fixtures/integration/golden/bugs_3125_schema.json create mode 100644 fixtures/integration/golden/classification_operations.json create mode 100644 fixtures/integration/golden/classification_params.json create mode 100644 fixtures/integration/golden/classification_params_file.json create mode 100644 fixtures/integration/golden/classification_params_issue2007.json create mode 100644 fixtures/integration/golden/classification_params_issue2011.json create mode 100644 fixtures/integration/golden/classification_responses.json create mode 100644 fixtures/integration/golden/classification_responses_issue2007.json create mode 100644 fixtures/integration/golden/classification_responses_issue2011.json create mode 100644 fixtures/integration/golden/classification_routes.json create mode 100644 fixtures/integration/golden/classification_routes_body.json create mode 100644 fixtures/integration/golden/classification_schema_AllOfModel.json create mode 100644 fixtures/integration/golden/classification_schema_NamedWithType.json create mode 100644 fixtures/integration/golden/classification_schema_NoModel.json create mode 100644 fixtures/integration/golden/classification_schema_NullString.json create mode 100644 fixtures/integration/golden/classification_schema_interface_discriminators.json create mode 100644 fixtures/integration/golden/classification_schema_struct_discriminators.json create mode 100644 fixtures/integration/golden/enhancements_alias_expand.json create mode 100644 fixtures/integration/golden/enhancements_alias_ref.json create mode 100644 fixtures/integration/golden/enhancements_alias_response_ref.json create mode 100644 fixtures/integration/golden/enhancements_all_http_methods.json create mode 100644 fixtures/integration/golden/enhancements_allof_edges.json create mode 100644 
fixtures/integration/golden/enhancements_defaults_examples.json create mode 100644 fixtures/integration/golden/enhancements_embedded_types.json create mode 100644 fixtures/integration/golden/enhancements_enum_docs.json create mode 100644 fixtures/integration/golden/enhancements_input_overlay.json create mode 100644 fixtures/integration/golden/enhancements_interface_methods.json create mode 100644 fixtures/integration/golden/enhancements_interface_methods_xnullable.json create mode 100644 fixtures/integration/golden/enhancements_named_basic.json create mode 100644 fixtures/integration/golden/enhancements_named_struct_tags.json create mode 100644 fixtures/integration/golden/enhancements_pointers_no_xnullable.json create mode 100644 fixtures/integration/golden/enhancements_pointers_xnullable.json create mode 100644 fixtures/integration/golden/enhancements_ref_alias_chain.json create mode 100644 fixtures/integration/golden/enhancements_response_edges.json create mode 100644 fixtures/integration/golden/enhancements_strfmt_arrays.json create mode 100644 fixtures/integration/golden/enhancements_text_marshal.json create mode 100644 fixtures/integration/golden/enhancements_top_level_kinds.json create mode 100644 fixtures/integration/golden/go118_params_issue2011.json create mode 100644 fixtures/integration/golden/go118_responses_issue2011.json create mode 100644 fixtures/integration/golden/go118_schema_Interfaced.json create mode 100644 fixtures/integration/golden/go118_schema_NamedWithType.json create mode 100644 fixtures/integration/golden/go118_schema_aliased.json create mode 100644 fixtures/integration/golden/go118_schema_transportErr.json create mode 100644 fixtures/integration/golden/go119_operations.json create mode 100644 fixtures/integration/golden/go123_aliased_spec.json create mode 100644 fixtures/integration/golden/go123_special_spec.json create mode 100644 fixtures/integration/golden/petstore_schema_Order.json create mode 100644 
fixtures/integration/golden/petstore_schema_Pet.json create mode 100644 fixtures/integration/golden/petstore_schema_Tag.json create mode 100644 fixtures/integration/golden/petstore_spec.json create mode 100644 fixtures/integration/golden/product_responses.json create mode 100644 fixtures/integration/golden/transparentalias_params.json create mode 100644 fixtures/integration/golden/transparentalias_responses.json create mode 100644 internal/builders/items/errors.go create mode 100644 internal/builders/items/taggers.go create mode 100644 internal/builders/items/typable.go create mode 100644 internal/builders/items/validations.go create mode 100644 internal/builders/operations/errors.go create mode 100644 internal/builders/operations/operations.go rename operations_go119_test.go => internal/builders/operations/operations_go119_test.go (86%) rename operations_test.go => internal/builders/operations/operations_test.go (89%) create mode 100644 internal/builders/parameters/errors.go create mode 100644 internal/builders/parameters/parameters.go rename parameters_test.go => internal/builders/parameters/parameters_test.go (89%) create mode 100644 internal/builders/parameters/taggers.go create mode 100644 internal/builders/parameters/typable.go create mode 100644 internal/builders/resolvers/assertions.go create mode 100644 internal/builders/resolvers/errors.go create mode 100644 internal/builders/resolvers/resolvers.go create mode 100644 internal/builders/resolvers/resolvers_test.go create mode 100644 internal/builders/responses/errors.go create mode 100644 internal/builders/responses/responses.go rename responses_test.go => internal/builders/responses/responses_test.go (86%) create mode 100644 internal/builders/responses/taggers.go create mode 100644 internal/builders/responses/typable.go create mode 100644 internal/builders/routes/errors.go create mode 100644 internal/builders/routes/routes.go rename routes_test.go => internal/builders/routes/routes_test.go (90%) create 
mode 100644 internal/builders/routes/setters.go create mode 100644 internal/builders/routes/taggers.go create mode 100644 internal/builders/schema/errors.go create mode 100644 internal/builders/schema/schema.go create mode 100644 internal/builders/schema/schema_go118_test.go create mode 100644 internal/builders/schema/schema_test.go create mode 100644 internal/builders/schema/taggers.go create mode 100644 internal/builders/schema/typable.go rename spec.go => internal/builders/spec/spec.go (54%) create mode 100644 internal/ifaces/ifaces.go create mode 100644 internal/integration/coverage_enhancements_test.go create mode 100644 internal/integration/coverage_malformed_test.go create mode 100644 internal/integration/doc.go create mode 100644 internal/integration/go_swagger_spec_test.go create mode 100644 internal/integration/helpers_test.go rename application_test.go => internal/integration/petstore_test.go (76%) create mode 100644 internal/integration/schema_aliased_test.go create mode 100644 internal/integration/schema_special_test.go create mode 100644 internal/logger/debug.go create mode 100644 internal/parsers/enum.go create mode 100644 internal/parsers/enum_test.go create mode 100644 internal/parsers/errors.go create mode 100644 internal/parsers/extensions.go create mode 100644 internal/parsers/extensions_test.go create mode 100644 internal/parsers/lines.go create mode 100644 internal/parsers/lines_test.go create mode 100644 internal/parsers/matchers.go create mode 100644 internal/parsers/matchers_test.go rename meta.go => internal/parsers/meta.go (73%) create mode 100644 internal/parsers/meta_test.go create mode 100644 internal/parsers/parsed_path_content.go create mode 100644 internal/parsers/parsed_path_content_test.go create mode 100644 internal/parsers/parsers.go rename parser_helpers.go => internal/parsers/parsers_helpers.go (98%) create mode 100644 internal/parsers/parsers_helpers_test.go create mode 100644 internal/parsers/parsers_test.go rename 
regexprs.go => internal/parsers/regexprs.go (97%) rename regexprs_test.go => internal/parsers/regexprs_test.go (89%) create mode 100644 internal/parsers/responses.go create mode 100644 internal/parsers/responses_test.go rename route_params.go => internal/parsers/route_params.go (85%) create mode 100644 internal/parsers/route_params_test.go create mode 100644 internal/parsers/sectioned_parser.go rename parser_go119_test.go => internal/parsers/sectioned_parser_go119_test.go (94%) rename parser_test.go => internal/parsers/sectioned_parser_test.go (55%) create mode 100644 internal/parsers/security.go create mode 100644 internal/parsers/security_test.go create mode 100644 internal/parsers/tag_parsers.go create mode 100644 internal/parsers/validations.go create mode 100644 internal/parsers/validations_test.go create mode 100644 internal/parsers/yaml_parser.go create mode 100644 internal/parsers/yaml_parser_test.go create mode 100644 internal/parsers/yaml_spec_parser.go rename yamlparser_test.go => internal/parsers/yaml_spec_parser_test.go (75%) create mode 100644 internal/scanner/declaration.go create mode 100644 internal/scanner/declaration_test.go create mode 100644 internal/scanner/errors.go create mode 100644 internal/scanner/export_test.go create mode 100644 internal/scanner/index.go create mode 100644 internal/scanner/index_test.go create mode 100644 internal/scanner/options.go create mode 100644 internal/scanner/scan_context.go create mode 100644 internal/scanner/scan_context_test.go create mode 100644 internal/scantest/classification/verify.go create mode 100644 internal/scantest/doc.go create mode 100644 internal/scantest/golden.go create mode 100644 internal/scantest/load.go create mode 100644 internal/scantest/mocks/doc.go create mode 100644 internal/scantest/mocks/mocks.go create mode 100644 internal/scantest/property.go delete mode 100644 meta_test.go delete mode 100644 operations.go delete mode 100644 parameters.go delete mode 100644 parser.go delete mode 
100644 responses.go delete mode 100644 resume delete mode 100644 routes.go delete mode 100644 schema.go delete mode 100644 schema_go118_test.go delete mode 100644 schema_test.go delete mode 100644 taggers.go diff --git a/.gitignore b/.gitignore index 4ab96fe..e01b887 100644 --- a/.gitignore +++ b/.gitignore @@ -27,3 +27,4 @@ profile.cov # .idea/ # .vscode/ .mcp.json +.worktrees diff --git a/.mockery.yml b/.mockery.yml new file mode 100644 index 0000000..b3c75d7 --- /dev/null +++ b/.mockery.yml @@ -0,0 +1,21 @@ +all: false +dir: '{{.InterfaceDir}}' +filename: mocks_test.go +force-file-write: true +formatter: goimports +include-auto-generated: false +log-level: info +structname: '{{.Mock}}{{.InterfaceName}}' +pkgname: '{{.SrcPackageName}}' +recursive: false +require-template-schema-exists: true +template: matryer +template-schema: '{{.Template}}.schema.json' +packages: + github.com/go-openapi/codescan/internal/ifaces: + config: + dir: internal/scantest/mocks + filename: mocks.go + pkgname: 'mocks' + force-file-write: true + all: true diff --git a/api.go b/api.go new file mode 100644 index 0000000..0424b9f --- /dev/null +++ b/api.go @@ -0,0 +1,31 @@ +// SPDX-FileCopyrightText: Copyright 2015-2025 go-swagger maintainers +// SPDX-License-Identifier: Apache-2.0 + +package codescan + +import ( + "fmt" + + "github.com/go-openapi/codescan/internal/builders/spec" + "github.com/go-openapi/codescan/internal/scanner" + oaispec "github.com/go-openapi/spec" +) + +// Options for the scanner. +type Options = scanner.Options + +// Run the scanner to produce a swagger spec with the options provided. 
+func Run(opts *Options) (*oaispec.Swagger, error) { // TODO(fred/claude): use option functors pattern + ctx, err := scanner.NewScanCtx(opts) + if err != nil { + return nil, fmt.Errorf("could not scan source: %w: %w", err, ErrCodeScan) + } + + builder := spec.NewBuilder(opts.InputSpec, ctx, opts.ScanModels) // TODO(fred/claude): use option functors pattern + sp, err := builder.Build() + if err != nil { + return nil, fmt.Errorf("could not build spec: %w: %w", err, ErrCodeScan) + } + + return sp, nil +} diff --git a/api_test.go b/api_test.go new file mode 100644 index 0000000..ee5c8a2 --- /dev/null +++ b/api_test.go @@ -0,0 +1,91 @@ +// SPDX-FileCopyrightText: Copyright 2015-2025 go-swagger maintainers +// SPDX-License-Identifier: Apache-2.0 + +package codescan + +import ( + "flag" + "io" + "log" + "os" + "path/filepath" + "testing" + + "github.com/go-openapi/testify/v2/require" +) + +// Public-API smoke suite. Fixture-heavy tests live in internal/integration. + +var enableDebug bool //nolint:gochecknoglobals // test flag registered in init + +func init() { //nolint:gochecknoinits // registers test flags before TestMain + flag.BoolVar(&enableDebug, "enable-debug", false, "enable debug output in tests") +} + +func TestMain(m *testing.M) { + flag.Parse() + + if !enableDebug { + log.SetOutput(io.Discard) + } else { + log.SetFlags(log.LstdFlags | log.Lshortfile) + log.SetOutput(os.Stderr) + } + + os.Exit(m.Run()) +} + +func TestApplication_DebugLogging(t *testing.T) { + // Exercises the logger.DebugLogf code path with Debug: true. + _, err := Run(&Options{ + Packages: []string{"./goparsing/petstore/..."}, + WorkDir: "fixtures", + ScanModels: true, + Debug: true, + }) + + require.NoError(t, err) +} + +func TestRun_InvalidWorkDir(t *testing.T) { + // Exercises the Run() error path when package loading fails. 
+ _, err := Run(&Options{ + Packages: []string{"./..."}, + WorkDir: "/nonexistent/directory", + }) + + require.Error(t, err) +} + +func TestSetEnumDoesNotPanic(t *testing.T) { + // Regression: ensure Run() does not panic on minimal source with an enum. + dir := t.TempDir() + + src := ` + package failure + + // swagger:model Order + type Order struct { + State State ` + "`json:\"state\"`" + ` + } + + // State represents the state of an order. + // enum: ["created","processed"] + type State string + ` + err := os.WriteFile(filepath.Join(dir, "model.go"), []byte(src), 0o600) + require.NoError(t, err) + + goMod := ` + module failure + go 1.23` + err = os.WriteFile(filepath.Join(dir, "go.mod"), []byte(goMod), 0o600) + require.NoError(t, err) + + _, err = Run(&Options{ + WorkDir: dir, + ScanModels: true, + }) + + require.NoError(t, err) +} diff --git a/application.go b/application.go deleted file mode 100644 index 0f07b51..0000000 --- a/application.go +++ /dev/null @@ -1,833 +0,0 @@ -// SPDX-FileCopyrightText: Copyright 2015-2025 go-swagger maintainers -// SPDX-License-Identifier: Apache-2.0 - -package codescan - -import ( - "fmt" - "go/ast" - "go/token" - "go/types" - "log" - "regexp" - "strings" - - "github.com/go-openapi/spec" - - "golang.org/x/tools/go/packages" -) - -const pkgLoadMode = packages.NeedName | packages.NeedFiles | packages.NeedImports | packages.NeedDeps | packages.NeedTypes | packages.NeedSyntax | packages.NeedTypesInfo - -type node uint32 - -const ( - metaNode node = 1 << iota - routeNode - operationNode - modelNode - parametersNode - responseNode -) - -// Options for the scanner. 
-type Options struct { - Packages []string - InputSpec *spec.Swagger - ScanModels bool - WorkDir string - BuildTags string - ExcludeDeps bool - Include []string - Exclude []string - IncludeTags []string - ExcludeTags []string - SetXNullableForPointers bool - RefAliases bool // aliases result in $ref, otherwise aliases are expanded - TransparentAliases bool // aliases are completely transparent, never creating definitions - DescWithRef bool // allow overloaded descriptions together with $ref, otherwise jsonschema draft4 $ref predates everything - SkipExtensions bool // skip generating x-go-* vendor extensions in the spec - Debug bool // enable verbose debug logging during scanning -} - -type scanCtx struct { - pkgs []*packages.Package - app *typeIndex - debug bool - - opts *Options -} - -func sliceToSet(names []string) map[string]bool { - result := make(map[string]bool) - for _, v := range names { - result[v] = true - } - return result -} - -// Run the scanner to produce a spec with the options provided. -func Run(opts *Options) (*spec.Swagger, error) { - sc, err := newScanCtx(opts) - if err != nil { - return nil, err - } - sb := newSpecBuilder(opts.InputSpec, sc, opts.ScanModels) - return sb.Build() -} - -func newScanCtx(opts *Options) (*scanCtx, error) { - cfg := &packages.Config{ - Dir: opts.WorkDir, - Mode: pkgLoadMode, - Tests: false, - } - if opts.BuildTags != "" { - cfg.BuildFlags = []string{"-tags", opts.BuildTags} - } - - pkgs, err := packages.Load(cfg, opts.Packages...) 
- if err != nil { - return nil, err - } - - app, err := newTypeIndex(pkgs, - withExcludeDeps(opts.ExcludeDeps), - withIncludeTags(sliceToSet(opts.IncludeTags)), - withExcludeTags(sliceToSet(opts.ExcludeTags)), - withIncludePkgs(opts.Include), - withExcludePkgs(opts.Exclude), - withXNullableForPointers(opts.SetXNullableForPointers), - withRefAliases(opts.RefAliases), - withTransparentAliases(opts.TransparentAliases), - withDebug(opts.Debug), - ) - if err != nil { - return nil, err - } - - return &scanCtx{ - pkgs: pkgs, - app: app, - debug: opts.Debug, - opts: opts, - }, nil -} - -type entityDecl struct { - Comments *ast.CommentGroup - Type *types.Named - Alias *types.Alias // added to supplement Named, after go1.22 - Ident *ast.Ident - Spec *ast.TypeSpec - File *ast.File - Pkg *packages.Package - hasModelAnnotation bool - hasResponseAnnotation bool - hasParameterAnnotation bool -} - -// Obj returns the type name for the declaration defining the named type or alias t. -func (d *entityDecl) Obj() *types.TypeName { - if d.Type != nil { - return d.Type.Obj() - } - if d.Alias != nil { - return d.Alias.Obj() - } - - panic("invalid entityDecl: Type and Alias are both nil") -} - -func (d *entityDecl) ObjType() types.Type { - if d.Type != nil { - return d.Type - } - if d.Alias != nil { - return d.Alias - } - - panic("invalid entityDecl: Type and Alias are both nil") -} - -func (d *entityDecl) Names() (name, goName string) { - goName = d.Ident.Name - name = goName - if d.Comments == nil { - return name, goName - } - -DECLS: - for _, cmt := range d.Comments.List { - for ln := range strings.SplitSeq(cmt.Text, "\n") { - matches := rxModelOverride.FindStringSubmatch(ln) - if len(matches) > 0 { - d.hasModelAnnotation = true - } - if len(matches) > 1 && len(matches[1]) > 0 { - name = matches[1] - break DECLS - } - } - } - - return name, goName -} - -func (d *entityDecl) ResponseNames() (name, goName string) { - goName = d.Ident.Name - name = goName - if d.Comments == nil { - return 
name, goName - } - -DECLS: - for _, cmt := range d.Comments.List { - for ln := range strings.SplitSeq(cmt.Text, "\n") { - matches := rxResponseOverride.FindStringSubmatch(ln) - if len(matches) > 0 { - d.hasResponseAnnotation = true - } - if len(matches) > 1 && len(matches[1]) > 0 { - name = matches[1] - break DECLS - } - } - } - return name, goName -} - -func (d *entityDecl) OperationIDs() (result []string) { - if d == nil || d.Comments == nil { - return nil - } - - for _, cmt := range d.Comments.List { - for ln := range strings.SplitSeq(cmt.Text, "\n") { - matches := rxParametersOverride.FindStringSubmatch(ln) - if len(matches) > 0 { - d.hasParameterAnnotation = true - } - if len(matches) > 1 && len(matches[1]) > 0 { - for pt := range strings.SplitSeq(matches[1], " ") { - tr := strings.TrimSpace(pt) - if len(tr) > 0 { - result = append(result, tr) - } - } - } - } - } - return result -} - -func (d *entityDecl) HasModelAnnotation() bool { - if d.hasModelAnnotation { - return true - } - if d.Comments == nil { - return false - } - for _, cmt := range d.Comments.List { - for ln := range strings.SplitSeq(cmt.Text, "\n") { - matches := rxModelOverride.FindStringSubmatch(ln) - if len(matches) > 0 { - d.hasModelAnnotation = true - return true - } - } - } - return false -} - -func (d *entityDecl) HasResponseAnnotation() bool { - if d.hasResponseAnnotation { - return true - } - if d.Comments == nil { - return false - } - for _, cmt := range d.Comments.List { - for ln := range strings.SplitSeq(cmt.Text, "\n") { - matches := rxResponseOverride.FindStringSubmatch(ln) - if len(matches) > 0 { - d.hasResponseAnnotation = true - return true - } - } - } - return false -} - -func (d *entityDecl) HasParameterAnnotation() bool { - if d.hasParameterAnnotation { - return true - } - if d.Comments == nil { - return false - } - for _, cmt := range d.Comments.List { - for ln := range strings.SplitSeq(cmt.Text, "\n") { - matches := rxParametersOverride.FindStringSubmatch(ln) - if len(matches) 
> 0 { - d.hasParameterAnnotation = true - return true - } - } - } - return false -} - -func (s *scanCtx) FindDecl(pkgPath, name string) (*entityDecl, bool) { - pkg, ok := s.app.AllPackages[pkgPath] - if !ok { - return nil, false - } - - for _, file := range pkg.Syntax { - for _, d := range file.Decls { - gd, ok := d.(*ast.GenDecl) - if !ok { - continue - } - - for _, sp := range gd.Specs { - ts, ok := sp.(*ast.TypeSpec) - if !ok || ts.Name.Name != name { - continue - } - - def, ok := pkg.TypesInfo.Defs[ts.Name] - if !ok { - debugLogf(s.debug, "couldn't find type info for %s", ts.Name) - continue - } - - nt, isNamed := def.Type().(*types.Named) - at, isAliased := def.Type().(*types.Alias) - if !isNamed && !isAliased { - debugLogf(s.debug, "%s is not a named or an aliased type but a %T", ts.Name, def.Type()) - continue - } - - comments := ts.Doc // type ( /* doc */ Foo struct{} ) - if comments == nil { - comments = gd.Doc // /* doc */ type ( Foo struct{} ) - } - - return &entityDecl{ - Comments: comments, - Type: nt, - Alias: at, - Ident: ts.Name, - Spec: ts, - File: file, - Pkg: pkg, - }, true - } - } - } - - return nil, false -} - -func (s *scanCtx) FindModel(pkgPath, name string) (*entityDecl, bool) { - for _, cand := range s.app.Models { - ct := cand.Obj() - if ct.Name() == name && ct.Pkg().Path() == pkgPath { - return cand, true - } - } - - if decl, found := s.FindDecl(pkgPath, name); found { - s.app.ExtraModels[decl.Ident] = decl - return decl, true - } - - return nil, false -} - -func (s *scanCtx) PkgForPath(pkgPath string) (*packages.Package, bool) { - v, ok := s.app.AllPackages[pkgPath] - return v, ok -} - -func (s *scanCtx) DeclForType(t types.Type) (*entityDecl, bool) { - switch tpe := t.(type) { - case *types.Pointer: - return s.DeclForType(tpe.Elem()) - case *types.Named: - return s.FindDecl(tpe.Obj().Pkg().Path(), tpe.Obj().Name()) - case *types.Alias: - return s.FindDecl(tpe.Obj().Pkg().Path(), tpe.Obj().Name()) - - default: - log.Printf("WARNING: 
unknown type to find the package for [%T]: %s", t, t.String()) - - return nil, false - } -} - -func (s *scanCtx) PkgForType(t types.Type) (*packages.Package, bool) { - switch tpe := t.(type) { - // case *types.Basic: - // case *types.Struct: - // case *types.Pointer: - // case *types.Interface: - // case *types.Array: - // case *types.Slice: - // case *types.Map: - case *types.Named: - v, ok := s.app.AllPackages[tpe.Obj().Pkg().Path()] - return v, ok - case *types.Alias: - v, ok := s.app.AllPackages[tpe.Obj().Pkg().Path()] - return v, ok - default: - log.Printf("unknown type to find the package for [%T]: %s", t, t.String()) - return nil, false - } -} - -func (s *scanCtx) FindComments(pkg *packages.Package, name string) (*ast.CommentGroup, bool) { - for _, f := range pkg.Syntax { - for _, d := range f.Decls { - gd, ok := d.(*ast.GenDecl) - if !ok { - continue - } - - for _, s := range gd.Specs { - if ts, ok := s.(*ast.TypeSpec); ok { - if ts.Name.Name == name { - return gd.Doc, true - } - } - } - } - } - return nil, false -} - -func (s *scanCtx) FindEnumValues(pkg *packages.Package, enumName string) (list []any, descList []string, _ bool) { - for _, f := range pkg.Syntax { - for _, d := range f.Decls { - gd, ok := d.(*ast.GenDecl) - if !ok { - continue - } - - if gd.Tok != token.CONST { - continue - } - - for _, spec := range gd.Specs { - literalValue, description := s.findEnumValue(spec, enumName) - if literalValue == nil { - continue - } - - list = append(list, literalValue) - descList = append(descList, description) - } - } - } - - return list, descList, true -} - -func (s *scanCtx) findEnumValue(spec ast.Spec, enumName string) (literalValue any, description string) { - vs, ok := spec.(*ast.ValueSpec) - if !ok { - return nil, "" - } - - vsIdent, ok := vs.Type.(*ast.Ident) - if !ok { - return nil, "" - } - - if vsIdent.Name != enumName { - return nil, "" - } - - if len(vs.Values) == 0 { - return nil, "" - } - - bl, ok := vs.Values[0].(*ast.BasicLit) - if !ok { - 
return nil, "" - } - - literalValue = getEnumBasicLitValue(bl) - - // build the enum description - var ( - desc = &strings.Builder{} - namesLen = len(vs.Names) - ) - - fmt.Fprintf(desc, "%v ", literalValue) - for i, name := range vs.Names { - desc.WriteString(name.Name) - if i < namesLen-1 { - desc.WriteString(" ") - } - } - - if vs.Doc != nil { - docListLen := len(vs.Doc.List) - if docListLen > 0 { - desc.WriteString(" ") - } - - for i, doc := range vs.Doc.List { - if doc.Text != "" { - text := strings.TrimPrefix(doc.Text, "//") - desc.WriteString(text) - if i < docListLen-1 { - desc.WriteString(" ") - } - } - } - } - - description = desc.String() - - return literalValue, description -} - -type typeIndexOption func(*typeIndex) - -func withExcludeDeps(excluded bool) typeIndexOption { - return func(a *typeIndex) { - a.excludeDeps = excluded - } -} - -func withIncludeTags(included map[string]bool) typeIndexOption { - return func(a *typeIndex) { - a.includeTags = included - } -} - -func withExcludeTags(excluded map[string]bool) typeIndexOption { - return func(a *typeIndex) { - a.excludeTags = excluded - } -} - -func withIncludePkgs(included []string) typeIndexOption { - return func(a *typeIndex) { - a.includePkgs = included - } -} - -func withExcludePkgs(excluded []string) typeIndexOption { - return func(a *typeIndex) { - a.excludePkgs = excluded - } -} - -func withXNullableForPointers(enabled bool) typeIndexOption { - return func(a *typeIndex) { - a.setXNullableForPointers = enabled - } -} - -func withRefAliases(enabled bool) typeIndexOption { - return func(a *typeIndex) { - a.refAliases = enabled - } -} - -func withTransparentAliases(enabled bool) typeIndexOption { - return func(a *typeIndex) { - a.transparentAliases = enabled - } -} - -func withDebug(enabled bool) typeIndexOption { - return func(a *typeIndex) { - a.debug = enabled - } -} - -func newTypeIndex(pkgs []*packages.Package, opts ...typeIndexOption) (*typeIndex, error) { - ac := &typeIndex{ - AllPackages: 
make(map[string]*packages.Package), - Models: make(map[*ast.Ident]*entityDecl), - ExtraModels: make(map[*ast.Ident]*entityDecl), - } - for _, apply := range opts { - apply(ac) - } - - if err := ac.build(pkgs); err != nil { - return nil, err - } - return ac, nil -} - -type typeIndex struct { - AllPackages map[string]*packages.Package - Models map[*ast.Ident]*entityDecl - ExtraModels map[*ast.Ident]*entityDecl - Meta []metaSection - Routes []parsedPathContent - Operations []parsedPathContent - Parameters []*entityDecl - Responses []*entityDecl - excludeDeps bool - includeTags map[string]bool - excludeTags map[string]bool - includePkgs []string - excludePkgs []string - setXNullableForPointers bool - refAliases bool - transparentAliases bool - debug bool -} - -func (a *typeIndex) build(pkgs []*packages.Package) error { - for _, pkg := range pkgs { - if _, known := a.AllPackages[pkg.PkgPath]; known { - continue - } - a.AllPackages[pkg.PkgPath] = pkg - if err := a.processPackage(pkg); err != nil { - return err - } - if err := a.walkImports(pkg); err != nil { - return err - } - } - - return nil -} - -func (a *typeIndex) processPackage(pkg *packages.Package) error { - if !shouldAcceptPkg(pkg.PkgPath, a.includePkgs, a.excludePkgs) { - debugLogf(a.debug, "package %s is ignored due to rules", pkg.Name) - return nil - } - - for _, file := range pkg.Syntax { - if err := a.processFile(pkg, file); err != nil { - return err - } - } - - return nil -} - -func (a *typeIndex) processFile(pkg *packages.Package, file *ast.File) error { - n, err := a.detectNodes(file) - if err != nil { - return err - } - - if n&metaNode != 0 { - a.Meta = append(a.Meta, metaSection{Comments: file.Doc}) - } - - if n&operationNode != 0 { - a.Operations = a.collectPathAnnotations(rxOperation, file.Comments, a.Operations) - } - - if n&routeNode != 0 { - a.Routes = a.collectPathAnnotations(rxRoute, file.Comments, a.Routes) - } - - a.processFileDecls(pkg, file, n) - - return nil -} - -func (a *typeIndex) 
collectPathAnnotations(rx *regexp.Regexp, comments []*ast.CommentGroup, dst []parsedPathContent) []parsedPathContent { - for _, cmts := range comments { - pp := parsePathAnnotation(rx, cmts.List) - if pp.Method == "" { - continue - } - if !shouldAcceptTag(pp.Tags, a.includeTags, a.excludeTags) { - debugLogf(a.debug, "operation %s %s is ignored due to tag rules", pp.Method, pp.Path) - continue - } - dst = append(dst, pp) - } - return dst -} - -func (a *typeIndex) processFileDecls(pkg *packages.Package, file *ast.File, n node) { - for _, dt := range file.Decls { - switch fd := dt.(type) { - case *ast.BadDecl: - continue - case *ast.FuncDecl: - if fd.Body == nil { - continue - } - for _, stmt := range fd.Body.List { - if dstm, ok := stmt.(*ast.DeclStmt); ok { - if gd, isGD := dstm.Decl.(*ast.GenDecl); isGD { - a.processDecl(pkg, file, n, gd) - } - } - } - case *ast.GenDecl: - a.processDecl(pkg, file, n, fd) - } - } -} - -func (a *typeIndex) processDecl(pkg *packages.Package, file *ast.File, n node, gd *ast.GenDecl) { - for _, sp := range gd.Specs { - switch ts := sp.(type) { - case *ast.ValueSpec: - debugLogf(a.debug, "saw value spec: %v", ts.Names) - return - case *ast.ImportSpec: - debugLogf(a.debug, "saw import spec: %v", ts.Name) - return - case *ast.TypeSpec: - def, ok := pkg.TypesInfo.Defs[ts.Name] - if !ok { - debugLogf(a.debug, "couldn't find type info for %s", ts.Name) - continue - } - nt, isNamed := def.Type().(*types.Named) - at, isAliased := def.Type().(*types.Alias) - if !isNamed && !isAliased { - debugLogf(a.debug, "%s is not a named or aliased type but a %T", ts.Name, def.Type()) - - continue - } - - comments := ts.Doc // type ( /* doc */ Foo struct{} ) - if comments == nil { - comments = gd.Doc // /* doc */ type ( Foo struct{} ) - } - - decl := &entityDecl{ - Comments: comments, - Type: nt, - Alias: at, - Ident: ts.Name, - Spec: ts, - File: file, - Pkg: pkg, - } - key := ts.Name - switch { - case n&modelNode != 0 && decl.HasModelAnnotation(): - 
a.Models[key] = decl - case n&parametersNode != 0 && decl.HasParameterAnnotation(): - a.Parameters = append(a.Parameters, decl) - case n&responseNode != 0 && decl.HasResponseAnnotation(): - a.Responses = append(a.Responses, decl) - default: - debugLogf(a.debug, - "type %q skipped because it is not tagged as a model, a parameter or a response. %s", - decl.Obj().Name(), - "It may reenter the scope because it is a discovered dependency", - ) - } - } - } -} - -func (a *typeIndex) walkImports(pkg *packages.Package) error { - if a.excludeDeps { - return nil - } - for _, v := range pkg.Imports { - if _, known := a.AllPackages[v.PkgPath]; known { - continue - } - - a.AllPackages[v.PkgPath] = v - if err := a.processPackage(v); err != nil { - return err - } - if err := a.walkImports(v); err != nil { - return err - } - } - return nil -} - -func checkStructConflict(seenStruct *string, annotation string, text string) error { - if *seenStruct != "" && *seenStruct != annotation { - return fmt.Errorf("classifier: already annotated as %s, can't also be %q - %s: %w", *seenStruct, annotation, text, ErrCodeScan) - } - *seenStruct = annotation - return nil -} - -// detectNodes scans all comment groups in a file and returns a bitmask of -// detected swagger annotation types. Node types like route, operation, and -// meta accumulate freely across comment groups. Struct-level annotations -// (model, parameters, response) are mutually exclusive within a single -// comment group — mixing them is an error.
-func (a *typeIndex) detectNodes(file *ast.File) (node, error) { - var n node - for _, comments := range file.Comments { - var seenStruct string // tracks the struct annotation for this comment group - for _, cline := range comments.List { - if cline == nil { - continue - } - } - - for _, cline := range comments.List { - if cline == nil { - continue - } - - matches := rxSwaggerAnnotation.FindStringSubmatch(cline.Text) - if len(matches) < minAnnotationMatch { - continue - } - - switch matches[1] { - case "route": - n |= routeNode - case "operation": - n |= operationNode - case "model": - n |= modelNode - if err := checkStructConflict(&seenStruct, matches[1], cline.Text); err != nil { - return 0, err - } - case "meta": - n |= metaNode - case "parameters": - n |= parametersNode - if err := checkStructConflict(&seenStruct, matches[1], cline.Text); err != nil { - return 0, err - } - case "response": - n |= responseNode - if err := checkStructConflict(&seenStruct, matches[1], cline.Text); err != nil { - return 0, err - } - case "strfmt", paramNameKey, "discriminated", "file", "enum", "default", "alias", "type": - // TODO: perhaps collect these and pass along to avoid lookups later on - case "allOf": - case "ignore": - default: - return 0, fmt.Errorf("classifier: unknown swagger annotation %q: %w", matches[1], ErrCodeScan) - } - } - } - - return n, nil -} - -func debugLogf(debug bool, format string, args ...any) { - if debug { - _ = log.Output(logCallerDepth, fmt.Sprintf(format, args...)) - } -} diff --git a/assertions.go b/assertions.go deleted file mode 100644 index e78f760..0000000 --- a/assertions.go +++ /dev/null @@ -1,37 +0,0 @@ -// SPDX-FileCopyrightText: Copyright 2015-2025 go-swagger maintainers -// SPDX-License-Identifier: Apache-2.0 - -package codescan - -import ( - "fmt" - "go/types" -) - -type testError string - -func (e testError) Error() string { - return string(e) -} - -const ( - errInternal testError = "internal error due to a bug or a mishandling of go 
types AST. This usually indicates a bug in the scanner" -) - -// code assertions to be explicit about the various expectations when entering a function - -func mustNotBeABuiltinType(o *types.TypeName) { - if o.Pkg() != nil { - return - } - - panic(fmt.Errorf("type %q expected not to be a builtin: %w", o.Name(), errInternal)) -} - -func mustHaveRightHandSide(a *types.Alias) { - if a.Rhs() != nil { - return - } - - panic(fmt.Errorf("type alias %q expected to declare a right-hand-side: %w", a.Obj().Name(), errInternal)) -} diff --git a/enum.go b/enum.go deleted file mode 100644 index 9f958d6..0000000 --- a/enum.go +++ /dev/null @@ -1,35 +0,0 @@ -// SPDX-FileCopyrightText: Copyright 2015-2025 go-swagger maintainers -// SPDX-License-Identifier: Apache-2.0 - -package codescan - -import ( - "go/ast" - "strconv" - "strings" - - "github.com/go-openapi/spec" -) - -func getEnumBasicLitValue(basicLit *ast.BasicLit) any { - switch basicLit.Kind.String() { - case "INT": - if result, err := strconv.ParseInt(basicLit.Value, 10, 64); err == nil { - return result - } - case "FLOAT": - if result, err := strconv.ParseFloat(basicLit.Value, 64); err == nil { - return result - } - default: - return strings.Trim(basicLit.Value, "\"") - } - return nil -} - -const extEnumDesc = "x-go-enum-desc" - -func getEnumDesc(extensions spec.Extensions) (desc string) { - desc, _ = extensions.GetString(extEnumDesc) - return desc -} diff --git a/enum_test.go b/enum_test.go deleted file mode 100644 index b1be0d3..0000000 --- a/enum_test.go +++ /dev/null @@ -1,39 +0,0 @@ -// SPDX-FileCopyrightText: Copyright 2015-2025 go-swagger maintainers -// SPDX-License-Identifier: Apache-2.0 - -package codescan - -import ( - "go/ast" - "go/token" - "testing" - - "github.com/go-openapi/testify/v2/assert" -) - -func Test_getEnumBasicLitValue(t *testing.T) { - verifyGetEnumBasicLitValue(t, ast.BasicLit{Kind: token.INT, Value: "0"}, int64(0)) - verifyGetEnumBasicLitValue(t, ast.BasicLit{Kind: token.INT, Value: "-1"}, 
int64(-1)) - verifyGetEnumBasicLitValue(t, ast.BasicLit{Kind: token.INT, Value: "42"}, int64(42)) - verifyGetEnumBasicLitValue(t, ast.BasicLit{Kind: token.INT, Value: ""}, nil) - verifyGetEnumBasicLitValue(t, ast.BasicLit{Kind: token.INT, Value: "word"}, nil) - - verifyGetEnumBasicLitValue(t, ast.BasicLit{Kind: token.FLOAT, Value: "0"}, float64(0)) - verifyGetEnumBasicLitValue(t, ast.BasicLit{Kind: token.FLOAT, Value: "-1"}, float64(-1)) - verifyGetEnumBasicLitValue(t, ast.BasicLit{Kind: token.FLOAT, Value: "42"}, float64(42)) - verifyGetEnumBasicLitValue(t, ast.BasicLit{Kind: token.FLOAT, Value: "1.1234"}, float64(1.1234)) - verifyGetEnumBasicLitValue(t, ast.BasicLit{Kind: token.FLOAT, Value: "1.9876"}, float64(1.9876)) - verifyGetEnumBasicLitValue(t, ast.BasicLit{Kind: token.FLOAT, Value: ""}, nil) - verifyGetEnumBasicLitValue(t, ast.BasicLit{Kind: token.FLOAT, Value: "word"}, nil) - - verifyGetEnumBasicLitValue(t, ast.BasicLit{Kind: token.STRING, Value: "Foo"}, "Foo") - verifyGetEnumBasicLitValue(t, ast.BasicLit{Kind: token.STRING, Value: ""}, "") - verifyGetEnumBasicLitValue(t, ast.BasicLit{Kind: token.STRING, Value: "0"}, "0") - verifyGetEnumBasicLitValue(t, ast.BasicLit{Kind: token.STRING, Value: "1.1"}, "1.1") -} - -func verifyGetEnumBasicLitValue(t *testing.T, basicLit ast.BasicLit, expected any) { - actual := getEnumBasicLitValue(&basicLit) - - assert.Equal(t, expected, actual) -} diff --git a/errors.go b/errors.go index 16a68ff..522a133 100644 --- a/errors.go +++ b/errors.go @@ -3,7 +3,13 @@ package codescan -import "errors" +type codescanError string -// ErrCodeScan is the sentinel error for all errors originating from the codescan package. -var ErrCodeScan = errors.New("codescan") +func (e codescanError) Error() string { + return string(e) +} + +const ( + // ErrCodeScan is the sentinel error for all errors originating from the codescan package. 
+ ErrCodeScan codescanError = "codescan error" +) diff --git a/fixtures/enhancements/alias-expand/api.go b/fixtures/enhancements/alias-expand/api.go new file mode 100644 index 0000000..22fbab5 --- /dev/null +++ b/fixtures/enhancements/alias-expand/api.go @@ -0,0 +1,108 @@ +// SPDX-FileCopyrightText: Copyright 2015-2025 go-swagger maintainers +// SPDX-License-Identifier: Apache-2.0 + +// Package alias_expand exercises non-transparent alias handling for both +// parameter and response scanners. It is designed to be scanned with the +// default Options (TransparentAliases=false) and with RefAliases=true to +// cover the two non-transparent code paths. +package alias_expand + +// Payload is the canonical struct referenced by aliases. +// +// swagger:model Payload +type Payload struct { + // required: true + ID int64 `json:"id"` + + Name string `json:"name"` +} + +// PayloadAlias aliases Payload once. +type PayloadAlias = Payload + +// PayloadAlias2 aliases PayloadAlias (alias-of-alias chain). +type PayloadAlias2 = PayloadAlias + +// QueryID is a named string used as the base of a non-body parameter alias. +type QueryID string + +// QueryIDAlias aliases QueryID for a non-body parameter field. +type QueryIDAlias = QueryID + +// AliasedParams exposes one body parameter that is an alias, one body +// parameter that is an alias of an alias, and one non-body parameter that +// is an alias of a primitive-backed named type. +// +// swagger:parameters aliasedRequest +type AliasedParams struct { + // BodyPrimary is a body parameter of aliased struct type. + // + // in: body + // required: true + BodyPrimary PayloadAlias `json:"bodyPrimary"` + + // BodyNested is a body parameter whose type is an alias of an alias. + // + // in: body + BodyNested PayloadAlias2 `json:"bodyNested"` + + // Lookup is a query parameter aliased off a named primitive type. 
+ // + // in: query + Lookup QueryIDAlias `json:"lookup"` +} + +// ResponseEnvelope is the canonical struct referenced by aliases used in +// responses. +// +// swagger:model ResponseEnvelope +type ResponseEnvelope struct { + Payload PayloadAlias `json:"payload"` +} + +// EnvelopeAlias aliases ResponseEnvelope once. +type EnvelopeAlias = ResponseEnvelope + +// EnvelopeAlias2 aliases EnvelopeAlias (alias-of-alias). +type EnvelopeAlias2 = EnvelopeAlias + +// AliasedResponse has a body field whose type is an alias chain. +// +// swagger:response aliasedResponse +type AliasedResponse struct { + // Body is an alias of an alias. + // + // in: body + Body EnvelopeAlias2 `json:"body"` +} + +// exportedParams is the backing struct for an aliased swagger:parameters. +type exportedParams struct { + // in: query + Search string `json:"search"` + + // in: body + // required: true + Data Payload `json:"data"` +} + +// AliasedTopParams annotates an alias as the parameters set: the scanner +// must resolve the alias via parameterBuilder.buildAlias. +// +// swagger:parameters aliasedTop +type AliasedTopParams = exportedParams + +// AliasedTopParams2 chains AliasedTopParams through a second alias layer. +// +// swagger:parameters aliasedTop2 +type AliasedTopParams2 = AliasedTopParams + +// NamedTopResponse is a plain struct annotated as a response — used to +// keep a deterministic response in the expand-mode fixture even though +// response-level aliasing is deferred to the alias-response fixture. 
+// +// swagger:response namedTopResponse +type NamedTopResponse struct { + // in: body + Body ResponseEnvelope `json:"body"` +} diff --git a/fixtures/enhancements/alias-response/api.go b/fixtures/enhancements/alias-response/api.go new file mode 100644 index 0000000..4b137c6 --- /dev/null +++ b/fixtures/enhancements/alias-response/api.go @@ -0,0 +1,34 @@ +// SPDX-FileCopyrightText: Copyright 2015-2025 go-swagger maintainers +// SPDX-License-Identifier: Apache-2.0 + +// Package alias_response exercises response-level top-alias handling. +// It is scanned with RefAliases=true because the non-transparent expand +// path on top-level response aliases is not supported by the scanner. +package alias_response + +// Envelope is the canonical response body type. +// +// swagger:model Envelope +type Envelope struct { + // required: true + ID int64 `json:"id"` + + Name string `json:"name"` +} + +// exportedResponse is the backing struct for the aliased response. +type exportedResponse struct { + // in: body + Body Envelope `json:"body"` +} + +// AliasedTopResponse annotates an alias as the response: the scanner +// resolves it via responseBuilder.buildAlias under RefAliases=true. +// +// swagger:response aliasedTopResponse +type AliasedTopResponse = exportedResponse + +// AliasedTopResponse2 chains AliasedTopResponse through a second alias. +// +// swagger:response aliasedTopResponse2 +type AliasedTopResponse2 = AliasedTopResponse diff --git a/fixtures/enhancements/all-http-methods/handlers.go b/fixtures/enhancements/all-http-methods/handlers.go new file mode 100644 index 0000000..86d17e6 --- /dev/null +++ b/fixtures/enhancements/all-http-methods/handlers.go @@ -0,0 +1,70 @@ +// SPDX-FileCopyrightText: Copyright 2015-2025 go-swagger maintainers +// SPDX-License-Identifier: Apache-2.0 + +// Package all_http_methods declares one handler per HTTP verb so that +// setPathOperation walks the PATCH, HEAD and OPTIONS branches alongside +// the already-tested GET/POST/PUT/DELETE. 
+package all_http_methods + +// GetItem swagger:route GET /items items getItem +// +// Get an item by id. +// +// Responses: +// +// 200: description: OK +func GetItem() {} + +// PostItem swagger:route POST /items items postItem +// +// Create an item. +// +// Responses: +// +// 201: description: Created +func PostItem() {} + +// PutItem swagger:route PUT /items items putItem +// +// Replace an item. +// +// Responses: +// +// 200: description: OK +func PutItem() {} + +// PatchItem swagger:route PATCH /items items patchItem +// +// Apply a partial update. +// +// Responses: +// +// 200: description: OK +func PatchItem() {} + +// DeleteItem swagger:route DELETE /items items deleteItem +// +// Delete an item. +// +// Responses: +// +// 204: description: No Content +func DeleteItem() {} + +// HeadItem swagger:route HEAD /items items headItem +// +// Probe an item. +// +// Responses: +// +// 200: description: OK +func HeadItem() {} + +// OptionsItem swagger:route OPTIONS /items items optionsItem +// +// Describe the supported HTTP methods. +// +// Responses: +// +// 200: description: OK +func OptionsItem() {} diff --git a/fixtures/enhancements/allof-edges/types.go b/fixtures/enhancements/allof-edges/types.go new file mode 100644 index 0000000..29b88d1 --- /dev/null +++ b/fixtures/enhancements/allof-edges/types.go @@ -0,0 +1,74 @@ +// SPDX-FileCopyrightText: Copyright 2015-2025 go-swagger maintainers +// SPDX-License-Identifier: Apache-2.0 + +// Package allof_edges exercises allOf composition edge cases: pointer +// members, time.Time members, strfmt-tagged members, and interface members. +package allof_edges + +import "time" + +// SimpleBase is a plain struct used as an allOf member. +// +// swagger:model +type SimpleBase struct { + // required: true + ID string `json:"id"` + + Name string `json:"name"` +} + +// ULID is a named struct formatted as a swagger strfmt. Using a struct +// underlying type exercises the strfmt branch of buildNamedAllOf for +// struct members. 
+// +// swagger:strfmt ulid +type ULID struct{} + +// Tagger is a non-empty named interface used as an allOf member. +// +// swagger:model +type Tagger interface { + // Tag returns an identifier. + Tag() string +} + +// AllOfPointer composes a pointer allOf member. +// +// swagger:model +type AllOfPointer struct { + // swagger:allOf + *SimpleBase + + Extra string `json:"extra"` +} + +// AllOfStdTime composes an allOf member that is time.Time. +// +// swagger:model +type AllOfStdTime struct { + // swagger:allOf + time.Time + + Label string `json:"label"` +} + +// AllOfStrfmt composes an allOf member that carries a swagger:strfmt tag. +// +// swagger:model +type AllOfStrfmt struct { + // swagger:allOf + ULID + + Note string `json:"note"` +} + +// AllOfInterface composes an allOf member whose underlying type is an +// interface. +// +// swagger:model +type AllOfInterface struct { + // swagger:allOf + Tagger + + Count int `json:"count"` +} diff --git a/fixtures/enhancements/defaults-examples/types.go b/fixtures/enhancements/defaults-examples/types.go new file mode 100644 index 0000000..86ae15e --- /dev/null +++ b/fixtures/enhancements/defaults-examples/types.go @@ -0,0 +1,42 @@ +// SPDX-FileCopyrightText: Copyright 2015-2025 go-swagger maintainers +// SPDX-License-Identifier: Apache-2.0 + +// Package defaults_examples exercises default and example tag parsing +// across numeric, array and object schema types. +package defaults_examples + +// Metrics aggregates fields that carry default and example tags for the +// numeric, array and object branches of parseValueFromSchema. +// +// swagger:model +type Metrics struct { + // Ratio is a float32 value. + // + // default: 1.5 + // example: 2.25 + Ratio float32 `json:"ratio"` + + // Weight is a float64 value. + // + // default: 3.14 + // example: 9.81 + Weight float64 `json:"weight"` + + // Tags is a slice with a JSON-array default and example. 
+ // + // default: ["a","b"] + // example: ["x","y","z"] + Tags []string `json:"tags"` + + // Counts is a slice of integers with a JSON-array default. + // + // default: [1,2,3] + // example: [4,5] + Counts []int `json:"counts"` + + // Props is a map represented as a JSON object. + // + // default: {"k":1} + // example: {"q":42,"r":7} + Props map[string]int `json:"props"` +} diff --git a/fixtures/enhancements/embedded-types/types.go b/fixtures/enhancements/embedded-types/types.go new file mode 100644 index 0000000..e965505 --- /dev/null +++ b/fixtures/enhancements/embedded-types/types.go @@ -0,0 +1,70 @@ +// SPDX-FileCopyrightText: Copyright 2015-2025 go-swagger maintainers +// SPDX-License-Identifier: Apache-2.0 + +// Package embedded_types exercises schema-builder edge cases when a struct +// embeds an alias, a named empty interface, or the predeclared error +// interface. +package embedded_types + +// Base is a plain struct used as the right-hand side of an alias. +// +// swagger:model +type Base struct { + // required: true + ID int64 `json:"id"` + + Name string `json:"name"` +} + +// BaseAlias aliases Base so that embedding it exercises the alias branch +// of buildEmbedded. +type BaseAlias = Base + +// EmbedsAlias embeds an aliased named struct. +// +// swagger:model +type EmbedsAlias struct { + BaseAlias + + Extra string `json:"extra"` +} + +// Marker is a named empty interface used as an embedded field. +// +// It carries no methods, exercising the utpe.Empty() branch of +// buildNamedEmbedded for interfaces. +type Marker any + +// EmbedsEmptyNamedInterface embeds a named empty interface. +// +// swagger:model +type EmbedsEmptyNamedInterface struct { + Marker + + Value string `json:"value"` +} + +// EmbedsError embeds the predeclared error interface, exercising the +// isStdError branch of buildNamedEmbedded. 
+// +// swagger:model +type EmbedsError struct { + error + + Code int `json:"code"` +} + +// Handler is a non-empty named interface with a single exported method. +type Handler interface { + // Handle is a unary method exposed as a schema property. + Handle() string +} + +// EmbedsNamedInterface embeds a non-empty, non-error named interface. +// +// swagger:model +type EmbedsNamedInterface struct { + Handler + + Tag string `json:"tag"` +} diff --git a/fixtures/enhancements/enum-docs/types.go b/fixtures/enhancements/enum-docs/types.go new file mode 100644 index 0000000..4de6878 --- /dev/null +++ b/fixtures/enhancements/enum-docs/types.go @@ -0,0 +1,52 @@ +// SPDX-FileCopyrightText: Copyright 2015-2025 go-swagger maintainers +// SPDX-License-Identifier: Apache-2.0 + +// Package enum_docs exercises scanCtx.findEnumValue description building +// on const blocks that carry per-value doc comments and multi-name value +// specs so both the docs loop and the names separator are walked. +package enum_docs + +// Priority is an annotated enum string. +// +// swagger:enum Priority +type Priority string + +const ( + // PriorityLow is a low-priority level. + PriorityLow Priority = "low" + + // PriorityMed is a medium-priority level. + PriorityMed Priority = "medium" + + // PriorityHigh is a high-priority level. + PriorityHigh Priority = "high" +) + +// Channel is an enum with a multi-name value spec so that findEnumValue +// walks the separator branch in its names loop. +// +// swagger:enum Channel +type Channel string + +const ( + // ChannelEmail and ChannelSMS share a single spec. + ChannelEmail, ChannelSMS Channel = "email", "sms" + + // ChannelPush is the push notification channel. + ChannelPush Channel = "push" +) + +// Notification holds both enums so that the scanner emits schemas with +// the enriched descriptions produced by findEnumValue. +// +// swagger:model Notification +type Notification struct { + // required: true + ID int64 `json:"id"` + + // The priority level. 
+ Priority Priority `json:"priority"` + + // The delivery channel. + Channel Channel `json:"channel"` +} diff --git a/fixtures/enhancements/interface-methods/types.go b/fixtures/enhancements/interface-methods/types.go new file mode 100644 index 0000000..daed6df --- /dev/null +++ b/fixtures/enhancements/interface-methods/types.go @@ -0,0 +1,121 @@ +// SPDX-FileCopyrightText: Copyright 2015-2025 go-swagger maintainers +// SPDX-License-Identifier: Apache-2.0 + +// Package interface_methods exercises the processInterfaceMethod and +// processAnonInterfaceMethod branches of the schema builder: non-exported +// methods, methods with parameters, methods with multiple returns, +// swagger:ignore methods, swagger:name overrides, swagger:strfmt returns, +// and pointer returns that may be x-nullable. +package interface_methods + +import "time" + +// Audited is a small named interface that is embedded with swagger:allOf +// into richer interfaces below. +// +// swagger:model +type Audited interface { + // CreatedAt is the creation timestamp. + CreatedAt() time.Time + + // UpdatedAt is the update timestamp. + UpdatedAt() time.Time +} + +// UserProfile is a read-only view over a user, exposed as a schema via its +// method set. Each method exercises a distinct branch of +// processInterfaceMethod. +// +// swagger:model +type UserProfile interface { + // swagger:allOf + Audited + + // ID is the user identifier. + // + // required: true + // min: 1 + ID() int64 + + // Name is re-exposed as "fullName" in JSON. + // + // swagger:name fullName + Name() string + + // Email is formatted as an email strfmt. + // + // swagger:strfmt email + Email() string + + // Bio is a nullable pointer string. + Bio() *string + + // Tags returns the user's labels. + Tags() []string + + // Profile returns nested structured data. + Profile() map[string]string + + // swagger:ignore + // + // Secret is deliberately omitted from the schema. 
+ Secret() string + + // WithParams takes an argument and is therefore not a valid property + // accessor; the scanner must skip it. + WithParams(x int) string + + // WithMultiReturn returns multiple values and is also skipped. + WithMultiReturn() (string, error) + + // WithNoReturn returns nothing and is skipped. + WithNoReturn() + + // notExported is an unexported method that must be skipped. + notExported() int +} + +// Public exposes just a single scalar so we get a minimal, deterministic +// companion to assert the default code path. +// +// swagger:model +type Public interface { + // Kind names the public flavor. + Kind() string +} + +// WithAnonEmbed embeds an anonymous inline interface so that the scanner +// walks processAnonInterfaceMethod for its methods. This exercises the +// buildAnonymousInterface call site inside processEmbeddedType. +// +// swagger:model +type WithAnonEmbed interface { + // swagger:allOf + interface { + // AuditTrail is exposed via the anonymous embedded interface. + // + // swagger:name audit + AuditTrail() string + + // ExternalID is tagged as uuid so the anon-method strfmt branch + // is exercised. + // + // swagger:strfmt uuid + ExternalID() string + + // Revision is a nullable pointer return for x-nullable coverage. + Revision() *int + + // swagger:ignore + IgnoredByAnon() string + + internalOnly() bool + + WithArgs(y int) string + + WithMulti() (string, error) + } + + // Kind names the root flavor. + Kind() string +} diff --git a/fixtures/enhancements/malformed/bad-contact/doc.go b/fixtures/enhancements/malformed/bad-contact/doc.go new file mode 100644 index 0000000..c6b4055 --- /dev/null +++ b/fixtures/enhancements/malformed/bad-contact/doc.go @@ -0,0 +1,15 @@ +// SPDX-FileCopyrightText: Copyright 2015-2025 go-swagger maintainers +// SPDX-License-Identifier: Apache-2.0 + +// Package bad_contact Probe API. +// +// Probe description. 
+// +// Schemes: http +// Host: localhost +// BasePath: / +// Version: 0.0.1 +// Contact: Not an email address <@@@> +// +// swagger:meta +package bad_contact diff --git a/fixtures/enhancements/malformed/bad-response-tag/handlers.go b/fixtures/enhancements/malformed/bad-response-tag/handlers.go new file mode 100644 index 0000000..1f239e4 --- /dev/null +++ b/fixtures/enhancements/malformed/bad-response-tag/handlers.go @@ -0,0 +1,14 @@ +// SPDX-FileCopyrightText: Copyright 2015-2025 go-swagger maintainers +// SPDX-License-Identifier: Apache-2.0 + +// Package bad_response_tag declares a swagger:route whose Responses +// block carries an unrecognised tag prefix, tripping parseTags's +// "invalid tag" branch. +package bad_response_tag + +// DoBad swagger:route GET /bad-response bad doBadResp +// +// Responses: +// +// 200: notAValidTag: value +func DoBad() {} diff --git a/fixtures/enhancements/malformed/bad-sec-defs/doc.go b/fixtures/enhancements/malformed/bad-sec-defs/doc.go new file mode 100644 index 0000000..2e0f717 --- /dev/null +++ b/fixtures/enhancements/malformed/bad-sec-defs/doc.go @@ -0,0 +1,17 @@ +// SPDX-FileCopyrightText: Copyright 2015-2025 go-swagger maintainers +// SPDX-License-Identifier: Apache-2.0 + +// Package bad_sec_defs Probe API. +// +// Probe description. +// +// Schemes: http +// Host: localhost +// BasePath: / +// Version: 0.0.1 +// +// SecurityDefinitions: +// this is not: valid: yaml: at all: [ +// +// swagger:meta +package bad_sec_defs diff --git a/fixtures/enhancements/malformed/default-int/api.go b/fixtures/enhancements/malformed/default-int/api.go new file mode 100644 index 0000000..6814e26 --- /dev/null +++ b/fixtures/enhancements/malformed/default-int/api.go @@ -0,0 +1,24 @@ +// SPDX-FileCopyrightText: Copyright 2015-2025 go-swagger maintainers +// SPDX-License-Identifier: Apache-2.0 + +// Package default_int carries a parameter with a `default:` value that +// does not parse as the declared type. 
parseValueFromSchema is wired to +// run `strconv.Atoi` on int types, which will return an error that +// propagates up through setDefault.Parse. +package default_int + +// BadParams has an int query parameter with a non-numeric default. +// +// swagger:parameters badDefaultInt +type BadParams struct { + // in: query + // default: notanumber + Value int `json:"value"` +} + +// DoBad swagger:route GET /bad-default bad badDefaultInt +// +// Responses: +// +// 200: description: OK +func DoBad() {} diff --git a/fixtures/enhancements/malformed/duplicate-body-tag/handlers.go b/fixtures/enhancements/malformed/duplicate-body-tag/handlers.go new file mode 100644 index 0000000..0aacf08 --- /dev/null +++ b/fixtures/enhancements/malformed/duplicate-body-tag/handlers.go @@ -0,0 +1,14 @@ +// SPDX-FileCopyrightText: Copyright 2015-2025 go-swagger maintainers +// SPDX-License-Identifier: Apache-2.0 + +// Package duplicate_body_tag declares a swagger:route whose Responses +// block has `body:` appearing twice so parseTags trips its "valid tag +// but not in a valid position" error branch. +package duplicate_body_tag + +// DoBad swagger:route GET /duplicate-body bad doDupBody +// +// Responses: +// +// 200: body:Foo body:Bar +func DoBad() {} diff --git a/fixtures/enhancements/malformed/example-int/api.go b/fixtures/enhancements/malformed/example-int/api.go new file mode 100644 index 0000000..ade2b00 --- /dev/null +++ b/fixtures/enhancements/malformed/example-int/api.go @@ -0,0 +1,23 @@ +// SPDX-FileCopyrightText: Copyright 2015-2025 go-swagger maintainers +// SPDX-License-Identifier: Apache-2.0 + +// Package example_int carries a parameter with an `example:` value that +// does not parse as the declared int type, exercising setExample.Parse's +// error branch. +package example_int + +// BadParams has an int query parameter with a non-numeric example. 
+// +// swagger:parameters badExampleInt +type BadParams struct { + // in: query + // example: notanumber + Value int `json:"value"` +} + +// DoBad swagger:route GET /bad-example bad badExampleInt +// +// Responses: +// +// 200: description: OK +func DoBad() {} diff --git a/fixtures/enhancements/malformed/info-bad-ext-key/doc.go b/fixtures/enhancements/malformed/info-bad-ext-key/doc.go new file mode 100644 index 0000000..746a3eb --- /dev/null +++ b/fixtures/enhancements/malformed/info-bad-ext-key/doc.go @@ -0,0 +1,17 @@ +// SPDX-FileCopyrightText: Copyright 2015-2025 go-swagger maintainers +// SPDX-License-Identifier: Apache-2.0 + +// Package info_bad_ext_key Probe API. +// +// Probe description. +// +// Schemes: http +// Host: localhost +// BasePath: / +// Version: 0.0.1 +// +// InfoExtensions: +// invalid-key: oops +// +// swagger:meta +package info_bad_ext_key diff --git a/fixtures/enhancements/malformed/meta-bad-ext-key/doc.go b/fixtures/enhancements/malformed/meta-bad-ext-key/doc.go new file mode 100644 index 0000000..f3b550b --- /dev/null +++ b/fixtures/enhancements/malformed/meta-bad-ext-key/doc.go @@ -0,0 +1,17 @@ +// SPDX-FileCopyrightText: Copyright 2015-2025 go-swagger maintainers +// SPDX-License-Identifier: Apache-2.0 + +// Package meta_bad_ext_key Probe API. +// +// Probe description. 
+// +// Schemes: http +// Host: localhost +// BasePath: / +// Version: 0.0.1 +// +// Extensions: +// invalid-key: oops +// +// swagger:meta +package meta_bad_ext_key diff --git a/fixtures/enhancements/named-basic/types.go b/fixtures/enhancements/named-basic/types.go new file mode 100644 index 0000000..74e6a5a --- /dev/null +++ b/fixtures/enhancements/named-basic/types.go @@ -0,0 +1,40 @@ +// SPDX-FileCopyrightText: Copyright 2015-2025 go-swagger maintainers +// SPDX-License-Identifier: Apache-2.0 + +// Package named_basic exercises the schemaBuilder.buildNamedBasic branches +// for named basic types carrying swagger:strfmt, swagger:type and +// swagger:default annotations. +package named_basic + +// Email is a named string with a swagger:strfmt tag. The scanner must +// emit {"type": "string", "format": "email"} via the strfmt branch. +// +// swagger:strfmt email +type Email string + +// Colour is a named int whose representation is overridden via +// swagger:type so the scanner emits it as a string instead of an integer. +// +// swagger:type string +type Colour int + +// Grade is a named int tagged with swagger:default which causes the +// scanner to emit an empty schema for the declared type. +// +// swagger:default Grade +type Grade int + +// User embeds the three named basic types above so that the full scan +// walks buildNamedBasic for each field. 
+// +// swagger:model User +type User struct { + // required: true + ID int64 `json:"id"` + + Email Email `json:"email"` + + Colour Colour `json:"colour"` + + Grade Grade `json:"grade"` +} diff --git a/fixtures/enhancements/named-struct-tags/types.go b/fixtures/enhancements/named-struct-tags/types.go new file mode 100644 index 0000000..852dae6 --- /dev/null +++ b/fixtures/enhancements/named-struct-tags/types.go @@ -0,0 +1,39 @@ +// SPDX-FileCopyrightText: Copyright 2015-2025 go-swagger maintainers +// SPDX-License-Identifier: Apache-2.0 + +// Package named_struct_tags exercises schemaBuilder.buildNamedStruct +// branches where a named struct referenced as a field carries a +// swagger:strfmt or swagger:type annotation that overrides its schema. +package named_struct_tags + +// PhoneNumber is a named struct annotated as a strfmt. When used as a +// field type the scanner emits {type: "string", format: "phone"} via the +// strfmt branch of buildNamedStruct. +// +// swagger:strfmt phone +type PhoneNumber struct { + CountryCode string + Number string +} + +// LegacyCode is a named struct annotated with swagger:type so that its +// referenced schema is coerced to the declared swagger type rather than +// emitted as an object. +// +// swagger:type string +type LegacyCode struct { + Version int +} + +// Contact references both tagged struct types so the scanner walks the +// buildNamedStruct strfmt and typeName branches on distinct fields. 
+// +// swagger:model Contact +type Contact struct { + // required: true + ID int64 `json:"id"` + + Phone PhoneNumber `json:"phone"` + + Code LegacyCode `json:"code"` +} diff --git a/fixtures/enhancements/ref-alias-chain/types.go b/fixtures/enhancements/ref-alias-chain/types.go new file mode 100644 index 0000000..bd6f428 --- /dev/null +++ b/fixtures/enhancements/ref-alias-chain/types.go @@ -0,0 +1,54 @@ +// SPDX-FileCopyrightText: Copyright 2015-2025 go-swagger maintainers +// SPDX-License-Identifier: Apache-2.0 + +// Package ref_alias_chain exercises buildDeclAlias under RefAliases=true +// with an alias-of-alias chain and aliases to well-known standard library +// types (time.Time, any). +package ref_alias_chain + +import "time" + +// BaseBody is the concrete named struct at the bottom of the alias chain. +// +// swagger:model BaseBody +type BaseBody struct { + // required: true + ID int64 `json:"id"` + + Name string `json:"name"` +} + +// LinkA is a direct alias of BaseBody. +// +// swagger:model LinkA +type LinkA = BaseBody + +// LinkB is an alias of an alias — chain depth two. +// +// swagger:model LinkB +type LinkB = LinkA + +// Timestamp aliases time.Time so that buildDeclAlias takes its isStdTime +// branch. +// +// swagger:model Timestamp +type Timestamp = time.Time + +// Wildcard aliases any so that buildDeclAlias takes its isAny branch. +// +// swagger:model Wildcard +type Wildcard = any + +// Envelope references the aliases via its fields so the scanner also +// walks the schemaBuilder.buildAlias path for each chain member. 
+// +// swagger:model Envelope +type Envelope struct { + First LinkA `json:"first"` + + Second LinkB `json:"second"` + + CreatedAt Timestamp `json:"createdAt"` + + Meta Wildcard `json:"meta"` +} diff --git a/fixtures/enhancements/response-edges/api.go b/fixtures/enhancements/response-edges/api.go new file mode 100644 index 0000000..a577fe5 --- /dev/null +++ b/fixtures/enhancements/response-edges/api.go @@ -0,0 +1,63 @@ +// SPDX-FileCopyrightText: Copyright 2015-2025 go-swagger maintainers +// SPDX-License-Identifier: Apache-2.0 + +// Package response_edges exercises edge cases in the response builder: +// embedded response structs, time.Time header fields, strfmt-tagged named +// header fields, and top-level non-struct response payloads. +package response_edges + +import "time" + +// RequestID is a named string with a swagger:strfmt tag used as a header +// field so that buildNamedField takes its strfmt branch. +// +// swagger:strfmt uuid +type RequestID string + +// CommonHeaders is embedded into the full response so that +// responseBuilder.buildFromStruct walks the embedded-field branch and +// recursively invokes buildFromType on an embedded named type. +type CommonHeaders struct { + // The request trace identifier. + // + // in: header + TraceID RequestID `json:"X-Trace-ID"` +} + +// Body is the canonical body for the full response. +// +// swagger:model Body +type Body struct { + // required: true + ID int64 `json:"id"` + + Payload string `json:"payload"` +} + +// FullResponse carries headers (embedded + inline) and a body field so +// that buildFromStruct, processResponseField and buildNamedField are all +// exercised in a single scan. +// +// swagger:response fullResponse +type FullResponse struct { + CommonHeaders + + // The request rate-limit window. + // + // in: header + RateLimit int `json:"X-Rate-Limit"` + + // The server-side timestamp for this response. 
+ // + // in: header + Timestamp time.Time `json:"X-Timestamp"` + + // in: body + Body Body `json:"body"` +} + +// IDs is a named slice so that responseBuilder.buildNamedType walks its +// non-struct default branch. +// +// swagger:response idsResponse +type IDs []int64 diff --git a/fixtures/enhancements/strfmt-arrays/types.go b/fixtures/enhancements/strfmt-arrays/types.go new file mode 100644 index 0000000..10e8b8f --- /dev/null +++ b/fixtures/enhancements/strfmt-arrays/types.go @@ -0,0 +1,47 @@ +// SPDX-FileCopyrightText: Copyright 2015-2025 go-swagger maintainers +// SPDX-License-Identifier: Apache-2.0 + +// Package strfmt_arrays exercises strfmt handling on named array and slice +// types, including the byte/bsonobjectid fast paths. +package strfmt_arrays + +// Hash is a 32-byte array tagged as the byte swagger strfmt. +// +// swagger:strfmt byte +type Hash [32]byte + +// ObjectID is a 12-byte array tagged as a BSON object id. +// +// swagger:strfmt bsonobjectid +type ObjectID [12]byte + +// Signature is a named array that carries a generic strfmt tag. +// +// swagger:strfmt password +type Signature [64]byte + +// Blob is a named byte slice tagged as the byte swagger strfmt. +// +// swagger:strfmt byte +type Blob []byte + +// Token is a named slice tagged with a generic strfmt. +// +// swagger:strfmt uuid +type Token []string + +// Carrier embeds all of the named array and slice types above. 
+// +// swagger:model +type Carrier struct { + // required: true + Hash Hash `json:"hash"` + + ObjectID ObjectID `json:"objectId"` + + Signature Signature `json:"signature"` + + Blob Blob `json:"blob"` + + Token Token `json:"token"` +} diff --git a/fixtures/enhancements/text-marshal/types.go b/fixtures/enhancements/text-marshal/types.go new file mode 100644 index 0000000..df18fe5 --- /dev/null +++ b/fixtures/enhancements/text-marshal/types.go @@ -0,0 +1,54 @@ +// SPDX-FileCopyrightText: Copyright 2015-2025 go-swagger maintainers +// SPDX-License-Identifier: Apache-2.0 + +// Package text_marshal exercises schemaBuilder.buildFromTextMarshal: +// a type recognised via its TextMarshaler implementation, the +// case-insensitive "uuid" fast-path, and a named text-marshalable type +// that carries a swagger:strfmt tag. +package text_marshal + +// UUID is a text-marshalable type whose name matches the uuid fast-path. +type UUID [16]byte + +// MarshalText renders the UUID as text. +func (u UUID) MarshalText() ([]byte, error) { return nil, nil } + +// UnmarshalText parses the UUID from text. +func (u *UUID) UnmarshalText([]byte) error { return nil } + +// MAC is a text-marshalable named type annotated with a swagger:strfmt so +// the strfmt branch of buildFromTextMarshal fires. +// +// swagger:strfmt mac +type MAC [6]byte + +// MarshalText renders the MAC as text. +func (m MAC) MarshalText() ([]byte, error) { return nil, nil } + +// UnmarshalText parses the MAC from text. +func (m *MAC) UnmarshalText([]byte) error { return nil } + +// Opaque is a text-marshalable named type without any swagger hints so +// that buildFromTextMarshal takes its fallback branch. +type Opaque struct { + Value string +} + +// MarshalText renders the Opaque value as text. +func (o Opaque) MarshalText() ([]byte, error) { return []byte(o.Value), nil } + +// UnmarshalText parses the Opaque value from text. 
+func (o *Opaque) UnmarshalText(data []byte) error { o.Value = string(data); return nil } + +// Device aggregates all three text-marshalable fields so that a full scan +// walks buildFromTextMarshal for each. +// +// swagger:model Device +type Device struct { + // required: true + ID UUID `json:"id"` + + MAC MAC `json:"mac"` + + Data Opaque `json:"data"` +} diff --git a/fixtures/enhancements/top-level-kinds/types.go b/fixtures/enhancements/top-level-kinds/types.go new file mode 100644 index 0000000..cd9760c --- /dev/null +++ b/fixtures/enhancements/top-level-kinds/types.go @@ -0,0 +1,45 @@ +// SPDX-FileCopyrightText: Copyright 2015-2025 go-swagger maintainers +// SPDX-License-Identifier: Apache-2.0 + +// Package top_level_kinds declares top-level types of every Go kind +// (basic, array, slice, map, interface) annotated with swagger:model so +// that schemaBuilder.buildFromDecl walks each non-struct case branch. +package top_level_kinds + +// MyInt is a top-level named basic integer type. +// +// swagger:model MyInt +type MyInt int + +// MyArray is a top-level named array. +// +// swagger:model MyArray +type MyArray [10]string + +// MySlice is a top-level named slice. +// +// swagger:model MySlice +type MySlice []string + +// MyMap is a top-level named map. +// +// swagger:model MyMap +type MyMap map[string]int + +// MyInterface is a top-level named interface. +// +// swagger:model MyInterface +type MyInterface interface { + // Identify returns the name of this object. + Identify() string +} + +// IgnoredModel is annotated as a model but also carries swagger:ignore, +// so the sectionedParser flags it as ignored and buildFromDecl returns +// early via its `sp.ignored` branch. 
+// +// swagger:model IgnoredModel +// swagger:ignore +type IgnoredModel struct { + Value int `json:"value"` +} diff --git a/fixtures/enhancements/unknown-annotation/types.go b/fixtures/enhancements/unknown-annotation/types.go new file mode 100644 index 0000000..1e598ad --- /dev/null +++ b/fixtures/enhancements/unknown-annotation/types.go @@ -0,0 +1,13 @@ +// SPDX-FileCopyrightText: Copyright 2015-2025 go-swagger maintainers +// SPDX-License-Identifier: Apache-2.0 + +// Package unknown_annotation carries a deliberately bogus swagger +// annotation so the classifier returns an error from detectNodes. +package unknown_annotation + +// Bogus uses an unknown swagger annotation. +// +// swagger:doesnotexist BogusTag +type Bogus struct { + ID int64 `json:"id"` +} diff --git a/fixtures/goparsing/petstore/enums/priority.go b/fixtures/goparsing/petstore/enums/priority.go new file mode 100644 index 0000000..4a08818 --- /dev/null +++ b/fixtures/goparsing/petstore/enums/priority.go @@ -0,0 +1,29 @@ +// Copyright 2015 go-swagger maintainers +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package enums + +// Priority indicates the urgency level. 
+// +// swagger:enum Priority +type Priority int + +const ( + // low priority + // items are handled last + PriorityLow Priority = 1 + + // high priority + PriorityHigh Priority = 2 +) diff --git a/fixtures/goparsing/spec/api.go b/fixtures/goparsing/spec/api.go index f6816d0..674e67c 100644 --- a/fixtures/goparsing/spec/api.go +++ b/fixtures/goparsing/spec/api.go @@ -79,4 +79,5 @@ type BookingResponse struct { // Responses: // 200: BookingResponse func bookings(w http.ResponseWriter, r *http.Request) { + } diff --git a/fixtures/integration/golden/api_spec_go111.json b/fixtures/integration/golden/api_spec_go111.json new file mode 100644 index 0000000..4d11762 --- /dev/null +++ b/fixtures/integration/golden/api_spec_go111.json @@ -0,0 +1,134 @@ +{ + "consumes":[ + "application/json" + ], + "produces":[ + "application/json" + ], + "schemes":[ + "https" + ], + "swagger":"2.0", + "info":{ + "description":"the purpose of this application is to provide an application\nthat is using plain go code to define an API", + "title":"API.", + "version":"0.0.1" + }, + "host":"localhost", + "paths":{ + "/admin/bookings/":{ + "get":{ + "consumes":[ + "application/json" + ], + "produces":[ + "application/json" + ], + "schemes":[ + "http", + "https" + ], + "tags":[ + "booking" + ], + "summary":"Bookings lists all the appointments that have been made on the site.", + "deprecated": true, + "operationId":"Bookings", + "responses":{ + "200":{ + "$ref":"#/responses/BookingResponse" + } + } + } + } + }, + "definitions":{ + "Booking":{ + "description":"A Booking in the system", + "type":"object", + "required":[ + "id", + "Subject" + ], + "properties":{ + "Subject":{ + "description":"Subject the subject of this booking", + "type":"string" + }, + "id":{ + "description":"ID the id of the booking", + "type":"integer", + "format":"int64", + "x-go-name":"ID", + "readOnly":true + } + }, + "x-go-package":"github.com/go-swagger/scan-repo-boundary/makeplans" + }, + "Customer":{ + "type":"object", + 
"title":"Customer of the site.", + "properties":{ + "name":{ + "type":"string", + "x-go-name":"Name" + } + }, + "x-go-package":"github.com/go-openapi/codescan/fixtures/goparsing/spec" + }, + "DateRange":{ + "description":"DateRange represents a scheduled appointments time\nDateRange should be in definitions since it's being used in a response", + "type":"object", + "properties":{ + "end":{ + "type":"string", + "x-go-name":"End" + }, + "start":{ + "type":"string", + "x-go-name":"Start" + } + }, + "x-go-package":"github.com/go-openapi/codescan/fixtures/goparsing/spec" + } + }, + "responses":{ + "BookingResponse":{ + "description":"BookingResponse represents a scheduled appointment", + "schema":{ + "type":"object", + "properties":{ + "booking":{ + "$ref":"#/definitions/Booking" + }, + "customer":{ + "$ref":"#/definitions/Customer" + }, + "dates":{ + "$ref":"#/definitions/DateRange" + }, + "map": { + "type": "object", + "additionalProperties": { + "type": "string" + }, + "example": {"key": "value"}, + "x-go-name": "Map" + }, + "slice": { + "type": "array", + "items": { + "type": "integer", + "format": "int64" + }, + "x-go-name": "Slice", + "example": [ + 1, + 2 + ] + } + } + } + } + } +} diff --git a/fixtures/integration/golden/api_spec_go111_ref.json b/fixtures/integration/golden/api_spec_go111_ref.json new file mode 100644 index 0000000..353b13c --- /dev/null +++ b/fixtures/integration/golden/api_spec_go111_ref.json @@ -0,0 +1,137 @@ +{ + "consumes":[ + "application/json" + ], + "produces":[ + "application/json" + ], + "schemes":[ + "https" + ], + "swagger":"2.0", + "info":{ + "description":"the purpose of this application is to provide an application\nthat is using plain go code to define an API", + "title":"API.", + "version":"0.0.1" + }, + "host":"localhost", + "paths":{ + "/admin/bookings/":{ + "get":{ + "consumes":[ + "application/json" + ], + "produces":[ + "application/json" + ], + "schemes":[ + "http", + "https" + ], + "tags":[ + "booking" + ], + 
"summary":"Bookings lists all the appointments that have been made on the site.", + "deprecated": true, + "operationId":"Bookings", + "responses":{ + "200":{ + "$ref":"#/responses/BookingResponse" + } + } + } + } + }, + "definitions":{ + "Booking":{ + "description":"A Booking in the system", + "type":"object", + "required":[ + "id", + "Subject" + ], + "properties":{ + "Subject":{ + "description":"Subject the subject of this booking", + "type":"string" + }, + "id":{ + "description":"ID the id of the booking", + "type":"integer", + "format":"int64", + "x-go-name":"ID", + "readOnly":true + } + }, + "x-go-package":"github.com/go-swagger/scan-repo-boundary/makeplans" + }, + "Customer":{ + "title":"Customer of the site.", + "$ref": "#/definitions/User" + }, + "User":{ + "type":"object", + "properties":{ + "name":{ + "type":"string", + "x-go-name":"Name" + } + }, + "x-go-package":"github.com/go-openapi/codescan/fixtures/goparsing/spec" + }, + "DateRange":{ + "description":"DateRange represents a scheduled appointments time\nDateRange should be in definitions since it's being used in a response", + "type":"object", + "properties":{ + "end":{ + "type":"string", + "x-go-name":"End" + }, + "start":{ + "type":"string", + "x-go-name":"Start" + } + }, + "x-go-package":"github.com/go-openapi/codescan/fixtures/goparsing/spec" + } + }, + "responses":{ + "BookingResponse":{ + "description":"BookingResponse represents a scheduled appointment", + "schema":{ + "type":"object", + "properties":{ + "booking":{ + "$ref":"#/definitions/Booking" + }, + "customer":{ + "$ref":"#/definitions/Customer" + }, + "dates":{ + "$ref":"#/definitions/DateRange" + }, + "map": { + "type": "object", + "additionalProperties": { + "type": "string" + }, + "example": {"key": "value"}, + "x-go-name": "Map" + }, + "slice": { + "type": "array", + "items": { + "type": "integer", + "format": "int64" + }, + "x-go-name": "Slice", + "example": [ + 1, + 2 + ] + } + } + } + } + } +} diff --git 
a/fixtures/integration/golden/api_spec_go111_transparent.json b/fixtures/integration/golden/api_spec_go111_transparent.json new file mode 100644 index 0000000..d9aa1e9 --- /dev/null +++ b/fixtures/integration/golden/api_spec_go111_transparent.json @@ -0,0 +1,135 @@ +{ + "consumes": [ + "application/json" + ], + "produces": [ + "application/json" + ], + "schemes": [ + "https" + ], + "swagger": "2.0", + "info": { + "description": "the purpose of this application is to provide an application\nthat is using plain go code to define an API", + "title": "API.", + "version": "0.0.1" + }, + "host": "localhost", + "paths": { + "/admin/bookings/": { + "get": { + "consumes": [ + "application/json" + ], + "produces": [ + "application/json" + ], + "schemes": [ + "http", + "https" + ], + "tags": [ + "booking" + ], + "summary": "Bookings lists all the appointments that have been made on the site.", + "operationId": "Bookings", + "deprecated": true, + "responses": { + "200": { + "$ref": "#/responses/BookingResponse" + } + } + } + } + }, + "definitions": { + "Booking": { + "description": "A Booking in the system", + "type": "object", + "required": [ + "id", + "Subject" + ], + "properties": { + "Subject": { + "description": "Subject the subject of this booking", + "type": "string" + }, + "id": { + "description": "ID the id of the booking", + "type": "integer", + "format": "int64", + "x-go-name": "ID", + "readOnly": true + } + }, + "x-go-package": "github.com/go-swagger/scan-repo-boundary/makeplans" + }, + "DateRange": { + "description": "DateRange represents a scheduled appointments time\nDateRange should be in definitions since it's being used in a response", + "type": "object", + "properties": { + "end": { + "type": "string", + "x-go-name": "End" + }, + "start": { + "type": "string", + "x-go-name": "Start" + } + }, + "x-go-package": "github.com/go-openapi/codescan/fixtures/goparsing/spec" + }, + "User": { + "type": "object", + "properties": { + "name": { + "type": "string", + 
"x-go-name": "Name" + } + }, + "x-go-package": "github.com/go-openapi/codescan/fixtures/goparsing/spec" + } + }, + "responses": { + "BookingResponse": { + "description": "BookingResponse represents a scheduled appointment", + "schema": { + "type": "object", + "properties": { + "booking": { + "$ref": "#/definitions/Booking" + }, + "customer": { + "$ref": "#/definitions/User" + }, + "dates": { + "$ref": "#/definitions/DateRange" + }, + "map": { + "type": "object", + "additionalProperties": { + "type": "string" + }, + "x-go-name": "Map", + "example": { + "key": "value" + } + }, + "slice": { + "type": "array", + "items": { + "type": "integer", + "format": "int64" + }, + "x-go-name": "Slice", + "example": [ + 1, + 2 + ] + } + } + } + } + } +} diff --git a/fixtures/integration/golden/bookings_spec.json b/fixtures/integration/golden/bookings_spec.json new file mode 100644 index 0000000..b062b02 --- /dev/null +++ b/fixtures/integration/golden/bookings_spec.json @@ -0,0 +1,113 @@ +{ + "consumes": [ + "application/json" + ], + "produces": [ + "application/json" + ], + "schemes": [ + "https" + ], + "swagger": "2.0", + "info": { + "description": "the purpose of this application is to provide an application\nthat is using plain go code to define an API", + "title": "API.", + "version": "0.0.1" + }, + "host": "localhost", + "paths": { + "/admin/bookings/": { + "get": { + "consumes": [ + "application/json" + ], + "produces": [ + "application/json" + ], + "schemes": [ + "http", + "https" + ], + "tags": [ + "booking" + ], + "summary": "Bookings lists all the appointments that have been made on the site.", + "operationId": "Bookings", + "responses": { + "200": { + "$ref": "#/responses/BookingResponse" + } + } + } + } + }, + "definitions": { + "Booking": { + "description": "A Booking in the system", + "type": "object", + "required": [ + "id", + "Subject" + ], + "properties": { + "Subject": { + "description": "Subject the subject of this booking", + "type": "string" + }, + "id": { + 
"description": "ID the id of the booking", + "type": "integer", + "format": "int64", + "x-go-name": "ID", + "readOnly": true + } + }, + "x-go-package": "github.com/go-swagger/scan-repo-boundary/makeplans" + }, + "Customer": { + "type": "object", + "title": "Customer of the site.", + "properties": { + "name": { + "type": "string", + "x-go-name": "Name" + } + }, + "x-go-package": "github.com/go-openapi/codescan/fixtures/goparsing/bookings" + }, + "DateRange": { + "description": "DateRange represents a scheduled appointments time\nDateRange should be in definitions since it's being used in a response", + "type": "object", + "properties": { + "end": { + "type": "string", + "x-go-name": "End" + }, + "start": { + "type": "string", + "x-go-name": "Start" + } + }, + "x-go-package": "github.com/go-openapi/codescan/fixtures/goparsing/bookings" + } + }, + "responses": { + "BookingResponse": { + "description": "BookingResponse represents a scheduled appointment", + "schema": { + "type": "object", + "properties": { + "booking": { + "$ref": "#/definitions/Booking" + }, + "customer": { + "$ref": "#/definitions/Customer" + }, + "dates": { + "$ref": "#/definitions/DateRange" + } + } + } + } + } +} \ No newline at end of file diff --git a/fixtures/integration/golden/bugs_3125_schema.json b/fixtures/integration/golden/bugs_3125_schema.json new file mode 100644 index 0000000..291ecea --- /dev/null +++ b/fixtures/integration/golden/bugs_3125_schema.json @@ -0,0 +1,22 @@ +{ + "Item": { + "type": "object", + "required": [ + "value1", + "value2" + ], + "properties": { + "value1": { + "description": "Nullable value", + "x-nullable": true, + "$ref": "#/definitions/ValueStruct" + }, + "value2": { + "description": "Non-nullable value", + "$ref": "#/definitions/ValueStruct", + "example": "{\"value\": 42}" + } + }, + "x-go-package": "github.com/go-openapi/codescan/fixtures/bugs/3125/minimal" + } +} \ No newline at end of file diff --git 
a/fixtures/integration/golden/classification_operations.json b/fixtures/integration/golden/classification_operations.json new file mode 100644 index 0000000..9ed8c72 --- /dev/null +++ b/fixtures/integration/golden/classification_operations.json @@ -0,0 +1,206 @@ +{ + "/pets": { + "get": { + "description": "List all pets", + "consumes": [ + "application/json", + "application/xml" + ], + "produces": [ + "application/xml", + "application/json" + ], + "tags": [ + "pets" + ], + "operationId": "getPet", + "parameters": [ + { + "type": "integer", + "format": "int32", + "description": "How many items to return at one time (max 100)", + "name": "limit", + "in": "query" + } + ], + "responses": { + "200": { + "description": "An paged array of pets", + "schema": { + "type": "array", + "items": { + "schema": { + "properties": { + "id": { + "format": "int64", + "type": "integer" + }, + "name": { + "type": "string" + } + }, + "required": [ + "id", + "name" + ], + "type": "object" + } + } + }, + "headers": { + "x-next": { + "type": "string", + "description": "A link to the next page of responses" + } + } + }, + "default": { + "description": "unexpected error", + "schema": { + "type": "object", + "required": [ + "code", + "message" + ], + "properties": { + "code": { + "type": "integer", + "format": "int32" + }, + "message": { + "type": "string" + } + } + } + } + }, + "security": [ + { + "petstore_auth": [ + "write:pets", + "read:pets" + ] + } + ] + } + }, + "/pets/{id}": { + "put": { + "description": "Some long explanation,\nspanning over multipele lines,\nAKA the description.", + "consumes": [ + "application/json", + "application/xml" + ], + "produces": [ + "application/xml", + "application/json" + ], + "tags": [ + "pets" + ], + "summary": "Updates the details for a pet.", + "operationId": "updatePet", + "parameters": [ + { + "description": "Pet object that needs to be added to the store", + "name": "body", + "in": "body", + "required": true, + "schema": { + "type": "object", + 
"required": [ + "name" + ], + "properties": { + "age": { + "type": "integer", + "format": "int32", + "minimum": 0 + }, + "name": { + "type": "string" + } + } + } + }, + { + "description": "Pet object that needs to be added to the store", + "name": "id", + "in": "path", + "required": true, + "schema": { + "type": "string", + "pattern": "[A-Z]{3}-[0-9]{3}" + } + } + ], + "responses": { + "400": { + "description": "Invalid ID supplied" + }, + "404": { + "description": "Pet not found" + }, + "405": { + "description": "Validation exception" + } + }, + "security": [ + { + "petstore_auth": [ + "write:pets", + "read:pets" + ] + } + ] + } + }, + "/v1/events": { + "get": { + "description": "Mitigation Events", + "consumes": [ + "application/json", + "application/xml" + ], + "produces": [ + "application/xml", + "application/json" + ], + "tags": [ + "Events" + ], + "summary": "Events", + "operationId": "getEvents", + "parameters": [ + { + "type": "boolean", + "description": "(boolean) Filters", + "name": "running", + "in": "query" + } + ], + "responses": { + "200": { + "description": "200", + "schema": { + "$ref": "#/definitions/ListResponse" + } + }, + "400": { + "description": "400", + "schema": { + "$ref": "#/definitions/ErrorResponse" + } + } + }, + "security": [ + { + "petstore_auth": [ + "write:pets", + "read:pets" + ] + } + ] + } + } +} \ No newline at end of file diff --git a/fixtures/integration/golden/classification_params.json b/fixtures/integration/golden/classification_params.json new file mode 100644 index 0000000..b234624 --- /dev/null +++ b/fixtures/integration/golden/classification_params.json @@ -0,0 +1,677 @@ +{ + "anotherOperation": { + "operationId": "anotherOperation", + "parameters": [ + { + "maximum": 1000, + "exclusiveMaximum": true, + "minimum": 10, + "exclusiveMinimum": true, + "type": "integer", + "format": "int64", + "default": 1, + "x-go-name": "ID", + "description": "ID of this no model instance.\nids in this application start at 11 and are 
smaller than 1000", + "name": "id", + "in": "path", + "required": true + }, + { + "maximum": 45, + "minimum": 3, + "multipleOf": 3, + "type": "integer", + "format": "int32", + "default": 2, + "example": 27, + "x-go-name": "Score", + "description": "The Score of this model", + "name": "score", + "in": "query", + "required": true + }, + { + "maxLength": 50, + "minLength": 4, + "pattern": "[A-Za-z0-9-.]*", + "type": "string", + "x-go-name": "Name", + "description": "Name of this no model instance", + "name": "x-hdr-name", + "in": "header", + "required": true + }, + { + "type": "string", + "format": "date-time", + "x-go-name": "Created", + "description": "Created holds the time when this entry was created", + "name": "created", + "in": "query" + }, + { + "enum": [ + "foo", + "bar", + "none" + ], + "type": "string", + "default": "bar", + "x-go-name": "CategoryOld", + "description": "The Category of this model (old enum format)", + "name": "category_old", + "in": "query", + "required": true + }, + { + "enum": [ + "foo", + "bar", + "none" + ], + "type": "string", + "default": "bar", + "x-go-name": "Category", + "description": "The Category of this model", + "name": "category", + "in": "query", + "required": true + }, + { + "enum": [ + 1, + 3, + 5 + ], + "type": "integer", + "format": "int64", + "default": 1, + "x-go-name": "TypeOld", + "description": "Type of this model (old enum format)", + "name": "type_old", + "in": "query" + }, + { + "enum": [ + 1, + 3, + 5 + ], + "type": "integer", + "format": "int64", + "default": 1, + "x-go-name": "Type", + "description": "Type of this model", + "name": "type", + "in": "query" + }, + { + "enum": [ + 1, + "rsq", + "qaz" + ], + "type": "integer", + "format": "int64", + "x-go-name": "BadEnum", + "description": "This is mix in enum. 
And actually on output should be valid form where int will be int and\nstring will also be presented.", + "name": "bad_enum", + "in": "query" + }, + { + "maxItems": 10, + "minItems": 3, + "uniqueItems": true, + "type": "array", + "items": { + "maxLength": 10, + "minLength": 3, + "pattern": "\\w+", + "type": "string", + "default": "bar" + }, + "collectionFormat": "pipe", + "x-go-name": "FooSlice", + "description": "a FooSlice has foos which are strings", + "name": "foo_slice", + "in": "query" + }, + { + "maxItems": 10, + "minItems": 3, + "uniqueItems": true, + "type": "array", + "items": { + "maxItems": 9, + "minItems": 4, + "enum": [ + "bar1", + "bar2", + "bar3" + ], + "type": "array", + "items": { + "maxItems": 8, + "minItems": 5, + "type": "array", + "items": { + "maxLength": 10, + "minLength": 3, + "pattern": "\\w+", + "type": "string" + } + }, + "default": "bar2" + }, + "collectionFormat": "pipe", + "x-go-name": "BarSlice", + "description": "a BarSlice has bars which are strings", + "name": "bar_slice", + "in": "query" + }, + { + "maxItems": 20, + "minItems": 1, + "type": "array", + "items": { + "maximum": 100, + "minimum": 5, + "uniqueItems": true, + "multipleOf": 5, + "type": "integer", + "format": "int32", + "collectionFormat": "csv" + }, + "x-go-name": "NumSlice", + "description": "a NumSlice has numeric items with item-level validation", + "name": "num_slice", + "in": "query" + }, + { + "x-go-name": "Items", + "description": "the items for this order", + "name": "items", + "in": "body", + "schema": { + "type": "array", + "items": { + "type": "object", + "required": [ + "id", + "pet", + "quantity" + ], + "properties": { + "id": { + "description": "ID of this no model instance.\nids in this application start at 11 and are smaller than 1000", + "type": "integer", + "format": "int32", + "default": 3, + "maximum": 1000, + "exclusiveMaximum": true, + "minimum": 10, + "exclusiveMinimum": true, + "x-go-name": "ID" + }, + "notes": { + "description": "Notes to add 
to this item.\nThis can be used to add special instructions.", + "type": "string", + "x-go-name": "Notes" + }, + "pet": { + "$ref": "#/definitions/pet" + }, + "quantity": { + "description": "The amount of pets to add to this bucket.", + "type": "integer", + "format": "int16", + "maximum": 10, + "minimum": 1, + "x-go-name": "Quantity" + } + } + } + } + } + ] + }, + "createOrder": { + "operationId": "createOrder", + "parameters": [ + { + "x-go-name": "Order", + "description": "The order to submit.", + "name": "order", + "in": "body", + "required": true, + "schema": { + "$ref": "#/definitions/order" + } + } + ] + }, + "getOrders": { + "operationId": "getOrders", + "parameters": [ + { + "type": "array", + "items": { + "$ref": "#/definitions/OrderBodyParams" + }, + "x-go-name": "Orders", + "description": "The orders", + "name": "orders", + "in": "query", + "required": true + }, + { + "x-go-name": "Another", + "description": "And another thing", + "name": "another", + "in": "body", + "schema": { + "type": "array", + "items": { + "type": "object", + "properties": { + "that": { + "type": "string", + "x-go-name": "That" + } + } + } + } + } + ] + }, + "myFuncOperation": { + "operationId": "myFuncOperation", + "parameters": [ + { + "type": "file", + "x-go-name": "MyFormFile", + "description": "MyFormFile desc.", + "name": "myFormFile", + "in": "formData" + } + ] + }, + "myOperation": { + "operationId": "myOperation", + "parameters": [ + { + "type": "file", + "x-go-name": "MyFormFile", + "description": "MyFormFile desc.", + "name": "myFormFile", + "in": "formData" + } + ] + }, + "myOtherOperation": { + "operationId": "myOtherOperation", + "parameters": [ + { + "type": "file", + "x-go-name": "MyFormFile", + "description": "MyFormFile desc.", + "name": "myFormFile", + "in": "formData" + }, + { + "type": "integer", + "format": "int64", + "x-go-name": "ExtraParam", + "description": "ExtraParam desc.", + "name": "extraParam", + "in": "formData", + "required": true + } + ] + }, + 
"someAliasOperation": { + "operationId": "someAliasOperation", + "parameters": [ + { + "maximum": 10, + "minimum": 1, + "type": "integer", + "format": "int64", + "x-go-name": "IntAlias", + "description": "default \"in\" is \"query\" =\u003e this params should be aliased", + "name": "intAlias", + "in": "query", + "required": true + }, + { + "type": "string", + "x-go-name": "StringAlias", + "name": "stringAlias", + "in": "query" + }, + { + "type": "integer", + "format": "int64", + "x-go-name": "IntAliasPath", + "name": "intAliasPath", + "in": "path", + "required": true + }, + { + "type": "integer", + "format": "int64", + "x-go-name": "IntAliasForm", + "name": "intAliasForm", + "in": "formData" + } + ] + }, + "someOperation": { + "operationId": "someOperation", + "parameters": [ + { + "maximum": 1000, + "exclusiveMaximum": true, + "minimum": 10, + "exclusiveMinimum": true, + "type": "integer", + "format": "int64", + "default": 1, + "x-go-name": "ID", + "description": "ID of this no model instance.\nids in this application start at 11 and are smaller than 1000", + "name": "id", + "in": "path", + "required": true + }, + { + "maximum": 45, + "minimum": 3, + "multipleOf": 3, + "type": "integer", + "format": "int32", + "default": 2, + "example": 27, + "x-go-name": "Score", + "description": "The Score of this model", + "name": "score", + "in": "query", + "required": true + }, + { + "maxLength": 50, + "minLength": 4, + "pattern": "[A-Za-z0-9-.]*", + "type": "string", + "x-go-name": "Name", + "description": "Name of this no model instance", + "name": "x-hdr-name", + "in": "header", + "required": true + }, + { + "type": "string", + "format": "date-time", + "x-go-name": "Created", + "description": "Created holds the time when this entry was created", + "name": "created", + "in": "query" + }, + { + "enum": [ + "foo", + "bar", + "none" + ], + "type": "string", + "default": "bar", + "x-go-name": "CategoryOld", + "description": "The Category of this model (old enum format)", + 
"name": "category_old", + "in": "query", + "required": true + }, + { + "enum": [ + "foo", + "bar", + "none" + ], + "type": "string", + "default": "bar", + "x-go-name": "Category", + "description": "The Category of this model", + "name": "category", + "in": "query", + "required": true + }, + { + "enum": [ + 1, + 3, + 5 + ], + "type": "integer", + "format": "int64", + "default": 1, + "x-go-name": "TypeOld", + "description": "Type of this model (old enum format)", + "name": "type_old", + "in": "query" + }, + { + "enum": [ + 1, + 3, + 5 + ], + "type": "integer", + "format": "int64", + "default": 1, + "x-go-name": "Type", + "description": "Type of this model", + "name": "type", + "in": "query" + }, + { + "enum": [ + 1, + "rsq", + "qaz" + ], + "type": "integer", + "format": "int64", + "x-go-name": "BadEnum", + "description": "This is mix in enum. And actually on output should be valid form where int will be int and\nstring will also be presented.", + "name": "bad_enum", + "in": "query" + }, + { + "maxItems": 10, + "minItems": 3, + "uniqueItems": true, + "type": "array", + "items": { + "maxLength": 10, + "minLength": 3, + "pattern": "\\w+", + "type": "string", + "default": "bar" + }, + "collectionFormat": "pipe", + "x-go-name": "FooSlice", + "description": "a FooSlice has foos which are strings", + "name": "foo_slice", + "in": "query" + }, + { + "maxItems": 10, + "minItems": 3, + "uniqueItems": true, + "type": "array", + "items": { + "maxItems": 9, + "minItems": 4, + "enum": [ + "bar1", + "bar2", + "bar3" + ], + "type": "array", + "items": { + "maxItems": 8, + "minItems": 5, + "type": "array", + "items": { + "maxLength": 10, + "minLength": 3, + "pattern": "\\w+", + "type": "string" + } + }, + "default": "bar2" + }, + "collectionFormat": "pipe", + "x-go-name": "BarSlice", + "description": "a BarSlice has bars which are strings", + "name": "bar_slice", + "in": "query" + }, + { + "maxItems": 20, + "minItems": 1, + "type": "array", + "items": { + "maximum": 100, + "minimum": 
5, + "uniqueItems": true, + "multipleOf": 5, + "type": "integer", + "format": "int32", + "collectionFormat": "csv" + }, + "x-go-name": "NumSlice", + "description": "a NumSlice has numeric items with item-level validation", + "name": "num_slice", + "in": "query" + }, + { + "x-go-name": "Items", + "description": "the items for this order", + "name": "items", + "in": "body", + "schema": { + "type": "array", + "items": { + "type": "object", + "required": [ + "id", + "pet", + "quantity" + ], + "properties": { + "id": { + "description": "ID of this no model instance.\nids in this application start at 11 and are smaller than 1000", + "type": "integer", + "format": "int32", + "default": 3, + "maximum": 1000, + "exclusiveMaximum": true, + "minimum": 10, + "exclusiveMinimum": true, + "x-go-name": "ID" + }, + "notes": { + "description": "Notes to add to this item.\nThis can be used to add special instructions.", + "type": "string", + "x-go-name": "Notes" + }, + "pet": { + "$ref": "#/definitions/pet" + }, + "quantity": { + "description": "The amount of pets to add to this bucket.", + "type": "integer", + "format": "int16", + "maximum": 10, + "minimum": 1, + "x-go-name": "Quantity" + } + } + } + } + } + ] + }, + "updateOrder": { + "operationId": "updateOrder", + "parameters": [ + { + "x-go-name": "Order", + "description": "The order to submit.", + "name": "order", + "in": "body", + "required": true, + "schema": { + "$ref": "#/definitions/order" + } + } + ] + }, + "yetAnotherOperation": { + "operationId": "yetAnotherOperation", + "parameters": [ + { + "type": "integer", + "format": "int32", + "x-go-name": "Age", + "name": "age", + "in": "query" + }, + { + "type": "integer", + "format": "int64", + "x-go-name": "ID", + "description": "ID the id of this not selected model", + "name": "id", + "in": "query" + }, + { + "type": "string", + "x-go-name": "Name", + "description": "Name the name of this not selected model", + "name": "name", + "in": "query" + }, + { + "type": "string", + 
"x-go-name": "Notes", + "name": "notes", + "in": "query" + }, + { + "type": "string", + "x-go-name": "Extra", + "name": "extra", + "in": "query" + }, + { + "type": "string", + "format": "date-time", + "x-go-name": "CreatedAt", + "name": "createdAt", + "in": "query" + }, + { + "type": "string", + "x-go-name": "Informity", + "name": "informity", + "in": "formData" + }, + { + "type": "string", + "name": "NoTagName", + "in": "query" + } + ] + } +} \ No newline at end of file diff --git a/fixtures/integration/golden/classification_params_file.json b/fixtures/integration/golden/classification_params_file.json new file mode 100644 index 0000000..b234624 --- /dev/null +++ b/fixtures/integration/golden/classification_params_file.json @@ -0,0 +1,677 @@ +{ + "anotherOperation": { + "operationId": "anotherOperation", + "parameters": [ + { + "maximum": 1000, + "exclusiveMaximum": true, + "minimum": 10, + "exclusiveMinimum": true, + "type": "integer", + "format": "int64", + "default": 1, + "x-go-name": "ID", + "description": "ID of this no model instance.\nids in this application start at 11 and are smaller than 1000", + "name": "id", + "in": "path", + "required": true + }, + { + "maximum": 45, + "minimum": 3, + "multipleOf": 3, + "type": "integer", + "format": "int32", + "default": 2, + "example": 27, + "x-go-name": "Score", + "description": "The Score of this model", + "name": "score", + "in": "query", + "required": true + }, + { + "maxLength": 50, + "minLength": 4, + "pattern": "[A-Za-z0-9-.]*", + "type": "string", + "x-go-name": "Name", + "description": "Name of this no model instance", + "name": "x-hdr-name", + "in": "header", + "required": true + }, + { + "type": "string", + "format": "date-time", + "x-go-name": "Created", + "description": "Created holds the time when this entry was created", + "name": "created", + "in": "query" + }, + { + "enum": [ + "foo", + "bar", + "none" + ], + "type": "string", + "default": "bar", + "x-go-name": "CategoryOld", + "description": "The 
Category of this model (old enum format)", + "name": "category_old", + "in": "query", + "required": true + }, + { + "enum": [ + "foo", + "bar", + "none" + ], + "type": "string", + "default": "bar", + "x-go-name": "Category", + "description": "The Category of this model", + "name": "category", + "in": "query", + "required": true + }, + { + "enum": [ + 1, + 3, + 5 + ], + "type": "integer", + "format": "int64", + "default": 1, + "x-go-name": "TypeOld", + "description": "Type of this model (old enum format)", + "name": "type_old", + "in": "query" + }, + { + "enum": [ + 1, + 3, + 5 + ], + "type": "integer", + "format": "int64", + "default": 1, + "x-go-name": "Type", + "description": "Type of this model", + "name": "type", + "in": "query" + }, + { + "enum": [ + 1, + "rsq", + "qaz" + ], + "type": "integer", + "format": "int64", + "x-go-name": "BadEnum", + "description": "This is mix in enum. And actually on output should be valid form where int will be int and\nstring will also be presented.", + "name": "bad_enum", + "in": "query" + }, + { + "maxItems": 10, + "minItems": 3, + "uniqueItems": true, + "type": "array", + "items": { + "maxLength": 10, + "minLength": 3, + "pattern": "\\w+", + "type": "string", + "default": "bar" + }, + "collectionFormat": "pipe", + "x-go-name": "FooSlice", + "description": "a FooSlice has foos which are strings", + "name": "foo_slice", + "in": "query" + }, + { + "maxItems": 10, + "minItems": 3, + "uniqueItems": true, + "type": "array", + "items": { + "maxItems": 9, + "minItems": 4, + "enum": [ + "bar1", + "bar2", + "bar3" + ], + "type": "array", + "items": { + "maxItems": 8, + "minItems": 5, + "type": "array", + "items": { + "maxLength": 10, + "minLength": 3, + "pattern": "\\w+", + "type": "string" + } + }, + "default": "bar2" + }, + "collectionFormat": "pipe", + "x-go-name": "BarSlice", + "description": "a BarSlice has bars which are strings", + "name": "bar_slice", + "in": "query" + }, + { + "maxItems": 20, + "minItems": 1, + "type": "array", 
+ "items": { + "maximum": 100, + "minimum": 5, + "uniqueItems": true, + "multipleOf": 5, + "type": "integer", + "format": "int32", + "collectionFormat": "csv" + }, + "x-go-name": "NumSlice", + "description": "a NumSlice has numeric items with item-level validation", + "name": "num_slice", + "in": "query" + }, + { + "x-go-name": "Items", + "description": "the items for this order", + "name": "items", + "in": "body", + "schema": { + "type": "array", + "items": { + "type": "object", + "required": [ + "id", + "pet", + "quantity" + ], + "properties": { + "id": { + "description": "ID of this no model instance.\nids in this application start at 11 and are smaller than 1000", + "type": "integer", + "format": "int32", + "default": 3, + "maximum": 1000, + "exclusiveMaximum": true, + "minimum": 10, + "exclusiveMinimum": true, + "x-go-name": "ID" + }, + "notes": { + "description": "Notes to add to this item.\nThis can be used to add special instructions.", + "type": "string", + "x-go-name": "Notes" + }, + "pet": { + "$ref": "#/definitions/pet" + }, + "quantity": { + "description": "The amount of pets to add to this bucket.", + "type": "integer", + "format": "int16", + "maximum": 10, + "minimum": 1, + "x-go-name": "Quantity" + } + } + } + } + } + ] + }, + "createOrder": { + "operationId": "createOrder", + "parameters": [ + { + "x-go-name": "Order", + "description": "The order to submit.", + "name": "order", + "in": "body", + "required": true, + "schema": { + "$ref": "#/definitions/order" + } + } + ] + }, + "getOrders": { + "operationId": "getOrders", + "parameters": [ + { + "type": "array", + "items": { + "$ref": "#/definitions/OrderBodyParams" + }, + "x-go-name": "Orders", + "description": "The orders", + "name": "orders", + "in": "query", + "required": true + }, + { + "x-go-name": "Another", + "description": "And another thing", + "name": "another", + "in": "body", + "schema": { + "type": "array", + "items": { + "type": "object", + "properties": { + "that": { + "type": 
"string", + "x-go-name": "That" + } + } + } + } + } + ] + }, + "myFuncOperation": { + "operationId": "myFuncOperation", + "parameters": [ + { + "type": "file", + "x-go-name": "MyFormFile", + "description": "MyFormFile desc.", + "name": "myFormFile", + "in": "formData" + } + ] + }, + "myOperation": { + "operationId": "myOperation", + "parameters": [ + { + "type": "file", + "x-go-name": "MyFormFile", + "description": "MyFormFile desc.", + "name": "myFormFile", + "in": "formData" + } + ] + }, + "myOtherOperation": { + "operationId": "myOtherOperation", + "parameters": [ + { + "type": "file", + "x-go-name": "MyFormFile", + "description": "MyFormFile desc.", + "name": "myFormFile", + "in": "formData" + }, + { + "type": "integer", + "format": "int64", + "x-go-name": "ExtraParam", + "description": "ExtraParam desc.", + "name": "extraParam", + "in": "formData", + "required": true + } + ] + }, + "someAliasOperation": { + "operationId": "someAliasOperation", + "parameters": [ + { + "maximum": 10, + "minimum": 1, + "type": "integer", + "format": "int64", + "x-go-name": "IntAlias", + "description": "default \"in\" is \"query\" =\u003e this params should be aliased", + "name": "intAlias", + "in": "query", + "required": true + }, + { + "type": "string", + "x-go-name": "StringAlias", + "name": "stringAlias", + "in": "query" + }, + { + "type": "integer", + "format": "int64", + "x-go-name": "IntAliasPath", + "name": "intAliasPath", + "in": "path", + "required": true + }, + { + "type": "integer", + "format": "int64", + "x-go-name": "IntAliasForm", + "name": "intAliasForm", + "in": "formData" + } + ] + }, + "someOperation": { + "operationId": "someOperation", + "parameters": [ + { + "maximum": 1000, + "exclusiveMaximum": true, + "minimum": 10, + "exclusiveMinimum": true, + "type": "integer", + "format": "int64", + "default": 1, + "x-go-name": "ID", + "description": "ID of this no model instance.\nids in this application start at 11 and are smaller than 1000", + "name": "id", + "in": 
"path", + "required": true + }, + { + "maximum": 45, + "minimum": 3, + "multipleOf": 3, + "type": "integer", + "format": "int32", + "default": 2, + "example": 27, + "x-go-name": "Score", + "description": "The Score of this model", + "name": "score", + "in": "query", + "required": true + }, + { + "maxLength": 50, + "minLength": 4, + "pattern": "[A-Za-z0-9-.]*", + "type": "string", + "x-go-name": "Name", + "description": "Name of this no model instance", + "name": "x-hdr-name", + "in": "header", + "required": true + }, + { + "type": "string", + "format": "date-time", + "x-go-name": "Created", + "description": "Created holds the time when this entry was created", + "name": "created", + "in": "query" + }, + { + "enum": [ + "foo", + "bar", + "none" + ], + "type": "string", + "default": "bar", + "x-go-name": "CategoryOld", + "description": "The Category of this model (old enum format)", + "name": "category_old", + "in": "query", + "required": true + }, + { + "enum": [ + "foo", + "bar", + "none" + ], + "type": "string", + "default": "bar", + "x-go-name": "Category", + "description": "The Category of this model", + "name": "category", + "in": "query", + "required": true + }, + { + "enum": [ + 1, + 3, + 5 + ], + "type": "integer", + "format": "int64", + "default": 1, + "x-go-name": "TypeOld", + "description": "Type of this model (old enum format)", + "name": "type_old", + "in": "query" + }, + { + "enum": [ + 1, + 3, + 5 + ], + "type": "integer", + "format": "int64", + "default": 1, + "x-go-name": "Type", + "description": "Type of this model", + "name": "type", + "in": "query" + }, + { + "enum": [ + 1, + "rsq", + "qaz" + ], + "type": "integer", + "format": "int64", + "x-go-name": "BadEnum", + "description": "This is mix in enum. 
And actually on output should be valid form where int will be int and\nstring will also be presented.", + "name": "bad_enum", + "in": "query" + }, + { + "maxItems": 10, + "minItems": 3, + "uniqueItems": true, + "type": "array", + "items": { + "maxLength": 10, + "minLength": 3, + "pattern": "\\w+", + "type": "string", + "default": "bar" + }, + "collectionFormat": "pipe", + "x-go-name": "FooSlice", + "description": "a FooSlice has foos which are strings", + "name": "foo_slice", + "in": "query" + }, + { + "maxItems": 10, + "minItems": 3, + "uniqueItems": true, + "type": "array", + "items": { + "maxItems": 9, + "minItems": 4, + "enum": [ + "bar1", + "bar2", + "bar3" + ], + "type": "array", + "items": { + "maxItems": 8, + "minItems": 5, + "type": "array", + "items": { + "maxLength": 10, + "minLength": 3, + "pattern": "\\w+", + "type": "string" + } + }, + "default": "bar2" + }, + "collectionFormat": "pipe", + "x-go-name": "BarSlice", + "description": "a BarSlice has bars which are strings", + "name": "bar_slice", + "in": "query" + }, + { + "maxItems": 20, + "minItems": 1, + "type": "array", + "items": { + "maximum": 100, + "minimum": 5, + "uniqueItems": true, + "multipleOf": 5, + "type": "integer", + "format": "int32", + "collectionFormat": "csv" + }, + "x-go-name": "NumSlice", + "description": "a NumSlice has numeric items with item-level validation", + "name": "num_slice", + "in": "query" + }, + { + "x-go-name": "Items", + "description": "the items for this order", + "name": "items", + "in": "body", + "schema": { + "type": "array", + "items": { + "type": "object", + "required": [ + "id", + "pet", + "quantity" + ], + "properties": { + "id": { + "description": "ID of this no model instance.\nids in this application start at 11 and are smaller than 1000", + "type": "integer", + "format": "int32", + "default": 3, + "maximum": 1000, + "exclusiveMaximum": true, + "minimum": 10, + "exclusiveMinimum": true, + "x-go-name": "ID" + }, + "notes": { + "description": "Notes to add 
to this item.\nThis can be used to add special instructions.", + "type": "string", + "x-go-name": "Notes" + }, + "pet": { + "$ref": "#/definitions/pet" + }, + "quantity": { + "description": "The amount of pets to add to this bucket.", + "type": "integer", + "format": "int16", + "maximum": 10, + "minimum": 1, + "x-go-name": "Quantity" + } + } + } + } + } + ] + }, + "updateOrder": { + "operationId": "updateOrder", + "parameters": [ + { + "x-go-name": "Order", + "description": "The order to submit.", + "name": "order", + "in": "body", + "required": true, + "schema": { + "$ref": "#/definitions/order" + } + } + ] + }, + "yetAnotherOperation": { + "operationId": "yetAnotherOperation", + "parameters": [ + { + "type": "integer", + "format": "int32", + "x-go-name": "Age", + "name": "age", + "in": "query" + }, + { + "type": "integer", + "format": "int64", + "x-go-name": "ID", + "description": "ID the id of this not selected model", + "name": "id", + "in": "query" + }, + { + "type": "string", + "x-go-name": "Name", + "description": "Name the name of this not selected model", + "name": "name", + "in": "query" + }, + { + "type": "string", + "x-go-name": "Notes", + "name": "notes", + "in": "query" + }, + { + "type": "string", + "x-go-name": "Extra", + "name": "extra", + "in": "query" + }, + { + "type": "string", + "format": "date-time", + "x-go-name": "CreatedAt", + "name": "createdAt", + "in": "query" + }, + { + "type": "string", + "x-go-name": "Informity", + "name": "informity", + "in": "formData" + }, + { + "type": "string", + "name": "NoTagName", + "in": "query" + } + ] + } +} \ No newline at end of file diff --git a/fixtures/integration/golden/classification_params_issue2007.json b/fixtures/integration/golden/classification_params_issue2007.json new file mode 100644 index 0000000..b9bcb61 --- /dev/null +++ b/fixtures/integration/golden/classification_params_issue2007.json @@ -0,0 +1,17 @@ +{ + "getConfiguration": { + "operationId": "getConfiguration", + "parameters": [ + { 
+ "name": "Value", + "in": "body", + "schema": { + "type": "object", + "additionalProperties": { + "type": "string" + } + } + } + ] + } +} \ No newline at end of file diff --git a/fixtures/integration/golden/classification_params_issue2011.json b/fixtures/integration/golden/classification_params_issue2011.json new file mode 100644 index 0000000..3b0d71c --- /dev/null +++ b/fixtures/integration/golden/classification_params_issue2011.json @@ -0,0 +1,13 @@ +{ + "putNumPlate": { + "operationId": "putNumPlate", + "parameters": [ + { + "x-go-name": "NumPlates", + "name": "num_plates", + "in": "body", + "schema": {} + } + ] + } +} \ No newline at end of file diff --git a/fixtures/integration/golden/classification_responses.json b/fixtures/integration/golden/classification_responses.json new file mode 100644 index 0000000..f831651 --- /dev/null +++ b/fixtures/integration/golden/classification_responses.json @@ -0,0 +1,251 @@ +{ + "complexerOne": { + "description": "A ComplexerOne is composed of a SimpleOne and some extra fields.", + "headers": { + "NoTagName": { + "type": "string" + }, + "age": { + "type": "integer", + "format": "int32" + }, + "createdAt": { + "type": "string", + "format": "date-time" + }, + "extra": { + "type": "string" + }, + "id": { + "type": "integer", + "format": "int64", + "description": "ID the id of this not selected model" + }, + "name": { + "type": "string", + "description": "Name the name of this not selected model" + }, + "notes": { + "type": "string" + } + } + }, + "complexerPointerOne": { + "description": "A ComplexerPointerOne is composed of a *SimpleOne and some extra fields.", + "headers": { + "age": { + "type": "integer", + "format": "int32" + }, + "extra": { + "type": "integer", + "format": "int64" + }, + "id": { + "type": "integer", + "format": "int64" + }, + "name": { + "type": "string" + } + } + }, + "fileResponse": { + "description": "File response", + "schema": { + "type": "file" + } + }, + "genericError": { + "description": "A 
GenericError is an error that is used when no other error is appropriate", + "schema": { + "type": "object", + "properties": { + "Message": { + "type": "string" + } + } + } + }, + "resp": { + "description": "Resp a response for testing", + "schema": { + "$ref": "#/definitions/user" + }, + "headers": { + "UUID": { + "type": "string", + "format": "uuid" + } + } + }, + "simpleOnes": { + "description": "SimpleOnes is a collection of SimpleOne", + "headers": { + "ones": { + "type": "array", + "items": { + "$ref": "#/definitions/SimpleOne" + } + } + } + }, + "simpleOnesFunc": { + "description": "SimpleOnesFunc is a collection of SimpleOne", + "headers": { + "ones": { + "type": "array", + "items": { + "$ref": "#/definitions/SimpleOne" + } + } + } + }, + "someResponse": { + "description": "A SomeResponse is a dummy response object to test parsing.\n\nThe properties are the same as the other structs used to test parsing.", + "schema": { + "type": "array", + "items": { + "type": "object", + "required": [ + "id", + "pet", + "quantity" + ], + "properties": { + "id": { + "description": "ID of this some response instance.\nids in this application start at 11 and are smaller than 1000", + "type": "integer", + "format": "int32", + "maximum": 1000, + "exclusiveMaximum": true, + "minimum": 10, + "exclusiveMinimum": true, + "x-go-name": "ID" + }, + "notes": { + "description": "Notes to add to this item.\nThis can be used to add special instructions.", + "type": "string", + "x-go-name": "Notes" + }, + "pet": { + "$ref": "#/definitions/pet" + }, + "quantity": { + "description": "The amount of pets to add to this bucket.", + "type": "integer", + "format": "int16", + "maximum": 10, + "minimum": 1, + "x-go-name": "Quantity" + } + } + } + }, + "headers": { + "active": { + "type": "boolean", + "default": true, + "description": "Active state of the record" + }, + "bar_slice": { + "maxItems": 10, + "minItems": 3, + "uniqueItems": true, + "type": "array", + "items": { + "maxItems": 9, + 
"minItems": 4, + "type": "array", + "items": { + "maxItems": 8, + "minItems": 5, + "type": "array", + "items": { + "maxLength": 10, + "minLength": 3, + "pattern": "\\w+", + "type": "string" + } + } + }, + "collectionFormat": "pipe", + "description": "a BarSlice has bars which are strings" + }, + "created": { + "type": "string", + "format": "date-time", + "description": "Created holds the time when this entry was created" + }, + "foo_slice": { + "maxItems": 10, + "minItems": 3, + "uniqueItems": true, + "type": "array", + "items": { + "maxLength": 10, + "minLength": 3, + "pattern": "\\w+", + "type": "string", + "example": "foo" + }, + "collectionFormat": "pipe", + "description": "a FooSlice has foos which are strings" + }, + "id": { + "maximum": 1000, + "exclusiveMaximum": true, + "minimum": 10, + "exclusiveMinimum": true, + "type": "integer", + "format": "int64", + "default": 11, + "description": "ID of this some response instance.\nids in this application start at 11 and are smaller than 1000" + }, + "score": { + "maximum": 45, + "minimum": 3, + "multipleOf": 3, + "type": "integer", + "format": "int32", + "example": 27, + "description": "The Score of this model" + }, + "x-hdr-name": { + "maxLength": 50, + "minLength": 4, + "pattern": "[A-Za-z0-9-.]*", + "type": "string", + "description": "Name of this some response instance" + } + } + }, + "validationError": { + "description": "A ValidationError is an error that is used when the required input fails validation.", + "schema": { + "type": "object", + "properties": { + "FieldName": { + "description": "An optional field name to which this validation applies", + "type": "string" + }, + "Message": { + "description": "The validation message", + "type": "string" + } + } + }, + "headers": { + "code": { + "enum": [ + "foo", + "bar" + ], + "type": "integer", + "format": "int64", + "default": 400, + "description": "in: header" + } + } + } +} \ No newline at end of file diff --git 
a/fixtures/integration/golden/classification_responses_issue2007.json b/fixtures/integration/golden/classification_responses_issue2007.json new file mode 100644 index 0000000..6fcc36b --- /dev/null +++ b/fixtures/integration/golden/classification_responses_issue2007.json @@ -0,0 +1,11 @@ +{ + "GetConfiguration": { + "description": "Success", + "schema": { + "type": "object", + "additionalProperties": { + "type": "string" + } + } + } +} \ No newline at end of file diff --git a/fixtures/integration/golden/classification_responses_issue2011.json b/fixtures/integration/golden/classification_responses_issue2011.json new file mode 100644 index 0000000..ba6aae8 --- /dev/null +++ b/fixtures/integration/golden/classification_responses_issue2011.json @@ -0,0 +1,6 @@ +{ + "NumPlatesResp": { + "description": "", + "schema": {} + } +} \ No newline at end of file diff --git a/fixtures/integration/golden/classification_routes.json b/fixtures/integration/golden/classification_routes.json new file mode 100644 index 0000000..7746e7b --- /dev/null +++ b/fixtures/integration/golden/classification_routes.json @@ -0,0 +1,332 @@ +{ + "/orders": { + "get": { + "consumes": [ + "application/json", + "application/x-protobuf" + ], + "produces": [ + "application/json", + "application/x-protobuf" + ], + "schemes": [ + "http", + "https", + "ws", + "wss" + ], + "tags": [ + "orders" + ], + "summary": "lists orders filtered by some parameters.", + "operationId": "listOrders", + "responses": { + "200": { + "$ref": "#/responses/someResponse" + }, + "422": { + "$ref": "#/responses/validationError" + }, + "default": { + "$ref": "#/responses/genericError" + } + }, + "security": [ + { + "api_key": [] + }, + { + "oauth": [ + "orders:read", + "https://www.googleapis.com/auth/userinfo.email" + ] + } + ], + "x-some-flag": "false", + "x-some-list": [ + "item1", + "item2", + "item3" + ], + "x-some-object": { + "key1": "value1", + "key2": "value2", + "key3": "value3", + "subobject": { + "subkey1": "subvalue1", 
+ "subkey2": "subvalue2" + } + } + }, + "post": { + "consumes": [ + "application/json", + "application/x-protobuf" + ], + "produces": [ + "application/json", + "application/x-protobuf" + ], + "schemes": [ + "http", + "https", + "ws", + "wss" + ], + "tags": [ + "orders" + ], + "summary": "create an order based on the parameters.", + "operationId": "createOrder", + "responses": { + "200": { + "$ref": "#/responses/someResponse" + }, + "422": { + "$ref": "#/responses/validationError" + }, + "default": { + "$ref": "#/responses/genericError" + } + }, + "security": [ + { + "api_key": [] + }, + { + "oauth": [ + "read", + "write" + ] + } + ] + } + }, + "/orders/{id}": { + "get": { + "consumes": [ + "application/json", + "application/x-protobuf" + ], + "produces": [ + "application/json", + "application/x-protobuf" + ], + "schemes": [ + "http", + "https", + "ws", + "wss" + ], + "tags": [ + "orders" + ], + "summary": "gets the details for an order.", + "operationId": "orderDetails", + "responses": { + "200": { + "$ref": "#/responses/someResponse" + }, + "201": { + "$ref": "#/responses/fileResponse" + }, + "422": { + "$ref": "#/responses/validationError" + }, + "default": { + "$ref": "#/responses/genericError" + } + }, + "security": [ + { + "api_key": [] + }, + { + "oauth": [ + "read", + "write" + ] + } + ] + }, + "put": { + "description": "When the order doesn't exist this will return an error.", + "consumes": [ + "application/json", + "application/x-protobuf" + ], + "produces": [ + "application/json", + "application/x-protobuf" + ], + "schemes": [ + "http", + "https", + "ws", + "wss" + ], + "tags": [ + "orders" + ], + "summary": "Update the details for an order.", + "operationId": "updateOrder", + "responses": { + "200": { + "$ref": "#/responses/someResponse" + }, + "422": { + "$ref": "#/responses/validationError" + }, + "default": { + "$ref": "#/responses/genericError" + } + }, + "security": [ + { + "api_key": [] + }, + { + "oauth": [ + "read", + "write" + ] + } + ] + }, + 
"delete": { + "consumes": [ + "application/json", + "application/x-protobuf" + ], + "produces": [ + "application/json", + "application/x-protobuf" + ], + "schemes": [ + "http", + "https", + "ws", + "wss" + ], + "summary": "delete a particular order.", + "operationId": "deleteOrder", + "responses": { + "200": { + "$ref": "#/responses/someResponse" + }, + "202": { + "description": "Some description" + }, + "422": { + "$ref": "#/responses/validationError" + }, + "default": { + "$ref": "#/responses/genericError" + } + }, + "security": [ + { + "api_key": [] + }, + { + "oauth": [ + "read", + "write" + ] + } + ] + } + }, + "/pets": { + "get": { + "description": "This will show all available pets by default.\nYou can get the pets that are out of stock", + "consumes": [ + "application/json", + "application/x-protobuf" + ], + "produces": [ + "application/json", + "application/x-protobuf" + ], + "schemes": [ + "http", + "https", + "ws", + "wss" + ], + "tags": [ + "pets", + "users" + ], + "summary": "Lists pets filtered by some parameters.", + "operationId": "listPets", + "responses": { + "200": { + "$ref": "#/responses/someResponse" + }, + "422": { + "$ref": "#/responses/validationError" + }, + "default": { + "$ref": "#/responses/genericError" + } + }, + "security": [ + { + "api_key": [] + }, + { + "oauth": [ + "read", + "write" + ] + } + ], + "x-some-flag": "true" + }, + "post": { + "consumes": [ + "application/json", + "application/x-protobuf" + ], + "produces": [ + "application/json", + "application/x-protobuf" + ], + "schemes": [ + "http", + "https", + "ws", + "wss" + ], + "tags": [ + "pets", + "users" + ], + "summary": "Create a pet based on the parameters.", + "operationId": "createPet", + "responses": { + "200": { + "$ref": "#/responses/someResponse" + }, + "422": { + "$ref": "#/responses/validationError" + }, + "default": { + "$ref": "#/responses/genericError" + } + }, + "security": [ + { + "api_key": [] + }, + { + "oauth": [ + "read", + "write" + ] + } + ] + } + } +} 
\ No newline at end of file diff --git a/fixtures/integration/golden/classification_routes_body.json b/fixtures/integration/golden/classification_routes_body.json new file mode 100644 index 0000000..b14aa22 --- /dev/null +++ b/fixtures/integration/golden/classification_routes_body.json @@ -0,0 +1,535 @@ +{ + "/orders": { + "get": { + "consumes": [ + "application/json", + "application/x-protobuf" + ], + "produces": [ + "application/json", + "application/x-protobuf" + ], + "schemes": [ + "http", + "https", + "ws", + "wss" + ], + "tags": [ + "orders" + ], + "summary": "lists orders filtered by some parameters.", + "operationId": "listOrders", + "responses": { + "200": { + "description": "someResponse", + "schema": { + "$ref": "#/definitions/someResponse" + } + }, + "422": { + "description": "validationError", + "schema": { + "$ref": "#/definitions/validationError" + } + }, + "default": { + "description": "genericError", + "schema": { + "$ref": "#/definitions/genericError" + } + } + }, + "security": [ + { + "api_key": [] + }, + { + "oauth": [ + "orders:read", + "https://www.googleapis.com/auth/userinfo.email" + ] + } + ], + "x-some-flag": "false", + "x-some-list": [ + "item1", + "item2", + "item3" + ], + "x-some-object": { + "key1": "value1", + "key2": "value2", + "key3": "value3", + "subobject": { + "subkey1": "subvalue1", + "subkey2": "subvalue2" + } + } + }, + "post": { + "consumes": [ + "application/json", + "application/x-protobuf" + ], + "produces": [ + "application/json", + "application/x-protobuf" + ], + "schemes": [ + "http", + "https", + "ws", + "wss" + ], + "tags": [ + "orders" + ], + "summary": "create an order based on the parameters.", + "operationId": "createOrder", + "parameters": [ + { + "description": "The order id", + "name": "id", + "allowEmptyValue": true + }, + { + "description": "The request model.", + "name": "request", + "in": "body", + "schema": { + "description": "The request model.", + "type": "object", + "$ref": "#/definitions/orderModel" + 
} + } + ], + "responses": { + "200": { + "description": "someResponse", + "schema": { + "$ref": "#/definitions/someResponse" + } + }, + "422": { + "description": "validationError", + "schema": { + "$ref": "#/definitions/validationError" + } + }, + "default": { + "description": "genericError", + "schema": { + "$ref": "#/definitions/genericError" + } + } + }, + "security": [ + { + "api_key": [] + }, + { + "oauth": [ + "read", + "write" + ] + } + ] + } + }, + "/orders/{id}": { + "get": { + "consumes": [ + "application/json", + "application/x-protobuf" + ], + "produces": [ + "application/json", + "application/x-protobuf" + ], + "schemes": [ + "http", + "https", + "ws", + "wss" + ], + "tags": [ + "orders" + ], + "summary": "gets the details for an order.", + "operationId": "orderDetails", + "responses": { + "200": { + "description": "someResponse", + "schema": { + "$ref": "#/definitions/someResponse" + } + }, + "422": { + "description": "validationError", + "schema": { + "$ref": "#/definitions/validationError" + } + }, + "default": { + "description": "genericError", + "schema": { + "$ref": "#/definitions/genericError" + } + } + }, + "security": [ + { + "api_key": [] + }, + { + "oauth": [ + "read", + "write" + ] + } + ] + }, + "put": { + "description": "When the order doesn't exist this will return an error.", + "consumes": [ + "application/json", + "application/x-protobuf" + ], + "produces": [ + "application/json", + "application/x-protobuf" + ], + "schemes": [ + "http", + "https", + "ws", + "wss" + ], + "tags": [ + "orders" + ], + "summary": "Update the details for an order.", + "operationId": "updateOrder", + "responses": { + "200": { + "description": "someResponse", + "schema": { + "$ref": "#/definitions/someResponse" + } + }, + "422": { + "description": "validationError", + "schema": { + "$ref": "#/definitions/validationError" + } + }, + "default": { + "description": "genericError", + "schema": { + "$ref": "#/definitions/genericError" + } + } + }, + "security": [ + 
{ + "api_key": [] + }, + { + "oauth": [ + "read", + "write" + ] + } + ] + }, + "delete": { + "consumes": [ + "application/json", + "application/x-protobuf" + ], + "produces": [ + "application/json", + "application/x-protobuf" + ], + "schemes": [ + "http", + "https", + "ws", + "wss" + ], + "summary": "delete a particular order.", + "operationId": "deleteOrder", + "responses": { + "200": { + "description": "someResponse", + "schema": { + "$ref": "#/definitions/someResponse" + } + }, + "422": { + "description": "validationError", + "schema": { + "$ref": "#/definitions/validationError" + } + }, + "default": { + "description": "genericError", + "schema": { + "$ref": "#/definitions/genericError" + } + } + }, + "security": [ + { + "api_key": [] + }, + { + "oauth": [ + "read", + "write" + ] + } + ] + } + }, + "/param-test": { + "post": { + "consumes": [ + "application/json", + "application/x-protobuf" + ], + "produces": [ + "application/json", + "application/x-protobuf" + ], + "schemes": [ + "http", + "https", + "ws", + "wss" + ], + "tags": [ + "params" + ], + "summary": "Allow some params with constraints.", + "operationId": "testParams", + "parameters": [ + { + "maximum": 20, + "minimum": 10, + "type": "number", + "default": 15, + "description": "some number", + "name": "someNumber", + "in": "path", + "required": true, + "allowEmptyValue": true + }, + { + "maxLength": 20, + "minLength": 5, + "type": "array", + "description": "some query values", + "name": "someQuery", + "in": "query" + }, + { + "type": "boolean", + "default": true, + "description": "some boolean", + "name": "someBoolean", + "in": "path" + }, + { + "type": "boolean", + "format": "abcde", + "default": false, + "description": "test constraints on invalid types", + "name": "constraintsOnInvalidType", + "in": "query" + }, + { + "description": "test no type", + "name": "noType" + }, + { + "description": "The request model.", + "name": "request", + "in": "body", + "schema": { + "description": "The request 
model.", + "type": "string", + "default": "orange", + "enum": [ + "apple", + "orange", + "pineapple", + "peach", + "plum" + ] + } + } + ], + "responses": { + "200": { + "description": "someResponse", + "schema": { + "$ref": "#/definitions/someResponse" + } + }, + "422": { + "description": "validationError", + "schema": { + "$ref": "#/definitions/validationError" + } + }, + "default": { + "description": "genericError", + "schema": { + "$ref": "#/definitions/genericError" + } + } + }, + "security": [ + { + "api_key": [] + }, + { + "oauth": [ + "read", + "write" + ] + } + ] + } + }, + "/pets": { + "get": { + "description": "This will show all available pets by default.\nYou can get the pets that are out of stock", + "consumes": [ + "application/json", + "application/x-protobuf" + ], + "produces": [ + "application/json", + "application/x-protobuf" + ], + "schemes": [ + "http", + "https", + "ws", + "wss" + ], + "tags": [ + "pets", + "users" + ], + "summary": "Lists pets filtered by some parameters.", + "operationId": "listPets", + "responses": { + "200": { + "description": "someResponse", + "schema": { + "$ref": "#/definitions/someResponse" + } + }, + "422": { + "description": "validationError", + "schema": { + "$ref": "#/definitions/validationError" + } + }, + "default": { + "description": "genericError", + "schema": { + "$ref": "#/definitions/genericError" + } + } + }, + "security": [ + { + "api_key": [] + }, + { + "oauth": [ + "read", + "write" + ] + } + ], + "x-some-flag": "true" + }, + "post": { + "consumes": [ + "application/json", + "application/x-protobuf" + ], + "produces": [ + "application/json", + "application/x-protobuf" + ], + "schemes": [ + "http", + "https", + "ws", + "wss" + ], + "tags": [ + "pets", + "users" + ], + "summary": "Create a pet based on the parameters.", + "operationId": "createPet", + "parameters": [ + { + "description": "The request model.", + "name": "request", + "in": "body", + "schema": { + "description": "The request model.", + "type": 
"object", + "$ref": "#/definitions/petModel" + } + }, + { + "description": "The pet id", + "name": "id", + "in": "path", + "required": true + } + ], + "responses": { + "200": { + "description": "someResponse", + "schema": { + "$ref": "#/definitions/someResponse" + } + }, + "422": { + "description": "validationError", + "schema": { + "$ref": "#/definitions/validationError" + } + }, + "default": { + "description": "genericError", + "schema": { + "$ref": "#/definitions/genericError" + } + } + }, + "security": [ + { + "api_key": [] + }, + { + "oauth": [ + "read", + "write" + ] + } + ] + } + } +} \ No newline at end of file diff --git a/fixtures/integration/golden/classification_schema_AllOfModel.json b/fixtures/integration/golden/classification_schema_AllOfModel.json new file mode 100644 index 0000000..5f4764b --- /dev/null +++ b/fixtures/integration/golden/classification_schema_AllOfModel.json @@ -0,0 +1,49 @@ +{ + "AllOfModel": { + "description": "An AllOfModel is composed out of embedded structs but it should build\nan allOf property", + "allOf": [ + { + "type": "object", + "properties": { + "age": { + "type": "integer", + "format": "int32", + "x-go-name": "Age" + }, + "id": { + "type": "integer", + "format": "int64", + "x-go-name": "ID" + }, + "name": { + "type": "string", + "x-go-name": "Name" + } + } + }, + { + "$ref": "#/definitions/withNotes" + }, + { + "type": "object", + "properties": { + "cat": { + "type": "string", + "x-go-name": "Cat" + }, + "createdAt": { + "type": "string", + "format": "date-time", + "x-go-name": "CreatedAt" + }, + "did": { + "type": "integer", + "format": "int64", + "x-go-name": "DID" + } + } + } + ], + "x-go-package": "github.com/go-openapi/codescan/fixtures/goparsing/classification/models" + } +} \ No newline at end of file diff --git a/fixtures/integration/golden/classification_schema_NamedWithType.json b/fixtures/integration/golden/classification_schema_NamedWithType.json new file mode 100644 index 0000000..3e2392f --- /dev/null +++ 
b/fixtures/integration/golden/classification_schema_NamedWithType.json @@ -0,0 +1,13 @@ +{ + "namedWithType": { + "type": "object", + "properties": { + "some_map": { + "type": "object", + "x-go-name": "SomeMap" + } + }, + "x-go-name": "NamedWithType", + "x-go-package": "github.com/go-openapi/codescan/fixtures/goparsing/classification/models" + } +} \ No newline at end of file diff --git a/fixtures/integration/golden/classification_schema_NoModel.json b/fixtures/integration/golden/classification_schema_NoModel.json new file mode 100644 index 0000000..eee3651 --- /dev/null +++ b/fixtures/integration/golden/classification_schema_NoModel.json @@ -0,0 +1,261 @@ +{ + "NoModel": { + "description": "NoModel exists in a package\nbut is not annotated with the swagger model annotations\nso it should now show up in a test.", + "type": "object", + "title": "NoModel is a struct without an annotation.", + "required": [ + "id", + "score", + "name" + ], + "properties": { + "NoNameOmitEmpty": { + "description": "A field which has omitempty set but no name", + "type": "string" + }, + "bar_slice": { + "description": "a BarSlice has bars which are strings", + "type": "array", + "maxItems": 10, + "minItems": 3, + "uniqueItems": true, + "items": { + "type": "array", + "maxItems": 9, + "minItems": 4, + "items": { + "type": "array", + "maxItems": 8, + "minItems": 5, + "items": { + "type": "string", + "maxLength": 10, + "minLength": 3, + "pattern": "\\w+" + } + } + }, + "x-go-name": "BarSlice" + }, + "created": { + "description": "Created holds the time when this entry was created", + "type": "string", + "format": "date-time", + "x-go-name": "Created", + "readOnly": true + }, + "deep_time_slice": { + "description": "a DeepSlice has bars which are time", + "type": "array", + "maxItems": 10, + "minItems": 3, + "uniqueItems": true, + "items": { + "type": "array", + "maxItems": 9, + "minItems": 4, + "items": { + "type": "array", + "maxItems": 8, + "minItems": 5, + "items": { + "type": "string", 
+ "format": "date-time" + } + } + }, + "x-go-name": "DeepTimeSlice" + }, + "foo_slice": { + "description": "a FooSlice has foos which are strings", + "type": "array", + "maxItems": 10, + "minItems": 3, + "uniqueItems": true, + "items": { + "type": "string", + "maxLength": 10, + "minLength": 3, + "pattern": "\\w+" + }, + "x-go-name": "FooSlice" + }, + "gocreated": { + "description": "GoTimeCreated holds the time when this entry was created in go time.Time", + "type": "string", + "format": "date-time", + "x-go-name": "GoTimeCreated" + }, + "id": { + "description": "ID of this no model instance.\nids in this application start at 11 and are smaller than 1000", + "type": "integer", + "format": "int64", + "default": 11, + "maximum": 1000, + "exclusiveMaximum": true, + "minimum": 10, + "exclusiveMinimum": true, + "x-go-name": "ID" + }, + "items": { + "description": "the items for this order", + "type": "array", + "items": { + "type": "object", + "required": [ + "id", + "pet", + "quantity", + "expiration" + ], + "properties": { + "expiration": { + "description": "A dummy expiration date.", + "type": "string", + "format": "date-time", + "x-go-name": "Expiration" + }, + "id": { + "description": "ID of this no model instance.\nids in this application start at 11 and are smaller than 1000", + "type": "integer", + "format": "int32", + "default": 11, + "maximum": 1000, + "exclusiveMaximum": true, + "minimum": 10, + "exclusiveMinimum": true, + "x-go-name": "ID" + }, + "notes": { + "description": "Notes to add to this item.\nThis can be used to add special instructions.", + "type": "string", + "x-go-name": "Notes" + }, + "pet": { + "$ref": "#/definitions/pet" + }, + "quantity": { + "description": "The amount of pets to add to this bucket.", + "type": "integer", + "format": "int16", + "maximum": 10, + "minimum": 1, + "x-go-name": "Quantity" + } + } + }, + "x-go-name": "Items" + }, + "name": { + "description": "Name of this no model instance", + "type": "string", + "maxLength": 50, 
+ "minLength": 4, + "pattern": "[A-Za-z0-9-.]*", + "x-go-name": "Name", + "x-property-array": [ + "value1", + "value2" + ], + "x-property-array-obj": [ + { + "name": "obj", + "value": "field" + } + ], + "x-property-value": "value" + }, + "noteb64": { + "description": "Note is a free form data in base64", + "type": "string", + "format": "byte", + "x-go-name": "Note" + }, + "score": { + "description": "The Score of this model", + "type": "integer", + "format": "int32", + "maximum": 45, + "minimum": 3, + "multipleOf": 3, + "x-go-name": "Score", + "example": 27 + }, + "time_slice": { + "description": "a TimeSlice is a slice of times", + "type": "array", + "maxItems": 10, + "minItems": 3, + "uniqueItems": true, + "items": { + "type": "string", + "format": "date-time" + }, + "x-go-name": "TimeSlice" + } + }, + "x-go-package": "github.com/go-openapi/codescan/fixtures/goparsing/classification/models" + }, + "order": { + "description": "An order can either be created, processed or completed.", + "type": "object", + "title": "StoreOrder represents an order in this application.", + "required": [ + "id", + "userId", + "category" + ], + "properties": { + "category": { + "description": "the category of this user", + "type": "string", + "default": "bar", + "enum": [ + "foo", + "bar", + "none" + ], + "x-go-name": "Category" + }, + "id": { + "description": "the id for this order", + "type": "integer", + "format": "int64", + "minimum": 1, + "x-go-name": "ID" + }, + "items": { + "description": "the items for this order", + "type": "array", + "items": { + "type": "object", + "properties": { + "id": { + "type": "integer", + "format": "int32", + "x-go-name": "ID" + }, + "pet": { + "$ref": "#/definitions/pet" + }, + "quantity": { + "type": "integer", + "format": "int16", + "x-go-name": "Quantity" + } + } + }, + "x-go-name": "Items" + }, + "userId": { + "description": "the name for this user", + "type": "integer", + "format": "int64", + "minLength": 3, + "x-go-name": "UserID" + } + }, + 
"x-go-name": "StoreOrder", + "x-go-package": "github.com/go-openapi/codescan/fixtures/goparsing/classification/models" + } +} \ No newline at end of file diff --git a/fixtures/integration/golden/classification_schema_NullString.json b/fixtures/integration/golden/classification_schema_NullString.json new file mode 100644 index 0000000..f1a1dbe --- /dev/null +++ b/fixtures/integration/golden/classification_schema_NullString.json @@ -0,0 +1,6 @@ +{ + "NullString": { + "type": "string", + "x-go-package": "github.com/go-openapi/codescan/fixtures/goparsing/classification/models" + } +} \ No newline at end of file diff --git a/fixtures/integration/golden/classification_schema_interface_discriminators.json b/fixtures/integration/golden/classification_schema_interface_discriminators.json new file mode 100644 index 0000000..1c0eb64 --- /dev/null +++ b/fixtures/integration/golden/classification_schema_interface_discriminators.json @@ -0,0 +1,229 @@ +{ + "Identifiable": { + "description": "Identifiable is an interface for things that have an ID", + "type": "object", + "required": [ + "id" + ], + "properties": { + "id": { + "description": "ID of this no model instance.\nids in this application start at 11 and are smaller than 1000", + "type": "integer", + "format": "int64", + "maximum": 1000, + "exclusiveMaximum": true, + "minimum": 10, + "exclusiveMinimum": true, + "x-go-name": "ID" + } + }, + "x-go-package": "github.com/go-openapi/codescan/fixtures/goparsing/classification/models" + }, + "TeslaCar": { + "description": "TeslaCar is a tesla car", + "type": "object", + "properties": { + "autoPilot": { + "description": "AutoPilot returns true when it supports autopilot", + "type": "boolean", + "x-go-name": "AutoPilot" + }, + "model": { + "description": "The model of tesla car\n\ndiscriminated: true", + "type": "string", + "x-go-name": "Model" + } + }, + "x-go-package": "github.com/go-openapi/codescan/fixtures/goparsing/classification/models" + }, + "animal": { + "description": 
"it should deserialize into one of the struct types that\nenlist for being implementations of this struct", + "type": "object", + "title": "A BaseStruct is a struct that has subtypes.", + "required": [ + "id", + "name" + ], + "properties": { + "id": { + "description": "ID of this no model instance.\nids in this application start at 11 and are smaller than 1000", + "type": "integer", + "format": "int64", + "maximum": 1000, + "exclusiveMaximum": true, + "minimum": 10, + "exclusiveMinimum": true, + "x-go-name": "ID" + }, + "jsonClass": { + "description": "StructType the type of this polymorphic model", + "type": "string", + "x-go-name": "StructType" + }, + "name": { + "description": "Name of this no model instance", + "type": "string", + "maxLength": 50, + "minLength": 4, + "pattern": "[A-Za-z0-9-.]*", + "x-go-name": "Name" + } + }, + "x-go-name": "BaseStruct", + "x-go-package": "github.com/go-openapi/codescan/fixtures/goparsing/classification/models", + "discriminator": "jsonClass" + }, + "cars": { + "description": "Cars is a collection of cars", + "type": "object", + "properties": { + "cars": { + "type": "array", + "items": { + "$ref": "#/definitions/TeslaCar" + }, + "x-go-name": "Cars" + } + }, + "x-go-name": "Cars", + "x-go-package": "github.com/go-openapi/codescan/fixtures/goparsing/classification/models" + }, + "fish": { + "description": "Fish represents a base type implemented as interface\nthe nullary methods of this interface will be included as", + "allOf": [ + { + "type": "object", + "required": [ + "id" + ], + "properties": { + "id": { + "description": "ID of this no model instance.\nids in this application start at 11 and are smaller than 1000", + "type": "integer", + "format": "int64", + "maximum": 1000, + "exclusiveMaximum": true, + "minimum": 10, + "exclusiveMinimum": true, + "x-go-name": "ID" + } + } + }, + { + "$ref": "#/definitions/water" + }, + { + "$ref": "#/definitions/extra" + }, + { + "type": "object", + "properties": { + "colorName": { + 
"type": "string", + "x-go-name": "ColorName" + } + } + }, + { + "type": "object", + "required": [ + "name" + ], + "properties": { + "jsonClass": { + "description": "StructType the type of this polymorphic model", + "type": "string", + "x-go-name": "StructType" + }, + "name": { + "description": "Name of this no model instance", + "type": "string", + "maxLength": 50, + "minLength": 4, + "pattern": "[A-Za-z0-9-.]*", + "x-go-name": "Name" + } + }, + "discriminator": "jsonClass" + } + ], + "x-go-name": "Fish", + "x-go-package": "github.com/go-openapi/codescan/fixtures/goparsing/classification/models" + }, + "modelA": { + "description": "The ModelA version of the tesla car", + "type": "object", + "properties": { + "Tesla": { + "$ref": "#/definitions/TeslaCar" + }, + "doors": { + "description": "The number of doors on this Model A", + "type": "integer", + "format": "int64", + "x-go-name": "Doors" + } + }, + "x-go-name": "ModelA", + "x-go-package": "github.com/go-openapi/codescan/fixtures/goparsing/classification/models" + }, + "modelS": { + "description": "The ModelS version of the tesla car", + "allOf": [ + { + "$ref": "#/definitions/TeslaCar" + }, + { + "type": "object", + "properties": { + "edition": { + "description": "The edition of this Model S", + "type": "string", + "x-go-name": "Edition" + } + } + } + ], + "x-class": "com.tesla.models.ModelS", + "x-go-name": "ModelS", + "x-go-package": "github.com/go-openapi/codescan/fixtures/goparsing/classification/models" + }, + "modelX": { + "description": "The ModelX version of the tesla car", + "allOf": [ + { + "$ref": "#/definitions/TeslaCar" + }, + { + "type": "object", + "properties": { + "doors": { + "description": "The number of doors on this Model X", + "type": "integer", + "format": "int64", + "x-go-name": "Doors" + } + } + } + ], + "x-class": "com.tesla.models.ModelX", + "x-go-name": "ModelX", + "x-go-package": "github.com/go-openapi/codescan/fixtures/goparsing/classification/models" + }, + "water": { + 
"description": "WaterType is an interface describing a water type", + "type": "object", + "properties": { + "saltWater": { + "type": "boolean", + "x-go-name": "SaltWater" + }, + "sweetWater": { + "type": "boolean", + "x-go-name": "SweetWater" + } + }, + "x-go-name": "WaterType", + "x-go-package": "github.com/go-openapi/codescan/fixtures/goparsing/classification/models" + } +} \ No newline at end of file diff --git a/fixtures/integration/golden/classification_schema_struct_discriminators.json b/fixtures/integration/golden/classification_schema_struct_discriminators.json new file mode 100644 index 0000000..2ebb2db --- /dev/null +++ b/fixtures/integration/golden/classification_schema_struct_discriminators.json @@ -0,0 +1,84 @@ +{ + "animal": { + "description": "it should deserialize into one of the struct types that\nenlist for being implementations of this struct", + "type": "object", + "title": "A BaseStruct is a struct that has subtypes.", + "required": [ + "id", + "name" + ], + "properties": { + "id": { + "description": "ID of this no model instance.\nids in this application start at 11 and are smaller than 1000", + "type": "integer", + "format": "int64", + "maximum": 1000, + "exclusiveMaximum": true, + "minimum": 10, + "exclusiveMinimum": true, + "x-go-name": "ID" + }, + "jsonClass": { + "description": "StructType the type of this polymorphic model", + "type": "string", + "x-go-name": "StructType" + }, + "name": { + "description": "Name of this no model instance", + "type": "string", + "maxLength": 50, + "minLength": 4, + "pattern": "[A-Za-z0-9-.]*", + "x-go-name": "Name" + } + }, + "x-go-name": "BaseStruct", + "x-go-package": "github.com/go-openapi/codescan/fixtures/goparsing/classification/models", + "discriminator": "jsonClass" + }, + "gazelle": { + "description": "The struct includes the BaseStruct and that embedded value\nis annotated with the discriminator value annotation so it\nwhere it only requires 1 argument because it knows which\ndiscriminator type 
this belongs to", + "title": "A Gazelle is a struct is discriminated for BaseStruct.", + "allOf": [ + { + "$ref": "#/definitions/animal" + }, + { + "type": "object", + "properties": { + "hornSize": { + "description": "The size of the horns", + "type": "number", + "format": "float", + "x-go-name": "HornSize" + } + } + } + ], + "x-class": "a.b.c.d.E", + "x-go-name": "Gazelle", + "x-go-package": "github.com/go-openapi/codescan/fixtures/goparsing/classification/models" + }, + "giraffe": { + "description": "the annotation is not necessary here because of inclusion\nof a discriminated type\nit infers the name of the x-class value from its context", + "title": "A Giraffe is a struct that embeds BaseStruct", + "allOf": [ + { + "$ref": "#/definitions/animal" + }, + { + "type": "object", + "properties": { + "neckSize": { + "description": "NeckSize the size of the neck of this giraffe", + "type": "integer", + "format": "int64", + "x-go-name": "NeckSize" + } + } + } + ], + "x-go-name": "Giraffe", + "x-go-package": "github.com/go-openapi/codescan/fixtures/goparsing/classification/models" + } +} \ No newline at end of file diff --git a/fixtures/integration/golden/enhancements_alias_expand.json b/fixtures/integration/golden/enhancements_alias_expand.json new file mode 100644 index 0000000..d674961 --- /dev/null +++ b/fixtures/integration/golden/enhancements_alias_expand.json @@ -0,0 +1,187 @@ +{ + "swagger": "2.0", + "paths": {}, + "definitions": { + "AliasedTopParams": { + "description": "AliasedTopParams annotates an alias as the parameters set: the scanner\nmust resolve the alias via parameterBuilder.buildAlias.", + "type": "object", + "required": [ + "data" + ], + "properties": { + "data": { + "description": "in: body", + "type": "object", + "required": [ + "id" + ], + "properties": { + "id": { + "type": "integer", + "format": "int64", + "x-go-name": "ID" + }, + "name": { + "type": "string", + "x-go-name": "Name" + } + }, + "x-go-name": "Data" + }, + "search": { + 
"description": "in: query", + "type": "string", + "x-go-name": "Search" + } + }, + "x-go-package": "github.com/go-openapi/codescan/fixtures/enhancements/alias-expand" + }, + "AliasedTopParams2": { + "type": "object", + "title": "AliasedTopParams2 chains AliasedTopParams through a second alias layer.", + "required": [ + "data" + ], + "properties": { + "data": { + "description": "in: body", + "type": "object", + "required": [ + "id" + ], + "properties": { + "id": { + "type": "integer", + "format": "int64", + "x-go-name": "ID" + }, + "name": { + "type": "string", + "x-go-name": "Name" + } + }, + "x-go-name": "Data" + }, + "search": { + "description": "in: query", + "type": "string", + "x-go-name": "Search" + } + }, + "x-go-package": "github.com/go-openapi/codescan/fixtures/enhancements/alias-expand" + }, + "EnvelopeAlias2": { + "type": "object", + "title": "EnvelopeAlias2 aliases EnvelopeAlias (alias-of-alias).", + "properties": { + "payload": { + "$ref": "#/definitions/PayloadAlias" + } + }, + "x-go-package": "github.com/go-openapi/codescan/fixtures/enhancements/alias-expand" + }, + "Payload": { + "type": "object", + "title": "Payload is the canonical struct referenced by aliases.", + "required": [ + "id" + ], + "properties": { + "id": { + "type": "integer", + "format": "int64", + "x-go-name": "ID" + }, + "name": { + "type": "string", + "x-go-name": "Name" + } + }, + "x-go-package": "github.com/go-openapi/codescan/fixtures/enhancements/alias-expand" + }, + "PayloadAlias": { + "type": "object", + "title": "PayloadAlias aliases Payload once.", + "required": [ + "id" + ], + "properties": { + "id": { + "type": "integer", + "format": "int64", + "x-go-name": "ID" + }, + "name": { + "type": "string", + "x-go-name": "Name" + } + }, + "x-go-package": "github.com/go-openapi/codescan/fixtures/enhancements/alias-expand" + }, + "PayloadAlias2": { + "type": "object", + "title": "PayloadAlias2 aliases PayloadAlias (alias-of-alias chain).", + "required": [ + "id" + ], + 
"properties": { + "id": { + "type": "integer", + "format": "int64", + "x-go-name": "ID" + }, + "name": { + "type": "string", + "x-go-name": "Name" + } + }, + "x-go-package": "github.com/go-openapi/codescan/fixtures/enhancements/alias-expand" + }, + "QueryIDAlias": { + "type": "string", + "title": "QueryIDAlias aliases QueryID for a non-body parameter field.", + "x-go-package": "github.com/go-openapi/codescan/fixtures/enhancements/alias-expand" + }, + "ResponseEnvelope": { + "description": "ResponseEnvelope is the canonical struct referenced by aliases used in\nresponses.", + "type": "object", + "properties": { + "payload": { + "$ref": "#/definitions/PayloadAlias" + } + }, + "x-go-package": "github.com/go-openapi/codescan/fixtures/enhancements/alias-expand" + }, + "exportedParams": { + "type": "object", + "required": [ + "data" + ], + "properties": { + "data": { + "$ref": "#/definitions/Payload" + }, + "search": { + "description": "in: query", + "type": "string", + "x-go-name": "Search" + } + }, + "x-go-package": "github.com/go-openapi/codescan/fixtures/enhancements/alias-expand" + } + }, + "responses": { + "aliasedResponse": { + "description": "AliasedResponse has a body field whose type is an alias chain.", + "schema": { + "$ref": "#/definitions/EnvelopeAlias2" + } + }, + "namedTopResponse": { + "description": "NamedTopResponse is a plain struct annotated as a response — used to\nkeep a deterministic response in the expand-mode fixture even though\nresponse-level aliasing is deferred to the alias-response fixture.", + "schema": { + "$ref": "#/definitions/ResponseEnvelope" + } + } + } +} \ No newline at end of file diff --git a/fixtures/integration/golden/enhancements_alias_ref.json b/fixtures/integration/golden/enhancements_alias_ref.json new file mode 100644 index 0000000..7fdd722 --- /dev/null +++ b/fixtures/integration/golden/enhancements_alias_ref.json @@ -0,0 +1,99 @@ +{ + "swagger": "2.0", + "paths": {}, + "definitions": { + "AliasedTopParams": { + 
"description": "AliasedTopParams annotates an alias as the parameters set: the scanner\nmust resolve the alias via parameterBuilder.buildAlias.", + "$ref": "#/definitions/exportedParams" + }, + "AliasedTopParams2": { + "title": "AliasedTopParams2 chains AliasedTopParams through a second alias layer.", + "$ref": "#/definitions/AliasedTopParams" + }, + "EnvelopeAlias": { + "title": "EnvelopeAlias aliases ResponseEnvelope once.", + "$ref": "#/definitions/ResponseEnvelope" + }, + "EnvelopeAlias2": { + "title": "EnvelopeAlias2 aliases EnvelopeAlias (alias-of-alias).", + "$ref": "#/definitions/EnvelopeAlias" + }, + "Payload": { + "type": "object", + "title": "Payload is the canonical struct referenced by aliases.", + "required": [ + "id" + ], + "properties": { + "id": { + "type": "integer", + "format": "int64", + "x-go-name": "ID" + }, + "name": { + "type": "string", + "x-go-name": "Name" + } + }, + "x-go-package": "github.com/go-openapi/codescan/fixtures/enhancements/alias-expand" + }, + "PayloadAlias": { + "title": "PayloadAlias aliases Payload once.", + "$ref": "#/definitions/Payload" + }, + "PayloadAlias2": { + "title": "PayloadAlias2 aliases PayloadAlias (alias-of-alias chain).", + "$ref": "#/definitions/PayloadAlias" + }, + "QueryID": { + "type": "string", + "title": "QueryID is a named string used as the base of a non-body parameter alias.", + "x-go-package": "github.com/go-openapi/codescan/fixtures/enhancements/alias-expand" + }, + "QueryIDAlias": { + "title": "QueryIDAlias aliases QueryID for a non-body parameter field.", + "$ref": "#/definitions/QueryID" + }, + "ResponseEnvelope": { + "description": "ResponseEnvelope is the canonical struct referenced by aliases used in\nresponses.", + "type": "object", + "properties": { + "payload": { + "$ref": "#/definitions/PayloadAlias" + } + }, + "x-go-package": "github.com/go-openapi/codescan/fixtures/enhancements/alias-expand" + }, + "exportedParams": { + "type": "object", + "required": [ + "data" + ], + "properties": { 
+ "data": { + "$ref": "#/definitions/Payload" + }, + "search": { + "description": "in: query", + "type": "string", + "x-go-name": "Search" + } + }, + "x-go-package": "github.com/go-openapi/codescan/fixtures/enhancements/alias-expand" + } + }, + "responses": { + "aliasedResponse": { + "description": "AliasedResponse has a body field whose type is an alias chain.", + "schema": { + "$ref": "#/definitions/EnvelopeAlias2" + } + }, + "namedTopResponse": { + "description": "NamedTopResponse is a plain struct annotated as a response — used to\nkeep a deterministic response in the expand-mode fixture even though\nresponse-level aliasing is deferred to the alias-response fixture.", + "schema": { + "$ref": "#/definitions/ResponseEnvelope" + } + } + } +} \ No newline at end of file diff --git a/fixtures/integration/golden/enhancements_alias_response_ref.json b/fixtures/integration/golden/enhancements_alias_response_ref.json new file mode 100644 index 0000000..6e41336 --- /dev/null +++ b/fixtures/integration/golden/enhancements_alias_response_ref.json @@ -0,0 +1,51 @@ +{ + "swagger": "2.0", + "paths": {}, + "definitions": { + "AliasedTopResponse": { + "description": "AliasedTopResponse annotates an alias as the response: the scanner\nresolves it via responseBuilder.buildAlias under RefAliases=true.", + "$ref": "#/definitions/exportedResponse" + }, + "AliasedTopResponse2": { + "title": "AliasedTopResponse2 chains AliasedTopResponse through a second alias.", + "$ref": "#/definitions/AliasedTopResponse" + }, + "Envelope": { + "type": "object", + "title": "Envelope is the canonical response body type.", + "required": [ + "id" + ], + "properties": { + "id": { + "type": "integer", + "format": "int64", + "x-go-name": "ID" + }, + "name": { + "type": "string", + "x-go-name": "Name" + } + }, + "x-go-package": "github.com/go-openapi/codescan/fixtures/enhancements/alias-response" + }, + "exportedResponse": { + "type": "object", + "title": "exportedResponse is the backing struct for the 
aliased response.", + "properties": { + "body": { + "$ref": "#/definitions/Envelope" + } + }, + "x-go-package": "github.com/go-openapi/codescan/fixtures/enhancements/alias-response" + } + }, + "responses": { + "aliasedTopResponse": { + "description": "AliasedTopResponse annotates an alias as the response: the scanner\nresolves it via responseBuilder.buildAlias under RefAliases=true." + }, + "aliasedTopResponse2": { + "description": "AliasedTopResponse2 chains AliasedTopResponse through a second alias." + } + } +} \ No newline at end of file diff --git a/fixtures/integration/golden/enhancements_all_http_methods.json b/fixtures/integration/golden/enhancements_all_http_methods.json new file mode 100644 index 0000000..4231faa --- /dev/null +++ b/fixtures/integration/golden/enhancements_all_http_methods.json @@ -0,0 +1,91 @@ +{ + "swagger": "2.0", + "paths": { + "/items": { + "get": { + "tags": [ + "items" + ], + "summary": "Get an item by id.", + "operationId": "getItem", + "responses": { + "200": { + "description": " OK" + } + } + }, + "put": { + "tags": [ + "items" + ], + "summary": "Replace an item.", + "operationId": "putItem", + "responses": { + "200": { + "description": " OK" + } + } + }, + "post": { + "tags": [ + "items" + ], + "summary": "Create an item.", + "operationId": "postItem", + "responses": { + "201": { + "description": " Created" + } + } + }, + "delete": { + "tags": [ + "items" + ], + "summary": "Delete an item.", + "operationId": "deleteItem", + "responses": { + "204": { + "description": " No Content" + } + } + }, + "options": { + "tags": [ + "items" + ], + "summary": "Describe the supported HTTP methods.", + "operationId": "optionsItem", + "responses": { + "200": { + "description": " OK" + } + } + }, + "head": { + "tags": [ + "items" + ], + "summary": "Probe an item.", + "operationId": "headItem", + "responses": { + "200": { + "description": " OK" + } + } + }, + "patch": { + "tags": [ + "items" + ], + "summary": "Apply a partial update.", + 
"operationId": "patchItem", + "responses": { + "200": { + "description": " OK" + } + } + } + } + } +} \ No newline at end of file diff --git a/fixtures/integration/golden/enhancements_allof_edges.json b/fixtures/integration/golden/enhancements_allof_edges.json new file mode 100644 index 0000000..762602c --- /dev/null +++ b/fixtures/integration/golden/enhancements_allof_edges.json @@ -0,0 +1,100 @@ +{ + "swagger": "2.0", + "paths": {}, + "definitions": { + "AllOfInterface": { + "description": "AllOfInterface composes an allOf member whose underlying type is an\ninterface.", + "allOf": [ + { + "$ref": "#/definitions/Tagger" + }, + { + "type": "object", + "properties": { + "count": { + "type": "integer", + "format": "int64", + "x-go-name": "Count" + } + } + } + ], + "x-go-package": "github.com/go-openapi/codescan/fixtures/enhancements/allof-edges" + }, + "AllOfPointer": { + "title": "AllOfPointer composes a pointer allOf member.", + "allOf": [ + { + "$ref": "#/definitions/SimpleBase" + }, + { + "type": "object", + "properties": { + "extra": { + "type": "string", + "x-go-name": "Extra" + } + } + } + ], + "x-go-package": "github.com/go-openapi/codescan/fixtures/enhancements/allof-edges" + }, + "AllOfStdTime": { + "type": "string", + "title": "AllOfStdTime composes an allOf member that is time.Time.", + "x-go-package": "github.com/go-openapi/codescan/fixtures/enhancements/allof-edges" + }, + "AllOfStrfmt": { + "allOf": [ + { + "type": "string", + "format": "ulid" + }, + { + "type": "object", + "properties": { + "note": { + "type": "string", + "x-go-name": "Note" + } + } + } + ], + "x-go-package": "github.com/go-openapi/codescan/fixtures/enhancements/allof-edges" + }, + "SimpleBase": { + "type": "object", + "title": "SimpleBase is a plain struct used as an allOf member.", + "required": [ + "id" + ], + "properties": { + "id": { + "type": "string", + "x-go-name": "ID" + }, + "name": { + "type": "string", + "x-go-name": "Name" + } + }, + "x-go-package": 
"github.com/go-openapi/codescan/fixtures/enhancements/allof-edges" + }, + "Tagger": { + "type": "object", + "title": "Tagger is a non-empty named interface used as an allOf member.", + "properties": { + "Tag": { + "description": "Tag returns an identifier.", + "type": "string" + } + }, + "x-go-package": "github.com/go-openapi/codescan/fixtures/enhancements/allof-edges" + }, + "ULID": { + "description": "ULID is a named struct formatted as a swagger strfmt. Using a struct\nunderlying type exercises the strfmt branch of buildNamedAllOf for\nstruct members.", + "type": "object", + "x-go-package": "github.com/go-openapi/codescan/fixtures/enhancements/allof-edges" + } + } +} \ No newline at end of file diff --git a/fixtures/integration/golden/enhancements_defaults_examples.json b/fixtures/integration/golden/enhancements_defaults_examples.json new file mode 100644 index 0000000..c4f31ed --- /dev/null +++ b/fixtures/integration/golden/enhancements_defaults_examples.json @@ -0,0 +1,80 @@ +{ + "swagger": "2.0", + "paths": {}, + "definitions": { + "Metrics": { + "description": "Metrics aggregates fields that carry default and example tags for the\nnumeric, array and object branches of parseValueFromSchema.", + "type": "object", + "properties": { + "counts": { + "description": "Counts is a slice of integers with a JSON-array default.", + "type": "array", + "default": [ + 1, + 2, + 3 + ], + "items": { + "type": "integer", + "format": "int64" + }, + "x-go-name": "Counts", + "example": [ + 4, + 5 + ] + }, + "props": { + "description": "Props is a map represented as a JSON object.", + "type": "object", + "default": { + "k": 1 + }, + "additionalProperties": { + "type": "integer", + "format": "int64" + }, + "x-go-name": "Props", + "example": { + "q": 42, + "r": 7 + } + }, + "ratio": { + "description": "Ratio is a float32 value.", + "type": "number", + "format": "float", + "default": 1.5, + "x-go-name": "Ratio", + "example": 2.25 + }, + "tags": { + "description": "Tags is a slice 
with a JSON-array default and example.", + "type": "array", + "default": [ + "a", + "b" + ], + "items": { + "type": "string" + }, + "x-go-name": "Tags", + "example": [ + "x", + "y", + "z" + ] + }, + "weight": { + "description": "Weight is a float64 value.", + "type": "number", + "format": "double", + "default": 3.14, + "x-go-name": "Weight", + "example": 9.81 + } + }, + "x-go-package": "github.com/go-openapi/codescan/fixtures/enhancements/defaults-examples" + } + } +} \ No newline at end of file diff --git a/fixtures/integration/golden/enhancements_embedded_types.json b/fixtures/integration/golden/enhancements_embedded_types.json new file mode 100644 index 0000000..b46d244 --- /dev/null +++ b/fixtures/integration/golden/enhancements_embedded_types.json @@ -0,0 +1,112 @@ +{ + "swagger": "2.0", + "paths": {}, + "definitions": { + "Base": { + "type": "object", + "title": "Base is a plain struct used as the right-hand side of an alias.", + "required": [ + "id" + ], + "properties": { + "id": { + "type": "integer", + "format": "int64", + "x-go-name": "ID" + }, + "name": { + "type": "string", + "x-go-name": "Name" + } + }, + "x-go-package": "github.com/go-openapi/codescan/fixtures/enhancements/embedded-types" + }, + "BaseAlias": { + "description": "BaseAlias aliases Base so that embedding it exercises the alias branch\nof buildEmbedded.", + "type": "object", + "required": [ + "id" + ], + "properties": { + "id": { + "type": "integer", + "format": "int64", + "x-go-name": "ID" + }, + "name": { + "type": "string", + "x-go-name": "Name" + } + }, + "x-go-package": "github.com/go-openapi/codescan/fixtures/enhancements/embedded-types" + }, + "EmbedsAlias": { + "title": "EmbedsAlias embeds an aliased named struct.", + "allOf": [ + { + "$ref": "#/definitions/BaseAlias" + }, + { + "type": "object", + "properties": { + "extra": { + "type": "string", + "x-go-name": "Extra" + } + } + } + ], + "x-go-package": "github.com/go-openapi/codescan/fixtures/enhancements/embedded-types" + }, + 
"EmbedsEmptyNamedInterface": { + "type": "object", + "title": "EmbedsEmptyNamedInterface embeds a named empty interface.", + "properties": { + "value": { + "type": "string", + "x-go-name": "Value" + } + }, + "x-go-package": "github.com/go-openapi/codescan/fixtures/enhancements/embedded-types" + }, + "EmbedsError": { + "description": "EmbedsError embeds the predeclared error interface, exercising the\nisStdError branch of buildNamedEmbedded.", + "type": "object", + "properties": { + "code": { + "type": "integer", + "format": "int64", + "x-go-name": "Code" + } + }, + "x-go-package": "github.com/go-openapi/codescan/fixtures/enhancements/embedded-types", + "x-go-type": "error" + }, + "EmbedsNamedInterface": { + "type": "object", + "title": "EmbedsNamedInterface embeds a non-empty, non-error named interface.", + "properties": { + "Handle": { + "description": "Handle is a unary method exposed as a schema property.", + "type": "string" + }, + "tag": { + "type": "string", + "x-go-name": "Tag" + } + }, + "x-go-package": "github.com/go-openapi/codescan/fixtures/enhancements/embedded-types" + }, + "Handler": { + "type": "object", + "title": "Handler is a non-empty named interface with a single exported method.", + "properties": { + "Handle": { + "description": "Handle is a unary method exposed as a schema property.", + "type": "string" + } + }, + "x-go-package": "github.com/go-openapi/codescan/fixtures/enhancements/embedded-types" + } + } +} \ No newline at end of file diff --git a/fixtures/integration/golden/enhancements_enum_docs.json b/fixtures/integration/golden/enhancements_enum_docs.json new file mode 100644 index 0000000..c85d25a --- /dev/null +++ b/fixtures/integration/golden/enhancements_enum_docs.json @@ -0,0 +1,42 @@ +{ + "swagger": "2.0", + "paths": {}, + "definitions": { + "Notification": { + "description": "Notification holds both enums so that the scanner emits schemas with\nthe enriched descriptions produced by findEnumValue.", + "type": "object", + 
"required": [ + "id" + ], + "properties": { + "channel": { + "description": "The delivery channel.\nemail ChannelEmail ChannelSMS ChannelEmail and ChannelSMS share a single spec.\npush ChannelPush ChannelPush is the push notification channel.", + "type": "string", + "enum": [ + "email", + "push" + ], + "x-go-enum-desc": "email ChannelEmail ChannelSMS ChannelEmail and ChannelSMS share a single spec.\npush ChannelPush ChannelPush is the push notification channel.", + "x-go-name": "Channel" + }, + "id": { + "type": "integer", + "format": "int64", + "x-go-name": "ID" + }, + "priority": { + "description": "The priority level.\nlow PriorityLow PriorityLow is a low-priority level.\nmedium PriorityMed PriorityMed is a medium-priority level.\nhigh PriorityHigh PriorityHigh is a high-priority level.", + "type": "string", + "enum": [ + "low", + "medium", + "high" + ], + "x-go-enum-desc": "low PriorityLow PriorityLow is a low-priority level.\nmedium PriorityMed PriorityMed is a medium-priority level.\nhigh PriorityHigh PriorityHigh is a high-priority level.", + "x-go-name": "Priority" + } + }, + "x-go-package": "github.com/go-openapi/codescan/fixtures/enhancements/enum-docs" + } + } +} \ No newline at end of file diff --git a/fixtures/integration/golden/enhancements_input_overlay.json b/fixtures/integration/golden/enhancements_input_overlay.json new file mode 100644 index 0000000..c1bb2d5 --- /dev/null +++ b/fixtures/integration/golden/enhancements_input_overlay.json @@ -0,0 +1,32 @@ +{ + "swagger": "2.0", + "info": { + "title": "Overlay", + "version": "0.0.1" + }, + "paths": { + "/items": { + "get": { + "operationId": "listItems" + }, + "put": { + "operationId": "replaceItem" + }, + "post": { + "operationId": "createItem" + }, + "delete": { + "operationId": "deleteItem" + }, + "options": { + "operationId": "optionsItem" + }, + "head": { + "operationId": "checkItem" + }, + "patch": { + "operationId": "patchItem" + } + } + } +} \ No newline at end of file diff --git 
a/fixtures/integration/golden/enhancements_interface_methods.json b/fixtures/integration/golden/enhancements_interface_methods.json new file mode 100644 index 0000000..0020f0d --- /dev/null +++ b/fixtures/integration/golden/enhancements_interface_methods.json @@ -0,0 +1,119 @@ +{ + "swagger": "2.0", + "paths": {}, + "definitions": { + "Audited": { + "type": "object", + "properties": { + "CreatedAt": { + "description": "CreatedAt is the creation timestamp.", + "type": "string", + "format": "date-time" + }, + "UpdatedAt": { + "description": "UpdatedAt is the update timestamp.", + "type": "string", + "format": "date-time" + } + }, + "x-go-package": "github.com/go-openapi/codescan/fixtures/enhancements/interface-methods" + }, + "Public": { + "description": "Public exposes just a single scalar so we get a minimal, deterministic\ncompanion to assert the default code path.", + "type": "object", + "properties": { + "Kind": { + "description": "Kind names the public flavor.", + "type": "string" + } + }, + "x-go-package": "github.com/go-openapi/codescan/fixtures/enhancements/interface-methods" + }, + "UserProfile": { + "description": "UserProfile is a read-only view over a user, exposed as a schema via its\nmethod set. 
Each method exercises a distinct branch of\nprocessInterfaceMethod.", + "allOf": [ + { + "$ref": "#/definitions/Audited" + }, + { + "type": "object", + "required": [ + "ID" + ], + "properties": { + "Bio": { + "description": "Bio is a nullable pointer string.", + "type": "string" + }, + "Email": { + "description": "Email is formatted as an email strfmt.", + "type": "string", + "format": "email" + }, + "ID": { + "description": "ID is the user identifier.", + "type": "integer", + "format": "int64", + "minimum": 1 + }, + "Profile": { + "description": "Profile returns nested structured data.", + "type": "object", + "additionalProperties": { + "type": "string" + } + }, + "Tags": { + "description": "Tags returns the user's labels.", + "type": "array", + "items": { + "type": "string" + } + }, + "fullName": { + "description": "Name is re-exposed as \"fullName\" in JSON.", + "type": "string", + "x-go-name": "Name" + } + } + } + ], + "x-go-package": "github.com/go-openapi/codescan/fixtures/enhancements/interface-methods" + }, + "WithAnonEmbed": { + "description": "WithAnonEmbed embeds an anonymous inline interface so that the scanner\nwalks processAnonInterfaceMethod for its methods. 
This exercises the\nbuildAnonymousInterface call site inside processEmbeddedType.", + "allOf": [ + { + "type": "object", + "properties": { + "ExternalID": { + "description": "ExternalID is tagged as uuid so the anon-method strfmt branch\nis exercised.", + "type": "string", + "format": "uuid" + }, + "Revision": { + "description": "Revision is a nullable pointer return for x-nullable coverage.", + "type": "integer", + "format": "int64" + }, + "audit": { + "description": "AuditTrail is exposed via the anonymous embedded interface.", + "type": "string", + "x-go-name": "AuditTrail" + } + } + }, + { + "type": "object", + "properties": { + "Kind": { + "description": "Kind names the root flavor.", + "type": "string" + } + } + } + ], + "x-go-package": "github.com/go-openapi/codescan/fixtures/enhancements/interface-methods" + } + } +} \ No newline at end of file diff --git a/fixtures/integration/golden/enhancements_interface_methods_xnullable.json b/fixtures/integration/golden/enhancements_interface_methods_xnullable.json new file mode 100644 index 0000000..a72e999 --- /dev/null +++ b/fixtures/integration/golden/enhancements_interface_methods_xnullable.json @@ -0,0 +1,121 @@ +{ + "swagger": "2.0", + "paths": {}, + "definitions": { + "Audited": { + "type": "object", + "properties": { + "CreatedAt": { + "description": "CreatedAt is the creation timestamp.", + "type": "string", + "format": "date-time" + }, + "UpdatedAt": { + "description": "UpdatedAt is the update timestamp.", + "type": "string", + "format": "date-time" + } + }, + "x-go-package": "github.com/go-openapi/codescan/fixtures/enhancements/interface-methods" + }, + "Public": { + "description": "Public exposes just a single scalar so we get a minimal, deterministic\ncompanion to assert the default code path.", + "type": "object", + "properties": { + "Kind": { + "description": "Kind names the public flavor.", + "type": "string" + } + }, + "x-go-package": 
"github.com/go-openapi/codescan/fixtures/enhancements/interface-methods" + }, + "UserProfile": { + "description": "UserProfile is a read-only view over a user, exposed as a schema via its\nmethod set. Each method exercises a distinct branch of\nprocessInterfaceMethod.", + "allOf": [ + { + "$ref": "#/definitions/Audited" + }, + { + "type": "object", + "required": [ + "ID" + ], + "properties": { + "Bio": { + "description": "Bio is a nullable pointer string.", + "type": "string", + "x-nullable": true + }, + "Email": { + "description": "Email is formatted as an email strfmt.", + "type": "string", + "format": "email" + }, + "ID": { + "description": "ID is the user identifier.", + "type": "integer", + "format": "int64", + "minimum": 1 + }, + "Profile": { + "description": "Profile returns nested structured data.", + "type": "object", + "additionalProperties": { + "type": "string" + } + }, + "Tags": { + "description": "Tags returns the user's labels.", + "type": "array", + "items": { + "type": "string" + } + }, + "fullName": { + "description": "Name is re-exposed as \"fullName\" in JSON.", + "type": "string", + "x-go-name": "Name" + } + } + } + ], + "x-go-package": "github.com/go-openapi/codescan/fixtures/enhancements/interface-methods" + }, + "WithAnonEmbed": { + "description": "WithAnonEmbed embeds an anonymous inline interface so that the scanner\nwalks processAnonInterfaceMethod for its methods. 
This exercises the\nbuildAnonymousInterface call site inside processEmbeddedType.", + "allOf": [ + { + "type": "object", + "properties": { + "ExternalID": { + "description": "ExternalID is tagged as uuid so the anon-method strfmt branch\nis exercised.", + "type": "string", + "format": "uuid" + }, + "Revision": { + "description": "Revision is a nullable pointer return for x-nullable coverage.", + "type": "integer", + "format": "int64", + "x-nullable": true + }, + "audit": { + "description": "AuditTrail is exposed via the anonymous embedded interface.", + "type": "string", + "x-go-name": "AuditTrail" + } + } + }, + { + "type": "object", + "properties": { + "Kind": { + "description": "Kind names the root flavor.", + "type": "string" + } + } + } + ], + "x-go-package": "github.com/go-openapi/codescan/fixtures/enhancements/interface-methods" + } + } +} \ No newline at end of file diff --git a/fixtures/integration/golden/enhancements_named_basic.json b/fixtures/integration/golden/enhancements_named_basic.json new file mode 100644 index 0000000..abe51bf --- /dev/null +++ b/fixtures/integration/golden/enhancements_named_basic.json @@ -0,0 +1,33 @@ +{ + "swagger": "2.0", + "paths": {}, + "definitions": { + "User": { + "description": "User embeds the three named basic types above so that the full scan\nwalks buildNamedBasic for each field.", + "type": "object", + "required": [ + "id" + ], + "properties": { + "colour": { + "type": "string", + "x-go-name": "Colour" + }, + "email": { + "type": "string", + "format": "email", + "x-go-name": "Email" + }, + "grade": { + "x-go-name": "Grade" + }, + "id": { + "type": "integer", + "format": "int64", + "x-go-name": "ID" + } + }, + "x-go-package": "github.com/go-openapi/codescan/fixtures/enhancements/named-basic" + } + } +} \ No newline at end of file diff --git a/fixtures/integration/golden/enhancements_named_struct_tags.json b/fixtures/integration/golden/enhancements_named_struct_tags.json new file mode 100644 index 0000000..187cb2f 
--- /dev/null +++ b/fixtures/integration/golden/enhancements_named_struct_tags.json @@ -0,0 +1,43 @@ +{ + "swagger": "2.0", + "paths": {}, + "definitions": { + "Contact": { + "description": "Contact references both tagged struct types so the scanner walks the\nbuildNamedStruct strfmt and typeName branches on distinct fields.", + "type": "object", + "required": [ + "id" + ], + "properties": { + "code": { + "type": "string", + "x-go-name": "Code" + }, + "id": { + "type": "integer", + "format": "int64", + "x-go-name": "ID" + }, + "phone": { + "type": "string", + "format": "phone", + "x-go-name": "Phone" + } + }, + "x-go-package": "github.com/go-openapi/codescan/fixtures/enhancements/named-struct-tags" + }, + "PhoneNumber": { + "description": "PhoneNumber is a named struct annotated as a strfmt. When used as a\nfield type the scanner emits {type: \"string\", format: \"phone\"} via the\nstrfmt branch of buildNamedStruct.", + "type": "object", + "properties": { + "CountryCode": { + "type": "string" + }, + "Number": { + "type": "string" + } + }, + "x-go-package": "github.com/go-openapi/codescan/fixtures/enhancements/named-struct-tags" + } + } +} \ No newline at end of file diff --git a/fixtures/integration/golden/enhancements_pointers_no_xnullable.json b/fixtures/integration/golden/enhancements_pointers_no_xnullable.json new file mode 100644 index 0000000..1862437 --- /dev/null +++ b/fixtures/integration/golden/enhancements_pointers_no_xnullable.json @@ -0,0 +1,60 @@ +{ + "Item": { + "type": "object", + "properties": { + "Value1": { + "type": "integer", + "format": "int64" + }, + "Value2": { + "type": "integer", + "format": "int64" + }, + "Value3": { + "type": "integer", + "format": "int64", + "x-nullable": false + }, + "Value4": { + "type": "integer", + "format": "int64", + "x-isnullable": false + }, + "Value5": { + "type": "integer", + "format": "int64" + } + }, + "x-go-package": "github.com/go-openapi/codescan/fixtures/enhancements/pointers-nullable-by-default" + }, + 
"ItemInterface": { + "type": "object", + "properties": { + "Value1": { + "type": "integer", + "format": "int64" + }, + "Value2": { + "type": "integer", + "format": "int64" + }, + "Value3": { + "description": "Value3 is a nullable value", + "type": "integer", + "format": "int64", + "x-nullable": false + }, + "Value4": { + "description": "Value4 is a non-nullable value", + "type": "integer", + "format": "int64", + "x-isnullable": false + }, + "Value5": { + "type": "integer", + "format": "int64" + } + }, + "x-go-package": "github.com/go-openapi/codescan/fixtures/enhancements/pointers-nullable-by-default" + } +} \ No newline at end of file diff --git a/fixtures/integration/golden/enhancements_pointers_xnullable.json b/fixtures/integration/golden/enhancements_pointers_xnullable.json new file mode 100644 index 0000000..d27b827 --- /dev/null +++ b/fixtures/integration/golden/enhancements_pointers_xnullable.json @@ -0,0 +1,62 @@ +{ + "Item": { + "type": "object", + "properties": { + "Value1": { + "type": "integer", + "format": "int64", + "x-nullable": true + }, + "Value2": { + "type": "integer", + "format": "int64" + }, + "Value3": { + "type": "integer", + "format": "int64", + "x-nullable": false + }, + "Value4": { + "type": "integer", + "format": "int64", + "x-isnullable": false + }, + "Value5": { + "type": "integer", + "format": "int64" + } + }, + "x-go-package": "github.com/go-openapi/codescan/fixtures/enhancements/pointers-nullable-by-default" + }, + "ItemInterface": { + "type": "object", + "properties": { + "Value1": { + "type": "integer", + "format": "int64", + "x-nullable": true + }, + "Value2": { + "type": "integer", + "format": "int64" + }, + "Value3": { + "description": "Value3 is a nullable value", + "type": "integer", + "format": "int64", + "x-nullable": false + }, + "Value4": { + "description": "Value4 is a non-nullable value", + "type": "integer", + "format": "int64", + "x-isnullable": false + }, + "Value5": { + "type": "integer", + "format": "int64" + } + }, 
+ "x-go-package": "github.com/go-openapi/codescan/fixtures/enhancements/pointers-nullable-by-default" + } +} \ No newline at end of file diff --git a/fixtures/integration/golden/enhancements_ref_alias_chain.json b/fixtures/integration/golden/enhancements_ref_alias_chain.json new file mode 100644 index 0000000..e1b820d --- /dev/null +++ b/fixtures/integration/golden/enhancements_ref_alias_chain.json @@ -0,0 +1,68 @@ +{ + "swagger": "2.0", + "paths": {}, + "definitions": { + "BaseBody": { + "type": "object", + "title": "BaseBody is the concrete named struct at the bottom of the alias chain.", + "required": [ + "id" + ], + "properties": { + "id": { + "type": "integer", + "format": "int64", + "x-go-name": "ID" + }, + "name": { + "type": "string", + "x-go-name": "Name" + } + }, + "x-go-package": "github.com/go-openapi/codescan/fixtures/enhancements/ref-alias-chain" + }, + "Envelope": { + "description": "Envelope references the aliases via its fields so the scanner also\nwalks the schemaBuilder.buildAlias path for each chain member.", + "type": "object", + "properties": { + "createdAt": { + "type": "string", + "x-go-name": "CreatedAt" + }, + "first": { + "$ref": "#/definitions/LinkA" + }, + "meta": { + "$ref": "#/definitions/Wildcard" + }, + "second": { + "$ref": "#/definitions/LinkB" + } + }, + "x-go-package": "github.com/go-openapi/codescan/fixtures/enhancements/ref-alias-chain" + }, + "LinkA": { + "title": "LinkA is a direct alias of BaseBody.", + "$ref": "#/definitions/BaseBody" + }, + "LinkB": { + "title": "LinkB is an alias of an alias — chain depth two.", + "$ref": "#/definitions/LinkA" + }, + "Time": { + "description": "Programs using times should typically store and pass them as values,\nnot pointers. 
That is, time variables and struct fields should be of\ntype [time.Time], not *time.Time.\n\nA Time value can be used by multiple goroutines simultaneously except\nthat the methods [Time.GobDecode], [Time.UnmarshalBinary], [Time.UnmarshalJSON] and\n[Time.UnmarshalText] are not concurrency-safe.\n\nTime instants can be compared using the [Time.Before], [Time.After], and [Time.Equal] methods.\nThe [Time.Sub] method subtracts two instants, producing a [Duration].\nThe [Time.Add] method adds a Time and a Duration, producing a Time.\n\nThe zero value of type Time is January 1, year 1, 00:00:00.000000000 UTC.\nAs this time is unlikely to come up in practice, the [Time.IsZero] method gives\na simple way of detecting a time that has not been initialized explicitly.\n\nEach time has an associated [Location]. The methods [Time.Local], [Time.UTC], and Time.In return a\nTime with a specific Location. Changing the Location of a Time value with\nthese methods does not change the actual instant it represents, only the time\nzone in which to interpret it.\n\nRepresentations of a Time value saved by the [Time.GobEncode], [Time.MarshalBinary], [Time.AppendBinary],\n[Time.MarshalJSON], [Time.MarshalText] and [Time.AppendText] methods store the [Time.Location]'s offset,\nbut not the location name. They therefore lose information about Daylight Saving Time.\n\nIn addition to the required “wall clock” reading, a Time may contain an optional\nreading of the current process's monotonic clock, to provide additional precision\nfor comparison or subtraction.\nSee the “Monotonic Clocks” section in the package documentation for details.\n\nNote that the Go == operator compares not just the time instant but also the\nLocation and the monotonic clock reading. 
Therefore, Time values should not\nbe used as map or database keys without first guaranteeing that the\nidentical Location has been set for all values, which can be achieved\nthrough use of the UTC or Local method, and that the monotonic clock reading\nhas been stripped by setting t = t.Round(0). In general, prefer t.Equal(u)\nto t == u, since t.Equal uses the most accurate comparison available and\ncorrectly handles the case when only one of its arguments has a monotonic\nclock reading.", + "type": "string", + "format": "date-time", + "title": "A Time represents an instant in time with nanosecond precision.", + "x-go-package": "time" + }, + "Timestamp": { + "description": "Timestamp aliases time.Time so that buildDeclAlias takes its isStdTime\nbranch.", + "$ref": "#/definitions/Time" + }, + "Wildcard": { + "title": "Wildcard aliases any so that buildDeclAlias takes its isAny branch.", + "x-go-package": "github.com/go-openapi/codescan/fixtures/enhancements/ref-alias-chain" + } + } +} \ No newline at end of file diff --git a/fixtures/integration/golden/enhancements_response_edges.json b/fixtures/integration/golden/enhancements_response_edges.json new file mode 100644 index 0000000..fdaccea --- /dev/null +++ b/fixtures/integration/golden/enhancements_response_edges.json @@ -0,0 +1,60 @@ +{ + "swagger": "2.0", + "paths": {}, + "definitions": { + "Body": { + "type": "object", + "title": "Body is the canonical body for the full response.", + "required": [ + "id" + ], + "properties": { + "id": { + "type": "integer", + "format": "int64", + "x-go-name": "ID" + }, + "payload": { + "type": "string", + "x-go-name": "Payload" + } + }, + "x-go-package": "github.com/go-openapi/codescan/fixtures/enhancements/response-edges" + } + }, + "responses": { + "fullResponse": { + "description": "FullResponse carries headers (embedded + inline) and a body field so\nthat buildFromStruct, processResponseField and buildNamedField are all\nexercised in a single scan.", + "schema": { + "$ref": 
"#/definitions/Body" + }, + "headers": { + "X-Rate-Limit": { + "type": "integer", + "format": "int64", + "description": "The request rate-limit window.\n\nin: header" + }, + "X-Timestamp": { + "type": "string", + "format": "date-time", + "description": "The server-side timestamp for this response.\n\nin: header" + }, + "X-Trace-ID": { + "type": "string", + "format": "uuid", + "description": "The request trace identifier.\n\nin: header" + } + } + }, + "idsResponse": { + "description": "IDs is a named slice so that responseBuilder.buildNamedType walks its\nnon-struct default branch.", + "schema": { + "type": "array", + "items": { + "type": "integer", + "format": "int64" + } + } + } + } +} \ No newline at end of file diff --git a/fixtures/integration/golden/enhancements_strfmt_arrays.json b/fixtures/integration/golden/enhancements_strfmt_arrays.json new file mode 100644 index 0000000..6be1622 --- /dev/null +++ b/fixtures/integration/golden/enhancements_strfmt_arrays.json @@ -0,0 +1,47 @@ +{ + "swagger": "2.0", + "paths": {}, + "definitions": { + "Carrier": { + "type": "object", + "title": "Carrier embeds all of the named array and slice types above.", + "required": [ + "hash" + ], + "properties": { + "blob": { + "type": "string", + "format": "byte", + "x-go-name": "Blob" + }, + "hash": { + "type": "string", + "format": "byte", + "x-go-name": "Hash" + }, + "objectId": { + "type": "string", + "format": "bsonobjectid", + "x-go-name": "ObjectID" + }, + "signature": { + "type": "array", + "items": { + "type": "string", + "format": "password" + }, + "x-go-name": "Signature" + }, + "token": { + "type": "array", + "items": { + "type": "string", + "format": "uuid" + }, + "x-go-name": "Token" + } + }, + "x-go-package": "github.com/go-openapi/codescan/fixtures/enhancements/strfmt-arrays" + } + } +} \ No newline at end of file diff --git a/fixtures/integration/golden/enhancements_text_marshal.json b/fixtures/integration/golden/enhancements_text_marshal.json new file mode 100644 
index 0000000..3e483bd --- /dev/null +++ b/fixtures/integration/golden/enhancements_text_marshal.json @@ -0,0 +1,31 @@ +{ + "swagger": "2.0", + "paths": {}, + "definitions": { + "Device": { + "description": "Device aggregates all three text-marshalable fields so that a full scan\nwalks buildFromTextMarshal for each.", + "type": "object", + "required": [ + "id" + ], + "properties": { + "data": { + "type": "string", + "x-go-name": "Data", + "x-go-type": "github.com/go-openapi/codescan/fixtures/enhancements/text-marshal.Opaque" + }, + "id": { + "type": "string", + "format": "uuid", + "x-go-name": "ID" + }, + "mac": { + "type": "string", + "format": "so", + "x-go-name": "MAC" + } + }, + "x-go-package": "github.com/go-openapi/codescan/fixtures/enhancements/text-marshal" + } + } +} \ No newline at end of file diff --git a/fixtures/integration/golden/enhancements_top_level_kinds.json b/fixtures/integration/golden/enhancements_top_level_kinds.json new file mode 100644 index 0000000..93f1d2a --- /dev/null +++ b/fixtures/integration/golden/enhancements_top_level_kinds.json @@ -0,0 +1,59 @@ +{ + "swagger": "2.0", + "paths": {}, + "definitions": { + "IgnoredModel": { + "type": "object", + "properties": { + "value": { + "type": "integer", + "format": "int64", + "x-go-name": "Value" + } + }, + "x-go-package": "github.com/go-openapi/codescan/fixtures/enhancements/top-level-kinds" + }, + "MyArray": { + "type": "array", + "title": "MyArray is a top-level named array.", + "items": { + "type": "string" + }, + "x-go-package": "github.com/go-openapi/codescan/fixtures/enhancements/top-level-kinds" + }, + "MyInt": { + "type": "integer", + "format": "int64", + "title": "MyInt is a top-level named basic integer type.", + "x-go-package": "github.com/go-openapi/codescan/fixtures/enhancements/top-level-kinds" + }, + "MyInterface": { + "type": "object", + "title": "MyInterface is a top-level named interface.", + "properties": { + "Identify": { + "description": "Identify returns the name of 
this object.", + "type": "string" + } + }, + "x-go-package": "github.com/go-openapi/codescan/fixtures/enhancements/top-level-kinds" + }, + "MyMap": { + "type": "object", + "title": "MyMap is a top-level named map.", + "additionalProperties": { + "type": "integer", + "format": "int64" + }, + "x-go-package": "github.com/go-openapi/codescan/fixtures/enhancements/top-level-kinds" + }, + "MySlice": { + "type": "array", + "title": "MySlice is a top-level named slice.", + "items": { + "type": "string" + }, + "x-go-package": "github.com/go-openapi/codescan/fixtures/enhancements/top-level-kinds" + } + } +} \ No newline at end of file diff --git a/fixtures/integration/golden/go118_params_issue2011.json b/fixtures/integration/golden/go118_params_issue2011.json new file mode 100644 index 0000000..3b0d71c --- /dev/null +++ b/fixtures/integration/golden/go118_params_issue2011.json @@ -0,0 +1,13 @@ +{ + "putNumPlate": { + "operationId": "putNumPlate", + "parameters": [ + { + "x-go-name": "NumPlates", + "name": "num_plates", + "in": "body", + "schema": {} + } + ] + } +} \ No newline at end of file diff --git a/fixtures/integration/golden/go118_responses_issue2011.json b/fixtures/integration/golden/go118_responses_issue2011.json new file mode 100644 index 0000000..ba6aae8 --- /dev/null +++ b/fixtures/integration/golden/go118_responses_issue2011.json @@ -0,0 +1,6 @@ +{ + "NumPlatesResp": { + "description": "", + "schema": {} + } +} \ No newline at end of file diff --git a/fixtures/integration/golden/go118_schema_Interfaced.json b/fixtures/integration/golden/go118_schema_Interfaced.json new file mode 100644 index 0000000..421c766 --- /dev/null +++ b/fixtures/integration/golden/go118_schema_Interfaced.json @@ -0,0 +1,12 @@ +{ + "Interfaced": { + "description": "An Interfaced struct contains objects with interface definitions", + "type": "object", + "properties": { + "custom_data": { + "x-go-name": "CustomData" + } + }, + "x-go-package": 
"github.com/go-openapi/codescan/fixtures/goparsing/go118" + } +} \ No newline at end of file diff --git a/fixtures/integration/golden/go118_schema_NamedWithType.json b/fixtures/integration/golden/go118_schema_NamedWithType.json new file mode 100644 index 0000000..b906e85 --- /dev/null +++ b/fixtures/integration/golden/go118_schema_NamedWithType.json @@ -0,0 +1,13 @@ +{ + "namedWithType": { + "type": "object", + "properties": { + "some_map": { + "type": "object", + "x-go-name": "SomeMap" + } + }, + "x-go-name": "NamedWithType", + "x-go-package": "github.com/go-openapi/codescan/fixtures/goparsing/go118" + } +} \ No newline at end of file diff --git a/fixtures/integration/golden/go118_schema_aliased.json b/fixtures/integration/golden/go118_schema_aliased.json new file mode 100644 index 0000000..5858fe2 --- /dev/null +++ b/fixtures/integration/golden/go118_schema_aliased.json @@ -0,0 +1,8 @@ +{ + "SomeObject": { + "description": "SomeObject is a type that refines an untyped map", + "type": "object", + "additionalProperties": {}, + "x-go-package": "github.com/go-openapi/codescan/fixtures/goparsing/go118" + } +} \ No newline at end of file diff --git a/fixtures/integration/golden/go118_schema_transportErr.json b/fixtures/integration/golden/go118_schema_transportErr.json new file mode 100644 index 0000000..c075a3f --- /dev/null +++ b/fixtures/integration/golden/go118_schema_transportErr.json @@ -0,0 +1,20 @@ +{ + "transportErr": { + "type": "object", + "required": [ + "message" + ], + "properties": { + "data": { + "description": "Data is additional data about the error.", + "x-go-name": "Data" + }, + "message": { + "description": "Message is a human-readable description of the error.", + "type": "string", + "x-go-name": "Message" + } + }, + "x-go-package": "github.com/go-openapi/codescan/fixtures/goparsing/go118" + } +} \ No newline at end of file diff --git a/fixtures/integration/golden/go119_operations.json b/fixtures/integration/golden/go119_operations.json new file 
mode 100644 index 0000000..9da4262 --- /dev/null +++ b/fixtures/integration/golden/go119_operations.json @@ -0,0 +1,58 @@ +{ + "/api/v1/somefunc": { + "post": { + "description": "Do something", + "operationId": "someFunc", + "responses": { + "200": { + "description": "Some func", + "examples": { + "application/json": { + "key": "value" + } + } + }, + "400": { + "$ref": "#/responses/ErrorResponse" + }, + "503": { + "$ref": "#/responses/ErrorResponse" + } + }, + "x-codeSamples": [ + { + "lang": "curl", + "source": "curl -u \"${LOGIN}:${PASSWORD}\" -d '{\"key\": \"value\"}' -X POST \"https://{host}/api/v1/somefunc\"\ncurl -u \"${LOGIN}:${PASSWORD}\" -d '{\"key2\": \"value2\"}' -X POST \"https://{host}/api/v1/somefunc\"\n" + } + ] + } + }, + "/api/v1/somefuncTabs": { + "post": { + "description": "Do something", + "operationId": "someFuncTabs", + "responses": { + "200": { + "description": "Some func", + "examples": { + "application/json": { + "key": "value" + } + } + }, + "400": { + "$ref": "#/responses/ErrorResponse" + }, + "503": { + "$ref": "#/responses/ErrorResponse" + } + }, + "x-codeSamples": [ + { + "lang": "curl", + "source": "curl -u \"${LOGIN}:${PASSWORD}\" -d '{\"key\": \"value\"}' -X POST \"https://{host}/api/v1/somefunc\"\ncurl -u \"${LOGIN}:${PASSWORD}\" -d '{\"key2\": \"value2\"}' -X POST \"https://{host}/api/v1/somefunc\"\n" + } + ] + } + } +} \ No newline at end of file diff --git a/fixtures/integration/golden/go123_aliased_spec.json b/fixtures/integration/golden/go123_aliased_spec.json new file mode 100644 index 0000000..aebd157 --- /dev/null +++ b/fixtures/integration/golden/go123_aliased_spec.json @@ -0,0 +1,318 @@ +{ + "swagger": "2.0", + "paths": {}, + "definitions": { + "Anything": { + "title": "Anything should be discovered through dependency analysis.", + "x-go-package": "github.com/go-openapi/codescan/fixtures/goparsing/go123/aliased/schema" + }, + "Empty": { + "type": "object", + "title": "Empty should be discovered through dependency 
analysis.", + "x-go-package": "github.com/go-openapi/codescan/fixtures/goparsing/go123/aliased/schema" + }, + "ExtendedID": { + "title": "ExtendedID should be discovered through dependency analysis.", + "allOf": [ + { + "$ref": "#/definitions/Empty" + }, + { + "type": "object", + "properties": { + "EvenMore": {}, + "StillMore": {}, + "more": { + "type": "string", + "x-go-name": "More" + } + } + } + ], + "x-go-package": "github.com/go-openapi/codescan/fixtures/goparsing/go123/aliased/schema" + }, + "ShouldSee": { + "type": "boolean", + "x-go-package": "github.com/go-openapi/codescan/fixtures/goparsing/go123/aliased/schema" + }, + "UUID": { + "type": "integer", + "format": "int64", + "title": "UUID should be discovered through dependency analysis.", + "x-go-package": "github.com/go-openapi/codescan/fixtures/goparsing/go123/aliased/schema" + }, + "aliased_id": { + "$ref": "#/definitions/ExtendedID" + }, + "anonymous_iface": { + "type": "object", + "properties": { + "String": { + "type": "string" + } + }, + "x-go-name": "AnonymousIface", + "x-go-package": "github.com/go-openapi/codescan/fixtures/goparsing/go123/aliased/schema" + }, + "anonymous_iface_alias": { + "$ref": "#/definitions/anonymous_iface" + }, + "anonymous_struct": { + "type": "object", + "properties": { + "A": { + "type": "object", + "properties": { + "B": { + "type": "integer", + "format": "int64" + } + } + } + }, + "x-go-name": "AnonymousStruct", + "x-go-package": "github.com/go-openapi/codescan/fixtures/goparsing/go123/aliased/schema" + }, + "embedded_with_alias": { + "allOf": [ + { + "$ref": "#/definitions/Anything" + }, + { + "$ref": "#/definitions/UUID" + }, + { + "type": "object", + "properties": { + "EvenMore": {}, + "StillMore": {}, + "more": { + "type": "string", + "x-go-name": "More" + } + } + } + ], + "x-go-name": "EmbeddedWithAlias", + "x-go-package": "github.com/go-openapi/codescan/fixtures/goparsing/go123/aliased/schema" + }, + "empty_redefinition": { + "type": "object", + "x-go-name": 
"EmptyRedefinition", + "x-go-package": "github.com/go-openapi/codescan/fixtures/goparsing/go123/aliased/schema" + }, + "iface": { + "type": "object", + "properties": { + "Get": { + "type": "string" + } + }, + "x-go-name": "Iface", + "x-go-package": "github.com/go-openapi/codescan/fixtures/goparsing/go123/aliased/schema" + }, + "iface_alias": { + "$ref": "#/definitions/iface" + }, + "iface_embedded": { + "allOf": [ + { + "type": "object", + "properties": { + "Get": { + "type": "string" + } + } + }, + { + "type": "object", + "properties": { + "Dump": { + "type": "array", + "items": { + "type": "integer", + "format": "uint8" + } + } + } + } + ], + "x-go-name": "IfaceEmbedded", + "x-go-package": "github.com/go-openapi/codescan/fixtures/goparsing/go123/aliased/schema" + }, + "iface_embedded_anonymous": { + "allOf": [ + { + "type": "object", + "properties": { + "String": { + "type": "string" + } + } + }, + { + "type": "object", + "properties": { + "Error": { + "type": "string" + } + } + } + ], + "x-go-name": "IfaceEmbeddedAnonymous", + "x-go-package": "github.com/go-openapi/codescan/fixtures/goparsing/go123/aliased/schema" + }, + "iface_embedded_empty": { + "x-go-name": "IfaceEmpty", + "x-go-package": "github.com/go-openapi/codescan/fixtures/goparsing/go123/aliased/schema" + }, + "iface_embedded_with_alias": { + "allOf": [ + { + "$ref": "#/definitions/iface_alias" + }, + { + "type": "object", + "properties": { + "String": { + "type": "string" + } + } + }, + { + "type": "object", + "properties": { + "Dump": { + "type": "array", + "items": { + "type": "integer", + "format": "uint8" + } + } + } + } + ], + "x-go-name": "IfaceEmbeddedWithAlias", + "x-go-package": "github.com/go-openapi/codescan/fixtures/goparsing/go123/aliased/schema" + }, + "iface_redefinition": { + "$ref": "#/definitions/iface" + }, + "order": { + "description": "An order can either be created, processed or completed.", + "type": "object", + "title": "StoreOrder represents an order in this application.", + 
"required": [ + "id", + "userId", + "category" + ], + "properties": { + "DeliveryOption": { + "$ref": "#/definitions/Anything" + }, + "Extras": {}, + "MoreExtras": {}, + "category": { + "description": "the category of this user", + "type": "string", + "default": "bar", + "enum": [ + "foo", + "bar", + "none" + ], + "x-go-name": "Category" + }, + "extended_id": { + "$ref": "#/definitions/ExtendedID" + }, + "id": { + "$ref": "#/definitions/UUID" + }, + "items": { + "description": "the items for this order", + "type": "array", + "items": { + "type": "object", + "properties": { + "extra_options": { + "x-go-name": "ExtraOptions" + }, + "id": { + "type": "integer", + "format": "int32", + "x-go-name": "ID" + }, + "quantity": { + "type": "integer", + "format": "int16", + "x-go-name": "Quantity" + } + } + }, + "x-go-name": "Items" + }, + "userId": { + "description": "the name for this user", + "type": "integer", + "format": "int64", + "minLength": 3, + "x-go-name": "UserID" + } + }, + "x-go-name": "StoreOrder", + "x-go-package": "github.com/go-openapi/codescan/fixtures/goparsing/go123/aliased/schema" + }, + "slice_alias": { + "type": "array", + "items": {}, + "x-go-name": "SliceAlias", + "x-go-package": "github.com/go-openapi/codescan/fixtures/goparsing/go123/aliased/schema" + }, + "slice_of_structs": { + "type": "array", + "items": { + "type": "object" + }, + "x-go-name": "SliceOfStructs", + "x-go-package": "github.com/go-openapi/codescan/fixtures/goparsing/go123/aliased/schema" + }, + "slice_of_structs_alias": { + "type": "array", + "items": { + "type": "object" + }, + "x-go-name": "SliceOfStructsAlias", + "x-go-package": "github.com/go-openapi/codescan/fixtures/goparsing/go123/aliased/schema" + }, + "slice_to_slice": { + "$ref": "#/definitions/slice_type" + }, + "slice_type": { + "type": "array", + "items": {}, + "x-go-name": "Slice", + "x-go-package": "github.com/go-openapi/codescan/fixtures/goparsing/go123/aliased/schema" + }, + "void": { + "$ref": "#/definitions/Empty" 
+ }, + "whatnot": { + "x-go-name": "WhatNot", + "x-go-package": "github.com/go-openapi/codescan/fixtures/goparsing/go123/aliased/schema" + }, + "whatnot2": { + "x-go-name": "WhatNot2", + "x-go-package": "github.com/go-openapi/codescan/fixtures/goparsing/go123/aliased/schema" + }, + "whatnot2_alias": { + "x-go-name": "WhatNot2Alias", + "x-go-package": "github.com/go-openapi/codescan/fixtures/goparsing/go123/aliased/schema" + }, + "whatnot_alias": { + "x-go-name": "WhatNotAlias", + "x-go-package": "github.com/go-openapi/codescan/fixtures/goparsing/go123/aliased/schema" + } + } +} \ No newline at end of file diff --git a/fixtures/integration/golden/go123_special_spec.json b/fixtures/integration/golden/go123_special_spec.json new file mode 100644 index 0000000..6a50e41 --- /dev/null +++ b/fixtures/integration/golden/go123_special_spec.json @@ -0,0 +1,297 @@ +{ + "swagger": "2.0", + "paths": {}, + "definitions": { + "ChanDir": { + "type": "integer", + "format": "int64", + "title": "ChanDir represents a channel type's direction.", + "x-go-package": "reflect" + }, + "Duration": { + "description": "A Duration represents the elapsed time between two instants\nas an int64 nanosecond count. The representation limits the\nlargest representable duration to approximately 290 years.", + "type": "integer", + "format": "int64", + "x-go-package": "time" + }, + "GoStruct": { + "type": "object", + "properties": { + "A": { + "type": "number", + "format": "float" + } + }, + "x-go-package": "github.com/go-openapi/codescan/fixtures/goparsing/go123/special" + }, + "Kind": { + "description": "The zero Kind is not a valid kind.", + "type": "integer", + "format": "uint64", + "title": "A Kind represents the specific kind of type that a [Type] represents.", + "x-go-package": "reflect" + }, + "Type": { + "description": "Not all methods apply to all kinds of types. 
Restrictions,\nif any, are noted in the documentation for each method.\nUse the Kind method to find out the kind of type before\ncalling kind-specific methods. Calling a method\ninappropriate to the kind of type causes a run-time panic.\n\nType values are comparable, such as with the == operator,\nso they can be used as map keys.\nTwo Type values are equal if they represent identical types.", + "type": "object", + "title": "Type is the representation of a Go type.", + "properties": { + "Align": { + "description": "Align returns the alignment in bytes of a value of\nthis type when allocated in memory.", + "type": "integer", + "format": "int64" + }, + "Bits": { + "description": "Bits returns the size of the type in bits.\nIt panics if the type's Kind is not one of the\nsized or unsized Int, Uint, Float, or Complex kinds.", + "type": "integer", + "format": "int64" + }, + "CanSeq": { + "description": "CanSeq reports whether a [Value] with this type can be iterated over using [Value.Seq].", + "type": "boolean" + }, + "CanSeq2": { + "description": "CanSeq2 reports whether a [Value] with this type can be iterated over using [Value.Seq2].", + "type": "boolean" + }, + "ChanDir": { + "$ref": "#/definitions/ChanDir" + }, + "Comparable": { + "description": "Comparable reports whether values of this type are comparable.\nEven if Comparable returns true, the comparison may still panic.\nFor example, values of interface type are comparable,\nbut the comparison will panic if their dynamic type is not comparable.", + "type": "boolean" + }, + "Elem": { + "$ref": "#/definitions/Type" + }, + "FieldAlign": { + "description": "FieldAlign returns the alignment in bytes of a value of\nthis type when used as a field in a struct.", + "type": "integer", + "format": "int64" + }, + "Fields": { + "description": "Fields returns an iterator over each struct field for struct type t. 
The sequence is\nequivalent to calling Field successively for each index i in the range [0, NumField()).\nIt panics if the type's Kind is not Struct." + }, + "Ins": { + "description": "Ins returns an iterator over each input parameter of function type t. The sequence\nis equivalent to calling In successively for each index i in the range [0, NumIn()).\nIt panics if the type's Kind is not Func." + }, + "IsVariadic": { + "description": "IsVariadic reports whether a function type's final input parameter\nis a \"...\" parameter. If so, t.In(t.NumIn() - 1) returns the parameter's\nimplicit actual type []T.\n\nFor concreteness, if t represents func(x int, y ... float64), then\n\nt.NumIn() == 2\nt.In(0) is the reflect.Type for \"int\"\nt.In(1) is the reflect.Type for \"[]float64\"\nt.IsVariadic() == true\n\nIsVariadic panics if the type's Kind is not Func.", + "type": "boolean" + }, + "Key": { + "$ref": "#/definitions/Type" + }, + "Kind": { + "$ref": "#/definitions/Kind" + }, + "Len": { + "description": "Len returns an array type's length.\nIt panics if the type's Kind is not Array.", + "type": "integer", + "format": "int64" + }, + "Methods": { + "description": "Methods returns an iterator over each method in the type's method set. The sequence is\nequivalent to calling Method successively for each index i in the range [0, NumMethod())." 
+ }, + "Name": { + "description": "Name returns the type's name within its package for a defined type.\nFor other (non-defined) types it returns the empty string.", + "type": "string" + }, + "NumField": { + "description": "NumField returns a struct type's field count.\nIt panics if the type's Kind is not Struct.", + "type": "integer", + "format": "int64" + }, + "NumIn": { + "description": "NumIn returns a function type's input parameter count.\nIt panics if the type's Kind is not Func.", + "type": "integer", + "format": "int64" + }, + "NumMethod": { + "description": "NumMethod returns the number of methods accessible using Method.\n\nFor a non-interface type, it returns the number of exported methods.\n\nFor an interface type, it returns the number of exported and unexported methods.", + "type": "integer", + "format": "int64" + }, + "NumOut": { + "description": "NumOut returns a function type's output parameter count.\nIt panics if the type's Kind is not Func.", + "type": "integer", + "format": "int64" + }, + "Outs": { + "description": "Outs returns an iterator over each output parameter of function type t. The sequence\nis equivalent to calling Out successively for each index i in the range [0, NumOut()).\nIt panics if the type's Kind is not Func." 
+ }, + "PkgPath": { + "description": "PkgPath returns a defined type's package path, that is, the import path\nthat uniquely identifies the package, such as \"encoding/base64\".\nIf the type was predeclared (string, error) or not defined (*T, struct{},\n[]int, or A where A is an alias for a non-defined type), the package path\nwill be the empty string.", + "type": "string" + }, + "Size": { + "description": "Size returns the number of bytes needed to store\na value of the given type; it is analogous to unsafe.Sizeof.", + "type": "integer", + "format": "uint64" + }, + "String": { + "description": "String returns a string representation of the type.\nThe string representation may use shortened package names\n(e.g., base64 instead of \"encoding/base64\") and is not\nguaranteed to be unique among types. To test for type identity,\ncompare the Types directly.", + "type": "string" + } + }, + "x-go-package": "reflect" + }, + "Value": { + "description": "Not all methods apply to all kinds of values. Restrictions,\nif any, are noted in the documentation for each method.\nUse the Kind method to find out the kind of value before\ncalling kind-specific methods. 
Calling a method\ninappropriate to the kind of type causes a run time panic.\n\nThe zero Value represents no value.\nIts [Value.IsValid] method returns false, its Kind method returns [Invalid],\nits String method returns \"\u003cinvalid Value\u003e\", and all other methods panic.\nMost functions and methods never return an invalid value.\nIf one does, its documentation states the conditions explicitly.\n\nA Value can be used concurrently by multiple goroutines provided that\nthe underlying Go value can be used concurrently for the equivalent\ndirect operations.\n\nTo compare two Values, compare the results of the Interface method.\nUsing == on two Values does not compare the underlying values\nthey represent.", + "type": "object", + "title": "Value is the reflection interface to a Go value.", + "x-go-package": "reflect" + }, + "generic_constraint": { + "allOf": [ + { + "type": "object", + "properties": { + "Uint": { + "type": "integer", + "format": "uint16" + } + } + } + ], + "x-go-name": "Constraint", + "x-go-package": "github.com/go-openapi/codescan/fixtures/goparsing/go123/special" + }, + "generic_indirect": { + "$ref": "#/definitions/generic_map_alias" + }, + "generic_map": { + "x-go-name": "GenericMap", + "x-go-package": "github.com/go-openapi/codescan/fixtures/goparsing/go123/special" + }, + "generic_map_alias": { + "x-go-name": "GenericMapAlias", + "x-go-package": "github.com/go-openapi/codescan/fixtures/goparsing/go123/special" + }, + "generic_slice": { + "type": "array", + "items": {}, + "x-go-name": "GenericSlice", + "x-go-package": "github.com/go-openapi/codescan/fixtures/goparsing/go123/special" + }, + "go_array": { + "type": "array", + "items": { + "type": "integer", + "format": "int64" + }, + "x-go-name": "GoArray", + "x-go-package": "github.com/go-openapi/codescan/fixtures/goparsing/go123/special" + }, + "go_error": { + "type": "string", + "x-go-name": "Error", + "x-go-package": "github.com/go-openapi/codescan/fixtures/goparsing/go123/special", + 
"x-go-type": "error" + }, + "go_map": { + "type": "object", + "additionalProperties": { + "type": "integer", + "format": "uint16" + }, + "x-go-name": "GoMap", + "x-go-package": "github.com/go-openapi/codescan/fixtures/goparsing/go123/special" + }, + "index_map": { + "x-go-name": "UnsupportedMap", + "x-go-package": "github.com/go-openapi/codescan/fixtures/goparsing/go123/special" + }, + "numerical_constraint": { + "x-go-name": "Numerical", + "x-go-package": "github.com/go-openapi/codescan/fixtures/goparsing/go123/special" + }, + "primitive": { + "type": "string", + "x-go-name": "Primitive", + "x-go-package": "github.com/go-openapi/codescan/fixtures/goparsing/go123/special" + }, + "special_types": { + "type": "object", + "properties": { + "Duration": { + "$ref": "#/definitions/Duration" + }, + "Err": { + "type": "string", + "x-go-type": "error" + }, + "Error": { + "type": "string", + "x-go-type": "error" + }, + "FormatDate": { + "type": "string", + "format": "date" + }, + "FormatTime": { + "type": "string", + "format": "date-time" + }, + "FormatUUID": { + "type": "string", + "format": "uuid" + }, + "Map": { + "type": "object", + "additionalProperties": { + "$ref": "#/definitions/GoStruct" + } + }, + "Marshaler": { + "type": "string", + "x-go-type": "github.com/go-openapi/codescan/fixtures/goparsing/go123/special.IsATextMarshaler" + }, + "Message": { + "type": "object" + }, + "NamedArray": { + "$ref": "#/definitions/go_array" + }, + "PtrFormatUUID": { + "type": "string", + "format": "uuid" + }, + "PtrStruct": { + "$ref": "#/definitions/GoStruct" + }, + "ShouldAlsoBeStringTime": { + "type": "string", + "format": "date-time" + }, + "ShouldBeStringTime": { + "type": "string", + "format": "date-time" + }, + "WhatNot": { + "description": "and what not", + "type": "object" + } + }, + "x-go-name": "SpecialTypes", + "x-go-package": "github.com/go-openapi/codescan/fixtures/goparsing/go123/special" + }, + "unexported": { + "type": "object", + "x-go-package": 
"github.com/go-openapi/codescan/fixtures/goparsing/go123/special" + }, + "union_alias": { + "$ref": "#/definitions/numerical_constraint" + }, + "unsafe_pointer_alias": { + "x-go-name": "Unsafe", + "x-go-package": "github.com/go-openapi/codescan/fixtures/goparsing/go123/special" + }, + "upointer_alias": { + "type": "integer", + "format": "uint64", + "x-go-name": "UIntPtr", + "x-go-package": "github.com/go-openapi/codescan/fixtures/goparsing/go123/special" + } + } +} \ No newline at end of file diff --git a/fixtures/integration/golden/petstore_schema_Order.json b/fixtures/integration/golden/petstore_schema_Order.json new file mode 100644 index 0000000..9f03b9b --- /dev/null +++ b/fixtures/integration/golden/petstore_schema_Order.json @@ -0,0 +1,61 @@ +{ + "order": { + "type": "object", + "title": "An Order for one or more pets by a user.", + "required": [ + "id", + "userId", + "orderedAt" + ], + "properties": { + "id": { + "description": "the ID of the order", + "type": "integer", + "format": "int64", + "x-go-name": "ID" + }, + "items": { + "description": "the items for this order", + "type": "array", + "minItems": 1, + "items": { + "type": "object", + "required": [ + "petId", + "qty" + ], + "properties": { + "petId": { + "description": "the id of the pet to order", + "type": "integer", + "format": "int64", + "x-go-name": "PetID" + }, + "qty": { + "description": "the quantity of this pet to order", + "type": "integer", + "format": "int32", + "minimum": 1, + "x-go-name": "Quantity" + } + } + }, + "x-go-name": "Items" + }, + "orderedAt": { + "description": "the time at which this order was made.", + "type": "string", + "format": "date-time", + "x-go-name": "OrderedAt" + }, + "userId": { + "description": "the id of the user who placed the order.", + "type": "integer", + "format": "int64", + "x-go-name": "UserID" + } + }, + "x-go-name": "Order", + "x-go-package": "github.com/go-openapi/codescan/fixtures/goparsing/petstore/models" + } +} \ No newline at end of file diff 
--git a/fixtures/integration/golden/petstore_schema_Pet.json b/fixtures/integration/golden/petstore_schema_Pet.json new file mode 100644 index 0000000..db5e481 --- /dev/null +++ b/fixtures/integration/golden/petstore_schema_Pet.json @@ -0,0 +1,63 @@ +{ + "pet": { + "description": "It is used to describe the animals available in the store.", + "type": "object", + "title": "A Pet is the main product in the store.", + "required": [ + "id", + "name" + ], + "properties": { + "birthday": { + "description": "The pet's birthday", + "type": "string", + "format": "date", + "x-go-name": "Birthday" + }, + "id": { + "description": "The id of the pet.", + "type": "integer", + "format": "int64", + "x-go-name": "ID" + }, + "name": { + "description": "The name of the pet.", + "type": "string", + "maxLength": 50, + "minLength": 3, + "pattern": "\\w[\\w-]+", + "x-go-name": "Name" + }, + "photoUrls": { + "description": "The photo urls for the pet.\nThis only accepts jpeg or png images.", + "type": "array", + "items": { + "type": "string", + "pattern": "\\.(jpe?g|png)$" + }, + "x-go-name": "PhotoURLs" + }, + "status": { + "description": "The current status of the pet in the store.\navailable STATUS_AVAILABLE\npending STATUS_PENDING\nsold STATUS_SOLD", + "type": "string", + "enum": [ + "available", + "pending", + "sold" + ], + "x-go-enum-desc": "available STATUS_AVAILABLE\npending STATUS_PENDING\nsold STATUS_SOLD", + "x-go-name": "Status" + }, + "tags": { + "description": "Extra bits of information attached to this pet.", + "type": "array", + "items": { + "$ref": "#/definitions/tag" + }, + "x-go-name": "Tags" + } + }, + "x-go-name": "Pet", + "x-go-package": "github.com/go-openapi/codescan/fixtures/goparsing/petstore/models" + } +} \ No newline at end of file diff --git a/fixtures/integration/golden/petstore_schema_Tag.json b/fixtures/integration/golden/petstore_schema_Tag.json new file mode 100644 index 0000000..a43cf0e --- /dev/null +++ 
b/fixtures/integration/golden/petstore_schema_Tag.json @@ -0,0 +1,26 @@ +{ + "tag": { + "description": "It is used to describe the animals available in the store.", + "type": "object", + "title": "A Tag is an extra piece of data to provide more information about a pet.", + "required": [ + "id", + "value" + ], + "properties": { + "id": { + "description": "The id of the tag.", + "type": "integer", + "format": "int64", + "x-go-name": "ID" + }, + "value": { + "description": "The value of the tag.", + "type": "string", + "x-go-name": "Value" + } + }, + "x-go-name": "Tag", + "x-go-package": "github.com/go-openapi/codescan/fixtures/goparsing/petstore/models" + } +} \ No newline at end of file diff --git a/fixtures/integration/golden/petstore_spec.json b/fixtures/integration/golden/petstore_spec.json new file mode 100644 index 0000000..f3db8fa --- /dev/null +++ b/fixtures/integration/golden/petstore_spec.json @@ -0,0 +1,556 @@ +{ + "consumes": [ + "application/json" + ], + "produces": [ + "application/json" + ], + "schemes": [ + "http", + "https" + ], + "swagger": "2.0", + "info": { + "description": "the purpose of this application is to provide an application\nthat is using plain go code to define an API\n\nThis should demonstrate all the possible comment annotations\nthat are available to turn go code into a fully compliant swagger 2.0 spec", + "title": "Petstore API.", + "termsOfService": "there are no TOS at this moment, use at your own risk we take no responsibility", + "contact": { + "name": "John Doe", + "url": "http://john.doe.com", + "email": "john.doe@example.com" + }, + "license": { + "name": "MIT", + "url": "http://opensource.org/licenses/MIT" + }, + "version": "0.0.1" + }, + "host": "localhost", + "basePath": "/v2", + "paths": { + "/help": { + "get": { + "summary": "Gets the help as markdown", + "operationId": "help", + "responses": { + "200": { + "$ref": "#/responses/MarkdownRender" + }, + "422": { + "$ref": "#/responses/validationError" + }, + "default": { + 
"$ref": "#/responses/genericError" + } + } + } + }, + "/orders": { + "post": { + "tags": [ + "orders" + ], + "summary": "Creates an order.", + "operationId": "createOrder", + "parameters": [ + { + "x-go-name": "Order", + "description": "The order to submit", + "name": "order", + "in": "body", + "required": true, + "schema": { + "$ref": "#/definitions/order" + } + } + ], + "responses": { + "200": { + "$ref": "#/responses/orderResponse" + }, + "422": { + "$ref": "#/responses/validationError" + }, + "default": { + "$ref": "#/responses/genericError" + } + } + } + }, + "/orders/{id}": { + "get": { + "tags": [ + "orders" + ], + "summary": "Gets the details for an order.", + "operationId": "getOrderDetails", + "parameters": [ + { + "type": "integer", + "format": "int64", + "x-go-name": "ID", + "description": "The ID of the order", + "name": "id", + "in": "path", + "required": true + } + ], + "responses": { + "200": { + "$ref": "#/responses/orderResponse" + }, + "default": { + "$ref": "#/responses/genericError" + } + } + }, + "put": { + "tags": [ + "orders" + ], + "summary": "Updates an order.", + "operationId": "updateOrder", + "parameters": [ + { + "type": "integer", + "format": "int64", + "x-go-name": "ID", + "description": "The ID of the order", + "name": "id", + "in": "path", + "required": true + }, + { + "x-go-name": "Order", + "description": "The order to submit", + "name": "order", + "in": "body", + "required": true, + "schema": { + "$ref": "#/definitions/order" + } + } + ], + "responses": { + "200": { + "description": "order", + "schema": { + "$ref": "#/definitions/order" + } + }, + "422": { + "$ref": "#/responses/validationError" + }, + "default": { + "$ref": "#/responses/genericError" + } + } + }, + "delete": { + "tags": [ + "orders" + ], + "summary": "Deletes an order.", + "operationId": "cancelOrder", + "parameters": [ + { + "type": "integer", + "format": "int64", + "x-go-name": "ID", + "description": "The ID of the order", + "name": "id", + "in": "path", + 
"required": true + } + ], + "responses": { + "204": { + "description": "" + }, + "default": { + "$ref": "#/responses/genericError" + } + } + } + }, + "/pets": { + "get": { + "description": "By default it will only lists pets that are available for sale.\nThis can be changed with the status flag.", + "tags": [ + "pets" + ], + "summary": "Lists the pets known to the store.", + "operationId": "listPets", + "deprecated": true, + "parameters": [ + { + "enum": [ + "available", + "pending", + "sold" + ], + "type": "string", + "x-go-enum-desc": "available STATUS_AVAILABLE\npending STATUS_PENDING\nsold STATUS_SOLD", + "x-go-name": "Status", + "description": "Status\navailable STATUS_AVAILABLE\npending STATUS_PENDING\nsold STATUS_SOLD", + "name": "status", + "in": "query" + }, + { + "type": "string", + "format": "date", + "x-go-name": "Birthday", + "description": "Birthday", + "name": "birthday", + "in": "query" + } + ], + "responses": { + "200": { + "description": "pet", + "schema": { + "type": "array", + "items": { + "$ref": "#/definitions/pet" + } + } + }, + "default": { + "$ref": "#/responses/genericError" + } + } + }, + "post": { + "tags": [ + "pets" + ], + "summary": "Creates a new pet in the store.", + "operationId": "createPet", + "parameters": [ + { + "x-go-name": "Pet", + "description": "The pet to submit.", + "name": "pet", + "in": "body", + "required": true, + "schema": { + "$ref": "#/definitions/pet" + } + } + ], + "responses": { + "200": { + "description": "pet", + "schema": { + "$ref": "#/definitions/pet" + } + }, + "422": { + "$ref": "#/responses/validationError" + }, + "default": { + "$ref": "#/responses/genericError" + } + } + } + }, + "/pets/{id}": { + "get": { + "tags": [ + "pets" + ], + "summary": "Gets the details for a pet.", + "operationId": "getPetById", + "parameters": [ + { + "type": "integer", + "format": "int64", + "x-go-name": "ID", + "description": "The ID of the pet", + "name": "id", + "in": "path", + "required": true + } + ], + "responses": { 
+ "200": { + "description": "pet", + "schema": { + "$ref": "#/definitions/pet" + } + }, + "default": { + "$ref": "#/responses/genericError" + } + } + }, + "put": { + "tags": [ + "pets" + ], + "summary": "Updates the details for a pet.", + "operationId": "updatePet", + "parameters": [ + { + "type": "integer", + "format": "int64", + "x-go-name": "ID", + "description": "The ID of the pet", + "name": "id", + "in": "path", + "required": true + }, + { + "x-go-name": "Pet", + "description": "The pet to submit.", + "name": "pet", + "in": "body", + "required": true, + "schema": { + "$ref": "#/definitions/pet" + } + } + ], + "responses": { + "200": { + "description": "pet", + "schema": { + "$ref": "#/definitions/pet" + } + }, + "422": { + "$ref": "#/responses/validationError" + }, + "default": { + "$ref": "#/responses/genericError" + } + } + }, + "delete": { + "tags": [ + "pets" + ], + "summary": "Deletes a pet from the store.", + "operationId": "deletePet", + "parameters": [ + { + "type": "integer", + "format": "int64", + "x-go-name": "ID", + "description": "The ID of the pet", + "name": "id", + "in": "path", + "required": true + } + ], + "responses": { + "204": { + "description": "" + }, + "default": { + "$ref": "#/responses/genericError" + } + } + } + } + }, + "definitions": { + "order": { + "type": "object", + "title": "An Order for one or more pets by a user.", + "required": [ + "id", + "userId", + "orderedAt" + ], + "properties": { + "id": { + "description": "the ID of the order", + "type": "integer", + "format": "int64", + "x-go-name": "ID" + }, + "items": { + "description": "the items for this order", + "type": "array", + "minItems": 1, + "items": { + "type": "object", + "required": [ + "petId", + "qty" + ], + "properties": { + "petId": { + "description": "the id of the pet to order", + "type": "integer", + "format": "int64", + "x-go-name": "PetID" + }, + "qty": { + "description": "the quantity of this pet to order", + "type": "integer", + "format": "int32", + 
"minimum": 1, + "x-go-name": "Quantity" + } + } + }, + "x-go-name": "Items" + }, + "orderedAt": { + "description": "the time at which this order was made.", + "type": "string", + "format": "date-time", + "x-go-name": "OrderedAt" + }, + "userId": { + "description": "the id of the user who placed the order.", + "type": "integer", + "format": "int64", + "x-go-name": "UserID" + } + }, + "x-go-name": "Order", + "x-go-package": "github.com/go-openapi/codescan/fixtures/goparsing/petstore/models" + }, + "pet": { + "description": "It is used to describe the animals available in the store.", + "type": "object", + "title": "A Pet is the main product in the store.", + "required": [ + "id", + "name" + ], + "properties": { + "birthday": { + "description": "The pet's birthday", + "type": "string", + "format": "date", + "x-go-name": "Birthday" + }, + "id": { + "description": "The id of the pet.", + "type": "integer", + "format": "int64", + "x-go-name": "ID" + }, + "name": { + "description": "The name of the pet.", + "type": "string", + "maxLength": 50, + "minLength": 3, + "pattern": "\\w[\\w-]+", + "x-go-name": "Name" + }, + "photoUrls": { + "description": "The photo urls for the pet.\nThis only accepts jpeg or png images.", + "type": "array", + "items": { + "type": "string", + "pattern": "\\.(jpe?g|png)$" + }, + "x-go-name": "PhotoURLs" + }, + "status": { + "description": "The current status of the pet in the store.\navailable STATUS_AVAILABLE\npending STATUS_PENDING\nsold STATUS_SOLD", + "type": "string", + "enum": [ + "available", + "pending", + "sold" + ], + "x-go-enum-desc": "available STATUS_AVAILABLE\npending STATUS_PENDING\nsold STATUS_SOLD", + "x-go-name": "Status" + }, + "tags": { + "description": "Extra bits of information attached to this pet.", + "type": "array", + "items": { + "$ref": "#/definitions/tag" + }, + "x-go-name": "Tags" + } + }, + "x-go-name": "Pet", + "x-go-package": "github.com/go-openapi/codescan/fixtures/goparsing/petstore/models" + }, + "tag": { + 
"description": "It is used to describe the animals available in the store.", + "type": "object", + "title": "A Tag is an extra piece of data to provide more information about a pet.", + "required": [ + "id", + "value" + ], + "properties": { + "id": { + "description": "The id of the tag.", + "type": "integer", + "format": "int64", + "x-go-name": "ID" + }, + "value": { + "description": "The value of the tag.", + "type": "string", + "x-go-name": "Value" + } + }, + "x-go-name": "Tag", + "x-go-package": "github.com/go-openapi/codescan/fixtures/goparsing/petstore/models" + } + }, + "responses": { + "MarkdownRender": { + "description": "MarkdownRender is a rendered markdown document", + "schema": { + "type": "string" + } + }, + "genericError": { + "description": "A GenericError is the default error message that is generated.\nFor certain status codes there are more appropriate error structures.", + "schema": { + "type": "object", + "properties": { + "code": { + "type": "integer", + "format": "int32", + "x-go-name": "Code" + }, + "message": { + "type": "string", + "x-go-name": "Message", + "x-go-type": "error" + } + } + } + }, + "orderResponse": { + "description": "An OrderResponse response model\n\n# This is used for returning a response with a single order as body", + "schema": { + "$ref": "#/definitions/order" + } + }, + "validationError": { + "description": "A ValidationError is an that is generated for validation failures.\nIt has the same fields as a generic error but adds a Field property.", + "schema": { + "type": "object", + "properties": { + "code": { + "type": "integer", + "format": "int32", + "x-go-name": "Code" + }, + "field": { + "type": "string", + "x-go-name": "Field" + }, + "message": { + "type": "string", + "x-go-name": "Message" + } + } + } + } + } +} \ No newline at end of file diff --git a/fixtures/integration/golden/product_responses.json b/fixtures/integration/golden/product_responses.json new file mode 100644 index 0000000..8cf3e9b --- /dev/null +++ 
b/fixtures/integration/golden/product_responses.json @@ -0,0 +1,11 @@ +{ + "GetProductsResponse": { + "description": "", + "schema": { + "type": "object", + "additionalProperties": { + "$ref": "#/definitions/Product" + } + } + } +} \ No newline at end of file diff --git a/fixtures/integration/golden/transparentalias_params.json b/fixtures/integration/golden/transparentalias_params.json new file mode 100644 index 0000000..762444b --- /dev/null +++ b/fixtures/integration/golden/transparentalias_params.json @@ -0,0 +1,40 @@ +{ + "transparentAlias": { + "operationId": "transparentAlias", + "parameters": [ + { + "x-go-name": "AliasBody", + "description": "AliasBody exercises alias handling for body parameters.", + "name": "aliasBody", + "in": "body", + "required": true, + "schema": { + "type": "object", + "required": [ + "id" + ], + "properties": { + "id": { + "description": "ID of the payload.", + "type": "integer", + "format": "int64", + "x-go-name": "ID" + }, + "name": { + "description": "Name of the payload.", + "type": "string", + "x-go-name": "Name" + } + } + } + }, + { + "type": "string", + "x-go-name": "AliasQuery", + "description": "AliasQuery exercises alias handling for non-body parameters.", + "name": "aliasQuery", + "in": "query" + } + ] + } +} \ No newline at end of file diff --git a/fixtures/integration/golden/transparentalias_responses.json b/fixtures/integration/golden/transparentalias_responses.json new file mode 100644 index 0000000..af1f248 --- /dev/null +++ b/fixtures/integration/golden/transparentalias_responses.json @@ -0,0 +1,15 @@ +{ + "transparentAliasResponse": { + "description": "TransparentAliasResponse is an exported alias annotated as swagger response.", + "schema": { + "type": "object", + "properties": { + "payload": { + "description": "Payload uses an alias that should resolve transparently.", + "type": "object", + "x-go-name": "Payload" + } + } + } + } +} \ No newline at end of file diff --git a/internal/builders/items/errors.go 
b/internal/builders/items/errors.go new file mode 100644 index 0000000..096703e --- /dev/null +++ b/internal/builders/items/errors.go @@ -0,0 +1,15 @@ +// SPDX-FileCopyrightText: Copyright 2015-2025 go-swagger maintainers +// SPDX-License-Identifier: Apache-2.0 + +package items + +type itemsError string + +func (e itemsError) Error() string { + return string(e) +} + +const ( + // ErrItems is the sentinel error for all errors originating from the items package. + ErrItems itemsError = "builders:items" +) diff --git a/internal/builders/items/taggers.go b/internal/builders/items/taggers.go new file mode 100644 index 0000000..bfb5a1e --- /dev/null +++ b/internal/builders/items/taggers.go @@ -0,0 +1,77 @@ +// SPDX-FileCopyrightText: Copyright 2015-2025 go-swagger maintainers +// SPDX-License-Identifier: Apache-2.0 + +package items + +import ( + "fmt" + "go/ast" + "slices" + + "github.com/go-openapi/codescan/internal/parsers" + "github.com/go-openapi/spec" +) + +// Taggers builds tag parsers for array items at a given nesting level. 
+func Taggers(items *spec.Items, level int) []parsers.TagParser { + return itemsTaggers(items, level) +} + +func itemsTaggers(items *spec.Items, level int) []parsers.TagParser { + opts := []parsers.PrefixRxOption{parsers.WithItemsPrefixLevel(level)} + + return []parsers.TagParser{ + parsers.NewSingleLineTagParser(fmt.Sprintf("items%dMaximum", level), parsers.NewSetMaximum(Validations{items}, opts...)), + parsers.NewSingleLineTagParser(fmt.Sprintf("items%dMinimum", level), parsers.NewSetMinimum(Validations{items}, opts...)), + parsers.NewSingleLineTagParser(fmt.Sprintf("items%dMultipleOf", level), parsers.NewSetMultipleOf(Validations{items}, opts...)), + parsers.NewSingleLineTagParser(fmt.Sprintf("items%dMinLength", level), parsers.NewSetMinLength(Validations{items}, opts...)), + parsers.NewSingleLineTagParser(fmt.Sprintf("items%dMaxLength", level), parsers.NewSetMaxLength(Validations{items}, opts...)), + parsers.NewSingleLineTagParser(fmt.Sprintf("items%dPattern", level), parsers.NewSetPattern(Validations{items}, opts...)), + parsers.NewSingleLineTagParser(fmt.Sprintf("items%dCollectionFormat", level), parsers.NewSetCollectionFormat(Validations{items}, opts...)), + parsers.NewSingleLineTagParser(fmt.Sprintf("items%dMinItems", level), parsers.NewSetMinItems(Validations{items}, opts...)), + parsers.NewSingleLineTagParser(fmt.Sprintf("items%dMaxItems", level), parsers.NewSetMaxItems(Validations{items}, opts...)), + parsers.NewSingleLineTagParser(fmt.Sprintf("items%dUnique", level), parsers.NewSetUnique(Validations{items}, opts...)), + parsers.NewSingleLineTagParser(fmt.Sprintf("items%dEnum", level), parsers.NewSetEnum(Validations{items}, opts...)), + parsers.NewSingleLineTagParser(fmt.Sprintf("items%dDefault", level), parsers.NewSetDefault(&items.SimpleSchema, Validations{items}, opts...)), + parsers.NewSingleLineTagParser(fmt.Sprintf("items%dExample", level), parsers.NewSetExample(&items.SimpleSchema, Validations{items}, opts...)), + } +} + +// ParseArrayTypes 
recursively builds tag parsers for nested array types. +func ParseArrayTypes(taggers []parsers.TagParser, name string, expr ast.Expr, items *spec.Items, level int) ([]parsers.TagParser, error) { + return parseArrayTypes(taggers, name, expr, items, level) +} + +func parseArrayTypes(taggers []parsers.TagParser, name string, expr ast.Expr, items *spec.Items, level int) ([]parsers.TagParser, error) { + if items == nil { + return taggers, nil + } + + switch iftpe := expr.(type) { + case *ast.ArrayType: + eleTaggers := itemsTaggers(items, level) + return parseArrayTypes(slices.Concat(eleTaggers, taggers), name, iftpe.Elt, items.Items, level+1) + + case *ast.SelectorExpr: + return parseArrayTypes(taggers, name, iftpe.Sel, items.Items, level+1) + + case *ast.Ident: + var identTaggers []parsers.TagParser + if iftpe.Obj == nil { + identTaggers = itemsTaggers(items, level) + } + + otherTaggers, err := parseArrayTypes(taggers, name, expr, items.Items, level+1) + if err != nil { + return nil, err + } + + return slices.Concat(identTaggers, otherTaggers), nil + + case *ast.StarExpr: + return parseArrayTypes(taggers, name, iftpe.X, items, level) + + default: + return nil, fmt.Errorf("unknown field type element for %q: %w", name, ErrItems) + } +} diff --git a/internal/builders/items/typable.go b/internal/builders/items/typable.go new file mode 100644 index 0000000..806059d --- /dev/null +++ b/internal/builders/items/typable.go @@ -0,0 +1,59 @@ +// SPDX-FileCopyrightText: Copyright 2015-2025 go-swagger maintainers +// SPDX-License-Identifier: Apache-2.0 + +package items + +import ( + "github.com/go-openapi/codescan/internal/ifaces" + oaispec "github.com/go-openapi/spec" +) + +type Typable struct { + items *oaispec.Items + level int + in string +} + +func NewTypable(items *oaispec.Items, level int, in string) Typable { + return Typable{ + items: items, + level: level, + in: in, + } +} + +func (pt Typable) In() string { return pt.in } // TODO(fred): inherit from param + +func (pt 
Typable) Level() int { return pt.level } + +func (pt Typable) Typed(tpe, format string) { + pt.items.Typed(tpe, format) +} + +func (pt Typable) SetRef(ref oaispec.Ref) { + pt.items.Ref = ref +} + +func (pt Typable) Schema() *oaispec.Schema { + return nil +} + +func (pt Typable) Items() ifaces.SwaggerTypable { //nolint:ireturn // polymorphic by design + if pt.items.Items == nil { + pt.items.Items = new(oaispec.Items) + } + pt.items.Type = "array" + return Typable{pt.items.Items, pt.level + 1, pt.in} +} + +func (pt Typable) AddExtension(key string, value any) { + pt.items.AddExtension(key, value) +} + +func (pt Typable) WithEnum(values ...any) { + pt.items.WithEnum(values...) +} + +func (pt Typable) WithEnumDescription(_ string) { + // no +} diff --git a/internal/builders/items/validations.go b/internal/builders/items/validations.go new file mode 100644 index 0000000..515ff34 --- /dev/null +++ b/internal/builders/items/validations.go @@ -0,0 +1,36 @@ +// SPDX-FileCopyrightText: Copyright 2015-2025 go-swagger maintainers +// SPDX-License-Identifier: Apache-2.0 + +package items + +import ( + "github.com/go-openapi/codescan/internal/parsers" + oaispec "github.com/go-openapi/spec" +) + +type Validations struct { + current *oaispec.Items +} + +func (sv Validations) SetMaximum(val float64, exclusive bool) { + sv.current.Maximum = &val + sv.current.ExclusiveMaximum = exclusive +} + +func (sv Validations) SetMinimum(val float64, exclusive bool) { + sv.current.Minimum = &val + sv.current.ExclusiveMinimum = exclusive +} +func (sv Validations) SetMultipleOf(val float64) { sv.current.MultipleOf = &val } +func (sv Validations) SetMinItems(val int64) { sv.current.MinItems = &val } +func (sv Validations) SetMaxItems(val int64) { sv.current.MaxItems = &val } +func (sv Validations) SetMinLength(val int64) { sv.current.MinLength = &val } +func (sv Validations) SetMaxLength(val int64) { sv.current.MaxLength = &val } +func (sv Validations) SetPattern(val string) { sv.current.Pattern = 
val } +func (sv Validations) SetUnique(val bool) { sv.current.UniqueItems = val } +func (sv Validations) SetCollectionFormat(val string) { sv.current.CollectionFormat = val } +func (sv Validations) SetEnum(val string) { + sv.current.Enum = parsers.ParseEnum(val, &oaispec.SimpleSchema{Type: sv.current.Type, Format: sv.current.Format}) +} +func (sv Validations) SetDefault(val any) { sv.current.Default = val } +func (sv Validations) SetExample(val any) { sv.current.Example = val } diff --git a/internal/builders/operations/errors.go b/internal/builders/operations/errors.go new file mode 100644 index 0000000..2f8fdf2 --- /dev/null +++ b/internal/builders/operations/errors.go @@ -0,0 +1,9 @@ +// SPDX-FileCopyrightText: Copyright 2015-2025 go-swagger maintainers +// SPDX-License-Identifier: Apache-2.0 + +package operations + +import "errors" + +// ErrOperations is the sentinel error for all errors originating from the operations builder package. +var ErrOperations = errors.New("codescan:builders:operations") diff --git a/internal/builders/operations/operations.go b/internal/builders/operations/operations.go new file mode 100644 index 0000000..44011ac --- /dev/null +++ b/internal/builders/operations/operations.go @@ -0,0 +1,97 @@ +// SPDX-FileCopyrightText: Copyright 2015-2025 go-swagger maintainers +// SPDX-License-Identifier: Apache-2.0 + +package operations + +import ( + "fmt" + "strings" + + "github.com/go-openapi/codescan/internal/parsers" + "github.com/go-openapi/codescan/internal/scanner" + oaispec "github.com/go-openapi/spec" +) + +type Builder struct { + ctx *scanner.ScanCtx + path parsers.ParsedPathContent + operations map[string]*oaispec.Operation +} + +func NewBuilder(ctx *scanner.ScanCtx, pth parsers.ParsedPathContent, operations map[string]*oaispec.Operation) *Builder { + return &Builder{ + ctx: ctx, + path: pth, + operations: operations, + } +} + +func (o *Builder) Build(tgt *oaispec.Paths) error { + pthObj := tgt.Paths[o.path.Path] + + op := 
setPathOperation( + o.path.Method, o.path.ID, + &pthObj, o.operations[o.path.ID]) + op.Tags = o.path.Tags + sp := parsers.NewYAMLSpecScanner( + func(lines []string) { op.Summary = parsers.JoinDropLast(lines) }, // setTitle + func(lines []string) { op.Description = parsers.JoinDropLast(lines) }, // setDescription + ) + + if err := sp.Parse(o.path.Remaining); err != nil { + return fmt.Errorf("operation (%s): %w", op.ID, err) + } + if err := sp.UnmarshalSpec(op.UnmarshalJSON); err != nil { + return fmt.Errorf("operation (%s): %w", op.ID, err) + } + + if tgt.Paths == nil { + tgt.Paths = make(map[string]oaispec.PathItem) + } + + tgt.Paths[o.path.Path] = pthObj + + return nil +} + +// assignOrReuse either reuses an existing operation (if the ID matches) +// or assigns op to the slot. +// +// TODO(claude): rewrite without double indirection. +func assignOrReuse(slot **oaispec.Operation, op *oaispec.Operation, id string) *oaispec.Operation { + if *slot != nil && id == (*slot).ID { + return *slot + } + *slot = op + return op +} + +func SetPathOperation(method, id string, pthObj *oaispec.PathItem, op *oaispec.Operation) *oaispec.Operation { + return setPathOperation(method, id, pthObj, op) +} + +func setPathOperation(method, id string, pthObj *oaispec.PathItem, op *oaispec.Operation) *oaispec.Operation { + if op == nil { + op = new(oaispec.Operation) + op.ID = id + } + + switch strings.ToUpper(method) { + case "GET": + op = assignOrReuse(&pthObj.Get, op, id) + case "POST": + op = assignOrReuse(&pthObj.Post, op, id) + case "PUT": + op = assignOrReuse(&pthObj.Put, op, id) + case "PATCH": + op = assignOrReuse(&pthObj.Patch, op, id) + case "HEAD": + op = assignOrReuse(&pthObj.Head, op, id) + case "DELETE": + op = assignOrReuse(&pthObj.Delete, op, id) + case "OPTIONS": + op = assignOrReuse(&pthObj.Options, op, id) + } + + return op +} diff --git a/operations_go119_test.go b/internal/builders/operations/operations_go119_test.go similarity index 86% rename from 
operations_go119_test.go rename to internal/builders/operations/operations_go119_test.go index 87967e6..7a932a4 100644 --- a/operations_go119_test.go +++ b/internal/builders/operations/operations_go119_test.go @@ -1,11 +1,13 @@ // SPDX-FileCopyrightText: Copyright 2015-2025 go-swagger maintainers // SPDX-License-Identifier: Apache-2.0 -package codescan +package operations import ( "testing" + "github.com/go-openapi/codescan/internal/scanner" + "github.com/go-openapi/codescan/internal/scantest" "github.com/go-openapi/testify/v2/assert" "github.com/go-openapi/testify/v2/require" @@ -13,17 +15,17 @@ import ( ) func TestIndentedYAMLBlock(t *testing.T) { - sctx, err := newScanCtx(&Options{ + sctx, err := scanner.NewScanCtx(&scanner.Options{ Packages: []string{ "./goparsing/go119", }, - WorkDir: "fixtures", + WorkDir: scantest.FixturesDir(), }) require.NoError(t, err) var ops spec.Paths - for _, apiPath := range sctx.app.Operations { - prs := &operationsBuilder{ + for apiPath := range sctx.Operations() { + prs := &Builder{ ctx: sctx, path: apiPath, operations: make(map[string]*spec.Operation), @@ -77,4 +79,6 @@ curl -u "${LOGIN}:${PASSWORD}" -d '{"key2": "value2"}' -X POST "https://{host} assert.MapContainsT(t, sample2, "source") assert.Equal(t, expectedSource, sample2["source"]) + + scantest.CompareOrDumpJSON(t, ops, "go119_operations.json") } diff --git a/operations_test.go b/internal/builders/operations/operations_test.go similarity index 89% rename from operations_test.go rename to internal/builders/operations/operations_test.go index 5427ef1..d16281e 100644 --- a/operations_test.go +++ b/internal/builders/operations/operations_test.go @@ -1,11 +1,13 @@ // SPDX-FileCopyrightText: Copyright 2015-2025 go-swagger maintainers // SPDX-License-Identifier: Apache-2.0 -package codescan +package operations import ( "testing" + "github.com/go-openapi/codescan/internal/scanner" + "github.com/go-openapi/codescan/internal/scantest" "github.com/go-openapi/testify/v2/require" 
"github.com/go-openapi/spec" @@ -13,25 +15,20 @@ import ( "github.com/go-openapi/testify/v2/assert" ) -func TestOperationsExpression(t *testing.T) { - assert.RegexpT(t, rxOperation, "swagger:operation DELETE /orders/{id} deleteOrder") - assert.RegexpT(t, rxOperation, "swagger:operation GET /v1.2/something deleteOrder") -} - func TestOperationsParser(t *testing.T) { - sctx, err := newScanCtx(&Options{ + sctx, err := scanner.NewScanCtx(&scanner.Options{ Packages: []string{ "./goparsing/classification", "./goparsing/classification/models", "./goparsing/classification/operations", "./goparsing/classification/operations_annotation", }, - WorkDir: "fixtures", + WorkDir: scantest.FixturesDir(), }) require.NoError(t, err) var ops spec.Paths - for _, apiPath := range sctx.app.Operations { - prs := &operationsBuilder{ + for apiPath := range sctx.Operations() { + prs := &Builder{ ctx: sctx, path: apiPath, operations: make(map[string]*spec.Operation), @@ -108,6 +105,8 @@ func TestOperationsParser(t *testing.T) { assert.EqualT(t, "400", rsp.Description) } } + + scantest.CompareOrDumpJSON(t, ops, "classification_operations.json") } func assertAnnotationOperation(t *testing.T, op *spec.Operation, id, summary, description string, tags []string) { diff --git a/internal/builders/parameters/errors.go b/internal/builders/parameters/errors.go new file mode 100644 index 0000000..90f56d5 --- /dev/null +++ b/internal/builders/parameters/errors.go @@ -0,0 +1,9 @@ +// SPDX-FileCopyrightText: Copyright 2015-2025 go-swagger maintainers +// SPDX-License-Identifier: Apache-2.0 + +package parameters + +import "errors" + +// ErrParameters is the sentinel error for all errors originating from the parameters package. 
+var ErrParameters = errors.New("codescan:builders:parameters") diff --git a/internal/builders/parameters/parameters.go b/internal/builders/parameters/parameters.go new file mode 100644 index 0000000..151e188 --- /dev/null +++ b/internal/builders/parameters/parameters.go @@ -0,0 +1,468 @@ +// SPDX-FileCopyrightText: Copyright 2015-2025 go-swagger maintainers +// SPDX-License-Identifier: Apache-2.0 + +package parameters + +import ( + "fmt" + "go/types" + + "github.com/go-openapi/codescan/internal/builders/resolvers" + "github.com/go-openapi/codescan/internal/builders/schema" + "github.com/go-openapi/codescan/internal/ifaces" + "github.com/go-openapi/codescan/internal/logger" + "github.com/go-openapi/codescan/internal/parsers" + "github.com/go-openapi/codescan/internal/scanner" + oaispec "github.com/go-openapi/spec" +) + +const inBody = "body" + +type ParameterBuilder struct { + ctx *scanner.ScanCtx + decl *scanner.EntityDecl + postDecls []*scanner.EntityDecl +} + +func NewBuilder(ctx *scanner.ScanCtx, decl *scanner.EntityDecl) *ParameterBuilder { + return &ParameterBuilder{ + ctx: ctx, + decl: decl, + } +} + +func (p *ParameterBuilder) Build(operations map[string]*oaispec.Operation) error { + // check if there is a swagger:parameters tag that is followed by one or more words, + // these words are the ids of the operations this parameter struct applies to + // once type name is found convert it to a schema, by looking up the schema in the + // parameters dictionary that got passed into this parse method + for _, opid := range p.decl.OperationIDs() { + operation, ok := operations[opid] + if !ok { + operation = new(oaispec.Operation) + operations[opid] = operation + operation.ID = opid + } + logger.DebugLogf(p.ctx.Debug(), "building parameters for: %s", opid) + + // analyze struct body for fields etc + // each exported struct field: + // * gets a type mapped to a go primitive + // * perhaps gets a format + // * has to document the validations that apply for the type 
and the field + // * when the struct field points to a model it becomes a ref: #/definitions/ModelName + // * comments that aren't tags is used as the description + if err := p.buildFromType(p.decl.ObjType(), operation, make(map[string]oaispec.Parameter)); err != nil { + return err + } + } + + return nil +} + +func (p *ParameterBuilder) PostDeclarations() []*scanner.EntityDecl { + return p.postDecls +} + +func (p *ParameterBuilder) buildFromType(otpe types.Type, op *oaispec.Operation, seen map[string]oaispec.Parameter) error { + switch tpe := otpe.(type) { + case *types.Pointer: + return p.buildFromType(tpe.Elem(), op, seen) + case *types.Named: + return p.buildNamedType(tpe, op, seen) + case *types.Alias: + logger.DebugLogf(p.ctx.Debug(), "alias(parameters.buildFromType): got alias %v to %v", tpe, tpe.Rhs()) + return p.buildAlias(tpe, op, seen) + default: + return fmt.Errorf("unhandled type (%T): %s: %w", otpe, tpe.String(), ErrParameters) + } +} + +func (p *ParameterBuilder) buildNamedType(tpe *types.Named, op *oaispec.Operation, seen map[string]oaispec.Parameter) error { + o := tpe.Obj() + if resolvers.IsAny(o) || resolvers.IsStdError(o) { + return fmt.Errorf("%s type not supported in the context of a parameters section definition: %w", o.Name(), ErrParameters) + } + resolvers.MustNotBeABuiltinType(o) + + switch stpe := o.Type().Underlying().(type) { + case *types.Struct: + logger.DebugLogf(p.ctx.Debug(), "build from named type %s: %T", o.Name(), tpe) + if decl, found := p.ctx.DeclForType(o.Type()); found { + return p.buildFromStruct(decl, stpe, op, seen) + } + + return p.buildFromStruct(p.decl, stpe, op, seen) + default: + return fmt.Errorf("unhandled type (%T): %s: %w", stpe, o.Type().Underlying().String(), ErrParameters) + } +} + +func (p *ParameterBuilder) buildAlias(tpe *types.Alias, op *oaispec.Operation, seen map[string]oaispec.Parameter) error { + o := tpe.Obj() + if resolvers.IsAny(o) || resolvers.IsStdError(o) { + return fmt.Errorf("%s type not 
supported in the context of a parameters section definition: %w", o.Name(), ErrParameters) + } + resolvers.MustNotBeABuiltinType(o) + resolvers.MustHaveRightHandSide(tpe) + + rhs := tpe.Rhs() + + // If transparent aliases are enabled, use the underlying type directly without creating a definition + if p.ctx.TransparentAliases() { + return p.buildFromType(rhs, op, seen) + } + + decl, ok := p.ctx.FindModel(o.Pkg().Path(), o.Name()) + if !ok { + return fmt.Errorf("can't find source file for aliased type: %v -> %v: %w", tpe, rhs, ErrParameters) + } + p.postDecls = append(p.postDecls, decl) // mark the left-hand side as discovered + + switch rtpe := rhs.(type) { + // load declaration for named unaliased type + case *types.Named: + o := rtpe.Obj() + if o.Pkg() == nil { + break // builtin + } + decl, found := p.ctx.FindModel(o.Pkg().Path(), o.Name()) + if !found { + return fmt.Errorf("can't find source file for target type of alias: %v -> %v: %w", tpe, rtpe, ErrParameters) + } + p.postDecls = append(p.postDecls, decl) + case *types.Alias: + o := rtpe.Obj() + if o.Pkg() == nil { + break // builtin + } + decl, found := p.ctx.FindModel(o.Pkg().Path(), o.Name()) + if !found { + return fmt.Errorf("can't find source file for target type of alias: %v -> %v: %w", tpe, rtpe, ErrParameters) + } + p.postDecls = append(p.postDecls, decl) + } + + return p.buildFromType(rhs, op, seen) +} + +func (p *ParameterBuilder) buildFromField(fld *types.Var, tpe types.Type, typable ifaces.SwaggerTypable, seen map[string]oaispec.Parameter) error { + logger.DebugLogf(p.ctx.Debug(), "build from field %s: %T", fld.Name(), tpe) + + switch ftpe := tpe.(type) { + case *types.Basic: + return resolvers.SwaggerSchemaForType(ftpe.Name(), typable) + case *types.Struct: + return p.buildFromFieldStruct(ftpe, typable) + case *types.Pointer: + return p.buildFromField(fld, ftpe.Elem(), typable, seen) + case *types.Interface: + return p.buildFromFieldInterface(ftpe, typable) + case *types.Array: + return 
p.buildFromField(fld, ftpe.Elem(), typable.Items(), seen) + case *types.Slice: + return p.buildFromField(fld, ftpe.Elem(), typable.Items(), seen) + case *types.Map: + return p.buildFromFieldMap(ftpe, typable) + case *types.Named: + return p.buildNamedField(ftpe, typable) + case *types.Alias: + logger.DebugLogf(p.ctx.Debug(), "alias(parameters.buildFromField): got alias %v to %v", ftpe, ftpe.Rhs()) // TODO + return p.buildFieldAlias(ftpe, typable, fld, seen) + default: + return fmt.Errorf("unknown type for %s: %T: %w", fld.String(), fld.Type(), ErrParameters) + } +} + +func (p *ParameterBuilder) buildFromFieldStruct(tpe *types.Struct, typable ifaces.SwaggerTypable) error { + sb := schema.NewBuilder(p.ctx, p.decl) + if err := sb.BuildFromType(tpe, typable); err != nil { + return err + } + p.postDecls = append(p.postDecls, sb.PostDeclarations()...) + + return nil +} + +func (p *ParameterBuilder) buildFromFieldMap(ftpe *types.Map, typable ifaces.SwaggerTypable) error { + sch := new(oaispec.Schema) + typable.Schema().Typed("object", "").AdditionalProperties = &oaispec.SchemaOrBool{ + Schema: sch, + } + + sb := schema.NewBuilder(p.ctx, p.decl) + if err := sb.BuildFromType(ftpe.Elem(), schema.NewTypable(sch, typable.Level()+1, p.ctx.SkipExtensions())); err != nil { + return err + } + + return nil +} + +func (p *ParameterBuilder) buildFromFieldInterface(tpe *types.Interface, typable ifaces.SwaggerTypable) error { + sb := schema.NewBuilder(p.ctx, p.decl) + if err := sb.BuildFromType(tpe, typable); err != nil { + return err + } + + p.postDecls = append(p.postDecls, sb.PostDeclarations()...) + + return nil +} + +func (p *ParameterBuilder) buildNamedField(ftpe *types.Named, typable ifaces.SwaggerTypable) error { + o := ftpe.Obj() + if resolvers.IsAny(o) { + // e.g. 
Field interface{} or Field any + return nil + } + if resolvers.IsStdError(o) { + return fmt.Errorf("%s type not supported in the context of a parameter definition: %w", o.Name(), ErrParameters) + } + resolvers.MustNotBeABuiltinType(o) + + decl, found := p.ctx.DeclForType(o.Type()) + if !found { + return fmt.Errorf("unable to find package and source file for: %s: %w", ftpe.String(), ErrParameters) + } + + if resolvers.IsStdTime(o) { + typable.Typed("string", "date-time") + return nil + } + + if sfnm, isf := parsers.StrfmtName(decl.Comments); isf { + typable.Typed("string", sfnm) + return nil + } + + sb := schema.NewBuilder(p.ctx, decl) + sb.InferNames() + if err := sb.BuildFromType(decl.ObjType(), typable); err != nil { + return err + } + + p.postDecls = append(p.postDecls, sb.PostDeclarations()...) + + return nil +} + +func (p *ParameterBuilder) buildFieldAlias(tpe *types.Alias, typable ifaces.SwaggerTypable, fld *types.Var, seen map[string]oaispec.Parameter) error { + o := tpe.Obj() + if resolvers.IsAny(o) { + // e.g. Field interface{} or Field any + _ = typable.Schema() + + return nil // just leave an empty schema + } + if resolvers.IsStdError(o) { + return fmt.Errorf("%s type not supported in the context of a parameter definition: %w", o.Name(), ErrParameters) + } + resolvers.MustNotBeABuiltinType(o) + resolvers.MustHaveRightHandSide(tpe) + + rhs := tpe.Rhs() + + // If transparent aliases are enabled, use the underlying type directly without creating a definition + if p.ctx.TransparentAliases() { + sb := schema.NewBuilder(p.ctx, p.decl) + if err := sb.BuildFromType(rhs, typable); err != nil { + return err + } + p.postDecls = append(p.postDecls, sb.PostDeclarations()...) 
+ return nil + } + + decl, ok := p.ctx.FindModel(o.Pkg().Path(), o.Name()) + if !ok { + return fmt.Errorf("can't find source file for aliased type: %v -> %v: %w", tpe, rhs, ErrParameters) + } + p.postDecls = append(p.postDecls, decl) // mark the left-hand side as discovered + + if typable.In() != inBody || !p.ctx.RefAliases() { + // if ref option is disabled, and always for non-body parameters: just expand the alias + unaliased := types.Unalias(tpe) + return p.buildFromField(fld, unaliased, typable, seen) + } + + // for parameters that are full-fledged schemas, consider expanding or ref'ing + switch rtpe := rhs.(type) { + // load declaration for named RHS type (might be an alias itself) + case *types.Named: + o := rtpe.Obj() + if o.Pkg() == nil { + break // builtin + } + + decl, found := p.ctx.FindModel(o.Pkg().Path(), o.Name()) + if !found { + return fmt.Errorf("can't find source file for target type of alias: %v -> %v: %w", tpe, rtpe, ErrParameters) + } + + return p.makeRef(decl, typable) + case *types.Alias: + o := rtpe.Obj() + if o.Pkg() == nil { + break // builtin + } + + decl, found := p.ctx.FindModel(o.Pkg().Path(), o.Name()) + if !found { + return fmt.Errorf("can't find source file for target type of alias: %v -> %v: %w", tpe, rtpe, ErrParameters) + } + + return p.makeRef(decl, typable) + } + + // anonymous type: just expand it + return p.buildFromField(fld, rhs, typable, seen) +} + +func (p *ParameterBuilder) buildFromStruct(decl *scanner.EntityDecl, tpe *types.Struct, op *oaispec.Operation, seen map[string]oaispec.Parameter) error { + numFields := tpe.NumFields() + + if numFields == 0 { + return nil + } + + sequence := make([]string, 0, numFields) + for fld := range tpe.Fields() { + if fld.Embedded() { + if err := p.buildFromType(fld.Type(), op, seen); err != nil { + return err + } + continue + } + + name, err := p.processParamField(fld, decl, seen) + if err != nil { + return err + } + + if name != "" { + sequence = append(sequence, name) + } + } + + for 
_, k := range sequence { + p := seen[k] + for i, v := range op.Parameters { + if v.Name == k { + op.Parameters = append(op.Parameters[:i], op.Parameters[i+1:]...) + break + } + } + op.Parameters = append(op.Parameters, p) + } + + return nil +} + +// processParamField processes a single non-embedded struct field for parameter building. +// Returns the parameter name if the field was processed, or "" if it was skipped. +func (p *ParameterBuilder) processParamField(fld *types.Var, decl *scanner.EntityDecl, seen map[string]oaispec.Parameter) (string, error) { + if !fld.Exported() { + logger.DebugLogf(p.ctx.Debug(), "skipping field %s because it's not exported", fld.Name()) + return "", nil + } + + afld := resolvers.FindASTField(decl.File, fld.Pos()) + if afld == nil { + logger.DebugLogf(p.ctx.Debug(), "can't find source associated with %s", fld.String()) + return "", nil + } + + if parsers.Ignored(afld.Doc) { + return "", nil + } + + name, ignore, _, _, err := resolvers.ParseJSONTag(afld) + if err != nil { + return "", err + } + if ignore { + return "", nil + } + + in := "query" + // scan for param location first, this changes some behavior down the line + if afld.Doc != nil { + inOverride, ok := parsers.ParamLocation(afld.Doc) + if ok { + in = inOverride + } + } + + ps := seen[name] + ps.In = in + var pty ifaces.SwaggerTypable = paramTypable{&ps, p.ctx.SkipExtensions()} + if in == inBody { + pty = schema.NewTypable(pty.Schema(), 0, p.ctx.SkipExtensions()) + } + + if in == "formData" && afld.Doc != nil && parsers.FileParam(afld.Doc) { + pty.Typed("file", "") + } else if err := p.buildFromField(fld, fld.Type(), pty, seen); err != nil { + return "", err + } + + if strfmtName, ok := parsers.StrfmtName(afld.Doc); ok { + ps.Typed("string", strfmtName) + ps.Ref = oaispec.Ref{} + ps.Items = nil + } + + taggers, err := setupParamTaggers(&ps, name, afld, p.ctx.SkipExtensions(), p.ctx.Debug()) + if err != nil { + return "", err + } + + sp := parsers.NewSectionedParser( + 
parsers.WithSetDescription(func(lines []string) { + ps.Description = parsers.JoinDropLast(lines) + enumDesc := parsers.GetEnumDesc(ps.Extensions) + if enumDesc != "" { + ps.Description += "\n" + enumDesc + } + }), + parsers.WithTaggers(taggers...), + ) + + if err := sp.Parse(afld.Doc); err != nil { + return "", err + } + if ps.In == "path" { + ps.Required = true + } + + if ps.Name == "" { + ps.Name = name + } + + if name != fld.Name() { + resolvers.AddExtension(&ps.VendorExtensible, "x-go-name", fld.Name(), p.ctx.SkipExtensions()) + } + + seen[name] = ps + return name, nil +} + +func (p *ParameterBuilder) makeRef(decl *scanner.EntityDecl, prop ifaces.SwaggerTypable) error { + nm, _ := decl.Names() + ref, err := oaispec.NewRef("#/definitions/" + nm) + if err != nil { + return err + } + + prop.SetRef(ref) + p.postDecls = append(p.postDecls, decl) // mark the $ref target as discovered + + return nil +} + +func spExtensionsSetter(ps *oaispec.Parameter, skipExt bool) func(*oaispec.Extensions) { + return func(exts *oaispec.Extensions) { + for name, value := range *exts { + resolvers.AddExtension(&ps.VendorExtensible, name, value, skipExt) + } + } +} diff --git a/parameters_test.go b/internal/builders/parameters/parameters_test.go similarity index 89% rename from parameters_test.go rename to internal/builders/parameters/parameters_test.go index ea4691a..1e4fb95 100644 --- a/parameters_test.go +++ b/internal/builders/parameters/parameters_test.go @@ -1,11 +1,13 @@ // SPDX-FileCopyrightText: Copyright 2015-2025 go-swagger maintainers // SPDX-License-Identifier: Apache-2.0 -package codescan +package parameters import ( "testing" + "github.com/go-openapi/codescan/internal/scanner" + "github.com/go-openapi/codescan/internal/scantest" "github.com/go-openapi/testify/v2/assert" "github.com/go-openapi/testify/v2/require" @@ -13,6 +15,8 @@ import ( ) const ( + epsilon = 1e-9 + gcBadEnum = "bad_enum" paramID = "id" paramAge = "age" @@ -22,20 +26,25 @@ const ( paramCreated = 
"created" paramFooSlice = "foo_slice" paramBarSlice = "bar_slice" + + // paramNameKey / paramTypeKey mirror swagger:route param tag keys defined in parsers/route_params.go. + // Duplicated here because the production constants are unexported and these swagger tag names are + // part of a stable external contract. + paramNameKey = "name" + paramTypeKey = "type" ) -func getParameter(sctx *scanCtx, nm string) *entityDecl { - for _, v := range sctx.app.Parameters { - param := v +func getParameter(sctx *scanner.ScanCtx, nm string) *scanner.EntityDecl { + for v := range sctx.Parameters() { if v.Ident.Name == nm { - return param + return v } } return nil } func TestScanFileParam(t *testing.T) { - sctx := loadClassificationPkgsCtx(t) + sctx := scantest.LoadClassificationPkgsCtx(t) operations := make(map[string]*spec.Operation) paramNames := []string{ "OrderBodyParams", "MultipleOrderParams", "ComplexerOneParams", "NoParams", @@ -43,8 +52,9 @@ func TestScanFileParam(t *testing.T) { } for _, rn := range paramNames { td := getParameter(sctx, rn) + require.NotNil(t, td) - prs := ¶meterBuilder{ + prs := &ParameterBuilder{ ctx: sctx, decl: td, } @@ -83,10 +93,12 @@ func TestScanFileParam(t *testing.T) { assert.EqualT(t, "formData", fileParam.In) assert.EqualT(t, "file", fileParam.Type) assert.FalseT(t, fileParam.Required) + + scantest.CompareOrDumpJSON(t, operations, "classification_params_file.json") } func TestParamsParser(t *testing.T) { - sctx := loadClassificationPkgsCtx(t) + sctx := scantest.LoadClassificationPkgsCtx(t) operations := make(map[string]*spec.Operation) paramNames := []string{ "OrderBodyParams", "MultipleOrderParams", "ComplexerOneParams", "NoParams", @@ -95,7 +107,7 @@ func TestParamsParser(t *testing.T) { for _, rn := range paramNames { td := getParameter(sctx, rn) - prs := ¶meterBuilder{ + prs := &ParameterBuilder{ ctx: sctx, decl: td, } @@ -113,7 +125,7 @@ func TestParamsParser(t *testing.T) { assert.Len(t, ob.Parameters, 1) bodyParam := ob.Parameters[0] 
assert.EqualT(t, "The order to submit.", bodyParam.Description) - assert.EqualT(t, "body", bodyParam.In) + assert.EqualT(t, inBody, bodyParam.In) assert.EqualT(t, "#/definitions/order", bodyParam.Schema.Ref.String()) assert.TrueT(t, bodyParam.Required) @@ -138,6 +150,8 @@ func TestParamsParser(t *testing.T) { t.Run("someAliasOperation", func(t *testing.T) { assertSomeAliasOperationParams(t, operations) }) + + scantest.CompareOrDumpJSON(t, operations, "classification_params.json") } func assertYetAnotherOperationParams(t *testing.T, operations map[string]*spec.Operation) { @@ -291,7 +305,7 @@ func assertSomeOperationParams(t *testing.T, operations map[string]*spec.Operati case "items": assert.Equal(t, "Items", param.Extensions["x-go-name"]) - assert.EqualT(t, "body", param.In) + assert.EqualT(t, inBody, param.In) assert.NotNil(t, param.Schema) aprop := param.Schema assert.EqualT(t, "array", aprop.Type[0]) @@ -301,7 +315,7 @@ func assertSomeOperationParams(t *testing.T, operations map[string]*spec.Operati itprop := aprop.Items.Schema assert.Len(t, itprop.Properties, 4) assert.Len(t, itprop.Required, 3) - assertProperty(t, itprop, "integer", "id", "int32", "ID") + scantest.AssertProperty(t, itprop, "integer", "id", "int32", "ID") iprop, ok := itprop.Properties["id"] assert.TrueT(t, ok) assert.EqualT(t, "ID of this no model instance.\nids in this application start at 11 and are smaller than 1000", iprop.Description) @@ -313,14 +327,14 @@ func assertSomeOperationParams(t *testing.T, operations map[string]*spec.Operati assert.TrueT(t, iprop.ExclusiveMinimum, "'id' should have had an exclusive minimum") assert.Equal(t, 3, iprop.Default, "Items.ID default value is incorrect") - assertRef(t, itprop, "pet", "Pet", "#/definitions/pet") + scantest.AssertRef(t, itprop, "pet", "Pet", "#/definitions/pet") _, ok = itprop.Properties["pet"] assert.TrueT(t, ok) // if itprop.Ref.String() == "" { // assert.Equal(t, "The Pet to add to this NoModel items bucket.\nPets can appear more 
than once in the bucket", iprop.Description) // } - assertProperty(t, itprop, "integer", "quantity", "int16", "Quantity") + scantest.AssertProperty(t, itprop, "integer", "quantity", "int16", "Quantity") iprop, ok = itprop.Properties["quantity"] assert.TrueT(t, ok) assert.EqualT(t, "The amount of pets to add to this bucket.", iprop.Description) @@ -329,7 +343,7 @@ func assertSomeOperationParams(t *testing.T, operations map[string]*spec.Operati require.NotNil(t, iprop.Maximum) assert.InDeltaT(t, 10.00, *iprop.Maximum, epsilon) - assertProperty(t, itprop, "string", "notes", "", "Notes") + scantest.AssertProperty(t, itprop, "string", "notes", "", "Notes") iprop, ok = itprop.Properties["notes"] assert.TrueT(t, ok) assert.EqualT(t, "Notes to add to this item.\nThis can be used to add special instructions.", iprop.Description) @@ -472,9 +486,9 @@ func assertSomeAliasOperationParams(t *testing.T, operations map[string]*spec.Op } func TestParamsParser_TransparentAliases(t *testing.T) { - sctx, err := newScanCtx(&Options{ + sctx, err := scanner.NewScanCtx(&scanner.Options{ Packages: []string{"./goparsing/transparentalias"}, - WorkDir: "fixtures", + WorkDir: scantest.FixturesDir(), TransparentAliases: true, ScanModels: true, }) @@ -485,7 +499,7 @@ func TestParamsParser_TransparentAliases(t *testing.T) { // Build the operation map from the transparent alias fixtures. 
operations := make(map[string]*spec.Operation) - prs := ¶meterBuilder{ + prs := &ParameterBuilder{ ctx: sctx, decl: td, } @@ -499,7 +513,7 @@ func TestParamsParser_TransparentAliases(t *testing.T) { for i := range op.Parameters { p := &op.Parameters[i] switch p.In { - case "body": + case inBody: bodyParam = p case "query": queryParam = p @@ -523,13 +537,15 @@ func TestParamsParser_TransparentAliases(t *testing.T) { assert.EqualT(t, "aliasQuery", queryParam.Name) assert.EqualT(t, "string", queryParam.Type) assert.Empty(t, queryParam.Ref.String()) + + scantest.CompareOrDumpJSON(t, operations, "transparentalias_params.json") } func TestParameterParser_Issue2007(t *testing.T) { - sctx := loadClassificationPkgsCtx(t) + sctx := scantest.LoadClassificationPkgsCtx(t) operations := make(map[string]*spec.Operation) td := getParameter(sctx, "SetConfiguration") - prs := ¶meterBuilder{ + prs := &ParameterBuilder{ ctx: sctx, decl: td, } @@ -545,13 +561,34 @@ func TestParameterParser_Issue2007(t *testing.T) { require.NotNil(t, sch.AdditionalProperties) require.NotNil(t, sch.AdditionalProperties.Schema) require.TrueT(t, sch.AdditionalProperties.Schema.Type.Contains("string")) + + scantest.CompareOrDumpJSON(t, operations, "classification_params_issue2007.json") } func TestParameterParser_Issue2011(t *testing.T) { - sctx := loadClassificationPkgsCtx(t) + sctx := scantest.LoadClassificationPkgsCtx(t) + operations := make(map[string]*spec.Operation) + td := getParameter(sctx, "NumPlates") + prs := &ParameterBuilder{ + ctx: sctx, + decl: td, + } + require.NoError(t, prs.Build(operations)) + + op := operations["putNumPlate"] + require.NotNil(t, op) + require.Len(t, op.Parameters, 1) + sch := op.Parameters[0].Schema + require.NotNil(t, sch) + + scantest.CompareOrDumpJSON(t, operations, "classification_params_issue2011.json") +} + +func TestGo118ParameterParser_Issue2011(t *testing.T) { + sctx := scantest.LoadGo118ClassificationPkgsCtx(t) operations := make(map[string]*spec.Operation) td 
:= getParameter(sctx, "NumPlates") - prs := ¶meterBuilder{ + prs := &ParameterBuilder{ ctx: sctx, decl: td, } @@ -562,4 +599,6 @@ func TestParameterParser_Issue2011(t *testing.T) { require.Len(t, op.Parameters, 1) sch := op.Parameters[0].Schema require.NotNil(t, sch) + + scantest.CompareOrDumpJSON(t, operations, "go118_params_issue2011.json") } diff --git a/internal/builders/parameters/taggers.go b/internal/builders/parameters/taggers.go new file mode 100644 index 0000000..159fbcc --- /dev/null +++ b/internal/builders/parameters/taggers.go @@ -0,0 +1,62 @@ +package parameters + +import ( + "go/ast" + "slices" + + "github.com/go-openapi/codescan/internal/builders/items" + "github.com/go-openapi/codescan/internal/parsers" + oaispec "github.com/go-openapi/spec" +) + +func setupParamTaggers(param *oaispec.Parameter, name string, afld *ast.Field, skipExt, debug bool) ([]parsers.TagParser, error) { + if param.Ref.String() != "" { + return setupRefParamTaggers(param, skipExt, debug), nil + } + + return setupInlineParamTaggers(param, name, afld, skipExt, debug) +} + +// setupRefParamTaggers configures taggers for a parameter that is a $ref. +func setupRefParamTaggers(param *oaispec.Parameter, skipExt, debug bool) []parsers.TagParser { + return []parsers.TagParser{ + parsers.NewSingleLineTagParser("in", parsers.NewMatchParamIn(param)), + parsers.NewSingleLineTagParser("required", parsers.NewMatchParamRequired(param)), + parsers.NewMultiLineTagParser("Extensions", parsers.NewSetExtensions(spExtensionsSetter(param, skipExt), debug), true), + } +} + +// baseInlineParamTaggers configures taggers for a fully-defined inline parameter. 
+func baseInlineParamTaggers(param *oaispec.Parameter, skipExt, debug bool) []parsers.TagParser { + return []parsers.TagParser{ + parsers.NewSingleLineTagParser("in", parsers.NewMatchParamIn(param)), + parsers.NewSingleLineTagParser("maximum", parsers.NewSetMaximum(paramValidations{param})), + parsers.NewSingleLineTagParser("minimum", parsers.NewSetMinimum(paramValidations{param})), + parsers.NewSingleLineTagParser("multipleOf", parsers.NewSetMultipleOf(paramValidations{param})), + parsers.NewSingleLineTagParser("minLength", parsers.NewSetMinLength(paramValidations{param})), + parsers.NewSingleLineTagParser("maxLength", parsers.NewSetMaxLength(paramValidations{param})), + parsers.NewSingleLineTagParser("pattern", parsers.NewSetPattern(paramValidations{param})), + parsers.NewSingleLineTagParser("collectionFormat", parsers.NewSetCollectionFormat(paramValidations{param})), + parsers.NewSingleLineTagParser("minItems", parsers.NewSetMinItems(paramValidations{param})), + parsers.NewSingleLineTagParser("maxItems", parsers.NewSetMaxItems(paramValidations{param})), + parsers.NewSingleLineTagParser("unique", parsers.NewSetUnique(paramValidations{param})), + parsers.NewSingleLineTagParser("enum", parsers.NewSetEnum(paramValidations{param})), + parsers.NewSingleLineTagParser("default", parsers.NewSetDefault(¶m.SimpleSchema, paramValidations{param})), + parsers.NewSingleLineTagParser("example", parsers.NewSetExample(¶m.SimpleSchema, paramValidations{param})), + parsers.NewSingleLineTagParser("required", parsers.NewSetRequiredParam(param)), + parsers.NewMultiLineTagParser("Extensions", parsers.NewSetExtensions(spExtensionsSetter(param, skipExt), debug), true), + } +} + +func setupInlineParamTaggers(param *oaispec.Parameter, name string, afld *ast.Field, skipExt, debug bool) ([]parsers.TagParser, error) { + // TODO(claude): don't understand why we need this step. Isn't it handled by the recursion already? 
+ if ftped, ok := afld.Type.(*ast.ArrayType); ok { + taggers, err := items.ParseArrayTypes([]parsers.TagParser{}, name, ftped.Elt, param.Items, 0) + if err != nil { + return nil, err + } + return slices.Concat(taggers, baseInlineParamTaggers(param, skipExt, debug)), nil + } + + return baseInlineParamTaggers(param, skipExt, debug), nil +} diff --git a/internal/builders/parameters/typable.go b/internal/builders/parameters/typable.go new file mode 100644 index 0000000..cb27df5 --- /dev/null +++ b/internal/builders/parameters/typable.go @@ -0,0 +1,101 @@ +// SPDX-FileCopyrightText: Copyright 2015-2025 go-swagger maintainers +// SPDX-License-Identifier: Apache-2.0 + +package parameters + +import ( + "github.com/go-openapi/codescan/internal/builders/items" + "github.com/go-openapi/codescan/internal/builders/schema" + "github.com/go-openapi/codescan/internal/ifaces" + "github.com/go-openapi/codescan/internal/parsers" + oaispec "github.com/go-openapi/spec" +) + +var _ ifaces.OperationValidationBuilder = ¶mValidations{} + +type paramTypable struct { + param *oaispec.Parameter + skipExt bool +} + +func (pt paramTypable) In() string { return pt.param.In } + +func (pt paramTypable) Level() int { return 0 } + +func (pt paramTypable) Typed(tpe, format string) { + pt.param.Typed(tpe, format) +} + +func (pt paramTypable) SetRef(ref oaispec.Ref) { + pt.param.Ref = ref +} + +func (pt paramTypable) Items() ifaces.SwaggerTypable { //nolint:ireturn // polymorphic by design + bdt, schema := schema.BodyTypable(pt.param.In, pt.param.Schema, pt.skipExt) + if bdt != nil { + pt.param.Schema = schema + return bdt + } + + if pt.param.Items == nil { + pt.param.Items = new(oaispec.Items) + } + pt.param.Type = "array" + return items.NewTypable(pt.param.Items, 1, pt.param.In) +} + +func (pt paramTypable) Schema() *oaispec.Schema { + if pt.param.In != inBody { + return nil + } + if pt.param.Schema == nil { + pt.param.Schema = new(oaispec.Schema) + } + return pt.param.Schema +} + +func (pt 
paramTypable) AddExtension(key string, value any) { + if pt.param.In == inBody { + pt.Schema().AddExtension(key, value) + } else { + pt.param.AddExtension(key, value) + } +} + +func (pt paramTypable) WithEnum(values ...any) { + pt.param.WithEnum(values...) +} + +func (pt paramTypable) WithEnumDescription(desc string) { + if desc == "" { + return + } + pt.param.AddExtension(parsers.EnumDescExtension(), desc) +} + +type paramValidations struct { + current *oaispec.Parameter +} + +func (sv paramValidations) SetMaximum(val float64, exclusive bool) { + sv.current.Maximum = &val + sv.current.ExclusiveMaximum = exclusive +} + +func (sv paramValidations) SetMinimum(val float64, exclusive bool) { + sv.current.Minimum = &val + sv.current.ExclusiveMinimum = exclusive +} +func (sv paramValidations) SetMultipleOf(val float64) { sv.current.MultipleOf = &val } +func (sv paramValidations) SetMinItems(val int64) { sv.current.MinItems = &val } +func (sv paramValidations) SetMaxItems(val int64) { sv.current.MaxItems = &val } +func (sv paramValidations) SetMinLength(val int64) { sv.current.MinLength = &val } +func (sv paramValidations) SetMaxLength(val int64) { sv.current.MaxLength = &val } +func (sv paramValidations) SetPattern(val string) { sv.current.Pattern = val } +func (sv paramValidations) SetUnique(val bool) { sv.current.UniqueItems = val } +func (sv paramValidations) SetCollectionFormat(val string) { sv.current.CollectionFormat = val } +func (sv paramValidations) SetEnum(val string) { + sv.current.Enum = parsers.ParseEnum(val, &oaispec.SimpleSchema{Type: sv.current.Type, Format: sv.current.Format}) +} +func (sv paramValidations) SetDefault(val any) { sv.current.Default = val } +func (sv paramValidations) SetExample(val any) { sv.current.Example = val } diff --git a/internal/builders/resolvers/assertions.go b/internal/builders/resolvers/assertions.go new file mode 100644 index 0000000..f588ed7 --- /dev/null +++ b/internal/builders/resolvers/assertions.go @@ -0,0 +1,102 @@ +// 
SPDX-FileCopyrightText: Copyright 2015-2025 go-swagger maintainers +// SPDX-License-Identifier: Apache-2.0 + +package resolvers + +import ( + "fmt" + "go/ast" + "go/importer" + "go/types" + + oaispec "github.com/go-openapi/spec" +) + +type internalError string + +func (e internalError) Error() string { + return string(e) +} + +const ( + ErrInternal internalError = "internal error due to a bug or a mishandling of go types AST. This usually indicates a bug in the scanner" +) + +// code assertions to be explicit about the various expectations when entering a function + +func MustNotBeABuiltinType(o *types.TypeName) { + if o.Pkg() != nil { + return + } + + panic(fmt.Errorf("type %q expected not to be a builtin: %w", o.Name(), ErrInternal)) +} + +func MustHaveRightHandSide(a *types.Alias) { + if a.Rhs() != nil { + return + } + + panic(fmt.Errorf("type alias %q expected to declare a right-hand-side: %w", a.Obj().Name(), ErrInternal)) +} + +// IsFieldStringable check if the field type is a scalar. If the field type is +// *ast.StarExpr and is pointer type, check if it refers to a scalar. +// Otherwise, the ",string" directive doesn't apply. +func IsFieldStringable(tpe ast.Expr) bool { + if ident, ok := tpe.(*ast.Ident); ok { + switch ident.Name { + case "int", "int8", "int16", "int32", "int64", + "uint", "uint8", "uint16", "uint32", "uint64", + "float64", "string", "bool": + return true + } + } else if starExpr, ok := tpe.(*ast.StarExpr); ok { + return IsFieldStringable(starExpr.X) + } else { + return false + } + return false +} + +func IsTextMarshaler(tpe types.Type) bool { + encodingPkg, err := importer.Default().Import("encoding") + if err != nil { + return false + } + // Proposal for enhancement: there should be a better way to check this than hardcoding the TextMarshaler iface. 
+ obj := encodingPkg.Scope().Lookup("TextMarshaler") + if obj == nil { + return false + } + ifc, ok := obj.Type().Underlying().(*types.Interface) + if !ok { + return false + } + + return types.Implements(tpe, ifc) +} + +func IsStdTime(o *types.TypeName) bool { + return o.Pkg() != nil && o.Pkg().Name() == "time" && o.Name() == "Time" +} + +func IsStdError(o *types.TypeName) bool { + return o.Pkg() == nil && o.Name() == "error" +} + +func IsStdJSONRawMessage(o *types.TypeName) bool { + return o.Pkg() != nil && o.Pkg().Path() == "encoding/json" && o.Name() == "RawMessage" +} + +func IsAny(o *types.TypeName) bool { + return o.Pkg() == nil && o.Name() == "any" +} + +func AddExtension(ve *oaispec.VendorExtensible, key string, value any, skip bool) { + if skip { + return + } + + ve.AddExtension(key, value) +} diff --git a/internal/builders/resolvers/errors.go b/internal/builders/resolvers/errors.go new file mode 100644 index 0000000..fe137a2 --- /dev/null +++ b/internal/builders/resolvers/errors.go @@ -0,0 +1,9 @@ +// SPDX-FileCopyrightText: Copyright 2015-2025 go-swagger maintainers +// SPDX-License-Identifier: Apache-2.0 + +package resolvers + +import "errors" + +// ErrResolver is the sentinel error for all errors originating from the resolver package. +var ErrResolver = errors.New("codescan:resolver") diff --git a/internal/builders/resolvers/resolvers.go b/internal/builders/resolvers/resolvers.go new file mode 100644 index 0000000..68f24d0 --- /dev/null +++ b/internal/builders/resolvers/resolvers.go @@ -0,0 +1,195 @@ +// SPDX-FileCopyrightText: Copyright 2015-2025 go-swagger maintainers +// SPDX-License-Identifier: Apache-2.0 + +package resolvers + +import ( + "fmt" + "go/ast" + "go/token" + "go/types" + "reflect" + "strconv" + "strings" + + "github.com/go-openapi/codescan/internal/ifaces" + "golang.org/x/tools/go/ast/astutil" +) + +const ( + // Go builtin type names used for type-to-schema mapping. 
+ goTypeByte = "byte" + goTypeFloat64 = "float64" + goTypeInt = "int" + goTypeInt16 = "int16" + goTypeInt32 = "int32" + goTypeInt64 = "int64" +) + +// SwaggerSchemaForType maps all Go builtin types that have Json representation to Swagger/Json types. +// See https://golang.org/pkg/builtin/ and http://swagger.io/specification/ +func SwaggerSchemaForType(typeName string, prop ifaces.SwaggerTypable) error { + switch typeName { + case "bool": + prop.Typed("boolean", "") + case goTypeByte: + prop.Typed("integer", "uint8") + case "complex128", "complex64": + return fmt.Errorf("unsupported builtin %q (no JSON marshaller): %w", typeName, ErrResolver) + case "error": + // TODO: error is often marshalled into a string but not always (e.g. errors package creates + // errors that are marshalled into an empty object), this could be handled the same way + // custom JSON marshallers are handled (in future) + prop.Typed("string", "") + case "float32": + prop.Typed("number", "float") + case goTypeFloat64: + prop.Typed("number", "double") + case goTypeInt: + prop.Typed("integer", goTypeInt64) + case goTypeInt16: + prop.Typed("integer", goTypeInt16) + case goTypeInt32: + prop.Typed("integer", goTypeInt32) + case goTypeInt64: + prop.Typed("integer", goTypeInt64) + case "int8": + prop.Typed("integer", "int8") + case "rune": + prop.Typed("integer", goTypeInt32) + case "string": + prop.Typed("string", "") + case "uint": + prop.Typed("integer", "uint64") + case "uint16": + prop.Typed("integer", "uint16") + case "uint32": + prop.Typed("integer", "uint32") + case "uint64": + prop.Typed("integer", "uint64") + case "uint8": + prop.Typed("integer", "uint8") + case "uintptr": + prop.Typed("integer", "uint64") + case "object": + prop.Typed("object", "") + default: + return fmt.Errorf("unsupported type %q: %w", typeName, ErrResolver) + } + return nil +} + +var unsupportedTypes = map[string]struct{}{ //nolint:gochecknoglobals // immutable lookup table + "complex64": {}, + "complex128": {}, +} + 
+func UnsupportedBuiltinType(tpe types.Type) bool { + unaliased := types.Unalias(tpe) + + switch ftpe := unaliased.(type) { + case *types.Basic: + return UnsupportedBasic(ftpe) + case *types.TypeParam: + return true + case *types.Chan: + return true + case *types.Signature: + return true + case ifaces.Objecter: + return UnsupportedBuiltin(ftpe) + default: + return false + } +} + +func UnsupportedBuiltin(tpe ifaces.Objecter) bool { + o := tpe.Obj() + if o == nil { + return false + } + + if o.Pkg() != nil { + if o.Pkg().Path() == "unsafe" { + return true + } + + return false // not a builtin type + } + + _, found := unsupportedTypes[o.Name()] + + return found +} + +func UnsupportedBasic(tpe *types.Basic) bool { + if tpe.Kind() == types.UnsafePointer { + return true + } + + _, found := unsupportedTypes[tpe.Name()] + + return found +} + +func FindASTField(file *ast.File, pos token.Pos) *ast.Field { + ans, _ := astutil.PathEnclosingInterval(file, pos, pos) + for _, an := range ans { + if at, valid := an.(*ast.Field); valid { + return at + } + } + return nil +} + +type tagOptions []string + +func (t tagOptions) Contain(option string) bool { + for i := 1; i < len(t); i++ { + if t[i] == option { + return true + } + } + return false +} + +func (t tagOptions) Name() string { + return t[0] +} + +func ParseJSONTag(field *ast.Field) (name string, ignore, isString, omitEmpty bool, err error) { + if len(field.Names) > 0 { + name = field.Names[0].Name + } + if field.Tag == nil || len(strings.TrimSpace(field.Tag.Value)) == 0 { + return name, false, false, false, nil + } + + tv, err := strconv.Unquote(field.Tag.Value) + if err != nil { + return name, false, false, false, err + } + + if strings.TrimSpace(tv) != "" { + st := reflect.StructTag(tv) + jsonParts := tagOptions(strings.Split(st.Get("json"), ",")) + + if jsonParts.Contain("string") { + // Need to check if the field type is a scalar. Otherwise, the + // ",string" directive doesn't apply. 
+ isString = IsFieldStringable(field.Type) + } + + omitEmpty = jsonParts.Contain("omitempty") + + switch jsonParts.Name() { + case "-": + return name, true, isString, omitEmpty, nil + case "": + return name, false, isString, omitEmpty, nil + default: + return jsonParts.Name(), false, isString, omitEmpty, nil + } + } + return name, false, false, false, nil +} diff --git a/internal/builders/resolvers/resolvers_test.go b/internal/builders/resolvers/resolvers_test.go new file mode 100644 index 0000000..3dbe9ee --- /dev/null +++ b/internal/builders/resolvers/resolvers_test.go @@ -0,0 +1,34 @@ +package resolvers + +import ( + "testing" + + oaispec "github.com/go-openapi/spec" + "github.com/go-openapi/testify/v2/assert" + "github.com/go-openapi/testify/v2/require" +) + +func TestAddExtension(t *testing.T) { + ve := &oaispec.VendorExtensible{ + Extensions: make(oaispec.Extensions), + } + + key := "x-go-name" + value := "Name" + AddExtension(ve, key, value, false) + veStr, ok := ve.Extensions[key].(string) + require.TrueT(t, ok) + assert.EqualT(t, value, veStr) + + key2 := "x-go-package" + value2 := "schema" + AddExtension(ve, key2, value2, false) + veStr2, ok := ve.Extensions[key2].(string) + require.TrueT(t, ok) + assert.EqualT(t, value2, veStr2) + + key3 := "x-go-class" + value3 := "Spec" + AddExtension(ve, key3, value3, true) + assert.Nil(t, ve.Extensions[key3]) +} diff --git a/internal/builders/responses/errors.go b/internal/builders/responses/errors.go new file mode 100644 index 0000000..8cceaff --- /dev/null +++ b/internal/builders/responses/errors.go @@ -0,0 +1,9 @@ +// SPDX-FileCopyrightText: Copyright 2015-2025 go-swagger maintainers +// SPDX-License-Identifier: Apache-2.0 + +package responses + +import "errors" + +// ErrResponses is the sentinel error for all errors originating from the responses package. 
+var ErrResponses = errors.New("codescan:builders:responses") diff --git a/internal/builders/responses/responses.go b/internal/builders/responses/responses.go new file mode 100644 index 0000000..e08d9eb --- /dev/null +++ b/internal/builders/responses/responses.go @@ -0,0 +1,415 @@ +// SPDX-FileCopyrightText: Copyright 2015-2025 go-swagger maintainers +// SPDX-License-Identifier: Apache-2.0 + +package responses + +import ( + "fmt" + "go/types" + + "github.com/go-openapi/codescan/internal/builders/resolvers" + "github.com/go-openapi/codescan/internal/builders/schema" + "github.com/go-openapi/codescan/internal/ifaces" + "github.com/go-openapi/codescan/internal/logger" + "github.com/go-openapi/codescan/internal/parsers" + "github.com/go-openapi/codescan/internal/scanner" + oaispec "github.com/go-openapi/spec" +) + +type ResponseBuilder struct { + ctx *scanner.ScanCtx + decl *scanner.EntityDecl + postDecls []*scanner.EntityDecl +} + +func NewBuilder(ctx *scanner.ScanCtx, decl *scanner.EntityDecl) *ResponseBuilder { + return &ResponseBuilder{ + ctx: ctx, + decl: decl, + } +} + +func (r *ResponseBuilder) Build(responses map[string]oaispec.Response) error { + // check if there is a swagger:response tag that is followed by one or more words, + // these words are the ids of the operations this parameter struct applies to + // once type name is found convert it to a schema, by looking up the schema in the + // parameters dictionary that got passed into this parse method + + name, _ := r.decl.ResponseNames() + response := responses[name] + logger.DebugLogf(r.ctx.Debug(), "building response: %s", name) + + // analyze doc comment for the model + sp := parsers.NewSectionedParser( + parsers.WithSetDescription(func(lines []string) { + response.Description = parsers.JoinDropLast(lines) + }), + ) + if err := sp.Parse(r.decl.Comments); err != nil { + return err + } + + // analyze struct body for fields etc + // each exported struct field: + // * gets a type mapped to a go primitive + 
// * perhaps gets a format + // * has to document the validations that apply for the type and the field + // * when the struct field points to a model it becomes a ref: #/definitions/ModelName + // * comments that aren't tags is used as the description + if err := r.buildFromType(r.decl.ObjType(), &response, make(map[string]bool)); err != nil { + return err + } + responses[name] = response + + return nil +} + +func (r *ResponseBuilder) PostDeclarations() []*scanner.EntityDecl { + return r.postDecls +} + +func (r *ResponseBuilder) buildFromField(fld *types.Var, tpe types.Type, typable ifaces.SwaggerTypable, seen map[string]bool) error { + logger.DebugLogf(r.ctx.Debug(), "build from field %s: %T", fld.Name(), tpe) + + switch ftpe := tpe.(type) { + case *types.Basic: + return resolvers.SwaggerSchemaForType(ftpe.Name(), typable) + case *types.Struct: + return r.buildFromFieldStruct(ftpe, typable) + case *types.Pointer: + return r.buildFromField(fld, ftpe.Elem(), typable, seen) + case *types.Interface: + return r.buildFromFieldInterface(ftpe, typable) + case *types.Array: + return r.buildFromField(fld, ftpe.Elem(), typable.Items(), seen) + case *types.Slice: + return r.buildFromField(fld, ftpe.Elem(), typable.Items(), seen) + case *types.Map: + return r.buildFromFieldMap(ftpe, typable) + case *types.Named: + return r.buildNamedField(ftpe, typable) + case *types.Alias: + logger.DebugLogf(r.ctx.Debug(), "alias(responses.buildFromField): got alias %v to %v", ftpe, ftpe.Rhs()) + return r.buildFieldAlias(ftpe, typable, fld, seen) + default: + return fmt.Errorf("unknown type for %s: %T: %w", fld.String(), fld.Type(), ErrResponses) + } +} + +func (r *ResponseBuilder) buildFromFieldStruct(ftpe *types.Struct, typable ifaces.SwaggerTypable) error { + sb := schema.NewBuilder(r.ctx, r.decl) + if err := sb.BuildFromType(ftpe, typable); err != nil { + return err + } + + r.postDecls = append(r.postDecls, sb.PostDeclarations()...) 
+ + return nil +} + +func (r *ResponseBuilder) buildFromFieldMap(ftpe *types.Map, typable ifaces.SwaggerTypable) error { + sch := new(oaispec.Schema) + typable.Schema().Typed("object", "").AdditionalProperties = &oaispec.SchemaOrBool{ + Schema: sch, + } + + sb := schema.NewBuilder(r.ctx, r.decl) + if err := sb.BuildFromType(ftpe.Elem(), schema.NewTypable(sch, typable.Level()+1, r.ctx.SkipExtensions())); err != nil { + return err + } + + r.postDecls = append(r.postDecls, sb.PostDeclarations()...) + + return nil +} + +func (r *ResponseBuilder) buildFromFieldInterface(tpe types.Type, typable ifaces.SwaggerTypable) error { + sb := schema.NewBuilder(r.ctx, r.decl) + if err := sb.BuildFromType(tpe, typable); err != nil { + return err + } + + r.postDecls = append(r.postDecls, sb.PostDeclarations()...) + + return nil +} + +func (r *ResponseBuilder) buildFromType(otpe types.Type, resp *oaispec.Response, seen map[string]bool) error { + switch tpe := otpe.(type) { + case *types.Pointer: + return r.buildFromType(tpe.Elem(), resp, seen) + case *types.Named: + return r.buildNamedType(tpe, resp, seen) + case *types.Alias: + logger.DebugLogf(r.ctx.Debug(), "alias(responses.buildFromType): got alias %v to %v", tpe, tpe.Rhs()) + return r.buildAlias(tpe, resp, seen) + default: + return fmt.Errorf("anonymous types are currently not supported for responses: %w", ErrResponses) + } +} + +func (r *ResponseBuilder) buildNamedType(tpe *types.Named, resp *oaispec.Response, seen map[string]bool) error { + o := tpe.Obj() + if resolvers.IsAny(o) || resolvers.IsStdError(o) { + return fmt.Errorf("%s type not supported in the context of a responses section definition: %w", o.Name(), ErrResponses) + } + resolvers.MustNotBeABuiltinType(o) + + switch stpe := o.Type().Underlying().(type) { // TODO(fred): this is wrong without checking for aliases? 
+ case *types.Struct: + logger.DebugLogf(r.ctx.Debug(), "build from type %s: %T", o.Name(), tpe) + if decl, found := r.ctx.DeclForType(o.Type()); found { + return r.buildFromStruct(decl, stpe, resp, seen) + } + return r.buildFromStruct(r.decl, stpe, resp, seen) + + default: + if decl, found := r.ctx.DeclForType(o.Type()); found { + var sch oaispec.Schema + typable := schema.NewTypable(&sch, 0, r.ctx.SkipExtensions()) + + d := decl.Obj() + if resolvers.IsStdTime(d) { + typable.Typed("string", "date-time") + return nil + } + if sfnm, isf := parsers.StrfmtName(decl.Comments); isf { + typable.Typed("string", sfnm) + return nil + } + sb := schema.NewBuilder(r.ctx, decl) + sb.InferNames() + if err := sb.BuildFromType(tpe.Underlying(), typable); err != nil { + return err + } + resp.WithSchema(&sch) + r.postDecls = append(r.postDecls, sb.PostDeclarations()...) + return nil + } + return fmt.Errorf("responses can only be structs, did you mean for %s to be the response body?: %w", tpe.String(), ErrResponses) + } +} + +func (r *ResponseBuilder) buildAlias(tpe *types.Alias, resp *oaispec.Response, seen map[string]bool) error { + // panic("yay") + o := tpe.Obj() + if resolvers.IsAny(o) || resolvers.IsStdError(o) { + // wrong: TODO(fred): see what object exactly we want to build here - figure out with specific tests + return fmt.Errorf("%s type not supported in the context of a responses section definition: %w", o.Name(), ErrResponses) + } + resolvers.MustNotBeABuiltinType(o) + resolvers.MustHaveRightHandSide(tpe) + + rhs := tpe.Rhs() + + // If transparent aliases are enabled, use the underlying type directly without creating a definition + if r.ctx.TransparentAliases() { + return r.buildFromType(rhs, resp, seen) + } + + decl, ok := r.ctx.FindModel(o.Pkg().Path(), o.Name()) + if !ok { + return fmt.Errorf("can't find source file for aliased type: %v -> %v: %w", tpe, rhs, ErrResponses) + } + r.postDecls = append(r.postDecls, decl) // mark the left-hand side as discovered + + if 
!r.ctx.RefAliases() { + // expand alias + unaliased := types.Unalias(tpe) + return r.buildFromType(unaliased.Underlying(), resp, seen) + } + + switch rtpe := rhs.(type) { + // load declaration for named unaliased type + case *types.Named: + o := rtpe.Obj() + if o.Pkg() == nil { + break // builtin + } + + typable := schema.NewTypable(&oaispec.Schema{}, 0, r.ctx.SkipExtensions()) + return r.makeRef(decl, typable) + case *types.Alias: + o := rtpe.Obj() + if o.Pkg() == nil { + break // builtin + } + + typable := schema.NewTypable(&oaispec.Schema{}, 0, r.ctx.SkipExtensions()) + + return r.makeRef(decl, typable) + } + + return r.buildFromType(rhs, resp, seen) +} + +func (r *ResponseBuilder) buildNamedField(ftpe *types.Named, typable ifaces.SwaggerTypable) error { + decl, found := r.ctx.DeclForType(ftpe.Obj().Type()) + if !found { + return fmt.Errorf("unable to find package and source file for: %s: %w", ftpe.String(), ErrResponses) + } + + d := decl.Obj() + if resolvers.IsStdTime(d) { + typable.Typed("string", "date-time") + return nil + } + + if sfnm, isf := parsers.StrfmtName(decl.Comments); isf { + typable.Typed("string", sfnm) + return nil + } + + sb := schema.NewBuilder(r.ctx, decl) + sb.InferNames() + if err := sb.BuildFromType(decl.ObjType(), typable); err != nil { + return err + } + + r.postDecls = append(r.postDecls, sb.PostDeclarations()...) + + return nil +} + +func (r *ResponseBuilder) buildFieldAlias(tpe *types.Alias, typable ifaces.SwaggerTypable, fld *types.Var, seen map[string]bool) error { + _ = fld + _ = seen + o := tpe.Obj() + if resolvers.IsAny(o) { + // e.g. 
Field interface{} or Field any + _ = typable.Schema() + + return nil // just leave an empty schema + } + + // If transparent aliases are enabled, use the underlying type directly without creating a definition + if r.ctx.TransparentAliases() { + sb := schema.NewBuilder(r.ctx, r.decl) + if err := sb.BuildFromType(tpe.Rhs(), typable); err != nil { + return err + } + r.postDecls = append(r.postDecls, sb.PostDeclarations()...) + return nil + } + + decl, ok := r.ctx.FindModel(o.Pkg().Path(), o.Name()) + if !ok { + return fmt.Errorf("can't find source file for aliased type: %v: %w", tpe, ErrResponses) + } + r.postDecls = append(r.postDecls, decl) // mark the left-hand side as discovered + + return r.makeRef(decl, typable) +} + +func (r *ResponseBuilder) buildFromStruct(decl *scanner.EntityDecl, tpe *types.Struct, resp *oaispec.Response, seen map[string]bool) error { + if tpe.NumFields() == 0 { + return nil + } + + for fld := range tpe.Fields() { + if fld.Embedded() { + if err := r.buildFromType(fld.Type(), resp, seen); err != nil { + return err + } + continue + } + if fld.Anonymous() { + logger.DebugLogf(r.ctx.Debug(), "skipping anonymous field") + continue + } + + if err := r.processResponseField(fld, decl, resp, seen); err != nil { + return err + } + } + + for k := range resp.Headers { + if !seen[k] { + delete(resp.Headers, k) + } + } + return nil +} + +func (r *ResponseBuilder) processResponseField(fld *types.Var, decl *scanner.EntityDecl, resp *oaispec.Response, seen map[string]bool) error { + if !fld.Exported() { + return nil + } + + afld := resolvers.FindASTField(decl.File, fld.Pos()) + if afld == nil { + logger.DebugLogf(r.ctx.Debug(), "can't find source associated with %s", fld.String()) + return nil + } + + if parsers.Ignored(afld.Doc) { + logger.DebugLogf(r.ctx.Debug(), "field %v is deliberately ignored", fld) + return nil + } + + name, ignore, _, _, err := resolvers.ParseJSONTag(afld) + if err != nil { + return err + } + if ignore { + return nil + } + + // scan 
for param location first, this changes some behavior down the line + in, _ := parsers.ParamLocation(afld.Doc) + ps := resp.Headers[name] + + // support swagger:file for response + // An API operation can return a file, such as an image or PDF. In this case, + // define the response schema with type: file and specify the appropriate MIME types in the produces section. + if afld.Doc != nil && parsers.FileParam(afld.Doc) { + resp.Schema = &oaispec.Schema{} + resp.Schema.Typed("file", "") + } else { + logger.DebugLogf(r.ctx.Debug(), "build response %v (%v) (not a file)", fld, fld.Type()) + if err := r.buildFromField(fld, fld.Type(), responseTypable{in, &ps, resp, r.ctx.SkipExtensions()}, seen); err != nil { + return err + } + } + + if strfmtName, ok := parsers.StrfmtName(afld.Doc); ok { + ps.Typed("string", strfmtName) + } + + taggers, err := setupResponseHeaderTaggers(&ps, name, afld) + if err != nil { + return err + } + + sp := parsers.NewSectionedParser( + parsers.WithSetDescription(func(lines []string) { ps.Description = parsers.JoinDropLast(lines) }), + parsers.WithTaggers(taggers...), + ) + + if err := sp.Parse(afld.Doc); err != nil { + return err + } + + if in != "body" { + seen[name] = true + if resp.Headers == nil { + resp.Headers = make(map[string]oaispec.Header) + } + resp.Headers[name] = ps + } + + return nil +} + +func (r *ResponseBuilder) makeRef(decl *scanner.EntityDecl, prop ifaces.SwaggerTypable) error { + nm, _ := decl.Names() + ref, err := oaispec.NewRef("#/definitions/" + nm) + if err != nil { + return err + } + + prop.SetRef(ref) + r.postDecls = append(r.postDecls, decl) // mark the $ref target as discovered + + return nil +} diff --git a/responses_test.go b/internal/builders/responses/responses_test.go similarity index 86% rename from responses_test.go rename to internal/builders/responses/responses_test.go index 8d23860..2117559 100644 --- a/responses_test.go +++ b/internal/builders/responses/responses_test.go @@ -1,28 +1,27 @@ // 
SPDX-FileCopyrightText: Copyright 2015-2025 go-swagger maintainers // SPDX-License-Identifier: Apache-2.0 -package codescan +package responses import ( "testing" + "github.com/go-openapi/codescan/internal/scanner" + "github.com/go-openapi/codescan/internal/scantest" "github.com/go-openapi/testify/v2/assert" "github.com/go-openapi/testify/v2/require" "github.com/go-openapi/spec" ) -func getResponse(sctx *scanCtx, nm string) *entityDecl { - for _, v := range sctx.app.Responses { - if v.Ident.Name == nm { - return v - } - } - return nil -} +const ( + epsilon = 1e-9 + + paramID = "id" +) func TestParseResponses(t *testing.T) { - sctx := loadClassificationPkgsCtx(t) + sctx := scantest.LoadClassificationPkgsCtx(t) responses := make(map[string]spec.Response) responseNames := []string{ "ComplexerOne", "SimpleOnes", "SimpleOnesFunc", "ComplexerPointerOne", @@ -31,7 +30,8 @@ func TestParseResponses(t *testing.T) { } for _, rn := range responseNames { td := getResponse(sctx, rn) - prs := &responseBuilder{ + require.NotNil(t, td) + prs := &ResponseBuilder{ ctx: sctx, decl: td, } @@ -64,6 +64,8 @@ func TestParseResponses(t *testing.T) { assert.TrueT(t, ok) assert.NotNil(t, res.Schema) assert.EqualT(t, "#/definitions/user", res.Schema.Ref.String()) + + scantest.CompareOrDumpJSON(t, responses, "classification_responses.json") } func assertComplexerOneHeaders(t *testing.T, responses map[string]spec.Response) { @@ -251,7 +253,7 @@ func assertSomeResponseHeaders(t *testing.T, responses map[string]spec.Response) itprop := aprop.Items.Schema assert.Len(t, itprop.Properties, 4) assert.Len(t, itprop.Required, 3) - assertProperty(t, itprop, "integer", "id", "int32", "ID") + scantest.AssertProperty(t, itprop, "integer", "id", "int32", "ID") iprop, ok := itprop.Properties["id"] assert.TrueT(t, ok) @@ -263,14 +265,14 @@ func assertSomeResponseHeaders(t *testing.T, responses map[string]spec.Response) assert.InDeltaT(t, 10.00, *iprop.Minimum, epsilon) assert.TrueT(t, iprop.ExclusiveMinimum, 
"'id' should have had an exclusive minimum") - assertRef(t, itprop, "pet", "Pet", "#/definitions/pet") + scantest.AssertRef(t, itprop, "pet", "Pet", "#/definitions/pet") _, ok = itprop.Properties["pet"] assert.TrueT(t, ok) // if itprop.Ref.String() == "" { // assert.Equal(t, "The Pet to add to this NoModel items bucket.\nPets can appear more than once in the bucket", iprop.Description) // } - assertProperty(t, itprop, "integer", "quantity", "int16", "Quantity") + scantest.AssertProperty(t, itprop, "integer", "quantity", "int16", "Quantity") iprop, ok = itprop.Properties["quantity"] assert.TrueT(t, ok) assert.EqualT(t, "The amount of pets to add to this bucket.", iprop.Description) @@ -279,16 +281,16 @@ func assertSomeResponseHeaders(t *testing.T, responses map[string]spec.Response) require.NotNil(t, iprop.Maximum) assert.InDeltaT(t, 10.00, *iprop.Maximum, epsilon) - assertProperty(t, itprop, "string", "notes", "", "Notes") + scantest.AssertProperty(t, itprop, "string", "notes", "", "Notes") iprop, ok = itprop.Properties["notes"] assert.TrueT(t, ok) assert.EqualT(t, "Notes to add to this item.\nThis can be used to add special instructions.", iprop.Description) } func TestParseResponses_TransparentAliases(t *testing.T) { - sctx, err := newScanCtx(&Options{ + sctx, err := scanner.NewScanCtx(&scanner.Options{ Packages: []string{"./goparsing/transparentalias"}, - WorkDir: "fixtures", + WorkDir: scantest.FixturesDir(), TransparentAliases: true, ScanModels: true, }) @@ -299,7 +301,7 @@ func TestParseResponses_TransparentAliases(t *testing.T) { // Build the response map using the transparent alias fixtures. 
responses := make(map[string]spec.Response) - prs := &responseBuilder{ + prs := &ResponseBuilder{ ctx: sctx, decl: td, } @@ -317,13 +319,15 @@ func TestParseResponses_TransparentAliases(t *testing.T) { assert.TrueT(t, payload.Type.Contains("object")) assert.Empty(t, payload.Ref.String()) assert.Equal(t, "Payload", payload.Extensions["x-go-name"]) + + scantest.CompareOrDumpJSON(t, responses, "transparentalias_responses.json") } func TestParseResponses_Issue2007(t *testing.T) { - sctx := loadClassificationPkgsCtx(t) + sctx := scantest.LoadClassificationPkgsCtx(t) responses := make(map[string]spec.Response) td := getResponse(sctx, "GetConfiguration") - prs := &responseBuilder{ + prs := &ResponseBuilder{ ctx: sctx, decl: td, } @@ -337,13 +341,16 @@ func TestParseResponses_Issue2007(t *testing.T) { require.NotNil(t, resp.Schema.AdditionalProperties) require.NotNil(t, resp.Schema.AdditionalProperties.Schema) require.TrueT(t, resp.Schema.AdditionalProperties.Schema.Type.Contains("string")) + + scantest.CompareOrDumpJSON(t, responses, "classification_responses_issue2007.json") } func TestParseResponses_Issue2011(t *testing.T) { - sctx := loadClassificationPkgsCtx(t) + sctx := scantest.LoadClassificationPkgsCtx(t) responses := make(map[string]spec.Response) td := getResponse(sctx, "NumPlatesResp") - prs := &responseBuilder{ + require.NotNil(t, td) + prs := &ResponseBuilder{ ctx: sctx, decl: td, } @@ -352,17 +359,19 @@ func TestParseResponses_Issue2011(t *testing.T) { resp := responses["NumPlatesResp"] require.Empty(t, resp.Headers) require.NotNil(t, resp.Schema) + + scantest.CompareOrDumpJSON(t, responses, "classification_responses_issue2011.json") } func TestParseResponses_Issue2145(t *testing.T) { - sctx, err := newScanCtx(&Options{ + sctx, err := scanner.NewScanCtx(&scanner.Options{ Packages: []string{"./goparsing/product/..."}, - WorkDir: "fixtures", + WorkDir: scantest.FixturesDir(), }) require.NoError(t, err) responses := make(map[string]spec.Response) td := 
getResponse(sctx, "GetProductsResponse") - prs := &responseBuilder{ + prs := &ResponseBuilder{ ctx: sctx, decl: td, } @@ -372,4 +381,32 @@ func TestParseResponses_Issue2145(t *testing.T) { require.NotNil(t, resp.Schema) assert.NotEmpty(t, prs.postDecls) // should have Product + + scantest.CompareOrDumpJSON(t, responses, "product_responses.json") +} + +func getResponse(sctx *scanner.ScanCtx, nm string) *scanner.EntityDecl { + for v := range sctx.Responses() { + if v.Ident.Name == nm { + return v + } + } + return nil +} + +func TestGo118ParseResponses_Issue2011(t *testing.T) { + sctx := scantest.LoadGo118ClassificationPkgsCtx(t) + responses := make(map[string]spec.Response) + td := getResponse(sctx, "NumPlatesResp") + prs := &ResponseBuilder{ + ctx: sctx, + decl: td, + } + require.NoError(t, prs.Build(responses)) + + resp := responses["NumPlatesResp"] + require.Empty(t, resp.Headers) + require.NotNil(t, resp.Schema) + + scantest.CompareOrDumpJSON(t, responses, "go118_responses_issue2011.json") } diff --git a/internal/builders/responses/taggers.go b/internal/builders/responses/taggers.go new file mode 100644 index 0000000..2c93d06 --- /dev/null +++ b/internal/builders/responses/taggers.go @@ -0,0 +1,43 @@ +package responses + +import ( + "go/ast" + "slices" + + "github.com/go-openapi/codescan/internal/builders/items" + "github.com/go-openapi/codescan/internal/parsers" + oaispec "github.com/go-openapi/spec" +) + +// baseResponseHeaderTaggers configures taggers for a response header field. 
+func baseResponseHeaderTaggers(header *oaispec.Header) []parsers.TagParser { + return []parsers.TagParser{ + parsers.NewSingleLineTagParser("maximum", parsers.NewSetMaximum(headerValidations{header})), + parsers.NewSingleLineTagParser("minimum", parsers.NewSetMinimum(headerValidations{header})), + parsers.NewSingleLineTagParser("multipleOf", parsers.NewSetMultipleOf(headerValidations{header})), + parsers.NewSingleLineTagParser("minLength", parsers.NewSetMinLength(headerValidations{header})), + parsers.NewSingleLineTagParser("maxLength", parsers.NewSetMaxLength(headerValidations{header})), + parsers.NewSingleLineTagParser("pattern", parsers.NewSetPattern(headerValidations{header})), + parsers.NewSingleLineTagParser("collectionFormat", parsers.NewSetCollectionFormat(headerValidations{header})), + parsers.NewSingleLineTagParser("minItems", parsers.NewSetMinItems(headerValidations{header})), + parsers.NewSingleLineTagParser("maxItems", parsers.NewSetMaxItems(headerValidations{header})), + parsers.NewSingleLineTagParser("unique", parsers.NewSetUnique(headerValidations{header})), + parsers.NewSingleLineTagParser("enum", parsers.NewSetEnum(headerValidations{header})), + parsers.NewSingleLineTagParser("default", parsers.NewSetDefault(&header.SimpleSchema, headerValidations{header})), + parsers.NewSingleLineTagParser("example", parsers.NewSetExample(&header.SimpleSchema, headerValidations{header})), + } +} + +func setupResponseHeaderTaggers(header *oaispec.Header, name string, afld *ast.Field) ([]parsers.TagParser, error) { + // TODO(claude): don't understand why we need this step. Isn't it handled by the recursion already? 
+ if ftped, ok := afld.Type.(*ast.ArrayType); ok { + taggers, err := items.ParseArrayTypes([]parsers.TagParser{}, name, ftped.Elt, header.Items, 0) + if err != nil { + return nil, err + } + + return slices.Concat(taggers, baseResponseHeaderTaggers(header)), nil + } + + return baseResponseHeaderTaggers(header), nil +} diff --git a/internal/builders/responses/typable.go b/internal/builders/responses/typable.go new file mode 100644 index 0000000..f9c9cdb --- /dev/null +++ b/internal/builders/responses/typable.go @@ -0,0 +1,132 @@ +// SPDX-FileCopyrightText: Copyright 2015-2025 go-swagger maintainers +// SPDX-License-Identifier: Apache-2.0 + +package responses + +import ( + "github.com/go-openapi/codescan/internal/builders/items" + "github.com/go-openapi/codescan/internal/builders/schema" + "github.com/go-openapi/codescan/internal/ifaces" + "github.com/go-openapi/codescan/internal/parsers" + oaispec "github.com/go-openapi/spec" +) + +var _ ifaces.ValidationBuilder = &headerValidations{} + +type responseTypable struct { + in string + header *oaispec.Header + response *oaispec.Response + skipExt bool +} + +func (ht responseTypable) In() string { return ht.in } + +func (ht responseTypable) Level() int { return 0 } + +func (ht responseTypable) Typed(tpe, format string) { + ht.header.Typed(tpe, format) +} + +func (ht responseTypable) Items() ifaces.SwaggerTypable { //nolint:ireturn // polymorphic by design + bdt, schema := schema.BodyTypable(ht.in, ht.response.Schema, ht.skipExt) + if bdt != nil { + ht.response.Schema = schema + return bdt + } + + if ht.header.Items == nil { + ht.header.Items = new(oaispec.Items) + } + + ht.header.Type = "array" + + return items.NewTypable(ht.header.Items, 1, "header") +} + +func (ht responseTypable) SetRef(ref oaispec.Ref) { + // having trouble seeing the usefulness of this one here + ht.Schema().Ref = ref +} + +func (ht responseTypable) Schema() *oaispec.Schema { + if ht.response.Schema == nil { + ht.response.Schema = new(oaispec.Schema) 
+ } + + return ht.response.Schema +} + +func (ht responseTypable) SetSchema(schema *oaispec.Schema) { + ht.response.Schema = schema +} + +func (ht responseTypable) CollectionOf(items *oaispec.Items, format string) { + ht.header.CollectionOf(items, format) +} + +func (ht responseTypable) AddExtension(key string, value any) { + ht.response.AddExtension(key, value) +} + +func (ht responseTypable) WithEnum(values ...any) { + ht.header.WithEnum(values) +} + +func (ht responseTypable) WithEnumDescription(_ string) { + // no +} + +type headerValidations struct { + current *oaispec.Header +} + +func (sv headerValidations) SetMaximum(val float64, exclusive bool) { + sv.current.Maximum = &val + sv.current.ExclusiveMaximum = exclusive +} + +func (sv headerValidations) SetMinimum(val float64, exclusive bool) { + sv.current.Minimum = &val + sv.current.ExclusiveMinimum = exclusive +} + +func (sv headerValidations) SetMultipleOf(val float64) { + sv.current.MultipleOf = &val +} + +func (sv headerValidations) SetMinItems(val int64) { + sv.current.MinItems = &val +} + +func (sv headerValidations) SetMaxItems(val int64) { + sv.current.MaxItems = &val +} + +func (sv headerValidations) SetMinLength(val int64) { + sv.current.MinLength = &val +} + +func (sv headerValidations) SetMaxLength(val int64) { + sv.current.MaxLength = &val +} + +func (sv headerValidations) SetPattern(val string) { + sv.current.Pattern = val +} + +func (sv headerValidations) SetUnique(val bool) { + sv.current.UniqueItems = val +} + +func (sv headerValidations) SetCollectionFormat(val string) { + sv.current.CollectionFormat = val +} + +func (sv headerValidations) SetEnum(val string) { + sv.current.Enum = parsers.ParseEnum(val, &oaispec.SimpleSchema{Type: sv.current.Type, Format: sv.current.Format}) +} + +func (sv headerValidations) SetDefault(val any) { sv.current.Default = val } + +func (sv headerValidations) SetExample(val any) { sv.current.Example = val } diff --git a/internal/builders/routes/errors.go 
b/internal/builders/routes/errors.go new file mode 100644 index 0000000..5c484d2 --- /dev/null +++ b/internal/builders/routes/errors.go @@ -0,0 +1,9 @@ +// SPDX-FileCopyrightText: Copyright 2015-2025 go-swagger maintainers +// SPDX-License-Identifier: Apache-2.0 + +package routes + +import "errors" + +// ErrRoutes is the sentinel error for all errors originating from the routes builder package. +var ErrRoutes = errors.New("codescan:builders:routes") diff --git a/internal/builders/routes/routes.go b/internal/builders/routes/routes.go new file mode 100644 index 0000000..5d3687f --- /dev/null +++ b/internal/builders/routes/routes.go @@ -0,0 +1,64 @@ +// SPDX-FileCopyrightText: Copyright 2015-2025 go-swagger maintainers +// SPDX-License-Identifier: Apache-2.0 + +package routes + +import ( + "fmt" + + "github.com/go-openapi/codescan/internal/builders/operations" + "github.com/go-openapi/codescan/internal/parsers" + "github.com/go-openapi/codescan/internal/scanner" + oaispec "github.com/go-openapi/spec" +) + +type Builder struct { + ctx *scanner.ScanCtx + route parsers.ParsedPathContent + responses map[string]oaispec.Response + operations map[string]*oaispec.Operation + parameters []*oaispec.Parameter + definitions map[string]oaispec.Schema +} + +type Inputs struct { + Responses map[string]oaispec.Response + Operations map[string]*oaispec.Operation + Definitions map[string]oaispec.Schema +} + +func NewBuilder(ctx *scanner.ScanCtx, route parsers.ParsedPathContent, inputs Inputs) *Builder { + return &Builder{ + ctx: ctx, + route: route, + responses: inputs.Responses, + operations: inputs.Operations, + definitions: inputs.Definitions, + } +} + +func (r *Builder) Build(tgt *oaispec.Paths) error { + pthObj := tgt.Paths[r.route.Path] + op := operations.SetPathOperation( + r.route.Method, r.route.ID, + &pthObj, r.operations[r.route.ID], + ) + op.Tags = r.route.Tags + + sp := parsers.NewSectionedParser( + parsers.WithSetTitle(func(lines []string) { op.Summary = 
parsers.JoinDropLast(lines) }), + parsers.WithSetDescription(func(lines []string) { op.Description = parsers.JoinDropLast(lines) }), + parsers.WithTaggers(r.routeTaggers(op)...), + ) + + if err := sp.Parse(r.route.Remaining); err != nil { + return fmt.Errorf("operation (%s): %w", op.ID, err) + } + + if tgt.Paths == nil { + tgt.Paths = make(map[string]oaispec.PathItem) + } + tgt.Paths[r.route.Path] = pthObj + + return nil +} diff --git a/routes_test.go b/internal/builders/routes/routes_test.go similarity index 90% rename from routes_test.go rename to internal/builders/routes/routes_test.go index 9f6c216..5c99d75 100644 --- a/routes_test.go +++ b/internal/builders/routes/routes_test.go @@ -1,31 +1,29 @@ // SPDX-FileCopyrightText: Copyright 2015-2025 go-swagger maintainers // SPDX-License-Identifier: Apache-2.0 -package codescan +package routes import ( "testing" - "github.com/go-openapi/testify/v2/require" - + "github.com/go-openapi/codescan/internal/scanner" + "github.com/go-openapi/codescan/internal/scantest" "github.com/go-openapi/testify/v2/assert" + "github.com/go-openapi/testify/v2/require" - "github.com/go-openapi/spec" + oaispec "github.com/go-openapi/spec" ) -func TestRouteExpression(t *testing.T) { - assert.RegexpT(t, rxRoute, "swagger:route DELETE /orders/{id} deleteOrder") - assert.RegexpT(t, rxRoute, "swagger:route GET /v1.2/something deleteOrder") -} +const epsilon = 1e-9 func TestRoutesParser(t *testing.T) { - sctx := loadClassificationPkgsCtx(t) - var ops spec.Paths - for _, apiPath := range sctx.app.Routes { - prs := &routesBuilder{ + sctx := scantest.LoadClassificationPkgsCtx(t) + var ops oaispec.Paths + for apiPath := range sctx.Routes() { + prs := &Builder{ ctx: sctx, route: apiPath, - operations: make(map[string]*spec.Operation), + operations: make(map[string]*oaispec.Operation), } require.NoError(t, prs.Build(&ops)) } @@ -33,7 +31,7 @@ func TestRoutesParser(t *testing.T) { assert.Len(t, ops.Paths, 3) po, ok := ops.Paths["/pets"] - ext := 
make(spec.Extensions) + ext := make(oaispec.Extensions) ext.Add("x-some-flag", "true") assert.TrueT(t, ok) assert.NotNil(t, po.Get) @@ -57,7 +55,7 @@ func TestRoutesParser(t *testing.T) { ) po, ok = ops.Paths["/orders"] - ext = make(spec.Extensions) + ext = make(oaispec.Extensions) ext.Add("x-some-flag", "false") ext.Add("x-some-list", []string{"item1", "item2", "item3"}) ext.Add("x-some-object", map[string]any{ @@ -128,25 +126,27 @@ func TestRoutesParser(t *testing.T) { assert.TrueT(t, ok) assert.EqualT(t, "Some description", rsp.Description) assert.Empty(t, rsp.Ref.String()) + + scantest.CompareOrDumpJSON(t, ops, "classification_routes.json") } func TestRoutesParserBody(t *testing.T) { - sctx, err := newScanCtx(&Options{ + sctx, err := scanner.NewScanCtx(&scanner.Options{ Packages: []string{ "./goparsing/classification", "./goparsing/classification/models", "./goparsing/classification/operations", "./goparsing/classification/operations_body", }, - WorkDir: "fixtures", + WorkDir: scantest.FixturesDir(), }) require.NoError(t, err) - var ops spec.Paths - for _, apiPath := range sctx.app.Routes { - prs := &routesBuilder{ + var ops oaispec.Paths + for apiPath := range sctx.Routes() { + prs := &Builder{ ctx: sctx, route: apiPath, - operations: make(map[string]*spec.Operation), + operations: make(map[string]*oaispec.Operation), } require.NoError(t, prs.Build(&ops)) } @@ -228,9 +228,11 @@ func TestRoutesParserBody(t *testing.T) { ) validateRoutesParameters(t, ops) + + scantest.CompareOrDumpJSON(t, ops, "classification_routes_body.json") } -func validateRoutesParameters(t *testing.T, ops spec.Paths) { +func validateRoutesParameters(t *testing.T, ops oaispec.Paths) { po := ops.Paths["/pets"] assert.Len(t, po.Post.Parameters, 2) @@ -338,7 +340,7 @@ func validateRoutesParameters(t *testing.T, ops spec.Paths) { assert.Empty(t, p.Type) } -func assertOperation(t *testing.T, op *spec.Operation, id, summary, description string, tags, scopes []string, extensions spec.Extensions) { 
+func assertOperation(t *testing.T, op *oaispec.Operation, id, summary, description string, tags, scopes []string, extensions oaispec.Extensions) { t.Helper() assert.NotNil(t, op) @@ -374,7 +376,7 @@ func assertOperation(t *testing.T, op *spec.Operation, id, summary, description assert.Equal(t, extensions, ext) } -func assertOperationBody(t *testing.T, op *spec.Operation, id, summary, description string, tags, scopes []string) { +func assertOperationBody(t *testing.T, op *oaispec.Operation, id, summary, description string, tags, scopes []string) { assert.NotNil(t, op) assert.EqualT(t, summary, op.Summary) assert.EqualT(t, description, op.Description) diff --git a/internal/builders/routes/setters.go b/internal/builders/routes/setters.go new file mode 100644 index 0000000..ab6b191 --- /dev/null +++ b/internal/builders/routes/setters.go @@ -0,0 +1,48 @@ +// SPDX-FileCopyrightText: Copyright 2015-2025 go-swagger maintainers +// SPDX-License-Identifier: Apache-2.0 + +package routes + +import "github.com/go-openapi/spec" + +func opConsumesSetter(op *spec.Operation) func([]string) { + return func(consumes []string) { op.Consumes = consumes } +} + +func opProducesSetter(op *spec.Operation) func([]string) { + return func(produces []string) { op.Produces = produces } +} + +func opSchemeSetter(op *spec.Operation) func([]string) { + return func(schemes []string) { op.Schemes = schemes } +} + +func opSecurityDefsSetter(op *spec.Operation) func([]map[string][]string) { + return func(securityDefs []map[string][]string) { op.Security = securityDefs } +} + +func opResponsesSetter(op *spec.Operation) func(*spec.Response, map[int]spec.Response) { + return func(def *spec.Response, scr map[int]spec.Response) { + if op.Responses == nil { + op.Responses = new(spec.Responses) + } + op.Responses.Default = def + op.Responses.StatusCodeResponses = scr + } +} + +func opParamSetter(op *spec.Operation) func([]*spec.Parameter) { + return func(params []*spec.Parameter) { + for _, v := range 
params { + op.AddParam(v) + } + } +} + +func opExtensionsSetter(op *spec.Operation) func(*spec.Extensions) { + return func(exts *spec.Extensions) { + for name, value := range *exts { + op.AddExtension(name, value) + } + } +} diff --git a/internal/builders/routes/taggers.go b/internal/builders/routes/taggers.go new file mode 100644 index 0000000..cb2b15f --- /dev/null +++ b/internal/builders/routes/taggers.go @@ -0,0 +1,19 @@ +package routes + +import ( + "github.com/go-openapi/codescan/internal/parsers" + oaispec "github.com/go-openapi/spec" +) + +func (r *Builder) routeTaggers(op *oaispec.Operation) []parsers.TagParser { + return []parsers.TagParser{ + parsers.NewMultiLineTagParser("Consumes", parsers.NewConsumesDropEmptyParser(opConsumesSetter(op)), false), + parsers.NewMultiLineTagParser("Produces", parsers.NewProducesDropEmptyParser(opProducesSetter(op)), false), + parsers.NewSingleLineTagParser("Schemes", parsers.NewSetSchemes(opSchemeSetter(op))), + parsers.NewMultiLineTagParser("Security", parsers.NewSetSecurityScheme(opSecurityDefsSetter(op)), false), + parsers.NewMultiLineTagParser("Parameters", parsers.NewSetParams(r.parameters, opParamSetter(op)), false), + parsers.NewMultiLineTagParser("Responses", parsers.NewSetResponses(r.definitions, r.responses, opResponsesSetter(op)), false), + parsers.NewSingleLineTagParser("Deprecated", parsers.NewSetDeprecatedOp(op)), + parsers.NewMultiLineTagParser("Extensions", parsers.NewSetExtensions(opExtensionsSetter(op), r.ctx.Debug()), true), + } +} diff --git a/internal/builders/schema/errors.go b/internal/builders/schema/errors.go new file mode 100644 index 0000000..2bb3cba --- /dev/null +++ b/internal/builders/schema/errors.go @@ -0,0 +1,9 @@ +// SPDX-FileCopyrightText: Copyright 2015-2025 go-swagger maintainers +// SPDX-License-Identifier: Apache-2.0 + +package schema + +import "errors" + +// ErrSchema is the sentinel error for all errors originating from the schema builder package. 
+var ErrSchema = errors.New("codescan:builders:schema") diff --git a/internal/builders/schema/schema.go b/internal/builders/schema/schema.go new file mode 100644 index 0000000..bf88b7a --- /dev/null +++ b/internal/builders/schema/schema.go @@ -0,0 +1,1443 @@ +// SPDX-FileCopyrightText: Copyright 2015-2025 go-swagger maintainers +// SPDX-License-Identifier: Apache-2.0 + +package schema + +import ( + "encoding/json" + "fmt" + "go/ast" + "go/types" + "log" + "reflect" + "strings" + + "golang.org/x/tools/go/packages" + + "github.com/go-openapi/codescan/internal/builders/resolvers" + "github.com/go-openapi/codescan/internal/ifaces" + "github.com/go-openapi/codescan/internal/logger" + "github.com/go-openapi/codescan/internal/parsers" + "github.com/go-openapi/codescan/internal/scanner" + oaispec "github.com/go-openapi/spec" +) + +type Builder struct { + ctx *scanner.ScanCtx + decl *scanner.EntityDecl + GoName string + Name string + annotated bool + discovered []*scanner.EntityDecl + postDecls []*scanner.EntityDecl +} + +func NewBuilder(ctx *scanner.ScanCtx, decl *scanner.EntityDecl) *Builder { + return &Builder{ + ctx: ctx, + decl: decl, + } +} + +func (s *Builder) Build(definitions map[string]oaispec.Schema) error { + s.inferNames() + + schema := definitions[s.Name] + err := s.buildFromDecl(s.decl, &schema) + if err != nil { + return err + } + definitions[s.Name] = schema + + return nil +} + +func (s *Builder) SetDiscovered(discovered []*scanner.EntityDecl) { + s.discovered = discovered +} + +func (s *Builder) PostDeclarations() []*scanner.EntityDecl { + return s.postDecls +} + +func (s *Builder) InferNames() { + s.inferNames() +} + +func (s *Builder) BuildFromType(tpe types.Type, tgt ifaces.SwaggerTypable) error { + return s.buildFromType(tpe, tgt) +} + +func (s *Builder) inferNames() { + if s.GoName != "" { + return + } + + goName := s.decl.Ident.Name + s.GoName = goName + s.Name = goName + + override, ok := parsers.ModelOverride(s.decl.Comments) + if !ok { + return + 
} + s.annotated = true + // Why: ModelOverride returns ("", true) for a bare `swagger:model` annotation + // without a name — in that case the Go identifier is the model name. + if override != "" { + s.Name = override + } +} + +func (s *Builder) buildFromDecl(_ *scanner.EntityDecl, schema *oaispec.Schema) error { + // analyze doc comment for the model + // This includes parsing "example", "default" and other validation at the top-level declaration. + sp := s.createParser("", schema, schema, nil, + parsers.WithSetTitle(func(lines []string) { schema.Title = parsers.JoinDropLast(lines) }), + parsers.WithSetDescription(func(lines []string) { + schema.Description = parsers.JoinDropLast(lines) + enumDesc := parsers.GetEnumDesc(schema.Extensions) + if enumDesc != "" { + schema.Description += "\n" + enumDesc + } + }), + ) + + if err := sp.Parse(s.decl.Comments); err != nil { + return err + } + + // if the type is marked to ignore, just return + if sp.Ignored() { + return nil + } + + defer func() { + if schema.Ref.String() == "" { + // unless this is a $ref, we add traceability of the origin of this schema in source + if s.Name != s.GoName { + resolvers.AddExtension(&schema.VendorExtensible, "x-go-name", s.GoName, s.ctx.SkipExtensions()) + } + resolvers.AddExtension(&schema.VendorExtensible, "x-go-package", s.decl.Obj().Pkg().Path(), s.ctx.SkipExtensions()) + } + }() + + switch tpe := s.decl.ObjType().(type) { + // TODO(fredbi): we may safely remove all the cases here that are not Named or Alias + case *types.Basic: + logger.DebugLogf(s.ctx.Debug(), "basic: %v", tpe.Name()) + return nil + case *types.Struct: + return s.buildFromStruct(s.decl, tpe, schema, make(map[string]string)) + case *types.Interface: + return s.buildFromInterface(s.decl, tpe, schema, make(map[string]string)) + case *types.Array: + logger.DebugLogf(s.ctx.Debug(), "array: %v -> %v", s.decl.Ident.Name, tpe.Elem().String()) + return nil + case *types.Slice: + logger.DebugLogf(s.ctx.Debug(), "slice: %v -> 
%v", s.decl.Ident.Name, tpe.Elem().String()) + return nil + case *types.Map: + logger.DebugLogf(s.ctx.Debug(), "map: %v -> [%v]%v", s.decl.Ident.Name, tpe.Key().String(), tpe.Elem().String()) + return nil + case *types.Named: + logger.DebugLogf(s.ctx.Debug(), "named: %v", tpe) + return s.buildDeclNamed(tpe, schema) + case *types.Alias: + logger.DebugLogf(s.ctx.Debug(), "alias: %v -> %v", tpe, tpe.Rhs()) + tgt := Typable{schema, 0, s.ctx.SkipExtensions()} + + return s.buildDeclAlias(tpe, tgt) + case *types.TypeParam: + log.Printf("WARNING: generic type parameters are not supported yet %[1]v (%[1]T). Skipped", tpe) + return nil + case *types.Chan: + log.Printf("WARNING: channels are not supported %[1]v (%[1]T). Skipped", tpe) + return nil + case *types.Signature: + log.Printf("WARNING: functions are not supported %[1]v (%[1]T). Skipped", tpe) + return nil + default: + log.Printf("WARNING: missing parser for type %T, skipping model: %s\n", tpe, s.Name) + return nil + } +} + +func (s *Builder) buildDeclNamed(tpe *types.Named, schema *oaispec.Schema) error { + if resolvers.UnsupportedBuiltin(tpe) { + log.Printf("WARNING: skipped unsupported builtin type: %v", tpe) + + return nil + } + o := tpe.Obj() + + resolvers.MustNotBeABuiltinType(o) + + logger.DebugLogf(s.ctx.Debug(), "got the named type object: %s.%s | isAlias: %t | exported: %t", o.Pkg().Path(), o.Name(), o.IsAlias(), o.Exported()) + if resolvers.IsStdTime(o) { + schema.Typed("string", "date-time") + return nil + } + + ps := Typable{schema, 0, s.ctx.SkipExtensions()} + ti := s.decl.Pkg.TypesInfo.Types[s.decl.Spec.Type] + if !ti.IsType() { + return fmt.Errorf("declaration is not a type: %v: %w", o, ErrSchema) + } + + return s.buildFromType(ti.Type, ps) +} + +// buildFromTextMarshal renders a type that marshals as text as a string. 
+func (s *Builder) buildFromTextMarshal(tpe types.Type, tgt ifaces.SwaggerTypable) error { + if typePtr, ok := tpe.(*types.Pointer); ok { + return s.buildFromTextMarshal(typePtr.Elem(), tgt) + } + + typeNamed, ok := tpe.(*types.Named) + if !ok { + tgt.Typed("string", "") + return nil + } + + tio := typeNamed.Obj() + if resolvers.IsStdError(tio) { + tgt.AddExtension("x-go-type", tio.Name()) + return resolvers.SwaggerSchemaForType(tio.Name(), tgt) + } + + logger.DebugLogf(s.ctx.Debug(), "named refined type %s.%s", tio.Pkg().Path(), tio.Name()) + pkg, found := s.ctx.PkgForType(tpe) + + if strings.ToLower(tio.Name()) == "uuid" { + tgt.Typed("string", "uuid") + return nil + } + + if !found { + // this must be a builtin + logger.DebugLogf(s.ctx.Debug(), "skipping because package is nil: %v", tpe) + return nil + } + + if resolvers.IsStdTime(tio) { + tgt.Typed("string", "date-time") + return nil + } + + if resolvers.IsStdJSONRawMessage(tio) { + tgt.Typed("object", "") // TODO: this should actually be any type + return nil + } + + cmt, hasComments := s.ctx.FindComments(pkg, tio.Name()) + if !hasComments { + cmt = new(ast.CommentGroup) + } + + if sfnm, isf := parsers.StrfmtName(cmt); isf { + tgt.Typed("string", sfnm) + return nil + } + + tgt.Typed("string", "") + tgt.AddExtension("x-go-type", tio.Pkg().Path()+"."+tio.Name()) + + return nil +} + +func (s *Builder) buildFromType(tpe types.Type, tgt ifaces.SwaggerTypable) error { + // check if the type implements encoding.TextMarshaler interface + // if so, the type is rendered as a string. 
+ logger.DebugLogf(s.ctx.Debug(), "schema buildFromType %v (%T)", tpe, tpe) + + if resolvers.IsTextMarshaler(tpe) { + return s.buildFromTextMarshal(tpe, tgt) + } + + switch titpe := tpe.(type) { + case *types.Basic: + if resolvers.UnsupportedBuiltinType(titpe) { + log.Printf("WARNING: skipped unsupported builtin type: %v", tpe) + return nil + } + return resolvers.SwaggerSchemaForType(titpe.String(), tgt) + case *types.Pointer: + return s.buildFromType(titpe.Elem(), tgt) + case *types.Struct: + return s.buildFromStruct(s.decl, titpe, tgt.Schema(), make(map[string]string)) + case *types.Interface: + return s.buildFromInterface(s.decl, titpe, tgt.Schema(), make(map[string]string)) + case *types.Slice: + // anonymous slice + return s.buildFromType(titpe.Elem(), tgt.Items()) + case *types.Array: + // anonymous array + return s.buildFromType(titpe.Elem(), tgt.Items()) + case *types.Map: + return s.buildFromMap(titpe, tgt) + case *types.Named: + // a named type, e.g. type X struct {} + return s.buildNamedType(titpe, tgt) + case *types.Alias: + // a named alias, e.g. type X = {RHS type}. + logger.DebugLogf(s.ctx.Debug(), "alias(schema.buildFromType): got alias %v to %v", titpe, titpe.Rhs()) + return s.buildAlias(titpe, tgt) + case *types.TypeParam: + log.Printf("WARNING: generic type parameters are not supported yet %[1]v (%[1]T). Skipped", titpe) + return nil + case *types.Chan: + log.Printf("WARNING: channels are not supported %[1]v (%[1]T). Skipped", tpe) + return nil + case *types.Signature: + log.Printf("WARNING: functions are not supported %[1]v (%[1]T). 
Skipped", tpe) + return nil + default: + panic(fmt.Errorf("ERROR: can't determine refined type %[1]v (%[1]T): %w", titpe, resolvers.ErrInternal)) + } +} + +func (s *Builder) buildNamedType(titpe *types.Named, tgt ifaces.SwaggerTypable) error { + tio := titpe.Obj() + if resolvers.UnsupportedBuiltin(titpe) { + log.Printf("WARNING: skipped unsupported builtin type: %v", titpe) + return nil + } + + if resolvers.IsAny(tio) { + // e.g type X any or type X interface{} + _ = tgt.Schema() + + return nil + } + + // special case of the "error" interface. + if resolvers.IsStdError(tio) { + tgt.AddExtension("x-go-type", tio.Name()) + return resolvers.SwaggerSchemaForType(tio.Name(), tgt) + } + + // special case of the "time.Time" type + if resolvers.IsStdTime(tio) { + tgt.Typed("string", "date-time") + return nil + } + + // special case of the "json.RawMessage" type + if resolvers.IsStdJSONRawMessage(tio) { + tgt.Typed("object", "") // TODO: this should actually be any type + return nil + } + + pkg, found := s.ctx.PkgForType(titpe) + logger.DebugLogf(s.ctx.Debug(), "named refined type %s.%s", pkg, tio.Name()) + if !found { + // this must be a builtin + // + // This could happen for example when using unsupported types such as complex64, complex128, uintptr, + // or type constraints such as comparable. 
+ logger.DebugLogf(s.ctx.Debug(), "skipping because package is nil (builtin type): %v", tio) + + return nil + } + + cmt, hasComments := s.ctx.FindComments(pkg, tio.Name()) + if !hasComments { + cmt = new(ast.CommentGroup) + } + + if typeName, ok := parsers.TypeName(cmt); ok { + _ = resolvers.SwaggerSchemaForType(typeName, tgt) + + return nil + } + + if s.decl.Spec.Assign.IsValid() { + logger.DebugLogf(s.ctx.Debug(), "found assignment: %s.%s", tio.Pkg().Path(), tio.Name()) + return s.buildFromType(titpe.Underlying(), tgt) + } + + if titpe.TypeArgs() != nil && titpe.TypeArgs().Len() > 0 { + return s.buildFromType(titpe.Underlying(), tgt) + } + + // invariant: the Underlying cannot be an alias or named type + switch utitpe := titpe.Underlying().(type) { + case *types.Struct: + return s.buildNamedStruct(tio, cmt, tgt) + case *types.Interface: + logger.DebugLogf(s.ctx.Debug(), "found interface: %s.%s", tio.Pkg().Path(), tio.Name()) + + decl, found := s.ctx.FindModel(tio.Pkg().Path(), tio.Name()) + if !found { + return fmt.Errorf("can't find source file for type: %v: %w", utitpe, ErrSchema) + } + + return s.makeRef(decl, tgt) + case *types.Basic: + return s.buildNamedBasic(tio, pkg, cmt, utitpe, tgt) + case *types.Array: + return s.buildNamedArray(tio, cmt, utitpe.Elem(), tgt) + case *types.Slice: + return s.buildNamedSlice(tio, cmt, utitpe.Elem(), tgt) + case *types.Map: + logger.DebugLogf(s.ctx.Debug(), "found map type: %s.%s", tio.Pkg().Path(), tio.Name()) + + if decl, ok := s.ctx.FindModel(tio.Pkg().Path(), tio.Name()); ok { + return s.makeRef(decl, tgt) + } + return nil + case *types.TypeParam: + log.Printf("WARNING: generic type parameters are not supported yet %[1]v (%[1]T). Skipped", utitpe) + return nil + case *types.Chan: + log.Printf("WARNING: channels are not supported %[1]v (%[1]T). Skipped", utitpe) + return nil + case *types.Signature: + log.Printf("WARNING: functions are not supported %[1]v (%[1]T). 
Skipped", utitpe) + return nil + default: + log.Printf( + "WARNING: can't figure out object type for named type (%T): %v [alias: %t]", + titpe.Underlying(), titpe.Underlying(), titpe.Obj().IsAlias(), + ) + + return nil + } +} + +func (s *Builder) buildNamedBasic(tio *types.TypeName, pkg *packages.Package, cmt *ast.CommentGroup, utitpe *types.Basic, tgt ifaces.SwaggerTypable) error { + if resolvers.UnsupportedBuiltinType(utitpe) { + log.Printf("WARNING: skipped unsupported builtin type: %v", utitpe) + return nil + } + + logger.DebugLogf(s.ctx.Debug(), "found primitive type: %s.%s", tio.Pkg().Path(), tio.Name()) + + if sfnm, isf := parsers.StrfmtName(cmt); isf { + tgt.Typed("string", sfnm) + return nil + } + + if enumName, ok := parsers.EnumName(cmt); ok { + enumValues, enumDesces, _ := s.ctx.FindEnumValues(pkg, enumName) + if len(enumValues) > 0 { + tgt.WithEnum(enumValues...) + enumTypeName := reflect.TypeOf(enumValues[0]).String() + _ = resolvers.SwaggerSchemaForType(enumTypeName, tgt) + } + + if len(enumDesces) > 0 { + tgt.WithEnumDescription(strings.Join(enumDesces, "\n")) + } + + return nil + } + + if defaultName, ok := parsers.DefaultName(cmt); ok { + logger.DebugLogf(s.ctx.Debug(), "default name: %s", defaultName) + return nil + } + + if typeName, ok := parsers.TypeName(cmt); ok { + _ = resolvers.SwaggerSchemaForType(typeName, tgt) + return nil + } + + if parsers.IsAliasParam(tgt) || parsers.AliasParam(cmt) { + err := resolvers.SwaggerSchemaForType(utitpe.Name(), tgt) + if err == nil { + return nil + } + } + + if decl, ok := s.ctx.FindModel(tio.Pkg().Path(), tio.Name()); ok { + return s.makeRef(decl, tgt) + } + + return resolvers.SwaggerSchemaForType(utitpe.String(), tgt) +} + +func (s *Builder) buildNamedStruct(tio *types.TypeName, cmt *ast.CommentGroup, tgt ifaces.SwaggerTypable) error { + logger.DebugLogf(s.ctx.Debug(), "found struct: %s.%s", tio.Pkg().Path(), tio.Name()) + + decl, ok := s.ctx.FindModel(tio.Pkg().Path(), tio.Name()) + if !ok { + 
logger.DebugLogf(s.ctx.Debug(), "could not find model in index: %s.%s", tio.Pkg().Path(), tio.Name()) + return nil + } + + o := decl.Obj() + if resolvers.IsStdTime(o) { + tgt.Typed("string", "date-time") + return nil + } + + if sfnm, isf := parsers.StrfmtName(cmt); isf { + tgt.Typed("string", sfnm) + return nil + } + + if typeName, ok := parsers.TypeName(cmt); ok { + _ = resolvers.SwaggerSchemaForType(typeName, tgt) + return nil + } + + return s.makeRef(decl, tgt) +} + +func (s *Builder) buildNamedArray(tio *types.TypeName, cmt *ast.CommentGroup, elem types.Type, tgt ifaces.SwaggerTypable) error { + logger.DebugLogf(s.ctx.Debug(), "found array type: %s.%s", tio.Pkg().Path(), tio.Name()) + + if sfnm, isf := parsers.StrfmtName(cmt); isf { + if sfnm == "byte" { + tgt.Typed("string", sfnm) + return nil + } + if sfnm == "bsonobjectid" { + tgt.Typed("string", sfnm) + return nil + } + + tgt.Items().Typed("string", sfnm) + return nil + } + if decl, ok := s.ctx.FindModel(tio.Pkg().Path(), tio.Name()); ok { + return s.makeRef(decl, tgt) + } + return s.buildFromType(elem, tgt.Items()) +} + +func (s *Builder) buildNamedSlice(tio *types.TypeName, cmt *ast.CommentGroup, elem types.Type, tgt ifaces.SwaggerTypable) error { + logger.DebugLogf(s.ctx.Debug(), "found slice type: %s.%s", tio.Pkg().Path(), tio.Name()) + + if sfnm, isf := parsers.StrfmtName(cmt); isf { + if sfnm == "byte" { + tgt.Typed("string", sfnm) + return nil + } + tgt.Items().Typed("string", sfnm) + return nil + } + if decl, ok := s.ctx.FindModel(tio.Pkg().Path(), tio.Name()); ok { + return s.makeRef(decl, tgt) + } + return s.buildFromType(elem, tgt.Items()) +} + +// buildDeclAlias builds a top-level alias declaration. 
+func (s *Builder) buildDeclAlias(tpe *types.Alias, tgt ifaces.SwaggerTypable) error { + if resolvers.UnsupportedBuiltinType(tpe) { + log.Printf("WARNING: skipped unsupported builtin type: %v", tpe) + return nil + } + + o := tpe.Obj() + if resolvers.IsAny(o) { + _ = tgt.Schema() // this is mutating tgt to create an empty schema + return nil + } + if resolvers.IsStdError(o) { + tgt.AddExtension("x-go-type", o.Name()) + return resolvers.SwaggerSchemaForType(o.Name(), tgt) + } + resolvers.MustNotBeABuiltinType(o) + + if resolvers.IsStdTime(o) { + tgt.Typed("string", "date-time") + return nil + } + + resolvers.MustHaveRightHandSide(tpe) + rhs := tpe.Rhs() + + // If transparent aliases are enabled, use the underlying type directly without creating a definition + if s.ctx.TransparentAliases() { + return s.buildFromType(rhs, tgt) + } + + decl, ok := s.ctx.FindModel(o.Pkg().Path(), o.Name()) + if !ok { + return fmt.Errorf("can't find source file for aliased type: %v -> %v: %w", tpe, rhs, ErrSchema) + } + + s.postDecls = append(s.postDecls, decl) // mark the left-hand side as discovered + + if !s.ctx.RefAliases() { + // expand alias + return s.buildFromType(tpe.Underlying(), tgt) + } + + // resolve alias to named type as $ref + switch rtpe := rhs.(type) { + // named declarations: we construct a $ref to the right-hand side target of the alias + case *types.Named: + ro := rtpe.Obj() + rdecl, found := s.ctx.FindModel(ro.Pkg().Path(), ro.Name()) + if !found { + return fmt.Errorf("can't find source file for target type of alias: %v -> %v: %w", tpe, rtpe, ErrSchema) + } + + return s.makeRef(rdecl, tgt) + case *types.Alias: + ro := rtpe.Obj() + if resolvers.UnsupportedBuiltin(rtpe) { + log.Printf("WARNING: skipped unsupported builtin type: %v", rtpe) + return nil + } + if resolvers.IsAny(ro) { + // e.g. type X = any + _ = tgt.Schema() // this is mutating tgt to create an empty schema + return nil + } + if resolvers.IsStdError(ro) { + // e.g. 
type X = error + tgt.AddExtension("x-go-type", o.Name()) + return resolvers.SwaggerSchemaForType(o.Name(), tgt) + } + resolvers.MustNotBeABuiltinType(ro) // TODO(fred): there are a few other cases + + rdecl, found := s.ctx.FindModel(ro.Pkg().Path(), ro.Name()) + if !found { + return fmt.Errorf("can't find source file for target type of alias: %v -> %v: %w", tpe, rtpe, ErrSchema) + } + + return s.makeRef(rdecl, tgt) + } + + // alias to anonymous type + return s.buildFromType(rhs, tgt) +} + +func (s *Builder) buildAnonymousInterface(it *types.Interface, tgt ifaces.SwaggerTypable, decl *scanner.EntityDecl) error { + tgt.Typed("object", "") + + for fld := range it.ExplicitMethods() { + if err := s.processAnonInterfaceMethod(fld, it, decl, tgt.Schema()); err != nil { + return err + } + } + + return nil +} + +func (s *Builder) processAnonInterfaceMethod(fld *types.Func, it *types.Interface, decl *scanner.EntityDecl, schema *oaispec.Schema) error { + if !fld.Exported() { + return nil + } + sig, isSignature := fld.Type().(*types.Signature) + if !isSignature { + return nil + } + if sig.Params().Len() > 0 { + return nil + } + if sig.Results() == nil || sig.Results().Len() != 1 { + return nil + } + + afld := resolvers.FindASTField(decl.File, fld.Pos()) + if afld == nil { + logger.DebugLogf(s.ctx.Debug(), "can't find source associated with %s for %s", fld.String(), it.String()) + return nil + } + + if parsers.Ignored(afld.Doc) { + return nil + } + + name, ok := parsers.NameOverride(afld.Doc) + if !ok { + name = fld.Name() + } + + if schema.Properties == nil { + schema.Properties = make(map[string]oaispec.Schema) + } + ps := schema.Properties[name] + if err := s.buildFromType(sig.Results().At(0).Type(), Typable{&ps, 0, s.ctx.SkipExtensions()}); err != nil { + return err + } + if sfName, isStrfmt := parsers.StrfmtName(afld.Doc); isStrfmt { + ps.Typed("string", sfName) + ps.Ref = oaispec.Ref{} + ps.Items = nil + } + + sp := s.createParser(name, schema, &ps, afld) + if err := 
sp.Parse(afld.Doc); err != nil { + return err + } + + if ps.Ref.String() == "" && name != fld.Name() { + ps.AddExtension("x-go-name", fld.Name()) + } + + if s.ctx.SetXNullableForPointers() { + _, isPointer := fld.Type().(*types.Signature).Results().At(0).Type().(*types.Pointer) + noNullableExt := ps.Extensions == nil || + (ps.Extensions["x-nullable"] == nil && ps.Extensions["x-isnullable"] == nil) + if isPointer && noNullableExt { + ps.AddExtension("x-nullable", true) + } + } + + schema.Properties[name] = ps + return nil +} + +// buildAlias builds a reference to an alias from another type. +func (s *Builder) buildAlias(tpe *types.Alias, tgt ifaces.SwaggerTypable) error { + if resolvers.UnsupportedBuiltinType(tpe) { + log.Printf("WARNING: skipped unsupported builtin type: %v", tpe) + + return nil + } + + o := tpe.Obj() + if resolvers.IsAny(o) { + _ = tgt.Schema() + return nil + } + resolvers.MustNotBeABuiltinType(o) + + // If transparent aliases are enabled, use the underlying type directly + if s.ctx.TransparentAliases() { + return s.buildFromType(tpe.Rhs(), tgt) + } + + decl, ok := s.ctx.FindModel(o.Pkg().Path(), o.Name()) + if !ok { + return fmt.Errorf("can't find source file for aliased type: %v: %w", tpe, ErrSchema) + } + + return s.makeRef(decl, tgt) +} + +func (s *Builder) buildFromMap(titpe *types.Map, tgt ifaces.SwaggerTypable) error { + // check if key is a string type, or knows how to marshall to text. + // If not, print a message and skip the map property. 
+ // + // Only maps with string keys can go into additional properties + + sch := tgt.Schema() + if sch == nil { + return fmt.Errorf("items doesn't support maps: %w", ErrSchema) + } + + eleProp := Typable{sch, tgt.Level(), s.ctx.SkipExtensions()} + key := titpe.Key() + if key.Underlying().String() == "string" || resolvers.IsTextMarshaler(key) { + return s.buildFromType(titpe.Elem(), eleProp.AdditionalProperties()) + } + + return nil +} + +func (s *Builder) buildFromInterface(decl *scanner.EntityDecl, it *types.Interface, schema *oaispec.Schema, seen map[string]string) error { + if it.Empty() { + // return an empty schema for empty interfaces + return nil + } + + var ( + tgt *oaispec.Schema + hasAllOf bool + ) + + var flist []*ast.Field + if specType, ok := decl.Spec.Type.(*ast.InterfaceType); ok { + flist = make([]*ast.Field, it.NumEmbeddeds()+it.NumExplicitMethods()) + copy(flist, specType.Methods.List) + } + + // First collect the embedded interfaces + // create refs when: + // + // 1. the embedded interface is decorated with an allOf annotation + // 2. 
the embedded interface is an alias + for fld := range it.EmbeddedTypes() { + if tgt == nil { + tgt = &oaispec.Schema{} + } + + fieldHasAllOf, err := s.processEmbeddedType(fld, flist, decl, schema, seen) + if err != nil { + return err + } + hasAllOf = hasAllOf || fieldHasAllOf + } + + if tgt == nil { + tgt = schema + } + + // We can finally build the actual schema for the struct + if tgt.Properties == nil { + tgt.Properties = make(map[string]oaispec.Schema) + } + tgt.Typed("object", "") + + for fld := range it.ExplicitMethods() { + if err := s.processInterfaceMethod(fld, it, decl, tgt, seen); err != nil { + return err + } + } + + if tgt == nil { + return nil + } + if hasAllOf && len(tgt.Properties) > 0 { + schema.AllOf = append(schema.AllOf, *tgt) + } + + for k := range tgt.Properties { + if _, ok := seen[k]; !ok { + delete(tgt.Properties, k) + } + } + + return nil +} + +func (s *Builder) processEmbeddedType( + fld types.Type, + flist []*ast.Field, + decl *scanner.EntityDecl, + schema *oaispec.Schema, + seen map[string]string, +) (fieldHasAllOf bool, err error) { + logger.DebugLogf(s.ctx.Debug(), "inspecting embedded type in interface: %v", fld) + + switch ftpe := fld.(type) { + case *types.Named: + logger.DebugLogf(s.ctx.Debug(), "embedded named type (buildInterface): %v", ftpe) + o := ftpe.Obj() + if resolvers.IsAny(o) || resolvers.IsStdError(o) { + return false, nil + } + return s.buildNamedInterface(ftpe, flist, decl, schema, seen) + case *types.Interface: + logger.DebugLogf(s.ctx.Debug(), "embedded anonymous interface type (buildInterface): %v", ftpe) + var aliasedSchema oaispec.Schema + ps := Typable{schema: &aliasedSchema, skipExt: s.ctx.SkipExtensions()} + if err = s.buildAnonymousInterface(ftpe, ps, decl); err != nil { + return false, err + } + if aliasedSchema.Ref.String() != "" || len(aliasedSchema.Properties) > 0 || len(aliasedSchema.AllOf) > 0 { + fieldHasAllOf = true + schema.AddToAllOf(aliasedSchema) + } + case *types.Alias: + 
logger.DebugLogf(s.ctx.Debug(), "embedded alias (buildInterface): %v -> %v", ftpe, ftpe.Rhs()) + var aliasedSchema oaispec.Schema + ps := Typable{schema: &aliasedSchema, skipExt: s.ctx.SkipExtensions()} + if err = s.buildAlias(ftpe, ps); err != nil { + return false, err + } + if aliasedSchema.Ref.String() != "" || len(aliasedSchema.Properties) > 0 || len(aliasedSchema.AllOf) > 0 { + fieldHasAllOf = true + schema.AddToAllOf(aliasedSchema) + } + case *types.Union: + log.Printf("WARNING: union type constraints are not supported yet %[1]v (%[1]T). Skipped", ftpe) + case *types.TypeParam: + log.Printf("WARNING: generic type parameters are not supported yet %[1]v (%[1]T). Skipped", ftpe) + case *types.Chan: + log.Printf("WARNING: channels are not supported %[1]v (%[1]T). Skipped", ftpe) + case *types.Signature: + log.Printf("WARNING: functions are not supported %[1]v (%[1]T). Skipped", ftpe) + default: + log.Printf( + "WARNING: can't figure out object type for allOf named type (%T): %v", + ftpe, ftpe.Underlying(), + ) + } + + logger.DebugLogf(s.ctx.Debug(), "got embedded interface: %v {%T}, fieldHasAllOf: %t", fld, fld, fieldHasAllOf) + return fieldHasAllOf, nil +} + +func (s *Builder) processInterfaceMethod(fld *types.Func, it *types.Interface, decl *scanner.EntityDecl, tgt *oaispec.Schema, seen map[string]string) error { + if !fld.Exported() { + return nil + } + + sig, isSignature := fld.Type().(*types.Signature) + if !isSignature { + return nil + } + + if sig.Params().Len() > 0 { + return nil + } + + if sig.Results() == nil || sig.Results().Len() != 1 { + return nil + } + + afld := resolvers.FindASTField(decl.File, fld.Pos()) + if afld == nil { + logger.DebugLogf(s.ctx.Debug(), "can't find source associated with %s for %s", fld.String(), it.String()) + return nil + } + + // if the field is annotated with swagger:ignore, ignore it + if parsers.Ignored(afld.Doc) { + return nil + } + + name, ok := parsers.NameOverride(afld.Doc) + if !ok { + name = fld.Name() + } + + ps 
:= tgt.Properties[name] + if err := s.buildFromType(sig.Results().At(0).Type(), Typable{&ps, 0, s.ctx.SkipExtensions()}); err != nil { + return err + } + + if sfName, isStrfmt := parsers.StrfmtName(afld.Doc); isStrfmt { + ps.Typed("string", sfName) + ps.Ref = oaispec.Ref{} + ps.Items = nil + } + + sp := s.createParser(name, tgt, &ps, afld) + if err := sp.Parse(afld.Doc); err != nil { + return err + } + + if ps.Ref.String() == "" && name != fld.Name() { + ps.AddExtension("x-go-name", fld.Name()) + } + + if s.ctx.SetXNullableForPointers() { + _, isPointer := fld.Type().(*types.Signature).Results().At(0).Type().(*types.Pointer) + noNullableExt := ps.Extensions == nil || + (ps.Extensions["x-nullable"] == nil && ps.Extensions["x-isnullable"] == nil) + if isPointer && noNullableExt { + ps.AddExtension("x-nullable", true) + } + } + + seen[name] = fld.Name() + tgt.Properties[name] = ps + + return nil +} + +func (s *Builder) buildNamedInterface(ftpe *types.Named, flist []*ast.Field, decl *scanner.EntityDecl, schema *oaispec.Schema, seen map[string]string) (hasAllOf bool, err error) { + o := ftpe.Obj() + var afld *ast.Field + + for _, an := range flist { + if len(an.Names) != 0 { + continue + } + + tpp := decl.Pkg.TypesInfo.Types[an.Type] + if tpp.Type.String() != o.Type().String() { + continue + } + + // decl. 
+ logger.DebugLogf(s.ctx.Debug(), "maybe interface field %s: %s(%T)", o.Name(), o.Type().String(), o.Type()) + afld = an + break + } + + if afld == nil { + logger.DebugLogf(s.ctx.Debug(), "can't find source associated with %s", ftpe.String()) + return hasAllOf, nil + } + + // if the field is annotated with swagger:ignore, ignore it + if parsers.Ignored(afld.Doc) { + return hasAllOf, nil + } + + if !parsers.AllOfMember(afld.Doc) { + var newSch oaispec.Schema + if err = s.buildEmbedded(o.Type(), &newSch, seen); err != nil { + return hasAllOf, err + } + schema.AllOf = append(schema.AllOf, newSch) + hasAllOf = true + + return hasAllOf, nil + } + + hasAllOf = true + + var newSch oaispec.Schema + // when the embedded struct is annotated with swagger:allOf it will be used as allOf property + // otherwise the fields will just be included as normal properties + if err = s.buildAllOf(o.Type(), &newSch); err != nil { + return hasAllOf, err + } + + if afld.Doc != nil { + extractAllOfClass(afld.Doc, schema) + } + + schema.AllOf = append(schema.AllOf, newSch) + + return hasAllOf, nil +} + +func (s *Builder) buildFromStruct(decl *scanner.EntityDecl, st *types.Struct, schema *oaispec.Schema, seen map[string]string) error { + cmt, hasComments := s.ctx.FindComments(decl.Pkg, decl.Obj().Name()) + if !hasComments { + cmt = new(ast.CommentGroup) + } + name, ok := parsers.TypeName(cmt) + if ok { + _ = resolvers.SwaggerSchemaForType(name, Typable{schema: schema, skipExt: s.ctx.SkipExtensions()}) + return nil + } + // First pass: scan anonymous/embedded fields for allOf composition. + // Returns the target schema for properties (may differ from schema when allOf is used). 
+ tgt, hasAllOf, err := s.scanEmbeddedFields(decl, st, schema, seen) + if err != nil { + return err + } + + if tgt == nil { + if schema != nil { + tgt = schema + } else { + tgt = &oaispec.Schema{} + } + } + if tgt.Properties == nil { + tgt.Properties = make(map[string]oaispec.Schema) + } + tgt.Typed("object", "") + + // Second pass: build properties from non-embedded exported fields. + if err := s.buildStructFields(decl, st, tgt, seen); err != nil { + return err + } + + if tgt == nil { + return nil + } + if hasAllOf && len(tgt.Properties) > 0 { + schema.AllOf = append(schema.AllOf, *tgt) + } + for k := range tgt.Properties { + if _, ok := seen[k]; !ok { + delete(tgt.Properties, k) + } + } + return nil +} + +// scanEmbeddedFields iterates over anonymous struct fields to detect allOf composition. +// It returns: +// - tgt: the schema that should receive properties (nil if no embedded fields were processed, +// schema itself for plain embeds, or a new schema when allOf is detected) +// - hasAllOf: whether any allOf member was found +func (s *Builder) scanEmbeddedFields(decl *scanner.EntityDecl, st *types.Struct, schema *oaispec.Schema, seen map[string]string) (tgt *oaispec.Schema, hasAllOf bool, err error) { + for i := range st.NumFields() { + fld := st.Field(i) + if !fld.Anonymous() { + logger.DebugLogf(s.ctx.Debug(), "skipping field %q for allOf scan because not anonymous", fld.Name()) + continue + } + tg := st.Tag(i) + + logger.DebugLogf(s.ctx.Debug(), + "maybe allof field(%t) %s: %s (%T) [%q](anon: %t, embedded: %t)", + fld.IsField(), fld.Name(), fld.Type().String(), fld.Type(), tg, fld.Anonymous(), fld.Embedded(), + ) + afld := resolvers.FindASTField(decl.File, fld.Pos()) + if afld == nil { + logger.DebugLogf(s.ctx.Debug(), "can't find source associated with %s for %s", fld.String(), st.String()) + continue + } + + if parsers.Ignored(afld.Doc) { + continue + } + + _, ignore, _, _, err := resolvers.ParseJSONTag(afld) + if err != nil { + return nil, false, err + } 
+ if ignore { + continue + } + + _, isAliased := fld.Type().(*types.Alias) + + if !parsers.AllOfMember(afld.Doc) && !isAliased { + // Plain embed: merge fields into the main schema + if tgt == nil { + tgt = schema + } + if err := s.buildEmbedded(fld.Type(), tgt, seen); err != nil { + return nil, false, err + } + continue + } + + if isAliased { + logger.DebugLogf(s.ctx.Debug(), "alias member in struct: %v", fld) + } + + // allOf member: fields go into a separate schema, embedded struct becomes an allOf entry + hasAllOf = true + if tgt == nil { + tgt = &oaispec.Schema{} + } + var newSch oaispec.Schema + if err := s.buildAllOf(fld.Type(), &newSch); err != nil { + return nil, false, err + } + + extractAllOfClass(afld.Doc, schema) + schema.AllOf = append(schema.AllOf, newSch) + } + + return tgt, hasAllOf, nil +} + +func (s *Builder) buildStructFields(decl *scanner.EntityDecl, st *types.Struct, tgt *oaispec.Schema, seen map[string]string) error { + for fld := range st.Fields() { + if err := s.processStructField(fld, decl, tgt, seen); err != nil { + return err + } + } + return nil +} + +func (s *Builder) processStructField(fld *types.Var, decl *scanner.EntityDecl, tgt *oaispec.Schema, seen map[string]string) error { + if fld.Embedded() || !fld.Exported() { + return nil + } + + afld := resolvers.FindASTField(decl.File, fld.Pos()) + if afld == nil { + logger.DebugLogf(s.ctx.Debug(), "can't find source associated with %s", fld.String()) + return nil + } + + if parsers.Ignored(afld.Doc) { + return nil + } + + name, ignore, isString, omitEmpty, err := resolvers.ParseJSONTag(afld) + if err != nil { + return err + } + + if ignore { + for seenTagName, seenFieldName := range seen { + if seenFieldName == fld.Name() { + delete(tgt.Properties, seenTagName) + break + } + } + return nil + } + + ps := tgt.Properties[name] + if err = s.buildFromType(fld.Type(), Typable{&ps, 0, s.ctx.SkipExtensions()}); err != nil { + return err + } + if isString { + ps.Typed("string", ps.Format) + ps.Ref 
= oaispec.Ref{} + ps.Items = nil + } + + if sfName, isStrfmt := parsers.StrfmtName(afld.Doc); isStrfmt { + ps.Typed("string", sfName) + ps.Ref = oaispec.Ref{} + ps.Items = nil + } + + sp := s.createParser(name, tgt, &ps, afld) + if err := sp.Parse(afld.Doc); err != nil { + return err + } + + if ps.Ref.String() == "" && name != fld.Name() { + resolvers.AddExtension(&ps.VendorExtensible, "x-go-name", fld.Name(), s.ctx.SkipExtensions()) + } + + if s.ctx.SetXNullableForPointers() { + if _, isPointer := fld.Type().(*types.Pointer); isPointer && !omitEmpty && + (ps.Extensions == nil || (ps.Extensions["x-nullable"] == nil && ps.Extensions["x-isnullable"] == nil)) { + ps.AddExtension("x-nullable", true) + } + } + + // we have 2 cases: + // 1. field with different name override tag + // 2. field with different name removes tag + // so we need to save both tag&name + seen[name] = fld.Name() + tgt.Properties[name] = ps + return nil +} + +func (s *Builder) buildAllOf(tpe types.Type, schema *oaispec.Schema) error { + logger.DebugLogf(s.ctx.Debug(), "allOf %s", tpe.Underlying()) + + switch ftpe := tpe.(type) { + case *types.Pointer: + return s.buildAllOf(ftpe.Elem(), schema) + case *types.Named: + return s.buildNamedAllOf(ftpe, schema) + case *types.Alias: + logger.DebugLogf(s.ctx.Debug(), "allOf member is alias %v => %v", ftpe, ftpe.Rhs()) + tgt := Typable{schema: schema, skipExt: s.ctx.SkipExtensions()} + return s.buildAlias(ftpe, tgt) + case *types.TypeParam: + log.Printf("WARNING: generic type parameters are not supported yet %[1]v (%[1]T). Skipped", ftpe) + return nil + case *types.Chan: + log.Printf("WARNING: channels are not supported %[1]v (%[1]T). Skipped", ftpe) + return nil + case *types.Signature: + log.Printf("WARNING: functions are not supported %[1]v (%[1]T). 
Skipped", ftpe) + return nil + default: + log.Printf("WARNING: missing allOf parser for a %T, skipping field", ftpe) + return fmt.Errorf("unable to resolve allOf member for: %v: %w", ftpe, ErrSchema) + } +} + +func (s *Builder) buildNamedAllOf(ftpe *types.Named, schema *oaispec.Schema) error { + switch utpe := ftpe.Underlying().(type) { + case *types.Struct: + decl, found := s.ctx.FindModel(ftpe.Obj().Pkg().Path(), ftpe.Obj().Name()) + if !found { + return fmt.Errorf("can't find source file for struct: %s: %w", ftpe.String(), ErrSchema) + } + + if resolvers.IsStdTime(ftpe.Obj()) { + schema.Typed("string", "date-time") + return nil + } + + if sfnm, isf := parsers.StrfmtName(decl.Comments); isf { + schema.Typed("string", sfnm) + return nil + } + + if decl.HasModelAnnotation() { + return s.makeRef(decl, Typable{schema, 0, s.ctx.SkipExtensions()}) + } + + return s.buildFromStruct(decl, utpe, schema, make(map[string]string)) + case *types.Interface: + decl, found := s.ctx.FindModel(ftpe.Obj().Pkg().Path(), ftpe.Obj().Name()) + if !found { + return fmt.Errorf("can't find source file for interface: %s: %w", ftpe.String(), ErrSchema) + } + + if sfnm, isf := parsers.StrfmtName(decl.Comments); isf { + schema.Typed("string", sfnm) + return nil + } + + if decl.HasModelAnnotation() { + return s.makeRef(decl, Typable{schema, 0, s.ctx.SkipExtensions()}) + } + + return s.buildFromInterface(decl, utpe, schema, make(map[string]string)) + case *types.TypeParam: + log.Printf("WARNING: generic type parameters are not supported yet %[1]v (%[1]T). Skipped", ftpe) + return nil + case *types.Chan: + log.Printf("WARNING: channels are not supported %[1]v (%[1]T). Skipped", ftpe) + return nil + case *types.Signature: + log.Printf("WARNING: functions are not supported %[1]v (%[1]T). 
Skipped", ftpe) + return nil + default: + log.Printf( + "WARNING: can't figure out object type for allOf named type (%T): %v", + ftpe, utpe, + ) + return fmt.Errorf("unable to locate source file for allOf (%T): %v: %w", + ftpe, utpe, ErrSchema, + ) + } +} + +func (s *Builder) buildEmbedded(tpe types.Type, schema *oaispec.Schema, seen map[string]string) error { + logger.DebugLogf(s.ctx.Debug(), "embedded %v", tpe.Underlying()) + + switch ftpe := tpe.(type) { + case *types.Pointer: + return s.buildEmbedded(ftpe.Elem(), schema, seen) + case *types.Named: + return s.buildNamedEmbedded(ftpe, schema, seen) + case *types.Alias: + logger.DebugLogf(s.ctx.Debug(), "embedded alias %v => %v", ftpe, ftpe.Rhs()) + tgt := Typable{schema, 0, s.ctx.SkipExtensions()} + return s.buildAlias(ftpe, tgt) + case *types.Union: // e.g. type X interface{ ~uint16 | ~float32 } + log.Printf("WARNING: union type constraints are not supported yet %[1]v (%[1]T). Skipped", ftpe) + return nil + case *types.TypeParam: + log.Printf("WARNING: generic type parameters are not supported yet %[1]v (%[1]T). Skipped", ftpe) + return nil + case *types.Chan: + log.Printf("WARNING: channels are not supported %[1]v (%[1]T). Skipped", ftpe) + return nil + case *types.Signature: + log.Printf("WARNING: functions are not supported %[1]v (%[1]T). 
Skipped", ftpe) + return nil + default: + log.Printf("WARNING: Missing embedded parser for a %T, skipping model\n", ftpe) + return nil + } +} + +func (s *Builder) buildNamedEmbedded(ftpe *types.Named, schema *oaispec.Schema, seen map[string]string) error { + logger.DebugLogf(s.ctx.Debug(), "embedded named type: %T", ftpe.Underlying()) + if resolvers.UnsupportedBuiltin(ftpe) { + log.Printf("WARNING: skipped unsupported builtin type: %v", ftpe) + + return nil + } + + switch utpe := ftpe.Underlying().(type) { + case *types.Struct: + decl, found := s.ctx.FindModel(ftpe.Obj().Pkg().Path(), ftpe.Obj().Name()) + if !found { + return fmt.Errorf("can't find source file for struct: %s: %w", ftpe.String(), ErrSchema) + } + + return s.buildFromStruct(decl, utpe, schema, seen) + case *types.Interface: + if utpe.Empty() { + return nil + } + o := ftpe.Obj() + if resolvers.IsAny(o) { + return nil + } + if resolvers.IsStdError(o) { + tgt := Typable{schema: schema, skipExt: s.ctx.SkipExtensions()} + tgt.AddExtension("x-go-type", o.Name()) + return resolvers.SwaggerSchemaForType(o.Name(), tgt) + } + resolvers.MustNotBeABuiltinType(o) + + decl, found := s.ctx.FindModel(o.Pkg().Path(), o.Name()) + if !found { + return fmt.Errorf("can't find source file for struct: %s: %w", ftpe.String(), ErrSchema) + } + return s.buildFromInterface(decl, utpe, schema, seen) + case *types.Union: // e.g. type X interface{ ~uint16 | ~float32 } + log.Printf("WARNING: union type constraints are not supported yet %[1]v (%[1]T). Skipped", utpe) + return nil + case *types.TypeParam: + log.Printf("WARNING: generic type parameters are not supported yet %[1]v (%[1]T). Skipped", utpe) + return nil + case *types.Chan: + log.Printf("WARNING: channels are not supported %[1]v (%[1]T). Skipped", utpe) + return nil + case *types.Signature: + log.Printf("WARNING: functions are not supported %[1]v (%[1]T). 
Skipped", utpe) + return nil + default: + log.Printf("WARNING: can't figure out object type for embedded named type (%T): %v", + ftpe, utpe, + ) + return nil + } +} + +func (s *Builder) makeRef(decl *scanner.EntityDecl, prop ifaces.SwaggerTypable) error { + nm, _ := decl.Names() + ref, err := oaispec.NewRef("#/definitions/" + nm) + if err != nil { + return err + } + + prop.SetRef(ref) + s.postDecls = append(s.postDecls, decl) + + return nil +} + +func (s *Builder) createParser(nm string, schema, ps *oaispec.Schema, fld *ast.Field, opts ...parsers.SectionedParserOption) *parsers.SectionedParser { + if ps.Ref.String() != "" && !s.ctx.DescWithRef() { + // if DescWithRef option is enabled, allow the tagged documentation to flow alongside the $ref + // otherwise behave as expected by jsonschema draft4: $ref predates all sibling keys. + opts = append( + opts, + parsers.WithTaggers(refSchemaTaggers(schema, nm)...), + ) + + return parsers.NewSectionedParser(opts...) + } + + taggers := schemaTaggers(schema, ps, nm) + + // the parser may be called outside the context of struct field. + // In that case, just return the outcome of the parsing now. + + if fld != nil { + // check if this is a primitive, if so parse the validations from the + // doc comments of the slice declaration. + if ftped, ok := fld.Type.(*ast.ArrayType); ok { + var err error + arrayTaggers, err := parseArrayTypes(taggers, ftped.Elt, ps.Items, 0) // NOTE: swallows error silently + if err == nil { + taggers = arrayTaggers + } + } + } + + opts = append( + opts, + parsers.WithSetDescription(func(lines []string) { + ps.Description = parsers.JoinDropLast(lines) + enumDesc := parsers.GetEnumDesc(ps.Extensions) + if enumDesc != "" { + ps.Description += "\n" + enumDesc + } + }), + parsers.WithTaggers(taggers...), + ) + + return parsers.NewSectionedParser(opts...) 
+} + +func schemaVendorExtensibleSetter(meta *oaispec.Schema) func(json.RawMessage) error { + return func(jsonValue json.RawMessage) error { + var jsonData oaispec.Extensions + err := json.Unmarshal(jsonValue, &jsonData) + if err != nil { + return err + } + + for k := range jsonData { + if !parsers.IsAllowedExtension(k) { + return fmt.Errorf("invalid schema extension name, should start from `x-`: %s: %w", k, ErrSchema) + } + } + + meta.Extensions = jsonData + + return nil + } +} + +func extractAllOfClass(doc *ast.CommentGroup, schema *oaispec.Schema) { + allOfClass, ok := parsers.AllOfName(doc) + if !ok { + return + } + + schema.AddExtension("x-class", allOfClass) +} diff --git a/internal/builders/schema/schema_go118_test.go b/internal/builders/schema/schema_go118_test.go new file mode 100644 index 0000000..7b9f1d2 --- /dev/null +++ b/internal/builders/schema/schema_go118_test.go @@ -0,0 +1,109 @@ +// SPDX-FileCopyrightText: Copyright 2015-2025 go-swagger maintainers +// SPDX-License-Identifier: Apache-2.0 + +package schema + +import ( + "testing" + + "github.com/go-openapi/codescan/internal/scanner" + "github.com/go-openapi/codescan/internal/scantest" + "github.com/go-openapi/testify/v2/assert" + "github.com/go-openapi/testify/v2/require" + + oaispec "github.com/go-openapi/spec" +) + +func getGo118ClassificationModel(sctx *scanner.ScanCtx, nm string) *scanner.EntityDecl { + decl, ok := sctx.FindDecl(fixturesModule+"/goparsing/go118", nm) + if !ok { + return nil + } + return decl +} + +func TestGo118SwaggerTypeNamed(t *testing.T) { + sctx := scantest.LoadGo118ClassificationPkgsCtx(t) + decl := getGo118ClassificationModel(sctx, "NamedWithType") + require.NotNil(t, decl) + prs := &Builder{ + ctx: sctx, + decl: decl, + } + models := make(map[string]oaispec.Schema) + require.NoError(t, prs.Build(models)) + schema := models["namedWithType"] + + scantest.AssertProperty(t, &schema, "object", "some_map", "", "SomeMap") + + scantest.CompareOrDumpJSON(t, models, 
"go118_schema_NamedWithType.json") +} + +func TestGo118AliasedModels(t *testing.T) { + sctx := scantest.LoadGo118ClassificationPkgsCtx(t) + + names := []string{ + "SomeObject", + } + + defs := make(map[string]oaispec.Schema) + for _, nm := range names { + decl := getGo118ClassificationModel(sctx, nm) + require.NotNil(t, decl) + + prs := &Builder{ + decl: decl, + ctx: sctx, + } + require.NoError(t, prs.Build(defs)) + } + + for k := range defs { + for i, b := range names { + if b == k { + // remove the entry from the collection + names = append(names[:i], names[i+1:]...) + } + } + } + if assert.Empty(t, names) { + // map types + assertMapDefinition(t, defs, "SomeObject", "object", "", "") + } + + scantest.CompareOrDumpJSON(t, defs, "go118_schema_aliased.json") +} + +func TestGo118InterfaceField(t *testing.T) { + sctx := scantest.LoadGo118ClassificationPkgsCtx(t) + decl := getGo118ClassificationModel(sctx, "Interfaced") + require.NotNil(t, decl) + prs := &Builder{ + ctx: sctx, + decl: decl, + } + models := make(map[string]oaispec.Schema) + require.NoError(t, prs.Build(models)) + + schema := models["Interfaced"] + scantest.AssertProperty(t, &schema, "", "custom_data", "", "CustomData") + + scantest.CompareOrDumpJSON(t, models, "go118_schema_Interfaced.json") +} + +func TestGo118_Issue2809(t *testing.T) { + sctx := scantest.LoadGo118ClassificationPkgsCtx(t) + decl := getGo118ClassificationModel(sctx, "transportErr") + require.NotNil(t, decl) + prs := &Builder{ + ctx: sctx, + decl: decl, + } + models := make(map[string]oaispec.Schema) + require.NoError(t, prs.Build(models)) + + schema := models["transportErr"] + scantest.AssertProperty(t, &schema, "", "data", "", "Data") + + scantest.CompareOrDumpJSON(t, models, "go118_schema_transportErr.json") +} diff --git a/internal/builders/schema/schema_test.go b/internal/builders/schema/schema_test.go new file mode 100644 index 0000000..24b45e2 --- /dev/null +++ b/internal/builders/schema/schema_test.go @@ -0,0 +1,1506 @@ +// 
SPDX-FileCopyrightText: Copyright 2015-2025 go-swagger maintainers +// SPDX-License-Identifier: Apache-2.0 + +package schema + +import ( + "encoding/json" + "testing" + + "github.com/go-openapi/codescan/internal/scanner" + "github.com/go-openapi/codescan/internal/scantest" + "github.com/go-openapi/testify/v2/assert" + "github.com/go-openapi/testify/v2/require" + + oaispec "github.com/go-openapi/spec" +) + +const ( + epsilon = 1e-9 + + // fixturesModule is the module path of the fixtures nested module. + fixturesModule = "github.com/go-openapi/codescan/fixtures" +) + +func TestBuilder_Struct_Tag(t *testing.T) { + ctx := scantest.LoadPetstorePkgsCtx(t, false) + + var td *scanner.EntityDecl + t.Run("should find a Tag model", func(t *testing.T) { + for k, v := range ctx.Models() { + if k.Name != "Tag" { + continue + } + td = v + break + } + require.NotNil(t, td) + }) + + prs := &Builder{ + ctx: ctx, + decl: td, + } + result := make(map[string]oaispec.Schema) + require.NoError(t, prs.Build(result)) + + scantest.CompareOrDumpJSON(t, result, "petstore_schema_Tag.json") +} + +func TestBuilder_Struct_Pet(t *testing.T) { + // Debug = true + // defer func() { Debug = false }() + + ctx := scantest.LoadPetstorePkgsCtx(t, false) + var td *scanner.EntityDecl + for k, v := range ctx.Models() { + if k.Name != "Pet" { + continue + } + td = v + break + } + require.NotNil(t, td) + + prs := &Builder{ + ctx: ctx, + decl: td, + } + result := make(map[string]oaispec.Schema) + require.NoError(t, prs.Build(result)) + + scantest.CompareOrDumpJSON(t, result, "petstore_schema_Pet.json") +} + +func TestBuilder_Struct_Order(t *testing.T) { + // Debug = true + // defer func() { Debug = false }() + + ctx := scantest.LoadPetstorePkgsCtx(t, false) + var td *scanner.EntityDecl + for k, v := range ctx.Models() { + if k.Name != "Order" { + continue + } + td = v + break + } + require.NotNil(t, td) + + prs := &Builder{ + ctx: ctx, + decl: td, + } + result := make(map[string]oaispec.Schema) + 
require.NoError(t, prs.Build(result)) + + scantest.CompareOrDumpJSON(t, result, "petstore_schema_Order.json") +} + +func TestBuilder(t *testing.T) { + ctx := scantest.LoadClassificationPkgsCtx(t) + decl := getClassificationModel(ctx, "NoModel") + require.NotNil(t, decl) + prs := &Builder{ + ctx: ctx, + decl: decl, + } + models := make(map[string]oaispec.Schema) + require.NoError(t, prs.Build(models)) + schema := models["NoModel"] + + assert.Equal(t, oaispec.StringOrArray([]string{"object"}), schema.Type) + assert.EqualT(t, "NoModel is a struct without an annotation.", schema.Title) + assert.EqualT(t, "NoModel exists in a package\nbut is not annotated with the swagger model annotations\nso it should now show up in a test.", schema.Description) + assert.Len(t, schema.Required, 3) + assert.Len(t, schema.Properties, 12) + + scantest.AssertProperty(t, &schema, "integer", "id", "int64", "ID") + prop, ok := schema.Properties["id"] + assert.EqualT(t, "ID of this no model instance.\nids in this application start at 11 and are smaller than 1000", prop.Description) + assert.TrueT(t, ok, "should have had an 'id' property") + assert.InDeltaT(t, 1000.00, *prop.Maximum, epsilon) + assert.TrueT(t, prop.ExclusiveMaximum, "'id' should have had an exclusive maximum") + assert.NotNil(t, prop.Minimum) + assert.InDeltaT(t, 10.00, *prop.Minimum, epsilon) + assert.TrueT(t, prop.ExclusiveMinimum, "'id' should have had an exclusive minimum") + assert.Equal(t, 11, prop.Default, "ID default value is incorrect") + + scantest.AssertProperty(t, &schema, "string", "NoNameOmitEmpty", "", "") + prop, ok = schema.Properties["NoNameOmitEmpty"] + assert.EqualT(t, "A field which has omitempty set but no name", prop.Description) + assert.TrueT(t, ok, "should have had an 'NoNameOmitEmpty' property") + + scantest.AssertProperty(t, &schema, "string", "noteb64", "byte", "Note") + prop, ok = schema.Properties["noteb64"] + assert.TrueT(t, ok, "should have a 'noteb64' property") + assert.Nil(t, prop.Items) + + 
scantest.AssertProperty(t, &schema, "integer", "score", "int32", "Score") + prop, ok = schema.Properties["score"] + assert.EqualT(t, "The Score of this model", prop.Description) + assert.TrueT(t, ok, "should have had a 'score' property") + assert.InDeltaT(t, 45.00, *prop.Maximum, epsilon) + assert.FalseT(t, prop.ExclusiveMaximum, "'score' should not have had an exclusive maximum") + assert.NotNil(t, prop.Minimum) + assert.InDeltaT(t, 3.00, *prop.Minimum, epsilon) + assert.FalseT(t, prop.ExclusiveMinimum, "'score' should not have had an exclusive minimum") + assert.EqualValues(t, 27, prop.Example) + require.NotNil(t, prop.MultipleOf, "'score' should have had a multipleOf") + assert.InDeltaT(t, 3.00, *prop.MultipleOf, epsilon, "'score' should have had multipleOf 3") + + expectedNameExtensions := oaispec.Extensions{ + "x-go-name": "Name", + "x-property-array": []any{ + "value1", + "value2", + }, + "x-property-array-obj": []any{ + map[string]any{ + "name": "obj", + "value": "field", + }, + }, + "x-property-value": "value", + } + + scantest.AssertProperty(t, &schema, "string", "name", "", "Name") + prop, ok = schema.Properties["name"] + assert.TrueT(t, ok) + assert.EqualT(t, "Name of this no model instance", prop.Description) + require.NotNil(t, prop.MinLength) + require.NotNil(t, prop.MaxLength) + assert.EqualT(t, int64(4), *prop.MinLength) + assert.EqualT(t, int64(50), *prop.MaxLength) + assert.EqualT(t, "[A-Za-z0-9-.]*", prop.Pattern) + assert.Equal(t, expectedNameExtensions, prop.Extensions) + + scantest.AssertProperty(t, &schema, "string", "created", "date-time", "Created") + prop, ok = schema.Properties["created"] + assert.EqualT(t, "Created holds the time when this entry was created", prop.Description) + assert.TrueT(t, ok, "should have a 'created' property") + assert.TrueT(t, prop.ReadOnly, "'created' should be read only") + + scantest.AssertProperty(t, &schema, "string", "gocreated", "date-time", "GoTimeCreated") + prop, ok = schema.Properties["gocreated"] + 
assert.EqualT(t, "GoTimeCreated holds the time when this entry was created in go time.Time", prop.Description) + assert.TrueT(t, ok, "should have a 'gocreated' property") + + scantest.AssertArrayProperty(t, &schema, "string", "foo_slice", "", "FooSlice") + prop, ok = schema.Properties["foo_slice"] + assert.EqualT(t, "a FooSlice has foos which are strings", prop.Description) + assert.TrueT(t, ok, "should have a 'foo_slice' property") + require.NotNil(t, prop.Items, "foo_slice should have had an items property") + require.NotNil(t, prop.Items.Schema, "foo_slice.items should have had a schema property") + assert.TrueT(t, prop.UniqueItems, "'foo_slice' should have unique items") + assert.EqualT(t, int64(3), *prop.MinItems, "'foo_slice' should have had 3 min items") + assert.EqualT(t, int64(10), *prop.MaxItems, "'foo_slice' should have had 10 max items") + itprop := prop.Items.Schema + assert.EqualT(t, int64(3), *itprop.MinLength, "'foo_slice.items.minLength' should have been 3") + assert.EqualT(t, int64(10), *itprop.MaxLength, "'foo_slice.items.maxLength' should have been 10") + assert.EqualT(t, "\\w+", itprop.Pattern, "'foo_slice.items.pattern' should have \\w+") + + scantest.AssertArrayProperty(t, &schema, "string", "time_slice", "date-time", "TimeSlice") + prop, ok = schema.Properties["time_slice"] + assert.EqualT(t, "a TimeSlice is a slice of times", prop.Description) + assert.TrueT(t, ok, "should have a 'time_slice' property") + require.NotNil(t, prop.Items, "time_slice should have had an items property") + require.NotNil(t, prop.Items.Schema, "time_slice.items should have had a schema property") + assert.TrueT(t, prop.UniqueItems, "'time_slice' should have unique items") + assert.EqualT(t, int64(3), *prop.MinItems, "'time_slice' should have had 3 min items") + assert.EqualT(t, int64(10), *prop.MaxItems, "'time_slice' should have had 10 max items") + + scantest.AssertArrayProperty(t, &schema, "array", "bar_slice", "", "BarSlice") + prop, ok = 
schema.Properties["bar_slice"] + assert.EqualT(t, "a BarSlice has bars which are strings", prop.Description) + assert.TrueT(t, ok, "should have a 'bar_slice' property") + require.NotNil(t, prop.Items, "bar_slice should have had an items property") + require.NotNil(t, prop.Items.Schema, "bar_slice.items should have had a schema property") + assert.TrueT(t, prop.UniqueItems, "'bar_slice' should have unique items") + assert.EqualT(t, int64(3), *prop.MinItems, "'bar_slice' should have had 3 min items") + assert.EqualT(t, int64(10), *prop.MaxItems, "'bar_slice' should have had 10 max items") + + itprop = prop.Items.Schema + require.NotNil(t, itprop) + assert.EqualT(t, int64(4), *itprop.MinItems, "'bar_slice.items.minItems' should have been 4") + assert.EqualT(t, int64(9), *itprop.MaxItems, "'bar_slice.items.maxItems' should have been 9") + + itprop2 := itprop.Items.Schema + require.NotNil(t, itprop2) + assert.EqualT(t, int64(5), *itprop2.MinItems, "'bar_slice.items.items.minItems' should have been 5") + assert.EqualT(t, int64(8), *itprop2.MaxItems, "'bar_slice.items.items.maxItems' should have been 8") + + itprop3 := itprop2.Items.Schema + require.NotNil(t, itprop3) + assert.EqualT(t, int64(3), *itprop3.MinLength, "'bar_slice.items.items.items.minLength' should have been 3") + assert.EqualT(t, int64(10), *itprop3.MaxLength, "'bar_slice.items.items.items.maxLength' should have been 10") + assert.EqualT(t, "\\w+", itprop3.Pattern, "'bar_slice.items.items.items.pattern' should have \\w+") + + scantest.AssertArrayProperty(t, &schema, "array", "deep_time_slice", "", "DeepTimeSlice") + prop, ok = schema.Properties["deep_time_slice"] + assert.EqualT(t, "a DeepSlice has bars which are time", prop.Description) + assert.TrueT(t, ok, "should have a 'deep_time_slice' property") + require.NotNil(t, prop.Items, "deep_time_slice should have had an items property") + require.NotNil(t, prop.Items.Schema, "deep_time_slice.items should have had a schema property") + assert.TrueT(t, 
prop.UniqueItems, "'deep_time_slice' should have unique items") + assert.EqualT(t, int64(3), *prop.MinItems, "'deep_time_slice' should have had 3 min items") + assert.EqualT(t, int64(10), *prop.MaxItems, "'deep_time_slice' should have had 10 max items") + itprop = prop.Items.Schema + require.NotNil(t, itprop) + assert.EqualT(t, int64(4), *itprop.MinItems, "'deep_time_slice.items.minItems' should have been 4") + assert.EqualT(t, int64(9), *itprop.MaxItems, "'deep_time_slice.items.maxItems' should have been 9") + + itprop2 = itprop.Items.Schema + require.NotNil(t, itprop2) + assert.EqualT(t, int64(5), *itprop2.MinItems, "'deep_time_slice.items.items.minItems' should have been 5") + assert.EqualT(t, int64(8), *itprop2.MaxItems, "'deep_time_slice.items.items.maxItems' should have been 8") + + itprop3 = itprop2.Items.Schema + require.NotNil(t, itprop3) + + scantest.AssertArrayProperty(t, &schema, "object", "items", "", "Items") + prop, ok = schema.Properties["items"] + assert.TrueT(t, ok, "should have an 'items' slice") + assert.NotNil(t, prop.Items, "items should have had an items property") + assert.NotNil(t, prop.Items.Schema, "items.items should have had a schema property") + itprop = prop.Items.Schema + assert.Len(t, itprop.Properties, 5) + assert.Len(t, itprop.Required, 4) + scantest.AssertProperty(t, itprop, "integer", "id", "int32", "ID") + iprop, ok := itprop.Properties["id"] + assert.TrueT(t, ok) + assert.EqualT(t, "ID of this no model instance.\nids in this application start at 11 and are smaller than 1000", iprop.Description) + require.NotNil(t, iprop.Maximum) + assert.InDeltaT(t, 1000.00, *iprop.Maximum, epsilon) + assert.TrueT(t, iprop.ExclusiveMaximum, "'id' should have had an exclusive maximum") + require.NotNil(t, iprop.Minimum) + assert.InDeltaT(t, 10.00, *iprop.Minimum, epsilon) + assert.TrueT(t, iprop.ExclusiveMinimum, "'id' should have had an exclusive minimum") + assert.Equal(t, 11, iprop.Default, "ID default value is incorrect") + + 
scantest.AssertRef(t, itprop, "pet", "Pet", "#/definitions/pet") + iprop, ok = itprop.Properties["pet"] + assert.TrueT(t, ok) + if itprop.Ref.String() != "" { + assert.EqualT(t, "The Pet to add to this NoModel items bucket.\nPets can appear more than once in the bucket", iprop.Description) + } + + scantest.AssertProperty(t, itprop, "integer", "quantity", "int16", "Quantity") + iprop, ok = itprop.Properties["quantity"] + assert.TrueT(t, ok) + assert.EqualT(t, "The amount of pets to add to this bucket.", iprop.Description) + assert.InDeltaT(t, 1.00, *iprop.Minimum, epsilon) + assert.InDeltaT(t, 10.00, *iprop.Maximum, epsilon) + + scantest.AssertProperty(t, itprop, "string", "expiration", "date-time", "Expiration") + iprop, ok = itprop.Properties["expiration"] + assert.TrueT(t, ok) + assert.EqualT(t, "A dummy expiration date.", iprop.Description) + + scantest.AssertProperty(t, itprop, "string", "notes", "", "Notes") + iprop, ok = itprop.Properties["notes"] + assert.TrueT(t, ok) + assert.EqualT(t, "Notes to add to this item.\nThis can be used to add special instructions.", iprop.Description) + + decl2 := getClassificationModel(ctx, "StoreOrder") + require.NotNil(t, decl2) + require.NoError(t, (&Builder{decl: decl2, ctx: ctx}).Build(models)) + msch, ok := models["order"] + pn := fixturesModule + "/goparsing/classification/models" + assert.TrueT(t, ok) + assert.Equal(t, pn, msch.Extensions["x-go-package"]) + assert.Equal(t, "StoreOrder", msch.Extensions["x-go-name"]) + + scantest.CompareOrDumpJSON(t, models, "classification_schema_NoModel.json") +} + +func TestBuilder_AddExtensions(t *testing.T) { + ctx := scantest.LoadClassificationPkgsCtx(t) + models := make(map[string]oaispec.Schema) + decl := getClassificationModel(ctx, "StoreOrder") + require.NotNil(t, decl) + require.NoError(t, (&Builder{decl: decl, ctx: ctx}).Build(models)) + + msch, ok := models["order"] + pn := fixturesModule + "/goparsing/classification/models" + assert.TrueT(t, ok) + assert.Equal(t, pn, 
msch.Extensions["x-go-package"]) + assert.Equal(t, "StoreOrder", msch.Extensions["x-go-name"]) + assert.EqualT(t, "StoreOrder represents an order in this application.", msch.Title) +} + +func TestTextMarhalCustomType(t *testing.T) { + ctx := scantest.LoadClassificationPkgsCtx(t) + decl := getClassificationModel(ctx, "TextMarshalModel") + require.NotNil(t, decl) + prs := &Builder{ + ctx: ctx, + decl: decl, + } + models := make(map[string]oaispec.Schema) + require.NoError(t, prs.Build(models)) + schema := models["TextMarshalModel"] + scantest.AssertProperty(t, &schema, "string", "id", "uuid", "ID") + scantest.AssertArrayProperty(t, &schema, "string", "ids", "uuid", "IDs") + scantest.AssertProperty(t, &schema, "string", "struct", "", "Struct") + scantest.AssertProperty(t, &schema, "string", "map", "", "Map") + assertMapProperty(t, &schema, "string", "mapUUID", "uuid", "MapUUID") + scantest.AssertRef(t, &schema, "url", "URL", "#/definitions/URL") + scantest.AssertProperty(t, &schema, "string", "time", "date-time", "Time") + scantest.AssertProperty(t, &schema, "string", "structStrfmt", "date-time", "StructStrfmt") + scantest.AssertProperty(t, &schema, "string", "structStrfmtPtr", "date-time", "StructStrfmtPtr") + scantest.AssertProperty(t, &schema, "string", "customUrl", "url", "CustomURL") +} + +func TestEmbeddedTypes(t *testing.T) { + ctx := scantest.LoadClassificationPkgsCtx(t) + decl := getClassificationModel(ctx, "ComplexerOne") + require.NotNil(t, decl) + prs := &Builder{ + ctx: ctx, + decl: decl, + } + models := make(map[string]oaispec.Schema) + require.NoError(t, prs.Build(models)) + schema := models["ComplexerOne"] + scantest.AssertProperty(t, &schema, "integer", "age", "int32", "Age") + scantest.AssertProperty(t, &schema, "integer", "id", "int64", "ID") + scantest.AssertProperty(t, &schema, "string", "createdAt", "date-time", "CreatedAt") + scantest.AssertProperty(t, &schema, "string", "extra", "", "Extra") + scantest.AssertProperty(t, &schema, "string", 
"name", "", "Name") + scantest.AssertProperty(t, &schema, "string", "notes", "", "Notes") +} + +func TestParsePrimitiveSchemaProperty(t *testing.T) { + ctx := scantest.LoadClassificationPkgsCtx(t) + decl := getClassificationModel(ctx, "PrimateModel") + require.NotNil(t, decl) + prs := &Builder{ + ctx: ctx, + decl: decl, + } + models := make(map[string]oaispec.Schema) + require.NoError(t, prs.Build(models)) + + schema := models["PrimateModel"] + scantest.AssertProperty(t, &schema, "boolean", "a", "", "A") + scantest.AssertProperty(t, &schema, "integer", "b", "int32", "B") + scantest.AssertProperty(t, &schema, "string", "c", "", "C") + scantest.AssertProperty(t, &schema, "integer", "d", "int64", "D") + scantest.AssertProperty(t, &schema, "integer", "e", "int8", "E") + scantest.AssertProperty(t, &schema, "integer", "f", "int16", "F") + scantest.AssertProperty(t, &schema, "integer", "g", "int32", "G") + scantest.AssertProperty(t, &schema, "integer", "h", "int64", "H") + scantest.AssertProperty(t, &schema, "integer", "i", "uint64", "I") + scantest.AssertProperty(t, &schema, "integer", "j", "uint8", "J") + scantest.AssertProperty(t, &schema, "integer", "k", "uint16", "K") + scantest.AssertProperty(t, &schema, "integer", "l", "uint32", "L") + scantest.AssertProperty(t, &schema, "integer", "m", "uint64", "M") + scantest.AssertProperty(t, &schema, "number", "n", "float", "N") + scantest.AssertProperty(t, &schema, "number", "o", "double", "O") + scantest.AssertProperty(t, &schema, "integer", "p", "uint8", "P") + scantest.AssertProperty(t, &schema, "integer", "q", "uint64", "Q") +} + +func TestParseStringFormatSchemaProperty(t *testing.T) { + ctx := scantest.LoadClassificationPkgsCtx(t) + decl := getClassificationModel(ctx, "FormattedModel") + require.NotNil(t, decl) + prs := &Builder{ + ctx: ctx, + decl: decl, + } + models := make(map[string]oaispec.Schema) + require.NoError(t, prs.Build(models)) + + schema := models["FormattedModel"] + scantest.AssertProperty(t, &schema, 
"string", "a", "byte", "A") + scantest.AssertProperty(t, &schema, "string", "b", "creditcard", "B") + scantest.AssertProperty(t, &schema, "string", "c", "date", "C") + scantest.AssertProperty(t, &schema, "string", "d", "date-time", "D") + scantest.AssertProperty(t, &schema, "string", "e", "duration", "E") + scantest.AssertProperty(t, &schema, "string", "f", "email", "F") + scantest.AssertProperty(t, &schema, "string", "g", "hexcolor", "G") + scantest.AssertProperty(t, &schema, "string", "h", "hostname", "H") + scantest.AssertProperty(t, &schema, "string", "i", "ipv4", "I") + scantest.AssertProperty(t, &schema, "string", "j", "ipv6", "J") + scantest.AssertProperty(t, &schema, "string", "k", "isbn", "K") + scantest.AssertProperty(t, &schema, "string", "l", "isbn10", "L") + scantest.AssertProperty(t, &schema, "string", "m", "isbn13", "M") + scantest.AssertProperty(t, &schema, "string", "n", "rgbcolor", "N") + scantest.AssertProperty(t, &schema, "string", "o", "ssn", "O") + scantest.AssertProperty(t, &schema, "string", "p", "uri", "P") + scantest.AssertProperty(t, &schema, "string", "q", "uuid", "Q") + scantest.AssertProperty(t, &schema, "string", "r", "uuid3", "R") + scantest.AssertProperty(t, &schema, "string", "s", "uuid4", "S") + scantest.AssertProperty(t, &schema, "string", "t", "uuid5", "T") + scantest.AssertProperty(t, &schema, "string", "u", "mac", "U") +} + +func TestStringStructTag(t *testing.T) { + ctx := scantest.LoadClassificationPkgsCtx(t) + decl := getClassificationModel(ctx, "JSONString") + require.NotNil(t, decl) + prs := &Builder{ + ctx: ctx, + decl: decl, + } + models := make(map[string]oaispec.Schema) + require.NoError(t, prs.Build(models)) + + sch := models["jsonString"] + scantest.AssertProperty(t, &sch, "string", "someInt", "int64", "SomeInt") + scantest.AssertProperty(t, &sch, "string", "someInt8", "int8", "SomeInt8") + scantest.AssertProperty(t, &sch, "string", "someInt16", "int16", "SomeInt16") + scantest.AssertProperty(t, &sch, "string", 
"someInt32", "int32", "SomeInt32") + scantest.AssertProperty(t, &sch, "string", "someInt64", "int64", "SomeInt64") + scantest.AssertProperty(t, &sch, "string", "someUint", "uint64", "SomeUint") + scantest.AssertProperty(t, &sch, "string", "someUint8", "uint8", "SomeUint8") + scantest.AssertProperty(t, &sch, "string", "someUint16", "uint16", "SomeUint16") + scantest.AssertProperty(t, &sch, "string", "someUint32", "uint32", "SomeUint32") + scantest.AssertProperty(t, &sch, "string", "someUint64", "uint64", "SomeUint64") + scantest.AssertProperty(t, &sch, "string", "someFloat64", "double", "SomeFloat64") + scantest.AssertProperty(t, &sch, "string", "someString", "", "SomeString") + scantest.AssertProperty(t, &sch, "string", "someBool", "", "SomeBool") + scantest.AssertProperty(t, &sch, "string", "SomeDefaultInt", "int64", "") + + prop, ok := sch.Properties["somethingElse"] + if assert.TrueT(t, ok) { + assert.NotEqual(t, "string", prop.Type) + } +} + +func TestPtrFieldStringStructTag(t *testing.T) { + ctx := scantest.LoadClassificationPkgsCtx(t) + decl := getClassificationModel(ctx, "JSONPtrString") + require.NotNil(t, decl) + prs := &Builder{ + ctx: ctx, + decl: decl, + } + models := make(map[string]oaispec.Schema) + require.NoError(t, prs.Build(models)) + + sch := models["jsonPtrString"] + scantest.AssertProperty(t, &sch, "string", "someInt", "int64", "SomeInt") + scantest.AssertProperty(t, &sch, "string", "someInt8", "int8", "SomeInt8") + scantest.AssertProperty(t, &sch, "string", "someInt16", "int16", "SomeInt16") + scantest.AssertProperty(t, &sch, "string", "someInt32", "int32", "SomeInt32") + scantest.AssertProperty(t, &sch, "string", "someInt64", "int64", "SomeInt64") + scantest.AssertProperty(t, &sch, "string", "someUint", "uint64", "SomeUint") + scantest.AssertProperty(t, &sch, "string", "someUint8", "uint8", "SomeUint8") + scantest.AssertProperty(t, &sch, "string", "someUint16", "uint16", "SomeUint16") + scantest.AssertProperty(t, &sch, "string", "someUint32", 
"uint32", "SomeUint32") + scantest.AssertProperty(t, &sch, "string", "someUint64", "uint64", "SomeUint64") + scantest.AssertProperty(t, &sch, "string", "someFloat64", "double", "SomeFloat64") + scantest.AssertProperty(t, &sch, "string", "someString", "", "SomeString") + scantest.AssertProperty(t, &sch, "string", "someBool", "", "SomeBool") + + prop, ok := sch.Properties["somethingElse"] + if assert.TrueT(t, ok) { + assert.NotEqual(t, "string", prop.Type) + } +} + +func TestIgnoredStructField(t *testing.T) { + ctx := scantest.LoadClassificationPkgsCtx(t) + decl := getClassificationModel(ctx, "IgnoredFields") + require.NotNil(t, decl) + prs := &Builder{ + ctx: ctx, + decl: decl, + } + models := make(map[string]oaispec.Schema) + require.NoError(t, prs.Build(models)) + + sch := models["ignoredFields"] + scantest.AssertProperty(t, &sch, "string", "someIncludedField", "", "SomeIncludedField") + scantest.AssertProperty(t, &sch, "string", "someErroneouslyIncludedField", "", "SomeErroneouslyIncludedField") + assert.Len(t, sch.Properties, 2) +} + +func TestParseStructFields(t *testing.T) { + ctx := scantest.LoadClassificationPkgsCtx(t) + decl := getClassificationModel(ctx, "SimpleComplexModel") + require.NotNil(t, decl) + prs := &Builder{ + ctx: ctx, + decl: decl, + } + models := make(map[string]oaispec.Schema) + require.NoError(t, prs.Build(models)) + + schema := models["SimpleComplexModel"] + scantest.AssertProperty(t, &schema, "object", "emb", "", "Emb") + eSchema := schema.Properties["emb"] + scantest.AssertProperty(t, &eSchema, "integer", "cid", "int64", "CID") + scantest.AssertProperty(t, &eSchema, "string", "baz", "", "Baz") + + scantest.AssertRef(t, &schema, "top", "Top", "#/definitions/Something") + scantest.AssertRef(t, &schema, "notSel", "NotSel", "#/definitions/NotSelected") +} + +func TestParsePointerFields(t *testing.T) { + ctx := scantest.LoadClassificationPkgsCtx(t) + decl := getClassificationModel(ctx, "Pointdexter") + require.NotNil(t, decl) + prs := 
&Builder{ + ctx: ctx, + decl: decl, + } + models := make(map[string]oaispec.Schema) + require.NoError(t, prs.Build(models)) + + schema := models["Pointdexter"] + + scantest.AssertProperty(t, &schema, "integer", "id", "int64", "ID") + scantest.AssertProperty(t, &schema, "string", "name", "", "Name") + scantest.AssertProperty(t, &schema, "object", "emb", "", "Emb") + scantest.AssertProperty(t, &schema, "string", "t", "uuid5", "T") + eSchema := schema.Properties["emb"] + scantest.AssertProperty(t, &eSchema, "integer", "cid", "int64", "CID") + scantest.AssertProperty(t, &eSchema, "string", "baz", "", "Baz") + + scantest.AssertRef(t, &schema, "top", "Top", "#/definitions/Something") + scantest.AssertRef(t, &schema, "notSel", "NotSel", "#/definitions/NotSelected") +} + +func TestEmbeddedStarExpr(t *testing.T) { + ctx := scantest.LoadClassificationPkgsCtx(t) + decl := getClassificationModel(ctx, "EmbeddedStarExpr") + require.NotNil(t, decl) + prs := &Builder{ + ctx: ctx, + decl: decl, + } + models := make(map[string]oaispec.Schema) + require.NoError(t, prs.Build(models)) + + schema := models["EmbeddedStarExpr"] + + scantest.AssertProperty(t, &schema, "integer", "embeddedMember", "int64", "EmbeddedMember") + scantest.AssertProperty(t, &schema, "integer", "notEmbedded", "int64", "NotEmbedded") +} + +func TestArrayOfPointers(t *testing.T) { + ctx := scantest.LoadClassificationPkgsCtx(t) + decl := getClassificationModel(ctx, "Cars") + require.NotNil(t, decl) + prs := &Builder{ + ctx: ctx, + decl: decl, + } + models := make(map[string]oaispec.Schema) + require.NoError(t, prs.Build(models)) + + schema := models["cars"] + scantest.AssertProperty(t, &schema, "array", "cars", "", "Cars") +} + +func TestOverridingOneIgnore(t *testing.T) { + ctx := scantest.LoadClassificationPkgsCtx(t) + decl := getClassificationModel(ctx, "OverridingOneIgnore") + require.NotNil(t, decl) + prs := &Builder{ + ctx: ctx, + decl: decl, + } + models := make(map[string]oaispec.Schema) + require.NoError(t, 
prs.Build(models)) + + schema := models["OverridingOneIgnore"] + + scantest.AssertProperty(t, &schema, "integer", "id", "int64", "ID") + scantest.AssertProperty(t, &schema, "string", "name", "", "Name") + assert.Len(t, schema.Properties, 2) +} + +type collectionAssertions struct { + assertProperty func(t *testing.T, schema *oaispec.Schema, typeName, jsonName, format, goName string) + assertRef func(t *testing.T, schema *oaispec.Schema, jsonName, goName, fragment string) + nestedSchema func(prop oaispec.Schema) *oaispec.Schema +} + +func testParseCollectionFields( + t *testing.T, + modelName string, + ca collectionAssertions, +) { + t.Helper() + ctx := scantest.LoadClassificationPkgsCtx(t) + decl := getClassificationModel(ctx, modelName) + require.NotNil(t, decl) + prs := &Builder{ + ctx: ctx, + decl: decl, + } + models := make(map[string]oaispec.Schema) + require.NoError(t, prs.Build(models)) + + schema := models[modelName] + + ca.assertProperty(t, &schema, "integer", "ids", "int64", "IDs") + ca.assertProperty(t, &schema, "string", "names", "", "Names") + ca.assertProperty(t, &schema, "string", "uuids", "uuid", "UUIDs") + ca.assertProperty(t, &schema, "object", "embs", "", "Embs") + eSchema := ca.nestedSchema(schema.Properties["embs"]) + ca.assertProperty(t, eSchema, "integer", "cid", "int64", "CID") + ca.assertProperty(t, eSchema, "string", "baz", "", "Baz") + + ca.assertRef(t, &schema, "tops", "Tops", "#/definitions/Something") + ca.assertRef(t, &schema, "notSels", "NotSels", "#/definitions/NotSelected") + + ca.assertProperty(t, &schema, "integer", "ptrIds", "int64", "PtrIDs") + ca.assertProperty(t, &schema, "string", "ptrNames", "", "PtrNames") + ca.assertProperty(t, &schema, "string", "ptrUuids", "uuid", "PtrUUIDs") + ca.assertProperty(t, &schema, "object", "ptrEmbs", "", "PtrEmbs") + eSchema = ca.nestedSchema(schema.Properties["ptrEmbs"]) + ca.assertProperty(t, eSchema, "integer", "ptrCid", "int64", "PtrCID") + ca.assertProperty(t, eSchema, "string", "ptrBaz", 
"", "PtrBaz") + + ca.assertRef(t, &schema, "ptrTops", "PtrTops", "#/definitions/Something") + ca.assertRef(t, &schema, "ptrNotSels", "PtrNotSels", "#/definitions/NotSelected") +} + +func TestParseSliceFields(t *testing.T) { + testParseCollectionFields(t, "SliceAndDice", collectionAssertions{ + assertProperty: scantest.AssertArrayProperty, + assertRef: assertArrayRef, + nestedSchema: func(prop oaispec.Schema) *oaispec.Schema { return prop.Items.Schema }, + }) +} + +func TestParseMapFields(t *testing.T) { + testParseCollectionFields(t, "MapTastic", collectionAssertions{ + assertProperty: assertMapProperty, + assertRef: assertMapRef, + nestedSchema: func(prop oaispec.Schema) *oaispec.Schema { return prop.AdditionalProperties.Schema }, + }) +} + +func TestInterfaceField(t *testing.T) { + ctx := scantest.LoadClassificationPkgsCtx(t) + decl := getClassificationModel(ctx, "Interfaced") + require.NotNil(t, decl) + prs := &Builder{ + ctx: ctx, + decl: decl, + } + models := make(map[string]oaispec.Schema) + require.NoError(t, prs.Build(models)) + + schema := models["Interfaced"] + scantest.AssertProperty(t, &schema, "", "custom_data", "", "CustomData") +} + +func TestAliasedTypes(t *testing.T) { + ctx := scantest.LoadClassificationPkgsCtx(t) + decl := getClassificationModel(ctx, "OtherTypes") + require.NotNil(t, decl) + prs := &Builder{ + ctx: ctx, + decl: decl, + } + models := make(map[string]oaispec.Schema) + require.NoError(t, prs.Build(models)) + + schema := models["OtherTypes"] + scantest.AssertRef(t, &schema, "named", "Named", "#/definitions/SomeStringType") + scantest.AssertRef(t, &schema, "numbered", "Numbered", "#/definitions/SomeIntType") + scantest.AssertProperty(t, &schema, "string", "dated", "date-time", "Dated") + scantest.AssertRef(t, &schema, "timed", "Timed", "#/definitions/SomeTimedType") + scantest.AssertRef(t, &schema, "petted", "Petted", "#/definitions/SomePettedType") + scantest.AssertRef(t, &schema, "somethinged", "Somethinged", 
"#/definitions/SomethingType") + scantest.AssertRef(t, &schema, "strMap", "StrMap", "#/definitions/SomeStringMap") + scantest.AssertRef(t, &schema, "strArrMap", "StrArrMap", "#/definitions/SomeArrayStringMap") + + scantest.AssertRef(t, &schema, "manyNamed", "ManyNamed", "#/definitions/SomeStringsType") + scantest.AssertRef(t, &schema, "manyNumbered", "ManyNumbered", "#/definitions/SomeIntsType") + scantest.AssertArrayProperty(t, &schema, "string", "manyDated", "date-time", "ManyDated") + scantest.AssertRef(t, &schema, "manyTimed", "ManyTimed", "#/definitions/SomeTimedsType") + scantest.AssertRef(t, &schema, "manyPetted", "ManyPetted", "#/definitions/SomePettedsType") + scantest.AssertRef(t, &schema, "manySomethinged", "ManySomethinged", "#/definitions/SomethingsType") + + assertArrayRef(t, &schema, "nameds", "Nameds", "#/definitions/SomeStringType") + assertArrayRef(t, &schema, "numbereds", "Numbereds", "#/definitions/SomeIntType") + scantest.AssertArrayProperty(t, &schema, "string", "dateds", "date-time", "Dateds") + assertArrayRef(t, &schema, "timeds", "Timeds", "#/definitions/SomeTimedType") + assertArrayRef(t, &schema, "petteds", "Petteds", "#/definitions/SomePettedType") + assertArrayRef(t, &schema, "somethingeds", "Somethingeds", "#/definitions/SomethingType") + + scantest.AssertRef(t, &schema, "modsNamed", "ModsNamed", "#/definitions/modsSomeStringType") + scantest.AssertRef(t, &schema, "modsNumbered", "ModsNumbered", "#/definitions/modsSomeIntType") + scantest.AssertProperty(t, &schema, "string", "modsDated", "date-time", "ModsDated") + scantest.AssertRef(t, &schema, "modsTimed", "ModsTimed", "#/definitions/modsSomeTimedType") + scantest.AssertRef(t, &schema, "modsPetted", "ModsPetted", "#/definitions/modsSomePettedType") + + assertArrayRef(t, &schema, "modsNameds", "ModsNameds", "#/definitions/modsSomeStringType") + assertArrayRef(t, &schema, "modsNumbereds", "ModsNumbereds", "#/definitions/modsSomeIntType") + scantest.AssertArrayProperty(t, &schema, 
"string", "modsDateds", "date-time", "ModsDateds") + assertArrayRef(t, &schema, "modsTimeds", "ModsTimeds", "#/definitions/modsSomeTimedType") + assertArrayRef(t, &schema, "modsPetteds", "ModsPetteds", "#/definitions/modsSomePettedType") + + scantest.AssertRef(t, &schema, "manyModsNamed", "ManyModsNamed", "#/definitions/modsSomeStringsType") + scantest.AssertRef(t, &schema, "manyModsNumbered", "ManyModsNumbered", "#/definitions/modsSomeIntsType") + scantest.AssertArrayProperty(t, &schema, "string", "manyModsDated", "date-time", "ManyModsDated") + scantest.AssertRef(t, &schema, "manyModsTimed", "ManyModsTimed", "#/definitions/modsSomeTimedsType") + scantest.AssertRef(t, &schema, "manyModsPetted", "ManyModsPetted", "#/definitions/modsSomePettedsType") + scantest.AssertRef(t, &schema, "manyModsPettedPtr", "ManyModsPettedPtr", "#/definitions/modsSomePettedsPtrType") + + scantest.AssertProperty(t, &schema, "string", "namedAlias", "", "NamedAlias") + scantest.AssertProperty(t, &schema, "integer", "numberedAlias", "int64", "NumberedAlias") + scantest.AssertArrayProperty(t, &schema, "string", "namedsAlias", "", "NamedsAlias") + scantest.AssertArrayProperty(t, &schema, "integer", "numberedsAlias", "int64", "NumberedsAlias") +} + +func TestAliasedModels(t *testing.T) { + ctx := scantest.LoadClassificationPkgsCtx(t) + + names := []string{ + "SomeStringType", + "SomeIntType", + "SomeTimeType", + "SomeTimedType", + "SomePettedType", + "SomethingType", + "SomeStringsType", + "SomeIntsType", + "SomeTimesType", + "SomeTimedsType", + "SomePettedsType", + "SomethingsType", + "SomeObject", + "SomeStringMap", + "SomeIntMap", + "SomeTimeMap", + "SomeTimedMap", + "SomePettedMap", + "SomeSomethingMap", + } + + defs := make(map[string]oaispec.Schema) + for _, nm := range names { + decl := getClassificationModel(ctx, nm) + require.NotNil(t, decl) + + prs := &Builder{ + decl: decl, + ctx: ctx, + } + require.NoError(t, prs.Build(defs)) + } + + for k := range defs { + for i, b := range names 
{ + if b == k { + // remove the entry from the collection + names = append(names[:i], names[i+1:]...) + } + } + } + if assert.Empty(t, names) { + // single value types + assertDefinition(t, defs, "SomeStringType", "string", "") + assertDefinition(t, defs, "SomeIntType", "integer", "int64") + assertDefinition(t, defs, "SomeTimeType", "string", "date-time") + assertDefinition(t, defs, "SomeTimedType", "string", "date-time") + assertRefDefinition(t, defs, "SomePettedType", "#/definitions/pet", "") + assertRefDefinition(t, defs, "SomethingType", "#/definitions/Something", "") + + // slice types + assertArrayDefinition(t, defs, "SomeStringsType", "string", "", "") + assertArrayDefinition(t, defs, "SomeIntsType", "integer", "int64", "") + assertArrayDefinition(t, defs, "SomeTimesType", "string", "date-time", "") + assertArrayDefinition(t, defs, "SomeTimedsType", "string", "date-time", "") + assertArrayWithRefDefinition(t, defs, "SomePettedsType", "#/definitions/pet", "") + assertArrayWithRefDefinition(t, defs, "SomethingsType", "#/definitions/Something", "") + + // map types + assertMapDefinition(t, defs, "SomeObject", "object", "", "") + assertMapDefinition(t, defs, "SomeStringMap", "string", "", "") + assertMapDefinition(t, defs, "SomeIntMap", "integer", "int64", "") + assertMapDefinition(t, defs, "SomeTimeMap", "string", "date-time", "") + assertMapDefinition(t, defs, "SomeTimedMap", "string", "date-time", "") + assertMapWithRefDefinition(t, defs, "SomePettedMap", "#/definitions/pet", "") + assertMapWithRefDefinition(t, defs, "SomeSomethingMap", "#/definitions/Something", "") + } +} + +func TestAliasedTopLevelModels(t *testing.T) { + t.Run("with options: no scan models, with aliases as ref", func(t *testing.T) { + t.Run("with goparsing/spec", func(t *testing.T) { + ctx, err := scanner.NewScanCtx(&scanner.Options{ + Packages: []string{ + "./goparsing/spec", + }, + WorkDir: scantest.FixturesDir(), + ScanModels: false, + RefAliases: true, + }) + require.NoError(t, err) + 
+ t.Run("should find User definition in source", func(t *testing.T) { + _, hasUser := ctx.FindDecl(fixturesModule+"/goparsing/spec", "User") + require.TrueT(t, hasUser) + }) + + var decl *scanner.EntityDecl + t.Run("should find Customer definition in source", func(t *testing.T) { + var hasCustomer bool + decl, hasCustomer = ctx.FindDecl(fixturesModule+"/goparsing/spec", "Customer") + require.TrueT(t, hasCustomer) + }) + + t.Run("with schema builder", func(t *testing.T) { + require.NotNil(t, decl) + builder := &Builder{ + ctx: ctx, + decl: decl, + } + + t.Run("should build model for Customer", func(t *testing.T) { + models := make(map[string]oaispec.Schema) + require.NoError(t, builder.Build(models)) + + assertRefDefinition(t, models, "Customer", "#/definitions/User", "") + }) + + t.Run("should have discovered models for User and Customer", func(t *testing.T) { + require.Len(t, builder.postDecls, 2) + foundUserIndex := -1 + foundCustomerIndex := -1 + + for i, discoveredDecl := range builder.postDecls { + switch discoveredDecl.Obj().Name() { + case "User": + foundUserIndex = i + case "Customer": + foundCustomerIndex = i + } + } + require.GreaterOrEqualT(t, foundUserIndex, 0) + require.GreaterOrEqualT(t, foundCustomerIndex, 0) + + userBuilder := &Builder{ + ctx: ctx, + decl: builder.postDecls[foundUserIndex], + } + + t.Run("should build model for User", func(t *testing.T) { + models := make(map[string]oaispec.Schema) + require.NoError(t, userBuilder.Build(models)) + + require.MapContainsT(t, models, "User") + + user := models["User"] + assert.TrueT(t, user.Type.Contains("object")) + + userProperties := user.Properties + require.MapContainsT(t, userProperties, "name") + }) + }) + }) + }) + }) + + t.Run("with options: no scan models, without aliases as ref", func(t *testing.T) { + t.Run("with goparsing/spec", func(t *testing.T) { + ctx, err := scanner.NewScanCtx(&scanner.Options{ + Packages: []string{ + "./goparsing/spec", + }, + WorkDir: scantest.FixturesDir(), + 
ScanModels: false, + RefAliases: false, + }) + require.NoError(t, err) + + t.Run("should find User definition in source", func(t *testing.T) { + _, hasUser := ctx.FindDecl(fixturesModule+"/goparsing/spec", "User") + require.TrueT(t, hasUser) + }) + + var decl *scanner.EntityDecl + t.Run("should find Customer definition in source", func(t *testing.T) { + var hasCustomer bool + decl, hasCustomer = ctx.FindDecl(fixturesModule+"/goparsing/spec", "Customer") + require.TrueT(t, hasCustomer) + }) + + t.Run("with schema builder", func(t *testing.T) { + require.NotNil(t, decl) + builder := &Builder{ + ctx: ctx, + decl: decl, + } + + t.Run("should build model for Customer", func(t *testing.T) { + models := make(map[string]oaispec.Schema) + require.NoError(t, builder.Build(models)) + + require.MapContainsT(t, models, "Customer") + customer := models["Customer"] + require.MapNotContainsT(t, models, "User") + + assert.TrueT(t, customer.Type.Contains("object")) + + customerProperties := customer.Properties + assert.MapContainsT(t, customerProperties, "name") + assert.NotEmpty(t, customer.Title) + }) + + t.Run("should have discovered only Customer", func(t *testing.T) { + require.Len(t, builder.postDecls, 1) + discovered := builder.postDecls[0] + assert.EqualT(t, "Customer", discovered.Obj().Name()) + }) + }) + }) + }) +} + +func TestEmbeddedAllOf(t *testing.T) { + ctx := scantest.LoadClassificationPkgsCtx(t) + decl := getClassificationModel(ctx, "AllOfModel") + require.NotNil(t, decl) + prs := &Builder{ + ctx: ctx, + decl: decl, + } + models := make(map[string]oaispec.Schema) + require.NoError(t, prs.Build(models)) + schema := models["AllOfModel"] + + require.Len(t, schema.AllOf, 3) + asch := schema.AllOf[0] + scantest.AssertProperty(t, &asch, "integer", "age", "int32", "Age") + scantest.AssertProperty(t, &asch, "integer", "id", "int64", "ID") + scantest.AssertProperty(t, &asch, "string", "name", "", "Name") + + asch = schema.AllOf[1] + assert.EqualT(t, 
"#/definitions/withNotes", asch.Ref.String()) + + asch = schema.AllOf[2] + scantest.AssertProperty(t, &asch, "string", "createdAt", "date-time", "CreatedAt") + scantest.AssertProperty(t, &asch, "integer", "did", "int64", "DID") + scantest.AssertProperty(t, &asch, "string", "cat", "", "Cat") + + scantest.CompareOrDumpJSON(t, models, "classification_schema_AllOfModel.json") +} + +func TestPointersAreNullableByDefaultWhenSetXNullableForPointersIsSet(t *testing.T) { + allModels := make(map[string]oaispec.Schema) + assertModel := func(ctx *scanner.ScanCtx, packagePath, modelName string) { + decl, _ := ctx.FindDecl(packagePath, modelName) + require.NotNil(t, decl) + prs := &Builder{ + ctx: ctx, + decl: decl, + } + require.NoError(t, prs.Build(allModels)) + + schema := allModels[modelName] + require.Len(t, schema.Properties, 5) + + require.MapContainsT(t, schema.Properties, "Value1") + assert.Equal(t, true, schema.Properties["Value1"].Extensions["x-nullable"]) + require.MapContainsT(t, schema.Properties, "Value2") + assert.MapNotContainsT(t, schema.Properties["Value2"].Extensions, "x-nullable") + require.MapContainsT(t, schema.Properties, "Value3") + assert.Equal(t, false, schema.Properties["Value3"].Extensions["x-nullable"]) + require.MapContainsT(t, schema.Properties, "Value4") + assert.MapNotContainsT(t, schema.Properties["Value4"].Extensions, "x-nullable") + assert.Equal(t, false, schema.Properties["Value4"].Extensions["x-isnullable"]) + require.MapContainsT(t, schema.Properties, "Value5") + assert.MapNotContainsT(t, schema.Properties["Value5"].Extensions, "x-nullable") + } + + packagePattern := "./enhancements/pointers-nullable-by-default" + packagePath := fixturesModule + "/enhancements/pointers-nullable-by-default" + ctx, err := scanner.NewScanCtx(&scanner.Options{Packages: []string{packagePattern}, WorkDir: scantest.FixturesDir(), SetXNullableForPointers: true}) + require.NoError(t, err) + + assertModel(ctx, packagePath, "Item") + assertModel(ctx, packagePath, 
"ItemInterface") + + scantest.CompareOrDumpJSON(t, allModels, "enhancements_pointers_xnullable.json") +} + +func TestPointersAreNotNullableByDefaultWhenSetXNullableForPointersIsNotSet(t *testing.T) { + allModels := make(map[string]oaispec.Schema) + assertModel := func(ctx *scanner.ScanCtx, packagePath, modelName string) { + decl, _ := ctx.FindDecl(packagePath, modelName) + require.NotNil(t, decl) + prs := &Builder{ + ctx: ctx, + decl: decl, + } + require.NoError(t, prs.Build(allModels)) + + schema := allModels[modelName] + require.Len(t, schema.Properties, 5) + + require.MapContainsT(t, schema.Properties, "Value1") + assert.MapNotContainsT(t, schema.Properties["Value1"].Extensions, "x-nullable") + require.MapContainsT(t, schema.Properties, "Value2") + assert.MapNotContainsT(t, schema.Properties["Value2"].Extensions, "x-nullable") + require.MapContainsT(t, schema.Properties, "Value3") + assert.Equal(t, false, schema.Properties["Value3"].Extensions["x-nullable"]) + require.MapContainsT(t, schema.Properties, "Value4") + assert.MapNotContainsT(t, schema.Properties["Value4"].Extensions, "x-nullable") + assert.Equal(t, false, schema.Properties["Value4"].Extensions["x-isnullable"]) + require.MapContainsT(t, schema.Properties, "Value5") + assert.MapNotContainsT(t, schema.Properties["Value5"].Extensions, "x-nullable") + } + + packagePattern := "./enhancements/pointers-nullable-by-default" + packagePath := fixturesModule + "/enhancements/pointers-nullable-by-default" + ctx, err := scanner.NewScanCtx(&scanner.Options{Packages: []string{packagePattern}, WorkDir: scantest.FixturesDir()}) + require.NoError(t, err) + + assertModel(ctx, packagePath, "Item") + assertModel(ctx, packagePath, "ItemInterface") + + scantest.CompareOrDumpJSON(t, allModels, "enhancements_pointers_no_xnullable.json") +} + +func TestSwaggerTypeNamed(t *testing.T) { + ctx := scantest.LoadClassificationPkgsCtx(t) + decl := getClassificationModel(ctx, "NamedWithType") + require.NotNil(t, decl) + prs := 
&Builder{ + ctx: ctx, + decl: decl, + } + models := make(map[string]oaispec.Schema) + require.NoError(t, prs.Build(models)) + schema := models["namedWithType"] + + scantest.AssertProperty(t, &schema, "object", "some_map", "", "SomeMap") + + scantest.CompareOrDumpJSON(t, models, "classification_schema_NamedWithType.json") +} + +func TestSwaggerTypeNamedWithGenerics(t *testing.T) { + tests := map[string]func(t *testing.T, models map[string]oaispec.Schema){ + "NamedStringResults": func(t *testing.T, models map[string]oaispec.Schema) { + schema := models["namedStringResults"] + scantest.AssertArrayProperty(t, &schema, "string", "matches", "", "Matches") + }, + "NamedStoreOrderResults": func(t *testing.T, models map[string]oaispec.Schema) { + schema := models["namedStoreOrderResults"] + assertArrayRef(t, &schema, "matches", "Matches", "#/definitions/order") + }, + "NamedStringSlice": func(t *testing.T, models map[string]oaispec.Schema) { + assertArrayDefinition(t, models, "namedStringSlice", "string", "", "NamedStringSlice") + }, + "NamedStoreOrderSlice": func(t *testing.T, models map[string]oaispec.Schema) { + assertArrayWithRefDefinition(t, models, "namedStoreOrderSlice", "#/definitions/order", "NamedStoreOrderSlice") + }, + "NamedStringMap": func(t *testing.T, models map[string]oaispec.Schema) { + assertMapDefinition(t, models, "namedStringMap", "string", "", "NamedStringMap") + }, + "NamedStoreOrderMap": func(t *testing.T, models map[string]oaispec.Schema) { + assertMapWithRefDefinition(t, models, "namedStoreOrderMap", "#/definitions/order", "NamedStoreOrderMap") + }, + "NamedMapOfStoreOrderSlices": func(t *testing.T, models map[string]oaispec.Schema) { + assertMapDefinition(t, models, "namedMapOfStoreOrderSlices", "array", "", "NamedMapOfStoreOrderSlices") + arraySchema := models["namedMapOfStoreOrderSlices"].AdditionalProperties.Schema + assertArrayWithRefDefinition(t, map[string]oaispec.Schema{ + "array": *arraySchema, + }, "array", "#/definitions/order", "") + 
}, + } + + for testName, testFunc := range tests { + t.Run(testName, func(t *testing.T) { + ctx := scantest.LoadClassificationPkgsCtx(t) + decl := getClassificationModel(ctx, testName) + require.NotNil(t, decl) + prs := &Builder{ + ctx: ctx, + decl: decl, + } + models := make(map[string]oaispec.Schema) + require.NoError(t, prs.Build(models)) + testFunc(t, models) + }) + } +} + +func TestSwaggerTypeStruct(t *testing.T) { + ctx := scantest.LoadClassificationPkgsCtx(t) + decl := getClassificationModel(ctx, "NullString") + require.NotNil(t, decl) + prs := &Builder{ + ctx: ctx, + decl: decl, + } + models := make(map[string]oaispec.Schema) + require.NoError(t, prs.Build(models)) + schema := models["NullString"] + + assert.TrueT(t, schema.Type.Contains("string")) + + scantest.CompareOrDumpJSON(t, models, "classification_schema_NullString.json") +} + +func TestStructDiscriminators(t *testing.T) { + ctx := scantest.LoadClassificationPkgsCtx(t) + + models := make(map[string]oaispec.Schema) + for _, tn := range []string{"BaseStruct", "Giraffe", "Gazelle"} { + decl := getClassificationModel(ctx, tn) + require.NotNil(t, decl) + prs := &Builder{ + ctx: ctx, + decl: decl, + } + require.NoError(t, prs.Build(models)) + } + + schema := models["animal"] + + assert.Equal(t, "BaseStruct", schema.Extensions["x-go-name"]) + assert.EqualT(t, "jsonClass", schema.Discriminator) + + sch := models["gazelle"] + assert.Len(t, sch.AllOf, 2) + cl, _ := sch.Extensions.GetString("x-class") + assert.EqualT(t, "a.b.c.d.E", cl) + cl, _ = sch.Extensions.GetString("x-go-name") + assert.EqualT(t, "Gazelle", cl) + + sch = models["giraffe"] + assert.Len(t, sch.AllOf, 2) + cl, _ = sch.Extensions.GetString("x-class") + assert.Empty(t, cl) + cl, _ = sch.Extensions.GetString("x-go-name") + assert.EqualT(t, "Giraffe", cl) + + // sch = noModelDefs["lion"] + + // b, _ := json.MarshalIndent(sch, "", " ") + // fmt.Println(string(b)) + + scantest.CompareOrDumpJSON(t, models, 
"classification_schema_struct_discriminators.json") +} + +func TestInterfaceDiscriminators(t *testing.T) { + ctx := scantest.LoadClassificationPkgsCtx(t) + models := make(map[string]oaispec.Schema) + for _, tn := range []string{"BaseStruct", "Identifiable", "WaterType", "Fish", "TeslaCar", "ModelS", "ModelX", "ModelA", "Cars"} { + decl := getClassificationModel(ctx, tn) + require.NotNil(t, decl) + + prs := &Builder{ + ctx: ctx, + decl: decl, + } + require.NoError(t, prs.Build(models)) + } + + schema, ok := models["fish"] + + if assert.TrueT(t, ok) && assert.Len(t, schema.AllOf, 5) { + sch := schema.AllOf[3] + assert.Len(t, sch.Properties, 1) + scantest.AssertProperty(t, &sch, "string", "colorName", "", "ColorName") + + sch = schema.AllOf[2] + assert.EqualT(t, "#/definitions/extra", sch.Ref.String()) + + sch = schema.AllOf[0] + assert.Len(t, sch.Properties, 1) + scantest.AssertProperty(t, &sch, "integer", "id", "int64", "ID") + + sch = schema.AllOf[1] + assert.EqualT(t, "#/definitions/water", sch.Ref.String()) + + sch = schema.AllOf[4] + assert.Len(t, sch.Properties, 2) + scantest.AssertProperty(t, &sch, "string", "name", "", "Name") + scantest.AssertProperty(t, &sch, "string", "jsonClass", "", "StructType") + assert.EqualT(t, "jsonClass", sch.Discriminator) + } + + schema, ok = models["modelS"] + if assert.TrueT(t, ok) { + assert.Len(t, schema.AllOf, 2) + cl, _ := schema.Extensions.GetString("x-class") + assert.EqualT(t, "com.tesla.models.ModelS", cl) + cl, _ = schema.Extensions.GetString("x-go-name") + assert.EqualT(t, "ModelS", cl) + + sch := schema.AllOf[0] + assert.EqualT(t, "#/definitions/TeslaCar", sch.Ref.String()) + sch = schema.AllOf[1] + assert.Len(t, sch.Properties, 1) + scantest.AssertProperty(t, &sch, "string", "edition", "", "Edition") + } + + schema, ok = models["modelA"] + if assert.TrueT(t, ok) { + cl, _ := schema.Extensions.GetString("x-go-name") + assert.EqualT(t, "ModelA", cl) + + sch, ok := schema.Properties["Tesla"] + if assert.TrueT(t, ok) { 
+ assert.EqualT(t, "#/definitions/TeslaCar", sch.Ref.String()) + } + + scantest.AssertProperty(t, &schema, "integer", "doors", "int64", "Doors") + } + + scantest.CompareOrDumpJSON(t, models, "classification_schema_interface_discriminators.json") +} + +func getClassificationModel(ctx *scanner.ScanCtx, nm string) *scanner.EntityDecl { + decl, ok := ctx.FindDecl(fixturesModule+"/goparsing/classification/models", nm) + if !ok { + return nil + } + return decl +} + +func assertArrayRef(t *testing.T, schema *oaispec.Schema, jsonName, goName, fragment string) { + t.Helper() + + scantest.AssertArrayProperty(t, schema, "", jsonName, "", goName) + psch := schema.Properties[jsonName].Items.Schema + assert.EqualT(t, fragment, psch.Ref.String()) +} + +func assertDefinition(t *testing.T, defs map[string]oaispec.Schema, defName, typeName, formatName string) { + t.Helper() + + schema, ok := defs[defName] + if assert.TrueT(t, ok) { + if assert.NotEmpty(t, schema.Type) { + assert.EqualT(t, typeName, schema.Type[0]) + assert.Nil(t, schema.Extensions["x-go-name"]) + assert.EqualT(t, formatName, schema.Format) + } + } +} + +func assertMapDefinition(t *testing.T, defs map[string]oaispec.Schema, defName, typeName, formatName, goName string) { + t.Helper() + + schema, ok := defs[defName] + require.TrueT(t, ok) + require.NotEmpty(t, schema.Type) + + assert.EqualT(t, "object", schema.Type[0]) + adl := schema.AdditionalProperties + + require.NotNil(t, adl) + require.NotNil(t, adl.Schema) + + if len(adl.Schema.Type) > 0 { + assert.EqualT(t, typeName, adl.Schema.Type[0]) + } + assert.EqualT(t, formatName, adl.Schema.Format) + + assertExtension(t, schema, goName) +} + +func assertExtension(t *testing.T, schema oaispec.Schema, goName string) { + t.Helper() + + if goName != "" { + assert.Equal(t, goName, schema.Extensions["x-go-name"]) + + return + } + + assert.Nil(t, schema.Extensions["x-go-name"]) +} + +func assertMapWithRefDefinition(t *testing.T, defs map[string]oaispec.Schema, defName, 
refURL, goName string) { + t.Helper() + + schema, ok := defs[defName] + require.TrueT(t, ok) + require.NotEmpty(t, schema.Type) + assert.EqualT(t, "object", schema.Type[0]) + adl := schema.AdditionalProperties + require.NotNil(t, adl) + require.NotNil(t, adl.Schema) + require.NotZero(t, adl.Schema.Ref) + assert.EqualT(t, refURL, adl.Schema.Ref.String()) + assertExtension(t, schema, goName) +} + +func assertArrayDefinition(t *testing.T, defs map[string]oaispec.Schema, defName, typeName, formatName, goName string) { + t.Helper() + + schema, ok := defs[defName] + require.TrueT(t, ok) + require.NotEmpty(t, schema.Type) + assert.EqualT(t, "array", schema.Type[0]) + adl := schema.Items + require.NotNil(t, adl) + require.NotNil(t, adl.Schema) + assert.EqualT(t, typeName, adl.Schema.Type[0]) + assert.EqualT(t, formatName, adl.Schema.Format) + assertExtension(t, schema, goName) +} + +func assertArrayWithRefDefinition(t *testing.T, defs map[string]oaispec.Schema, defName, refURL, goName string) { + t.Helper() + + schema, ok := defs[defName] + require.TrueT(t, ok) + require.NotEmpty(t, schema.Type) + assert.EqualT(t, "array", schema.Type[0]) + adl := schema.Items + require.NotNil(t, adl) + require.NotNil(t, adl.Schema) + require.NotZero(t, adl.Schema.Ref) + assert.EqualT(t, refURL, adl.Schema.Ref.String()) + assertExtension(t, schema, goName) +} + +func assertRefDefinition(t *testing.T, defs map[string]oaispec.Schema, defName, refURL, goName string) { + schema, ok := defs[defName] + if assert.TrueT(t, ok) { + if assert.NotZero(t, schema.Ref) { + url := schema.Ref.String() + assert.EqualT(t, refURL, url) + if goName != "" { + assert.Equal(t, goName, schema.Extensions["x-go-name"]) + } else { + assert.Nil(t, schema.Extensions["x-go-name"]) + } + } + } +} + +func assertMapProperty(t *testing.T, schema *oaispec.Schema, typeName, jsonName, format, goName string) { + prop := schema.Properties[jsonName] + assert.NotEmpty(t, prop.Type) + assert.TrueT(t, prop.Type.Contains("object")) 
+ assert.NotNil(t, prop.AdditionalProperties) + if typeName != "" { + assert.EqualT(t, typeName, prop.AdditionalProperties.Schema.Type[0]) + } + assert.Equal(t, goName, prop.Extensions["x-go-name"]) + assert.EqualT(t, format, prop.AdditionalProperties.Schema.Format) +} + +func assertMapRef(t *testing.T, schema *oaispec.Schema, jsonName, goName, fragment string) { + assertMapProperty(t, schema, "", jsonName, "", goName) + psch := schema.Properties[jsonName].AdditionalProperties.Schema + assert.EqualT(t, fragment, psch.Ref.String()) +} + +func TestEmbeddedDescriptionAndTags(t *testing.T) { + packagePattern := "./bugs/3125/minimal" + packagePath := fixturesModule + "/bugs/3125/minimal" + ctx, err := scanner.NewScanCtx(&scanner.Options{ + Packages: []string{packagePattern}, + WorkDir: scantest.FixturesDir(), + DescWithRef: true, + }) + require.NoError(t, err) + decl, _ := ctx.FindDecl(packagePath, "Item") + require.NotNil(t, decl) + prs := &Builder{ + ctx: ctx, + decl: decl, + } + models := make(map[string]oaispec.Schema) + require.NoError(t, prs.Build(models)) + schema := models["Item"] + + assert.Equal(t, []string{"value1", "value2"}, schema.Required) + require.Len(t, schema.Properties, 2) + + require.MapContainsT(t, schema.Properties, "value1") + assert.EqualT(t, "Nullable value", schema.Properties["value1"].Description) + assert.Equal(t, true, schema.Properties["value1"].Extensions["x-nullable"]) + + require.MapContainsT(t, schema.Properties, "value2") + assert.EqualT(t, "Non-nullable value", schema.Properties["value2"].Description) + assert.MapNotContainsT(t, schema.Properties["value2"].Extensions, "x-nullable") + assert.Equal(t, `{"value": 42}`, schema.Properties["value2"].Example) + + scantest.CompareOrDumpJSON(t, models, "bugs_3125_schema.json") +} + +func TestIssue2540(t *testing.T) { + t.Run("should produce example and default for top level declaration only", + testIssue2540(false, `{ + "Book": { + "description": "At this moment, a book is only described by 
its publishing date\nand author.", + "type": "object", + "title": "Book holds all relevant information about a book.", + "example": "{ \"Published\": 2026, \"Author\": \"Fred\" }", + "default": "{ \"Published\": 1900, \"Author\": \"Unknown\" }", + "properties": { + "Author": { + "$ref": "#/definitions/Author" + }, + "Published": { + "type": "integer", + "format": "int64", + "minimum": 0, + "example": 2021 + } + } + } + }`), + ) + t.Run("should produce example and default for top level declaration and embedded $ref field", + testIssue2540(true, `{ + "Book": { + "description": "At this moment, a book is only described by its publishing date\nand author.", + "type": "object", + "title": "Book holds all relevant information about a book.", + "example": "{ \"Published\": 2026, \"Author\": \"Fred\" }", + "default": "{ \"Published\": 1900, \"Author\": \"Unknown\" }", + "properties": { + "Author": { + "$ref": "#/definitions/Author", + "example": "{ \"Name\": \"Tolkien\" }" + }, + "Published": { + "type": "integer", + "format": "int64", + "minimum": 0, + "example": 2021 + } + } + } + }`), + ) +} + +func testIssue2540(descWithRef bool, expectedJSON string) func(*testing.T) { + return func(t *testing.T) { + packagePattern := "./bugs/2540/foo" + packagePath := fixturesModule + "/bugs/2540/foo" + ctx, err := scanner.NewScanCtx(&scanner.Options{ + Packages: []string{packagePattern}, + WorkDir: scantest.FixturesDir(), + DescWithRef: descWithRef, + SkipExtensions: true, + }) + require.NoError(t, err) + + decl, _ := ctx.FindDecl(packagePath, "Book") + require.NotNil(t, decl) + prs := &Builder{ + ctx: ctx, + decl: decl, + } + + models := make(map[string]oaispec.Schema) + require.NoError(t, prs.Build(models)) + + b, err := json.Marshal(models) + require.NoError(t, err) + assert.JSONEqT(t, expectedJSON, string(b)) + } +} diff --git a/internal/builders/schema/taggers.go b/internal/builders/schema/taggers.go new file mode 100644 index 0000000..3e1c33f --- /dev/null +++ 
b/internal/builders/schema/taggers.go @@ -0,0 +1,120 @@ +// SPDX-FileCopyrightText: Copyright 2015-2025 go-swagger maintainers +// SPDX-License-Identifier: Apache-2.0 + +package schema + +import ( + "fmt" + "go/ast" + "slices" + + "github.com/go-openapi/codescan/internal/parsers" + oaispec "github.com/go-openapi/spec" +) + +func schemaTaggers(schema, ps *oaispec.Schema, nm string) []parsers.TagParser { + schemeType, err := ps.Type.MarshalJSON() + if err != nil { + return nil + } + scheme := &oaispec.SimpleSchema{Type: string(schemeType)} + + return []parsers.TagParser{ + parsers.NewSingleLineTagParser("maximum", parsers.NewSetMaximum(schemaValidations{ps})), + parsers.NewSingleLineTagParser("minimum", parsers.NewSetMinimum(schemaValidations{ps})), + parsers.NewSingleLineTagParser("multipleOf", parsers.NewSetMultipleOf(schemaValidations{ps})), + parsers.NewSingleLineTagParser("minLength", parsers.NewSetMinLength(schemaValidations{ps})), + parsers.NewSingleLineTagParser("maxLength", parsers.NewSetMaxLength(schemaValidations{ps})), + parsers.NewSingleLineTagParser("pattern", parsers.NewSetPattern(schemaValidations{ps})), + parsers.NewSingleLineTagParser("minItems", parsers.NewSetMinItems(schemaValidations{ps})), + parsers.NewSingleLineTagParser("maxItems", parsers.NewSetMaxItems(schemaValidations{ps})), + parsers.NewSingleLineTagParser("unique", parsers.NewSetUnique(schemaValidations{ps})), + parsers.NewSingleLineTagParser("enum", parsers.NewSetEnum(schemaValidations{ps})), + parsers.NewSingleLineTagParser("default", parsers.NewSetDefault(scheme, schemaValidations{ps})), + parsers.NewSingleLineTagParser("type", parsers.NewSetDefault(scheme, schemaValidations{ps})), + parsers.NewSingleLineTagParser("example", parsers.NewSetExample(scheme, schemaValidations{ps})), + parsers.NewSingleLineTagParser("required", parsers.NewSetRequiredSchema(schema, nm)), + parsers.NewSingleLineTagParser("readOnly", parsers.NewSetReadOnlySchema(ps)), + 
parsers.NewSingleLineTagParser("discriminator", parsers.NewSetDiscriminator(schema, nm)), + parsers.NewMultiLineTagParser("YAMLExtensionsBlock", parsers.NewYAMLParser( + parsers.WithExtensionMatcher(), + parsers.WithSetter(schemaVendorExtensibleSetter(ps)), + ), true), + } +} + +func refSchemaTaggers(schema *oaispec.Schema, name string) []parsers.TagParser { + return []parsers.TagParser{ + parsers.NewSingleLineTagParser("required", parsers.NewSetRequiredSchema(schema, name)), + } +} + +func itemsTaggers(items *oaispec.Schema, level int) []parsers.TagParser { + schemeType, err := items.Type.MarshalJSON() + if err != nil { + return nil + } + + scheme := &oaispec.SimpleSchema{Type: string(schemeType)} + opts := []parsers.PrefixRxOption{parsers.WithItemsPrefixLevel(level)} + + return []parsers.TagParser{ + parsers.NewSingleLineTagParser(fmt.Sprintf("items%dMaximum", level), parsers.NewSetMaximum(schemaValidations{items}, opts...)), + parsers.NewSingleLineTagParser(fmt.Sprintf("items%dMinimum", level), parsers.NewSetMinimum(schemaValidations{items}, opts...)), + parsers.NewSingleLineTagParser(fmt.Sprintf("items%dMultipleOf", level), parsers.NewSetMultipleOf(schemaValidations{items}, opts...)), + parsers.NewSingleLineTagParser(fmt.Sprintf("items%dMinLength", level), parsers.NewSetMinLength(schemaValidations{items}, opts...)), + parsers.NewSingleLineTagParser(fmt.Sprintf("items%dMaxLength", level), parsers.NewSetMaxLength(schemaValidations{items}, opts...)), + parsers.NewSingleLineTagParser(fmt.Sprintf("items%dPattern", level), parsers.NewSetPattern(schemaValidations{items}, opts...)), + parsers.NewSingleLineTagParser(fmt.Sprintf("items%dMinItems", level), parsers.NewSetMinItems(schemaValidations{items}, opts...)), + parsers.NewSingleLineTagParser(fmt.Sprintf("items%dMaxItems", level), parsers.NewSetMaxItems(schemaValidations{items}, opts...)), + parsers.NewSingleLineTagParser(fmt.Sprintf("items%dUnique", level), parsers.NewSetUnique(schemaValidations{items}, opts...)), + 
parsers.NewSingleLineTagParser(fmt.Sprintf("items%dEnum", level), parsers.NewSetEnum(schemaValidations{items}, opts...)), + parsers.NewSingleLineTagParser(fmt.Sprintf("items%dDefault", level), parsers.NewSetDefault(scheme, schemaValidations{items}, opts...)), + parsers.NewSingleLineTagParser(fmt.Sprintf("items%dExample", level), parsers.NewSetExample(scheme, schemaValidations{items}, opts...)), + } +} + +func parseArrayTypes(taggers []parsers.TagParser, expr ast.Expr, items *oaispec.SchemaOrArray, level int) ([]parsers.TagParser, error) { + if items == nil || items.Schema == nil { + return taggers, nil + } + + switch iftpe := expr.(type) { + case *ast.ArrayType: + eleTaggers := itemsTaggers(items.Schema, level) + otherTaggers, err := parseArrayTypes(slices.Concat(eleTaggers, taggers), iftpe.Elt, items.Schema.Items, level+1) + if err != nil { + return nil, err + } + + return otherTaggers, nil + + case *ast.Ident: + var identTaggers []parsers.TagParser + if iftpe.Obj == nil { + identTaggers = itemsTaggers(items.Schema, level) + } + + otherTaggers, err := parseArrayTypes(taggers, expr, items.Schema.Items, level+1) + if err != nil { + return nil, err + } + + return slices.Concat(identTaggers, otherTaggers), nil + + case *ast.StarExpr: + return parseArrayTypes(taggers, iftpe.X, items, level) + + case *ast.SelectorExpr: + // qualified name (e.g. time.Time): terminal leaf, register items-level validations. + return slices.Concat(itemsTaggers(items.Schema, level), taggers), nil + + case *ast.StructType, *ast.InterfaceType, *ast.MapType: + // anonymous struct / interface / map element: no further items-level + // validations apply; the element type itself carries its schema. 
+ return taggers, nil + + default: + return nil, fmt.Errorf("unknown field type element: %w", ErrSchema) + } +} diff --git a/internal/builders/schema/typable.go b/internal/builders/schema/typable.go new file mode 100644 index 0000000..ab49562 --- /dev/null +++ b/internal/builders/schema/typable.go @@ -0,0 +1,132 @@ +// SPDX-FileCopyrightText: Copyright 2015-2025 go-swagger maintainers +// SPDX-License-Identifier: Apache-2.0 + +package schema + +import ( + "github.com/go-openapi/codescan/internal/builders/resolvers" + "github.com/go-openapi/codescan/internal/ifaces" + "github.com/go-openapi/codescan/internal/parsers" + oaispec "github.com/go-openapi/spec" +) + +var _ ifaces.ValidationBuilder = &schemaValidations{} + +type Typable struct { + schema *oaispec.Schema + level int + skipExt bool +} + +func NewTypable(schema *oaispec.Schema, level int, skipExt bool) Typable { + return Typable{ + schema: schema, + level: level, + skipExt: skipExt, + } +} + +func (st Typable) In() string { return "body" } + +func (st Typable) Typed(tpe, format string) { + st.schema.Typed(tpe, format) +} + +func (st Typable) SetRef(ref oaispec.Ref) { // TODO(fred/claude): isn't it a bug? Setter on non-pointer receiver? + st.schema.Ref = ref +} + +func (st Typable) Schema() *oaispec.Schema { + return st.schema +} + +//nolint:ireturn // polymorphic by design +func (st Typable) Items() ifaces.SwaggerTypable { + if st.schema.Items == nil { + st.schema.Items = new(oaispec.SchemaOrArray) // TODO(fred/claude): isn't it a bug? Setter on non-pointer receiver? + } + if st.schema.Items.Schema == nil { + st.schema.Items.Schema = new(oaispec.Schema) // TODO(fred/claude): isn't it a bug? Setter on non-pointer receiver? 
+ } + + st.schema.Typed("array", "") + return Typable{st.schema.Items.Schema, st.level + 1, st.skipExt} +} + +func (st Typable) AdditionalProperties() ifaces.SwaggerTypable { //nolint:ireturn // polymorphic by design + if st.schema.AdditionalProperties == nil { + st.schema.AdditionalProperties = new(oaispec.SchemaOrBool) + } + if st.schema.AdditionalProperties.Schema == nil { + st.schema.AdditionalProperties.Schema = new(oaispec.Schema) + } + + st.schema.Typed("object", "") + return Typable{st.schema.AdditionalProperties.Schema, st.level + 1, st.skipExt} +} + +func (st Typable) Level() int { return st.level } + +func (st Typable) AddExtension(key string, value any) { + resolvers.AddExtension(&st.schema.VendorExtensible, key, value, st.skipExt) +} + +func (st Typable) WithEnum(values ...any) { + st.schema.WithEnum(values...) +} + +func (st Typable) WithEnumDescription(desc string) { + if desc == "" { + return + } + st.AddExtension(parsers.EnumDescExtension(), desc) +} + +func BodyTypable(in string, schema *oaispec.Schema, skipExt bool) (ifaces.SwaggerTypable, *oaispec.Schema) { //nolint:ireturn // polymorphic by design + if in == "body" { + // get the schema for items on the schema property + if schema == nil { + schema = new(oaispec.Schema) + } + if schema.Items == nil { + schema.Items = new(oaispec.SchemaOrArray) + } + if schema.Items.Schema == nil { + schema.Items.Schema = new(oaispec.Schema) + } + schema.Typed("array", "") + return Typable{schema.Items.Schema, 1, skipExt}, schema + } + + return nil, nil +} + +type schemaValidations struct { + current *oaispec.Schema +} + +func (sv schemaValidations) SetMaximum(val float64, exclusive bool) { + sv.current.Maximum = &val + sv.current.ExclusiveMaximum = exclusive +} + +func (sv schemaValidations) SetMinimum(val float64, exclusive bool) { + sv.current.Minimum = &val + sv.current.ExclusiveMinimum = exclusive +} +func (sv schemaValidations) SetMultipleOf(val float64) { sv.current.MultipleOf = &val } +func (sv 
schemaValidations) SetMinItems(val int64) { sv.current.MinItems = &val } +func (sv schemaValidations) SetMaxItems(val int64) { sv.current.MaxItems = &val } +func (sv schemaValidations) SetMinLength(val int64) { sv.current.MinLength = &val } +func (sv schemaValidations) SetMaxLength(val int64) { sv.current.MaxLength = &val } +func (sv schemaValidations) SetPattern(val string) { sv.current.Pattern = val } +func (sv schemaValidations) SetUnique(val bool) { sv.current.UniqueItems = val } +func (sv schemaValidations) SetDefault(val any) { sv.current.Default = val } +func (sv schemaValidations) SetExample(val any) { sv.current.Example = val } +func (sv schemaValidations) SetEnum(val string) { + var typ string + if len(sv.current.Type) > 0 { + typ = sv.current.Type[0] + } + sv.current.Enum = parsers.ParseEnum(val, &oaispec.SimpleSchema{Format: sv.current.Format, Type: typ}) +} diff --git a/spec.go b/internal/builders/spec/spec.go similarity index 54% rename from spec.go rename to internal/builders/spec/spec.go index 5dc9ea1..1e49ee8 100644 --- a/spec.go +++ b/internal/builders/spec/spec.go @@ -1,34 +1,51 @@ // SPDX-FileCopyrightText: Copyright 2015-2025 go-swagger maintainers // SPDX-License-Identifier: Apache-2.0 -package codescan +package spec import ( "go/ast" - "github.com/go-openapi/spec" + "github.com/go-openapi/codescan/internal/builders/operations" + "github.com/go-openapi/codescan/internal/builders/parameters" + "github.com/go-openapi/codescan/internal/builders/responses" + "github.com/go-openapi/codescan/internal/builders/routes" + "github.com/go-openapi/codescan/internal/builders/schema" + "github.com/go-openapi/codescan/internal/parsers" + "github.com/go-openapi/codescan/internal/scanner" + oaispec "github.com/go-openapi/spec" ) -func newSpecBuilder(input *spec.Swagger, sc *scanCtx, scanModels bool) *specBuilder { +type Builder struct { + scanModels bool + input *oaispec.Swagger + ctx *scanner.ScanCtx + discovered []*scanner.EntityDecl + definitions 
map[string]oaispec.Schema + responses map[string]oaispec.Response + operations map[string]*oaispec.Operation +} + +func NewBuilder(input *oaispec.Swagger, sc *scanner.ScanCtx, scanModels bool) *Builder { if input == nil { - input = new(spec.Swagger) + input = new(oaispec.Swagger) input.Swagger = "2.0" } if input.Paths == nil { - input.Paths = new(spec.Paths) + input.Paths = new(oaispec.Paths) } if input.Definitions == nil { - input.Definitions = make(map[string]spec.Schema) + input.Definitions = make(map[string]oaispec.Schema) } if input.Responses == nil { - input.Responses = make(map[string]spec.Response) + input.Responses = make(map[string]oaispec.Response) } if input.Extensions == nil { - input.Extensions = make(spec.Extensions) + input.Extensions = make(oaispec.Extensions) } - return &specBuilder{ + return &Builder{ ctx: sc, input: input, scanModels: scanModels, @@ -38,17 +55,7 @@ func newSpecBuilder(input *spec.Swagger, sc *scanCtx, scanModels bool) *specBuil } } -type specBuilder struct { - scanModels bool - input *spec.Swagger - ctx *scanCtx - discovered []*entityDecl - definitions map[string]spec.Schema - responses map[string]spec.Response - operations map[string]*spec.Operation -} - -func (s *specBuilder) Build() (*spec.Swagger, error) { +func (s *Builder) Build() (*oaispec.Swagger, error) { // this initial scan step is skipped if !scanModels. // Discovered dependencies should however be resolved. 
if err := s.buildModels(); err != nil { @@ -87,11 +94,11 @@ func (s *specBuilder) Build() (*spec.Swagger, error) { return s.input, nil } -func (s *specBuilder) buildDiscovered() error { +func (s *Builder) buildDiscovered() error { // loop over discovered until all the items are in definitions keepGoing := len(s.discovered) > 0 for keepGoing { - var queue []*entityDecl + var queue []*scanner.EntityDecl for _, d := range s.discovered { nm, _ := d.Names() if _, ok := s.definitions[nm]; !ok { @@ -110,53 +117,52 @@ func (s *specBuilder) buildDiscovered() error { return nil } -func (s *specBuilder) buildDiscoveredSchema(decl *entityDecl) error { - sb := &schemaBuilder{ - ctx: s.ctx, - decl: decl, - discovered: s.discovered, - } +func (s *Builder) buildDiscoveredSchema(decl *scanner.EntityDecl) error { + sb := schema.NewBuilder(s.ctx, decl) + sb.SetDiscovered(s.discovered) if err := sb.Build(s.definitions); err != nil { return err } - s.discovered = append(s.discovered, sb.postDecls...) + + s.discovered = append(s.discovered, sb.PostDeclarations()...) 
+ return nil } -func (s *specBuilder) buildMeta() error { +func (s *Builder) buildMeta() error { // build swagger object - for _, decl := range s.ctx.app.Meta { - if err := newMetaParser(s.input).Parse(decl.Comments); err != nil { + for decl := range s.ctx.Meta() { + if err := parsers.NewMetaParser(s.input).Parse(decl.Comments); err != nil { return err } } + return nil } -func (s *specBuilder) buildOperations() error { - for _, pp := range s.ctx.app.Operations { - ob := &operationsBuilder{ - operations: s.operations, - ctx: s.ctx, - path: pp, - } +func (s *Builder) buildOperations() error { + for pp := range s.ctx.Operations() { + ob := operations.NewBuilder(s.ctx, pp, s.operations) if err := ob.Build(s.input.Paths); err != nil { return err } } + return nil } -func (s *specBuilder) buildRoutes() error { +func (s *Builder) buildRoutes() error { // build paths dictionary - for _, pp := range s.ctx.app.Routes { - rb := &routesBuilder{ - ctx: s.ctx, - route: pp, - responses: s.responses, - operations: s.operations, - definitions: s.definitions, - } + for pp := range s.ctx.Routes() { + rb := routes.NewBuilder( + s.ctx, + pp, + routes.Inputs{ + Responses: s.responses, + Operations: s.operations, + Definitions: s.definitions, + }, + ) if err := rb.Build(s.input.Paths); err != nil { return err } @@ -165,43 +171,39 @@ func (s *specBuilder) buildRoutes() error { return nil } -func (s *specBuilder) buildResponses() error { +func (s *Builder) buildResponses() error { // build responses dictionary - for _, decl := range s.ctx.app.Responses { - rb := &responseBuilder{ - ctx: s.ctx, - decl: decl, - } + for decl := range s.ctx.Responses() { + rb := responses.NewBuilder(s.ctx, decl) if err := rb.Build(s.responses); err != nil { return err } - s.discovered = append(s.discovered, rb.postDecls...) + s.discovered = append(s.discovered, rb.PostDeclarations()...) 
} + return nil } -func (s *specBuilder) buildParameters() error { +func (s *Builder) buildParameters() error { // build parameters dictionary - for _, decl := range s.ctx.app.Parameters { - pb := ¶meterBuilder{ - ctx: s.ctx, - decl: decl, - } + for decl := range s.ctx.Parameters() { + pb := parameters.NewBuilder(s.ctx, decl) if err := pb.Build(s.operations); err != nil { return err } - s.discovered = append(s.discovered, pb.postDecls...) + s.discovered = append(s.discovered, pb.PostDeclarations()...) } + return nil } -func (s *specBuilder) buildModels() error { +func (s *Builder) buildModels() error { // build models dictionary if !s.scanModels { return nil } - for _, decl := range s.ctx.app.Models { + for _, decl := range s.ctx.Models() { if err := s.buildDiscoveredSchema(decl); err != nil { return err } @@ -210,12 +212,16 @@ func (s *specBuilder) buildModels() error { return s.joinExtraModels() } -func (s *specBuilder) joinExtraModels() error { - tmp := make(map[*ast.Ident]*entityDecl, len(s.ctx.app.ExtraModels)) - for k, v := range s.ctx.app.ExtraModels { +func (s *Builder) joinExtraModels() error { + l := s.ctx.NumExtraModels() + if l == 0 { + return nil + } + + tmp := make(map[*ast.Ident]*scanner.EntityDecl, l) + for k, v := range s.ctx.ExtraModels() { tmp[k] = v - s.ctx.app.Models[k] = v - delete(s.ctx.app.ExtraModels, k) + s.ctx.MoveExtraToModel(k) } // process extra models and see if there is any reference to a new extra one @@ -225,15 +231,15 @@ func (s *specBuilder) joinExtraModels() error { } } - if len(s.ctx.app.ExtraModels) > 0 { + if s.ctx.NumExtraModels() > 0 { return s.joinExtraModels() } return nil } -func collectOperationsFromInput(input *spec.Swagger) map[string]*spec.Operation { - operations := make(map[string]*spec.Operation) +func collectOperationsFromInput(input *oaispec.Swagger) map[string]*oaispec.Operation { + operations := make(map[string]*oaispec.Operation) if input == nil || input.Paths == nil { return operations } @@ -261,5 +267,6 @@ 
func collectOperationsFromInput(input *spec.Swagger) map[string]*spec.Operation operations[pth.Options.ID] = pth.Options } } + return operations } diff --git a/internal/ifaces/ifaces.go b/internal/ifaces/ifaces.go new file mode 100644 index 0000000..049feca --- /dev/null +++ b/internal/ifaces/ifaces.go @@ -0,0 +1,105 @@ +// SPDX-FileCopyrightText: Copyright 2015-2025 go-swagger maintainers +// SPDX-License-Identifier: Apache-2.0 + +// Package ifaces defines the internal interfaces that decouple the +// comment-parsing pipeline from the concrete Swagger spec builders. +// +// These interfaces allow the parsers (in internal/parsers) to remain +// independent of the specific spec objects they write into (Schema, +// Parameter, Header, Items), and let the builders (in internal/builders) +// provide type-specific implementations. +package ifaces + +import ( + "go/types" + + "github.com/go-openapi/spec" +) + +// SwaggerTypable is a write target for Swagger type assignments. +// +// When the scanner resolves a Go type to its Swagger representation +// (e.g. int64 -> "integer"/"int64", or a named struct -> a $ref), +// it writes the result through this interface. The four production +// implementations adapt SwaggerTypable to the Swagger spec object +// shapes: [spec.Schema], [spec.Parameter], [spec.Response]/[spec.Header], +// and [spec.Items]. +// +// Items returns a nested SwaggerTypable for the element type of arrays, +// enabling recursive descent into multi-level array types. In reports the +// parameter location (body, query, path, header, formData) so the pipeline +// can branch: body parameters use schemas, while others use simple types. +// Level reports the current nesting depth for array items. 
+type SwaggerTypable interface {
+ Typed(swaggerType string, format string)
+ SetRef(ref spec.Ref)
+ Items() SwaggerTypable
+ Schema() *spec.Schema
+ Level() int
+ AddExtension(key string, value any)
+ WithEnum(values ...any)
+ WithEnumDescription(desc string)
+ In() string
+}
+
+// ValidationBuilder is a write target for Swagger validation constraints.
+//
+// When the comment parser encounters a validation directive such as
+// "Maximum: 100" or "Pattern: ^[a-z]+$", it extracts the value and
+// calls the corresponding setter on this interface. This decouples
+// the parsing logic (in internal/parsers) from the spec-object
+// structure: each builder implementation (for Schema, Parameter,
+// Header, or Items) knows how to write the constraint onto its
+// specific spec type.
+type ValidationBuilder interface { //nolint:interfacebloat // mirrors the full set of Swagger validation properties
+ SetMaximum(maximum float64, isExclusive bool)
+ SetMinimum(minimum float64, isExclusive bool)
+ SetMultipleOf(multiple float64)
+
+ SetMinItems(minItems int64)
+ SetMaxItems(maxItems int64)
+
+ SetMinLength(minLength int64)
+ SetMaxLength(maxLength int64)
+ SetPattern(pattern string)
+
+ SetUnique(isUniqueItems bool)
+ SetEnum(enumValue string)
+ SetDefault(defaultValue any)
+ SetExample(example any)
+}
+
+// ValueParser is the fundamental unit of comment-line parsing.
+//
+// Each implementation recognizes (via Matches) and extracts data from
+// (via Parse) one kind of swagger annotation or validation directive
+// in a Go comment block. ValueParsers are composed into TagParser
+// wrappers and fed to the SectionedParser, which iterates over
+// comment lines and dispatches each line to the first matching parser.
+type ValueParser interface {
+ Parse(commentLines []string) error
+ Matches(commentLine string) bool
+}
+
+// OperationValidationBuilder extends [ValidationBuilder] with
+// SetCollectionFormat, which applies only to operation parameters,
+// response headers, and array items — not to schema definitions.
+//
+// The narrower interface enforces at the type level that collection
+// format (csv, ssv, tsv, pipes, multi) cannot be accidentally set on
+// a schema. Schema validations implement only [ValidationBuilder].
+type OperationValidationBuilder interface {
+ ValidationBuilder
+ SetCollectionFormat(collectionFormat string)
+}
+
+// Objecter abstracts over Go type objects that carry a [types.TypeName].
+//
+// It is used during type resolution to detect unsupported builtin types
+// (complex64, complex128, unsafe.Pointer) that have no JSON/Swagger
+// representation. Both [*types.Named] and [scanner.EntityDecl] satisfy
+// this interface, giving the resolver a uniform way to extract the
+// underlying type name without a type switch on every concrete type.
+type Objecter interface {
+ Obj() *types.TypeName
+}
diff --git a/internal/integration/coverage_enhancements_test.go b/internal/integration/coverage_enhancements_test.go
new file mode 100644
index 0000000..9ba5be7
--- /dev/null
+++ b/internal/integration/coverage_enhancements_test.go
@@ -0,0 +1,291 @@
+// SPDX-FileCopyrightText: Copyright 2015-2025 go-swagger maintainers
+// SPDX-License-Identifier: Apache-2.0
+
+package integration_test
+
+import (
+ "testing"
+
+ "github.com/go-openapi/codescan"
+ "github.com/go-openapi/codescan/internal/scantest"
+ "github.com/go-openapi/testify/v2/require"
+
+ oaispec "github.com/go-openapi/spec"
+)
+
+// These tests mirror the baseline coverage-enhancement tests. They scan
+// dedicated fixtures under fixtures/enhancements/ and compare the result to
+// the golden JSON captured on the baseline worktree, so we can catch any
+// behavioural drift introduced by the refactor.
+ +func TestCoverage_EmbeddedTypes(t *testing.T) { + doc, err := codescan.Run(&codescan.Options{ + Packages: []string{"./enhancements/embedded-types/..."}, + WorkDir: scantest.FixturesDir(), + ScanModels: true, + }) + require.NoError(t, err) + require.NotNil(t, doc) + + scantest.CompareOrDumpJSON(t, doc, "enhancements_embedded_types.json") +} + +func TestCoverage_AllOfEdges(t *testing.T) { + doc, err := codescan.Run(&codescan.Options{ + Packages: []string{"./enhancements/allof-edges/..."}, + WorkDir: scantest.FixturesDir(), + ScanModels: true, + }) + require.NoError(t, err) + require.NotNil(t, doc) + + scantest.CompareOrDumpJSON(t, doc, "enhancements_allof_edges.json") +} + +func TestCoverage_StrfmtArrays(t *testing.T) { + doc, err := codescan.Run(&codescan.Options{ + Packages: []string{"./enhancements/strfmt-arrays/..."}, + WorkDir: scantest.FixturesDir(), + ScanModels: true, + }) + require.NoError(t, err) + require.NotNil(t, doc) + + scantest.CompareOrDumpJSON(t, doc, "enhancements_strfmt_arrays.json") +} + +func TestCoverage_DefaultsExamples(t *testing.T) { + doc, err := codescan.Run(&codescan.Options{ + Packages: []string{"./enhancements/defaults-examples/..."}, + WorkDir: scantest.FixturesDir(), + ScanModels: true, + }) + require.NoError(t, err) + require.NotNil(t, doc) + + scantest.CompareOrDumpJSON(t, doc, "enhancements_defaults_examples.json") +} + +func TestCoverage_InterfaceMethods(t *testing.T) { + doc, err := codescan.Run(&codescan.Options{ + Packages: []string{"./enhancements/interface-methods/..."}, + WorkDir: scantest.FixturesDir(), + ScanModels: true, + }) + require.NoError(t, err) + require.NotNil(t, doc) + + scantest.CompareOrDumpJSON(t, doc, "enhancements_interface_methods.json") +} + +func TestCoverage_InterfaceMethods_XNullable(t *testing.T) { + doc, err := codescan.Run(&codescan.Options{ + Packages: []string{"./enhancements/interface-methods/..."}, + WorkDir: scantest.FixturesDir(), + ScanModels: true, + SetXNullableForPointers: true, + }) + 
require.NoError(t, err) + require.NotNil(t, doc) + + scantest.CompareOrDumpJSON(t, doc, "enhancements_interface_methods_xnullable.json") +} + +// TestCoverage_AliasExpand scans the alias-expand fixture with default +// Options so that buildAlias / buildFieldAlias take the non-transparent +// expansion path: each alias resolves to the underlying type and the +// target is emitted inline rather than as a $ref. +func TestCoverage_AliasExpand(t *testing.T) { + doc, err := codescan.Run(&codescan.Options{ + Packages: []string{"./enhancements/alias-expand/..."}, + WorkDir: scantest.FixturesDir(), + ScanModels: true, + }) + require.NoError(t, err) + require.NotNil(t, doc) + + scantest.CompareOrDumpJSON(t, doc, "enhancements_alias_expand.json") +} + +// TestCoverage_AliasRef scans the alias-expand fixture with RefAliases=true +// so body-parameter and response aliases resolve to $ref via makeRef, and +// the alias-of-alias chain resolves through the non-transparent switch. +func TestCoverage_AliasRef(t *testing.T) { + doc, err := codescan.Run(&codescan.Options{ + Packages: []string{"./enhancements/alias-expand/..."}, + WorkDir: scantest.FixturesDir(), + ScanModels: true, + RefAliases: true, + }) + require.NoError(t, err) + require.NotNil(t, doc) + + scantest.CompareOrDumpJSON(t, doc, "enhancements_alias_ref.json") +} + +// TestCoverage_AliasResponseRef scans a fixture where the swagger:response +// annotation is itself on an alias declaration. Under RefAliases=true the +// scanner takes the responseBuilder.buildAlias refAliases switch, which +// is not covered by any other test. 
+func TestCoverage_AliasResponseRef(t *testing.T) { + doc, err := codescan.Run(&codescan.Options{ + Packages: []string{"./enhancements/alias-response/..."}, + WorkDir: scantest.FixturesDir(), + ScanModels: true, + RefAliases: true, + }) + require.NoError(t, err) + require.NotNil(t, doc) + + scantest.CompareOrDumpJSON(t, doc, "enhancements_alias_response_ref.json") +} + +func TestCoverage_ResponseEdges(t *testing.T) { + doc, err := codescan.Run(&codescan.Options{ + Packages: []string{"./enhancements/response-edges/..."}, + WorkDir: scantest.FixturesDir(), + ScanModels: true, + }) + require.NoError(t, err) + require.NotNil(t, doc) + + scantest.CompareOrDumpJSON(t, doc, "enhancements_response_edges.json") +} + +func TestCoverage_NamedBasic(t *testing.T) { + doc, err := codescan.Run(&codescan.Options{ + Packages: []string{"./enhancements/named-basic/..."}, + WorkDir: scantest.FixturesDir(), + ScanModels: true, + }) + require.NoError(t, err) + require.NotNil(t, doc) + + scantest.CompareOrDumpJSON(t, doc, "enhancements_named_basic.json") +} + +func TestCoverage_RefAliasChain(t *testing.T) { + doc, err := codescan.Run(&codescan.Options{ + Packages: []string{"./enhancements/ref-alias-chain/..."}, + WorkDir: scantest.FixturesDir(), + ScanModels: true, + RefAliases: true, + }) + require.NoError(t, err) + require.NotNil(t, doc) + + scantest.CompareOrDumpJSON(t, doc, "enhancements_ref_alias_chain.json") +} + +func TestCoverage_EnumDocs(t *testing.T) { + doc, err := codescan.Run(&codescan.Options{ + Packages: []string{"./enhancements/enum-docs/..."}, + WorkDir: scantest.FixturesDir(), + ScanModels: true, + }) + require.NoError(t, err) + require.NotNil(t, doc) + + scantest.CompareOrDumpJSON(t, doc, "enhancements_enum_docs.json") +} + +func TestCoverage_TextMarshal(t *testing.T) { + doc, err := codescan.Run(&codescan.Options{ + Packages: []string{"./enhancements/text-marshal/..."}, + WorkDir: scantest.FixturesDir(), + ScanModels: true, + }) + require.NoError(t, err) + 
require.NotNil(t, doc) + + scantest.CompareOrDumpJSON(t, doc, "enhancements_text_marshal.json") +} + +func TestCoverage_AllHTTPMethods(t *testing.T) { + doc, err := codescan.Run(&codescan.Options{ + Packages: []string{"./enhancements/all-http-methods/..."}, + WorkDir: scantest.FixturesDir(), + }) + require.NoError(t, err) + require.NotNil(t, doc) + + scantest.CompareOrDumpJSON(t, doc, "enhancements_all_http_methods.json") +} + +// TestCoverage_UnknownAnnotation asserts that scanning a file with an +// unknown swagger: annotation returns a classifier error. This exercises +// the default branch of typeIndex.detectNodes. +func TestCoverage_UnknownAnnotation(t *testing.T) { + _, err := codescan.Run(&codescan.Options{ + Packages: []string{"./enhancements/unknown-annotation/..."}, + WorkDir: scantest.FixturesDir(), + ScanModels: true, + }) + require.Error(t, err) +} + +func TestCoverage_NamedStructTags(t *testing.T) { + doc, err := codescan.Run(&codescan.Options{ + Packages: []string{"./enhancements/named-struct-tags/..."}, + WorkDir: scantest.FixturesDir(), + ScanModels: true, + }) + require.NoError(t, err) + require.NotNil(t, doc) + + scantest.CompareOrDumpJSON(t, doc, "enhancements_named_struct_tags.json") +} + +func TestCoverage_TopLevelKinds(t *testing.T) { + doc, err := codescan.Run(&codescan.Options{ + Packages: []string{"./enhancements/top-level-kinds/..."}, + WorkDir: scantest.FixturesDir(), + ScanModels: true, + }) + require.NoError(t, err) + require.NotNil(t, doc) + + scantest.CompareOrDumpJSON(t, doc, "enhancements_top_level_kinds.json") +} + +// TestCoverage_InputOverlay feeds an InputSpec carrying paths with every +// HTTP verb so that operations from the input spec are indexed before the +// scanner merges its own discoveries. 
+func TestCoverage_InputOverlay(t *testing.T) { + input := &oaispec.Swagger{ + SwaggerProps: oaispec.SwaggerProps{ + Swagger: "2.0", + Info: &oaispec.Info{ + InfoProps: oaispec.InfoProps{ + Title: "Overlay", + Version: "0.0.1", + }, + }, + Paths: &oaispec.Paths{ + Paths: map[string]oaispec.PathItem{ + "/items": { + PathItemProps: oaispec.PathItemProps{ + Get: &oaispec.Operation{OperationProps: oaispec.OperationProps{ID: "listItems"}}, + Post: &oaispec.Operation{OperationProps: oaispec.OperationProps{ID: "createItem"}}, + Put: &oaispec.Operation{OperationProps: oaispec.OperationProps{ID: "replaceItem"}}, + Patch: &oaispec.Operation{OperationProps: oaispec.OperationProps{ID: "patchItem"}}, + Delete: &oaispec.Operation{OperationProps: oaispec.OperationProps{ID: "deleteItem"}}, + Head: &oaispec.Operation{OperationProps: oaispec.OperationProps{ID: "checkItem"}}, + Options: &oaispec.Operation{OperationProps: oaispec.OperationProps{ID: "optionsItem"}}, + }, + }, + }, + }, + }, + } + + doc, err := codescan.Run(&codescan.Options{ + Packages: []string{"./enhancements/embedded-types/..."}, + WorkDir: scantest.FixturesDir(), + InputSpec: input, + }) + require.NoError(t, err) + require.NotNil(t, doc) + + scantest.CompareOrDumpJSON(t, doc, "enhancements_input_overlay.json") +} diff --git a/internal/integration/coverage_malformed_test.go b/internal/integration/coverage_malformed_test.go new file mode 100644 index 0000000..3abe2d9 --- /dev/null +++ b/internal/integration/coverage_malformed_test.go @@ -0,0 +1,81 @@ +// SPDX-FileCopyrightText: Copyright 2015-2025 go-swagger maintainers +// SPDX-License-Identifier: Apache-2.0 + +package integration_test + +import ( + "testing" + + "github.com/go-openapi/codescan" + "github.com/go-openapi/codescan/internal/scantest" + "github.com/go-openapi/testify/v2/require" +) + +// Bucket-B error-path tests. 
Each subfixture under fixtures/enhancements/ +// malformed/ carries exactly one annotation that the scanner cannot +// reconcile, so Run() must return a non-nil error. No goldens are +// produced — these tests exist purely to pin the error surface. + +func TestMalformed_DefaultInt(t *testing.T) { + _, err := codescan.Run(&codescan.Options{ + Packages: []string{"./enhancements/malformed/default-int/..."}, + WorkDir: scantest.FixturesDir(), + }) + require.Error(t, err) +} + +func TestMalformed_ExampleInt(t *testing.T) { + _, err := codescan.Run(&codescan.Options{ + Packages: []string{"./enhancements/malformed/example-int/..."}, + WorkDir: scantest.FixturesDir(), + }) + require.Error(t, err) +} + +func TestMalformed_MetaBadExtensionKey(t *testing.T) { + _, err := codescan.Run(&codescan.Options{ + Packages: []string{"./enhancements/malformed/meta-bad-ext-key/..."}, + WorkDir: scantest.FixturesDir(), + }) + require.Error(t, err) +} + +func TestMalformed_InfoBadExtensionKey(t *testing.T) { + _, err := codescan.Run(&codescan.Options{ + Packages: []string{"./enhancements/malformed/info-bad-ext-key/..."}, + WorkDir: scantest.FixturesDir(), + }) + require.Error(t, err) +} + +func TestMalformed_BadContact(t *testing.T) { + _, err := codescan.Run(&codescan.Options{ + Packages: []string{"./enhancements/malformed/bad-contact/..."}, + WorkDir: scantest.FixturesDir(), + }) + require.Error(t, err) +} + +func TestMalformed_DuplicateBodyTag(t *testing.T) { + _, err := codescan.Run(&codescan.Options{ + Packages: []string{"./enhancements/malformed/duplicate-body-tag/..."}, + WorkDir: scantest.FixturesDir(), + }) + require.Error(t, err) +} + +func TestMalformed_BadResponseTag(t *testing.T) { + _, err := codescan.Run(&codescan.Options{ + Packages: []string{"./enhancements/malformed/bad-response-tag/..."}, + WorkDir: scantest.FixturesDir(), + }) + require.Error(t, err) +} + +func TestMalformed_BadSecurityDefinitions(t *testing.T) { + _, err := codescan.Run(&codescan.Options{ + Packages: 
[]string{"./enhancements/malformed/bad-sec-defs/..."}, + WorkDir: scantest.FixturesDir(), + }) + require.Error(t, err) +} diff --git a/internal/integration/doc.go b/internal/integration/doc.go new file mode 100644 index 0000000..4d29b9b --- /dev/null +++ b/internal/integration/doc.go @@ -0,0 +1,2 @@ +// Package integration provides integration tests. +package integration diff --git a/internal/integration/go_swagger_spec_test.go b/internal/integration/go_swagger_spec_test.go new file mode 100644 index 0000000..216785e --- /dev/null +++ b/internal/integration/go_swagger_spec_test.go @@ -0,0 +1,54 @@ +// SPDX-FileCopyrightText: Copyright 2015-2025 go-swagger maintainers +// SPDX-License-Identifier: Apache-2.0 + +package integration_test + +import ( + "path/filepath" + "testing" + + "github.com/go-openapi/codescan" + "github.com/go-openapi/codescan/internal/scantest" + "github.com/go-openapi/testify/v2/require" +) + +// These tests are the migration canary ported from go-swagger's +// cmd/swagger/commands/generate/spec_test.go. They exercise the same api.go +// fixture with the three Options variants go-swagger tests, and compare +// against go-swagger's pre-existing golden JSON files. If these pass, codescan +// still produces the specs go-swagger has depended on for years. 
+ +func TestGoSwagger_GenerateJSONSpec(t *testing.T) { + opts := codescan.Options{ + WorkDir: filepath.Join(scantest.FixturesDir(), "goparsing", "spec"), + Packages: []string{"./..."}, + } + swspec, err := codescan.Run(&opts) + require.NoError(t, err) + + scantest.CompareOrDumpJSON(t, swspec, "api_spec_go111.json") +} + +func TestGoSwagger_GenerateJSONSpec_RefAliases(t *testing.T) { + opts := codescan.Options{ + WorkDir: filepath.Join(scantest.FixturesDir(), "goparsing", "spec"), + Packages: []string{"./..."}, + RefAliases: true, + } + swspec, err := codescan.Run(&opts) + require.NoError(t, err) + + scantest.CompareOrDumpJSON(t, swspec, "api_spec_go111_ref.json") +} + +func TestGoSwagger_GenerateJSONSpec_TransparentAliases(t *testing.T) { + opts := codescan.Options{ + WorkDir: filepath.Join(scantest.FixturesDir(), "goparsing", "spec"), + Packages: []string{"./..."}, + TransparentAliases: true, + } + swspec, err := codescan.Run(&opts) + require.NoError(t, err) + + scantest.CompareOrDumpJSON(t, swspec, "api_spec_go111_transparent.json") +} diff --git a/internal/integration/helpers_test.go b/internal/integration/helpers_test.go new file mode 100644 index 0000000..740a809 --- /dev/null +++ b/internal/integration/helpers_test.go @@ -0,0 +1,63 @@ +// SPDX-FileCopyrightText: Copyright 2015-2025 go-swagger maintainers +// SPDX-License-Identifier: Apache-2.0 + +package integration_test + +import ( + "encoding/json" + "testing" + + "gopkg.in/yaml.v2" + + "github.com/go-openapi/testify/v2/assert" + + oaispec "github.com/go-openapi/spec" +) + +// enableSpecOutput toggles YAML dumping of generated specs for debugging. +const enableSpecOutput = false + +// fixturesModule is the module path of the fixtures nested module. 
+const fixturesModule = "github.com/go-openapi/codescan/fixtures" + +func marshalToYAMLFormat(swspec any) ([]byte, error) { + b, err := json.Marshal(swspec) + if err != nil { + return nil, err + } + + var jsonObj any + if err := yaml.Unmarshal(b, &jsonObj); err != nil { + return nil, err + } + + return yaml.Marshal(jsonObj) +} + +func assertHasExtension(t *testing.T, sch oaispec.Schema, ext string) { + t.Helper() + pkg, hasExt := sch.Extensions.GetString(ext) + assert.TrueT(t, hasExt) + assert.NotEmpty(t, pkg) +} + +func assertHasGoPackageExt(t *testing.T, sch oaispec.Schema) { + t.Helper() + assertHasExtension(t, sch, "x-go-package") +} + +func assertHasTitle(t *testing.T, sch oaispec.Schema) { + t.Helper() + assert.NotEmpty(t, sch.Title) +} + +func assertHasNoTitle(t *testing.T, sch oaispec.Schema) { + t.Helper() + assert.Empty(t, sch.Title) +} + +func assertIsRef(t *testing.T, schema *oaispec.Schema, fragment string) { + t.Helper() + + assert.EqualT(t, fragment, schema.Ref.String()) +} diff --git a/application_test.go b/internal/integration/petstore_test.go similarity index 76% rename from application_test.go rename to internal/integration/petstore_test.go index b895b08..77c9e6d 100644 --- a/application_test.go +++ b/internal/integration/petstore_test.go @@ -1,101 +1,44 @@ // SPDX-FileCopyrightText: Copyright 2015-2025 go-swagger maintainers // SPDX-License-Identifier: Apache-2.0 -package codescan +package integration_test import ( - "flag" - "io" - "log" - "os" "sort" "testing" - "github.com/go-openapi/spec" - + "github.com/go-openapi/codescan" + "github.com/go-openapi/codescan/internal/scantest" + "github.com/go-openapi/codescan/internal/scantest/classification" "github.com/go-openapi/testify/v2/assert" - "github.com/go-openapi/testify/v2/require" -) -var ( - petstoreCtx *scanCtx //nolint:gochecknoglobals // test package cache shared across test functions - classificationCtx *scanCtx //nolint:gochecknoglobals // test package cache shared across test functions 
+ oaispec "github.com/go-openapi/spec" ) -var ( - enableSpecOutput bool //nolint:gochecknoglobals // test flag registered in init - enableDebug bool //nolint:gochecknoglobals // test flag registered in init -) - -func init() { //nolint:gochecknoinits // registers test flags before TestMain - flag.BoolVar(&enableSpecOutput, "enable-spec-output", false, "enable spec gen test to write output to a file") - flag.BoolVar(&enableDebug, "enable-debug", false, "enable debug output in tests") -} - -func TestMain(m *testing.M) { - // initializations to run tests in this package - flag.Parse() - - if !enableDebug { - log.SetOutput(io.Discard) - } else { - // enable full debug when test is run with -enable-debug arg - log.SetFlags(log.LstdFlags | log.Lshortfile) - log.SetOutput(os.Stderr) - } - - os.Exit(m.Run()) -} - -func TestApplication_LoadCode(t *testing.T) { - sctx := loadClassificationPkgsCtx(t) - require.NotNil(t, sctx) - require.Len(t, sctx.app.Models, 45) - require.Len(t, sctx.app.Meta, 1) - require.Len(t, sctx.app.Routes, 7) - require.Empty(t, sctx.app.Operations) - require.Len(t, sctx.app.Parameters, 10) - require.Len(t, sctx.app.Responses, 11) -} - -func TestApplication_DebugLogging(t *testing.T) { - // Exercises the debugLogf code path with Debug: true. - _, err := Run(&Options{ - Packages: []string{"./goparsing/petstore/..."}, - WorkDir: "fixtures", - ScanModels: true, - Debug: true, - }) - require.NoError(t, err) -} - -func TestRun_InvalidWorkDir(t *testing.T) { - // Exercises the Run() error path when package loading fails. 
- _, err := Run(&Options{ - Packages: []string{"./..."}, - WorkDir: "/nonexistent/directory", - }) - require.Error(t, err) -} +const epsilon = 1e-9 func TestAppScanner_NewSpec(t *testing.T) { - doc, err := Run(&Options{ + doc, err := codescan.Run(&codescan.Options{ Packages: []string{"./goparsing/petstore/..."}, - WorkDir: "fixtures", + WorkDir: scantest.FixturesDir(), }) + require.NoError(t, err) require.NotNil(t, doc) verifyParsedPetStore(t, doc) + + scantest.CompareOrDumpJSON(t, doc, "petstore_spec.json") } func TestAppScanner_Definitions(t *testing.T) { - doc, err := Run(&Options{ + doc, err := codescan.Run(&codescan.Options{ Packages: []string{"./goparsing/bookings/..."}, - WorkDir: "fixtures", + WorkDir: scantest.FixturesDir(), ScanModels: true, }) + require.NoError(t, err) if assert.NotNil(t, doc) { _, ok := doc.Definitions["Booking"] @@ -109,52 +52,15 @@ func TestAppScanner_Definitions(t *testing.T) { _, ok = doc.Definitions["IgnoreMe"] assert.FalseT(t, ok, "Should not include un-annotated/un-referenced structs") } -} - -func loadPetstorePkgsCtx(t *testing.T) *scanCtx { - t.Helper() - - if petstoreCtx != nil { - return petstoreCtx - } - sctx, err := newScanCtx(&Options{ - Packages: []string{"./goparsing/petstore/..."}, - WorkDir: "fixtures", - Debug: enableDebug, - }) - require.NoError(t, err) - petstoreCtx = sctx - - return petstoreCtx -} - -func loadClassificationPkgsCtx(t *testing.T) *scanCtx { - t.Helper() - - if classificationCtx != nil { - return classificationCtx - } - - sctx, err := newScanCtx(&Options{ - Packages: []string{ - "./goparsing/classification", - "./goparsing/classification/models", - "./goparsing/classification/operations", - }, - WorkDir: "fixtures", - Debug: enableDebug, - }) - require.NoError(t, err) - classificationCtx = sctx - return classificationCtx + scantest.CompareOrDumpJSON(t, doc, "bookings_spec.json") } -func verifyParsedPetStore(t *testing.T, doc *spec.Swagger) { +func verifyParsedPetStore(t *testing.T, doc 
*oaispec.Swagger) { t.Helper() verifyTop(t, doc) - verifyInfo(t, doc.Info) + classification.VerifyInfo(t, doc.Info) verifyModels(t, doc.Definitions) verifyCommonResponses(t, doc.Responses) @@ -346,7 +252,7 @@ func verifyParsedPetStore(t *testing.T, doc *spec.Swagger) { }) } -func verifyTop(t *testing.T, doc *spec.Swagger) { +func verifyTop(t *testing.T, doc *oaispec.Swagger) { t.Helper() t.Run("with top level specification", func(t *testing.T) { @@ -366,7 +272,7 @@ func verifyTop(t *testing.T, doc *spec.Swagger) { }) } -func verifyCommonResponses(t *testing.T, responses map[string]spec.Response) { +func verifyCommonResponses(t *testing.T, responses map[string]oaispec.Response) { t.Helper() t.Run("with responses", func(t *testing.T) { @@ -377,8 +283,8 @@ func verifyCommonResponses(t *testing.T, responses map[string]spec.Response) { require.TrueT(t, ok) require.NotNil(t, resp.Schema) assert.Len(t, resp.Schema.Properties, 2) - assertProperty(t, resp.Schema, "integer", "code", "int32", "Code") - assertProperty(t, resp.Schema, "string", "message", "", "Message") + scantest.AssertProperty(t, resp.Schema, "integer", "code", "int32", "Code") + scantest.AssertProperty(t, resp.Schema, "string", "message", "", "Message") }) t.Run("should define validationError", func(t *testing.T) { @@ -386,9 +292,9 @@ func verifyCommonResponses(t *testing.T, responses map[string]spec.Response) { require.TrueT(t, ok) require.NotNil(t, resp.Schema) assert.Len(t, resp.Schema.Properties, 3) - assertProperty(t, resp.Schema, "integer", "code", "int32", "Code") - assertProperty(t, resp.Schema, "string", "message", "", "Message") - assertProperty(t, resp.Schema, "string", "field", "", "Field") + scantest.AssertProperty(t, resp.Schema, "integer", "code", "int32", "Code") + scantest.AssertProperty(t, resp.Schema, "string", "message", "", "Message") + scantest.AssertProperty(t, resp.Schema, "string", "field", "", "Field") }) t.Run("should define MarkdownRender", func(t *testing.T) { @@ -400,7 +306,7 @@ 
func verifyCommonResponses(t *testing.T, responses map[string]spec.Response) { }) } -func verifyModels(t *testing.T, definitions spec.Definitions) { +func verifyModels(t *testing.T, definitions oaispec.Definitions) { t.Helper() t.Run("with models definitions", func(t *testing.T) { @@ -412,34 +318,34 @@ func verifyModels(t *testing.T, definitions spec.Definitions) { mod, ok := definitions["tag"] require.TrueT(t, ok) - assert.Equal(t, spec.StringOrArray([]string{"object"}), mod.Type) + assert.Equal(t, oaispec.StringOrArray([]string{"object"}), mod.Type) assert.EqualT(t, "A Tag is an extra piece of data to provide more information about a pet.", mod.Title) assert.EqualT(t, "It is used to describe the animals available in the store.", mod.Description) assert.Len(t, mod.Required, 2) - assertProperty(t, &mod, "integer", "id", "int64", "ID") + scantest.AssertProperty(t, &mod, "integer", "id", "int64", "ID") prop, ok := mod.Properties["id"] require.TrueT(t, ok, "should have had an 'id' property") assert.EqualT(t, "The id of the tag.", prop.Description) - assertProperty(t, &mod, "string", "value", "", "Value") + scantest.AssertProperty(t, &mod, "string", "value", "", "Value") prop, ok = mod.Properties["value"] require.TrueT(t, ok) assert.EqualT(t, "The value of the tag.", prop.Description) mod, ok = definitions["pet"] require.TrueT(t, ok) - assert.Equal(t, spec.StringOrArray([]string{"object"}), mod.Type) + assert.Equal(t, oaispec.StringOrArray([]string{"object"}), mod.Type) assert.EqualT(t, "A Pet is the main product in the store.", mod.Title) assert.EqualT(t, "It is used to describe the animals available in the store.", mod.Description) assert.Len(t, mod.Required, 2) - assertProperty(t, &mod, "integer", "id", "int64", "ID") + scantest.AssertProperty(t, &mod, "integer", "id", "int64", "ID") prop, ok = mod.Properties["id"] require.TrueT(t, ok, "should have had an 'id' property") assert.EqualT(t, "The id of the pet.", prop.Description) - assertProperty(t, &mod, "string", 
"name", "", "Name") + scantest.AssertProperty(t, &mod, "string", "name", "", "Name") prop, ok = mod.Properties["name"] require.TrueT(t, ok) assert.EqualT(t, "The name of the pet.", prop.Description) @@ -447,7 +353,7 @@ func verifyModels(t *testing.T, definitions spec.Definitions) { assert.EqualValues(t, 50, *prop.MaxLength) assert.EqualT(t, "\\w[\\w-]+", prop.Pattern) - assertArrayProperty(t, &mod, "string", "photoUrls", "", "PhotoURLs") + scantest.AssertArrayProperty(t, &mod, "string", "photoUrls", "", "PhotoURLs") prop, ok = mod.Properties["photoUrls"] require.TrueT(t, ok) assert.EqualT(t, "The photo urls for the pet.\nThis only accepts jpeg or png images.", prop.Description) @@ -455,18 +361,18 @@ func verifyModels(t *testing.T, definitions spec.Definitions) { assert.EqualT(t, "\\.(jpe?g|png)$", prop.Items.Schema.Pattern) } - assertProperty(t, &mod, "string", "status", "", "Status") + scantest.AssertProperty(t, &mod, "string", "status", "", "Status") prop, ok = mod.Properties["status"] assert.TrueT(t, ok) assert.EqualT(t, "The current status of the pet in the store.\navailable STATUS_AVAILABLE\npending STATUS_PENDING\nsold STATUS_SOLD", prop.Description) assert.Equal(t, []any{"available", "pending", "sold"}, prop.Enum) - assertProperty(t, &mod, "string", "birthday", "date", "Birthday") + scantest.AssertProperty(t, &mod, "string", "birthday", "date", "Birthday") prop, ok = mod.Properties["birthday"] assert.TrueT(t, ok) assert.EqualT(t, "The pet's birthday", prop.Description) - assertArrayRef(t, &mod, "tags", "Tags", "#/definitions/tag") + scantest.AssertArrayRef(t, &mod, "tags", "Tags", "#/definitions/tag") prop, ok = mod.Properties["tags"] assert.TrueT(t, ok) assert.EqualT(t, "Extra bits of information attached to this pet.", prop.Description) @@ -476,22 +382,22 @@ func verifyModels(t *testing.T, definitions spec.Definitions) { assert.Len(t, mod.Properties, 4) assert.Len(t, mod.Required, 3) - assertProperty(t, &mod, "integer", "id", "int64", "ID") + 
scantest.AssertProperty(t, &mod, "integer", "id", "int64", "ID") prop, ok = mod.Properties["id"] assert.TrueT(t, ok, "should have had an 'id' property") assert.EqualT(t, "the ID of the order", prop.Description) - assertProperty(t, &mod, "integer", "userId", "int64", "UserID") + scantest.AssertProperty(t, &mod, "integer", "userId", "int64", "UserID") prop, ok = mod.Properties["userId"] assert.TrueT(t, ok, "should have had an 'userId' property") assert.EqualT(t, "the id of the user who placed the order.", prop.Description) - assertProperty(t, &mod, "string", "orderedAt", "date-time", "OrderedAt") + scantest.AssertProperty(t, &mod, "string", "orderedAt", "date-time", "OrderedAt") prop, ok = mod.Properties["orderedAt"] assert.EqualT(t, "the time at which this order was made.", prop.Description) assert.TrueT(t, ok, "should have an 'orderedAt' property") - assertArrayProperty(t, &mod, "object", "items", "", "Items") + scantest.AssertArrayProperty(t, &mod, "object", "items", "", "Items") prop, ok = mod.Properties["items"] assert.TrueT(t, ok, "should have an 'items' slice") assert.NotNil(t, prop.Items, "items should have had an items property") @@ -501,12 +407,12 @@ func verifyModels(t *testing.T, definitions spec.Definitions) { assert.Len(t, itprop.Properties, 2) assert.Len(t, itprop.Required, 2) - assertProperty(t, itprop, "integer", "petId", "int64", "PetID") + scantest.AssertProperty(t, itprop, "integer", "petId", "int64", "PetID") iprop, ok := itprop.Properties["petId"] assert.TrueT(t, ok, "should have had a 'petId' property") assert.EqualT(t, "the id of the pet to order", iprop.Description) - assertProperty(t, itprop, "integer", "qty", "int32", "Quantity") + scantest.AssertProperty(t, itprop, "integer", "qty", "int32", "Quantity") iprop, ok = itprop.Properties["qty"] assert.TrueT(t, ok, "should have had a 'qty' property") assert.EqualT(t, "the quantity of this pet to order", iprop.Description) @@ -515,7 +421,7 @@ func verifyModels(t *testing.T, definitions 
spec.Definitions) { }) } -func verifyIDParam(t *testing.T, param spec.Parameter, description string) { +func verifyIDParam(t *testing.T, param oaispec.Parameter, description string) { t.Helper() assert.EqualT(t, description, param.Description) @@ -526,12 +432,11 @@ func verifyIDParam(t *testing.T, param spec.Parameter, description string) { assert.Equal(t, "ID", param.Extensions["x-go-name"]) } -func verifyRefParam(t *testing.T, param spec.Parameter, description, refed string) { +func verifyRefParam(t *testing.T, param oaispec.Parameter, description, refed string) { t.Helper() assert.EqualT(t, description, param.Description) assert.EqualT(t, "body", param.In) - // TODO: this may fail sometimes (seen on go1.12 windows test): require pointer to be valid and avoid panicking require.NotNil(t, param) require.NotNil(t, param.Schema) assert.EqualT(t, "#/definitions/"+refed, param.Schema.Ref.String()) diff --git a/internal/integration/schema_aliased_test.go b/internal/integration/schema_aliased_test.go new file mode 100644 index 0000000..9b74ef1 --- /dev/null +++ b/internal/integration/schema_aliased_test.go @@ -0,0 +1,551 @@ +// SPDX-FileCopyrightText: Copyright 2015-2025 go-swagger maintainers +// SPDX-License-Identifier: Apache-2.0 + +package integration_test + +import ( + "os" + "path/filepath" + "testing" + + "github.com/go-openapi/codescan" + "github.com/go-openapi/codescan/internal/scantest" + "github.com/go-openapi/testify/v2/assert" + "github.com/go-openapi/testify/v2/require" + + oaispec "github.com/go-openapi/spec" +) + +func TestAliasedSchemas(t *testing.T) { + fixturesPath := filepath.Join(scantest.FixturesDir(), "goparsing", "go123", "aliased", "schema") + var sp *oaispec.Swagger + t.Run("end-to-end source scan should succeed", func(t *testing.T) { + var err error + sp, err = codescan.Run(&codescan.Options{ + WorkDir: fixturesPath, + BuildTags: "testscanner", // fixture code is excluded from normal build + ScanModels: true, + RefAliases: true, + }) + 
require.NoError(t, err) + }) + + if enableSpecOutput { + // for debugging, output the resulting spec as YAML + yml, err := marshalToYAMLFormat(sp) + require.NoError(t, err) + + _, _ = os.Stdout.Write(yml) + } + + t.Run("type aliased to any should yield an empty schema", func(t *testing.T) { + anything, ok := sp.Definitions["Anything"] + require.TrueT(t, ok) + + assertHasGoPackageExt(t, anything) + assertHasTitle(t, anything) + + // after stripping extension and title, should be empty + anything.VendorExtensible = oaispec.VendorExtensible{} + anything.Title = "" + assert.Equal(t, oaispec.Schema{}, anything) + }) + + t.Run("type aliased to an empty struct should yield an empty object", func(t *testing.T) { + empty, ok := sp.Definitions["Empty"] + require.TrueT(t, ok) + + assertHasGoPackageExt(t, empty) + assertHasTitle(t, empty) + + // after stripping extension and title, should be empty + empty.VendorExtensible = oaispec.VendorExtensible{} + empty.Title = "" + emptyObject := &oaispec.Schema{} + emptyObject = emptyObject.Typed("object", "").WithProperties(map[string]oaispec.Schema{}) + assert.Equal(t, *emptyObject, empty) + }) + + t.Run("struct fields defined as any or interface{} should yield properties with an empty schema", func(t *testing.T) { + testAliasedExtendedIDAllOf(t, sp) + }) + + t.Run("aliased primitive types remain unaffected", func(t *testing.T) { + uuid, ok := sp.Definitions["UUID"] + require.TrueT(t, ok) + + assertHasGoPackageExt(t, uuid) + assertHasTitle(t, uuid) + + // after strip extension, should be equal to integer with format + uuid.VendorExtensible = oaispec.VendorExtensible{} + uuid.Title = "" + intSchema := &oaispec.Schema{} + intSchema = intSchema.Typed("integer", "int64") + assert.Equal(t, *intSchema, uuid) + }) + + t.Run("with struct having fields aliased to any or interface{}", func(t *testing.T) { + order, ok := sp.Definitions["order"] + require.TrueT(t, ok) + + t.Run("field defined on an alias should produce a ref", func(t *testing.T) 
{ + t.Run("with alias to any", func(t *testing.T) { + _, ok = order.Properties["DeliveryOption"] + require.TrueT(t, ok) + scantest.AssertRef(t, &order, "DeliveryOption", "", "#/definitions/Anything") // points to an alias to any + }) + + t.Run("with alias to primitive type", func(t *testing.T) { + _, ok = order.Properties["id"] + require.TrueT(t, ok) + scantest.AssertRef(t, &order, "id", "", "#/definitions/UUID") // points to an alias to any + }) + + t.Run("with alias to struct type", func(t *testing.T) { + _, ok = order.Properties["extended_id"] + require.TrueT(t, ok) + scantest.AssertRef(t, &order, "extended_id", "", "#/definitions/ExtendedID") // points to an alias to any + }) + + t.Run("inside anonymous array", func(t *testing.T) { + items, ok := order.Properties["items"] + require.TrueT(t, ok) + + require.NotNil(t, items) + require.NotNil(t, items.Items) + + assert.TrueT(t, items.Type.Contains("array")) + t.Run("field as any should render as empty object", func(t *testing.T) { + require.NotNil(t, items.Items.Schema) + itemsSchema := items.Items.Schema + assert.TrueT(t, itemsSchema.Type.Contains("object")) + + require.MapContainsT(t, itemsSchema.Properties, "extra_options") + extraOptions := itemsSchema.Properties["extra_options"] + assertHasExtension(t, extraOptions, "x-go-name") + + extraOptions.VendorExtensible = oaispec.VendorExtensible{} + empty := oaispec.Schema{} + assert.Equal(t, empty, extraOptions) + }) + }) + }) + + t.Run("struct field defined as any should produce an empty schema", func(t *testing.T) { + extras, ok := order.Properties["Extras"] + require.TrueT(t, ok) + assert.Equal(t, oaispec.Schema{}, extras) + }) + + t.Run("struct field defined as interface{} should produce an empty schema", func(t *testing.T) { + extras, ok := order.Properties["MoreExtras"] + require.TrueT(t, ok) + assert.Equal(t, oaispec.Schema{}, extras) + }) + }) + + t.Run("type redefinitions and syntactic aliases to any should render the same", func(t *testing.T) { + whatnot, 
ok := sp.Definitions["whatnot"] + require.TrueT(t, ok) + // after strip extension, should be empty + whatnot.VendorExtensible = oaispec.VendorExtensible{} + assert.Equal(t, oaispec.Schema{}, whatnot) + + whatnotAlias, ok := sp.Definitions["whatnot_alias"] + require.TrueT(t, ok) + // after strip extension, should be empty + whatnotAlias.VendorExtensible = oaispec.VendorExtensible{} + assert.Equal(t, oaispec.Schema{}, whatnotAlias) + + whatnot2, ok := sp.Definitions["whatnot2"] + require.TrueT(t, ok) + // after strip extension, should be empty + whatnot2.VendorExtensible = oaispec.VendorExtensible{} + assert.Equal(t, oaispec.Schema{}, whatnot2) + + whatnot2Alias, ok := sp.Definitions["whatnot2_alias"] + require.TrueT(t, ok) + // after strip extension, should be empty + whatnot2Alias.VendorExtensible = oaispec.VendorExtensible{} + assert.Equal(t, oaispec.Schema{}, whatnot2Alias) + }) + + t.Run("alias to another alias is resolved as a ref", func(t *testing.T) { + void, ok := sp.Definitions["void"] + require.TrueT(t, ok) + + assertIsRef(t, &void, "#/definitions/Empty") // points to another alias + }) + + t.Run("type redefinition to anonymous is not an alias and is resolved as an object", func(t *testing.T) { + empty, ok := sp.Definitions["empty_redefinition"] + require.TrueT(t, ok) + + assertHasGoPackageExt(t, empty) + assertHasNoTitle(t, empty) + + // after stripping extension and title, should be empty + empty.VendorExtensible = oaispec.VendorExtensible{} + emptyObject := &oaispec.Schema{} + emptyObject = emptyObject.Typed("object", "").WithProperties(map[string]oaispec.Schema{}) + assert.Equal(t, *emptyObject, empty) + }) + + t.Run("alias to a named interface should render as a $ref", func(t *testing.T) { + iface, ok := sp.Definitions["iface_alias"] + require.TrueT(t, ok) + + assertIsRef(t, &iface, "#/definitions/iface") // points to an interface + }) + + t.Run("interface redefinition is not an alias and should render as a $ref", func(t *testing.T) { + iface, ok := 
sp.Definitions["iface_redefinition"] + require.TrueT(t, ok) + + assertIsRef(t, &iface, "#/definitions/iface") // points to an interface + }) + + t.Run("anonymous interface should render a schema", func(t *testing.T) { + iface, ok := sp.Definitions["anonymous_iface"] + require.TrueT(t, ok) + + require.NotEmpty(t, iface.Properties) + require.MapContainsT(t, iface.Properties, "String") + }) + + t.Run("anonymous struct should render as an anonymous schema", func(t *testing.T) { + obj, ok := sp.Definitions["anonymous_struct"] + require.TrueT(t, ok) + + require.NotEmpty(t, obj.Properties) + require.MapContainsT(t, obj.Properties, "A") + + a := obj.Properties["A"] + assert.TrueT(t, a.Type.Contains("object")) + require.MapContainsT(t, a.Properties, "B") + b := a.Properties["B"] + assert.TrueT(t, b.Type.Contains("integer")) + }) + + t.Run("standalone model with a tag should be rendered", func(t *testing.T) { + shouldSee, ok := sp.Definitions["ShouldSee"] + require.TrueT(t, ok) + assert.TrueT(t, shouldSee.Type.Contains("boolean")) + }) + + t.Run("standalone model without a tag should not be rendered", func(t *testing.T) { + _, ok := sp.Definitions["ShouldNotSee"] + require.FalseT(t, ok) + + _, ok = sp.Definitions["ShouldNotSeeSlice"] + require.FalseT(t, ok) + + _, ok = sp.Definitions["ShouldNotSeeMap"] + require.FalseT(t, ok) + }) + + t.Run("with aliases in slices and arrays", func(t *testing.T) { + t.Run("slice redefinition should render as schema", func(t *testing.T) { + t.Run("with anonymous slice", func(t *testing.T) { + slice, ok := sp.Definitions["slice_type"] // []any + require.TrueT(t, ok) + assert.TrueT(t, slice.Type.Contains("array")) + require.NotNil(t, slice.Items) + require.NotNil(t, slice.Items.Schema) + + assert.Equal(t, &oaispec.Schema{}, slice.Items.Schema) + }) + + t.Run("with anonymous struct", func(t *testing.T) { + slice, ok := sp.Definitions["slice_of_structs"] // type X = []struct{} + require.TrueT(t, ok) + assert.TrueT(t, slice.Type.Contains("array")) 
+ + require.NotNil(t, slice.Items) + require.NotNil(t, slice.Items.Schema) + + emptyObject := &oaispec.Schema{} + emptyObject = emptyObject.Typed("object", "").WithProperties(map[string]oaispec.Schema{}) + assert.Equal(t, emptyObject, slice.Items.Schema) + }) + }) + + t.Run("alias to anonymous slice should render as schema", func(t *testing.T) { + t.Run("with anonymous slice", func(t *testing.T) { + slice, ok := sp.Definitions["slice_alias"] // type X = []any + require.TrueT(t, ok) + assert.TrueT(t, slice.Type.Contains("array")) + + require.NotNil(t, slice.Items) + require.NotNil(t, slice.Items.Schema) + + assert.Equal(t, &oaispec.Schema{}, slice.Items.Schema) + }) + + t.Run("with anonymous struct", func(t *testing.T) { + slice, ok := sp.Definitions["slice_of_structs_alias"] // type X = []struct{} + require.TrueT(t, ok) + assert.TrueT(t, slice.Type.Contains("array")) + require.NotNil(t, slice.Items) + require.NotNil(t, slice.Items.Schema) + + emptyObject := &oaispec.Schema{} + emptyObject = emptyObject.Typed("object", "").WithProperties(map[string]oaispec.Schema{}) + assert.Equal(t, emptyObject, slice.Items.Schema) + }) + }) + + t.Run("alias to named alias to anonymous slice should render as ref", func(t *testing.T) { + slice, ok := sp.Definitions["slice_to_slice"] // type X = Slice + require.TrueT(t, ok) + assertIsRef(t, &slice, "#/definitions/slice_type") // points to a named alias + }) + }) + + t.Run("with aliases in interfaces", func(t *testing.T) { + testAliasedInterfaceVariants(t, sp) + }) + + t.Run("with aliases in embedded types", func(t *testing.T) { + testAliasedEmbeddedTypes(t, sp) + }) + + scantest.CompareOrDumpJSON(t, sp, "go123_aliased_spec.json") +} + +func testAliasedExtendedIDAllOf(t *testing.T, sp *oaispec.Swagger) { + t.Helper() + extended, ok := sp.Definitions["ExtendedID"] + require.TrueT(t, ok) + + t.Run("struct with an embedded alias should render as allOf", func(t *testing.T) { + require.Len(t, extended.AllOf, 2) + assertHasTitle(t, 
extended) + + foundAliased := false + foundProps := false + for idx, member := range extended.AllOf { + isProps := len(member.Properties) > 0 + isAlias := member.Ref.String() != "" + + switch { + case isProps: + props := member + t.Run("with property of type any", func(t *testing.T) { + evenMore, ok := props.Properties["EvenMore"] + require.TrueT(t, ok) + assert.Equal(t, oaispec.Schema{}, evenMore) + }) + + t.Run("with property of type interface{}", func(t *testing.T) { + evenMore, ok := props.Properties["StillMore"] + require.TrueT(t, ok) + assert.Equal(t, oaispec.Schema{}, evenMore) + }) + + t.Run("non-aliased properties remain unaffected", func(t *testing.T) { + more, ok := props.Properties["more"] + require.TrueT(t, ok) + + assertHasExtension(t, more, "x-go-name") // because we have a struct tag + assertHasNoTitle(t, more) + + // after stripping extension and title, should be empty + more.VendorExtensible = oaispec.VendorExtensible{} + + strSchema := &oaispec.Schema{} + strSchema = strSchema.Typed("string", "") + assert.Equal(t, *strSchema, more) + }) + foundProps = true + case isAlias: + assertIsRef(t, &member, "#/definitions/Empty") + foundAliased = true + default: + assert.Failf(t, "embedded members in struct are not as expected", "unexpected member in allOf: %d", idx) + } + } + require.TrueT(t, foundProps) + require.TrueT(t, foundAliased) + }) +} + +func testAliasedInterfaceVariants(t *testing.T, sp *oaispec.Swagger) { + t.Helper() + + t.Run("should render anonymous interface as a schema", func(t *testing.T) { + iface, ok := sp.Definitions["anonymous_iface"] // e.g. 
type X interface{ String() string} + require.TrueT(t, ok) + + require.TrueT(t, iface.Type.Contains("object")) + require.MapContainsT(t, iface.Properties, "String") + prop := iface.Properties["String"] + require.TrueT(t, prop.Type.Contains("string")) + assert.Len(t, iface.Properties, 1) + }) + + t.Run("alias to an anonymous interface should render as a $ref", func(t *testing.T) { + iface, ok := sp.Definitions["anonymous_iface_alias"] + require.TrueT(t, ok) + + assertIsRef(t, &iface, "#/definitions/anonymous_iface") // points to an anonymous interface + }) + + t.Run("named interface should render as a schema", func(t *testing.T) { + iface, ok := sp.Definitions["iface"] + require.TrueT(t, ok) + + require.TrueT(t, iface.Type.Contains("object")) + require.MapContainsT(t, iface.Properties, "Get") + prop := iface.Properties["Get"] + require.TrueT(t, prop.Type.Contains("string")) + assert.Len(t, iface.Properties, 1) + }) + + t.Run("named interface with embedded types should render as allOf", func(t *testing.T) { + iface, ok := sp.Definitions["iface_embedded"] + require.TrueT(t, ok) + + require.Len(t, iface.AllOf, 2) + foundEmbedded := false + foundMethod := false + for idx, member := range iface.AllOf { + require.TrueT(t, member.Type.Contains("object")) + require.NotEmpty(t, member.Properties) + require.Len(t, member.Properties, 1) + propGet, isEmbedded := member.Properties["Get"] + propMethod, isMethod := member.Properties["Dump"] + + switch { + case isEmbedded: + assert.TrueT(t, propGet.Type.Contains("string")) + foundEmbedded = true + case isMethod: + assert.TrueT(t, propMethod.Type.Contains("array")) + foundMethod = true + default: + assert.Failf(t, "embedded members in interface are not as expected", "unexpected member in allOf: %d", idx) + } + } + require.TrueT(t, foundEmbedded) + require.TrueT(t, foundMethod) + }) + + t.Run("named interface with embedded anonymous interface should render as allOf", func(t *testing.T) { + iface, ok := 
sp.Definitions["iface_embedded_anonymous"] + require.TrueT(t, ok) + + require.Len(t, iface.AllOf, 2) + foundEmbedded := false + foundAnonymous := false + for idx, member := range iface.AllOf { + require.TrueT(t, member.Type.Contains("object")) + require.NotEmpty(t, member.Properties) + require.Len(t, member.Properties, 1) + propGet, isEmbedded := member.Properties["String"] + propAnonymous, isAnonymous := member.Properties["Error"] + + switch { + case isEmbedded: + assert.TrueT(t, propGet.Type.Contains("string")) + foundEmbedded = true + case isAnonymous: + assert.TrueT(t, propAnonymous.Type.Contains("string")) + foundAnonymous = true + default: + assert.Failf(t, "embedded members in interface are not as expected", "unexpected member in allOf: %d", idx) + } + } + require.TrueT(t, foundEmbedded) + require.TrueT(t, foundAnonymous) + }) + + t.Run("composition of empty interfaces is rendered as an empty schema", func(t *testing.T) { + iface, ok := sp.Definitions["iface_embedded_empty"] + require.TrueT(t, ok) + + iface.VendorExtensible = oaispec.VendorExtensible{} + assert.Equal(t, oaispec.Schema{}, iface) + }) + + t.Run("interface embedded with an alias should be rendered as allOf, with a ref", func(t *testing.T) { + iface, ok := sp.Definitions["iface_embedded_with_alias"] + require.TrueT(t, ok) + + require.Len(t, iface.AllOf, 3) + foundEmbedded := false + foundEmbeddedAnon := false + foundRef := false + for idx, member := range iface.AllOf { + propGet, isEmbedded := member.Properties["String"] + propAnonymous, isAnonymous := member.Properties["Dump"] + isRef := member.Ref.String() != "" + + switch { + case isEmbedded: + require.TrueT(t, member.Type.Contains("object")) + require.Len(t, member.Properties, 1) + assert.TrueT(t, propGet.Type.Contains("string")) + foundEmbedded = true + case isAnonymous: + require.TrueT(t, member.Type.Contains("object")) + require.Len(t, member.Properties, 1) + assert.TrueT(t, propAnonymous.Type.Contains("array")) + foundEmbeddedAnon = true 
+ case isRef: + require.Empty(t, member.Properties) + assertIsRef(t, &member, "#/definitions/iface_alias") + foundRef = true + default: + assert.Failf(t, "embedded members in interface are not as expected", "unexpected member in allOf: %d", idx) + } + } + require.TrueT(t, foundEmbedded) + require.TrueT(t, foundEmbeddedAnon) + require.TrueT(t, foundRef) + }) +} + +func testAliasedEmbeddedTypes(t *testing.T, sp *oaispec.Swagger) { + t.Helper() + + t.Run("embedded alias should render as a $ref", func(t *testing.T) { + iface, ok := sp.Definitions["embedded_with_alias"] + require.TrueT(t, ok) + + require.Len(t, iface.AllOf, 3) + foundAnything := false + foundUUID := false + foundProps := false + for idx, member := range iface.AllOf { + isProps := len(member.Properties) > 0 + isRef := member.Ref.String() != "" + + switch { + case isProps: + require.TrueT(t, member.Type.Contains("object")) + require.Len(t, member.Properties, 3) + assert.MapContainsT(t, member.Properties, "EvenMore") + foundProps = true + case isRef: + switch member.Ref.String() { + case "#/definitions/Anything": + foundAnything = true + case "#/definitions/UUID": + foundUUID = true + default: + assert.Failf(t, + "embedded members in interface are not as expected", "unexpected $ref for member (%v): %d", + member.Ref, idx, + ) + } + default: + assert.Failf(t, "embedded members in interface are not as expected", "unexpected member in allOf: %d", idx) + } + } + require.TrueT(t, foundAnything) + require.TrueT(t, foundUUID) + require.TrueT(t, foundProps) + }) +} diff --git a/internal/integration/schema_special_test.go b/internal/integration/schema_special_test.go new file mode 100644 index 0000000..89319ec --- /dev/null +++ b/internal/integration/schema_special_test.go @@ -0,0 +1,451 @@ +// SPDX-FileCopyrightText: Copyright 2015-2025 go-swagger maintainers +// SPDX-License-Identifier: Apache-2.0 + +package integration_test + +import ( + "os" + "path/filepath" + "testing" + + "github.com/go-openapi/codescan" + 
"github.com/go-openapi/codescan/internal/scantest" + "github.com/go-openapi/testify/v2/assert" + "github.com/go-openapi/testify/v2/require" + + oaispec "github.com/go-openapi/spec" +) + +func TestSpecialSchemas(t *testing.T) { + fixturesPath := filepath.Join(scantest.FixturesDir(), "goparsing", "go123", "special") + var sp *oaispec.Swagger + + t.Run("end-to-end source scan should succeed", func(t *testing.T) { + var err error + sp, err = codescan.Run(&codescan.Options{ + WorkDir: fixturesPath, + BuildTags: "testscanner", // fixture code is excluded from normal build + ScanModels: true, + RefAliases: true, + }) + require.NoError(t, err) + }) + + if enableSpecOutput { + // for debugging, output the resulting spec as YAML + yml, err := marshalToYAMLFormat(sp) + require.NoError(t, err) + + _, _ = os.Stdout.Write(yml) + } + + t.Run("top-level primitive declaration should render just fine", func(t *testing.T) { + primitive, ok := sp.Definitions["primitive"] + require.TrueT(t, ok) + + require.TrueT(t, primitive.Type.Contains("string")) + }) + + t.Run("alias to unsafe pointer at top level should render empty", func(t *testing.T) { + uptr, ok := sp.Definitions["unsafe_pointer_alias"] + require.TrueT(t, ok) + var empty oaispec.Schema + uptr.VendorExtensible = oaispec.VendorExtensible{} + require.Equal(t, empty, uptr) + }) + + t.Run("alias to uintptr at top level should render as integer", func(t *testing.T) { + uptr, ok := sp.Definitions["upointer_alias"] + require.TrueT(t, ok) + require.TrueT(t, uptr.Type.Contains("integer")) + require.EqualT(t, "uint64", uptr.Format) + }) + + t.Run("top-level map[string]... 
should render just fine", func(t *testing.T) { + gomap, ok := sp.Definitions["go_map"] + require.TrueT(t, ok) + require.TrueT(t, gomap.Type.Contains("object")) + require.NotNil(t, gomap.AdditionalProperties) + + mapSchema := gomap.AdditionalProperties.Schema + require.NotNil(t, mapSchema) + require.TrueT(t, mapSchema.Type.Contains("integer")) + require.EqualT(t, "uint16", mapSchema.Format) + }) + + t.Run("untagged struct referenced by a tagged model should be discovered", func(t *testing.T) { + gostruct, ok := sp.Definitions["GoStruct"] + require.TrueT(t, ok) + require.TrueT(t, gostruct.Type.Contains("object")) + require.NotEmpty(t, gostruct.Properties) + + t.Run("pointer property should render just fine", func(t *testing.T) { + a, ok := gostruct.Properties["A"] + require.TrueT(t, ok) + require.TrueT(t, a.Type.Contains("number")) + require.EqualT(t, "float", a.Format) + }) + }) + + t.Run("tagged unsupported map type should render empty", func(t *testing.T) { + idx, ok := sp.Definitions["index_map"] + require.TrueT(t, ok) + var empty oaispec.Schema + idx.VendorExtensible = oaispec.VendorExtensible{} + require.Equal(t, empty, idx) + }) + + t.Run("redefinition of the builtin error type should render as a string", func(t *testing.T) { + goerror, ok := sp.Definitions["go_error"] + require.TrueT(t, ok) + require.TrueT(t, goerror.Type.Contains("string")) + + t.Run("a type based on the error builtin should be decorated with a x-go-type: error extension", func(t *testing.T) { + val, hasExt := goerror.Extensions.GetString("x-go-type") + assert.TrueT(t, hasExt) + assert.EqualT(t, "error", val) + }) + }) + + t.Run("with SpecialTypes struct", func(t *testing.T) { + testSpecialTypesStruct(t, sp) + }) + + t.Run("with generic types", func(t *testing.T) { + // NOTE: codescan does not really support generic types. + // This test just makes sure generic definitions don't crash the scanner. 
+ // + // The general approach of the scanner is to make an empty schema out of anything + // it doesn't understand. + + // generic_constraint + t.Run("generic type constraint should render like an interface", func(t *testing.T) { + generic, ok := sp.Definitions["generic_constraint"] + require.TrueT(t, ok) + require.Len(t, generic.AllOf, 1) // scanner only understood one member, and skipped the ~uint16 member is doesn't understand + member := generic.AllOf[0] + require.TrueT(t, member.Type.Contains("object")) + require.Len(t, member.Properties, 1) + prop, ok := member.Properties["Uint"] + require.TrueT(t, ok) + require.TrueT(t, prop.Type.Contains("integer")) + require.EqualT(t, "uint16", prop.Format) + }) + + // numerical_constraint + t.Run("generic type constraint with union type should render an empty schema", func(t *testing.T) { + generic, ok := sp.Definitions["numerical_constraint"] + require.TrueT(t, ok) + var empty oaispec.Schema + generic.VendorExtensible = oaispec.VendorExtensible{} + require.Equal(t, empty, generic) + }) + + // generic_map + t.Run("generic map should render an empty schema", func(t *testing.T) { + generic, ok := sp.Definitions["generic_map"] + require.TrueT(t, ok) + var empty oaispec.Schema + generic.VendorExtensible = oaispec.VendorExtensible{} + require.Equal(t, empty, generic) + }) + + // generic_map_alias + t.Run("generic map alias to an anonymous generic type should render an empty schema", func(t *testing.T) { + generic, ok := sp.Definitions["generic_map_alias"] + require.TrueT(t, ok) + var empty oaispec.Schema + generic.VendorExtensible = oaispec.VendorExtensible{} + require.Equal(t, empty, generic) + }) + + // generic_indirect + t.Run("generic map alias to a named generic type should render a ref", func(t *testing.T) { + generic, ok := sp.Definitions["generic_indirect"] + require.TrueT(t, ok) + assertIsRef(t, &generic, "#/definitions/generic_map_alias") + }) + + // generic_slice + t.Run("generic slice should render as an array of 
empty schemas", func(t *testing.T) { + generic, ok := sp.Definitions["generic_slice"] + require.TrueT(t, ok) + require.TrueT(t, generic.Type.Contains("array")) + require.NotNil(t, generic.Items) + itemsSchema := generic.Items.Schema + require.NotNil(t, itemsSchema) + var empty oaispec.Schema + require.Equal(t, &empty, itemsSchema) + }) + + // union_alias: + t.Run("alias to type constraint should render a ref", func(t *testing.T) { + generic, ok := sp.Definitions["union_alias"] + require.TrueT(t, ok) + assertIsRef(t, &generic, "#/definitions/numerical_constraint") + }) + }) + + scantest.CompareOrDumpJSON(t, sp, "go123_special_spec.json") +} + +func testSpecialTypesStruct(t *testing.T, sp *oaispec.Swagger) { + t.Helper() + + t.Run("in spite of all the pitfalls, the struct should be rendered", func(t *testing.T) { + special, ok := sp.Definitions["special_types"] + require.TrueT(t, ok) + require.TrueT(t, special.Type.Contains("object")) + props := special.Properties + require.NotEmpty(t, props) + require.Empty(t, special.AllOf) + + t.Run("property pointer to struct should render as a ref", func(t *testing.T) { + ptr, ok := props["PtrStruct"] + require.TrueT(t, ok) + assertIsRef(t, &ptr, "#/definitions/GoStruct") + }) + + t.Run("property as time.Time should render as a formatted string", func(t *testing.T) { + str, ok := props["ShouldBeStringTime"] + require.TrueT(t, ok) + require.TrueT(t, str.Type.Contains("string")) + require.EqualT(t, "date-time", str.Format) + }) + + t.Run("property as *time.Time should also render as a formatted string", func(t *testing.T) { + str, ok := props["ShouldAlsoBeStringTime"] + require.TrueT(t, ok) + require.TrueT(t, str.Type.Contains("string")) + require.EqualT(t, "date-time", str.Format) + }) + + t.Run("property as builtin error should render as a string", func(t *testing.T) { + goerror, ok := props["Err"] + require.TrueT(t, ok) + require.TrueT(t, goerror.Type.Contains("string")) + + t.Run("a type based on the error builtin should be 
decorated with a x-go-type: error extension", func(t *testing.T) { + val, hasExt := goerror.Extensions.GetString("x-go-type") + assert.TrueT(t, hasExt) + assert.EqualT(t, "error", val) + }) + }) + + t.Run("type recognized as a text marshaler should render as a string", func(t *testing.T) { + m, ok := props["Marshaler"] + require.TrueT(t, ok) + require.TrueT(t, m.Type.Contains("string")) + + t.Run("a type based on the encoding.TextMarshaler decorated with a x-go-type extension", func(t *testing.T) { + val, hasExt := m.Extensions.GetString("x-go-type") + assert.TrueT(t, hasExt) + assert.EqualT(t, fixturesModule+"/goparsing/go123/special.IsATextMarshaler", val) + }) + }) + + t.Run("a json.RawMessage should be recognized and render as an object (yes this is wrong)", func(t *testing.T) { + m, ok := props["Message"] + require.TrueT(t, ok) + require.TrueT(t, m.Type.Contains("object")) + }) + + t.Run("type time.Duration is not recognized as a special type and should just render as a ref", func(t *testing.T) { + d, ok := props["Duration"] + require.TrueT(t, ok) + assertIsRef(t, &d, "#/definitions/Duration") + + t.Run("discovered definition should be an integer", func(t *testing.T) { + duration, ok := sp.Definitions["Duration"] + require.TrueT(t, ok) + require.TrueT(t, duration.Type.Contains("integer")) + require.EqualT(t, "int64", duration.Format) + + t.Run("time.Duration schema should be decorated with a x-go-package: time", func(t *testing.T) { + val, hasExt := duration.Extensions.GetString("x-go-package") + assert.TrueT(t, hasExt) + assert.EqualT(t, "time", val) + }) + }) + }) + + testSpecialTypesStrfmt(t, props) + + t.Run("a property which is a map should render just fine, with a ref", func(t *testing.T) { + mm, ok := props["Map"] + require.TrueT(t, ok) + require.TrueT(t, mm.Type.Contains("object")) + require.NotNil(t, mm.AdditionalProperties) + mapSchema := mm.AdditionalProperties.Schema + require.NotNil(t, mapSchema) + assertIsRef(t, mapSchema, 
"#/definitions/GoStruct") + }) + + t.Run("a property which is a named array type should render as a ref", func(t *testing.T) { + na, ok := props["NamedArray"] + require.TrueT(t, ok) + assertIsRef(t, &na, "#/definitions/go_array") + }) + + testSpecialTypesWhatNot(t, sp, props) + }) +} + +func testSpecialTypesStrfmt(t *testing.T, props map[string]oaispec.Schema) { + t.Helper() + + t.Run("with strfmt types", func(t *testing.T) { + t.Run("a strfmt.Date should be recognized and render as a formatted string", func(t *testing.T) { + d, ok := props["FormatDate"] + require.TrueT(t, ok) + require.TrueT(t, d.Type.Contains("string")) + require.EqualT(t, "date", d.Format) + }) + + t.Run("a strfmt.DateTime should be recognized and render as a formatted string", func(t *testing.T) { + d, ok := props["FormatTime"] + require.TrueT(t, ok) + require.TrueT(t, d.Type.Contains("string")) + require.EqualT(t, "date-time", d.Format) + }) + + t.Run("a strfmt.UUID should be recognized and render as a formatted string", func(t *testing.T) { + u, ok := props["FormatUUID"] + require.TrueT(t, ok) + require.TrueT(t, u.Type.Contains("string")) + require.EqualT(t, "uuid", u.Format) + }) + + t.Run("a pointer to strfmt.UUID should be recognized and render as a formatted string", func(t *testing.T) { + u, ok := props["PtrFormatUUID"] + require.TrueT(t, ok) + require.TrueT(t, u.Type.Contains("string")) + require.EqualT(t, "uuid", u.Format) + }) + }) +} + +func testSpecialTypesWhatNot(t *testing.T, sp *oaispec.Swagger, props map[string]oaispec.Schema) { + t.Helper() + + t.Run(`with the "WhatNot" anonymous inner struct`, func(t *testing.T) { + t.Run("should render as an anonymous schema, in spite of all the unsupported things", func(t *testing.T) { + wn, ok := props["WhatNot"] + require.TrueT(t, ok) + require.TrueT(t, wn.Type.Contains("object")) + require.NotEmpty(t, wn.Properties) + + markedProps := make([]string, 0) + + for _, unsupportedProp := range []string{ + "AA", // complex128 + "A", // complex64 
+ "B", // chan int + "C", // func() + "D", // func() string + "E", // unsafe.Pointer + } { + t.Run("with property "+unsupportedProp, func(t *testing.T) { + prop, ok := wn.Properties[unsupportedProp] + require.TrueT(t, ok) + markedProps = append(markedProps, unsupportedProp) + + t.Run("unsupported type in property should render as an empty schema", func(t *testing.T) { + var empty oaispec.Schema + require.Equal(t, empty, prop) + }) + }) + } + + for _, supportedProp := range []string{ + "F", // uintptr + "G", + "H", + "I", + "J", + "K", + } { + t.Run("with property "+supportedProp, func(t *testing.T) { + prop, ok := wn.Properties[supportedProp] + require.TrueT(t, ok) + markedProps = append(markedProps, supportedProp) + + switch supportedProp { + case "F": + t.Run("uintptr should render as integer", func(t *testing.T) { + require.TrueT(t, prop.Type.Contains("integer")) + require.EqualT(t, "uint64", prop.Format) + }) + case "G", "H": + t.Run( + "math/big types are not recognized as special types and as TextMarshalers they render as string", + func(t *testing.T) { + require.TrueT(t, prop.Type.Contains("string")) + }) + case "I": + t.Run("go array should render as a json array", func(t *testing.T) { + require.TrueT(t, prop.Type.Contains("array")) + require.NotNil(t, prop.Items) + itemsSchema := prop.Items.Schema + require.NotNil(t, itemsSchema) + + require.TrueT(t, itemsSchema.Type.Contains("integer")) + // [5]byte is not recognized an array of bytes, but of uint8 + // (internally this is the same for go) + require.EqualT(t, "uint8", itemsSchema.Format) + }) + case "J", "K": + t.Run("reflect types should render just fine", func(t *testing.T) { + var dest string + if supportedProp == "J" { + dest = "Type" + } else { + dest = "Value" + } + assertIsRef(t, &prop, "#/definitions/"+dest) + + t.Run("the $ref should exist", func(t *testing.T) { + deref, ok := sp.Definitions[dest] + require.TrueT(t, ok) + val, hasExt := deref.Extensions.GetString("x-go-package") + assert.TrueT(t, 
hasExt) + assert.EqualT(t, "reflect", val) + }) + }) + } + }) + } + + t.Run("we should not have any property left in WhatNot", func(t *testing.T) { + for _, key := range markedProps { + delete(wn.Properties, key) + } + + require.Empty(t, wn.Properties) + }) + + t.Run("surprisingly, a tagged unexported top-level definition can be rendered", func(t *testing.T) { + unexported, ok := sp.Definitions["unexported"] + require.TrueT(t, ok) + require.TrueT(t, unexported.Type.Contains("object")) + }) + + t.Run("the IsATextMarshaler type is not identified as a discovered type and is not rendered", func(t *testing.T) { + _, ok := sp.Definitions["IsATextMarshaler"] + require.FalseT(t, ok) + }) + + t.Run("a top-level go array should render just fine", func(t *testing.T) { + // Notice that the semantics of fixed length are lost in this mapping + goarray, ok := sp.Definitions["go_array"] + require.TrueT(t, ok) + require.TrueT(t, goarray.Type.Contains("array")) + require.NotNil(t, goarray.Items) + itemsSchema := goarray.Items.Schema + require.NotNil(t, itemsSchema) + require.TrueT(t, itemsSchema.Type.Contains("integer")) + require.EqualT(t, "int64", itemsSchema.Format) + }) + }) + }) +} diff --git a/internal/logger/debug.go b/internal/logger/debug.go new file mode 100644 index 0000000..0d5e8e4 --- /dev/null +++ b/internal/logger/debug.go @@ -0,0 +1,18 @@ +// SPDX-FileCopyrightText: Copyright 2015-2025 go-swagger maintainers +// SPDX-License-Identifier: Apache-2.0 + +package logger + +import ( + "fmt" + "log" +) + +// logCallerDepth is the caller depth for log.Output. 
+const logCallerDepth = 2 + +func DebugLogf(debug bool, format string, args ...any) { + if debug { + _ = log.Output(logCallerDepth, fmt.Sprintf(format, args...)) + } +} diff --git a/internal/parsers/enum.go b/internal/parsers/enum.go new file mode 100644 index 0000000..0b4e62f --- /dev/null +++ b/internal/parsers/enum.go @@ -0,0 +1,149 @@ +// SPDX-FileCopyrightText: Copyright 2015-2025 go-swagger maintainers +// SPDX-License-Identifier: Apache-2.0 + +package parsers + +import ( + "encoding/json" + "go/ast" + "log" + "regexp" + "strconv" + "strings" + + "github.com/go-openapi/codescan/internal/ifaces" + "github.com/go-openapi/spec" +) + +type SetEnum struct { + builder ifaces.ValidationBuilder + rx *regexp.Regexp +} + +func NewSetEnum(builder ifaces.ValidationBuilder, opts ...PrefixRxOption) *SetEnum { + rx := rxEnumValidation + for _, apply := range opts { + rx = apply(rxEnumFmt) + } + + return &SetEnum{ + builder: builder, + rx: rx, + } +} + +func (se *SetEnum) Matches(line string) bool { + return se.rx.MatchString(line) +} + +func (se *SetEnum) Parse(lines []string) error { + if len(lines) == 0 || (len(lines) == 1 && len(lines[0]) == 0) { + return nil + } + + matches := se.rx.FindStringSubmatch(lines[0]) + if len(matches) > 1 && len(matches[1]) > 0 { + se.builder.SetEnum(matches[1]) + } + + return nil +} + +func parseValueFromSchema(s string, schema *spec.SimpleSchema) (any, error) { + if schema == nil { + return s, nil + } + + switch strings.Trim(schema.TypeName(), "\"") { + case "integer", "int", "int64", "int32", "int16": + return strconv.Atoi(s) + case "bool", "boolean": + return strconv.ParseBool(s) + case "number", "float64", "float32": + return strconv.ParseFloat(s, 64) + case "object": + var obj map[string]any + if err := json.Unmarshal([]byte(s), &obj); err != nil { + return s, nil //nolint:nilerr // fallback: return raw string when JSON is invalid + } + return obj, nil + case "array": + var slice []any + if err := json.Unmarshal([]byte(s), &slice); err 
!= nil { + return s, nil //nolint:nilerr // fallback: return raw string when JSON is invalid + } + return slice, nil + default: + return s, nil + } +} + +func parseEnumOld(val string, s *spec.SimpleSchema) []any { + list := strings.Split(val, ",") + interfaceSlice := make([]any, len(list)) + for i, d := range list { + v, err := parseValueFromSchema(d, s) + if err != nil { + interfaceSlice[i] = d + continue + } + + interfaceSlice[i] = v + } + return interfaceSlice +} + +func ParseEnum(val string, s *spec.SimpleSchema) []any { + // obtain the raw elements of the list to latter process them with the parseValueFromSchema + var rawElements []json.RawMessage + if err := json.Unmarshal([]byte(val), &rawElements); err != nil { + log.Print("WARNING: item list for enum is not a valid JSON array, using the old deprecated format") + return parseEnumOld(val, s) + } + + interfaceSlice := make([]any, len(rawElements)) + + for i, d := range rawElements { + ds, err := strconv.Unquote(string(d)) + if err != nil { + ds = string(d) + } + + v, err := parseValueFromSchema(ds, s) + if err != nil { + interfaceSlice[i] = ds + continue + } + + interfaceSlice[i] = v + } + + return interfaceSlice +} + +func GetEnumBasicLitValue(basicLit *ast.BasicLit) any { + switch basicLit.Kind.String() { + case "INT": + if result, err := strconv.ParseInt(basicLit.Value, 10, 64); err == nil { + return result + } + case "FLOAT": + if result, err := strconv.ParseFloat(basicLit.Value, 64); err == nil { + return result + } + default: + return strings.Trim(basicLit.Value, "\"") + } + return nil +} + +const extEnumDesc = "x-go-enum-desc" + +func GetEnumDesc(extensions spec.Extensions) (desc string) { + desc, _ = extensions.GetString(extEnumDesc) + return desc +} + +func EnumDescExtension() string { + return extEnumDesc +} diff --git a/internal/parsers/enum_test.go b/internal/parsers/enum_test.go new file mode 100644 index 0000000..14f9288 --- /dev/null +++ b/internal/parsers/enum_test.go @@ -0,0 +1,145 @@ +// 
// SPDX-FileCopyrightText: Copyright 2015-2025 go-swagger maintainers
// SPDX-License-Identifier: Apache-2.0

package parsers

import (
	"go/ast"
	"go/token"
	"testing"

	"github.com/go-openapi/testify/v2/assert"
	"github.com/go-openapi/testify/v2/require"

	"github.com/go-openapi/spec"
)

// Test_getEnumBasicLitValue checks the literal-to-Go-value mapping for INT,
// FLOAT and STRING literal kinds, including unparsable numeric values that
// are expected to yield nil.
func Test_getEnumBasicLitValue(t *testing.T) {
	// integer literals: parsable values yield int64, unparsable yield nil
	verifyGetEnumBasicLitValue(t, ast.BasicLit{Kind: token.INT, Value: "0"}, int64(0))
	verifyGetEnumBasicLitValue(t, ast.BasicLit{Kind: token.INT, Value: "-1"}, int64(-1))
	verifyGetEnumBasicLitValue(t, ast.BasicLit{Kind: token.INT, Value: "42"}, int64(42))
	verifyGetEnumBasicLitValue(t, ast.BasicLit{Kind: token.INT, Value: ""}, nil)
	verifyGetEnumBasicLitValue(t, ast.BasicLit{Kind: token.INT, Value: "word"}, nil)

	// float literals: parsable values yield float64, unparsable yield nil
	verifyGetEnumBasicLitValue(t, ast.BasicLit{Kind: token.FLOAT, Value: "0"}, float64(0))
	verifyGetEnumBasicLitValue(t, ast.BasicLit{Kind: token.FLOAT, Value: "-1"}, float64(-1))
	verifyGetEnumBasicLitValue(t, ast.BasicLit{Kind: token.FLOAT, Value: "42"}, float64(42))
	verifyGetEnumBasicLitValue(t, ast.BasicLit{Kind: token.FLOAT, Value: "1.1234"}, float64(1.1234))
	verifyGetEnumBasicLitValue(t, ast.BasicLit{Kind: token.FLOAT, Value: "1.9876"}, float64(1.9876))
	verifyGetEnumBasicLitValue(t, ast.BasicLit{Kind: token.FLOAT, Value: ""}, nil)
	verifyGetEnumBasicLitValue(t, ast.BasicLit{Kind: token.FLOAT, Value: "word"}, nil)

	// string literals pass through (numeric-looking strings stay strings)
	verifyGetEnumBasicLitValue(t, ast.BasicLit{Kind: token.STRING, Value: "Foo"}, "Foo")
	verifyGetEnumBasicLitValue(t, ast.BasicLit{Kind: token.STRING, Value: ""}, "")
	verifyGetEnumBasicLitValue(t, ast.BasicLit{Kind: token.STRING, Value: "0"}, "0")
	verifyGetEnumBasicLitValue(t, ast.BasicLit{Kind: token.STRING, Value: "1.1"}, "1.1")
}

// verifyGetEnumBasicLitValue asserts that GetEnumBasicLitValue maps the given
// literal to the expected value.
func verifyGetEnumBasicLitValue(t *testing.T, basicLit ast.BasicLit, expected any) {
	actual := GetEnumBasicLitValue(&basicLit)

	assert.Equal(t, expected, actual)
}

// TestParseValueFromSchema covers every schema type branch of
// parseValueFromSchema, including JSON fallbacks and hard parse errors.
func TestParseValueFromSchema(t *testing.T) {
	t.Parallel()

	tests := []struct {
		name   string
		input  string
		schema *spec.SimpleSchema
		want   any
	}{
		{"nil schema", "hello", nil, "hello"},
		{"string", "hello", &spec.SimpleSchema{Type: "string"}, "hello"},
		{"integer", "42", &spec.SimpleSchema{Type: "integer"}, 42},
		{"int64", "100", &spec.SimpleSchema{Type: "int64"}, 100},
		{"bool true", "true", &spec.SimpleSchema{Type: "bool"}, true},
		{"boolean false", "false", &spec.SimpleSchema{Type: "boolean"}, false},
		{"float64", "3.14", &spec.SimpleSchema{Type: "float64"}, float64(3.14)},
		{"number", "2.5", &spec.SimpleSchema{Type: "number"}, float64(2.5)},
		{"object valid", `{"a":"b"}`, &spec.SimpleSchema{Type: "object"}, map[string]any{"a": "b"}},
		{"object invalid json", `not-json`, &spec.SimpleSchema{Type: "object"}, "not-json"},
		{"array valid", `[1,2,3]`, &spec.SimpleSchema{Type: "array"}, []any{float64(1), float64(2), float64(3)}},
		{"array invalid json", `not-json`, &spec.SimpleSchema{Type: "array"}, "not-json"},
		{"unknown type", "raw", &spec.SimpleSchema{Type: "custom"}, "raw"},
	}

	for _, tc := range tests {
		t.Run(tc.name, func(t *testing.T) {
			got, err := parseValueFromSchema(tc.input, tc.schema)
			require.NoError(t, err)
			assert.Equal(t, tc.want, got)
		})
	}

	// the scalar branches surface strconv errors instead of falling back
	t.Run("integer parse error", func(t *testing.T) {
		_, err := parseValueFromSchema("not-a-number", &spec.SimpleSchema{Type: "integer"})
		require.Error(t, err)
	})

	t.Run("bool parse error", func(t *testing.T) {
		_, err := parseValueFromSchema("maybe", &spec.SimpleSchema{Type: "bool"})
		require.Error(t, err)
	})
}

// TestParseEnum covers both the JSON array format and the deprecated
// comma-separated format, with and without per-element coercion failures.
func TestParseEnum(t *testing.T) {
	t.Parallel()

	t.Run("JSON format strings", func(t *testing.T) {
		result := ParseEnum(`["a","b","c"]`, &spec.SimpleSchema{Type: "string"})
		assert.Equal(t, []any{"a", "b", "c"}, result)
	})

	t.Run("JSON format integers", func(t *testing.T) {
		result := ParseEnum(`[1,2,3]`, &spec.SimpleSchema{Type: "integer"})
		assert.Equal(t, []any{1, 2, 3}, result)
	})

	t.Run("old comma-separated format", func(t *testing.T) {
		result := ParseEnum("a,b,c", &spec.SimpleSchema{Type: "string"})
		assert.Equal(t, []any{"a", "b", "c"}, result)
	})

	t.Run("old format integers", func(t *testing.T) {
		result := ParseEnum("1,2,3", &spec.SimpleSchema{Type: "integer"})
		assert.Equal(t, []any{1, 2, 3}, result)
	})

	t.Run("old format with parse error fallback", func(t *testing.T) {
		// "abc" cannot be parsed as integer → fallback to raw string
		result := ParseEnum("abc,2,xyz", &spec.SimpleSchema{Type: "integer"})
		assert.Equal(t, []any{"abc", 2, "xyz"}, result)
	})

	t.Run("JSON format with parse error fallback", func(t *testing.T) {
		// JSON array of integers, but "abc" can't parse as integer → fallback
		result := ParseEnum(`["abc",2,"xyz"]`, &spec.SimpleSchema{Type: "integer"})
		assert.Equal(t, []any{"abc", 2, "xyz"}, result)
	})
}

// TestGetEnumDesc checks extraction of the x-go-enum-desc extension,
// including absent and nil extension maps.
func TestGetEnumDesc(t *testing.T) {
	t.Parallel()

	t.Run("with extension", func(t *testing.T) {
		ext := spec.Extensions{"x-go-enum-desc": "Active - active state\nInactive - inactive state"}
		assert.EqualT(t, "Active - active state\nInactive - inactive state", GetEnumDesc(ext))
	})

	t.Run("without extension", func(t *testing.T) {
		ext := spec.Extensions{}
		assert.EqualT(t, "", GetEnumDesc(ext))
	})

	t.Run("nil extensions", func(t *testing.T) {
		assert.EqualT(t, "", GetEnumDesc(nil))
	})
}

// TestEnumDescExtension pins the public extension key name.
func TestEnumDescExtension(t *testing.T) {
	t.Parallel()

	assert.EqualT(t, "x-go-enum-desc", EnumDescExtension())
}
// ErrParser is the sentinel error for all errors originating from the parsers package.
var ErrParser = errors.New("codescan:parsers")

// alphaChars used when parsing for Vendor Extensions.
const alphaChars = "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ"

// SetOpExtensions parses "extensions:" annotation sections and delivers the
// resulting vendor extensions through the Set callback.
type SetOpExtensions struct {
	Set   func(*oaispec.Extensions)
	rx    *regexp.Regexp
	Debug bool
}

// NewSetExtensions builds a SetOpExtensions with the package-level extensions
// regexp and the given debug flag.
func NewSetExtensions(setter func(*oaispec.Extensions), debug bool) *SetOpExtensions {
	return &SetOpExtensions{
		Set:   setter,
		rx:    rxExtensions,
		Debug: debug,
	}
}

// Matches reports whether the line starts an extensions section.
func (ss *SetOpExtensions) Matches(line string) bool {
	return ss.rx.MatchString(line)
}

// Parse walks the comment lines, builds extension objects from the
// indentation structure, and forwards them through Set. Empty input is a
// no-op; Parse never returns an error.
func (ss *SetOpExtensions) Parse(lines []string) error {
	if len(lines) == 0 || (len(lines) == 1 && len(lines[0]) == 0) {
		return nil
	}

	cleanLines := cleanupScannerLines(lines, rxUncommentHeaders)

	exts := new(oaispec.VendorExtensible)
	extList := make([]extensionObject, 0)
	buildExtensionObjects(lines, cleanLines, 0, &extList, nil)

	// Extensions can be one of the following:
	// key:value pair
	// list/array
	// object
	for _, ext := range extList {
		if m, ok := ext.Root.(map[string]string); ok {
			exts.AddExtension(ext.Extension, m[ext.Extension])
		} else if m, ok := ext.Root.(map[string]*[]string); ok {
			exts.AddExtension(ext.Extension, *m[ext.Extension])
		} else if m, ok := ext.Root.(map[string]any); ok {
			exts.AddExtension(ext.Extension, m[ext.Extension])
		} else {
			logger.DebugLogf(ss.Debug, "Unknown Extension type: %s", fmt.Sprint(reflect.TypeOf(ext.Root)))
		}
	}

	ss.Set(&exts.Extensions)
	return nil
}

// extensionObject pairs an x- extension key with its parsed root value
// (a key:value map, a list map, or a nested object map).
type extensionObject struct {
	Extension string
	Root      any
}

// extensionParsingStack tracks the currently open nesting levels while
// recursively parsing an extension section.
type extensionParsingStack []any

// Helper function to walk back through extensions until the proper nest level is reached.
func (stack *extensionParsingStack) walkBack(rawLines []string, lineIndex int) {
	// indentation is measured as the column of the first alphabetic character
	indent := strings.IndexAny(rawLines[lineIndex], alphaChars)
	nextIndent := strings.IndexAny(rawLines[lineIndex+1], alphaChars)
	if nextIndent >= indent {
		return
	}

	// Pop elements off the stack until we're back where we need to be
	// NOTE(review): the loop assumes some earlier line has the same indent as
	// the next line; confirm malformed input cannot run lineIndex-runbackIndex
	// below zero.
	runbackIndex := 0
	poppedIndent := 1000
	for {
		checkIndent := strings.IndexAny(rawLines[lineIndex-runbackIndex], alphaChars)
		if nextIndent == checkIndent {
			break
		}
		if checkIndent < poppedIndent {
			*stack = (*stack)[:len(*stack)-1]
			poppedIndent = checkIndent
		}
		runbackIndex++
	}
}

// Recursively parses through the given extension lines, building and adding extension objects as it goes.
// Extensions may be key:value pairs, arrays, or objects.
func buildExtensionObjects(rawLines []string, cleanLines []string, lineIndex int, extObjs *[]extensionObject, stack *extensionParsingStack) {
	if lineIndex >= len(rawLines) {
		// end of input: flush the extension currently under construction
		if stack != nil {
			if ext, ok := (*stack)[0].(extensionObject); ok {
				*extObjs = append(*extObjs, ext)
			}
		}
		return
	}

	kv := strings.SplitN(cleanLines[lineIndex], ":", kvParts)
	key := strings.TrimSpace(kv[0])
	if key == "" {
		// Some odd empty line
		return
	}

	// a following line without a ":" marks the start of a list
	nextIsList := false
	if lineIndex < len(rawLines)-1 {
		next := strings.SplitAfterN(cleanLines[lineIndex+1], ":", kvParts)
		nextIsList = len(next) == 1
	}

	if len(kv) <= 1 {
		// Should be a list item
		if stack == nil || len(*stack) == 0 {
			return
		}
		stackIndex := len(*stack) - 1
		list, ok := (*stack)[stackIndex].(*[]string)
		if !ok {
			panic(fmt.Errorf("internal error: expected *[]string, got %T: %w", (*stack)[stackIndex], ErrParser))
		}
		*list = append(*list, key)
		(*stack)[stackIndex] = list
		if lineIndex < len(rawLines)-1 && !rxAllowedExtensions.MatchString(cleanLines[lineIndex+1]) {
			stack.walkBack(rawLines, lineIndex)
		}
		buildExtensionObjects(rawLines, cleanLines, lineIndex+1, extObjs, stack)
		return
	}

	// Should be the start of a map or a key:value pair
	value := strings.TrimSpace(kv[1])

	if rxAllowedExtensions.MatchString(key) {
		buildNewExtension(key, value, nextIsList, stack, rawLines, cleanLines, lineIndex, extObjs)
		return
	}

	if stack == nil || len(*stack) == 0 {
		// nested content with no open extension: ignore
		return
	}

	buildStackEntry(key, value, nextIsList, stack, rawLines, cleanLines, lineIndex)
	buildExtensionObjects(rawLines, cleanLines, lineIndex+1, extObjs, stack)
}

// buildNewExtension handles the start of a new x- extension key.
func buildNewExtension(key, value string, nextIsList bool, stack *extensionParsingStack, rawLines, cleanLines []string, lineIndex int, extObjs *[]extensionObject) {
	// Flush any previous extension on the stack
	if stack != nil {
		if ext, ok := (*stack)[0].(extensionObject); ok {
			*extObjs = append(*extObjs, ext)
		}
	}

	if value != "" {
		ext := extensionObject{
			Extension: key,
		}
		// Extension is simple key:value pair, no stack
		rootMap := make(map[string]string)
		rootMap[key] = value
		ext.Root = rootMap
		*extObjs = append(*extObjs, ext)
		buildExtensionObjects(rawLines, cleanLines, lineIndex+1, extObjs, nil)
		return
	}

	ext := extensionObject{
		Extension: key,
	}
	if nextIsList {
		// Extension is an array
		rootMap := make(map[string]*[]string)
		rootList := make([]string, 0)
		rootMap[key] = &rootList
		ext.Root = rootMap
		stack = &extensionParsingStack{}
		*stack = append(*stack, ext)
		rootListMap, ok := ext.Root.(map[string]*[]string)
		if !ok {
			panic(fmt.Errorf("internal error: expected map[string]*[]string, got %T: %w", ext.Root, ErrParser))
		}
		*stack = append(*stack, rootListMap[key])
	} else {
		// Extension is an object
		rootMap := make(map[string]any)
		innerMap := make(map[string]any)
		rootMap[key] = innerMap
		ext.Root = rootMap
		stack = &extensionParsingStack{}
		*stack = append(*stack, ext)
		*stack = append(*stack, innerMap)
	}
	buildExtensionObjects(rawLines, cleanLines, lineIndex+1, extObjs, stack)
}

// assertStackMap returns the stack entry at index as a map[string]any,
// panicking (internal error) when the entry has an unexpected type.
func assertStackMap(stack *extensionParsingStack, index int) map[string]any {
	asMap, ok := (*stack)[index].(map[string]any)
	if !ok {
		panic(fmt.Errorf("internal error: stack index expected to be map[string]any, but got %T: %w", (*stack)[index], ErrParser))
	}
	return asMap
}

// buildStackEntry adds a key/value, nested list, or nested map to the current stack.
func buildStackEntry(key, value string, nextIsList bool, stack *extensionParsingStack, rawLines, cleanLines []string, lineIndex int) {
	stackIndex := len(*stack) - 1
	if value == "" {
		asMap := assertStackMap(stack, stackIndex)
		if nextIsList {
			// start of new list
			newList := make([]string, 0)
			asMap[key] = &newList
			*stack = append(*stack, &newList)
		} else {
			// start of new map
			newMap := make(map[string]any)
			asMap[key] = newMap
			*stack = append(*stack, newMap)
		}
		return
	}

	// key:value
	if reflect.TypeOf((*stack)[stackIndex]).Kind() == reflect.Map {
		asMap := assertStackMap(stack, stackIndex)
		asMap[key] = value
	}
	if lineIndex < len(rawLines)-1 && !rxAllowedExtensions.MatchString(cleanLines[lineIndex+1]) {
		stack.walkBack(rawLines, lineIndex)
	}
}
se.Matches("something else")) +} + +func TestSetOpExtensions_Parse(t *testing.T) { + t.Parallel() + + t.Run("empty", func(t *testing.T) { + var called bool + se := NewSetExtensions(func(_ *oaispec.Extensions) { called = true }, false) + require.NoError(t, se.Parse(nil)) + assert.FalseT(t, called) + require.NoError(t, se.Parse([]string{})) + require.NoError(t, se.Parse([]string{""})) + }) + + t.Run("simple key-value", func(t *testing.T) { + var got oaispec.Extensions + se := NewSetExtensions(func(ext *oaispec.Extensions) { got = *ext }, false) + + lines := []string{ + "x-custom-value: hello", + } + require.NoError(t, se.Parse(lines)) + val, ok := got.GetString("x-custom-value") + require.TrueT(t, ok) + assert.EqualT(t, "hello", val) + }) + + t.Run("array extension", func(t *testing.T) { + var got oaispec.Extensions + se := NewSetExtensions(func(ext *oaispec.Extensions) { got = *ext }, false) + + lines := []string{ + "x-tags:", + " value1", + " value2", + } + require.NoError(t, se.Parse(lines)) + require.NotNil(t, got) + val, ok := got["x-tags"] + require.TrueT(t, ok) + arr, ok := val.([]string) + require.TrueT(t, ok) + assert.Equal(t, []string{"value1", "value2"}, arr) + }) + + t.Run("object extension", func(t *testing.T) { + var got oaispec.Extensions + se := NewSetExtensions(func(ext *oaispec.Extensions) { got = *ext }, false) + + lines := []string{ + "x-meta:", + " name: obj", + " value: field", + } + require.NoError(t, se.Parse(lines)) + require.NotNil(t, got) + val, ok := got["x-meta"] + require.TrueT(t, ok) + obj, ok := val.(map[string]any) + require.TrueT(t, ok) + assert.EqualT(t, "obj", obj["name"]) + assert.EqualT(t, "field", obj["value"]) + }) + + t.Run("multiple extensions", func(t *testing.T) { + var got oaispec.Extensions + se := NewSetExtensions(func(ext *oaispec.Extensions) { got = *ext }, false) + + lines := []string{ + "x-first: one", + "x-second: two", + } + require.NoError(t, se.Parse(lines)) + v1, ok := got.GetString("x-first") + require.TrueT(t, 
ok) + assert.EqualT(t, "one", v1) + v2, ok := got.GetString("x-second") + require.TrueT(t, ok) + assert.EqualT(t, "two", v2) + }) + + t.Run("nested object with key-value", func(t *testing.T) { + var got oaispec.Extensions + se := NewSetExtensions(func(ext *oaispec.Extensions) { got = *ext }, false) + + lines := []string{ + "x-nested:", + " outer:", + " inner-key: inner-value", + } + require.NoError(t, se.Parse(lines)) + require.NotNil(t, got) + val, ok := got["x-nested"] + require.TrueT(t, ok) + obj, ok := val.(map[string]any) + require.TrueT(t, ok) + outer, ok := obj["outer"].(map[string]any) + require.TrueT(t, ok) + assert.EqualT(t, "inner-value", outer["inner-key"]) + }) + + t.Run("object then back to simple extension", func(t *testing.T) { + var got oaispec.Extensions + se := NewSetExtensions(func(ext *oaispec.Extensions) { got = *ext }, false) + + lines := []string{ + "x-obj:", + " key1: val1", + " key2: val2", + "x-simple: value", + } + require.NoError(t, se.Parse(lines)) + require.NotNil(t, got) + + obj, ok := got["x-obj"] + require.TrueT(t, ok) + m, ok := obj.(map[string]any) + require.TrueT(t, ok) + assert.EqualT(t, "val1", m["key1"]) + assert.EqualT(t, "val2", m["key2"]) + + simple, ok := got.GetString("x-simple") + require.TrueT(t, ok) + assert.EqualT(t, "value", simple) + }) + + t.Run("nested object with sub-list", func(t *testing.T) { + var got oaispec.Extensions + se := NewSetExtensions(func(ext *oaispec.Extensions) { got = *ext }, false) + + lines := []string{ + "x-deep:", + " sub-list:", + " item1", + " item2", + " sub-key: sub-value", + } + require.NoError(t, se.Parse(lines)) + require.NotNil(t, got) + val, ok := got["x-deep"] + require.TrueT(t, ok) + obj, ok := val.(map[string]any) + require.TrueT(t, ok) + assert.NotNil(t, obj["sub-list"]) + assert.EqualT(t, "sub-value", obj["sub-key"]) + }) + + t.Run("array extension followed by object extension", func(t *testing.T) { + var got oaispec.Extensions + se := NewSetExtensions(func(ext 
// JoinDropLast joins the lines with newlines, discarding a trailing
// blank/whitespace-only line if present.
func JoinDropLast(lines []string) string {
	joined := lines
	if n := len(joined); n > 0 && strings.TrimSpace(joined[n-1]) == "" {
		joined = joined[:n-1]
	}
	return strings.Join(joined, "\n")
}

// Setter sets a string field from a multi lines comment.
//
// Usage:
//
//	Setter(&op.Description)
//	Setter(&op.Summary)
//
// Replaces this idiom:
//
//	parsers.WithSetDescription(func(lines []string) { op.Description = parsers.JoinDropLast(lines) }),
func Setter(target *string) func([]string) {
	return func(lines []string) { *target = JoinDropLast(lines) }
}

// removeEmptyLines filters out blank and whitespace-only lines, preserving
// the order of the remaining ones.
func removeEmptyLines(lines []string) []string {
	kept := make([]string, 0, len(lines))
	for _, line := range lines {
		if strings.TrimSpace(line) == "" {
			continue
		}
		kept = append(kept, line)
	}
	return kept
}
[]string + }{ + {"nil", nil, []string{}}, + {"all empty", []string{"", " ", "\t"}, []string{}}, + {"mixed", []string{"hello", "", "world", " "}, []string{"hello", "world"}}, + {"no empty", []string{"a", "b"}, []string{"a", "b"}}, + } + + for _, tc := range tests { + t.Run(tc.name, func(t *testing.T) { + assert.Equal(t, tc.want, removeEmptyLines(tc.input)) + }) + } +} diff --git a/internal/parsers/matchers.go b/internal/parsers/matchers.go new file mode 100644 index 0000000..79dfc85 --- /dev/null +++ b/internal/parsers/matchers.go @@ -0,0 +1,180 @@ +// SPDX-FileCopyrightText: Copyright 2015-2025 go-swagger maintainers +// SPDX-License-Identifier: Apache-2.0 + +package parsers + +import ( + "go/ast" + "regexp" + "slices" + "strings" + + "github.com/go-openapi/codescan/internal/ifaces" +) + +const minMatchCount = 2 + +func HasAnnotation(line string) bool { + return rxSwaggerAnnotation.MatchString(line) +} + +func IsAliasParam(prop ifaces.SwaggerTypable) bool { + in := prop.In() + return in == "query" || in == "path" || in == "formData" +} + +func IsAllowedExtension(ext string) bool { + return rxAllowedExtensions.MatchString(ext) +} + +func ExtractAnnotation(line string) (string, bool) { + matches := rxSwaggerAnnotation.FindStringSubmatch(line) + if len(matches) < minMatchCount { + return "", false + } + + return matches[1], true +} + +func AllOfMember(comments *ast.CommentGroup) bool { + return commentMatcher(rxAllOf)(comments) +} + +func FileParam(comments *ast.CommentGroup) bool { + return commentMatcher(rxFileUpload)(comments) +} + +func Ignored(comments *ast.CommentGroup) bool { + return commentMatcher(rxIgnoreOverride)(comments) +} + +func AliasParam(comments *ast.CommentGroup) bool { + return commentMatcher(rxAlias)(comments) +} + +func StrfmtName(comments *ast.CommentGroup) (string, bool) { + return commentSubMatcher(rxStrFmt)(comments) +} + +func ParamLocation(comments *ast.CommentGroup) (string, bool) { + return commentSubMatcher(rxIn)(comments) +} + +func 
EnumName(comments *ast.CommentGroup) (string, bool) { + return commentSubMatcher(rxEnum)(comments) +} + +func AllOfName(comments *ast.CommentGroup) (string, bool) { + return commentSubMatcher(rxAllOf)(comments) +} + +func NameOverride(comments *ast.CommentGroup) (string, bool) { + return commentSubMatcher(rxName)(comments) +} + +func DefaultName(comments *ast.CommentGroup) (string, bool) { + return commentSubMatcher(rxDefault)(comments) +} + +func TypeName(comments *ast.CommentGroup) (string, bool) { + return commentSubMatcher(rxType)(comments) +} + +func ModelOverride(comments *ast.CommentGroup) (string, bool) { + return commentBlankSubMatcher(rxModelOverride)(comments) +} + +func ResponseOverride(comments *ast.CommentGroup) (string, bool) { + return commentBlankSubMatcher(rxResponseOverride)(comments) +} + +func ParametersOverride(comments *ast.CommentGroup) ([]string, bool) { + return commentMultipleSubMatcher(rxParametersOverride)(comments) +} + +func commentMatcher(rx *regexp.Regexp) func(*ast.CommentGroup) bool { + return func(comments *ast.CommentGroup) bool { + if comments == nil { + return false + } + + return slices.ContainsFunc(comments.List, func(cmt *ast.Comment) bool { + for ln := range strings.SplitSeq(cmt.Text, "\n") { + if rx.MatchString(ln) { + return true + } + } + + return false + }) + } +} + +func commentSubMatcher(rx *regexp.Regexp) func(*ast.CommentGroup) (string, bool) { + return func(comments *ast.CommentGroup) (string, bool) { + if comments == nil { + return "", false + } + + for _, cmt := range comments.List { + for ln := range strings.SplitSeq(cmt.Text, "\n") { + matches := rx.FindStringSubmatch(ln) + if len(matches) > 1 && len(strings.TrimSpace(matches[1])) > 0 { + return strings.TrimSpace(matches[1]), true + } + } + } + + return "", false + } +} + +// same as commentSubMatcher but returns true if a bare annotation is found, even without an empty submatch. 
+func commentBlankSubMatcher(rx *regexp.Regexp) func(*ast.CommentGroup) (string, bool) { + return func(comments *ast.CommentGroup) (string, bool) { + if comments == nil { + return "", false + } + var found bool + + for _, cmt := range comments.List { + for ln := range strings.SplitSeq(cmt.Text, "\n") { + matches := rx.FindStringSubmatch(ln) + if len(matches) > 1 && len(strings.TrimSpace(matches[1])) > 0 { + return strings.TrimSpace(matches[1]), true + } + if len(matches) > 0 { + found = true + } + } + } + + return "", found + } +} + +func commentMultipleSubMatcher(rx *regexp.Regexp) func(*ast.CommentGroup) ([]string, bool) { + return func(comments *ast.CommentGroup) ([]string, bool) { + if comments == nil { + return nil, false + } + + var result []string + for _, cmt := range comments.List { + for ln := range strings.SplitSeq(cmt.Text, "\n") { + matches := rx.FindStringSubmatch(ln) + if len(matches) < minMatchCount { + continue + } + trimmed := strings.TrimSpace(matches[1]) + if len(trimmed) == 0 { + continue + } + + result = append(result, trimmed) + } + } + + return result, len(result) > 0 + } +} diff --git a/internal/parsers/matchers_test.go b/internal/parsers/matchers_test.go new file mode 100644 index 0000000..54adb02 --- /dev/null +++ b/internal/parsers/matchers_test.go @@ -0,0 +1,299 @@ +// SPDX-FileCopyrightText: Copyright 2015-2025 go-swagger maintainers +// SPDX-License-Identifier: Apache-2.0 + +package parsers + +import ( + "go/ast" + "testing" + + "github.com/go-openapi/testify/v2/assert" + "github.com/go-openapi/testify/v2/require" + + "github.com/go-openapi/codescan/internal/ifaces" + oaispec "github.com/go-openapi/spec" +) + +// stubTypable is a minimal SwaggerTypable for testing IsAliasParam. 
+type stubTypable struct { + in string +} + +func (s stubTypable) In() string { return s.in } +func (s stubTypable) Typed(string, string) {} +func (s stubTypable) SetRef(oaispec.Ref) {} + +//nolint:ireturn // test stub +func (s stubTypable) Items() ifaces.SwaggerTypable { return s } +func (s stubTypable) Schema() *oaispec.Schema { return nil } +func (s stubTypable) Level() int { return 0 } +func (s stubTypable) AddExtension(string, any) {} +func (s stubTypable) WithEnum(...any) {} +func (s stubTypable) WithEnumDescription(string) {} + +func TestHasAnnotation(t *testing.T) { + t.Parallel() + + tests := []struct { + name string + line string + want bool + }{ + {"swagger:model", "// swagger:model Foo", true}, + {"swagger:route", "swagger:route GET /foo tags fooOp", true}, + {"swagger:parameters", "// swagger:parameters addFoo", true}, + {"swagger:response", "swagger:response notFound", true}, + {"swagger:operation", "// swagger:operation POST /bar tags barOp", true}, + {"no annotation", "// this is just a comment", false}, + {"empty", "", false}, + {"partial", "swagger:", false}, + } + + for _, tc := range tests { + t.Run(tc.name, func(t *testing.T) { + assert.EqualT(t, tc.want, HasAnnotation(tc.line)) + }) + } +} + +func TestIsAliasParam(t *testing.T) { + t.Parallel() + + tests := []struct { + in string + want bool + }{ + {"query", true}, + {"path", true}, + {"formData", true}, + {"body", false}, + {"header", false}, + {"", false}, + } + + for _, tc := range tests { + t.Run(tc.in, func(t *testing.T) { + assert.EqualT(t, tc.want, IsAliasParam(stubTypable{in: tc.in})) + }) + } +} + +func TestIsAllowedExtension(t *testing.T) { + t.Parallel() + + tests := []struct { + ext string + want bool + }{ + {"x-foo", true}, + {"X-bar", true}, + {"x-", true}, + {"y-foo", false}, + {"foo", false}, + {"", false}, + } + + for _, tc := range tests { + t.Run(tc.ext, func(t *testing.T) { + assert.EqualT(t, tc.want, IsAllowedExtension(tc.ext)) + }) + } +} + +func TestExtractAnnotation(t 
*testing.T) { + t.Parallel() + + tests := []struct { + name string + line string + want string + wantOK bool + }{ + {"model", "// swagger:model Foo", "model", true}, + {"route", "swagger:route GET /foo tags fooOp", "route", true}, + {"parameters", "// swagger:parameters addFoo", "parameters", true}, + {"no annotation", "// just a comment", "", false}, + {"empty", "", "", false}, + } + + for _, tc := range tests { + t.Run(tc.name, func(t *testing.T) { + got, ok := ExtractAnnotation(tc.line) + assert.EqualT(t, tc.wantOK, ok) + assert.EqualT(t, tc.want, got) + }) + } +} + +// makeCommentGroup builds an *ast.CommentGroup from raw lines. +func makeCommentGroup(lines ...string) *ast.CommentGroup { + if len(lines) == 0 { + return nil + } + cg := &ast.CommentGroup{} + for _, line := range lines { + cg.List = append(cg.List, &ast.Comment{Text: line}) + } + return cg +} + +func TestCommentMatcher(t *testing.T) { + t.Parallel() + + t.Run("AllOfMember", func(t *testing.T) { + assert.TrueT(t, AllOfMember(makeCommentGroup("// swagger:allOf"))) + assert.TrueT(t, AllOfMember(makeCommentGroup("// swagger:allOf MyParent"))) + assert.FalseT(t, AllOfMember(makeCommentGroup("// just a comment"))) + assert.FalseT(t, AllOfMember(nil)) + }) + + t.Run("FileParam", func(t *testing.T) { + assert.TrueT(t, FileParam(makeCommentGroup("// swagger:file"))) + assert.FalseT(t, FileParam(makeCommentGroup("// swagger:model"))) + assert.FalseT(t, FileParam(nil)) + }) + + t.Run("Ignored", func(t *testing.T) { + assert.TrueT(t, Ignored(makeCommentGroup("// swagger:ignore"))) + assert.TrueT(t, Ignored(makeCommentGroup("// swagger:ignore Foo"))) + assert.FalseT(t, Ignored(makeCommentGroup("// swagger:model"))) + assert.FalseT(t, Ignored(nil)) + }) + + t.Run("AliasParam", func(t *testing.T) { + assert.TrueT(t, AliasParam(makeCommentGroup("// swagger:alias"))) + assert.FalseT(t, AliasParam(makeCommentGroup("// swagger:model"))) + assert.FalseT(t, AliasParam(nil)) + }) +} + +func TestCommentSubMatcher(t 
*testing.T) { + t.Parallel() + + t.Run("StrfmtName", func(t *testing.T) { + name, ok := StrfmtName(makeCommentGroup("// swagger:strfmt date-time")) + require.TrueT(t, ok) + assert.EqualT(t, "date-time", name) + + _, ok = StrfmtName(makeCommentGroup("// swagger:model Foo")) + assert.FalseT(t, ok) + + _, ok = StrfmtName(nil) + assert.FalseT(t, ok) + }) + + t.Run("ParamLocation", func(t *testing.T) { + loc, ok := ParamLocation(makeCommentGroup("// In: query")) + require.TrueT(t, ok) + assert.EqualT(t, "query", loc) + + loc, ok = ParamLocation(makeCommentGroup("// in: body")) + require.TrueT(t, ok) + assert.EqualT(t, "body", loc) + + _, ok = ParamLocation(makeCommentGroup("// no location")) + assert.FalseT(t, ok) + }) + + t.Run("EnumName", func(t *testing.T) { + name, ok := EnumName(makeCommentGroup("// swagger:enum Status")) + require.TrueT(t, ok) + assert.EqualT(t, "Status", name) + + _, ok = EnumName(nil) + assert.FalseT(t, ok) + }) + + t.Run("AllOfName", func(t *testing.T) { + name, ok := AllOfName(makeCommentGroup("// swagger:allOf MyParent")) + require.TrueT(t, ok) + assert.EqualT(t, "MyParent", name) + + // bare annotation: no submatch → returns false + _, ok = AllOfName(makeCommentGroup("// swagger:allOf")) + assert.FalseT(t, ok) + }) + + t.Run("NameOverride", func(t *testing.T) { + name, ok := NameOverride(makeCommentGroup("// swagger:name MyName")) + require.TrueT(t, ok) + assert.EqualT(t, "MyName", name) + + _, ok = NameOverride(nil) + assert.FalseT(t, ok) + }) + + t.Run("DefaultName", func(t *testing.T) { + name, ok := DefaultName(makeCommentGroup("// swagger:default MyDefault")) + require.TrueT(t, ok) + assert.EqualT(t, "MyDefault", name) + }) + + t.Run("TypeName", func(t *testing.T) { + name, ok := TypeName(makeCommentGroup("// swagger:type string")) + require.TrueT(t, ok) + assert.EqualT(t, "string", name) + }) +} + +func TestCommentBlankSubMatcher(t *testing.T) { + t.Parallel() + + t.Run("ModelOverride with name", func(t *testing.T) { + name, ok := 
ModelOverride(makeCommentGroup("// swagger:model MyModel")) + require.TrueT(t, ok) + assert.EqualT(t, "MyModel", name) + }) + + t.Run("ModelOverride bare", func(t *testing.T) { + name, ok := ModelOverride(makeCommentGroup("// swagger:model")) + assert.TrueT(t, ok) // bare annotation is recognized + assert.EqualT(t, "", name) + }) + + t.Run("ModelOverride nil", func(t *testing.T) { + _, ok := ModelOverride(nil) + assert.FalseT(t, ok) + }) + + t.Run("ResponseOverride with name", func(t *testing.T) { + name, ok := ResponseOverride(makeCommentGroup("// swagger:response notFound")) + require.TrueT(t, ok) + assert.EqualT(t, "notFound", name) + }) + + t.Run("ResponseOverride bare", func(t *testing.T) { + name, ok := ResponseOverride(makeCommentGroup("// swagger:response")) + assert.TrueT(t, ok) + assert.EqualT(t, "", name) + }) +} + +func TestCommentMultipleSubMatcher(t *testing.T) { + t.Parallel() + + t.Run("ParametersOverride single", func(t *testing.T) { + names, ok := ParametersOverride(makeCommentGroup("// swagger:parameters addFoo")) + require.TrueT(t, ok) + assert.Equal(t, []string{"addFoo"}, names) + }) + + t.Run("ParametersOverride multiple", func(t *testing.T) { + names, ok := ParametersOverride(makeCommentGroup( + "// swagger:parameters addFoo", + "// swagger:parameters updateBar", + )) + require.TrueT(t, ok) + assert.Equal(t, []string{"addFoo", "updateBar"}, names) + }) + + t.Run("ParametersOverride nil", func(t *testing.T) { + _, ok := ParametersOverride(nil) + assert.FalseT(t, ok) + }) + + t.Run("ParametersOverride no match", func(t *testing.T) { + _, ok := ParametersOverride(makeCommentGroup("// just a comment")) + assert.FalseT(t, ok) + }) +} diff --git a/meta.go b/internal/parsers/meta.go similarity index 73% rename from meta.go rename to internal/parsers/meta.go index 0cf4421..2d357ca 100644 --- a/meta.go +++ b/internal/parsers/meta.go @@ -1,7 +1,7 @@ // SPDX-FileCopyrightText: Copyright 2015-2025 go-swagger maintainers // SPDX-License-Identifier: 
Apache-2.0 -package codescan +package parsers import ( "encoding/json" @@ -14,13 +14,13 @@ import ( "github.com/go-openapi/spec" ) -type metaSection struct { +type MetaSection struct { Comments *ast.CommentGroup } func metaTOSSetter(meta *spec.Info) func([]string) { return func(lines []string) { - meta.TermsOfService = joinDropLast(lines) + meta.TermsOfService = JoinDropLast(lines) } } @@ -61,7 +61,7 @@ func metaVendorExtensibleSetter(meta *spec.Swagger) func(json.RawMessage) error } for k := range jsonData { if !rxAllowedExtensions.MatchString(k) { - return fmt.Errorf("invalid schema extension name, should start from `x-`: %s: %w", k, ErrCodeScan) + return fmt.Errorf("invalid schema extension name, should start from `x-`: %s: %w", k, ErrParser) } } meta.Extensions = jsonData @@ -78,7 +78,7 @@ func infoVendorExtensibleSetter(meta *spec.Swagger) func(json.RawMessage) error } for k := range jsonData { if !rxAllowedExtensions.MatchString(k) { - return fmt.Errorf("invalid schema extension name, should start from `x-`: %s: %w", k, ErrCodeScan) + return fmt.Errorf("invalid schema extension name, should start from `x-`: %s: %w", k, ErrParser) } } meta.Info.Extensions = jsonData @@ -86,55 +86,56 @@ func infoVendorExtensibleSetter(meta *spec.Swagger) func(json.RawMessage) error } } -func newMetaParser(swspec *spec.Swagger) *sectionedParser { - sp := new(sectionedParser) +func NewMetaParser(swspec *spec.Swagger) *SectionedParser { + sp := new(SectionedParser) if swspec.Info == nil { swspec.Info = new(spec.Info) } info := swspec.Info sp.setTitle = func(lines []string) { - tosave := joinDropLast(lines) + tosave := JoinDropLast(lines) if len(tosave) > 0 { tosave = rxStripTitleComments.ReplaceAllString(tosave, "") } info.Title = tosave } - sp.setDescription = func(lines []string) { info.Description = joinDropLast(lines) } - sp.taggers = []tagParser{ - newMultiLineTagParser("TOS", newMultilineDropEmptyParser(rxTOS, metaTOSSetter(info)), false), - newMultiLineTagParser("Consumes", 
newMultilineDropEmptyParser(rxConsumes, metaConsumesSetter(swspec)), false), - newMultiLineTagParser("Produces", newMultilineDropEmptyParser(rxProduces, metaProducesSetter(swspec)), false), - newSingleLineTagParser("Schemes", newSetSchemes(metaSchemeSetter(swspec))), - newMultiLineTagParser("Security", newSetSecurity(rxSecuritySchemes, metaSecuritySetter(swspec)), false), - newMultiLineTagParser("SecurityDefinitions", newYamlParser(rxSecurity, metaSecurityDefinitionsSetter(swspec)), true), - newSingleLineTagParser("Version", &setMetaSingle{swspec, rxVersion, setInfoVersion}), - newSingleLineTagParser("Host", &setMetaSingle{swspec, rxHost, setSwaggerHost}), - newSingleLineTagParser("BasePath", &setMetaSingle{swspec, rxBasePath, setSwaggerBasePath}), - newSingleLineTagParser("Contact", &setMetaSingle{swspec, rxContact, setInfoContact}), - newSingleLineTagParser("License", &setMetaSingle{swspec, rxLicense, setInfoLicense}), - newMultiLineTagParser("YAMLInfoExtensionsBlock", newYamlParser(rxInfoExtensions, infoVendorExtensibleSetter(swspec)), true), - newMultiLineTagParser("YAMLExtensionsBlock", newYamlParser(rxExtensions, metaVendorExtensibleSetter(swspec)), true), + sp.setDescription = func(lines []string) { info.Description = JoinDropLast(lines) } + sp.taggers = []TagParser{ + NewMultiLineTagParser("TOS", newMultilineDropEmptyParser(rxTOS, metaTOSSetter(info)), false), + NewMultiLineTagParser("Consumes", newMultilineDropEmptyParser(rxConsumes, metaConsumesSetter(swspec)), false), + NewMultiLineTagParser("Produces", newMultilineDropEmptyParser(rxProduces, metaProducesSetter(swspec)), false), + NewSingleLineTagParser("Schemes", NewSetSchemes(metaSchemeSetter(swspec))), + NewMultiLineTagParser("Security", newSetSecurity(rxSecuritySchemes, metaSecuritySetter(swspec)), false), + NewMultiLineTagParser("SecurityDefinitions", NewYAMLParser(WithMatcher(rxSecurity), WithSetter(metaSecurityDefinitionsSetter(swspec))), true), + NewSingleLineTagParser("Version", 
&setMetaSingle{Spec: swspec, Rx: rxVersion, Set: setInfoVersion}), + NewSingleLineTagParser("Host", &setMetaSingle{Spec: swspec, Rx: rxHost, Set: setSwaggerHost}), + NewSingleLineTagParser("BasePath", &setMetaSingle{Spec: swspec, Rx: rxBasePath, Set: setSwaggerBasePath}), + NewSingleLineTagParser("Contact", &setMetaSingle{Spec: swspec, Rx: rxContact, Set: setInfoContact}), + NewSingleLineTagParser("License", &setMetaSingle{Spec: swspec, Rx: rxLicense, Set: setInfoLicense}), + NewMultiLineTagParser("YAMLInfoExtensionsBlock", NewYAMLParser(WithMatcher(rxInfoExtensions), WithSetter(infoVendorExtensibleSetter(swspec))), true), + NewMultiLineTagParser("YAMLExtensionsBlock", NewYAMLParser(WithExtensionMatcher(), WithSetter(metaVendorExtensibleSetter(swspec))), true), } + return sp } type setMetaSingle struct { - spec *spec.Swagger - rx *regexp.Regexp - set func(spec *spec.Swagger, lines []string) error + Spec *spec.Swagger + Rx *regexp.Regexp + Set func(spec *spec.Swagger, lines []string) error } func (s *setMetaSingle) Matches(line string) bool { - return s.rx.MatchString(line) + return s.Rx.MatchString(line) } func (s *setMetaSingle) Parse(lines []string) error { if len(lines) == 0 || (len(lines) == 1 && len(lines[0]) == 0) { return nil } - matches := s.rx.FindStringSubmatch(lines[0]) + matches := s.Rx.FindStringSubmatch(lines[0]) if len(matches) > 1 && len(matches[1]) > 0 { - return s.set(s.spec, []string{matches[1]}) + return s.Set(s.Spec, []string{matches[1]}) } return nil } diff --git a/internal/parsers/meta_test.go b/internal/parsers/meta_test.go new file mode 100644 index 0000000..8b8d391 --- /dev/null +++ b/internal/parsers/meta_test.go @@ -0,0 +1,270 @@ +// SPDX-FileCopyrightText: Copyright 2015-2025 go-swagger maintainers +// SPDX-License-Identifier: Apache-2.0 + +package parsers + +import ( + goparser "go/parser" + "go/token" + "testing" + + "github.com/go-openapi/codescan/internal/scantest/classification" + "github.com/go-openapi/testify/v2/assert" + 
"github.com/go-openapi/testify/v2/require" + + oaispec "github.com/go-openapi/spec" +) + +func TestSetInfoVersion(t *testing.T) { + info := new(oaispec.Swagger) + err := setInfoVersion(info, []string{"0.0.1"}) + require.NoError(t, err) + assert.EqualT(t, "0.0.1", info.Info.Version) +} + +func TestSetInfoLicense(t *testing.T) { + info := new(oaispec.Swagger) + err := setInfoLicense(info, []string{"MIT http://license.org/MIT"}) + require.NoError(t, err) + assert.EqualT(t, "MIT", info.Info.License.Name) + assert.EqualT(t, "http://license.org/MIT", info.Info.License.URL) +} + +func TestSetInfoContact(t *testing.T) { + info := new(oaispec.Swagger) + err := setInfoContact(info, []string{"Homer J. Simpson http://simpsons.com"}) + require.NoError(t, err) + assert.EqualT(t, "Homer J. Simpson", info.Info.Contact.Name) + assert.EqualT(t, "homer@simpsons.com", info.Info.Contact.Email) + assert.EqualT(t, "http://simpsons.com", info.Info.Contact.URL) +} + +func TestParseInfo(t *testing.T) { + swspec := new(oaispec.Swagger) + parser := NewMetaParser(swspec) + docFile := "../../fixtures/goparsing/classification/doc.go" + fileSet := token.NewFileSet() + fileTree, err := goparser.ParseFile(fileSet, docFile, nil, goparser.ParseComments) + if err != nil { + t.FailNow() + } + + err = parser.Parse(fileTree.Doc) + + require.NoError(t, err) + classification.VerifyInfo(t, swspec.Info) +} + +func TestParseSwagger(t *testing.T) { + swspec := new(oaispec.Swagger) + parser := NewMetaParser(swspec) + docFile := "../../fixtures/goparsing/classification/doc.go" + fileSet := token.NewFileSet() + fileTree, err := goparser.ParseFile(fileSet, docFile, nil, goparser.ParseComments) + if err != nil { + t.FailNow() + } + + err = parser.Parse(fileTree.Doc) + verifyMeta(t, swspec) + + require.NoError(t, err) +} + +func verifyMeta(t *testing.T, doc *oaispec.Swagger) { + assert.NotNil(t, doc) + classification.VerifyInfo(t, doc.Info) + assert.Equal(t, []string{"application/json", "application/xml"}, 
doc.Consumes) + assert.Equal(t, []string{"application/json", "application/xml"}, doc.Produces) + assert.Equal(t, []string{"http", "https"}, doc.Schemes) + assert.Equal(t, []map[string][]string{{"api_key": {}}}, doc.Security) + expectedSecuritySchemaKey := oaispec.SecurityScheme{ + SecuritySchemeProps: oaispec.SecuritySchemeProps{ + Type: "apiKey", + In: "header", + Name: "KEY", + }, + } + expectedSecuritySchemaOAuth := oaispec.SecurityScheme{ + SecuritySchemeProps: oaispec.SecuritySchemeProps{ //nolint:gosec // G101: false positive, test fixture not real credentials + Type: "oauth2", + In: "header", + AuthorizationURL: "/oauth2/auth", + TokenURL: "/oauth2/token", + Flow: "accessCode", + Scopes: map[string]string{ + "bla1": "foo1", + "bla2": "foo2", + }, + }, + } + expectedExtensions := oaispec.Extensions{ + "x-meta-array": []any{ + "value1", + "value2", + }, + "x-meta-array-obj": []any{ + map[string]any{ + "name": "obj", + "value": "field", + }, + }, + "x-meta-value": "value", + } + expectedInfoExtensions := oaispec.Extensions{ + "x-info-array": []any{ + "value1", + "value2", + }, + "x-info-array-obj": []any{ + map[string]any{ + "name": "obj", + "value": "field", + }, + }, + "x-info-value": "value", + } + assert.NotNil(t, doc.SecurityDefinitions["api_key"]) + assert.NotNil(t, doc.SecurityDefinitions["oauth2"]) + assert.Equal(t, oaispec.SecurityDefinitions{"api_key": &expectedSecuritySchemaKey, "oauth2": &expectedSecuritySchemaOAuth}, doc.SecurityDefinitions) + assert.Equal(t, expectedExtensions, doc.Extensions) + assert.Equal(t, expectedInfoExtensions, doc.Info.Extensions) + assert.EqualT(t, "localhost", doc.Host) + assert.EqualT(t, "/v2", doc.BasePath) +} + +func TestMoreParseMeta(t *testing.T) { + for _, docFile := range []string{ + "../../fixtures/goparsing/meta/v1/doc.go", + "../../fixtures/goparsing/meta/v2/doc.go", + "../../fixtures/goparsing/meta/v3/doc.go", + "../../fixtures/goparsing/meta/v4/doc.go", + } { + swspec := new(oaispec.Swagger) + parser := 
NewMetaParser(swspec) + fileSet := token.NewFileSet() + fileTree, err := goparser.ParseFile(fileSet, docFile, nil, goparser.ParseComments) + if err != nil { + t.FailNow() + } + + err = parser.Parse(fileTree.Doc) + require.NoError(t, err) + assert.EqualT(t, "there are no TOS at this moment, use at your own risk we take no responsibility", swspec.Info.TermsOfService) + /* + jazon, err := json.MarshalIndent(swoaispec.Info, "", " ") + require.NoError(t, err) + t.Logf("%v", string(jazon)) + */ + } +} + +func TestSetInfoVersion_Empty(t *testing.T) { + swspec := new(oaispec.Swagger) + require.NoError(t, setInfoVersion(swspec, nil)) + assert.Nil(t, swspec.Info) +} + +func TestSetSwaggerHost_Empty(t *testing.T) { + swspec := new(oaispec.Swagger) + require.NoError(t, setSwaggerHost(swspec, nil)) + assert.EqualT(t, "localhost", swspec.Host) // fallback + swspec2 := new(oaispec.Swagger) + require.NoError(t, setSwaggerHost(swspec2, []string{""})) + assert.EqualT(t, "localhost", swspec2.Host) // fallback +} + +func TestSetInfoContact_Empty(t *testing.T) { + swspec := new(oaispec.Swagger) + require.NoError(t, setInfoContact(swspec, nil)) + assert.Nil(t, swspec.Info) + require.NoError(t, setInfoContact(swspec, []string{""})) +} + +func TestSetInfoContact_BadEmail(t *testing.T) { + swspec := new(oaispec.Swagger) + err := setInfoContact(swspec, []string{"not-a-valid-email-address <<<"}) + require.Error(t, err) +} + +func TestSetInfoLicense_Empty(t *testing.T) { + swspec := new(oaispec.Swagger) + require.NoError(t, setInfoLicense(swspec, nil)) + assert.Nil(t, swspec.Info) + require.NoError(t, setInfoLicense(swspec, []string{""})) +} + +func TestSetMetaSingle_Parse_Empty(t *testing.T) { + swspec := new(oaispec.Swagger) + s := &setMetaSingle{Spec: swspec, Rx: rxVersion, Set: setInfoVersion} + require.NoError(t, s.Parse(nil)) + require.NoError(t, s.Parse([]string{""})) + // Line that doesn't match the regex + require.NoError(t, s.Parse([]string{"no match here"})) +} + +func 
TestSplitURL(t *testing.T) { + t.Parallel() + + tests := []struct { + name string + line string + wantNot string + wantURL string + }{ + {"with http url", "MIT http://example.com", "MIT", "http://example.com"}, + {"with https url", "MIT https://example.com", "MIT", "https://example.com"}, + {"url only", "http://example.com", "", "http://example.com"}, + {"no url", "just text", "just text", ""}, + {"empty", "", "", ""}, + {"ws url", "live ws://example.com/ws", "live", "ws://example.com/ws"}, + } + + for _, tc := range tests { + t.Run(tc.name, func(t *testing.T) { + notURL, url := splitURL(tc.line) + assert.EqualT(t, tc.wantNot, notURL) + assert.EqualT(t, tc.wantURL, url) + }) + } +} + +func TestMetaVendorExtensibleSetter_InvalidKey(t *testing.T) { + swspec := new(oaispec.Swagger) + setter := metaVendorExtensibleSetter(swspec) + // Extension key that doesn't start with x- + err := setter([]byte(`{"not-x-key": "value"}`)) + require.Error(t, err) + require.ErrorIs(t, err, ErrParser) +} + +func TestMetaVendorExtensibleSetter_BadJSON(t *testing.T) { + swspec := new(oaispec.Swagger) + setter := metaVendorExtensibleSetter(swspec) + err := setter([]byte(`{bad json`)) + require.Error(t, err) +} + +func TestInfoVendorExtensibleSetter_InvalidKey(t *testing.T) { + swspec := &oaispec.Swagger{} + swspec.Info = new(oaispec.Info) + setter := infoVendorExtensibleSetter(swspec) + err := setter([]byte(`{"invalid-key": "value"}`)) + require.Error(t, err) + require.ErrorIs(t, err, ErrParser) +} + +func TestInfoVendorExtensibleSetter_BadJSON(t *testing.T) { + swspec := &oaispec.Swagger{} + swspec.Info = new(oaispec.Info) + setter := infoVendorExtensibleSetter(swspec) + err := setter([]byte(`{bad json`)) + require.Error(t, err) +} + +func TestMetaSecurityDefinitionsSetter_BadJSON(t *testing.T) { + swspec := new(oaispec.Swagger) + setter := metaSecurityDefinitionsSetter(swspec) + err := setter([]byte(`{bad json`)) + require.Error(t, err) +} diff --git 
a/internal/parsers/parsed_path_content.go b/internal/parsers/parsed_path_content.go new file mode 100644 index 0000000..739afaa --- /dev/null +++ b/internal/parsers/parsed_path_content.go @@ -0,0 +1,66 @@ +package parsers + +import ( + "go/ast" + "regexp" + "strings" +) + +var ( + rxStripComments = regexp.MustCompile(`^[^\p{L}\p{N}\p{Pd}\p{Pc}\+]*`) + rxSpace = regexp.MustCompile(`\p{Zs}+`) +) + +type ParsedPathContent struct { + Method, Path, ID string + Tags []string + Remaining *ast.CommentGroup +} + +func ParseOperationPathAnnotation(lines []*ast.Comment) (cnt ParsedPathContent) { + return parsePathAnnotation(rxOperation, lines) +} + +func ParseRoutePathAnnotation(lines []*ast.Comment) (cnt ParsedPathContent) { + return parsePathAnnotation(rxRoute, lines) +} + +func parsePathAnnotation(annotation *regexp.Regexp, lines []*ast.Comment) (cnt ParsedPathContent) { + const routeTagsIndex = 3 // routeTagsIndex is the regex submatch index where route tags begin. + var justMatched bool + + for _, cmt := range lines { + txt := cmt.Text + for line := range strings.SplitSeq(txt, "\n") { + matches := annotation.FindStringSubmatch(line) + if len(matches) > routeTagsIndex { + cnt.Method, cnt.Path, cnt.ID = matches[1], matches[2], matches[len(matches)-1] + cnt.Tags = rxSpace.Split(matches[3], -1) + if len(matches[3]) == 0 { + cnt.Tags = nil + } + justMatched = true + + continue + } + + if cnt.Method == "" { + continue + } + + if cnt.Remaining == nil { + cnt.Remaining = new(ast.CommentGroup) + } + + if !justMatched || strings.TrimSpace(rxStripComments.ReplaceAllString(line, "")) != "" { + cc := new(ast.Comment) + cc.Slash = cmt.Slash + cc.Text = line + cnt.Remaining.List = append(cnt.Remaining.List, cc) + justMatched = false + } + } + } + + return cnt +} diff --git a/internal/parsers/parsed_path_content_test.go b/internal/parsers/parsed_path_content_test.go new file mode 100644 index 0000000..694a900 --- /dev/null +++ b/internal/parsers/parsed_path_content_test.go @@ -0,0 
+1,144 @@ +// SPDX-FileCopyrightText: Copyright 2015-2025 go-swagger maintainers +// SPDX-License-Identifier: Apache-2.0 + +package parsers + +import ( + "go/ast" + "testing" + + "github.com/go-openapi/testify/v2/assert" + "github.com/go-openapi/testify/v2/require" +) + +func TestParseRoutePathAnnotation(t *testing.T) { + t.Parallel() + + tests := []struct { + name string + line string + wantMethod string + wantPath string + wantID string + wantTags []string + }{ + { + name: "GET with tags", + line: "// swagger:route GET /pets pets listPets", + wantMethod: "GET", + wantPath: "/pets", + wantID: "listPets", + wantTags: []string{"pets"}, + }, + { + name: "POST without tags", + line: "// swagger:route POST /pets createPet", + wantMethod: "POST", + wantPath: "/pets", + wantID: "createPet", + }, + { + name: "with path params", + line: "// swagger:route GET /pets/{petId} pets getPet", + wantMethod: "GET", + wantPath: "/pets/{petId}", + wantID: "getPet", + wantTags: []string{"pets"}, + }, + { + name: "multiple tags", + line: "// swagger:route DELETE /pets/{petId} pets admin deletePet", + wantMethod: "DELETE", + wantPath: "/pets/{petId}", + wantID: "deletePet", + wantTags: []string{"pets", "admin"}, + }, + } + + for _, tc := range tests { + t.Run(tc.name, func(t *testing.T) { + comments := []*ast.Comment{{Text: tc.line}} + cnt := ParseRoutePathAnnotation(comments) + assert.EqualT(t, tc.wantMethod, cnt.Method) + assert.EqualT(t, tc.wantPath, cnt.Path) + assert.EqualT(t, tc.wantID, cnt.ID) + if tc.wantTags == nil { + assert.Nil(t, cnt.Tags) + } else { + assert.Equal(t, tc.wantTags, cnt.Tags) + } + }) + } +} + +func TestParseOperationPathAnnotation(t *testing.T) { + t.Parallel() + + tests := []struct { + name string + line string + wantMethod string + wantPath string + wantID string + wantTags []string + }{ + { + name: "basic operation", + line: "// swagger:operation POST /v1/pets pets addPet", + wantMethod: "POST", + wantPath: "/v1/pets", + wantID: "addPet", + wantTags: 
[]string{"pets"}, + }, + { + name: "without tags", + line: "// swagger:operation GET /health checkHealth", + wantMethod: "GET", + wantPath: "/health", + wantID: "checkHealth", + }, + } + + for _, tc := range tests { + t.Run(tc.name, func(t *testing.T) { + comments := []*ast.Comment{{Text: tc.line}} + cnt := ParseOperationPathAnnotation(comments) + assert.EqualT(t, tc.wantMethod, cnt.Method) + assert.EqualT(t, tc.wantPath, cnt.Path) + assert.EqualT(t, tc.wantID, cnt.ID) + if tc.wantTags == nil { + assert.Nil(t, cnt.Tags) + } else { + assert.Equal(t, tc.wantTags, cnt.Tags) + } + }) + } +} + +func TestParsePathAnnotation_Remaining(t *testing.T) { + t.Parallel() + + comments := []*ast.Comment{ + {Text: "// swagger:route GET /pets pets listPets"}, + {Text: "// This is the description"}, + {Text: "// More description"}, + } + + cnt := ParseRoutePathAnnotation(comments) + assert.EqualT(t, "GET", cnt.Method) + require.NotNil(t, cnt.Remaining) + assert.Len(t, cnt.Remaining.List, 2) +} + +func TestParsePathAnnotation_NoMatch(t *testing.T) { + t.Parallel() + + comments := []*ast.Comment{ + {Text: "// just a regular comment"}, + } + + cnt := ParseRoutePathAnnotation(comments) + assert.EqualT(t, "", cnt.Method) + assert.EqualT(t, "", cnt.Path) + assert.EqualT(t, "", cnt.ID) +} diff --git a/internal/parsers/parsers.go b/internal/parsers/parsers.go new file mode 100644 index 0000000..360edc3 --- /dev/null +++ b/internal/parsers/parsers.go @@ -0,0 +1,134 @@ +// SPDX-FileCopyrightText: Copyright 2015-2025 go-swagger maintainers +// SPDX-License-Identifier: Apache-2.0 + +package parsers + +import ( + "regexp" + "strconv" + + oaispec "github.com/go-openapi/spec" +) + +const ( + // kvParts is the number of parts when splitting key:value pairs. 
+	kvParts = 2
+)
+
+// Many thanks go to https://github.com/yvasiyarov/swagger
+// this is loosely based on that implementation but for swagger 2.0
+
+type matchOnlyParam struct {
+	rx *regexp.Regexp
+}
+
+func (mo *matchOnlyParam) Matches(line string) bool {
+	return mo.rx.MatchString(line)
+}
+
+func (mo *matchOnlyParam) Parse(_ []string) error {
+	return nil
+}
+
+type MatchParamIn struct {
+	*matchOnlyParam
+}
+
+func NewMatchParamIn(_ *oaispec.Parameter) *MatchParamIn {
+	return &MatchParamIn{
+		matchOnlyParam: &matchOnlyParam{
+			rx: rxIn,
+		},
+	}
+}
+
+type MatchParamRequired struct {
+	*matchOnlyParam
+}
+
+func NewMatchParamRequired(_ *oaispec.Parameter) *MatchParamRequired {
+	return &MatchParamRequired{
+		matchOnlyParam: &matchOnlyParam{
+			rx: rxRequired,
+		},
+	}
+}
+
+type SetDeprecatedOp struct {
+	tgt *oaispec.Operation
+}
+
+func NewSetDeprecatedOp(operation *oaispec.Operation) *SetDeprecatedOp {
+	return &SetDeprecatedOp{
+		tgt: operation,
+	}
+}
+
+func (su *SetDeprecatedOp) Matches(line string) bool {
+	return rxDeprecated.MatchString(line)
+}
+
+func (su *SetDeprecatedOp) Parse(lines []string) error {
+	if len(lines) == 0 || (len(lines) == 1 && len(lines[0]) == 0) {
+		return nil
+	}
+
+	matches := rxDeprecated.FindStringSubmatch(lines[0])
+	if len(matches) > 1 && len(matches[1]) > 0 {
+		req, err := strconv.ParseBool(matches[1])
+		if err != nil {
+			return err
+		}
+		su.tgt.Deprecated = req
+	}
+
+	return nil
+}
+
+type ConsumesDropEmptyParser struct {
+	*multilineDropEmptyParser
+}
+
+func NewConsumesDropEmptyParser(set func([]string)) *ConsumesDropEmptyParser {
+	return &ConsumesDropEmptyParser{
+		multilineDropEmptyParser: &multilineDropEmptyParser{
+			set: set,
+			rx:  rxConsumes,
+		},
+	}
+}
+
+type ProducesDropEmptyParser struct {
+	*multilineDropEmptyParser
+}
+
+// NewProducesDropEmptyParser returns its own type: previously it was declared
+// to return *ConsumesDropEmptyParser and built a Consumes value (wrong type,
+// even though the rxProduces regex made it behave).
+func NewProducesDropEmptyParser(set func([]string)) *ProducesDropEmptyParser {
+	return &ProducesDropEmptyParser{
+		multilineDropEmptyParser: &multilineDropEmptyParser{
+			set: set,
rx: rxProduces, + }, + } +} + +type multilineDropEmptyParser struct { + set func([]string) + rx *regexp.Regexp +} + +func newMultilineDropEmptyParser(rx *regexp.Regexp, set func([]string)) *multilineDropEmptyParser { + return &multilineDropEmptyParser{ + set: set, + rx: rx, + } +} + +func (m *multilineDropEmptyParser) Matches(line string) bool { + return m.rx.MatchString(line) +} + +func (m *multilineDropEmptyParser) Parse(lines []string) error { + m.set(removeEmptyLines(lines)) + + return nil +} diff --git a/parser_helpers.go b/internal/parsers/parsers_helpers.go similarity index 98% rename from parser_helpers.go rename to internal/parsers/parsers_helpers.go index 2efe2f9..3295c41 100644 --- a/parser_helpers.go +++ b/internal/parsers/parsers_helpers.go @@ -1,7 +1,7 @@ // SPDX-FileCopyrightText: Copyright 2015-2025 go-swagger maintainers // SPDX-License-Identifier: Apache-2.0 -package codescan +package parsers import ( "strings" diff --git a/internal/parsers/parsers_helpers_test.go b/internal/parsers/parsers_helpers_test.go new file mode 100644 index 0000000..bc5f5c7 --- /dev/null +++ b/internal/parsers/parsers_helpers_test.go @@ -0,0 +1,73 @@ +// SPDX-FileCopyrightText: Copyright 2015-2025 go-swagger maintainers +// SPDX-License-Identifier: Apache-2.0 + +package parsers + +import ( + "testing" + + "github.com/go-openapi/testify/v2/assert" +) + +func TestCollectScannerTitleDescription(t *testing.T) { + t.Parallel() + + t.Run("title and description separated by blank", func(t *testing.T) { + headers := []string{ + "// This is the title.", + "//", + "// This is the description.", + "// More description.", + } + title, desc := collectScannerTitleDescription(headers) + assert.Equal(t, []string{"This is the title."}, title) + assert.Equal(t, []string{"This is the description.", "More description."}, desc) + }) + + t.Run("title only with punctuation", func(t *testing.T) { + headers := []string{ + "// A single title line.", + "// And some description.", + } + title, desc 
:= collectScannerTitleDescription(headers) + assert.Equal(t, []string{"A single title line."}, title) + assert.Equal(t, []string{"And some description."}, desc) + }) + + t.Run("title with markdown header prefix", func(t *testing.T) { + headers := []string{ + "// # My Title", + "// Description here.", + } + title, desc := collectScannerTitleDescription(headers) + assert.Equal(t, []string{"My Title"}, title) + assert.Equal(t, []string{"Description here."}, desc) + }) + + t.Run("no title, all description", func(t *testing.T) { + headers := []string{ + "// no punctuation at end means no title", + "// more text", + } + title, desc := collectScannerTitleDescription(headers) + assert.Empty(t, title) + assert.Equal(t, []string{"no punctuation at end means no title", "more text"}, desc) + }) + + t.Run("empty", func(t *testing.T) { + title, desc := collectScannerTitleDescription(nil) + assert.Empty(t, title) + assert.Empty(t, desc) + }) + + t.Run("blank line only", func(t *testing.T) { + headers := []string{"//"} + title, desc := collectScannerTitleDescription(headers) + assert.Empty(t, title) + assert.Nil(t, desc) + }) + + // Note: the branch at line 31-32 (desc = nil when blank is last line) + // is unreachable because cleanupScannerLines always trims trailing blanks + // before collectScannerTitleDescription processes the slice. 
+} diff --git a/internal/parsers/parsers_test.go b/internal/parsers/parsers_test.go new file mode 100644 index 0000000..02841d4 --- /dev/null +++ b/internal/parsers/parsers_test.go @@ -0,0 +1,99 @@ +// SPDX-FileCopyrightText: Copyright 2015-2025 go-swagger maintainers +// SPDX-License-Identifier: Apache-2.0 + +package parsers + +import ( + "testing" + + "github.com/go-openapi/testify/v2/assert" + "github.com/go-openapi/testify/v2/require" + + oaispec "github.com/go-openapi/spec" +) + +func TestMatchParamIn(t *testing.T) { + t.Parallel() + + mp := NewMatchParamIn(nil) + assert.TrueT(t, mp.Matches("In: query")) + assert.TrueT(t, mp.Matches("in: body")) + assert.TrueT(t, mp.Matches("in: path")) + assert.TrueT(t, mp.Matches("in: header")) + assert.TrueT(t, mp.Matches("in: formData")) + assert.FalseT(t, mp.Matches("in: cookie")) // not a valid swagger 2.0 location + assert.FalseT(t, mp.Matches("something else")) + + // Parse is a no-op + require.NoError(t, mp.Parse(nil)) +} + +func TestMatchParamRequired(t *testing.T) { + t.Parallel() + + mp := NewMatchParamRequired(nil) + assert.TrueT(t, mp.Matches("required: true")) + assert.TrueT(t, mp.Matches("Required: false")) + assert.FalseT(t, mp.Matches("something else")) + + // Parse is a no-op + require.NoError(t, mp.Parse(nil)) +} + +func TestSetDeprecatedOp(t *testing.T) { + t.Parallel() + + t.Run("true", func(t *testing.T) { + op := new(oaispec.Operation) + sd := NewSetDeprecatedOp(op) + assert.TrueT(t, sd.Matches("deprecated: true")) + require.NoError(t, sd.Parse([]string{"deprecated: true"})) + assert.TrueT(t, op.Deprecated) + }) + + t.Run("false", func(t *testing.T) { + op := new(oaispec.Operation) + sd := NewSetDeprecatedOp(op) + require.NoError(t, sd.Parse([]string{"deprecated: false"})) + assert.FalseT(t, op.Deprecated) + }) + + t.Run("empty", func(t *testing.T) { + op := new(oaispec.Operation) + sd := NewSetDeprecatedOp(op) + require.NoError(t, sd.Parse(nil)) + require.NoError(t, sd.Parse([]string{})) + 
require.NoError(t, sd.Parse([]string{""})) + assert.FalseT(t, op.Deprecated) + }) + + t.Run("no match", func(t *testing.T) { + sd := NewSetDeprecatedOp(new(oaispec.Operation)) + assert.FalseT(t, sd.Matches("something else")) + }) +} + +func TestConsumesDropEmptyParser(t *testing.T) { + t.Parallel() + + var got []string + cp := NewConsumesDropEmptyParser(func(v []string) { got = v }) + assert.TrueT(t, cp.Matches("consumes:")) + assert.TrueT(t, cp.Matches("Consumes:")) + assert.FalseT(t, cp.Matches("other")) + + require.NoError(t, cp.Parse([]string{"application/json", "", "application/xml", " "})) + assert.Equal(t, []string{"application/json", "application/xml"}, got) +} + +func TestProducesDropEmptyParser(t *testing.T) { + t.Parallel() + + var got []string + pp := NewProducesDropEmptyParser(func(v []string) { got = v }) + assert.TrueT(t, pp.Matches("produces:")) + assert.TrueT(t, pp.Matches("Produces:")) + + require.NoError(t, pp.Parse([]string{"text/plain", "", "text/html"})) + assert.Equal(t, []string{"text/plain", "text/html"}, got) +} diff --git a/regexprs.go b/internal/parsers/regexprs.go similarity index 97% rename from regexprs.go rename to internal/parsers/regexprs.go index 692ae82..37ea349 100644 --- a/regexprs.go +++ b/internal/parsers/regexprs.go @@ -1,9 +1,12 @@ // SPDX-FileCopyrightText: Copyright 2015-2025 go-swagger maintainers // SPDX-License-Identifier: Apache-2.0 -package codescan +package parsers -import "regexp" +import ( + "fmt" + "regexp" +) const ( rxMethod = "(\\p{L}+)" @@ -66,12 +69,10 @@ var ( ")?\\p{Zs}+" + rxOpID + "\\p{Zs}*$") - rxSpace = regexp.MustCompile(`\p{Zs}+`) rxIndent = regexp.MustCompile(`[\p{Zs}\t]*/*[\p{Zs}\t]*[^\p{Zs}\t]`) rxNotIndent = regexp.MustCompile(`[^\p{Zs}\t]`) rxPunctuationEnd = regexp.MustCompile(`\p{Po}$`) rxTitleStart = regexp.MustCompile(`^[#]+\p{Zs}+`) - rxStripComments = regexp.MustCompile(`^[^\p{L}\p{N}\p{Pd}\p{Pc}\+]*`) rxStripTitleComments = 
regexp.MustCompile(`^[^\p{L}]*[Pp]ackage\p{Zs}+[^\p{Zs}]+\p{Zs}*`) rxAllowedExtensions = regexp.MustCompile(`^[Xx]-`) @@ -97,3 +98,7 @@ var ( rxDeprecated = regexp.MustCompile(`[Dd]eprecated\p{Zs}*:\p{Zs}*(true|false)(?:\.)?$`) // currently unused: rxExample = regexp.MustCompile(`[Ex]ample\p{Zs}*:\p{Zs}*(.*)$`). ) + +func Rxf(rxp, ar string) *regexp.Regexp { + return regexp.MustCompile(fmt.Sprintf(rxp, ar)) +} diff --git a/regexprs_test.go b/internal/parsers/regexprs_test.go similarity index 89% rename from regexprs_test.go rename to internal/parsers/regexprs_test.go index a201704..cb5a71c 100644 --- a/regexprs_test.go +++ b/internal/parsers/regexprs_test.go @@ -1,7 +1,7 @@ // SPDX-FileCopyrightText: Copyright 2015-2025 go-swagger maintainers // SPDX-License-Identifier: Apache-2.0 -package codescan +package parsers import ( "fmt" @@ -12,6 +12,16 @@ import ( "github.com/go-openapi/testify/v2/assert" ) +func TestRouteExpression(t *testing.T) { + assert.RegexpT(t, rxRoute, "swagger:route DELETE /orders/{id} deleteOrder") + assert.RegexpT(t, rxRoute, "swagger:route GET /v1.2/something deleteOrder") +} + +func TestOperationExpression(t *testing.T) { + assert.RegexpT(t, rxOperation, "swagger:operation DELETE /orders/{id} deleteOrder") + assert.RegexpT(t, rxOperation, "swagger:operation GET /v1.2/something deleteOrder") +} + func TestSchemaValueExtractors(t *testing.T) { strfmts := []string{ "// swagger:strfmt ", @@ -87,14 +97,14 @@ func TestSchemaValueExtractors(t *testing.T) { verifySwaggerMultiArgSwaggerTag(t, rxParametersOverride, parameters, validParams, invalidParams) - verifyMinMax(t, rxf(rxMinimumFmt, ""), "min", []string{"", ">", "="}) - verifyMinMax(t, rxf(rxMinimumFmt, fmt.Sprintf(rxItemsPrefixFmt, 1)), "items.min", []string{"", ">", "="}) - verifyMinMax(t, rxf(rxMaximumFmt, ""), "max", []string{"", "<", "="}) - verifyMinMax(t, rxf(rxMaximumFmt, fmt.Sprintf(rxItemsPrefixFmt, 1)), "items.max", []string{"", "<", "="}) - verifyNumeric2Words(t, rxf(rxMultipleOfFmt, 
""), "multiple", "of") - verifyNumeric2Words(t, rxf(rxMultipleOfFmt, fmt.Sprintf(rxItemsPrefixFmt, 1)), "items.multiple", "of") + verifyMinMax(t, Rxf(rxMinimumFmt, ""), "min", []string{"", ">", "="}) + verifyMinMax(t, Rxf(rxMinimumFmt, fmt.Sprintf(rxItemsPrefixFmt, 1)), "items.min", []string{"", ">", "="}) + verifyMinMax(t, Rxf(rxMaximumFmt, ""), "max", []string{"", "<", "="}) + verifyMinMax(t, Rxf(rxMaximumFmt, fmt.Sprintf(rxItemsPrefixFmt, 1)), "items.max", []string{"", "<", "="}) + verifyNumeric2Words(t, Rxf(rxMultipleOfFmt, ""), "multiple", "of") + verifyNumeric2Words(t, Rxf(rxMultipleOfFmt, fmt.Sprintf(rxItemsPrefixFmt, 1)), "items.multiple", "of") - verifyIntegerMinMaxManyWords(t, rxf(rxMinLengthFmt, ""), "min", []string{"len", "length"}) + verifyIntegerMinMaxManyWords(t, Rxf(rxMinLengthFmt, ""), "min", []string{"len", "length"}) // pattern patPrefixes := cartesianJoin( []string{"//", "*", ""}, @@ -104,10 +114,10 @@ func TestSchemaValueExtractors(t *testing.T) { []string{":"}, []string{"", " ", " ", " "}, ) - verifyRegexpArgs(t, rxf(rxPatternFmt, ""), patPrefixes, []string{"^\\w+$", "[A-Za-z0-9-.]*"}, nil, 2, 1) + verifyRegexpArgs(t, Rxf(rxPatternFmt, ""), patPrefixes, []string{"^\\w+$", "[A-Za-z0-9-.]*"}, nil, 2, 1) - verifyIntegerMinMaxManyWords(t, rxf(rxMinItemsFmt, ""), "min", []string{"items"}) - verifyBoolean(t, rxf(rxUniqueFmt, ""), []string{"unique"}, nil) + verifyIntegerMinMaxManyWords(t, Rxf(rxMinItemsFmt, ""), "min", []string{"items"}) + verifyBoolean(t, Rxf(rxUniqueFmt, ""), []string{"unique"}, nil) verifyBoolean(t, rxReadOnly, []string{"read"}, []string{"only"}) verifyBoolean(t, rxRequired, []string{"required"}, nil) diff --git a/internal/parsers/responses.go b/internal/parsers/responses.go new file mode 100644 index 0000000..0957360 --- /dev/null +++ b/internal/parsers/responses.go @@ -0,0 +1,221 @@ +// SPDX-FileCopyrightText: Copyright 2015-2025 go-swagger maintainers +// SPDX-License-Identifier: Apache-2.0 + +package parsers + +import ( + 
"fmt" + "regexp" + "strconv" + "strings" + + oaispec "github.com/go-openapi/spec" +) + +const ( + // r)sponseTag used when specifying a response to point to a defined swagger:response. + responseTag = "response" + + // bodyTag used when specifying a response to point to a model/schema. + bodyTag = "body" + + // descriptionTag used when specifying a response that gives a description of the response. + descriptionTag = "description" +) + +type SetOpResponses struct { + set func(*oaispec.Response, map[int]oaispec.Response) + rx *regexp.Regexp + definitions map[string]oaispec.Schema + responses map[string]oaispec.Response +} + +func NewSetResponses(definitions map[string]oaispec.Schema, responses map[string]oaispec.Response, setter func(*oaispec.Response, map[int]oaispec.Response)) *SetOpResponses { + return &SetOpResponses{ + set: setter, + rx: rxResponses, + definitions: definitions, + responses: responses, + } +} + +func (ss *SetOpResponses) Matches(line string) bool { + return ss.rx.MatchString(line) +} + +func (ss *SetOpResponses) Parse(lines []string) error { + if len(lines) == 0 || (len(lines) == 1 && len(lines[0]) == 0) { + return nil + } + + var def *oaispec.Response + var scr map[int]oaispec.Response + + for _, line := range lines { + var err error + def, scr, err = ss.parseResponseLine(line, def, scr) + if err != nil { + return err + } + } + + ss.set(def, scr) + + return nil +} + +func (ss *SetOpResponses) parseResponseLine(line string, def *oaispec.Response, scr map[int]oaispec.Response) (*oaispec.Response, map[int]oaispec.Response, error) { + kv := strings.SplitN(line, ":", kvParts) + if len(kv) <= 1 { + return def, scr, nil + } + + key := strings.TrimSpace(kv[0]) + if key == "" { + return def, scr, nil + } + + value := strings.TrimSpace(kv[1]) + if value == "" { + def, scr = assignResponse(key, oaispec.Response{}, def, scr) + return def, scr, nil + } + + refTarget, arrays, isDefinitionRef, description, err := parseTags(value) + if err != nil { + return 
def, scr, err + } + + // A possible exception for having a definition + if _, ok := ss.responses[refTarget]; !ok { + if _, ok := ss.definitions[refTarget]; ok { + isDefinitionRef = true + } + } + + var ref oaispec.Ref + if isDefinitionRef { + if description == "" { + description = refTarget + } + ref, err = oaispec.NewRef("#/definitions/" + refTarget) + } else { + ref, err = oaispec.NewRef("#/responses/" + refTarget) + } + if err != nil { + return def, scr, err + } + + // description should used on anyway. + resp := oaispec.Response{ResponseProps: oaispec.ResponseProps{Description: description}} + + if isDefinitionRef { + resp.Schema = new(oaispec.Schema) + resp.Description = description + if arrays == 0 { + resp.Schema.Ref = ref + } else { + cs := resp.Schema + for range arrays { + cs.Typed("array", "") + cs.Items = new(oaispec.SchemaOrArray) + cs.Items.Schema = new(oaispec.Schema) + cs = cs.Items.Schema + } + cs.Ref = ref + } + // ref. could be empty while use description tag + } else if len(refTarget) > 0 { + resp.Ref = ref + } + + def, scr = assignResponse(key, resp, def, scr) + return def, scr, nil +} + +func parseTags(line string) (modelOrResponse string, arrays int, isDefinitionRef bool, description string, err error) { + tags := strings.Split(line, " ") + parsedModelOrResponse := false + + for i, tagAndValue := range tags { + tagValList := strings.SplitN(tagAndValue, ":", kvParts) + var tag, value string + if len(tagValList) > 1 { + tag = tagValList[0] + value = tagValList[1] + } else { + // TODO: Print a warning, and in the long term, do not support not tagged values + // Add a default tag if none is supplied + if i == 0 { + tag = responseTag + } else { + tag = descriptionTag + } + value = tagValList[0] + } + + foundModelOrResponse := false + if !parsedModelOrResponse { + if tag == bodyTag { + foundModelOrResponse = true + isDefinitionRef = true + } + if tag == responseTag { + foundModelOrResponse = true + isDefinitionRef = false + } + } + if 
foundModelOrResponse { + // Read the model or response tag + parsedModelOrResponse = true + // Check for nested arrays + arrays = 0 + for strings.HasPrefix(value, "[]") { + arrays++ + value = value[2:] + } + // What's left over is the model name + modelOrResponse = value + continue + } + + if tag == descriptionTag { + // Descriptions are special, they read the rest of the line + descriptionWords := []string{value} + if i < len(tags)-1 { + descriptionWords = append(descriptionWords, tags[i+1:]...) + } + description = strings.Join(descriptionWords, " ") + break + } + + if tag == responseTag || tag == bodyTag { + err = fmt.Errorf("valid tag %s, but not in a valid position: %w", tag, ErrParser) + } else { + err = fmt.Errorf("invalid tag: %s: %w", tag, ErrParser) + } + // return error + return modelOrResponse, arrays, isDefinitionRef, description, err + } + + // TODO: Maybe do, if !parsedModelOrResponse {return some error} + return modelOrResponse, arrays, isDefinitionRef, description, err +} + +func assignResponse(key string, resp oaispec.Response, def *oaispec.Response, scr map[int]oaispec.Response) (*oaispec.Response, map[int]oaispec.Response) { + if strings.EqualFold("default", key) { + if def == nil { + def = &resp + } + return def, scr + } + + if sc, err := strconv.Atoi(key); err == nil { + if scr == nil { + scr = make(map[int]oaispec.Response) + } + scr[sc] = resp + } + + return def, scr +} diff --git a/internal/parsers/responses_test.go b/internal/parsers/responses_test.go new file mode 100644 index 0000000..7fca1f7 --- /dev/null +++ b/internal/parsers/responses_test.go @@ -0,0 +1,264 @@ +// SPDX-FileCopyrightText: Copyright 2015-2025 go-swagger maintainers +// SPDX-License-Identifier: Apache-2.0 + +package parsers + +import ( + "testing" + + "github.com/go-openapi/testify/v2/assert" + "github.com/go-openapi/testify/v2/require" + + oaispec "github.com/go-openapi/spec" +) + +func TestSetOpResponses_Matches(t *testing.T) { + t.Parallel() + + sr := 
NewSetResponses(nil, nil, nil) + assert.TrueT(t, sr.Matches("responses:")) + assert.TrueT(t, sr.Matches("Responses:")) + assert.FalseT(t, sr.Matches("something else")) +} + +func TestSetOpResponses_Parse(t *testing.T) { + t.Parallel() + + t.Run("empty", func(t *testing.T) { + var called bool + sr := NewSetResponses(nil, nil, func(_ *oaispec.Response, _ map[int]oaispec.Response) { called = true }) + require.NoError(t, sr.Parse(nil)) + assert.FalseT(t, called) + require.NoError(t, sr.Parse([]string{})) + require.NoError(t, sr.Parse([]string{""})) + }) + + t.Run("response ref", func(t *testing.T) { + responses := map[string]oaispec.Response{ + "notFound": {ResponseProps: oaispec.ResponseProps{Description: "not found"}}, + } + var gotDef *oaispec.Response + var gotScr map[int]oaispec.Response + + sr := NewSetResponses(nil, responses, func(def *oaispec.Response, scr map[int]oaispec.Response) { + gotDef = def + gotScr = scr + }) + + require.NoError(t, sr.Parse([]string{"404: notFound"})) + assert.Nil(t, gotDef) + require.NotNil(t, gotScr) + resp, ok := gotScr[404] + require.TrueT(t, ok) + assert.NotEmpty(t, resp.Ref.String()) + }) + + t.Run("default response", func(t *testing.T) { + responses := map[string]oaispec.Response{ + "genericError": {}, + } + var gotDef *oaispec.Response + + sr := NewSetResponses(nil, responses, func(def *oaispec.Response, _ map[int]oaispec.Response) { + gotDef = def + }) + + require.NoError(t, sr.Parse([]string{"default: genericError"})) + require.NotNil(t, gotDef) + }) + + t.Run("body ref", func(t *testing.T) { + definitions := map[string]oaispec.Schema{ + "Pet": {}, + } + var gotScr map[int]oaispec.Response + + sr := NewSetResponses(definitions, nil, func(_ *oaispec.Response, scr map[int]oaispec.Response) { + gotScr = scr + }) + + require.NoError(t, sr.Parse([]string{"200: body:Pet"})) + require.NotNil(t, gotScr) + resp := gotScr[200] + require.NotNil(t, resp.Schema) + assert.Contains(t, resp.Schema.Ref.String(), "definitions/Pet") + }) + + 
t.Run("body array ref", func(t *testing.T) { + definitions := map[string]oaispec.Schema{ + "Pet": {}, + } + var gotScr map[int]oaispec.Response + + sr := NewSetResponses(definitions, nil, func(_ *oaispec.Response, scr map[int]oaispec.Response) { + gotScr = scr + }) + + require.NoError(t, sr.Parse([]string{"200: body:[]Pet"})) + require.NotNil(t, gotScr) + resp := gotScr[200] + require.NotNil(t, resp.Schema) + assert.EqualT(t, "array", resp.Schema.Type[0]) + require.NotNil(t, resp.Schema.Items) + assert.Contains(t, resp.Schema.Items.Schema.Ref.String(), "definitions/Pet") + }) + + t.Run("with description tag", func(t *testing.T) { + responses := map[string]oaispec.Response{ + "notFound": {}, + } + var gotScr map[int]oaispec.Response + + sr := NewSetResponses(nil, responses, func(_ *oaispec.Response, scr map[int]oaispec.Response) { + gotScr = scr + }) + + require.NoError(t, sr.Parse([]string{"404: response:notFound description:Not Found"})) + require.NotNil(t, gotScr) + assert.EqualT(t, "Not Found", gotScr[404].Description) + }) +} + +func TestParseTags(t *testing.T) { + t.Parallel() + + tests := []struct { + name string + line string + wantModel string + wantArrays int + wantIsDefRef bool + wantDesc string + wantErr bool + }{ + {"response ref", "notFound", "notFound", 0, false, "", false}, + {"body ref", "body:Pet", "Pet", 0, true, "", false}, + {"body array", "body:[]Pet", "Pet", 1, true, "", false}, + {"body nested array", "body:[][]Pet", "Pet", 2, true, "", false}, + {"with description", "notFound description:Resource not found", "notFound", 0, false, "Resource not found", false}, + {"invalid tag", "invalid:tag value:wrong", "", 0, false, "", true}, + } + + for _, tc := range tests { + t.Run(tc.name, func(t *testing.T) { + model, arrays, isDefRef, desc, err := parseTags(tc.line) + if tc.wantErr { + require.Error(t, err) + return + } + require.NoError(t, err) + assert.EqualT(t, tc.wantModel, model) + assert.EqualT(t, tc.wantArrays, arrays) + assert.EqualT(t, 
tc.wantIsDefRef, isDefRef) + assert.EqualT(t, tc.wantDesc, desc) + }) + } +} + +func TestAssignResponse(t *testing.T) { + t.Parallel() + + t.Run("default", func(t *testing.T) { + resp := oaispec.Response{ResponseProps: oaispec.ResponseProps{Description: "error"}} + def, scr := assignResponse("default", resp, nil, nil) + require.NotNil(t, def) + assert.EqualT(t, "error", def.Description) + assert.Nil(t, scr) + }) + + t.Run("default already set", func(t *testing.T) { + existing := &oaispec.Response{ResponseProps: oaispec.ResponseProps{Description: "existing"}} + def, _ := assignResponse("default", oaispec.Response{}, existing, nil) + assert.EqualT(t, "existing", def.Description) // not overwritten + }) + + t.Run("status code", func(t *testing.T) { + resp := oaispec.Response{ResponseProps: oaispec.ResponseProps{Description: "ok"}} + def, scr := assignResponse("200", resp, nil, nil) + assert.Nil(t, def) + require.NotNil(t, scr) + assert.EqualT(t, "ok", scr[200].Description) + }) + + t.Run("non-numeric key ignored", func(t *testing.T) { + def, scr := assignResponse("notANumber", oaispec.Response{}, nil, nil) + assert.Nil(t, def) + assert.Nil(t, scr) + }) +} + +func TestSetOpResponses_ParseEdgeCases(t *testing.T) { + t.Parallel() + + t.Run("line without colon", func(t *testing.T) { + var gotScr map[int]oaispec.Response + sr := NewSetResponses(nil, nil, func(_ *oaispec.Response, scr map[int]oaispec.Response) { + gotScr = scr + }) + require.NoError(t, sr.Parse([]string{"no-colon-here"})) + assert.Nil(t, gotScr) + }) + + t.Run("empty key after trim", func(t *testing.T) { + var gotScr map[int]oaispec.Response + sr := NewSetResponses(nil, nil, func(_ *oaispec.Response, scr map[int]oaispec.Response) { + gotScr = scr + }) + require.NoError(t, sr.Parse([]string{" : someValue"})) + assert.Nil(t, gotScr) + }) + + t.Run("empty value assigns empty response", func(t *testing.T) { + var gotScr map[int]oaispec.Response + sr := NewSetResponses(nil, nil, func(_ *oaispec.Response, scr 
map[int]oaispec.Response) { + gotScr = scr + }) + require.NoError(t, sr.Parse([]string{"200:"})) + require.NotNil(t, gotScr) + _, ok := gotScr[200] + assert.TrueT(t, ok) + }) + + t.Run("parse error propagated", func(t *testing.T) { + sr := NewSetResponses(nil, nil, func(_ *oaispec.Response, _ map[int]oaispec.Response) {}) + // "invalid:tag" is not response/body/description → error + err := sr.Parse([]string{"200: invalid:tag"}) + require.Error(t, err) + assert.ErrorIs(t, err, ErrParser) + }) + + t.Run("definition found by fallback lookup", func(t *testing.T) { + // refTarget is not in responses but IS in definitions → isDefinitionRef becomes true + definitions := map[string]oaispec.Schema{ + "ErrorModel": {}, + } + var gotScr map[int]oaispec.Response + sr := NewSetResponses(definitions, nil, func(_ *oaispec.Response, scr map[int]oaispec.Response) { + gotScr = scr + }) + require.NoError(t, sr.Parse([]string{"500: ErrorModel"})) + require.NotNil(t, gotScr) + resp := gotScr[500] + require.NotNil(t, resp.Schema) + assert.Contains(t, resp.Schema.Ref.String(), "definitions/ErrorModel") + }) +} + +func TestParseTags_UntaggedValues(t *testing.T) { + t.Parallel() + + t.Run("second value defaults to description tag", func(t *testing.T) { + // "notFound Something" → first untagged = responseTag, second untagged = descriptionTag + model, _, _, desc, err := parseTags("notFound Something here") + require.NoError(t, err) + assert.EqualT(t, "notFound", model) + assert.EqualT(t, "Something here", desc) + }) + + t.Run("response tag out of position", func(t *testing.T) { + // response: after first value already parsed + _, _, _, _, err := parseTags("body:Pet response:duplicate") + require.Error(t, err) + assert.ErrorIs(t, err, ErrParser) + }) +} diff --git a/route_params.go b/internal/parsers/route_params.go similarity index 85% rename from route_params.go rename to internal/parsers/route_params.go index 2a6248a..4354ab8 100644 --- a/route_params.go +++ 
b/internal/parsers/route_params.go @@ -1,7 +1,7 @@ // SPDX-FileCopyrightText: Copyright 2015-2025 go-swagger maintainers // SPDX-License-Identifier: Apache-2.0 -package codescan +package parsers import ( "fmt" @@ -9,7 +9,7 @@ import ( "strconv" "strings" - "github.com/go-openapi/spec" + oaispec "github.com/go-openapi/spec" ) const ( @@ -41,9 +41,6 @@ const ( // schemaMaxLenKey indicates the minimum length this field in swagger:route. schemaMaxLenKey = "maxlength" - // paramInQuery is the default parameter location for query parameters. - paramInQuery = "query" - // typeArray is the identifier for an array type in swagger:route. typeArray = "array" // typeNumber is the identifier for a number type in swagger:route. @@ -65,28 +62,28 @@ var ( basicTypes = []string{typeInteger, typeNumber, typeString, typeBoolean, typeBool, typeArray} //nolint:gochecknoglobals // immutable lookup table ) -func newSetParams(params []*spec.Parameter, setter func([]*spec.Parameter)) *setOpParams { - return &setOpParams{ +type SetOpParams struct { + set func([]*oaispec.Parameter) + parameters []*oaispec.Parameter +} + +func NewSetParams(params []*oaispec.Parameter, setter func([]*oaispec.Parameter)) *SetOpParams { + return &SetOpParams{ set: setter, parameters: params, } } -type setOpParams struct { - set func([]*spec.Parameter) - parameters []*spec.Parameter -} - -func (s *setOpParams) Matches(line string) bool { +func (s *SetOpParams) Matches(line string) bool { return rxParameters.MatchString(line) } -func (s *setOpParams) Parse(lines []string) error { +func (s *SetOpParams) Parse(lines []string) error { if len(lines) == 0 || (len(lines) == 1 && len(lines[0]) == 0) { return nil } - var current *spec.Parameter + var current *oaispec.Parameter var extraData map[string]string for _, line := range lines { @@ -94,7 +91,7 @@ func (s *setOpParams) Parse(lines []string) error { if strings.HasPrefix(l, "+") { s.finalizeParam(current, extraData) - current = new(spec.Parameter) + current = 
new(oaispec.Parameter) extraData = make(map[string]string) l = strings.TrimPrefix(l, "+") } @@ -109,7 +106,7 @@ func (s *setOpParams) Parse(lines []string) error { value := strings.TrimSpace(kv[1]) if current == nil { - return fmt.Errorf("invalid route/operation schema provided: %w", ErrCodeScan) + return fmt.Errorf("invalid route/operation schema provided: %w", ErrParser) } applyParamField(current, extraData, key, value) @@ -117,10 +114,11 @@ func (s *setOpParams) Parse(lines []string) error { s.finalizeParam(current, extraData) s.set(s.parameters) + return nil } -func applyParamField(current *spec.Parameter, extraData map[string]string, key, value string) { +func applyParamField(current *oaispec.Parameter, extraData map[string]string, key, value string) { switch key { case paramDescriptionKey: current.Description = value @@ -137,18 +135,18 @@ func applyParamField(current *spec.Parameter, extraData map[string]string, key, } case paramTypeKey: if current.Schema == nil { - current.Schema = new(spec.Schema) + current.Schema = new(oaispec.Schema) } if contains(basicTypes, value) { current.Type = strings.ToLower(value) if current.Type == typeBool { current.Type = typeBoolean } - } else if ref, err := spec.NewRef("#/definitions/" + value); err == nil { + } else if ref, err := oaispec.NewRef("#/definitions/" + value); err == nil { current.Type = typeObject current.Schema.Ref = ref } - current.Schema.Type = spec.StringOrArray{current.Type} + current.Schema.Type = oaispec.StringOrArray{current.Type} case paramAllowEmptyKey: if v, err := strconv.ParseBool(value); err == nil { current.AllowEmptyValue = v @@ -158,7 +156,7 @@ func applyParamField(current *spec.Parameter, extraData map[string]string, key, } } -func (s *setOpParams) finalizeParam(param *spec.Parameter, data map[string]string) { +func (s *SetOpParams) finalizeParam(param *oaispec.Parameter, data map[string]string) { if param == nil { return } @@ -168,7 +166,7 @@ func (s *setOpParams) finalizeParam(param 
*spec.Parameter, data map[string]strin // schema is only allowed for parameters in "body" // see https://swagger.io/specification/v2/#parameterObject switch { - case param.In == bodyTag: + case param.In == "body": param.Type = "" case param.Schema != nil: @@ -182,7 +180,7 @@ func (s *setOpParams) finalizeParam(param *spec.Parameter, data map[string]strin s.parameters = append(s.parameters, param) } -func processSchema(data map[string]string, param *spec.Parameter) { +func processSchema(data map[string]string, param *oaispec.Parameter) { if param.Schema == nil { return } @@ -227,7 +225,7 @@ func processSchema(data map[string]string, param *spec.Parameter) { convertEnum(param.Schema, enumValues) } -func convertEnum(schema *spec.Schema, enumValues []string) { +func convertEnum(schema *oaispec.Schema, enumValues []string) { if len(enumValues) == 0 { return } @@ -257,7 +255,7 @@ func convert(typeStr, valueStr string) any { return valueStr } -func getType(schema *spec.Schema) string { +func getType(schema *oaispec.Schema) string { if len(schema.Type) == 0 { return "" } diff --git a/internal/parsers/route_params_test.go b/internal/parsers/route_params_test.go new file mode 100644 index 0000000..da68209 --- /dev/null +++ b/internal/parsers/route_params_test.go @@ -0,0 +1,250 @@ +// SPDX-FileCopyrightText: Copyright 2015-2025 go-swagger maintainers +// SPDX-License-Identifier: Apache-2.0 + +package parsers + +import ( + "testing" + + "github.com/go-openapi/testify/v2/assert" + "github.com/go-openapi/testify/v2/require" + + oaispec "github.com/go-openapi/spec" +) + +func TestSetOpParams_Matches(t *testing.T) { + t.Parallel() + + sp := NewSetParams(nil, nil) + assert.TrueT(t, sp.Matches("parameters:")) + assert.TrueT(t, sp.Matches("Parameters:")) + assert.FalseT(t, sp.Matches("something else")) +} + +func TestSetOpParams_Parse(t *testing.T) { + t.Parallel() + + t.Run("empty", func(t *testing.T) { + sp := NewSetParams(nil, func(_ []*oaispec.Parameter) {}) + require.NoError(t, 
sp.Parse(nil)) + require.NoError(t, sp.Parse([]string{})) + require.NoError(t, sp.Parse([]string{""})) + }) + + t.Run("single param", func(t *testing.T) { + var got []*oaispec.Parameter + sp := NewSetParams(nil, func(params []*oaispec.Parameter) { got = params }) + + lines := []string{ + "+ name: id", + " in: path", + " type: integer", + " required: true", + " description: The pet ID", + } + require.NoError(t, sp.Parse(lines)) + require.Len(t, got, 1) + assert.EqualT(t, "id", got[0].Name) + assert.EqualT(t, "path", got[0].In) + assert.TrueT(t, got[0].Required) + assert.EqualT(t, "The pet ID", got[0].Description) + assert.EqualT(t, "integer", got[0].Type) + }) + + t.Run("multiple params", func(t *testing.T) { + var got []*oaispec.Parameter + sp := NewSetParams(nil, func(params []*oaispec.Parameter) { got = params }) + + lines := []string{ + "+ name: limit", + " in: query", + " type: integer", + "+ name: offset", + " in: query", + " type: integer", + } + require.NoError(t, sp.Parse(lines)) + require.Len(t, got, 2) + assert.EqualT(t, "limit", got[0].Name) + assert.EqualT(t, "offset", got[1].Name) + }) + + t.Run("body param", func(t *testing.T) { + var got []*oaispec.Parameter + sp := NewSetParams(nil, func(params []*oaispec.Parameter) { got = params }) + + lines := []string{ + "+ name: body", + " in: body", + " type: Pet", + } + require.NoError(t, sp.Parse(lines)) + require.Len(t, got, 1) + assert.EqualT(t, "body", got[0].In) + assert.EqualT(t, "", got[0].Type) // body params clear type + require.NotNil(t, got[0].Schema) + }) + + t.Run("boolean type", func(t *testing.T) { + var got []*oaispec.Parameter + sp := NewSetParams(nil, func(params []*oaispec.Parameter) { got = params }) + + lines := []string{ + "+ name: active", + " in: query", + " type: bool", + } + require.NoError(t, sp.Parse(lines)) + require.Len(t, got, 1) + assert.EqualT(t, "boolean", got[0].Type) // "bool" → "boolean" + }) + + t.Run("allow empty value", func(t *testing.T) { + var got 
[]*oaispec.Parameter + sp := NewSetParams(nil, func(params []*oaispec.Parameter) { got = params }) + + lines := []string{ + "+ name: q", + " in: query", + " type: string", + " allowempty: true", + } + require.NoError(t, sp.Parse(lines)) + require.Len(t, got, 1) + assert.TrueT(t, got[0].AllowEmptyValue) + }) + + t.Run("error no leading +", func(t *testing.T) { + sp := NewSetParams(nil, func(_ []*oaispec.Parameter) {}) + err := sp.Parse([]string{"name: id"}) + require.Error(t, err) + assert.ErrorIs(t, err, ErrParser) + }) + + t.Run("with schema extras", func(t *testing.T) { + var got []*oaispec.Parameter + sp := NewSetParams(nil, func(params []*oaispec.Parameter) { got = params }) + + lines := []string{ + "+ name: age", + " in: query", + " type: integer", + " min: 0", + " max: 150", + " default: 25", + " enum: 18,25,30,65", + " format: int32", + } + require.NoError(t, sp.Parse(lines)) + require.Len(t, got, 1) + assert.EqualT(t, "int32", got[0].Format) + assert.Equal(t, float64(25), got[0].Default) + }) +} + +func TestConvert(t *testing.T) { + t.Parallel() + + tests := []struct { + typeStr string + valueStr string + want any + }{ + {"integer", "42", float64(42)}, + {"number", "3.14", float64(3.14)}, + {"boolean", "true", true}, + {"bool", "false", false}, + {"string", "hello", "hello"}, + {"integer", "not-a-number", "not-a-number"}, + {"boolean", "maybe", "maybe"}, + {"", "raw", "raw"}, + } + + for _, tc := range tests { + t.Run(tc.typeStr+"/"+tc.valueStr, func(t *testing.T) { + assert.Equal(t, tc.want, convert(tc.typeStr, tc.valueStr)) + }) + } +} + +func TestGetType(t *testing.T) { + t.Parallel() + + t.Run("empty type", func(t *testing.T) { + s := &oaispec.Schema{} + assert.EqualT(t, "", getType(s)) + }) + + t.Run("with type", func(t *testing.T) { + s := &oaispec.Schema{} + s.Type = oaispec.StringOrArray{"string"} + assert.EqualT(t, "string", getType(s)) + }) +} + +func TestSetOpParams_ParseLineWithoutColon(t *testing.T) { + t.Parallel() + + var got 
[]*oaispec.Parameter + sp := NewSetParams(nil, func(params []*oaispec.Parameter) { got = params }) + // A line after "+" that has no colon is silently skipped + lines := []string{ + "+ name: test", + " no-colon-here", + " in: query", + " type: string", + } + require.NoError(t, sp.Parse(lines)) + require.Len(t, got, 1) + assert.EqualT(t, "test", got[0].Name) +} + +func TestSetOpParams_ArraySchemaExtras(t *testing.T) { + t.Parallel() + + var got []*oaispec.Parameter + sp := NewSetParams(nil, func(params []*oaispec.Parameter) { got = params }) + lines := []string{ + "+ name: tags", + " in: query", + " type: array", + " minlength: 1", + " maxlength: 10", + " enum: a,b,c", + " description: A list of tags", + } + require.NoError(t, sp.Parse(lines)) + require.Len(t, got, 1) + // For non-body params, schema is converted to validations + assert.EqualT(t, "array", got[0].Type) +} + +func TestSetOpParams_NilSchemaGuard(t *testing.T) { + t.Parallel() + + // A param with no type: field won't have a schema + var got []*oaispec.Parameter + sp := NewSetParams(nil, func(params []*oaispec.Parameter) { got = params }) + lines := []string{ + "+ name: simple", + " in: query", + } + require.NoError(t, sp.Parse(lines)) + require.Len(t, got, 1) + assert.Nil(t, got[0].Schema) +} + +func TestSetOpParams_InvalidIn(t *testing.T) { + t.Parallel() + + var got []*oaispec.Parameter + sp := NewSetParams(nil, func(params []*oaispec.Parameter) { got = params }) + lines := []string{ + "+ name: test", + " in: cookie", + " type: string", + } + require.NoError(t, sp.Parse(lines)) + require.Len(t, got, 1) + assert.EqualT(t, "", got[0].In) // "cookie" is not in validIn +} diff --git a/internal/parsers/sectioned_parser.go b/internal/parsers/sectioned_parser.go new file mode 100644 index 0000000..97a407b --- /dev/null +++ b/internal/parsers/sectioned_parser.go @@ -0,0 +1,286 @@ +package parsers + +import ( + "go/ast" + "strings" + + "github.com/go-openapi/codescan/internal/ifaces" +) + +// 
SectionedParserOption configures a [SectionedParser] via [NewSectionedParser]. +type SectionedParserOption func(*SectionedParser) + +// WithSetTitle provides a callback that receives the extracted title lines +// after parsing completes. If no title callback is set, the parser does not +// attempt to separate the title from the description. +func WithSetTitle(setTitle func([]string)) SectionedParserOption { + return func(p *SectionedParser) { + p.setTitle = setTitle + } +} + +// WithSetDescription provides a callback that receives the extracted +// description lines after parsing completes. +func WithSetDescription(setDescription func([]string)) SectionedParserOption { + return func(p *SectionedParser) { + p.setDescription = setDescription + } +} + +// WithTaggers registers the [TagParser] instances that this SectionedParser +// will try to match against each line after the header section ends. +func WithTaggers(taggers ...TagParser) SectionedParserOption { + return func(p *SectionedParser) { + p.taggers = taggers + } +} + +// SectionedParser is the core comment-block parser for go-swagger annotations. +// It processes an [ast.CommentGroup] and splits its content into three sections: +// +// 1. Header — free-form text at the top of the comment block, later split +// into a title and description. +// 2. Tags — structured key:value lines (e.g. "minimum: 10", "consumes:", +// "schemes: http, https") recognized by registered [TagParser] instances. +// 3. Annotation — an optional swagger:* annotation line (e.g. "swagger:model +// Foo") handled by a dedicated [ifaces.ValueParser]. +// +// # Parsing algorithm +// +// Parse walks each line of the comment block in order. For every line: +// +// 1. If the line contains a swagger:* annotation: +// - "swagger:ignore" → mark as ignored, stop parsing. +// - If an annotation parser is registered and matches → delegate to it. +// - Otherwise → stop parsing (the annotation belongs to a different parser). +// +// 2. 
If any registered [TagParser] matches the line: +// - For a single-line tagger: collect the line, then reset the current +// tagger so the next line can match a different tag. +// - For a multi-line tagger: the matching (header) line is consumed but NOT +// collected; all subsequent lines are collected into that tagger until a +// different tagger matches or the block ends. +// +// 3. Otherwise, if no tag has been seen yet, the line is appended to the +// header (free-form text). +// +// After the line walk completes, three things happen: +// +// 1. The header is split into title + description (see [collectScannerTitleDescription]). +// 2. For each matched tagger, its collected lines are cleaned up (comment +// prefixes stripped, unless SkipCleanUp is set) and passed to the +// tagger's Parse method, which writes the extracted value into the target +// spec object. +// 3. Title and description callbacks are invoked. +// +// # Example: Swagger meta block +// +// Given the comment block on a package doc.go: +// +// // Petstore API. +// // +// // The purpose of this application is to provide an API for pets. +// // +// // Schemes: http, https +// // Host: petstore.example.com +// // BasePath: /v2 +// // Version: 1.0.0 +// // License: MIT http://opensource.org/licenses/MIT +// // Contact: John Doe http://john.example.com +// // +// // Consumes: +// // - application/json +// // - application/xml +// // +// // swagger:meta +// +// The SectionedParser (configured by [NewMetaParser]) will: +// +// - Collect "Petstore API." as the title, and the next paragraph as the +// description (header section, lines 1-3). +// - Match "Schemes: http, https" via the single-line "Schemes" tagger. +// - Match "Host: ...", "BasePath: ...", etc. via their respective single-line taggers. +// - Match "Consumes:" via the multi-line "Consumes" tagger, collecting +// "- application/json" and "- application/xml" as its body. 
+// - Stop at "swagger:meta" (an annotation that doesn't match any registered +// annotation parser, so it terminates the block). +type SectionedParser struct { + header []string + matched map[string]TagParser + annotation ifaces.ValueParser + + seenTag bool + skipHeader bool + setTitle func([]string) + setDescription func([]string) + workedOutTitle bool + taggers []TagParser + currentTagger *TagParser + title []string + ignored bool +} + +// NewSectionedParser creates a SectionedParser configured by the given options. +// +// At minimum, callers should provide [WithSetTitle] and [WithTaggers]: +// +// sp := NewSectionedParser( +// WithSetTitle(func(lines []string) { op.Summary = JoinDropLast(lines) }), +// WithSetDescription(func(lines []string) { op.Description = JoinDropLast(lines) }), +// WithTaggers( +// NewSingleLineTagParser("maximum", NewSetMaximum(builder)), +// NewMultiLineTagParser("consumes", NewConsumesDropEmptyParser(setter), false), +// ), +// ) +func NewSectionedParser(opts ...SectionedParserOption) *SectionedParser { + var p SectionedParser + + for _, apply := range opts { + apply(&p) + } + + return &p +} + +// Title returns the title lines extracted from the header. The title is +// separated from the description by the first blank line, or inferred from +// punctuation and markdown heading prefixes when there is no blank line. +// +// Title triggers lazy title/description splitting on first call. +func (st *SectionedParser) Title() []string { + st.collectTitleDescription() + return st.title +} + +// Description returns the description lines extracted from the header (everything +// after the title). Like [SectionedParser.Title], it triggers lazy splitting on first call. +func (st *SectionedParser) Description() []string { + st.collectTitleDescription() + return st.header +} + +// Ignored reports whether a "swagger:ignore" annotation was encountered. 
+func (st *SectionedParser) Ignored() bool { + return st.ignored +} + +// Parse processes an [ast.CommentGroup] through the sectioned parsing algorithm +// described in the type documentation. Returns an error if any matched tagger's +// Parse method fails. +func (st *SectionedParser) Parse(doc *ast.CommentGroup) error { + if doc == nil { + return nil + } + +COMMENTS: + for _, c := range doc.List { + for line := range strings.SplitSeq(c.Text, "\n") { + if st.parseLine(line) { + break COMMENTS + } + } + } + + if st.setTitle != nil { + st.setTitle(st.Title()) + } + + if st.setDescription != nil { + st.setDescription(st.Description()) + } + + for _, mt := range st.matched { + if !mt.SkipCleanUp { + mt.Lines = cleanupScannerLines(mt.Lines, rxUncommentHeaders) + } + if err := mt.Parse(mt.Lines); err != nil { + return err + } + } + + return nil +} + +// parseLine processes a single comment line. It returns true when the +// caller should stop processing further comments (a swagger: annotation +// that doesn't belong to this parser, or swagger:ignore). +func (st *SectionedParser) parseLine(line string) (stop bool) { + // Step 1: check for swagger:* annotations. + if rxSwaggerAnnotation.MatchString(line) { + if rxIgnoreOverride.MatchString(line) { + st.ignored = true + return true // an explicit ignore terminates this parser + } + if st.annotation == nil || !st.annotation.Matches(line) { + return true // a new swagger: annotation terminates this parser + } + + _ = st.annotation.Parse([]string{line}) + if len(st.header) > 0 { + st.seenTag = true + } + return false + } + + // Step 2: try to match a registered tagger. + var matched bool + for _, tg := range st.taggers { + tagger := tg + if tagger.Matches(line) { + st.seenTag = true + st.currentTagger = &tagger + matched = true + break + } + } + + // Step 3: no tagger active → accumulate as header (free-form text). 
+ if st.currentTagger == nil { + if !st.skipHeader && !st.seenTag { + st.header = append(st.header, line) + } + return false + } + + // For multi-line taggers, the header line (the one that matched) is + // consumed but not collected — only subsequent lines are body. + if st.currentTagger.MultiLine && matched { + return false + } + + // Collect the line into the matched tagger's line buffer. + ts, ok := st.matched[st.currentTagger.Name] + if !ok { + ts = *st.currentTagger + } + ts.Lines = append(ts.Lines, line) + if st.matched == nil { + st.matched = make(map[string]TagParser) + } + st.matched[st.currentTagger.Name] = ts + + // Single-line taggers reset immediately; multi-line taggers stay active. + if !st.currentTagger.MultiLine { + st.currentTagger = nil + } + return false +} + +// collectTitleDescription lazily splits the accumulated header lines into +// title and description. The split is performed at most once. +// +// When setTitle is nil (no title callback registered), the header is only +// cleaned up (comment prefixes removed) but not split — everything stays +// in the description. +func (st *SectionedParser) collectTitleDescription() { + if st.workedOutTitle { + return + } + if st.setTitle == nil { + st.header = cleanupScannerLines(st.header, rxUncommentHeaders) + return + } + + st.workedOutTitle = true + st.title, st.header = collectScannerTitleDescription(st.header) +} diff --git a/parser_go119_test.go b/internal/parsers/sectioned_parser_go119_test.go similarity index 94% rename from parser_go119_test.go rename to internal/parsers/sectioned_parser_go119_test.go index 4379d5a..729e047 100644 --- a/parser_go119_test.go +++ b/internal/parsers/sectioned_parser_go119_test.go @@ -1,7 +1,7 @@ // SPDX-FileCopyrightText: Copyright 2015-2025 go-swagger maintainers // SPDX-License-Identifier: Apache-2.0 -package codescan +package parsers import ( "testing" @@ -26,7 +26,7 @@ The punctuation here does indeed matter. But it won't for go. 
var err error - st := §ionedParser{} + st := &SectionedParser{} st.setTitle = func(_ []string) {} err = st.Parse(ascg(text)) require.NoError(t, err) @@ -34,7 +34,7 @@ The punctuation here does indeed matter. But it won't for go. assert.Equal(t, []string{"This has a title that starts with a hash tag"}, st.Title()) assert.Equal(t, []string{"The punctuation here does indeed matter. But it won't for go."}, st.Description()) - st = §ionedParser{} + st = &SectionedParser{} st.setTitle = func(_ []string) {} err = st.Parse(ascg(text2)) require.NoError(t, err) diff --git a/parser_test.go b/internal/parsers/sectioned_parser_test.go similarity index 55% rename from parser_test.go rename to internal/parsers/sectioned_parser_test.go index 6385997..d28d82c 100644 --- a/parser_test.go +++ b/internal/parsers/sectioned_parser_test.go @@ -1,9 +1,10 @@ // SPDX-FileCopyrightText: Copyright 2015-2025 go-swagger maintainers // SPDX-License-Identifier: Apache-2.0 -package codescan +package parsers import ( + "errors" "fmt" "go/ast" "regexp" @@ -85,7 +86,7 @@ Sample code block: var err error - st := §ionedParser{} + st := &SectionedParser{} st.setTitle = func(_ []string) {} err = st.Parse(ascg(text)) require.NoError(t, err) @@ -93,7 +94,7 @@ Sample code block: assert.Equal(t, []string{"This has a title, separated by a whitespace line"}, st.Title()) assert.Equal(t, []string{"In this example the punctuation for the title should not matter for swagger.", "For go it will still make a difference though."}, st.Description()) - st = §ionedParser{} + st = &SectionedParser{} st.setTitle = func(_ []string) {} err = st.Parse(ascg(text2)) require.NoError(t, err) @@ -101,7 +102,7 @@ Sample code block: assert.Equal(t, []string{"This has a title without whitespace."}, st.Title()) assert.Equal(t, []string{"The punctuation here does indeed matter. 
But it won't for go."}, st.Description()) - st = §ionedParser{} + st = &SectionedParser{} st.setTitle = func(_ []string) {} err = st.Parse(ascg(text3)) require.NoError(t, err) @@ -113,7 +114,7 @@ Sample code block: "[Links works too](http://localhost)", }, st.Description()) - st = §ionedParser{} + st = &SectionedParser{} st.setTitle = func(_ []string) {} err = st.Parse(ascg(text4)) require.NoError(t, err) @@ -122,7 +123,37 @@ Sample code block: assert.Equal(t, []string{"+ first item", " + nested item", " + also nested item", "", "Sample code block:", "", " fmt.Println(\"Hello World!\")"}, st.Description()) } -func dummyBuilder() schemaValidations { +type schemaValidations struct { + current *spec.Schema +} + +func (sv schemaValidations) SetMaximum(val float64, exclusive bool) { + sv.current.Maximum = &val + sv.current.ExclusiveMaximum = exclusive +} + +func (sv schemaValidations) SetMinimum(val float64, exclusive bool) { + sv.current.Minimum = &val + sv.current.ExclusiveMinimum = exclusive +} +func (sv schemaValidations) SetMultipleOf(val float64) { sv.current.MultipleOf = &val } +func (sv schemaValidations) SetMinItems(val int64) { sv.current.MinItems = &val } +func (sv schemaValidations) SetMaxItems(val int64) { sv.current.MaxItems = &val } +func (sv schemaValidations) SetMinLength(val int64) { sv.current.MinLength = &val } +func (sv schemaValidations) SetMaxLength(val int64) { sv.current.MaxLength = &val } +func (sv schemaValidations) SetPattern(val string) { sv.current.Pattern = val } +func (sv schemaValidations) SetUnique(val bool) { sv.current.UniqueItems = val } +func (sv schemaValidations) SetDefault(val any) { sv.current.Default = val } +func (sv schemaValidations) SetExample(val any) { sv.current.Example = val } +func (sv schemaValidations) SetEnum(val string) { + var typ string + if len(sv.current.Type) > 0 { + typ = sv.current.Type[0] + } + sv.current.Enum = ParseEnum(val, &spec.SimpleSchema{Format: sv.current.Format, Type: typ}) +} + +func 
dummybuilder() schemaValidations { return schemaValidations{new(spec.Schema)} } @@ -143,12 +174,12 @@ maximum: 20 var err error - st := §ionedParser{} + st := &SectionedParser{} st.setTitle = func(_ []string) {} - st.taggers = []tagParser{ - {"Maximum", false, false, nil, &setMaximum{dummyBuilder(), regexp.MustCompile(fmt.Sprintf(rxMaximumFmt, ""))}}, - {"Minimum", false, false, nil, &setMinimum{dummyBuilder(), regexp.MustCompile(fmt.Sprintf(rxMinimumFmt, ""))}}, - {"MultipleOf", false, false, nil, &setMultipleOf{dummyBuilder(), regexp.MustCompile(fmt.Sprintf(rxMultipleOfFmt, ""))}}, + st.taggers = []TagParser{ + {"Maximum", false, false, nil, &SetMaximum{builder: dummybuilder(), rx: regexp.MustCompile(fmt.Sprintf(rxMaximumFmt, ""))}}, + {"Minimum", false, false, nil, &SetMinimum{builder: dummybuilder(), rx: regexp.MustCompile(fmt.Sprintf(rxMinimumFmt, ""))}}, + {"MultipleOf", false, false, nil, &SetMultipleOf{builder: dummybuilder(), rx: regexp.MustCompile(fmt.Sprintf(rxMultipleOfFmt, ""))}}, } err = st.Parse(ascg(block)) @@ -161,12 +192,12 @@ maximum: 20 _, ok = st.matched["Minimum"] assert.TrueT(t, ok) - st = §ionedParser{} + st = &SectionedParser{} st.setTitle = func(_ []string) {} - st.taggers = []tagParser{ - {"Maximum", false, false, nil, &setMaximum{dummyBuilder(), regexp.MustCompile(fmt.Sprintf(rxMaximumFmt, ""))}}, - {"Minimum", false, false, nil, &setMinimum{dummyBuilder(), regexp.MustCompile(fmt.Sprintf(rxMinimumFmt, ""))}}, - {"MultipleOf", false, false, nil, &setMultipleOf{dummyBuilder(), regexp.MustCompile(fmt.Sprintf(rxMultipleOfFmt, ""))}}, + st.taggers = []TagParser{ + {"Maximum", false, false, nil, &SetMaximum{builder: dummybuilder(), rx: regexp.MustCompile(fmt.Sprintf(rxMaximumFmt, ""))}}, + {"Minimum", false, false, nil, &SetMinimum{builder: dummybuilder(), rx: regexp.MustCompile(fmt.Sprintf(rxMinimumFmt, ""))}}, + {"MultipleOf", false, false, nil, &SetMultipleOf{builder: dummybuilder(), rx: regexp.MustCompile(fmt.Sprintf(rxMultipleOfFmt, 
""))}}, } err = st.Parse(ascg(block2)) @@ -185,7 +216,7 @@ func TestSectionedParser_Empty(t *testing.T) { var err error - st := §ionedParser{} + st := &SectionedParser{} st.setTitle = func(_ []string) {} ap := newSchemaAnnotationParser("SomeResponse") ap.rx = rxResponseOverride @@ -208,14 +239,14 @@ func testSectionedParserWithBlock( ) { t.Helper() - st := §ionedParser{} + st := &SectionedParser{} st.setTitle = func(_ []string) {} ap := newSchemaAnnotationParser("SomeModel") st.annotation = ap - st.taggers = []tagParser{ - {"Maximum", false, false, nil, &setMaximum{dummyBuilder(), regexp.MustCompile(fmt.Sprintf(rxMaximumFmt, ""))}}, - {"Minimum", false, false, nil, &setMinimum{dummyBuilder(), regexp.MustCompile(fmt.Sprintf(rxMinimumFmt, ""))}}, - {"MultipleOf", false, false, nil, &setMultipleOf{dummyBuilder(), regexp.MustCompile(fmt.Sprintf(rxMultipleOfFmt, ""))}}, + st.taggers = []TagParser{ + {"Maximum", false, false, nil, &SetMaximum{builder: dummybuilder(), rx: regexp.MustCompile(fmt.Sprintf(rxMaximumFmt, ""))}}, + {"Minimum", false, false, nil, &SetMinimum{builder: dummybuilder(), rx: regexp.MustCompile(fmt.Sprintf(rxMinimumFmt, ""))}}, + {"MultipleOf", false, false, nil, &SetMultipleOf{builder: dummybuilder(), rx: regexp.MustCompile(fmt.Sprintf(rxMultipleOfFmt, ""))}}, } err := st.Parse(ascg(block)) @@ -256,6 +287,90 @@ maximum: 20 testSectionedParserWithBlock(t, block, 1, false) } +func TestSectionedParser_NilDoc(t *testing.T) { + st := NewSectionedParser( + WithSetTitle(func(_ []string) {}), + WithSetDescription(func(_ []string) {}), + ) + require.NoError(t, st.Parse(nil)) + assert.Empty(t, st.Title()) + assert.Empty(t, st.Description()) + assert.FalseT(t, st.Ignored()) +} + +func TestSectionedParser_IgnoredAnnotation(t *testing.T) { + const block = `swagger:ignore SomeType + +This should not matter. 
+` + st := NewSectionedParser( + WithSetTitle(func(_ []string) {}), + ) + err := st.Parse(ascg(block)) + require.NoError(t, err) + assert.TrueT(t, st.Ignored()) +} + +func TestSectionedParser_WithoutSetTitle(t *testing.T) { + // When setTitle is nil, collectTitleDescription cleans up headers + // but does not split title from description. + const block = `Just a description line. +Another line. +` + st := &SectionedParser{} + err := st.Parse(ascg(block)) + require.NoError(t, err) + assert.Nil(t, st.Title()) + assert.Equal(t, []string{"Just a description line.", "Another line."}, st.Description()) +} + +func TestSectionedParser_TagParseError(t *testing.T) { + // When a matched tagger's Parse returns an error, SectionedParser.Parse propagates it. + errParser := &failingParser{} + st := NewSectionedParser( + WithSetTitle(func(_ []string) {}), + WithTaggers( + NewSingleLineTagParser("Failing", errParser), + ), + ) + + const block = `Title. + +minimum: 10 +` + err := st.Parse(ascg(block)) + require.Error(t, err) + assert.ErrorIs(t, err, errForced) +} + +type failingParser struct{} + +var errForced = errors.New("forced error") + +func (f *failingParser) Matches(line string) bool { return rxMinimum.MatchString(line) } +func (f *failingParser) Parse(_ []string) error { return errForced } + +func TestSectionedParser_AnnotationMatchWithHeader(t *testing.T) { + // When the annotation matches and headers have been collected, + // seenTag is set to true — further non-tag lines are skipped. + const block = `swagger:model someModel + +Title. +Description. + +swagger:model anotherModel +This line after a re-match should still be part of the description. 
+` + ap := newSchemaAnnotationParser("SomeModel") + st := &SectionedParser{} + st.setTitle = func(_ []string) {} + st.annotation = ap + + err := st.Parse(ascg(block)) + require.NoError(t, err) + assert.EqualT(t, "someModel", ap.Name) +} + func ascg(txt string) *ast.CommentGroup { var cg ast.CommentGroup for line := range strings.SplitSeq(txt, "\n") { @@ -265,41 +380,3 @@ func ascg(txt string) *ast.CommentGroup { } return &cg } - -func TestShouldAcceptTag(t *testing.T) { - tagTests := []struct { - tags []string - includeTags map[string]bool - excludeTags map[string]bool - expected bool - }{ - {nil, nil, nil, true}, - {[]string{"app"}, map[string]bool{"app": true}, nil, true}, - {[]string{"app"}, nil, map[string]bool{"app": true}, false}, - } - for _, tt := range tagTests { - actual := shouldAcceptTag(tt.tags, tt.includeTags, tt.excludeTags) - assert.EqualT(t, tt.expected, actual) - } -} - -func TestShouldAcceptPkg(t *testing.T) { - pkgTests := []struct { - path string - includePkgs []string - excludePkgs []string - expected bool - }{ - {"", nil, nil, true}, - {"", nil, []string{"app"}, true}, - {"", []string{"app"}, nil, false}, - {"app", []string{"app"}, nil, true}, - {"app", nil, []string{"app"}, false}, - {"vendor/app", []string{"app"}, nil, true}, - {"vendor/app", nil, []string{"app"}, false}, - } - for _, tt := range pkgTests { - actual := shouldAcceptPkg(tt.path, tt.includePkgs, tt.excludePkgs) - assert.EqualT(t, tt.expected, actual) - } -} diff --git a/internal/parsers/security.go b/internal/parsers/security.go new file mode 100644 index 0000000..7d3f434 --- /dev/null +++ b/internal/parsers/security.go @@ -0,0 +1,103 @@ +// SPDX-FileCopyrightText: Copyright 2015-2025 go-swagger maintainers +// SPDX-License-Identifier: Apache-2.0 + +package parsers + +import ( + "regexp" + "strings" +) + +type SetSchemes struct { + set func([]string) + rx *regexp.Regexp +} + +func NewSetSchemes(set func([]string)) *SetSchemes { + return &SetSchemes{ + set: set, + rx: 
rxSchemes, + } +} + +func (ss *SetSchemes) Matches(line string) bool { + return ss.rx.MatchString(line) +} + +func (ss *SetSchemes) Parse(lines []string) error { + if len(lines) == 0 || (len(lines) == 1 && len(lines[0]) == 0) { + return nil + } + + matches := ss.rx.FindStringSubmatch(lines[0]) + if len(matches) > 1 && len(matches[1]) > 0 { + sch := strings.Split(matches[1], ", ") + + schemes := []string{} + for _, s := range sch { + ts := strings.TrimSpace(s) + if ts != "" { + schemes = append(schemes, ts) + } + } + ss.set(schemes) + } + + return nil +} + +type SetSecurity struct { + set func([]map[string][]string) + rx *regexp.Regexp +} + +func newSetSecurity(rx *regexp.Regexp, setter func([]map[string][]string)) *SetSecurity { + return &SetSecurity{ + set: setter, + rx: rx, + } +} + +func NewSetSecurityScheme(setter func([]map[string][]string)) *SetSecurity { + return &SetSecurity{ + set: setter, + rx: rxSecuritySchemes, + } +} + +func (ss *SetSecurity) Matches(line string) bool { + return ss.rx.MatchString(line) +} + +func (ss *SetSecurity) Parse(lines []string) error { + if len(lines) == 0 || (len(lines) == 1 && len(lines[0]) == 0) { + return nil + } + + var result []map[string][]string + const kvParts = 2 + for _, line := range lines { + kv := strings.SplitN(line, ":", kvParts) + scopes := []string{} + var key string + + if len(kv) > 1 { + scs := strings.SplitSeq(kv[1], ",") + for scope := range scs { + tr := strings.TrimSpace(scope) + if tr != "" { + tr = strings.SplitAfter(tr, " ")[0] + scopes = append(scopes, strings.TrimSpace(tr)) + } + } + + key = strings.TrimSpace(kv[0]) + + result = append(result, map[string][]string{key: scopes}) + } + } + + ss.set(result) + + return nil +} diff --git a/internal/parsers/security_test.go b/internal/parsers/security_test.go new file mode 100644 index 0000000..6d88115 --- /dev/null +++ b/internal/parsers/security_test.go @@ -0,0 +1,84 @@ +// SPDX-FileCopyrightText: Copyright 2015-2025 go-swagger maintainers +// 
SPDX-License-Identifier: Apache-2.0 + +package parsers + +import ( + "testing" + + "github.com/go-openapi/testify/v2/assert" + "github.com/go-openapi/testify/v2/require" +) + +func TestSetSchemes(t *testing.T) { + t.Parallel() + + t.Run("single scheme", func(t *testing.T) { + var got []string + ss := NewSetSchemes(func(v []string) { got = v }) + assert.TrueT(t, ss.Matches("schemes: http")) + require.NoError(t, ss.Parse([]string{"schemes: http"})) + assert.Equal(t, []string{"http"}, got) + }) + + t.Run("multiple schemes", func(t *testing.T) { + var got []string + ss := NewSetSchemes(func(v []string) { got = v }) + require.NoError(t, ss.Parse([]string{"schemes: http, https"})) + assert.Equal(t, []string{"http", "https"}, got) + }) + + t.Run("wss", func(t *testing.T) { + var got []string + ss := NewSetSchemes(func(v []string) { got = v }) + require.NoError(t, ss.Parse([]string{"Schemes: ws, wss"})) + assert.Equal(t, []string{"ws", "wss"}, got) + }) + + t.Run("empty", func(t *testing.T) { + var got []string + ss := NewSetSchemes(func(v []string) { got = v }) + require.NoError(t, ss.Parse(nil)) + require.NoError(t, ss.Parse([]string{})) + require.NoError(t, ss.Parse([]string{""})) + assert.Nil(t, got) + }) + + t.Run("no match", func(t *testing.T) { + ss := NewSetSchemes(nil) + assert.FalseT(t, ss.Matches("something else")) + }) +} + +func TestSetSecurity(t *testing.T) { + t.Parallel() + + t.Run("with scopes", func(t *testing.T) { + var got []map[string][]string + ss := NewSetSecurityScheme(func(v []map[string][]string) { got = v }) + assert.TrueT(t, ss.Matches("security:")) + require.NoError(t, ss.Parse([]string{ + "api_key:", + "oauth2: read:pets, write:pets", + })) + require.Len(t, got, 2) + assert.Equal(t, map[string][]string{"api_key": {}}, got[0]) + assert.Equal(t, map[string][]string{"oauth2": {"read:pets", "write:pets"}}, got[1]) + }) + + t.Run("empty", func(t *testing.T) { + var got []map[string][]string + ss := NewSetSecurityScheme(func(v []map[string][]string) 
{ got = v }) + require.NoError(t, ss.Parse(nil)) + require.NoError(t, ss.Parse([]string{})) + require.NoError(t, ss.Parse([]string{""})) + assert.Nil(t, got) + }) + + t.Run("no colon in line", func(t *testing.T) { + var got []map[string][]string + ss := NewSetSecurityScheme(func(v []map[string][]string) { got = v }) + require.NoError(t, ss.Parse([]string{"no-colon-here"})) + assert.Nil(t, got) // line without colon is skipped + }) +} diff --git a/internal/parsers/tag_parsers.go b/internal/parsers/tag_parsers.go new file mode 100644 index 0000000..16d8ad7 --- /dev/null +++ b/internal/parsers/tag_parsers.go @@ -0,0 +1,86 @@ +// SPDX-FileCopyrightText: Copyright 2015-2025 go-swagger maintainers +// SPDX-License-Identifier: Apache-2.0 + +package parsers + +import "github.com/go-openapi/codescan/internal/ifaces" + +// TagParser pairs a named tag with a [ifaces.ValueParser] that recognizes and +// extracts its value from comment lines. +// +// A TagParser operates in one of two modes: +// +// - Single-line: the tag matches exactly one line (e.g. "maximum: 10"). +// The [SectionedParser] resets its current tagger after every single-line +// match, so the next line is free to match a different tagger. +// +// - Multi-line: the tag's first matching line is a header (e.g. "consumes:") +// and all subsequent lines are collected as its body until a different +// tagger matches or the comment block ends. The header line itself is NOT +// included in Lines — only the body lines that follow it. +// +// SkipCleanUp controls whether the [SectionedParser] strips comment prefixes +// (// , *, etc.) from the collected Lines before calling Parse. YAML-based +// taggers set this to true because they need the original indentation intact. +// +// Lines is populated by the [SectionedParser] during its scan; after the scan +// completes, Parse is called with those lines to extract the value. 
+type TagParser struct { + Name string + MultiLine bool + SkipCleanUp bool + Lines []string + Parser ifaces.ValueParser +} + +// NewMultiLineTagParser creates a TagParser that collects all lines following +// the matching header until a different tag or annotation is encountered. +// +// Example usage (from [NewMetaParser]): +// +// NewMultiLineTagParser("TOS", +// newMultilineDropEmptyParser(rxTOS, metaTOSSetter(info)), +// false, // clean up comment prefixes before parsing +// ) +// +// This creates a tagger that recognizes "Terms of Service:" and collects every +// subsequent line into the TOS field, stripping comment prefixes. +func NewMultiLineTagParser(name string, parser ifaces.ValueParser, skipCleanUp bool) TagParser { + return TagParser{ + Name: name, + MultiLine: true, + SkipCleanUp: skipCleanUp, + Parser: parser, + } +} + +// NewSingleLineTagParser creates a TagParser that matches and parses exactly +// one line. After the match, the [SectionedParser] resets its current tagger +// so subsequent lines can match other taggers. +// +// Example usage (from [NewMetaParser]): +// +// NewSingleLineTagParser("Version", +// &setMetaSingle{Spec: swspec, Rx: rxVersion, Set: setInfoVersion}, +// ) +// +// This creates a tagger that recognizes "Version: 1.0.0" and writes the +// captured value into swspec.Info.Version. +func NewSingleLineTagParser(name string, parser ifaces.ValueParser) TagParser { + return TagParser{ + Name: name, + MultiLine: false, + SkipCleanUp: false, + Parser: parser, + } +} + +// Matches delegates to the underlying Parser. +func (st *TagParser) Matches(line string) bool { + return st.Parser.Matches(line) +} + +// Parse delegates to the underlying Parser. 
+func (st *TagParser) Parse(lines []string) error { + return st.Parser.Parse(lines) +} diff --git a/internal/parsers/validations.go b/internal/parsers/validations.go new file mode 100644 index 0000000..06fc3e9 --- /dev/null +++ b/internal/parsers/validations.go @@ -0,0 +1,610 @@ +// SPDX-FileCopyrightText: Copyright 2015-2025 go-swagger maintainers +// SPDX-License-Identifier: Apache-2.0 + +package parsers + +import ( + "fmt" + "regexp" + "strconv" + + "github.com/go-openapi/codescan/internal/ifaces" + oaispec "github.com/go-openapi/spec" +) + +var ( + rxMaximum = regexp.MustCompile(fmt.Sprintf(rxMaximumFmt, "")) + rxMinimum = regexp.MustCompile(fmt.Sprintf(rxMinimumFmt, "")) + rxMultipleOf = regexp.MustCompile(fmt.Sprintf(rxMultipleOfFmt, "")) + rxMinItems = regexp.MustCompile(fmt.Sprintf(rxMinItemsFmt, "")) + rxMaxItems = regexp.MustCompile(fmt.Sprintf(rxMaxItemsFmt, "")) + rxMaxLength = regexp.MustCompile(fmt.Sprintf(rxMaxLengthFmt, "")) + rxMinLength = regexp.MustCompile(fmt.Sprintf(rxMinLengthFmt, "")) + rxPattern = regexp.MustCompile(fmt.Sprintf(rxPatternFmt, "")) + rxCollectionFormat = regexp.MustCompile(fmt.Sprintf(rxCollectionFormatFmt, "")) + rxUnique = regexp.MustCompile(fmt.Sprintf(rxUniqueFmt, "")) + rxEnumValidation = regexp.MustCompile(fmt.Sprintf(rxEnumFmt, "")) + rxDefaultValidation = regexp.MustCompile(fmt.Sprintf(rxDefaultFmt, "")) + rxExample = regexp.MustCompile(fmt.Sprintf(rxExampleFmt, "")) +) + +type PrefixRxOption func(string) *regexp.Regexp + +func WithItemsPrefixLevel(level int) PrefixRxOption { + // the expression is 1-index based not 0-index + itemsPrefix := fmt.Sprintf(rxItemsPrefixFmt, level+1) + return func(expr string) *regexp.Regexp { + return Rxf(expr, itemsPrefix) // TODO(fred): cache + } +} + +type SetMaximum struct { + builder ifaces.ValidationBuilder + rx *regexp.Regexp +} + +func NewSetMaximum(builder ifaces.ValidationBuilder, opts ...PrefixRxOption) *SetMaximum { + rx := rxMaximum + for _, apply := range opts { + rx = 
apply(rxMaximumFmt) + } + + return &SetMaximum{ + builder: builder, + rx: rx, + } +} + +func (sm *SetMaximum) Parse(lines []string) error { + if len(lines) == 0 || (len(lines) == 1 && len(lines[0]) == 0) { + return nil + } + matches := sm.rx.FindStringSubmatch(lines[0]) + if len(matches) > 2 && len(matches[2]) > 0 { + maximum, err := strconv.ParseFloat(matches[2], 64) + if err != nil { + return err + } + sm.builder.SetMaximum(maximum, matches[1] == "<") + } + return nil +} + +func (sm *SetMaximum) Matches(line string) bool { + return sm.rx.MatchString(line) +} + +type SetMinimum struct { + builder ifaces.ValidationBuilder + rx *regexp.Regexp +} + +func NewSetMinimum(builder ifaces.ValidationBuilder, opts ...PrefixRxOption) *SetMinimum { + rx := rxMinimum + for _, apply := range opts { + rx = apply(rxMinimumFmt) + } + + return &SetMinimum{ + builder: builder, + rx: rx, + } +} + +func (sm *SetMinimum) Matches(line string) bool { + return sm.rx.MatchString(line) +} + +func (sm *SetMinimum) Parse(lines []string) error { + if len(lines) == 0 || (len(lines) == 1 && len(lines[0]) == 0) { + return nil + } + matches := sm.rx.FindStringSubmatch(lines[0]) + if len(matches) > 2 && len(matches[2]) > 0 { + minimum, err := strconv.ParseFloat(matches[2], 64) + if err != nil { + return err + } + sm.builder.SetMinimum(minimum, matches[1] == ">") + } + return nil +} + +type SetMultipleOf struct { + builder ifaces.ValidationBuilder + rx *regexp.Regexp +} + +func NewSetMultipleOf(builder ifaces.ValidationBuilder, opts ...PrefixRxOption) *SetMultipleOf { + rx := rxMultipleOf + for _, apply := range opts { + rx = apply(rxMultipleOfFmt) + } + + return &SetMultipleOf{ + builder: builder, + rx: rx, + } +} + +func (sm *SetMultipleOf) Matches(line string) bool { + return sm.rx.MatchString(line) +} + +func (sm *SetMultipleOf) Parse(lines []string) error { + if len(lines) == 0 || (len(lines) == 1 && len(lines[0]) == 0) { + return nil + } + matches := sm.rx.FindStringSubmatch(lines[0]) + if 
len(matches) > 1 && len(matches[1]) > 0 { + multipleOf, err := strconv.ParseFloat(matches[1], 64) + if err != nil { + return err + } + sm.builder.SetMultipleOf(multipleOf) + } + return nil +} + +type SetMaxItems struct { + builder ifaces.ValidationBuilder + rx *regexp.Regexp +} + +func NewSetMaxItems(builder ifaces.ValidationBuilder, opts ...PrefixRxOption) *SetMaxItems { + rx := rxMaxItems + for _, apply := range opts { + rx = apply(rxMaxItemsFmt) + } + + return &SetMaxItems{ + builder: builder, + rx: rx, + } +} + +func (sm *SetMaxItems) Matches(line string) bool { + return sm.rx.MatchString(line) +} + +func (sm *SetMaxItems) Parse(lines []string) error { + if len(lines) == 0 || (len(lines) == 1 && len(lines[0]) == 0) { + return nil + } + matches := sm.rx.FindStringSubmatch(lines[0]) + if len(matches) > 1 && len(matches[1]) > 0 { + maxItems, err := strconv.ParseInt(matches[1], 10, 64) + if err != nil { + return err + } + sm.builder.SetMaxItems(maxItems) + } + return nil +} + +type SetMinItems struct { + builder ifaces.ValidationBuilder + rx *regexp.Regexp +} + +func NewSetMinItems(builder ifaces.ValidationBuilder, opts ...PrefixRxOption) *SetMinItems { + rx := rxMinItems + for _, apply := range opts { + rx = apply(rxMinItemsFmt) + } + + return &SetMinItems{ + builder: builder, + rx: rx, + } +} + +func (sm *SetMinItems) Matches(line string) bool { + return sm.rx.MatchString(line) +} + +func (sm *SetMinItems) Parse(lines []string) error { + if len(lines) == 0 || (len(lines) == 1 && len(lines[0]) == 0) { + return nil + } + matches := sm.rx.FindStringSubmatch(lines[0]) + if len(matches) > 1 && len(matches[1]) > 0 { + minItems, err := strconv.ParseInt(matches[1], 10, 64) + if err != nil { + return err + } + sm.builder.SetMinItems(minItems) + } + return nil +} + +type SetMaxLength struct { + builder ifaces.ValidationBuilder + rx *regexp.Regexp +} + +func NewSetMaxLength(builder ifaces.ValidationBuilder, opts ...PrefixRxOption) *SetMaxLength { + rx := rxMaxLength + for 
_, apply := range opts { + rx = apply(rxMaxLengthFmt) + } + + return &SetMaxLength{ + builder: builder, + rx: rx, + } +} + +func (sm *SetMaxLength) Parse(lines []string) error { + if len(lines) == 0 || (len(lines) == 1 && len(lines[0]) == 0) { + return nil + } + matches := sm.rx.FindStringSubmatch(lines[0]) + if len(matches) > 1 && len(matches[1]) > 0 { + maxLength, err := strconv.ParseInt(matches[1], 10, 64) + if err != nil { + return err + } + sm.builder.SetMaxLength(maxLength) + } + return nil +} + +func (sm *SetMaxLength) Matches(line string) bool { + return sm.rx.MatchString(line) +} + +type SetMinLength struct { + builder ifaces.ValidationBuilder + rx *regexp.Regexp +} + +func NewSetMinLength(builder ifaces.ValidationBuilder, opts ...PrefixRxOption) *SetMinLength { + rx := rxMinLength + for _, apply := range opts { + rx = apply(rxMinLengthFmt) + } + + return &SetMinLength{ + builder: builder, + rx: rx, + } +} + +func (sm *SetMinLength) Parse(lines []string) error { + if len(lines) == 0 || (len(lines) == 1 && len(lines[0]) == 0) { + return nil + } + matches := sm.rx.FindStringSubmatch(lines[0]) + if len(matches) > 1 && len(matches[1]) > 0 { + minLength, err := strconv.ParseInt(matches[1], 10, 64) + if err != nil { + return err + } + sm.builder.SetMinLength(minLength) + } + return nil +} + +func (sm *SetMinLength) Matches(line string) bool { + return sm.rx.MatchString(line) +} + +type SetPattern struct { + builder ifaces.ValidationBuilder + rx *regexp.Regexp +} + +func NewSetPattern(builder ifaces.ValidationBuilder, opts ...PrefixRxOption) *SetPattern { + rx := rxPattern + for _, apply := range opts { + rx = apply(rxPatternFmt) + } + + return &SetPattern{ + builder: builder, + rx: rx, + } +} + +func (sm *SetPattern) Parse(lines []string) error { + if len(lines) == 0 || (len(lines) == 1 && len(lines[0]) == 0) { + return nil + } + matches := sm.rx.FindStringSubmatch(lines[0]) + if len(matches) > 1 && len(matches[1]) > 0 { + sm.builder.SetPattern(matches[1]) + } + 
return nil +} + +func (sm *SetPattern) Matches(line string) bool { + return sm.rx.MatchString(line) +} + +type SetCollectionFormat struct { + builder ifaces.OperationValidationBuilder + rx *regexp.Regexp +} + +func NewSetCollectionFormat(builder ifaces.OperationValidationBuilder, opts ...PrefixRxOption) *SetCollectionFormat { + rx := rxCollectionFormat + for _, apply := range opts { + rx = apply(rxCollectionFormatFmt) + } + + return &SetCollectionFormat{ + builder: builder, + rx: rx, + } +} + +func (sm *SetCollectionFormat) Parse(lines []string) error { + if len(lines) == 0 || (len(lines) == 1 && len(lines[0]) == 0) { + return nil + } + matches := sm.rx.FindStringSubmatch(lines[0]) + if len(matches) > 1 && len(matches[1]) > 0 { + sm.builder.SetCollectionFormat(matches[1]) + } + return nil +} + +func (sm *SetCollectionFormat) Matches(line string) bool { + return sm.rx.MatchString(line) +} + +type SetUnique struct { + builder ifaces.ValidationBuilder + rx *regexp.Regexp +} + +func NewSetUnique(builder ifaces.ValidationBuilder, opts ...PrefixRxOption) *SetUnique { + rx := rxUnique + for _, apply := range opts { + rx = apply(rxUniqueFmt) + } + + return &SetUnique{ + builder: builder, + rx: rx, + } +} + +func (su *SetUnique) Matches(line string) bool { + return su.rx.MatchString(line) +} + +func (su *SetUnique) Parse(lines []string) error { + if len(lines) == 0 || (len(lines) == 1 && len(lines[0]) == 0) { + return nil + } + matches := su.rx.FindStringSubmatch(lines[0]) + if len(matches) > 1 && len(matches[1]) > 0 { + req, err := strconv.ParseBool(matches[1]) + if err != nil { + return err + } + su.builder.SetUnique(req) + } + return nil +} + +type SetRequiredParam struct { + tgt *oaispec.Parameter +} + +func NewSetRequiredParam(param *oaispec.Parameter) *SetRequiredParam { + return &SetRequiredParam{ + tgt: param, + } +} + +func (su *SetRequiredParam) Matches(line string) bool { + return rxRequired.MatchString(line) +} + +func (su *SetRequiredParam) Parse(lines 
[]string) error { + if len(lines) == 0 || (len(lines) == 1 && len(lines[0]) == 0) { + return nil + } + matches := rxRequired.FindStringSubmatch(lines[0]) + if len(matches) > 1 && len(matches[1]) > 0 { + req, err := strconv.ParseBool(matches[1]) + if err != nil { + return err + } + su.tgt.Required = req + } + return nil +} + +type SetReadOnlySchema struct { + tgt *oaispec.Schema +} + +func NewSetReadOnlySchema(schema *oaispec.Schema) *SetReadOnlySchema { + return &SetReadOnlySchema{ + tgt: schema, + } +} + +func (su *SetReadOnlySchema) Matches(line string) bool { + return rxReadOnly.MatchString(line) +} + +func (su *SetReadOnlySchema) Parse(lines []string) error { + if len(lines) == 0 || (len(lines) == 1 && len(lines[0]) == 0) { + return nil + } + matches := rxReadOnly.FindStringSubmatch(lines[0]) + if len(matches) > 1 && len(matches[1]) > 0 { + req, err := strconv.ParseBool(matches[1]) + if err != nil { + return err + } + su.tgt.ReadOnly = req + } + return nil +} + +type SetRequiredSchema struct { + Schema *oaispec.Schema + Field string +} + +func NewSetRequiredSchema(schema *oaispec.Schema, field string) *SetRequiredSchema { + return &SetRequiredSchema{ + Schema: schema, + Field: field, + } +} + +func (su *SetRequiredSchema) Matches(line string) bool { + return rxRequired.MatchString(line) +} + +func (su *SetRequiredSchema) Parse(lines []string) error { + if len(lines) == 0 || (len(lines) == 1 && len(lines[0]) == 0) { + return nil + } + matches := rxRequired.FindStringSubmatch(lines[0]) + if len(matches) <= 1 || len(matches[1]) == 0 { + return nil + } + + req, err := strconv.ParseBool(matches[1]) + if err != nil { + return err + } + midx := -1 + for i, nm := range su.Schema.Required { + if nm == su.Field { + midx = i + break + } + } + if req { + if midx < 0 { + su.Schema.Required = append(su.Schema.Required, su.Field) + } + } else if midx >= 0 { + su.Schema.Required = append(su.Schema.Required[:midx], su.Schema.Required[midx+1:]...) 
+ } + return nil +} + +type SetDefault struct { + scheme *oaispec.SimpleSchema + builder ifaces.ValidationBuilder + rx *regexp.Regexp +} + +func NewSetDefault(scheme *oaispec.SimpleSchema, builder ifaces.ValidationBuilder, opts ...PrefixRxOption) *SetDefault { + rx := rxDefaultValidation + for _, apply := range opts { + rx = apply(rxDefaultFmt) + } + + return &SetDefault{ + scheme: scheme, + builder: builder, + rx: rx, + } +} + +func (sd *SetDefault) Matches(line string) bool { + return sd.rx.MatchString(line) +} + +func (sd *SetDefault) Parse(lines []string) error { + if len(lines) == 0 || (len(lines) == 1 && len(lines[0]) == 0) { + return nil + } + + matches := sd.rx.FindStringSubmatch(lines[0]) + if len(matches) > 1 && len(matches[1]) > 0 { + d, err := parseValueFromSchema(matches[1], sd.scheme) + if err != nil { + return err + } + sd.builder.SetDefault(d) + } + + return nil +} + +type SetExample struct { + scheme *oaispec.SimpleSchema + builder ifaces.ValidationBuilder + rx *regexp.Regexp +} + +func NewSetExample(scheme *oaispec.SimpleSchema, builder ifaces.ValidationBuilder, opts ...PrefixRxOption) *SetExample { + rx := rxExample + for _, apply := range opts { + rx = apply(rxExampleFmt) + } + + return &SetExample{ + scheme: scheme, + builder: builder, + rx: rx, + } +} + +func (se *SetExample) Matches(line string) bool { + return se.rx.MatchString(line) +} + +func (se *SetExample) Parse(lines []string) error { + if len(lines) == 0 || (len(lines) == 1 && len(lines[0]) == 0) { + return nil + } + + matches := se.rx.FindStringSubmatch(lines[0]) + if len(matches) > 1 && len(matches[1]) > 0 { + d, err := parseValueFromSchema(matches[1], se.scheme) + if err != nil { + return err + } + se.builder.SetExample(d) + } + + return nil +} + +type SetDiscriminator struct { + Schema *oaispec.Schema + Field string +} + +func NewSetDiscriminator(schema *oaispec.Schema, field string) *SetDiscriminator { + return &SetDiscriminator{ + Schema: schema, + Field: field, + } +} + +func 
(su *SetDiscriminator) Matches(line string) bool { + return rxDiscriminator.MatchString(line) +} + +func (su *SetDiscriminator) Parse(lines []string) error { + if len(lines) == 0 || (len(lines) == 1 && len(lines[0]) == 0) { + return nil + } + matches := rxDiscriminator.FindStringSubmatch(lines[0]) + if len(matches) > 1 && len(matches[1]) > 0 { + req, err := strconv.ParseBool(matches[1]) + if err != nil { + return err + } + if req { + su.Schema.Discriminator = su.Field + } else if su.Schema.Discriminator == su.Field { + su.Schema.Discriminator = "" + } + } + return nil +} diff --git a/internal/parsers/validations_test.go b/internal/parsers/validations_test.go new file mode 100644 index 0000000..4cb6550 --- /dev/null +++ b/internal/parsers/validations_test.go @@ -0,0 +1,608 @@ +// SPDX-FileCopyrightText: Copyright 2015-2025 go-swagger maintainers +// SPDX-License-Identifier: Apache-2.0 + +package parsers + +import ( + "testing" + + "github.com/go-openapi/testify/v2/assert" + "github.com/go-openapi/testify/v2/require" + + oaispec "github.com/go-openapi/spec" +) + +// validationRecorder captures all calls made to a ValidationBuilder. 
+type validationRecorder struct { + maximum *float64 + exclusiveMaximum bool + minimum *float64 + exclusiveMinimum bool + multipleOf *float64 + minItems *int64 + maxItems *int64 + minLength *int64 + maxLength *int64 + pattern string + unique *bool + enum string + defaultVal any + exampleVal any + collectionFormat string +} + +func (r *validationRecorder) SetMaximum(v float64, exclusive bool) { + r.maximum = &v + r.exclusiveMaximum = exclusive +} + +func (r *validationRecorder) SetMinimum(v float64, exclusive bool) { + r.minimum = &v + r.exclusiveMinimum = exclusive +} +func (r *validationRecorder) SetMultipleOf(v float64) { r.multipleOf = &v } +func (r *validationRecorder) SetMinItems(v int64) { r.minItems = &v } +func (r *validationRecorder) SetMaxItems(v int64) { r.maxItems = &v } +func (r *validationRecorder) SetMinLength(v int64) { r.minLength = &v } +func (r *validationRecorder) SetMaxLength(v int64) { r.maxLength = &v } +func (r *validationRecorder) SetPattern(v string) { r.pattern = v } +func (r *validationRecorder) SetUnique(v bool) { r.unique = &v } +func (r *validationRecorder) SetEnum(v string) { r.enum = v } +func (r *validationRecorder) SetDefault(v any) { r.defaultVal = v } +func (r *validationRecorder) SetExample(v any) { r.exampleVal = v } +func (r *validationRecorder) SetCollectionFormat(v string) { r.collectionFormat = v } + +func TestSetMaximum(t *testing.T) { + t.Parallel() + + tests := []struct { + name string + line string + wantMatch bool + wantVal float64 + exclusive bool + }{ + {"inclusive", "maximum: 100", true, 100, false}, + {"exclusive", "maximum: < 100", true, 100, true}, + {"decimal", "maximum: 99.5", true, 99.5, false}, + {"negative", "maximum: -10", true, -10, false}, + {"no match", "something else", false, 0, false}, + } + + for _, tc := range tests { + t.Run(tc.name, func(t *testing.T) { + rec := &validationRecorder{} + sm := NewSetMaximum(rec) + assert.EqualT(t, tc.wantMatch, sm.Matches(tc.line)) + if tc.wantMatch { + 
require.NoError(t, sm.Parse([]string{tc.line})) + require.NotNil(t, rec.maximum) + assert.EqualT(t, tc.wantVal, *rec.maximum) + assert.EqualT(t, tc.exclusive, rec.exclusiveMaximum) + } + }) + } + + t.Run("empty lines", func(t *testing.T) { + rec := &validationRecorder{} + sm := NewSetMaximum(rec) + require.NoError(t, sm.Parse(nil)) + require.NoError(t, sm.Parse([]string{})) + require.NoError(t, sm.Parse([]string{""})) + assert.Nil(t, rec.maximum) + }) + + t.Run("parse error", func(t *testing.T) { + rec := &validationRecorder{} + sm := NewSetMaximum(rec) + // Force a match with a non-numeric value via raw regex + require.NoError(t, sm.Parse([]string{"maximum: not-a-number"})) + assert.Nil(t, rec.maximum) // no match because regex won't capture non-numeric + }) +} + +func TestSetMinimum(t *testing.T) { + t.Parallel() + + tests := []struct { + name string + line string + wantMatch bool + wantVal float64 + exclusive bool + }{ + {"inclusive", "minimum: 0", true, 0, false}, + {"exclusive", "minimum: > 0", true, 0, true}, + {"decimal", "min: 1.5", true, 1.5, false}, + } + + for _, tc := range tests { + t.Run(tc.name, func(t *testing.T) { + rec := &validationRecorder{} + sm := NewSetMinimum(rec) + assert.EqualT(t, tc.wantMatch, sm.Matches(tc.line)) + if tc.wantMatch { + require.NoError(t, sm.Parse([]string{tc.line})) + require.NotNil(t, rec.minimum) + assert.EqualT(t, tc.wantVal, *rec.minimum) + assert.EqualT(t, tc.exclusive, rec.exclusiveMinimum) + } + }) + } +} + +func TestSetMultipleOf(t *testing.T) { + t.Parallel() + + tests := []struct { + name string + line string + wantMatch bool + wantVal float64 + }{ + {"integer", "multiple of: 5", true, 5}, + {"decimal", "Multiple Of: 0.5", true, 0.5}, + {"no match", "something else", false, 0}, + } + + for _, tc := range tests { + t.Run(tc.name, func(t *testing.T) { + rec := &validationRecorder{} + sm := NewSetMultipleOf(rec) + assert.EqualT(t, tc.wantMatch, sm.Matches(tc.line)) + if tc.wantMatch { + require.NoError(t, 
sm.Parse([]string{tc.line})) + require.NotNil(t, rec.multipleOf) + assert.EqualT(t, tc.wantVal, *rec.multipleOf) + } + }) + } +} + +func TestSetMaxItems(t *testing.T) { + t.Parallel() + + rec := &validationRecorder{} + sm := NewSetMaxItems(rec) + assert.TrueT(t, sm.Matches("max items: 10")) + require.NoError(t, sm.Parse([]string{"max items: 10"})) + require.NotNil(t, rec.maxItems) + assert.EqualT(t, int64(10), *rec.maxItems) +} + +func TestSetMinItems(t *testing.T) { + t.Parallel() + + rec := &validationRecorder{} + sm := NewSetMinItems(rec) + assert.TrueT(t, sm.Matches("min items: 1")) + require.NoError(t, sm.Parse([]string{"min items: 1"})) + require.NotNil(t, rec.minItems) + assert.EqualT(t, int64(1), *rec.minItems) +} + +func TestSetMaxLength(t *testing.T) { + t.Parallel() + + rec := &validationRecorder{} + sm := NewSetMaxLength(rec) + assert.TrueT(t, sm.Matches("max length: 255")) + require.NoError(t, sm.Parse([]string{"max length: 255"})) + require.NotNil(t, rec.maxLength) + assert.EqualT(t, int64(255), *rec.maxLength) +} + +func TestSetMinLength(t *testing.T) { + t.Parallel() + + rec := &validationRecorder{} + sm := NewSetMinLength(rec) + assert.TrueT(t, sm.Matches("min length: 1")) + require.NoError(t, sm.Parse([]string{"min length: 1"})) + require.NotNil(t, rec.minLength) + assert.EqualT(t, int64(1), *rec.minLength) +} + +func TestSetPattern(t *testing.T) { + t.Parallel() + + rec := &validationRecorder{} + sm := NewSetPattern(rec) + assert.TrueT(t, sm.Matches("pattern: ^\\w+$")) + require.NoError(t, sm.Parse([]string{"pattern: ^\\w+$"})) + assert.EqualT(t, "^\\w+$", rec.pattern) +} + +func TestSetCollectionFormat(t *testing.T) { + t.Parallel() + + rec := &validationRecorder{} + sm := NewSetCollectionFormat(rec) + assert.TrueT(t, sm.Matches("collection format: csv")) + require.NoError(t, sm.Parse([]string{"collection format: csv"})) + assert.EqualT(t, "csv", rec.collectionFormat) +} + +func TestSetUnique(t *testing.T) { + t.Parallel() + + tests := []struct 
{ + line string + want bool + }{ + {"unique: true", true}, + {"unique: false", false}, + } + + for _, tc := range tests { + t.Run(tc.line, func(t *testing.T) { + rec := &validationRecorder{} + su := NewSetUnique(rec) + assert.TrueT(t, su.Matches(tc.line)) + require.NoError(t, su.Parse([]string{tc.line})) + require.NotNil(t, rec.unique) + assert.EqualT(t, tc.want, *rec.unique) + }) + } + + t.Run("parse error", func(t *testing.T) { + rec := &validationRecorder{} + su := NewSetUnique(rec) + // unique: accepts only true/false so non-bool won't match + assert.FalseT(t, su.Matches("unique: maybe")) + }) +} + +func TestSetRequiredParam(t *testing.T) { + t.Parallel() + + tests := []struct { + line string + want bool + }{ + {"required: true", true}, + {"required: false", false}, + } + + for _, tc := range tests { + t.Run(tc.line, func(t *testing.T) { + param := new(oaispec.Parameter) + su := NewSetRequiredParam(param) + assert.TrueT(t, su.Matches(tc.line)) + require.NoError(t, su.Parse([]string{tc.line})) + assert.EqualT(t, tc.want, param.Required) + }) + } + + t.Run("empty", func(t *testing.T) { + param := new(oaispec.Parameter) + su := NewSetRequiredParam(param) + require.NoError(t, su.Parse(nil)) + assert.FalseT(t, param.Required) + }) +} + +func TestSetReadOnlySchema(t *testing.T) { + t.Parallel() + + tests := []struct { + line string + want bool + }{ + {"read only: true", true}, + {"readOnly: true", true}, + {"read-only: false", false}, + } + + for _, tc := range tests { + t.Run(tc.line, func(t *testing.T) { + schema := new(oaispec.Schema) + su := NewSetReadOnlySchema(schema) + assert.TrueT(t, su.Matches(tc.line)) + require.NoError(t, su.Parse([]string{tc.line})) + assert.EqualT(t, tc.want, schema.ReadOnly) + }) + } +} + +func TestSetRequiredSchema(t *testing.T) { + t.Parallel() + + t.Run("set required true", func(t *testing.T) { + schema := new(oaispec.Schema) + su := NewSetRequiredSchema(schema, "name") + require.NoError(t, su.Parse([]string{"required: true"})) + 
assert.Equal(t, []string{"name"}, schema.Required) + }) + + t.Run("set required false removes", func(t *testing.T) { + schema := &oaispec.Schema{} + schema.Required = []string{"name", "age"} + su := NewSetRequiredSchema(schema, "name") + require.NoError(t, su.Parse([]string{"required: false"})) + assert.Equal(t, []string{"age"}, schema.Required) + }) + + t.Run("set required true idempotent", func(t *testing.T) { + schema := &oaispec.Schema{} + schema.Required = []string{"name"} + su := NewSetRequiredSchema(schema, "name") + require.NoError(t, su.Parse([]string{"required: true"})) + assert.Equal(t, []string{"name"}, schema.Required) + }) + + t.Run("set required false not present", func(t *testing.T) { + schema := new(oaispec.Schema) + su := NewSetRequiredSchema(schema, "name") + require.NoError(t, su.Parse([]string{"required: false"})) + assert.Empty(t, schema.Required) + }) + + t.Run("empty lines", func(t *testing.T) { + schema := new(oaispec.Schema) + su := NewSetRequiredSchema(schema, "name") + require.NoError(t, su.Parse(nil)) + require.NoError(t, su.Parse([]string{""})) + }) + + t.Run("no match in line", func(t *testing.T) { + schema := new(oaispec.Schema) + su := NewSetRequiredSchema(schema, "name") + require.NoError(t, su.Parse([]string{"something else"})) + assert.Empty(t, schema.Required) + }) +} + +func TestSetDefault(t *testing.T) { + t.Parallel() + + t.Run("string type", func(t *testing.T) { + rec := &validationRecorder{} + scheme := &oaispec.SimpleSchema{Type: "string"} + sd := NewSetDefault(scheme, rec) + assert.TrueT(t, sd.Matches("default: hello")) + require.NoError(t, sd.Parse([]string{"default: hello"})) + assert.EqualT(t, "hello", rec.defaultVal) + }) + + t.Run("integer type", func(t *testing.T) { + rec := &validationRecorder{} + scheme := &oaispec.SimpleSchema{Type: "integer"} + sd := NewSetDefault(scheme, rec) + require.NoError(t, sd.Parse([]string{"default: 42"})) + assert.EqualT(t, 42, rec.defaultVal) + }) + + t.Run("empty", func(t *testing.T) 
{ + rec := &validationRecorder{} + scheme := &oaispec.SimpleSchema{Type: "string"} + sd := NewSetDefault(scheme, rec) + require.NoError(t, sd.Parse(nil)) + assert.Nil(t, rec.defaultVal) + }) +} + +func TestSetExample(t *testing.T) { + t.Parallel() + + rec := &validationRecorder{} + scheme := &oaispec.SimpleSchema{Type: "string"} + se := NewSetExample(scheme, rec) + assert.TrueT(t, se.Matches("example: foobar")) + require.NoError(t, se.Parse([]string{"example: foobar"})) + assert.EqualT(t, "foobar", rec.exampleVal) +} + +func TestSetDiscriminator(t *testing.T) { + t.Parallel() + + t.Run("set true", func(t *testing.T) { + schema := new(oaispec.Schema) + sd := NewSetDiscriminator(schema, "kind") + assert.TrueT(t, sd.Matches("discriminator: true")) + require.NoError(t, sd.Parse([]string{"discriminator: true"})) + assert.EqualT(t, "kind", schema.Discriminator) + }) + + t.Run("set false clears", func(t *testing.T) { + schema := &oaispec.Schema{} + schema.Discriminator = "kind" + sd := NewSetDiscriminator(schema, "kind") + require.NoError(t, sd.Parse([]string{"discriminator: false"})) + assert.EqualT(t, "", schema.Discriminator) + }) + + t.Run("set false different field", func(t *testing.T) { + schema := &oaispec.Schema{} + schema.Discriminator = "type" + sd := NewSetDiscriminator(schema, "kind") + require.NoError(t, sd.Parse([]string{"discriminator: false"})) + assert.EqualT(t, "type", schema.Discriminator) // unchanged + }) +} + +func TestWithItemsPrefixLevel(t *testing.T) { + t.Parallel() + + rec := &validationRecorder{} + sm := NewSetMaximum(rec, WithItemsPrefixLevel(0)) + line := "items.maximum: 100" + assert.TrueT(t, sm.Matches(line)) + require.NoError(t, sm.Parse([]string{line})) + require.NotNil(t, rec.maximum) + assert.EqualT(t, float64(100), *rec.maximum) + + // Level 1 requires "items.items." 
+ rec2 := &validationRecorder{} + sm2 := NewSetMinimum(rec2, WithItemsPrefixLevel(1)) + line2 := "items.items.minimum: 5" + assert.TrueT(t, sm2.Matches(line2)) + require.NoError(t, sm2.Parse([]string{line2})) + require.NotNil(t, rec2.minimum) + assert.EqualT(t, float64(5), *rec2.minimum) +} + +func TestSetEnum(t *testing.T) { + t.Parallel() + + rec := &validationRecorder{} + se := NewSetEnum(rec) + line := "enum: " + `["a","b","c"]` + assert.TrueT(t, se.Matches(line)) + require.NoError(t, se.Parse([]string{line})) + assert.EqualT(t, `["a","b","c"]`, rec.enum) + + t.Run("empty", func(t *testing.T) { + rec := &validationRecorder{} + se := NewSetEnum(rec) + require.NoError(t, se.Parse(nil)) + require.NoError(t, se.Parse([]string{""})) + assert.EqualT(t, "", rec.enum) + }) +} + +// TestPrefixRxOption_AllConstructors covers the WithItemsPrefixLevel loop body +// in every validation constructor that accepts PrefixRxOption. +func TestPrefixRxOption_AllConstructors(t *testing.T) { + t.Parallel() + + prefix := WithItemsPrefixLevel(0) + + t.Run("SetMultipleOf", func(t *testing.T) { + rec := &validationRecorder{} + sm := NewSetMultipleOf(rec, prefix) + line := "items.multiple of: 3" + assert.TrueT(t, sm.Matches(line)) + require.NoError(t, sm.Parse([]string{line})) + require.NotNil(t, rec.multipleOf) + assert.EqualT(t, float64(3), *rec.multipleOf) + }) + + t.Run("SetMaxItems", func(t *testing.T) { + rec := &validationRecorder{} + sm := NewSetMaxItems(rec, prefix) + line := "items.max items: 10" + assert.TrueT(t, sm.Matches(line)) + require.NoError(t, sm.Parse([]string{line})) + require.NotNil(t, rec.maxItems) + assert.EqualT(t, int64(10), *rec.maxItems) + }) + + t.Run("SetMinItems", func(t *testing.T) { + rec := &validationRecorder{} + sm := NewSetMinItems(rec, prefix) + line := "items.min items: 1" + assert.TrueT(t, sm.Matches(line)) + require.NoError(t, sm.Parse([]string{line})) + require.NotNil(t, rec.minItems) + assert.EqualT(t, int64(1), *rec.minItems) + }) + + 
t.Run("SetMaxLength", func(t *testing.T) { + rec := &validationRecorder{} + sm := NewSetMaxLength(rec, prefix) + line := "items.max length: 100" + assert.TrueT(t, sm.Matches(line)) + require.NoError(t, sm.Parse([]string{line})) + require.NotNil(t, rec.maxLength) + assert.EqualT(t, int64(100), *rec.maxLength) + }) + + t.Run("SetMinLength", func(t *testing.T) { + rec := &validationRecorder{} + sm := NewSetMinLength(rec, prefix) + line := "items.min length: 1" + assert.TrueT(t, sm.Matches(line)) + require.NoError(t, sm.Parse([]string{line})) + require.NotNil(t, rec.minLength) + assert.EqualT(t, int64(1), *rec.minLength) + }) + + t.Run("SetPattern", func(t *testing.T) { + rec := &validationRecorder{} + sm := NewSetPattern(rec, prefix) + line := "items.pattern: ^[a-z]+$" + assert.TrueT(t, sm.Matches(line)) + require.NoError(t, sm.Parse([]string{line})) + assert.EqualT(t, "^[a-z]+$", rec.pattern) + }) + + t.Run("SetCollectionFormat", func(t *testing.T) { + rec := &validationRecorder{} + sm := NewSetCollectionFormat(rec, prefix) + line := "items.collection format: pipes" + assert.TrueT(t, sm.Matches(line)) + require.NoError(t, sm.Parse([]string{line})) + assert.EqualT(t, "pipes", rec.collectionFormat) + }) + + t.Run("SetUnique", func(t *testing.T) { + rec := &validationRecorder{} + sm := NewSetUnique(rec, prefix) + line := "items.unique: true" + assert.TrueT(t, sm.Matches(line)) + require.NoError(t, sm.Parse([]string{line})) + require.NotNil(t, rec.unique) + assert.TrueT(t, *rec.unique) + }) + + t.Run("SetDefault", func(t *testing.T) { + rec := &validationRecorder{} + scheme := &oaispec.SimpleSchema{Type: "string"} + sm := NewSetDefault(scheme, rec, prefix) + line := "items.default: hello" + assert.TrueT(t, sm.Matches(line)) + require.NoError(t, sm.Parse([]string{line})) + assert.EqualT(t, "hello", rec.defaultVal) + }) + + t.Run("SetExample", func(t *testing.T) { + rec := &validationRecorder{} + scheme := &oaispec.SimpleSchema{Type: "string"} + sm := NewSetExample(scheme, 
rec, prefix) + line := "items.example: world" + assert.TrueT(t, sm.Matches(line)) + require.NoError(t, sm.Parse([]string{line})) + assert.EqualT(t, "world", rec.exampleVal) + }) + + t.Run("SetEnum", func(t *testing.T) { + rec := &validationRecorder{} + sm := NewSetEnum(rec, prefix) + line := `items.enum: ["x","y"]` + assert.TrueT(t, sm.Matches(line)) + require.NoError(t, sm.Parse([]string{line})) + assert.EqualT(t, `["x","y"]`, rec.enum) + }) +} + +func TestSetDefault_ParseError(t *testing.T) { + t.Parallel() + + rec := &validationRecorder{} + scheme := &oaispec.SimpleSchema{Type: "integer"} + sd := NewSetDefault(scheme, rec) + err := sd.Parse([]string{"default: not-a-number"}) + require.Error(t, err) + assert.Nil(t, rec.defaultVal) +} + +func TestSetExample_ParseError(t *testing.T) { + t.Parallel() + + rec := &validationRecorder{} + scheme := &oaispec.SimpleSchema{Type: "integer"} + se := NewSetExample(scheme, rec) + err := se.Parse([]string{"example: not-a-number"}) + require.Error(t, err) + assert.Nil(t, rec.exampleVal) +} + +func TestSetRequiredSchema_Matches(t *testing.T) { + t.Parallel() + + su := NewSetRequiredSchema(new(oaispec.Schema), "name") + assert.TrueT(t, su.Matches("required: true")) + assert.TrueT(t, su.Matches("Required: false")) + assert.FalseT(t, su.Matches("something else")) +} diff --git a/internal/parsers/yaml_parser.go b/internal/parsers/yaml_parser.go new file mode 100644 index 0000000..8643921 --- /dev/null +++ b/internal/parsers/yaml_parser.go @@ -0,0 +1,106 @@ +// SPDX-FileCopyrightText: Copyright 2015-2025 go-swagger maintainers +// SPDX-License-Identifier: Apache-2.0 + +package parsers + +import ( + "encoding/json" + "regexp" + "strings" + + "github.com/go-openapi/loads/fmts" + "go.yaml.in/yaml/v3" +) + +type YAMLParserOption func(*YAMLParser) + +func WithSetter(set func(json.RawMessage) error) YAMLParserOption { + return func(p *YAMLParser) { + p.set = set + } +} + +func WithMatcher(rx *regexp.Regexp) YAMLParserOption { + return 
func(p *YAMLParser) { + p.rx = rx + } +} + +func WithExtensionMatcher() YAMLParserOption { + return func(p *YAMLParser) { + p.rx = rxExtensions + } +} + +type YAMLParser struct { + set func(json.RawMessage) error + rx *regexp.Regexp +} + +func NewYAMLParser(opts ...YAMLParserOption) *YAMLParser { + var y YAMLParser + for _, apply := range opts { + apply(&y) + } + + return &y +} + +func (y *YAMLParser) Parse(lines []string) error { + if len(lines) == 0 || (len(lines) == 1 && len(lines[0]) == 0) { + return nil + } + + uncommented := make([]string, 0, len(lines)) + uncommented = append(uncommented, removeYamlIndent(lines)...) + + yamlContent := strings.Join(uncommented, "\n") + var yamlValue any + err := yaml.Unmarshal([]byte(yamlContent), &yamlValue) + if err != nil { + return err + } + + var jsonValue json.RawMessage + jsonValue, err = fmts.YAMLToJSON(yamlValue) + if err != nil { + return err + } + + if y.set == nil { + return nil + } + + return y.set(jsonValue) +} + +func (y *YAMLParser) Matches(line string) bool { + if y.rx == nil { + return false + } + + return y.rx.MatchString(line) +} + +// removes indent based on the first line. +// +// The difference with removeIndent is that lines shorter than the indentation are elided.
+func removeYamlIndent(spec []string) []string { + if len(spec) == 0 { + return spec + } + + loc := rxIndent.FindStringIndex(spec[0]) + if len(loc) < 2 || loc[1] <= 1 { + return spec + } + + s := make([]string, 0, len(spec)) + for i := range spec { + if len(spec[i]) >= loc[1] { + s = append(s, spec[i][loc[1]-1:]) + } + } + + return s +} diff --git a/internal/parsers/yaml_parser_test.go b/internal/parsers/yaml_parser_test.go new file mode 100644 index 0000000..4c5e470 --- /dev/null +++ b/internal/parsers/yaml_parser_test.go @@ -0,0 +1,138 @@ +package parsers + +import ( + "encoding/json" + "errors" + "testing" + + "github.com/go-openapi/testify/v2/require" +) + +var errSetterFailed = errors.New("setter failed") + +func TestYamlParser(t *testing.T) { + t.Parallel() + + setter := func(out *string, called *int) func(json.RawMessage) error { + return func(in json.RawMessage) error { + *called++ + *out = string(in) + + return nil + } + } + + t.Run("with happy path", func(t *testing.T) { + t.Run("should parse security definitions object as YAML", func(t *testing.T) { + setterCalled := 0 + var actualJSON string + parser := NewYAMLParser(WithMatcher(rxSecurity), WithSetter(setter(&actualJSON, &setterCalled))) + + lines := []string{ + "SecurityDefinitions:", + " api_key:", + " type: apiKey", + " name: X-API-KEY", + " petstore_auth:", + " type: oauth2", + " scopes:", + " 'write:pets': modify pets in your account", + " 'read:pets': read your pets", + } + + require.TrueT(t, parser.Matches(lines[0])) + require.NoError(t, parser.Parse(lines)) + require.EqualT(t, 1, setterCalled) + + const expectedJSON = `{"SecurityDefinitions":{"api_key":{"name":"X-API-KEY","type":"apiKey"},` + + `"petstore_auth":{"scopes":{"read:pets":"read your pets","write:pets":"modify pets in your account"},"type":"oauth2"}}}` + + require.JSONEqT(t, expectedJSON, actualJSON) + }) + }) + + t.Run("with edge cases", func(t *testing.T) { + t.Run("should handle empty input", func(t *testing.T) { + setterCalled := 
0 + var actualJSON string + parser := NewYAMLParser(WithMatcher(rxSecurity), WithSetter(setter(&actualJSON, &setterCalled))) + + require.FalseT(t, parser.Matches("")) + require.NoError(t, parser.Parse([]string{})) + require.Zero(t, setterCalled) + }) + + t.Run("should handle nil input", func(t *testing.T) { + setterCalled := 0 + var actualJSON string + parser := NewYAMLParser(WithMatcher(rxSecurity), WithSetter(setter(&actualJSON, &setterCalled))) + + require.NoError(t, parser.Parse(nil)) + require.Zero(t, setterCalled) + }) + + t.Run("should handle bad indentation", func(t *testing.T) { + setterCalled := 0 + var actualJSON string + parser := NewYAMLParser(WithMatcher(rxSecurity), WithSetter(setter(&actualJSON, &setterCalled))) + lines := []string{ + "SecurityDefinitions:", + "\t\tapi_key:", + " type: apiKey", + } + + require.TrueT(t, parser.Matches(lines[0])) + err := parser.Parse(lines) + require.Error(t, err) + require.StringContainsT(t, err.Error(), "yaml: line 2:") + require.Zero(t, setterCalled) + }) + + t.Run("should catch YAML errors", func(t *testing.T) { + setterCalled := 0 + var actualJSON string + parser := NewYAMLParser(WithMatcher(rxSecurity), WithSetter(setter(&actualJSON, &setterCalled))) + lines := []string{ + "SecurityDefinitions:", + " api_key", + " type: apiKey", + } + + require.TrueT(t, parser.Matches(lines[0])) + err := parser.Parse(lines) + require.Error(t, err) + require.StringContainsT(t, err.Error(), "yaml: line 3: mapping value") + require.Zero(t, setterCalled) + }) + + t.Run("should handle nil rx in Matches", func(t *testing.T) { + parser := NewYAMLParser(WithSetter(func(_ json.RawMessage) error { return nil })) + require.FalseT(t, parser.Matches("anything")) + }) + + t.Run("should handle nil setter", func(t *testing.T) { + parser := NewYAMLParser(WithMatcher(rxSecurity)) + lines := []string{ + "SecurityDefinitions:", + " api_key:", + " type: apiKey", + } + require.NoError(t, parser.Parse(lines)) + }) + + t.Run("should propagate setter 
error", func(t *testing.T) { + parser := NewYAMLParser( + WithMatcher(rxSecurity), + WithSetter(func(_ json.RawMessage) error { return errSetterFailed }), + ) + lines := []string{ + "SecurityDefinitions:", + " api_key:", + " type: apiKey", + } + err := parser.Parse(lines) + require.Error(t, err) + require.ErrorIs(t, err, errSetterFailed) + }) + }) +} diff --git a/internal/parsers/yaml_spec_parser.go b/internal/parsers/yaml_spec_parser.go new file mode 100644 index 0000000..dea7a0e --- /dev/null +++ b/internal/parsers/yaml_spec_parser.go @@ -0,0 +1,199 @@ +package parsers + +import ( + "encoding/json" + "fmt" + "go/ast" + "regexp" + "strings" + + "github.com/go-openapi/loads/fmts" + "go.yaml.in/yaml/v3" +) + +// YAMLSpecScanner aggregates lines in header until it sees `---`, +// the beginning of a YAML spec. +type YAMLSpecScanner struct { + header []string + yamlSpec []string + setTitle func([]string) + setDescription func([]string) + workedOutTitle bool + title []string + skipHeader bool +} + +func NewYAMLSpecScanner(setTitle func([]string), setDescription func([]string)) *YAMLSpecScanner { + return &YAMLSpecScanner{ + setTitle: setTitle, + setDescription: setDescription, + } +} + +func (sp *YAMLSpecScanner) Title() []string { + sp.collectTitleDescription() + return sp.title +} + +func (sp *YAMLSpecScanner) Description() []string { + sp.collectTitleDescription() + return sp.header +} + +func (sp *YAMLSpecScanner) Parse(doc *ast.CommentGroup) error { + if doc == nil { + return nil + } + var startedYAMLSpec bool +COMMENTS: + for _, c := range doc.List { + for line := range strings.SplitSeq(c.Text, "\n") { + if HasAnnotation(line) { + break COMMENTS // a new swagger: annotation terminates this parser + } + + if !startedYAMLSpec { + if rxBeginYAMLSpec.MatchString(line) { + startedYAMLSpec = true + sp.yamlSpec = append(sp.yamlSpec, line) + continue + } + + if !sp.skipHeader { + sp.header = append(sp.header, line) + } + + // no YAML spec yet, moving on + continue + } + + 
sp.yamlSpec = append(sp.yamlSpec, line) + } + } + if sp.setTitle != nil { + sp.setTitle(sp.Title()) + } + if sp.setDescription != nil { + sp.setDescription(sp.Description()) + } + return nil +} + +func (sp *YAMLSpecScanner) UnmarshalSpec(u func([]byte) error) (err error) { + specYaml := cleanupScannerLines(sp.yamlSpec, rxUncommentYAML) + if len(specYaml) == 0 { + return fmt.Errorf("no spec available to unmarshal: %w", ErrParser) + } + + if !strings.Contains(specYaml[0], "---") { + return fmt.Errorf("yaml spec has to start with `---`: %w", ErrParser) + } + + // remove indentation + specYaml = removeIndent(specYaml) + + // 1. parse yaml lines + yamlValue := make(map[any]any) + + yamlContent := strings.Join(specYaml, "\n") + err = yaml.Unmarshal([]byte(yamlContent), &yamlValue) + if err != nil { + return err + } + + // 2. convert to json + var jsonValue json.RawMessage + jsonValue, err = fmts.YAMLToJSON(yamlValue) + if err != nil { + return err + } + + // 3. unmarshal the json into an interface + var data []byte + data, err = jsonValue.MarshalJSON() + if err != nil { + return err + } + err = u(data) + if err != nil { + return err + } + + // all parsed, returning... + sp.yamlSpec = nil // spec is now consumed, so let's erase the parsed lines + + return nil +} + +func (sp *YAMLSpecScanner) collectTitleDescription() { + if sp.workedOutTitle { + return + } + if sp.setTitle == nil { + sp.header = cleanupScannerLines(sp.header, rxUncommentHeaders) + return + } + + sp.workedOutTitle = true + sp.title, sp.header = collectScannerTitleDescription(sp.header) +} + +// removes indent based on the first line. 
+func removeIndent(spec []string) []string { + if len(spec) == 0 { + return spec + } + + loc := rxIndent.FindStringIndex(spec[0]) + if len(loc) < 2 || loc[1] <= 1 { + return spec + } + + s := make([]string, len(spec)) + copy(s, spec) + + for i := range s { + if len(s[i]) < loc[1] { + continue + } + + s[i] = spec[i][loc[1]-1:] //nolint:gosec // G602: bounds already checked on line 445 + start := rxNotIndent.FindStringIndex(s[i]) + if len(start) < 2 || start[1] == 0 { + continue + } + + s[i] = strings.Replace(s[i], "\t", " ", start[1]) + } + + return s +} + +func cleanupScannerLines(lines []string, ur *regexp.Regexp) []string { + // bail early when there is nothing to parse + if len(lines) == 0 { + return lines + } + + seenLine := -1 + var lastContent int + + uncommented := make([]string, 0, len(lines)) + for i, v := range lines { + str := ur.ReplaceAllString(v, "") + uncommented = append(uncommented, str) + if str != "" { + if seenLine < 0 { + seenLine = i + } + lastContent = i + } + } + + // fixes issue #50 + if seenLine == -1 { + return nil + } + + return uncommented[seenLine : lastContent+1] +} diff --git a/yamlparser_test.go b/internal/parsers/yaml_spec_parser_test.go similarity index 75% rename from yamlparser_test.go rename to internal/parsers/yaml_spec_parser_test.go index 592ccef..d870bd8 100644 --- a/yamlparser_test.go +++ b/internal/parsers/yaml_spec_parser_test.go @@ -1,119 +1,24 @@ // SPDX-FileCopyrightText: Copyright 2015-2025 go-swagger maintainers // SPDX-License-Identifier: Apache-2.0 -package codescan +package parsers import ( - "encoding/json" + "errors" "go/ast" "testing" "github.com/go-openapi/testify/v2/require" ) -func TestYamlParser(t *testing.T) { - t.Parallel() - - setter := func(out *string, called *int) func(json.RawMessage) error { - return func(in json.RawMessage) error { - *called++ - *out = string(in) - - return nil - } - } - - t.Run("with happy path", func(t *testing.T) { - t.Run("should parse security definitions object as YAML", 
func(t *testing.T) { - setterCalled := 0 - var actualJSON string - parser := newYamlParser(rxSecurity, setter(&actualJSON, &setterCalled)) - - lines := []string{ - "SecurityDefinitions:", - " api_key:", - " type: apiKey", - " name: X-API-KEY", - " petstore_auth:", - " type: oauth2", - " scopes:", - " 'write:pets': modify pets in your account", - " 'read:pets': read your pets", - } - - require.TrueT(t, parser.Matches(lines[0])) - require.NoError(t, parser.Parse(lines)) - require.EqualT(t, 1, setterCalled) - - const expectedJSON = `{"SecurityDefinitions":{"api_key":{"name":"X-API-KEY","type":"apiKey"},` + - `"petstore_auth":{"scopes":{"read:pets":"read your pets","write:pets":"modify pets in your account"},"type":"oauth2"}}}` - - require.JSONEqT(t, expectedJSON, actualJSON) - }) - }) - - t.Run("with edge cases", func(t *testing.T) { - t.Run("should handle empty input", func(t *testing.T) { - setterCalled := 0 - var actualJSON string - parser := newYamlParser(rxSecurity, setter(&actualJSON, &setterCalled)) - - require.FalseT(t, parser.Matches("")) - require.NoError(t, parser.Parse([]string{})) - require.Zero(t, setterCalled) - }) - - t.Run("should handle nil input", func(t *testing.T) { - setterCalled := 0 - var actualJSON string - parser := newYamlParser(rxSecurity, setter(&actualJSON, &setterCalled)) - - require.NoError(t, parser.Parse(nil)) - require.Zero(t, setterCalled) - }) - - t.Run("should handle bad indentation", func(t *testing.T) { - setterCalled := 0 - var actualJSON string - parser := newYamlParser(rxSecurity, setter(&actualJSON, &setterCalled)) - lines := []string{ - "SecurityDefinitions:", - "\t\tapi_key:", - " type: apiKey", - } - - require.TrueT(t, parser.Matches(lines[0])) - err := parser.Parse(lines) - require.Error(t, err) - require.StringContainsT(t, err.Error(), "yaml: line 2:") - require.Zero(t, setterCalled) - }) - - t.Run("should catch YAML errors", func(t *testing.T) { - setterCalled := 0 - var actualJSON string - parser := 
newYamlParser(rxSecurity, setter(&actualJSON, &setterCalled)) - lines := []string{ - "SecurityDefinitions:", - " api_key", - " type: apiKey", - } - - require.TrueT(t, parser.Matches(lines[0])) - err := parser.Parse(lines) - require.Error(t, err) - require.StringContainsT(t, err.Error(), "yaml: line 3: mapping value") - require.Zero(t, setterCalled) - }) - }) -} +var errCallback = errors.New("callback error") func TestYamlSpecScanner(t *testing.T) { t.Parallel() t.Run("with happy path", func(t *testing.T) { t.Run("should parse operation definition object as YAML", func(t *testing.T) { - parser := new(yamlSpecScanner) + parser := new(YAMLSpecScanner) var title, description []string parser.setTitle = func(lines []string) { title = lines } parser.setDescription = func(lines []string) { description = lines } @@ -183,7 +88,7 @@ func TestYamlSpecScanner(t *testing.T) { }) t.Run("should stop yaml operation block when new tag is found", func(t *testing.T) { - parser := new(yamlSpecScanner) + parser := new(YAMLSpecScanner) var title, description []string parser.setTitle = func(lines []string) { title = lines } parser.setDescription = func(lines []string) { description = lines } @@ -223,7 +128,7 @@ func TestYamlSpecScanner(t *testing.T) { }) t.Run("should stop yaml operation block when new yaml document separator is found", func(t *testing.T) { - parser := new(yamlSpecScanner) + parser := new(YAMLSpecScanner) var title, description []string parser.setTitle = func(lines []string) { title = lines } parser.setDescription = func(lines []string) { description = lines } @@ -265,7 +170,7 @@ func TestYamlSpecScanner(t *testing.T) { t.Run("with edge cases", func(t *testing.T) { t.Run("with empty comment block", func(t *testing.T) { - parser := new(yamlSpecScanner) + parser := new(YAMLSpecScanner) var title, description []string parser.setTitle = func(lines []string) { title = lines } parser.setDescription = func(lines []string) { description = lines } @@ -276,7 +181,7 @@ func 
TestYamlSpecScanner(t *testing.T) { }) t.Run("with nil comment block", func(t *testing.T) { - parser := new(yamlSpecScanner) + parser := new(YAMLSpecScanner) var title, description []string parser.setTitle = func(lines []string) { title = lines } parser.setDescription = func(lines []string) { description = lines } @@ -286,7 +191,7 @@ func TestYamlSpecScanner(t *testing.T) { }) t.Run("without setTitle", func(t *testing.T) { - parser := new(yamlSpecScanner) + parser := new(YAMLSpecScanner) var description []string parser.setDescription = func(lines []string) { description = lines } @@ -314,6 +219,81 @@ func TestYamlSpecScanner(t *testing.T) { }) } +func TestYAMLSpecScanner_UnmarshalSpec_Errors(t *testing.T) { + t.Parallel() + + t.Run("no spec available", func(t *testing.T) { + parser := new(YAMLSpecScanner) + parser.setTitle = func(_ []string) {} + parser.setDescription = func(_ []string) {} + // Parse with no --- marker → no yamlSpec collected + doc := buildRawTestComments([]string{"just text, no yaml"}) + require.NoError(t, parser.Parse(doc)) + + err := parser.UnmarshalSpec(func(_ []byte) error { return nil }) + require.Error(t, err) + require.ErrorIs(t, err, ErrParser) + }) + + t.Run("spec doesnt start with ---", func(t *testing.T) { + parser := new(YAMLSpecScanner) + // Manually inject yamlSpec without the --- marker + parser.yamlSpec = []string{"summary: test"} + + err := parser.UnmarshalSpec(func(_ []byte) error { return nil }) + require.Error(t, err) + require.ErrorIs(t, err, ErrParser) + }) + + t.Run("invalid yaml", func(t *testing.T) { + parser := new(YAMLSpecScanner) + parser.yamlSpec = []string{"// ---", "// \tbad:", "// yaml"} + + err := parser.UnmarshalSpec(func(_ []byte) error { return nil }) + require.Error(t, err) + }) + + t.Run("unmarshal callback error", func(t *testing.T) { + parser := new(YAMLSpecScanner) + parser.setTitle = func(_ []string) {} + parser.setDescription = func(_ []string) {} + + lines := []string{ + "title", + "---", + "summary: 
test", + } + doc := buildRawTestComments(lines) + require.NoError(t, parser.Parse(doc)) + + err := parser.UnmarshalSpec(func(_ []byte) error { return errCallback }) + require.Error(t, err) + require.ErrorIs(t, err, errCallback) + }) +} + +func TestNewYAMLSpecScanner(t *testing.T) { + t.Parallel() + + var title, desc []string + scanner := NewYAMLSpecScanner( + func(lines []string) { title = lines }, + func(lines []string) { desc = lines }, + ) + + lines := []string{ + "My Title.", + "", + "My description.", + "---", + "summary: test", + } + doc := buildRawTestComments(lines) + require.NoError(t, scanner.Parse(doc)) + require.Equal(t, []string{"My Title."}, title) + require.Equal(t, []string{"My description."}, desc) +} + func TestRemoveIndent(t *testing.T) { t.Parallel() diff --git a/internal/scanner/declaration.go b/internal/scanner/declaration.go new file mode 100644 index 0000000..f198409 --- /dev/null +++ b/internal/scanner/declaration.go @@ -0,0 +1,145 @@ +package scanner + +import ( + "go/ast" + "go/types" + "strings" + + "github.com/go-openapi/codescan/internal/parsers" + "golang.org/x/tools/go/packages" +) + +type EntityDecl struct { + Comments *ast.CommentGroup + Type *types.Named + Alias *types.Alias // added to supplement Named, after go1.22 + Ident *ast.Ident + Spec *ast.TypeSpec + File *ast.File + Pkg *packages.Package + hasModelAnnotation bool + hasResponseAnnotation bool + hasParameterAnnotation bool +} + +// Obj returns the type name for the declaration defining the named type or alias t. 
+func (d *EntityDecl) Obj() *types.TypeName { + if d.Type != nil { + return d.Type.Obj() + } + if d.Alias != nil { + return d.Alias.Obj() + } + + panic("invalid EntityDecl: Type and Alias are both nil") +} + +func (d *EntityDecl) ObjType() types.Type { + if d.Type != nil { + return d.Type + } + if d.Alias != nil { + return d.Alias + } + + panic("invalid EntityDecl: Type and Alias are both nil") +} + +func (d *EntityDecl) Names() (name, goName string) { + goName = d.Ident.Name + model, ok := parsers.ModelOverride(d.Comments) + if !ok { + return goName, goName + } + + d.hasModelAnnotation = true + if model == "" { + return goName, goName + } + + return model, goName +} + +func (d *EntityDecl) ResponseNames() (name, goName string) { + goName = d.Ident.Name + response, ok := parsers.ResponseOverride(d.Comments) + if !ok { + return name, goName + } + + d.hasResponseAnnotation = true + if response == "" { + return goName, goName + } + + return response, goName +} + +func (d *EntityDecl) OperationIDs() (result []string) { + if d == nil { + return nil + } + + parameters, ok := parsers.ParametersOverride(d.Comments) + if !ok { + return nil + } + + d.hasParameterAnnotation = true + + for _, parameter := range parameters { + for param := range strings.SplitSeq(parameter, " ") { + name := strings.TrimSpace(param) + if len(name) > 0 { + result = append(result, name) + } + } + } + + return result +} + +func (d *EntityDecl) HasModelAnnotation() bool { + if d.hasModelAnnotation { + return true + } + + _, ok := parsers.ModelOverride(d.Comments) + if !ok { + return false + } + + d.hasModelAnnotation = true + + return true +} + +func (d *EntityDecl) HasResponseAnnotation() bool { + if d.hasResponseAnnotation { + return true + } + + _, ok := parsers.ResponseOverride(d.Comments) + if !ok { + return false + } + + d.hasResponseAnnotation = true + + return true +} + +func (d *EntityDecl) HasParameterAnnotation() bool { + if d.hasParameterAnnotation { + return true + } + + _, ok := 
parsers.ParametersOverride(d.Comments) + if !ok { + return false + } + + d.hasParameterAnnotation = true + + return true +} diff --git a/internal/scanner/declaration_test.go b/internal/scanner/declaration_test.go new file mode 100644 index 0000000..7c3e282 --- /dev/null +++ b/internal/scanner/declaration_test.go @@ -0,0 +1,272 @@ +// SPDX-FileCopyrightText: Copyright 2015-2025 go-swagger maintainers +// SPDX-License-Identifier: Apache-2.0 + +package scanner + +import ( + "go/ast" + "go/types" + "slices" + "testing" + + "github.com/go-openapi/testify/v2/assert" + "github.com/go-openapi/testify/v2/require" +) + +func TestEntityDecl(t *testing.T) { + sctx := loadClassificationPkgsCtx(t) + + t.Run("Obj", func(t *testing.T) { + t.Run("named type returns TypeName", func(t *testing.T) { + decl, ok := sctx.FindDecl( + "github.com/go-openapi/codescan/fixtures/goparsing/classification/models", + "User", + ) + require.True(t, ok) + require.NotNil(t, decl.Type) + + obj := decl.Obj() + assert.EqualT(t, "User", obj.Name()) + }) + + t.Run("panics when both Type and Alias are nil", func(t *testing.T) { + decl := &EntityDecl{ + Ident: ast.NewIdent("Bad"), + } + assert.Panics(t, func() { + decl.Obj() + }) + }) + }) + + t.Run("ObjType", func(t *testing.T) { + t.Run("named type returns types.Named", func(t *testing.T) { + decl, ok := sctx.FindDecl( + "github.com/go-openapi/codescan/fixtures/goparsing/classification/models", + "User", + ) + require.True(t, ok) + + objType := decl.ObjType() + _, isNamed := objType.(*types.Named) + assert.True(t, isNamed, "expected *types.Named, got %T", objType) + }) + + t.Run("alias type returns types.Alias", func(t *testing.T) { + // Load the spec fixture which has type aliases (Customer = User). 
+ specCtx, err := NewScanCtx(&Options{ + Packages: []string{"./goparsing/spec"}, + WorkDir: "../../fixtures", + }) + require.NoError(t, err) + + decl, ok := specCtx.FindDecl( + "github.com/go-openapi/codescan/fixtures/goparsing/spec", + "Customer", + ) + require.True(t, ok) + require.NotNil(t, decl.Alias, "Customer should be a type alias") + + objType := decl.ObjType() + _, isAlias := objType.(*types.Alias) + assert.True(t, isAlias, "expected *types.Alias, got %T", objType) + }) + + t.Run("panics when both Type and Alias are nil", func(t *testing.T) { + decl := &EntityDecl{ + Ident: ast.NewIdent("Bad"), + } + assert.Panics(t, func() { + decl.ObjType() + }) + }) + }) + + t.Run("Names", func(t *testing.T) { + t.Run("model with swagger:model annotation uses Go name", func(t *testing.T) { + decl, ok := sctx.FindDecl( + "github.com/go-openapi/codescan/fixtures/goparsing/classification/models", + "User", + ) + require.True(t, ok) + + name, goName := decl.Names() + assert.EqualT(t, "User", goName) + assert.EqualT(t, "User", name) + }) + + t.Run("type without model annotation returns Go name for both", func(t *testing.T) { + decl, ok := sctx.FindDecl( + "github.com/go-openapi/codescan/fixtures/goparsing/classification/operations", + "SimpleOne", + ) + require.True(t, ok) + + name, goName := decl.Names() + assert.EqualT(t, "SimpleOne", goName) + assert.EqualT(t, "SimpleOne", name) + }) + + t.Run("model with override name returns override", func(t *testing.T) { + decl, ok := sctx.FindDecl( + "github.com/go-openapi/codescan/fixtures/goparsing/classification/models", + "BaseStruct", + ) + require.True(t, ok) + + name, goName := decl.Names() + assert.EqualT(t, "BaseStruct", goName) + assert.EqualT(t, "animal", name) // override name from swagger:model animal + }) + }) + + t.Run("ResponseNames", func(t *testing.T) { + t.Run("response with override name", func(t *testing.T) { + decl, ok := sctx.FindDecl( + 
"github.com/go-openapi/codescan/fixtures/goparsing/classification/operations", + "GenericError", + ) + require.True(t, ok) + + name, goName := decl.ResponseNames() + assert.EqualT(t, "GenericError", goName) + assert.EqualT(t, "genericError", name) + }) + + t.Run("type without response annotation", func(t *testing.T) { + decl, ok := sctx.FindDecl( + "github.com/go-openapi/codescan/fixtures/goparsing/classification/operations", + "SimpleOne", + ) + require.True(t, ok) + + name, goName := decl.ResponseNames() + assert.EqualT(t, "SimpleOne", goName) + assert.EqualT(t, "", name) + }) + + t.Run("response with bare annotation no override", func(t *testing.T) { + decl, ok := sctx.FindDecl( + "github.com/go-openapi/codescan/fixtures/goparsing/classification/operations", + "NumPlatesResp", + ) + require.True(t, ok) + + name, goName := decl.ResponseNames() + assert.EqualT(t, "NumPlatesResp", goName) + assert.EqualT(t, "NumPlatesResp", name) + }) + }) + + t.Run("OperationIDs", func(t *testing.T) { + t.Run("nil receiver returns nil", func(t *testing.T) { + var decl *EntityDecl + assert.Nil(t, decl.OperationIDs()) + }) + + t.Run("type with single parameter annotation", func(t *testing.T) { + decl, ok := sctx.FindDecl( + "github.com/go-openapi/codescan/fixtures/goparsing/classification/operations", + "MyFileParams", + ) + require.True(t, ok) + + ids := decl.OperationIDs() + require.Len(t, ids, 1) + assert.EqualT(t, "myOperation", ids[0]) + }) + + t.Run("type with multiple parameter annotations", func(t *testing.T) { + decl, ok := sctx.FindDecl( + "github.com/go-openapi/codescan/fixtures/goparsing/classification/operations", + "OrderBodyParams", + ) + require.True(t, ok) + + ids := decl.OperationIDs() + require.Len(t, ids, 2) + assert.True(t, slices.Contains(ids, "updateOrder"), "expected ids to contain updateOrder") + assert.True(t, slices.Contains(ids, "createOrder"), "expected ids to contain createOrder") + }) + + t.Run("type without parameter annotation returns nil", func(t 
*testing.T) { + decl, ok := sctx.FindDecl( + "github.com/go-openapi/codescan/fixtures/goparsing/classification/operations", + "SimpleOne", + ) + require.True(t, ok) + + ids := decl.OperationIDs() + assert.Nil(t, ids) + }) + }) + + t.Run("HasAnnotation caching", func(t *testing.T) { + t.Run("HasModelAnnotation caches result", func(t *testing.T) { + decl, ok := sctx.FindDecl( + "github.com/go-openapi/codescan/fixtures/goparsing/classification/models", + "User", + ) + require.True(t, ok) + + // First call: parses comments + assert.True(t, decl.HasModelAnnotation()) + // Second call: returns cached true + assert.True(t, decl.HasModelAnnotation()) + }) + + t.Run("HasResponseAnnotation caches result", func(t *testing.T) { + decl, ok := sctx.FindDecl( + "github.com/go-openapi/codescan/fixtures/goparsing/classification/operations", + "GenericError", + ) + require.True(t, ok) + + assert.True(t, decl.HasResponseAnnotation()) + // Second call: returns cached true + assert.True(t, decl.HasResponseAnnotation()) + }) + + t.Run("HasParameterAnnotation caches result", func(t *testing.T) { + decl, ok := sctx.FindDecl( + "github.com/go-openapi/codescan/fixtures/goparsing/classification/operations", + "MyFileParams", + ) + require.True(t, ok) + + assert.True(t, decl.HasParameterAnnotation()) + // Second call: returns cached true + assert.True(t, decl.HasParameterAnnotation()) + }) + + t.Run("HasModelAnnotation returns false for non-model", func(t *testing.T) { + decl, ok := sctx.FindDecl( + "github.com/go-openapi/codescan/fixtures/goparsing/classification/operations", + "SimpleOne", + ) + require.True(t, ok) + + assert.False(t, decl.HasModelAnnotation()) + }) + + t.Run("HasResponseAnnotation returns false for non-response", func(t *testing.T) { + decl, ok := sctx.FindDecl( + "github.com/go-openapi/codescan/fixtures/goparsing/classification/operations", + "SimpleOne", + ) + require.True(t, ok) + + assert.False(t, decl.HasResponseAnnotation()) + }) + + t.Run("HasParameterAnnotation 
returns false for non-parameter", func(t *testing.T) { + decl, ok := sctx.FindDecl( + "github.com/go-openapi/codescan/fixtures/goparsing/classification/operations", + "SimpleOne", + ) + require.True(t, ok) + + assert.False(t, decl.HasParameterAnnotation()) + }) + }) +} diff --git a/internal/scanner/errors.go b/internal/scanner/errors.go new file mode 100644 index 0000000..370db72 --- /dev/null +++ b/internal/scanner/errors.go @@ -0,0 +1,9 @@ +// SPDX-FileCopyrightText: Copyright 2015-2025 go-swagger maintainers +// SPDX-License-Identifier: Apache-2.0 + +package scanner + +import "errors" + +// ErrScanner is the sentinel error for all errors originating from the scanner package. +var ErrScanner = errors.New("codescan:scanner") diff --git a/internal/scanner/export_test.go b/internal/scanner/export_test.go new file mode 100644 index 0000000..b72454e --- /dev/null +++ b/internal/scanner/export_test.go @@ -0,0 +1,14 @@ +// SPDX-FileCopyrightText: Copyright 2015-2025 go-swagger maintainers +// SPDX-License-Identifier: Apache-2.0 + +package scanner + +import "golang.org/x/tools/go/packages" + +// PkgForPath exposes the internal package lookup for test use only. +// It is not part of the production API; production code resolves packages +// through typed entry points like FindDecl/FindModel/DeclForType. 
+func (s *ScanCtx) PkgForPath(pkgPath string) (*packages.Package, bool) { + v, ok := s.app.AllPackages[pkgPath] + return v, ok +} diff --git a/internal/scanner/index.go b/internal/scanner/index.go new file mode 100644 index 0000000..56bf35c --- /dev/null +++ b/internal/scanner/index.go @@ -0,0 +1,398 @@ +package scanner + +import ( + "fmt" + "go/ast" + "go/types" + "regexp" + + "github.com/go-openapi/codescan/internal/logger" + "github.com/go-openapi/codescan/internal/parsers" + "golang.org/x/tools/go/packages" +) + +type TypeIndexOption func(*TypeIndex) + +func WithExcludeDeps(excluded bool) TypeIndexOption { + return func(a *TypeIndex) { + a.excludeDeps = excluded + } +} + +func WithIncludeTags(included map[string]bool) TypeIndexOption { + return func(a *TypeIndex) { + a.includeTags = included + } +} + +func WithExcludeTags(excluded map[string]bool) TypeIndexOption { + return func(a *TypeIndex) { + a.excludeTags = excluded + } +} + +func WithIncludePkgs(included []string) TypeIndexOption { + return func(a *TypeIndex) { + a.includePkgs = included + } +} + +func WithExcludePkgs(excluded []string) TypeIndexOption { + return func(a *TypeIndex) { + a.excludePkgs = excluded + } +} + +func WithXNullableForPointers(enabled bool) TypeIndexOption { + return func(a *TypeIndex) { + a.setXNullableForPointers = enabled + } +} + +func WithRefAliases(enabled bool) TypeIndexOption { + return func(a *TypeIndex) { + a.refAliases = enabled + } +} + +func WithTransparentAliases(enabled bool) TypeIndexOption { + return func(a *TypeIndex) { + a.transparentAliases = enabled + } +} + +func WithDebug(enabled bool) TypeIndexOption { + return func(a *TypeIndex) { + a.debug = enabled + } +} + +type TypeIndex struct { + AllPackages map[string]*packages.Package + Models map[*ast.Ident]*EntityDecl + ExtraModels map[*ast.Ident]*EntityDecl + Meta []parsers.MetaSection + Routes []parsers.ParsedPathContent + Operations []parsers.ParsedPathContent + Parameters []*EntityDecl + Responses []*EntityDecl 
+ excludeDeps bool + includeTags map[string]bool + excludeTags map[string]bool + includePkgs []string + excludePkgs []string + setXNullableForPointers bool + refAliases bool + transparentAliases bool + debug bool +} + +func NewTypeIndex(pkgs []*packages.Package, opts ...TypeIndexOption) (*TypeIndex, error) { + ac := &TypeIndex{ + AllPackages: make(map[string]*packages.Package), + Models: make(map[*ast.Ident]*EntityDecl), + ExtraModels: make(map[*ast.Ident]*EntityDecl), + } + for _, apply := range opts { + apply(ac) + } + + if err := ac.build(pkgs); err != nil { + return nil, err + } + return ac, nil +} + +func (a *TypeIndex) build(pkgs []*packages.Package) error { + for _, pkg := range pkgs { + if _, known := a.AllPackages[pkg.PkgPath]; known { + continue + } + a.AllPackages[pkg.PkgPath] = pkg + if err := a.processPackage(pkg); err != nil { + return err + } + if err := a.walkImports(pkg); err != nil { + return err + } + } + + return nil +} + +func (a *TypeIndex) processPackage(pkg *packages.Package) error { + if !shouldAcceptPkg(pkg.PkgPath, a.includePkgs, a.excludePkgs) { + logger.DebugLogf(a.debug, "package %s is ignored due to rules", pkg.Name) + return nil + } + + for _, file := range pkg.Syntax { + if err := a.processFile(pkg, file); err != nil { + return err + } + } + + return nil +} + +func (a *TypeIndex) processFile(pkg *packages.Package, file *ast.File) error { + n, err := a.detectNodes(file) + if err != nil { + return err + } + + if n&metaNode != 0 { + a.Meta = append(a.Meta, parsers.MetaSection{Comments: file.Doc}) + } + + if n&operationNode != 0 { + a.Operations = a.collectOperationPathAnnotations(file.Comments, a.Operations) + } + + if n&routeNode != 0 { + a.Routes = a.collectRoutePathAnnotations(file.Comments, a.Routes) + } + + a.processFileDecls(pkg, file, n) + + return nil +} + +func (a *TypeIndex) collectOperationPathAnnotations(comments []*ast.CommentGroup, dst []parsers.ParsedPathContent) []parsers.ParsedPathContent { + for _, cmts := range 
comments { + pp := parsers.ParseOperationPathAnnotation(cmts.List) + if pp.Method == "" { + continue + } + + if !shouldAcceptTag(pp.Tags, a.includeTags, a.excludeTags) { + logger.DebugLogf(a.debug, "operation %s %s is ignored due to tag rules", pp.Method, pp.Path) + continue + } + dst = append(dst, pp) + } + + return dst +} + +func (a *TypeIndex) collectRoutePathAnnotations(comments []*ast.CommentGroup, dst []parsers.ParsedPathContent) []parsers.ParsedPathContent { + for _, cmts := range comments { + pp := parsers.ParseRoutePathAnnotation(cmts.List) + if pp.Method == "" { + continue + } + + if !shouldAcceptTag(pp.Tags, a.includeTags, a.excludeTags) { + logger.DebugLogf(a.debug, "operation %s %s is ignored due to tag rules", pp.Method, pp.Path) + continue + } + dst = append(dst, pp) + } + + return dst +} + +func (a *TypeIndex) processFileDecls(pkg *packages.Package, file *ast.File, n node) { + for _, dt := range file.Decls { + switch fd := dt.(type) { + case *ast.BadDecl: + continue + case *ast.FuncDecl: + if fd.Body == nil { + continue + } + for _, stmt := range fd.Body.List { + if dstm, ok := stmt.(*ast.DeclStmt); ok { + if gd, isGD := dstm.Decl.(*ast.GenDecl); isGD { + a.processDecl(pkg, file, n, gd) + } + } + } + case *ast.GenDecl: + a.processDecl(pkg, file, n, fd) + } + } +} + +func (a *TypeIndex) processDecl(pkg *packages.Package, file *ast.File, n node, gd *ast.GenDecl) { + for _, sp := range gd.Specs { + switch ts := sp.(type) { + case *ast.ValueSpec: + logger.DebugLogf(a.debug, "saw value spec: %v", ts.Names) + return + case *ast.ImportSpec: + logger.DebugLogf(a.debug, "saw import spec: %v", ts.Name) + return + case *ast.TypeSpec: + def, ok := pkg.TypesInfo.Defs[ts.Name] + if !ok { + logger.DebugLogf(a.debug, "couldn't find type info for %s", ts.Name) + continue + } + nt, isNamed := def.Type().(*types.Named) + at, isAliased := def.Type().(*types.Alias) + if !isNamed && !isAliased { + logger.DebugLogf(a.debug, "%s is not a named or aliased type but a %T", 
ts.Name, def.Type()) + + continue + } + + comments := ts.Doc // type ( /* doc */ Foo struct{} ) + if comments == nil { + comments = gd.Doc // /* doc */ type ( Foo struct{} ) + } + + decl := &EntityDecl{ + Comments: comments, + Type: nt, + Alias: at, + Ident: ts.Name, + Spec: ts, + File: file, + Pkg: pkg, + } + key := ts.Name + switch { + case n&modelNode != 0 && decl.HasModelAnnotation(): + a.Models[key] = decl + case n¶metersNode != 0 && decl.HasParameterAnnotation(): + a.Parameters = append(a.Parameters, decl) + case n&responseNode != 0 && decl.HasResponseAnnotation(): + a.Responses = append(a.Responses, decl) + default: + logger.DebugLogf(a.debug, + "type %q skipped because it is not tagged as a model, a parameter or a response. %s", + decl.Obj().Name(), + "It may reenter the scope because it is a discovered dependency", + ) + } + } + } +} + +func (a *TypeIndex) walkImports(pkg *packages.Package) error { + if a.excludeDeps { + return nil + } + for _, v := range pkg.Imports { + if _, known := a.AllPackages[v.PkgPath]; known { + continue + } + + a.AllPackages[v.PkgPath] = v + if err := a.processPackage(v); err != nil { + return err + } + if err := a.walkImports(v); err != nil { + return err + } + } + + return nil +} + +// detectNodes scans all comment groups in a file and returns a bitmask of +// detected swagger annotation types. Node types like route, operation, and +// meta accumulate freely across comment groups. Struct-level annotations +// (model, parameters, response) are mutually exclusive Within a single +// comment group — mixing them is an error. 
+func (a *TypeIndex) detectNodes(file *ast.File) (node, error) { + var n node + for _, comments := range file.Comments { + var seenStruct string // tracks the struct annotation for this comment group + for _, cline := range comments.List { + if cline == nil { + continue + } + } + + for _, cline := range comments.List { + if cline == nil { + continue + } + + annotation, ok := parsers.ExtractAnnotation(cline.Text) + if !ok { + continue + } + + switch annotation { + case "route": + n |= routeNode + case "operation": + n |= operationNode + case "model": //nolint:goconst // annotation keyword matched from swagger comment + n |= modelNode + if err := checkStructConflict(&seenStruct, annotation, cline.Text); err != nil { + return 0, err + } + case "meta": + n |= metaNode + case "parameters": + n |= parametersNode + if err := checkStructConflict(&seenStruct, annotation, cline.Text); err != nil { + return 0, err + } + case "response": + n |= responseNode + if err := checkStructConflict(&seenStruct, annotation, cline.Text); err != nil { + return 0, err + } + case "strfmt", "name", "discriminated", "file", "enum", "default", "alias", "type": + // TODO: perhaps collect these and pass along to avoid lookups later on + case "allOf": + case "ignore": + default: + return 0, fmt.Errorf("classifier: unknown swagger annotation %q: %w", annotation, ErrScanner) + } + } + } + + return n, nil +} + +func checkStructConflict(seenStruct *string, annotation string, text string) error { + if *seenStruct != "" && *seenStruct != annotation { + return fmt.Errorf("classifier: already annotated as %s, can't also be %q - %s: %w", *seenStruct, annotation, text, ErrScanner) + } + *seenStruct = annotation + return nil +} + +func shouldAcceptTag(tags []string, includeTags map[string]bool, excludeTags map[string]bool) bool { + for _, tag := range tags { + if len(includeTags) > 0 { + if includeTags[tag] { + return true + } + } else if len(excludeTags) > 0 { + if excludeTags[tag] { + return false + } + } 
+ } + + return len(includeTags) == 0 +} + +func shouldAcceptPkg(path string, includePkgs, excludePkgs []string) bool { + if len(includePkgs) == 0 && len(excludePkgs) == 0 { + return true + } + + for _, pkgName := range includePkgs { + matched, _ := regexp.MatchString(pkgName, path) + if matched { + return true + } + } + + for _, pkgName := range excludePkgs { + matched, _ := regexp.MatchString(pkgName, path) + if matched { + return false + } + } + + return len(includePkgs) == 0 +} diff --git a/internal/scanner/index_test.go b/internal/scanner/index_test.go new file mode 100644 index 0000000..1d5114d --- /dev/null +++ b/internal/scanner/index_test.go @@ -0,0 +1,551 @@ +// SPDX-FileCopyrightText: Copyright 2015-2025 go-swagger maintainers +// SPDX-License-Identifier: Apache-2.0 + +package scanner + +import ( + "errors" + "go/ast" + "go/token" + "slices" + "testing" + + "github.com/go-openapi/testify/v2/assert" + "github.com/go-openapi/testify/v2/require" + "golang.org/x/tools/go/packages" +) + +func TestShouldAcceptTag(t *testing.T) { + tagTests := []struct { + tags []string + includeTags map[string]bool + excludeTags map[string]bool + expected bool + }{ + {nil, nil, nil, true}, + {[]string{"app"}, map[string]bool{"app": true}, nil, true}, + {[]string{"app"}, nil, map[string]bool{"app": true}, false}, + } + for _, tt := range tagTests { + actual := shouldAcceptTag(tt.tags, tt.includeTags, tt.excludeTags) + assert.EqualT(t, tt.expected, actual) + } +} + +func TestShouldAcceptPkg(t *testing.T) { + pkgTests := []struct { + path string + includePkgs []string + excludePkgs []string + expected bool + }{ + {"", nil, nil, true}, + {"", nil, []string{"app"}, true}, + {"", []string{"app"}, nil, false}, + {"app", []string{"app"}, nil, true}, + {"app", nil, []string{"app"}, false}, + {"vendor/app", []string{"app"}, nil, true}, + {"vendor/app", nil, []string{"app"}, false}, + } + for _, tt := range pkgTests { + actual := shouldAcceptPkg(tt.path, tt.includePkgs, tt.excludePkgs) + 
assert.EqualT(t, tt.expected, actual) + } +} + +func TestDetectNodes_UnknownAnnotation(t *testing.T) { + file := &ast.File{ + Comments: []*ast.CommentGroup{ + { + List: []*ast.Comment{ + {Text: "// swagger:bogusAnnotation"}, + }, + }, + }, + } + + idx := &TypeIndex{} + _, err := idx.detectNodes(file) + require.Error(t, err) + assert.True(t, errors.Is(err, ErrScanner)) +} + +func TestDetectNodes_AllAnnotationTypes(t *testing.T) { + t.Run("meta node", func(t *testing.T) { + file := &ast.File{ + Comments: []*ast.CommentGroup{ + {List: []*ast.Comment{{Text: "// swagger:meta"}}}, + }, + } + idx := &TypeIndex{} + n, err := idx.detectNodes(file) + require.NoError(t, err) + assert.True(t, n&metaNode != 0) + }) + + t.Run("route node", func(t *testing.T) { + file := &ast.File{ + Comments: []*ast.CommentGroup{ + {List: []*ast.Comment{{Text: "// swagger:route GET /pets pets listPets"}}}, + }, + } + idx := &TypeIndex{} + n, err := idx.detectNodes(file) + require.NoError(t, err) + assert.True(t, n&routeNode != 0) + }) + + t.Run("operation node", func(t *testing.T) { + file := &ast.File{ + Comments: []*ast.CommentGroup{ + {List: []*ast.Comment{{Text: "// swagger:operation GET /pets pets getPet"}}}, + }, + } + idx := &TypeIndex{} + n, err := idx.detectNodes(file) + require.NoError(t, err) + assert.True(t, n&operationNode != 0) + }) + + t.Run("model node", func(t *testing.T) { + file := &ast.File{ + Comments: []*ast.CommentGroup{ + {List: []*ast.Comment{{Text: "// swagger:model"}}}, + }, + } + idx := &TypeIndex{} + n, err := idx.detectNodes(file) + require.NoError(t, err) + assert.True(t, n&modelNode != 0) + }) + + t.Run("parameters node", func(t *testing.T) { + file := &ast.File{ + Comments: []*ast.CommentGroup{ + {List: []*ast.Comment{{Text: "// swagger:parameters myOp"}}}, + }, + } + idx := &TypeIndex{} + n, err := idx.detectNodes(file) + require.NoError(t, err) + assert.True(t, n¶metersNode != 0) + }) + + t.Run("response node", func(t *testing.T) { + file := &ast.File{ + 
Comments: []*ast.CommentGroup{ + {List: []*ast.Comment{{Text: "// swagger:response myResp"}}}, + }, + } + idx := &TypeIndex{} + n, err := idx.detectNodes(file) + require.NoError(t, err) + assert.True(t, n&responseNode != 0) + }) + + t.Run("ignore annotation is accepted", func(t *testing.T) { + file := &ast.File{ + Comments: []*ast.CommentGroup{ + {List: []*ast.Comment{{Text: "// swagger:ignore MyType"}}}, + }, + } + idx := &TypeIndex{} + _, err := idx.detectNodes(file) + require.NoError(t, err) + }) + + t.Run("allOf annotation is accepted", func(t *testing.T) { + file := &ast.File{ + Comments: []*ast.CommentGroup{ + {List: []*ast.Comment{{Text: "// swagger:allOf MyParent"}}}, + }, + } + idx := &TypeIndex{} + _, err := idx.detectNodes(file) + require.NoError(t, err) + }) + + t.Run("known non-struct annotations are accepted", func(t *testing.T) { + for _, annotation := range []string{"strfmt", "name", "discriminated", "file", "enum", "default", "alias", "type"} { + file := &ast.File{ + Comments: []*ast.CommentGroup{ + {List: []*ast.Comment{{Text: "// swagger:" + annotation + " something"}}}, + }, + } + idx := &TypeIndex{} + _, err := idx.detectNodes(file) + require.NoError(t, err, "annotation %q should be accepted", annotation) + } + }) + + t.Run("no annotations at all", func(t *testing.T) { + file := &ast.File{ + Comments: []*ast.CommentGroup{ + {List: []*ast.Comment{{Text: "// just a regular comment"}}}, + }, + } + idx := &TypeIndex{} + n, err := idx.detectNodes(file) + require.NoError(t, err) + assert.EqualT(t, node(0), n) + }) + + t.Run("nil comment line is skipped", func(t *testing.T) { + file := &ast.File{ + Comments: []*ast.CommentGroup{ + {List: []*ast.Comment{nil, {Text: "// swagger:meta"}}}, + }, + } + idx := &TypeIndex{} + n, err := idx.detectNodes(file) + require.NoError(t, err) + assert.True(t, n&metaNode != 0) + }) +} + +func TestCheckStructConflict(t *testing.T) { + t.Run("no conflict with same annotation", func(t *testing.T) { + seen := "" + err := 
checkStructConflict(&seen, "model", "// swagger:model Foo") + require.NoError(t, err) + assert.EqualT(t, "model", seen) + + // Same annotation again: no conflict. + err = checkStructConflict(&seen, "model", "// swagger:model Bar") + require.NoError(t, err) + }) + + t.Run("conflict with different struct annotations", func(t *testing.T) { + seen := "model" + err := checkStructConflict(&seen, "parameters", "// swagger:parameters myOp") + require.Error(t, err) + assert.True(t, errors.Is(err, ErrScanner)) + }) + + t.Run("model then response conflicts", func(t *testing.T) { + seen := "model" + err := checkStructConflict(&seen, "response", "// swagger:response myResp") + require.Error(t, err) + assert.True(t, errors.Is(err, ErrScanner)) + }) + + t.Run("parameters then model conflicts", func(t *testing.T) { + seen := "parameters" + err := checkStructConflict(&seen, "model", "// swagger:model Foo") + require.Error(t, err) + assert.True(t, errors.Is(err, ErrScanner)) + }) +} + +func TestDetectNodes_StructConflict(t *testing.T) { + t.Run("model and parameters in same comment group", func(t *testing.T) { + file := &ast.File{ + Comments: []*ast.CommentGroup{ + { + List: []*ast.Comment{ + {Text: "// swagger:model Foo"}, + {Text: "// swagger:parameters myOp"}, + }, + }, + }, + } + idx := &TypeIndex{} + _, err := idx.detectNodes(file) + require.Error(t, err) + assert.True(t, errors.Is(err, ErrScanner)) + }) + + t.Run("model and response in same comment group", func(t *testing.T) { + file := &ast.File{ + Comments: []*ast.CommentGroup{ + { + List: []*ast.Comment{ + {Text: "// swagger:model Foo"}, + {Text: "// swagger:response myResp"}, + }, + }, + }, + } + idx := &TypeIndex{} + _, err := idx.detectNodes(file) + require.Error(t, err) + assert.True(t, errors.Is(err, ErrScanner)) + }) + + t.Run("parameters and response in same comment group", func(t *testing.T) { + file := &ast.File{ + Comments: []*ast.CommentGroup{ + { + List: []*ast.Comment{ + {Text: "// swagger:parameters myOp"}, + 
{Text: "// swagger:response myResp"}, + }, + }, + }, + } + idx := &TypeIndex{} + _, err := idx.detectNodes(file) + require.Error(t, err) + assert.True(t, errors.Is(err, ErrScanner)) + }) + + t.Run("model in separate comment groups is fine", func(t *testing.T) { + file := &ast.File{ + Comments: []*ast.CommentGroup{ + {List: []*ast.Comment{{Text: "// swagger:model Foo"}}}, + {List: []*ast.Comment{{Text: "// swagger:response myResp"}}}, + }, + } + idx := &TypeIndex{} + n, err := idx.detectNodes(file) + require.NoError(t, err) + assert.True(t, n&modelNode != 0) + assert.True(t, n&responseNode != 0) + }) +} + +func TestNewTypeIndex_ExcludeDeps(t *testing.T) { + sctx, err := NewScanCtx(&Options{ + Packages: []string{ + "./goparsing/classification", + "./goparsing/classification/models", + "./goparsing/classification/operations", + }, + WorkDir: "../../fixtures", + ExcludeDeps: true, + }) + require.NoError(t, err) + require.NotNil(t, sctx) + require.NotNil(t, sctx.app) + + // With ExcludeDeps, imports should NOT be walked: + // the AllPackages map should only contain the explicitly-loaded packages, + // NOT transitive dependencies like strfmt. + _, hasStrfmt := sctx.app.AllPackages["github.com/go-openapi/strfmt"] + assert.False(t, hasStrfmt, "strfmt should not be indexed when ExcludeDeps is true") +} + +func TestNewTypeIndex_IncludeExcludePkgs(t *testing.T) { + t.Run("include only models package", func(t *testing.T) { + sctx, err := NewScanCtx(&Options{ + Packages: []string{ + "./goparsing/classification", + "./goparsing/classification/models", + "./goparsing/classification/operations", + }, + WorkDir: "../../fixtures", + ExcludeDeps: true, + Include: []string{"models"}, + }) + require.NoError(t, err) + + // Only the models package should have been processed for swagger annotations. + // Routes and parameters come from the operations package, so they should be empty. 
+ assert.Empty(t, sctx.app.Routes) + assert.Empty(t, sctx.app.Parameters) + }) + + t.Run("exclude operations package", func(t *testing.T) { + sctx, err := NewScanCtx(&Options{ + Packages: []string{ + "./goparsing/classification", + "./goparsing/classification/models", + "./goparsing/classification/operations", + }, + WorkDir: "../../fixtures", + ExcludeDeps: true, + Exclude: []string{"operations$"}, + }) + require.NoError(t, err) + + // Routes and parameters come from operations, so excluding it should eliminate them. + assert.Empty(t, sctx.app.Routes) + assert.Empty(t, sctx.app.Parameters) + }) +} + +func TestNewTypeIndex_IncludeExcludeTags(t *testing.T) { + t.Run("include tag filters routes", func(t *testing.T) { + sctx, err := NewScanCtx(&Options{ + Packages: []string{ + "./goparsing/classification", + "./goparsing/classification/operations", + }, + WorkDir: "../../fixtures", + ExcludeDeps: true, + IncludeTags: []string{"orders"}, + }) + require.NoError(t, err) + + // Only routes tagged "orders" should be included. + for r := range sctx.Routes() { + assert.True(t, slices.Contains(r.Tags, "orders"), "expected route to have tag 'orders', got tags: %v", r.Tags) + } + }) + + t.Run("exclude tag filters routes", func(t *testing.T) { + sctx, err := NewScanCtx(&Options{ + Packages: []string{ + "./goparsing/classification", + "./goparsing/classification/operations", + }, + WorkDir: "../../fixtures", + ExcludeDeps: true, + ExcludeTags: []string{"orders"}, + }) + require.NoError(t, err) + + // No route should have the "orders" tag. 
+ for r := range sctx.Routes() { + for _, tag := range r.Tags { + assert.True(t, tag != "orders", "route should not have tag 'orders', got tags: %v", r.Tags) + } + } + }) +} + +func TestCollectOperationPathAnnotations(t *testing.T) { + sctx, err := NewScanCtx(&Options{ + Packages: []string{"./goparsing/classification/operations_annotation"}, + WorkDir: "../../fixtures", + }) + require.NoError(t, err) + + // operations_annotation has swagger:operation annotations. + var count int + for range sctx.Operations() { + count++ + } + assert.True(t, count > 0, "expected at least one operation from operations_annotation fixture") +} + +func TestCollectOperationPathAnnotations_TagFiltering(t *testing.T) { + t.Run("include tag filters operations", func(t *testing.T) { + sctx, err := NewScanCtx(&Options{ + Packages: []string{"./goparsing/classification/operations_annotation"}, + WorkDir: "../../fixtures", + ExcludeDeps: true, + IncludeTags: []string{"Events"}, + }) + require.NoError(t, err) + + var count int + for op := range sctx.Operations() { + count++ + // All remaining operations should have the "Events" tag. + assert.True(t, slices.Contains(op.Tags, "Events"), "expected operation to have tag 'Events', got tags: %v", op.Tags) + } + // Should have filtered some operations out. 
+ assert.True(t, count > 0, "expected at least one operation with tag 'Events'") + }) + + t.Run("exclude tag filters operations", func(t *testing.T) { + sctx, err := NewScanCtx(&Options{ + Packages: []string{"./goparsing/classification/operations_annotation"}, + WorkDir: "../../fixtures", + ExcludeDeps: true, + ExcludeTags: []string{"pets"}, + }) + require.NoError(t, err) + + for op := range sctx.Operations() { + for _, tag := range op.Tags { + assert.True(t, tag != "pets", "operation should not have tag 'pets', got tags: %v", op.Tags) + } + } + }) +} + +func TestNewTypeIndex_ErrorPropagation(t *testing.T) { + t.Run("struct annotation conflict propagates error through build chain", func(t *testing.T) { + // The invalid_model_param fixture has swagger:model and swagger:parameters + // on the same struct, which triggers a struct conflict error in detectNodes, + // propagated through processFile → processPackage → build → NewTypeIndex → NewScanCtx. + _, err := NewScanCtx(&Options{ + Packages: []string{"./goparsing/invalid_model_param"}, + WorkDir: "../../fixtures", + }) + require.Error(t, err) + assert.True(t, errors.Is(err, ErrScanner), "expected ErrScanner, got: %v", err) + }) + + t.Run("model and response conflict propagates error", func(t *testing.T) { + _, err := NewScanCtx(&Options{ + Packages: []string{"./goparsing/invalid_model_response"}, + WorkDir: "../../fixtures", + }) + require.Error(t, err) + assert.True(t, errors.Is(err, ErrScanner), "expected ErrScanner, got: %v", err) + }) + + t.Run("param and model conflict propagates error", func(t *testing.T) { + _, err := NewScanCtx(&Options{ + Packages: []string{"./goparsing/invalid_param_model"}, + WorkDir: "../../fixtures", + }) + require.Error(t, err) + assert.True(t, errors.Is(err, ErrScanner), "expected ErrScanner, got: %v", err) + }) + + t.Run("response and model conflict propagates error", func(t *testing.T) { + _, err := NewScanCtx(&Options{ + Packages: []string{"./goparsing/invalid_response_model"}, + 
WorkDir: "../../fixtures", + }) + require.Error(t, err) + assert.True(t, errors.Is(err, ErrScanner), "expected ErrScanner, got: %v", err) + }) + + t.Run("duplicate package is de-duplicated", func(t *testing.T) { + // Load the same package twice — the second occurrence should be + // skipped (line 109 in build). + sctx, err := NewScanCtx(&Options{ + Packages: []string{ + "./goparsing/petstore/enums", + "./goparsing/petstore/enums", // duplicate + }, + WorkDir: "../../fixtures", + }) + require.NoError(t, err) + require.NotNil(t, sctx) + }) + + t.Run("walkImports error propagation", func(t *testing.T) { + // Load a valid package alongside an invalid one that shares imports. + // The invalid_model_param package is a main package; loading it should + // trigger the conflict error even if other packages are fine. + _, err := NewScanCtx(&Options{ + Packages: []string{ + "./goparsing/petstore/enums", + "./goparsing/invalid_model_param", + }, + WorkDir: "../../fixtures", + }) + require.Error(t, err) + assert.True(t, errors.Is(err, ErrScanner)) + }) +} + +func TestProcessFileDecls_BadDecl(t *testing.T) { + // Ensure processFileDecls skips BadDecl without panicking. + idx := &TypeIndex{ + AllPackages: make(map[string]*packages.Package), + Models: make(map[*ast.Ident]*EntityDecl), + ExtraModels: make(map[*ast.Ident]*EntityDecl), + } + + file := &ast.File{ + Decls: []ast.Decl{ + &ast.BadDecl{}, + &ast.GenDecl{ + Tok: token.IMPORT, + Specs: []ast.Spec{&ast.ImportSpec{Path: &ast.BasicLit{Value: `"fmt"`}}}, + }, + }, + } + + // Should not panic. 
+ assert.NotPanics(t, func() { + idx.processFileDecls(nil, file, 0) + }) +} diff --git a/internal/scanner/options.go b/internal/scanner/options.go new file mode 100644 index 0000000..74f3b8d --- /dev/null +++ b/internal/scanner/options.go @@ -0,0 +1,22 @@ +package scanner + +import "github.com/go-openapi/spec" + +type Options struct { + Packages []string + InputSpec *spec.Swagger + ScanModels bool + WorkDir string + BuildTags string + ExcludeDeps bool + Include []string + Exclude []string + IncludeTags []string + ExcludeTags []string + SetXNullableForPointers bool + RefAliases bool // aliases result in $ref, otherwise aliases are expanded + TransparentAliases bool // aliases are completely transparent, never creating definitions + DescWithRef bool // allow overloaded descriptions together with $ref, otherwise jsonschema draft4 $ref predates everything + SkipExtensions bool // skip generating x-go-* vendor extensions in the spec + Debug bool // enable verbose debug logging during scanning +} diff --git a/internal/scanner/scan_context.go b/internal/scanner/scan_context.go new file mode 100644 index 0000000..28c0708 --- /dev/null +++ b/internal/scanner/scan_context.go @@ -0,0 +1,399 @@ +// SPDX-FileCopyrightText: Copyright 2015-2025 go-swagger maintainers +// SPDX-License-Identifier: Apache-2.0 + +package scanner + +import ( + "fmt" + "go/ast" + "go/token" + "go/types" + "iter" + "log" + "maps" + "slices" + "strings" + + "github.com/go-openapi/codescan/internal/logger" + "github.com/go-openapi/codescan/internal/parsers" + "golang.org/x/tools/go/packages" +) + +const pkgLoadMode = packages.NeedName | packages.NeedFiles | packages.NeedImports | packages.NeedDeps | packages.NeedTypes | packages.NeedSyntax | packages.NeedTypesInfo + +type node uint32 + +const ( + metaNode node = 1 << iota + routeNode + operationNode + modelNode + parametersNode + responseNode +) + +type ScanCtx struct { + pkgs []*packages.Package + app *TypeIndex + debug bool + + opts *Options +} + +func 
NewScanCtx(opts *Options) (*ScanCtx, error) { + cfg := &packages.Config{ + Dir: opts.WorkDir, + Mode: pkgLoadMode, + Tests: false, + } + if opts.BuildTags != "" { + cfg.BuildFlags = []string{"-tags", opts.BuildTags} + } + + pkgs, err := packages.Load(cfg, opts.Packages...) + if err != nil { + return nil, err + } + + app, err := NewTypeIndex(pkgs, + WithExcludeDeps(opts.ExcludeDeps), + WithIncludeTags(sliceToSet(opts.IncludeTags)), + WithExcludeTags(sliceToSet(opts.ExcludeTags)), + WithIncludePkgs(opts.Include), + WithExcludePkgs(opts.Exclude), + WithXNullableForPointers(opts.SetXNullableForPointers), + WithRefAliases(opts.RefAliases), + WithTransparentAliases(opts.TransparentAliases), + WithDebug(opts.Debug), + ) + if err != nil { + return nil, err + } + + return &ScanCtx{ + pkgs: pkgs, + app: app, + debug: opts.Debug, + opts: opts, + }, nil +} + +func (s *ScanCtx) SkipExtensions() bool { + return s.opts.SkipExtensions +} + +func (s *ScanCtx) DescWithRef() bool { + return s.opts.DescWithRef +} + +func (s *ScanCtx) SetXNullableForPointers() bool { + return s.opts.SetXNullableForPointers +} + +func (s *ScanCtx) TransparentAliases() bool { + return s.opts.TransparentAliases +} + +func (s *ScanCtx) RefAliases() bool { + return s.opts.RefAliases +} + +func (s *ScanCtx) Debug() bool { + return s.debug +} + +func (s *ScanCtx) Meta() iter.Seq[parsers.MetaSection] { + if s.app == nil { + return nil + } + + return slices.Values(s.app.Meta) +} + +func (s *ScanCtx) Operations() iter.Seq[parsers.ParsedPathContent] { + if s.app == nil { + return nil + } + + return slices.Values(s.app.Operations) +} + +func (s *ScanCtx) Routes() iter.Seq[parsers.ParsedPathContent] { + if s.app == nil { + return nil + } + + return slices.Values(s.app.Routes) +} + +func (s *ScanCtx) Responses() iter.Seq[*EntityDecl] { + if s.app == nil { + return nil + } + + return slices.Values(s.app.Responses) +} + +func (s *ScanCtx) Parameters() iter.Seq[*EntityDecl] { + if s.app == nil { + return nil + } + + 
return slices.Values(s.app.Parameters) +} + +func (s *ScanCtx) Models() iter.Seq2[*ast.Ident, *EntityDecl] { + if s.app == nil { + return nil + } + + return maps.All(s.app.Models) +} + +func (s *ScanCtx) NumExtraModels() int { + if s.app == nil { + return 0 + } + + return len(s.app.ExtraModels) +} + +func (s *ScanCtx) ExtraModels() iter.Seq2[*ast.Ident, *EntityDecl] { + if s.app == nil { + return nil + } + + return maps.All(s.app.ExtraModels) +} + +func (s *ScanCtx) MoveExtraToModel(k *ast.Ident) { + v, ok := s.app.ExtraModels[k] + if !ok { + return + } + + s.app.Models[k] = v + delete(s.app.ExtraModels, k) +} + +func (s *ScanCtx) FindDecl(pkgPath, name string) (*EntityDecl, bool) { + pkg, ok := s.app.AllPackages[pkgPath] + if !ok { + return nil, false + } + + for _, file := range pkg.Syntax { + for _, d := range file.Decls { + gd, ok := d.(*ast.GenDecl) + if !ok { + continue + } + + for _, sp := range gd.Specs { + ts, ok := sp.(*ast.TypeSpec) + if !ok || ts.Name.Name != name { + continue + } + + def, ok := pkg.TypesInfo.Defs[ts.Name] + if !ok { + logger.DebugLogf(s.debug, "couldn't find type info for %s", ts.Name) + continue + } + + nt, isNamed := def.Type().(*types.Named) + at, isAliased := def.Type().(*types.Alias) + if !isNamed && !isAliased { + logger.DebugLogf(s.debug, "%s is not a named or an aliased type but a %T", ts.Name, def.Type()) + continue + } + + comments := ts.Doc // type ( /* doc */ Foo struct{} ) + if comments == nil { + comments = gd.Doc // /* doc */ type ( Foo struct{} ) + } + + return &EntityDecl{ + Comments: comments, + Type: nt, + Alias: at, + Ident: ts.Name, + Spec: ts, + File: file, + Pkg: pkg, + }, true + } + } + } + + return nil, false +} + +func (s *ScanCtx) FindModel(pkgPath, name string) (*EntityDecl, bool) { + for _, cand := range s.app.Models { + ct := cand.Obj() + if ct.Name() == name && ct.Pkg().Path() == pkgPath { + return cand, true + } + } + + if decl, found := s.FindDecl(pkgPath, name); found { + s.app.ExtraModels[decl.Ident] 
= decl + return decl, true + } + + return nil, false +} + +func (s *ScanCtx) DeclForType(t types.Type) (*EntityDecl, bool) { + switch tpe := t.(type) { + case *types.Pointer: + return s.DeclForType(tpe.Elem()) + case *types.Named: + return s.FindDecl(tpe.Obj().Pkg().Path(), tpe.Obj().Name()) + case *types.Alias: + return s.FindDecl(tpe.Obj().Pkg().Path(), tpe.Obj().Name()) + default: + log.Printf("WARNING: unknown type to find the package for [%T]: %s", t, t.String()) + + return nil, false + } +} + +func (s *ScanCtx) PkgForType(t types.Type) (*packages.Package, bool) { + switch tpe := t.(type) { + // case *types.Basic: + // case *types.Struct: + // case *types.Pointer: + // case *types.Interface: + // case *types.Array: + // case *types.Slice: + // case *types.Map: + case *types.Named: + v, ok := s.app.AllPackages[tpe.Obj().Pkg().Path()] + return v, ok + case *types.Alias: + v, ok := s.app.AllPackages[tpe.Obj().Pkg().Path()] + return v, ok + default: + log.Printf("WARNING: unknown type to find the package for [%T]: %s", t, t.String()) + return nil, false + } +} + +func (s *ScanCtx) FindComments(pkg *packages.Package, name string) (*ast.CommentGroup, bool) { + for _, f := range pkg.Syntax { + for _, d := range f.Decls { + gd, ok := d.(*ast.GenDecl) + if !ok { + continue + } + + for _, s := range gd.Specs { + if ts, ok := s.(*ast.TypeSpec); ok { + if ts.Name.Name == name { + return gd.Doc, true + } + } + } + } + } + return nil, false +} + +func (s *ScanCtx) FindEnumValues(pkg *packages.Package, enumName string) (list []any, descList []string, _ bool) { + for _, f := range pkg.Syntax { + for _, d := range f.Decls { + gd, ok := d.(*ast.GenDecl) + if !ok { + continue + } + + if gd.Tok != token.CONST { + continue + } + + for _, spec := range gd.Specs { + literalValue, description := s.findEnumValue(spec, enumName) + if literalValue == nil { + continue + } + + list = append(list, literalValue) + descList = append(descList, description) + } + } + } + + return list, 
descList, true +} + +func (s *ScanCtx) findEnumValue(spec ast.Spec, enumName string) (literalValue any, description string) { + vs, ok := spec.(*ast.ValueSpec) + if !ok { + return nil, "" + } + + vsIdent, ok := vs.Type.(*ast.Ident) + if !ok { + return nil, "" + } + + if vsIdent.Name != enumName { + return nil, "" + } + + if len(vs.Values) == 0 { + return nil, "" + } + + bl, ok := vs.Values[0].(*ast.BasicLit) + if !ok { + return nil, "" + } + + literalValue = parsers.GetEnumBasicLitValue(bl) + + // build the enum description + var ( + desc = &strings.Builder{} + namesLen = len(vs.Names) + ) + + fmt.Fprintf(desc, "%v ", literalValue) + for i, name := range vs.Names { + desc.WriteString(name.Name) + if i < namesLen-1 { + desc.WriteString(" ") + } + } + + if vs.Doc != nil { + docListLen := len(vs.Doc.List) + if docListLen > 0 { + desc.WriteString(" ") + } + + for i, doc := range vs.Doc.List { + if doc.Text != "" { + text := strings.TrimPrefix(doc.Text, "//") + desc.WriteString(text) + if i < docListLen-1 { + desc.WriteString(" ") + } + } + } + } + + description = desc.String() + + return literalValue, description +} + +func sliceToSet(names []string) map[string]bool { + result := make(map[string]bool) + for _, v := range names { + result[v] = true + } + return result +} diff --git a/internal/scanner/scan_context_test.go b/internal/scanner/scan_context_test.go new file mode 100644 index 0000000..f33a3aa --- /dev/null +++ b/internal/scanner/scan_context_test.go @@ -0,0 +1,616 @@ +// SPDX-FileCopyrightText: Copyright 2015-2025 go-swagger maintainers +// SPDX-License-Identifier: Apache-2.0 + +package scanner + +import ( + "go/ast" + "go/token" + "go/types" + "testing" + + "github.com/go-openapi/testify/v2/assert" + "github.com/go-openapi/testify/v2/require" +) + +var classificationCtx *ScanCtx //nolint:gochecknoglobals // test package cache shared across test functions + +func TestApplication_LoadCode(t *testing.T) { + sctx := loadClassificationPkgsCtx(t) + 
require.NotNil(t, sctx) + require.NotNil(t, sctx.app) + require.Len(t, sctx.app.Models, 39) + require.Len(t, sctx.app.Meta, 1) + require.Len(t, sctx.app.Routes, 7) + require.Empty(t, sctx.app.Operations) + require.Len(t, sctx.app.Parameters, 10) + require.Len(t, sctx.app.Responses, 11) +} + +func TestScanCtx_OptionAccessors(t *testing.T) { + sctx := loadClassificationPkgsCtx(t) + + // Default options: all false. + assert.False(t, sctx.SkipExtensions()) + assert.False(t, sctx.DescWithRef()) + assert.False(t, sctx.SetXNullableForPointers()) + assert.False(t, sctx.TransparentAliases()) + assert.False(t, sctx.RefAliases()) + assert.False(t, sctx.Debug()) +} + +func TestScanCtx_OptionAccessors_Enabled(t *testing.T) { + sctx, err := NewScanCtx(&Options{ + Packages: []string{"./goparsing/classification"}, + WorkDir: "../../fixtures", + SkipExtensions: true, + DescWithRef: true, + SetXNullableForPointers: true, + TransparentAliases: true, + RefAliases: true, + Debug: true, + }) + require.NoError(t, err) + + assert.True(t, sctx.SkipExtensions()) + assert.True(t, sctx.DescWithRef()) + assert.True(t, sctx.SetXNullableForPointers()) + assert.True(t, sctx.TransparentAliases()) + assert.True(t, sctx.RefAliases()) + assert.True(t, sctx.Debug()) +} + +func TestScanCtx_Meta(t *testing.T) { + sctx := loadClassificationPkgsCtx(t) + + var count int + for range sctx.Meta() { + count++ + } + assert.EqualT(t, 1, count) +} + +func TestScanCtx_Operations(t *testing.T) { + sctx := loadClassificationPkgsCtx(t) + + var count int + for range sctx.Operations() { + count++ + } + // The classification fixture has no operation annotations (only route annotations). 
+ assert.EqualT(t, 0, count) +} + +func TestScanCtx_Routes(t *testing.T) { + sctx := loadClassificationPkgsCtx(t) + + var count int + for range sctx.Routes() { + count++ + } + assert.EqualT(t, 7, count) +} + +func TestScanCtx_Responses(t *testing.T) { + sctx := loadClassificationPkgsCtx(t) + + var count int + for range sctx.Responses() { + count++ + } + assert.EqualT(t, 11, count) +} + +func TestScanCtx_Parameters(t *testing.T) { + sctx := loadClassificationPkgsCtx(t) + + var count int + for range sctx.Parameters() { + count++ + } + assert.EqualT(t, 10, count) +} + +func TestScanCtx_Models(t *testing.T) { + sctx := loadClassificationPkgsCtx(t) + + var count int + for range sctx.Models() { + count++ + } + assert.EqualT(t, 39, count) +} + +func TestScanCtx_ExtraModels(t *testing.T) { + sctx := loadClassificationPkgsCtx(t) + + // No extra models initially. + assert.EqualT(t, 0, sctx.NumExtraModels()) + + var count int + for range sctx.ExtraModels() { + count++ + } + assert.EqualT(t, 0, count) +} + +func TestScanCtx_NilApp(t *testing.T) { + // Test all iterator/accessor methods when app is nil. 
+ sctx := &ScanCtx{} + + assert.Nil(t, sctx.Meta()) + assert.Nil(t, sctx.Operations()) + assert.Nil(t, sctx.Routes()) + assert.Nil(t, sctx.Responses()) + assert.Nil(t, sctx.Parameters()) + assert.Nil(t, sctx.Models()) + assert.Nil(t, sctx.ExtraModels()) + assert.EqualT(t, 0, sctx.NumExtraModels()) +} + +func TestScanCtx_PkgForPath(t *testing.T) { + sctx := loadClassificationPkgsCtx(t) + + t.Run("known package", func(t *testing.T) { + pkg, ok := sctx.PkgForPath("github.com/go-openapi/codescan/fixtures/goparsing/classification/models") + assert.True(t, ok) + assert.NotNil(t, pkg) + assert.EqualT(t, "models", pkg.Name) + }) + + t.Run("unknown package", func(t *testing.T) { + _, ok := sctx.PkgForPath("github.com/nonexistent/package") + assert.False(t, ok) + }) +} + +func TestScanCtx_FindDecl(t *testing.T) { + sctx := loadClassificationPkgsCtx(t) + + t.Run("finds existing type", func(t *testing.T) { + decl, ok := sctx.FindDecl( + "github.com/go-openapi/codescan/fixtures/goparsing/classification/models", + "User", + ) + require.True(t, ok) + assert.NotNil(t, decl) + assert.EqualT(t, "User", decl.Ident.Name) + assert.NotNil(t, decl.Pkg) + assert.NotNil(t, decl.File) + assert.NotNil(t, decl.Spec) + }) + + t.Run("unknown package returns false", func(t *testing.T) { + _, ok := sctx.FindDecl("github.com/nonexistent/package", "Foo") + assert.False(t, ok) + }) + + t.Run("unknown type in known package returns false", func(t *testing.T) { + _, ok := sctx.FindDecl( + "github.com/go-openapi/codescan/fixtures/goparsing/classification/models", + "NonExistentType", + ) + assert.False(t, ok) + }) +} + +func TestScanCtx_FindModel(t *testing.T) { + sctx := loadClassificationPkgsCtx(t) + + t.Run("finds model in Models map", func(t *testing.T) { + decl, ok := sctx.FindModel( + "github.com/go-openapi/codescan/fixtures/goparsing/classification/models", + "User", + ) + require.True(t, ok) + assert.NotNil(t, decl) + assert.EqualT(t, "User", decl.Ident.Name) + }) + + t.Run("finds type not in 
Models, adds to ExtraModels", func(t *testing.T) { + // SimpleOne has no swagger:model annotation, but it exists as a type. + beforeExtra := sctx.NumExtraModels() + + decl, ok := sctx.FindModel( + "github.com/go-openapi/codescan/fixtures/goparsing/classification/operations", + "SimpleOne", + ) + require.True(t, ok) + assert.NotNil(t, decl) + assert.EqualT(t, "SimpleOne", decl.Ident.Name) + + // Should have been added to ExtraModels. + assert.True(t, sctx.NumExtraModels() > beforeExtra) + }) + + t.Run("type that does not exist returns false", func(t *testing.T) { + _, ok := sctx.FindModel( + "github.com/go-openapi/codescan/fixtures/goparsing/classification/models", + "NonExistentType", + ) + assert.False(t, ok) + }) +} + +func TestScanCtx_MoveExtraToModel(t *testing.T) { + sctx, err := NewScanCtx(&Options{ + Packages: []string{ + "./goparsing/classification", + "./goparsing/classification/models", + "./goparsing/classification/operations", + }, + WorkDir: "../../fixtures", + }) + require.NoError(t, err) + + // Find a type that will be added to ExtraModels. + decl, ok := sctx.FindModel( + "github.com/go-openapi/codescan/fixtures/goparsing/classification/operations", + "SimpleOne", + ) + require.True(t, ok) + + numModels := len(sctx.app.Models) + numExtras := sctx.NumExtraModels() + require.True(t, numExtras > 0, "expected at least one extra model") + + // Move it to models. + sctx.MoveExtraToModel(decl.Ident) + + assert.EqualT(t, numModels+1, len(sctx.app.Models)) + assert.EqualT(t, numExtras-1, sctx.NumExtraModels()) + + // Moving a non-existent key should be a no-op. 
+ sctx.MoveExtraToModel(ast.NewIdent("nonexistent")) + assert.EqualT(t, numModels+1, len(sctx.app.Models)) +} + +func TestScanCtx_DeclForType(t *testing.T) { + sctx := loadClassificationPkgsCtx(t) + + t.Run("named type", func(t *testing.T) { + decl, ok := sctx.FindDecl( + "github.com/go-openapi/codescan/fixtures/goparsing/classification/models", + "User", + ) + require.True(t, ok) + require.NotNil(t, decl.Type) + + found, ok := sctx.DeclForType(decl.Type) + assert.True(t, ok) + assert.EqualT(t, "User", found.Ident.Name) + }) + + t.Run("pointer to named type", func(t *testing.T) { + decl, ok := sctx.FindDecl( + "github.com/go-openapi/codescan/fixtures/goparsing/classification/models", + "User", + ) + require.True(t, ok) + require.NotNil(t, decl.Type) + + // Wrap in a pointer type to test the *types.Pointer branch. + ptrType := types.NewPointer(decl.Type) + found, ok := sctx.DeclForType(ptrType) + assert.True(t, ok) + assert.EqualT(t, "User", found.Ident.Name) + }) + + t.Run("basic type returns false", func(t *testing.T) { + // types.Typ[types.Int] is a *types.Basic + _, ok := sctx.DeclForType(types.Typ[types.Int]) + assert.False(t, ok) + }) +} + +func TestScanCtx_DeclForType_Alias(t *testing.T) { + // Load the spec fixture which has type aliases (Customer = User). 
+ sctx, err := NewScanCtx(&Options{ + Packages: []string{"./goparsing/spec"}, + WorkDir: "../../fixtures", + }) + require.NoError(t, err) + + decl, ok := sctx.FindDecl( + "github.com/go-openapi/codescan/fixtures/goparsing/spec", + "Customer", + ) + require.True(t, ok) + require.NotNil(t, decl.Alias, "Customer should be a type alias") + + found, ok := sctx.DeclForType(decl.Alias) + assert.True(t, ok) + assert.EqualT(t, "Customer", found.Ident.Name) +} + +func TestScanCtx_PkgForType(t *testing.T) { + sctx := loadClassificationPkgsCtx(t) + + t.Run("named type returns package", func(t *testing.T) { + decl, ok := sctx.FindDecl( + "github.com/go-openapi/codescan/fixtures/goparsing/classification/models", + "User", + ) + require.True(t, ok) + require.NotNil(t, decl.Type) + + pkg, ok := sctx.PkgForType(decl.Type) + assert.True(t, ok) + assert.EqualT(t, "models", pkg.Name) + }) + + t.Run("basic type returns false", func(t *testing.T) { + _, ok := sctx.PkgForType(types.Typ[types.String]) + assert.False(t, ok) + }) +} + +func TestScanCtx_PkgForType_Alias(t *testing.T) { + // Load the spec fixture which has type aliases (Customer = User). 
+ sctx, err := NewScanCtx(&Options{ + Packages: []string{"./goparsing/spec"}, + WorkDir: "../../fixtures", + }) + require.NoError(t, err) + + decl, ok := sctx.FindDecl( + "github.com/go-openapi/codescan/fixtures/goparsing/spec", + "Customer", + ) + require.True(t, ok) + require.NotNil(t, decl.Alias, "Customer should be a type alias") + + pkg, ok := sctx.PkgForType(decl.Alias) + assert.True(t, ok) + assert.EqualT(t, "spec", pkg.Name) +} + +func TestScanCtx_FindComments(t *testing.T) { + sctx := loadClassificationPkgsCtx(t) + pkg, ok := sctx.PkgForPath("github.com/go-openapi/codescan/fixtures/goparsing/classification/models") + require.True(t, ok) + + t.Run("finds comments for existing type", func(t *testing.T) { + comments, ok := sctx.FindComments(pkg, "User") + assert.True(t, ok) + assert.NotNil(t, comments) + }) + + t.Run("returns false for unknown type", func(t *testing.T) { + _, ok := sctx.FindComments(pkg, "NonExistent") + assert.False(t, ok) + }) +} + +func TestScanCtx_FindEnumValues(t *testing.T) { + // Load the petstore fixture which has enum types. + sctx, err := NewScanCtx(&Options{ + Packages: []string{"./goparsing/petstore/enums"}, + WorkDir: "../../fixtures", + }) + require.NoError(t, err) + + pkg, ok := sctx.PkgForPath("github.com/go-openapi/codescan/fixtures/goparsing/petstore/enums") + require.True(t, ok) + + t.Run("finds enum values for Status type", func(t *testing.T) { + list, descList, ok := sctx.FindEnumValues(pkg, "Status") + assert.True(t, ok) + require.Len(t, list, 3) // available, pending, sold + require.Len(t, descList, 3) + + // Verify the string values were extracted. + assert.EqualT(t, "available", list[0]) + assert.EqualT(t, "pending", list[1]) + assert.EqualT(t, "sold", list[2]) + + // Descriptions should contain the literal value and the constant name. 
+ for _, desc := range descList { + assert.True(t, desc != "", "description should not be empty") + } + }) + + t.Run("finds enum values for Priority type with doc comments", func(t *testing.T) { + list, descList, ok := sctx.FindEnumValues(pkg, "Priority") + assert.True(t, ok) + require.Len(t, list, 2) // PriorityLow=1, PriorityHigh=2 + require.Len(t, descList, 2) + + // Verify int values were extracted. + v0, ok0 := list[0].(int64) + require.True(t, ok0, "expected int64 value, got %T", list[0]) + assert.EqualT(t, int64(1), v0) + + v1, ok1 := list[1].(int64) + require.True(t, ok1, "expected int64 value, got %T", list[1]) + assert.EqualT(t, int64(2), v1) + + // Description for PriorityLow should contain the doc comment text. + // It has a multi-line doc comment ("// low priority\n// items are handled last"). + assert.True(t, len(descList[0]) > 0, "description should contain doc comment text") + }) + + t.Run("non-matching enum name returns empty", func(t *testing.T) { + list, descList, ok := sctx.FindEnumValues(pkg, "NonExistentEnum") + assert.True(t, ok) // always returns true + assert.Empty(t, list) + assert.Empty(t, descList) + }) +} + +func TestScanCtx_FindEnumValues_NoConsts(t *testing.T) { + sctx := loadClassificationPkgsCtx(t) + + // Use a package that has types but no related const enums. 
+ pkg, ok := sctx.PkgForPath("github.com/go-openapi/codescan/fixtures/goparsing/classification/models") + require.True(t, ok) + + list, descList, ok := sctx.FindEnumValues(pkg, "User") + assert.True(t, ok) + assert.Empty(t, list) + assert.Empty(t, descList) +} + +func TestNewScanCtx_WithBuildTags(t *testing.T) { + sctx, err := NewScanCtx(&Options{ + Packages: []string{"./goparsing/classification"}, + WorkDir: "../../fixtures", + BuildTags: "integration", + }) + require.NoError(t, err) + require.NotNil(t, sctx) +} + +func TestNewScanCtx_InvalidPackage(t *testing.T) { + // Loading a nonexistent package should succeed at the Load level + // but produce no useful data (packages.Load doesn't return errors for missing pkgs, + // it returns packages with errors embedded). + sctx, err := NewScanCtx(&Options{ + Packages: []string{"./nonexistent"}, + WorkDir: "../../fixtures", + }) + // This may or may not error depending on go/packages behavior; + // what matters is it doesn't panic. + if err != nil { + return + } + require.NotNil(t, sctx) +} + +func TestScanCtx_findEnumValue_EdgeCases(t *testing.T) { + sctx := &ScanCtx{} + + t.Run("non-ValueSpec returns nil", func(t *testing.T) { + spec := &ast.ImportSpec{Path: &ast.BasicLit{Value: `"fmt"`}} + val, desc := sctx.findEnumValue(spec, "Foo") + assert.Nil(t, val) + assert.EqualT(t, "", desc) + }) + + t.Run("ValueSpec with nil Type returns nil", func(t *testing.T) { + spec := &ast.ValueSpec{ + Names: []*ast.Ident{ast.NewIdent("X")}, + } + val, desc := sctx.findEnumValue(spec, "Foo") + assert.Nil(t, val) + assert.EqualT(t, "", desc) + }) + + t.Run("ValueSpec with selector type returns nil", func(t *testing.T) { + // Type is *ast.SelectorExpr, not *ast.Ident + spec := &ast.ValueSpec{ + Names: []*ast.Ident{ast.NewIdent("X")}, + Type: &ast.SelectorExpr{X: ast.NewIdent("pkg"), Sel: ast.NewIdent("Type")}, + } + val, desc := sctx.findEnumValue(spec, "Foo") + assert.Nil(t, val) + assert.EqualT(t, "", desc) + }) + + t.Run("ValueSpec with 
non-matching enum name returns nil", func(t *testing.T) { + spec := &ast.ValueSpec{ + Names: []*ast.Ident{ast.NewIdent("X")}, + Type: ast.NewIdent("Bar"), + Values: []ast.Expr{&ast.BasicLit{Kind: token.INT, Value: "1"}}, + } + val, desc := sctx.findEnumValue(spec, "Foo") + assert.Nil(t, val) + assert.EqualT(t, "", desc) + }) + + t.Run("ValueSpec with no values returns nil", func(t *testing.T) { + spec := &ast.ValueSpec{ + Names: []*ast.Ident{ast.NewIdent("X")}, + Type: ast.NewIdent("Foo"), + } + val, desc := sctx.findEnumValue(spec, "Foo") + assert.Nil(t, val) + assert.EqualT(t, "", desc) + }) + + t.Run("ValueSpec with non-BasicLit value returns nil", func(t *testing.T) { + spec := &ast.ValueSpec{ + Names: []*ast.Ident{ast.NewIdent("X")}, + Type: ast.NewIdent("Foo"), + Values: []ast.Expr{ast.NewIdent("someFunc")}, // *ast.Ident, not *ast.BasicLit + } + val, desc := sctx.findEnumValue(spec, "Foo") + assert.Nil(t, val) + assert.EqualT(t, "", desc) + }) + + t.Run("ValueSpec with multiple names builds description", func(t *testing.T) { + spec := &ast.ValueSpec{ + Names: []*ast.Ident{ast.NewIdent("A"), ast.NewIdent("B")}, + Type: ast.NewIdent("Foo"), + Values: []ast.Expr{&ast.BasicLit{Kind: token.INT, Value: "42"}}, + } + val, desc := sctx.findEnumValue(spec, "Foo") + require.NotNil(t, val) + intVal, ok := val.(int64) + require.True(t, ok) + assert.EqualT(t, int64(42), intVal) + // Description should contain "42 A B" + assert.True(t, len(desc) > 0) + }) + + t.Run("ValueSpec with doc comments builds description", func(t *testing.T) { + spec := &ast.ValueSpec{ + Names: []*ast.Ident{ast.NewIdent("X")}, + Type: ast.NewIdent("Foo"), + Values: []ast.Expr{&ast.BasicLit{Kind: token.STRING, Value: `"hello"`}}, + Doc: &ast.CommentGroup{ + List: []*ast.Comment{ + {Text: "// first line"}, + {Text: "// second line"}, + }, + }, + } + val, desc := sctx.findEnumValue(spec, "Foo") + require.NotNil(t, val) + assert.EqualT(t, "hello", val) + // Description should contain the doc comment 
text. + assert.True(t, len(desc) > 0) + }) +} + +func TestSliceToSet(t *testing.T) { + t.Run("empty slice produces empty map", func(t *testing.T) { + result := sliceToSet(nil) + assert.EqualT(t, 0, len(result)) + }) + + t.Run("populates map correctly", func(t *testing.T) { + result := sliceToSet([]string{"a", "b", "c"}) + assert.EqualT(t, 3, len(result)) + assert.True(t, result["a"]) + assert.True(t, result["b"]) + assert.True(t, result["c"]) + }) + + t.Run("deduplicates entries", func(t *testing.T) { + result := sliceToSet([]string{"x", "x", "y"}) + assert.EqualT(t, 2, len(result)) + }) +} + +func loadClassificationPkgsCtx(t *testing.T) *ScanCtx { + t.Helper() + + if classificationCtx != nil { + return classificationCtx + } + + sctx, err := NewScanCtx(&Options{ + Packages: []string{ + "./goparsing/classification", + "./goparsing/classification/models", + "./goparsing/classification/operations", + }, + WorkDir: "../../fixtures", + }) + require.NoError(t, err) + classificationCtx = sctx + + return classificationCtx +} diff --git a/internal/scantest/classification/verify.go b/internal/scantest/classification/verify.go new file mode 100644 index 0000000..c0d1a01 --- /dev/null +++ b/internal/scantest/classification/verify.go @@ -0,0 +1,35 @@ +package classification + +import ( + "testing" + + oaispec "github.com/go-openapi/spec" + "github.com/go-openapi/testify/v2/assert" + "github.com/go-openapi/testify/v2/require" +) + +func VerifyInfo(t *testing.T, info *oaispec.Info) { + t.Helper() + + require.NotNil(t, info) + assert.EqualT(t, "0.0.1", info.Version) + assert.EqualT(t, "there are no TOS at this moment, use at your own risk we take no responsibility", info.TermsOfService) + assert.EqualT(t, "Petstore API.", info.Title) + + const descr = `the purpose of this application is to provide an application +that is using plain go code to define an API + +This should demonstrate all the possible comment annotations +that are available to turn go code into a fully compliant 
swagger 2.0 spec` + + assert.EqualT(t, descr, info.Description) + + require.NotNil(t, info.License) + assert.EqualT(t, "MIT", info.License.Name) + assert.EqualT(t, "http://opensource.org/licenses/MIT", info.License.URL) + + require.NotNil(t, info.Contact) + assert.EqualT(t, "John Doe", info.Contact.Name) + assert.EqualT(t, "john.doe@example.com", info.Contact.Email) + assert.EqualT(t, "http://john.doe.com", info.Contact.URL) +} diff --git a/internal/scantest/doc.go b/internal/scantest/doc.go new file mode 100644 index 0000000..3ee5eba --- /dev/null +++ b/internal/scantest/doc.go @@ -0,0 +1,2 @@ +// Package scantest exposes utilities for testing the codescan packages. +package scantest diff --git a/internal/scantest/golden.go b/internal/scantest/golden.go new file mode 100644 index 0000000..88a04e8 --- /dev/null +++ b/internal/scantest/golden.go @@ -0,0 +1,59 @@ +// SPDX-FileCopyrightText: Copyright 2015-2025 go-swagger maintainers +// SPDX-License-Identifier: Apache-2.0 + +package scantest + +import ( + "encoding/json" + "os" + "path/filepath" + "runtime" + "testing" + + "github.com/go-openapi/testify/v2/assert" + "github.com/go-openapi/testify/v2/require" +) + +// CompareOrDumpJSON marshals got to stable JSON and either writes it to +// /fixtures/integration/golden/ (when UPDATE_GOLDEN=1) or +// asserts that it JSON-equals the stored golden. +// +// This is the regression-testing harness used to detect any behavior change +// in the go-openapi/spec objects produced by the scanner, compared against +// a captured baseline. See .claude/plans/regression-strategy.md. +// +// Golden files are named by content (fixture bundle + object kind + entity), +// not by test name, so they survive test reshuffling. 
+func CompareOrDumpJSON(t *testing.T, got any, name string) { + t.Helper() + + data, err := json.MarshalIndent(got, "", " ") + require.NoError(t, err) + + path := filepath.Join(goldenDir(), name) + + if os.Getenv("UPDATE_GOLDEN") == "1" { + const ( + dirPerm = 0o700 + filePerm = 0o600 + ) + require.NoError(t, os.MkdirAll(filepath.Dir(path), dirPerm)) + require.NoError(t, os.WriteFile(path, data, filePerm)) + t.Logf("wrote golden %s", name) + return + } + + want, err := os.ReadFile(path) + require.NoError(t, err, "missing golden %s — run with UPDATE_GOLDEN=1 to create", name) + assert.JSONEqT(t, string(want), string(data)) +} + +// goldenDir returns the absolute path to the repo-level golden directory. +func goldenDir() string { + _, thisFile, _, ok := runtime.Caller(0) + if !ok { + panic("scantest: unable to resolve caller for golden path") + } + // thisFile is /internal/scantest/golden.go + return filepath.Clean(filepath.Join(filepath.Dir(thisFile), "..", "..", "fixtures", "integration", "golden")) +} diff --git a/internal/scantest/load.go b/internal/scantest/load.go new file mode 100644 index 0000000..c9ef667 --- /dev/null +++ b/internal/scantest/load.go @@ -0,0 +1,89 @@ +// SPDX-FileCopyrightText: Copyright 2015-2025 go-swagger maintainers +// SPDX-License-Identifier: Apache-2.0 + +package scantest + +import ( + "path/filepath" + "runtime" + "testing" + + "github.com/go-openapi/codescan/internal/scanner" + "github.com/go-openapi/testify/v2/require" +) + +var ( + petstoreCtx *scanner.ScanCtx //nolint:gochecknoglobals // test package cache shared across test functions + classificationCtx *scanner.ScanCtx //nolint:gochecknoglobals // test package cache shared across test functions + go118ClassificationCtx *scanner.ScanCtx //nolint:gochecknoglobals // test package cache shared across test functions +) + +// FixturesDir returns the absolute path to the repo-level fixtures/ directory, +// so tests can run from any package depth without fragile relative paths. 
+func FixturesDir() string { + _, thisFile, _, ok := runtime.Caller(0) + if !ok { + panic("scantest: unable to resolve caller for fixtures path") + } + // thisFile is /internal/scantest/load.go + return filepath.Clean(filepath.Join(filepath.Dir(thisFile), "..", "..", "fixtures")) +} + +func LoadPetstorePkgsCtx(t testing.TB, enableDebug bool) *scanner.ScanCtx { + t.Helper() + + if petstoreCtx != nil { + return petstoreCtx + } + + ctx, err := scanner.NewScanCtx(&scanner.Options{ + Packages: []string{"./goparsing/petstore/..."}, + WorkDir: FixturesDir(), + Debug: enableDebug, + }) + + require.NoError(t, err) + petstoreCtx = ctx + + return petstoreCtx +} + +func LoadGo118ClassificationPkgsCtx(t *testing.T) *scanner.ScanCtx { + t.Helper() + + if go118ClassificationCtx != nil { + return go118ClassificationCtx + } + + sctx, err := scanner.NewScanCtx(&scanner.Options{ + Packages: []string{ + "./goparsing/go118", + }, + WorkDir: FixturesDir(), + }) + require.NoError(t, err) + go118ClassificationCtx = sctx + + return go118ClassificationCtx +} + +func LoadClassificationPkgsCtx(t *testing.T) *scanner.ScanCtx { + t.Helper() + + if classificationCtx != nil { + return classificationCtx + } + + sctx, err := scanner.NewScanCtx(&scanner.Options{ + Packages: []string{ + "./goparsing/classification", + "./goparsing/classification/models", + "./goparsing/classification/operations", + }, + WorkDir: FixturesDir(), + }) + require.NoError(t, err) + classificationCtx = sctx + + return classificationCtx +} diff --git a/internal/scantest/mocks/doc.go b/internal/scantest/mocks/doc.go new file mode 100644 index 0000000..fe38e59 --- /dev/null +++ b/internal/scantest/mocks/doc.go @@ -0,0 +1,4 @@ +// Package mocks contains mock types for all interfaces. +// +// mocks are generated by mockery. 
+package mocks diff --git a/internal/scantest/mocks/mocks.go b/internal/scantest/mocks/mocks.go new file mode 100644 index 0000000..19f2d78 --- /dev/null +++ b/internal/scantest/mocks/mocks.go @@ -0,0 +1,1752 @@ +// Code generated by mockery; DO NOT EDIT. +// github.com/vektra/mockery +// template: matryer + +package mocks + +import ( + "go/types" + "sync" + + "github.com/go-openapi/codescan/internal/ifaces" + "github.com/go-openapi/spec" +) + +// Ensure that MockSwaggerTypable does implement ifaces.SwaggerTypable. +// If this is not the case, regenerate this file with mockery. +var _ ifaces.SwaggerTypable = &MockSwaggerTypable{} + +// MockSwaggerTypable is a mock implementation of ifaces.SwaggerTypable. +// +// func TestSomethingThatUsesSwaggerTypable(t *testing.T) { +// +// // make and configure a mocked ifaces.SwaggerTypable +// mockedSwaggerTypable := &MockSwaggerTypable{ +// AddExtensionFunc: func(key string, value any) { +// panic("mock out the AddExtension method") +// }, +// InFunc: func() string { +// panic("mock out the In method") +// }, +// ItemsFunc: func() ifaces.SwaggerTypable { +// panic("mock out the Items method") +// }, +// LevelFunc: func() int { +// panic("mock out the Level method") +// }, +// SchemaFunc: func() *spec.Schema { +// panic("mock out the Schema method") +// }, +// SetRefFunc: func(ref spec.Ref) { +// panic("mock out the SetRef method") +// }, +// TypedFunc: func(swaggerType string, format string) { +// panic("mock out the Typed method") +// }, +// WithEnumFunc: func(values ...any) { +// panic("mock out the WithEnum method") +// }, +// WithEnumDescriptionFunc: func(desc string) { +// panic("mock out the WithEnumDescription method") +// }, +// } +// +// // use mockedSwaggerTypable in code that requires ifaces.SwaggerTypable +// // and then make assertions. +// +// } +type MockSwaggerTypable struct { + // AddExtensionFunc mocks the AddExtension method. + AddExtensionFunc func(key string, value any) + + // InFunc mocks the In method. 
+ InFunc func() string + + // ItemsFunc mocks the Items method. + ItemsFunc func() ifaces.SwaggerTypable + + // LevelFunc mocks the Level method. + LevelFunc func() int + + // SchemaFunc mocks the Schema method. + SchemaFunc func() *spec.Schema + + // SetRefFunc mocks the SetRef method. + SetRefFunc func(ref spec.Ref) + + // TypedFunc mocks the Typed method. + TypedFunc func(swaggerType string, format string) + + // WithEnumFunc mocks the WithEnum method. + WithEnumFunc func(values ...any) + + // WithEnumDescriptionFunc mocks the WithEnumDescription method. + WithEnumDescriptionFunc func(desc string) + + // calls tracks calls to the methods. + calls struct { + // AddExtension holds details about calls to the AddExtension method. + AddExtension []struct { + // Key is the key argument value. + Key string + // Value is the value argument value. + Value any + } + // In holds details about calls to the In method. + In []struct { + } + // Items holds details about calls to the Items method. + Items []struct { + } + // Level holds details about calls to the Level method. + Level []struct { + } + // Schema holds details about calls to the Schema method. + Schema []struct { + } + // SetRef holds details about calls to the SetRef method. + SetRef []struct { + // Ref is the ref argument value. + Ref spec.Ref + } + // Typed holds details about calls to the Typed method. + Typed []struct { + // SwaggerType is the swaggerType argument value. + SwaggerType string + // Format is the format argument value. + Format string + } + // WithEnum holds details about calls to the WithEnum method. + WithEnum []struct { + // Values is the values argument value. + Values []any + } + // WithEnumDescription holds details about calls to the WithEnumDescription method. + WithEnumDescription []struct { + // Desc is the desc argument value. 
+ Desc string + } + } + lockAddExtension sync.RWMutex + lockIn sync.RWMutex + lockItems sync.RWMutex + lockLevel sync.RWMutex + lockSchema sync.RWMutex + lockSetRef sync.RWMutex + lockTyped sync.RWMutex + lockWithEnum sync.RWMutex + lockWithEnumDescription sync.RWMutex +} + +// AddExtension calls AddExtensionFunc. +func (mock *MockSwaggerTypable) AddExtension(key string, value any) { + if mock.AddExtensionFunc == nil { + panic("MockSwaggerTypable.AddExtensionFunc: method is nil but SwaggerTypable.AddExtension was just called") + } + callInfo := struct { + Key string + Value any + }{ + Key: key, + Value: value, + } + mock.lockAddExtension.Lock() + mock.calls.AddExtension = append(mock.calls.AddExtension, callInfo) + mock.lockAddExtension.Unlock() + mock.AddExtensionFunc(key, value) +} + +// AddExtensionCalls gets all the calls that were made to AddExtension. +// Check the length with: +// +// len(mockedSwaggerTypable.AddExtensionCalls()) +func (mock *MockSwaggerTypable) AddExtensionCalls() []struct { + Key string + Value any +} { + var calls []struct { + Key string + Value any + } + mock.lockAddExtension.RLock() + calls = mock.calls.AddExtension + mock.lockAddExtension.RUnlock() + return calls +} + +// In calls InFunc. +func (mock *MockSwaggerTypable) In() string { + if mock.InFunc == nil { + panic("MockSwaggerTypable.InFunc: method is nil but SwaggerTypable.In was just called") + } + callInfo := struct { + }{} + mock.lockIn.Lock() + mock.calls.In = append(mock.calls.In, callInfo) + mock.lockIn.Unlock() + return mock.InFunc() +} + +// InCalls gets all the calls that were made to In. +// Check the length with: +// +// len(mockedSwaggerTypable.InCalls()) +func (mock *MockSwaggerTypable) InCalls() []struct { +} { + var calls []struct { + } + mock.lockIn.RLock() + calls = mock.calls.In + mock.lockIn.RUnlock() + return calls +} + +// Items calls ItemsFunc. 
+func (mock *MockSwaggerTypable) Items() ifaces.SwaggerTypable { + if mock.ItemsFunc == nil { + panic("MockSwaggerTypable.ItemsFunc: method is nil but SwaggerTypable.Items was just called") + } + callInfo := struct { + }{} + mock.lockItems.Lock() + mock.calls.Items = append(mock.calls.Items, callInfo) + mock.lockItems.Unlock() + return mock.ItemsFunc() +} + +// ItemsCalls gets all the calls that were made to Items. +// Check the length with: +// +// len(mockedSwaggerTypable.ItemsCalls()) +func (mock *MockSwaggerTypable) ItemsCalls() []struct { +} { + var calls []struct { + } + mock.lockItems.RLock() + calls = mock.calls.Items + mock.lockItems.RUnlock() + return calls +} + +// Level calls LevelFunc. +func (mock *MockSwaggerTypable) Level() int { + if mock.LevelFunc == nil { + panic("MockSwaggerTypable.LevelFunc: method is nil but SwaggerTypable.Level was just called") + } + callInfo := struct { + }{} + mock.lockLevel.Lock() + mock.calls.Level = append(mock.calls.Level, callInfo) + mock.lockLevel.Unlock() + return mock.LevelFunc() +} + +// LevelCalls gets all the calls that were made to Level. +// Check the length with: +// +// len(mockedSwaggerTypable.LevelCalls()) +func (mock *MockSwaggerTypable) LevelCalls() []struct { +} { + var calls []struct { + } + mock.lockLevel.RLock() + calls = mock.calls.Level + mock.lockLevel.RUnlock() + return calls +} + +// Schema calls SchemaFunc. +func (mock *MockSwaggerTypable) Schema() *spec.Schema { + if mock.SchemaFunc == nil { + panic("MockSwaggerTypable.SchemaFunc: method is nil but SwaggerTypable.Schema was just called") + } + callInfo := struct { + }{} + mock.lockSchema.Lock() + mock.calls.Schema = append(mock.calls.Schema, callInfo) + mock.lockSchema.Unlock() + return mock.SchemaFunc() +} + +// SchemaCalls gets all the calls that were made to Schema. 
+// Check the length with: +// +// len(mockedSwaggerTypable.SchemaCalls()) +func (mock *MockSwaggerTypable) SchemaCalls() []struct { +} { + var calls []struct { + } + mock.lockSchema.RLock() + calls = mock.calls.Schema + mock.lockSchema.RUnlock() + return calls +} + +// SetRef calls SetRefFunc. +func (mock *MockSwaggerTypable) SetRef(ref spec.Ref) { + if mock.SetRefFunc == nil { + panic("MockSwaggerTypable.SetRefFunc: method is nil but SwaggerTypable.SetRef was just called") + } + callInfo := struct { + Ref spec.Ref + }{ + Ref: ref, + } + mock.lockSetRef.Lock() + mock.calls.SetRef = append(mock.calls.SetRef, callInfo) + mock.lockSetRef.Unlock() + mock.SetRefFunc(ref) +} + +// SetRefCalls gets all the calls that were made to SetRef. +// Check the length with: +// +// len(mockedSwaggerTypable.SetRefCalls()) +func (mock *MockSwaggerTypable) SetRefCalls() []struct { + Ref spec.Ref +} { + var calls []struct { + Ref spec.Ref + } + mock.lockSetRef.RLock() + calls = mock.calls.SetRef + mock.lockSetRef.RUnlock() + return calls +} + +// Typed calls TypedFunc. +func (mock *MockSwaggerTypable) Typed(swaggerType string, format string) { + if mock.TypedFunc == nil { + panic("MockSwaggerTypable.TypedFunc: method is nil but SwaggerTypable.Typed was just called") + } + callInfo := struct { + SwaggerType string + Format string + }{ + SwaggerType: swaggerType, + Format: format, + } + mock.lockTyped.Lock() + mock.calls.Typed = append(mock.calls.Typed, callInfo) + mock.lockTyped.Unlock() + mock.TypedFunc(swaggerType, format) +} + +// TypedCalls gets all the calls that were made to Typed. +// Check the length with: +// +// len(mockedSwaggerTypable.TypedCalls()) +func (mock *MockSwaggerTypable) TypedCalls() []struct { + SwaggerType string + Format string +} { + var calls []struct { + SwaggerType string + Format string + } + mock.lockTyped.RLock() + calls = mock.calls.Typed + mock.lockTyped.RUnlock() + return calls +} + +// WithEnum calls WithEnumFunc. 
+func (mock *MockSwaggerTypable) WithEnum(values ...any) { + if mock.WithEnumFunc == nil { + panic("MockSwaggerTypable.WithEnumFunc: method is nil but SwaggerTypable.WithEnum was just called") + } + callInfo := struct { + Values []any + }{ + Values: values, + } + mock.lockWithEnum.Lock() + mock.calls.WithEnum = append(mock.calls.WithEnum, callInfo) + mock.lockWithEnum.Unlock() + mock.WithEnumFunc(values...) +} + +// WithEnumCalls gets all the calls that were made to WithEnum. +// Check the length with: +// +// len(mockedSwaggerTypable.WithEnumCalls()) +func (mock *MockSwaggerTypable) WithEnumCalls() []struct { + Values []any +} { + var calls []struct { + Values []any + } + mock.lockWithEnum.RLock() + calls = mock.calls.WithEnum + mock.lockWithEnum.RUnlock() + return calls +} + +// WithEnumDescription calls WithEnumDescriptionFunc. +func (mock *MockSwaggerTypable) WithEnumDescription(desc string) { + if mock.WithEnumDescriptionFunc == nil { + panic("MockSwaggerTypable.WithEnumDescriptionFunc: method is nil but SwaggerTypable.WithEnumDescription was just called") + } + callInfo := struct { + Desc string + }{ + Desc: desc, + } + mock.lockWithEnumDescription.Lock() + mock.calls.WithEnumDescription = append(mock.calls.WithEnumDescription, callInfo) + mock.lockWithEnumDescription.Unlock() + mock.WithEnumDescriptionFunc(desc) +} + +// WithEnumDescriptionCalls gets all the calls that were made to WithEnumDescription. +// Check the length with: +// +// len(mockedSwaggerTypable.WithEnumDescriptionCalls()) +func (mock *MockSwaggerTypable) WithEnumDescriptionCalls() []struct { + Desc string +} { + var calls []struct { + Desc string + } + mock.lockWithEnumDescription.RLock() + calls = mock.calls.WithEnumDescription + mock.lockWithEnumDescription.RUnlock() + return calls +} + +// Ensure that MockValidationBuilder does implement ifaces.ValidationBuilder. +// If this is not the case, regenerate this file with mockery. 
+var _ ifaces.ValidationBuilder = &MockValidationBuilder{} + +// MockValidationBuilder is a mock implementation of ifaces.ValidationBuilder. +// +// func TestSomethingThatUsesValidationBuilder(t *testing.T) { +// +// // make and configure a mocked ifaces.ValidationBuilder +// mockedValidationBuilder := &MockValidationBuilder{ +// SetDefaultFunc: func(defaultValue any) { +// panic("mock out the SetDefault method") +// }, +// SetEnumFunc: func(enumValue string) { +// panic("mock out the SetEnum method") +// }, +// SetExampleFunc: func(example any) { +// panic("mock out the SetExample method") +// }, +// SetMaxItemsFunc: func(maxItems int64) { +// panic("mock out the SetMaxItems method") +// }, +// SetMaxLengthFunc: func(maxLength int64) { +// panic("mock out the SetMaxLength method") +// }, +// SetMaximumFunc: func(maxium float64, isExclusive bool) { +// panic("mock out the SetMaximum method") +// }, +// SetMinItemsFunc: func(minItems int64) { +// panic("mock out the SetMinItems method") +// }, +// SetMinLengthFunc: func(minLength int64) { +// panic("mock out the SetMinLength method") +// }, +// SetMinimumFunc: func(minimum float64, isExclusive bool) { +// panic("mock out the SetMinimum method") +// }, +// SetMultipleOfFunc: func(multiple float64) { +// panic("mock out the SetMultipleOf method") +// }, +// SetPatternFunc: func(pattern string) { +// panic("mock out the SetPattern method") +// }, +// SetUniqueFunc: func(isUniqueItems bool) { +// panic("mock out the SetUnique method") +// }, +// } +// +// // use mockedValidationBuilder in code that requires ifaces.ValidationBuilder +// // and then make assertions. +// +// } +type MockValidationBuilder struct { + // SetDefaultFunc mocks the SetDefault method. + SetDefaultFunc func(defaultValue any) + + // SetEnumFunc mocks the SetEnum method. + SetEnumFunc func(enumValue string) + + // SetExampleFunc mocks the SetExample method. + SetExampleFunc func(example any) + + // SetMaxItemsFunc mocks the SetMaxItems method. 
+ SetMaxItemsFunc func(maxItems int64) + + // SetMaxLengthFunc mocks the SetMaxLength method. + SetMaxLengthFunc func(maxLength int64) + + // SetMaximumFunc mocks the SetMaximum method. + SetMaximumFunc func(maxium float64, isExclusive bool) + + // SetMinItemsFunc mocks the SetMinItems method. + SetMinItemsFunc func(minItems int64) + + // SetMinLengthFunc mocks the SetMinLength method. + SetMinLengthFunc func(minLength int64) + + // SetMinimumFunc mocks the SetMinimum method. + SetMinimumFunc func(minimum float64, isExclusive bool) + + // SetMultipleOfFunc mocks the SetMultipleOf method. + SetMultipleOfFunc func(multiple float64) + + // SetPatternFunc mocks the SetPattern method. + SetPatternFunc func(pattern string) + + // SetUniqueFunc mocks the SetUnique method. + SetUniqueFunc func(isUniqueItems bool) + + // calls tracks calls to the methods. + calls struct { + // SetDefault holds details about calls to the SetDefault method. + SetDefault []struct { + // DefaultValue is the defaultValue argument value. + DefaultValue any + } + // SetEnum holds details about calls to the SetEnum method. + SetEnum []struct { + // EnumValue is the enumValue argument value. + EnumValue string + } + // SetExample holds details about calls to the SetExample method. + SetExample []struct { + // Example is the example argument value. + Example any + } + // SetMaxItems holds details about calls to the SetMaxItems method. + SetMaxItems []struct { + // MaxItems is the maxItems argument value. + MaxItems int64 + } + // SetMaxLength holds details about calls to the SetMaxLength method. + SetMaxLength []struct { + // MaxLength is the maxLength argument value. + MaxLength int64 + } + // SetMaximum holds details about calls to the SetMaximum method. + SetMaximum []struct { + // Maxium is the maxium argument value. + Maxium float64 + // IsExclusive is the isExclusive argument value. + IsExclusive bool + } + // SetMinItems holds details about calls to the SetMinItems method. 
+ SetMinItems []struct { + // MinItems is the minItems argument value. + MinItems int64 + } + // SetMinLength holds details about calls to the SetMinLength method. + SetMinLength []struct { + // MinLength is the minLength argument value. + MinLength int64 + } + // SetMinimum holds details about calls to the SetMinimum method. + SetMinimum []struct { + // Minimum is the minimum argument value. + Minimum float64 + // IsExclusive is the isExclusive argument value. + IsExclusive bool + } + // SetMultipleOf holds details about calls to the SetMultipleOf method. + SetMultipleOf []struct { + // Multiple is the multiple argument value. + Multiple float64 + } + // SetPattern holds details about calls to the SetPattern method. + SetPattern []struct { + // Pattern is the pattern argument value. + Pattern string + } + // SetUnique holds details about calls to the SetUnique method. + SetUnique []struct { + // IsUniqueItems is the isUniqueItems argument value. + IsUniqueItems bool + } + } + lockSetDefault sync.RWMutex + lockSetEnum sync.RWMutex + lockSetExample sync.RWMutex + lockSetMaxItems sync.RWMutex + lockSetMaxLength sync.RWMutex + lockSetMaximum sync.RWMutex + lockSetMinItems sync.RWMutex + lockSetMinLength sync.RWMutex + lockSetMinimum sync.RWMutex + lockSetMultipleOf sync.RWMutex + lockSetPattern sync.RWMutex + lockSetUnique sync.RWMutex +} + +// SetDefault calls SetDefaultFunc. +func (mock *MockValidationBuilder) SetDefault(defaultValue any) { + if mock.SetDefaultFunc == nil { + panic("MockValidationBuilder.SetDefaultFunc: method is nil but ValidationBuilder.SetDefault was just called") + } + callInfo := struct { + DefaultValue any + }{ + DefaultValue: defaultValue, + } + mock.lockSetDefault.Lock() + mock.calls.SetDefault = append(mock.calls.SetDefault, callInfo) + mock.lockSetDefault.Unlock() + mock.SetDefaultFunc(defaultValue) +} + +// SetDefaultCalls gets all the calls that were made to SetDefault. 
+// Check the length with: +// +// len(mockedValidationBuilder.SetDefaultCalls()) +func (mock *MockValidationBuilder) SetDefaultCalls() []struct { + DefaultValue any +} { + var calls []struct { + DefaultValue any + } + mock.lockSetDefault.RLock() + calls = mock.calls.SetDefault + mock.lockSetDefault.RUnlock() + return calls +} + +// SetEnum calls SetEnumFunc. +func (mock *MockValidationBuilder) SetEnum(enumValue string) { + if mock.SetEnumFunc == nil { + panic("MockValidationBuilder.SetEnumFunc: method is nil but ValidationBuilder.SetEnum was just called") + } + callInfo := struct { + EnumValue string + }{ + EnumValue: enumValue, + } + mock.lockSetEnum.Lock() + mock.calls.SetEnum = append(mock.calls.SetEnum, callInfo) + mock.lockSetEnum.Unlock() + mock.SetEnumFunc(enumValue) +} + +// SetEnumCalls gets all the calls that were made to SetEnum. +// Check the length with: +// +// len(mockedValidationBuilder.SetEnumCalls()) +func (mock *MockValidationBuilder) SetEnumCalls() []struct { + EnumValue string +} { + var calls []struct { + EnumValue string + } + mock.lockSetEnum.RLock() + calls = mock.calls.SetEnum + mock.lockSetEnum.RUnlock() + return calls +} + +// SetExample calls SetExampleFunc. +func (mock *MockValidationBuilder) SetExample(example any) { + if mock.SetExampleFunc == nil { + panic("MockValidationBuilder.SetExampleFunc: method is nil but ValidationBuilder.SetExample was just called") + } + callInfo := struct { + Example any + }{ + Example: example, + } + mock.lockSetExample.Lock() + mock.calls.SetExample = append(mock.calls.SetExample, callInfo) + mock.lockSetExample.Unlock() + mock.SetExampleFunc(example) +} + +// SetExampleCalls gets all the calls that were made to SetExample. 
+// Check the length with: +// +// len(mockedValidationBuilder.SetExampleCalls()) +func (mock *MockValidationBuilder) SetExampleCalls() []struct { + Example any +} { + var calls []struct { + Example any + } + mock.lockSetExample.RLock() + calls = mock.calls.SetExample + mock.lockSetExample.RUnlock() + return calls +} + +// SetMaxItems calls SetMaxItemsFunc. +func (mock *MockValidationBuilder) SetMaxItems(maxItems int64) { + if mock.SetMaxItemsFunc == nil { + panic("MockValidationBuilder.SetMaxItemsFunc: method is nil but ValidationBuilder.SetMaxItems was just called") + } + callInfo := struct { + MaxItems int64 + }{ + MaxItems: maxItems, + } + mock.lockSetMaxItems.Lock() + mock.calls.SetMaxItems = append(mock.calls.SetMaxItems, callInfo) + mock.lockSetMaxItems.Unlock() + mock.SetMaxItemsFunc(maxItems) +} + +// SetMaxItemsCalls gets all the calls that were made to SetMaxItems. +// Check the length with: +// +// len(mockedValidationBuilder.SetMaxItemsCalls()) +func (mock *MockValidationBuilder) SetMaxItemsCalls() []struct { + MaxItems int64 +} { + var calls []struct { + MaxItems int64 + } + mock.lockSetMaxItems.RLock() + calls = mock.calls.SetMaxItems + mock.lockSetMaxItems.RUnlock() + return calls +} + +// SetMaxLength calls SetMaxLengthFunc. +func (mock *MockValidationBuilder) SetMaxLength(maxLength int64) { + if mock.SetMaxLengthFunc == nil { + panic("MockValidationBuilder.SetMaxLengthFunc: method is nil but ValidationBuilder.SetMaxLength was just called") + } + callInfo := struct { + MaxLength int64 + }{ + MaxLength: maxLength, + } + mock.lockSetMaxLength.Lock() + mock.calls.SetMaxLength = append(mock.calls.SetMaxLength, callInfo) + mock.lockSetMaxLength.Unlock() + mock.SetMaxLengthFunc(maxLength) +} + +// SetMaxLengthCalls gets all the calls that were made to SetMaxLength. 
+// Check the length with: +// +// len(mockedValidationBuilder.SetMaxLengthCalls()) +func (mock *MockValidationBuilder) SetMaxLengthCalls() []struct { + MaxLength int64 +} { + var calls []struct { + MaxLength int64 + } + mock.lockSetMaxLength.RLock() + calls = mock.calls.SetMaxLength + mock.lockSetMaxLength.RUnlock() + return calls +} + +// SetMaximum calls SetMaximumFunc. +func (mock *MockValidationBuilder) SetMaximum(maxium float64, isExclusive bool) { + if mock.SetMaximumFunc == nil { + panic("MockValidationBuilder.SetMaximumFunc: method is nil but ValidationBuilder.SetMaximum was just called") + } + callInfo := struct { + Maxium float64 + IsExclusive bool + }{ + Maxium: maxium, + IsExclusive: isExclusive, + } + mock.lockSetMaximum.Lock() + mock.calls.SetMaximum = append(mock.calls.SetMaximum, callInfo) + mock.lockSetMaximum.Unlock() + mock.SetMaximumFunc(maxium, isExclusive) +} + +// SetMaximumCalls gets all the calls that were made to SetMaximum. +// Check the length with: +// +// len(mockedValidationBuilder.SetMaximumCalls()) +func (mock *MockValidationBuilder) SetMaximumCalls() []struct { + Maxium float64 + IsExclusive bool +} { + var calls []struct { + Maxium float64 + IsExclusive bool + } + mock.lockSetMaximum.RLock() + calls = mock.calls.SetMaximum + mock.lockSetMaximum.RUnlock() + return calls +} + +// SetMinItems calls SetMinItemsFunc. +func (mock *MockValidationBuilder) SetMinItems(minItems int64) { + if mock.SetMinItemsFunc == nil { + panic("MockValidationBuilder.SetMinItemsFunc: method is nil but ValidationBuilder.SetMinItems was just called") + } + callInfo := struct { + MinItems int64 + }{ + MinItems: minItems, + } + mock.lockSetMinItems.Lock() + mock.calls.SetMinItems = append(mock.calls.SetMinItems, callInfo) + mock.lockSetMinItems.Unlock() + mock.SetMinItemsFunc(minItems) +} + +// SetMinItemsCalls gets all the calls that were made to SetMinItems. 
+// Check the length with: +// +// len(mockedValidationBuilder.SetMinItemsCalls()) +func (mock *MockValidationBuilder) SetMinItemsCalls() []struct { + MinItems int64 +} { + var calls []struct { + MinItems int64 + } + mock.lockSetMinItems.RLock() + calls = mock.calls.SetMinItems + mock.lockSetMinItems.RUnlock() + return calls +} + +// SetMinLength calls SetMinLengthFunc. +func (mock *MockValidationBuilder) SetMinLength(minLength int64) { + if mock.SetMinLengthFunc == nil { + panic("MockValidationBuilder.SetMinLengthFunc: method is nil but ValidationBuilder.SetMinLength was just called") + } + callInfo := struct { + MinLength int64 + }{ + MinLength: minLength, + } + mock.lockSetMinLength.Lock() + mock.calls.SetMinLength = append(mock.calls.SetMinLength, callInfo) + mock.lockSetMinLength.Unlock() + mock.SetMinLengthFunc(minLength) +} + +// SetMinLengthCalls gets all the calls that were made to SetMinLength. +// Check the length with: +// +// len(mockedValidationBuilder.SetMinLengthCalls()) +func (mock *MockValidationBuilder) SetMinLengthCalls() []struct { + MinLength int64 +} { + var calls []struct { + MinLength int64 + } + mock.lockSetMinLength.RLock() + calls = mock.calls.SetMinLength + mock.lockSetMinLength.RUnlock() + return calls +} + +// SetMinimum calls SetMinimumFunc. +func (mock *MockValidationBuilder) SetMinimum(minimum float64, isExclusive bool) { + if mock.SetMinimumFunc == nil { + panic("MockValidationBuilder.SetMinimumFunc: method is nil but ValidationBuilder.SetMinimum was just called") + } + callInfo := struct { + Minimum float64 + IsExclusive bool + }{ + Minimum: minimum, + IsExclusive: isExclusive, + } + mock.lockSetMinimum.Lock() + mock.calls.SetMinimum = append(mock.calls.SetMinimum, callInfo) + mock.lockSetMinimum.Unlock() + mock.SetMinimumFunc(minimum, isExclusive) +} + +// SetMinimumCalls gets all the calls that were made to SetMinimum. 
+// Check the length with: +// +// len(mockedValidationBuilder.SetMinimumCalls()) +func (mock *MockValidationBuilder) SetMinimumCalls() []struct { + Minimum float64 + IsExclusive bool +} { + var calls []struct { + Minimum float64 + IsExclusive bool + } + mock.lockSetMinimum.RLock() + calls = mock.calls.SetMinimum + mock.lockSetMinimum.RUnlock() + return calls +} + +// SetMultipleOf calls SetMultipleOfFunc. +func (mock *MockValidationBuilder) SetMultipleOf(multiple float64) { + if mock.SetMultipleOfFunc == nil { + panic("MockValidationBuilder.SetMultipleOfFunc: method is nil but ValidationBuilder.SetMultipleOf was just called") + } + callInfo := struct { + Multiple float64 + }{ + Multiple: multiple, + } + mock.lockSetMultipleOf.Lock() + mock.calls.SetMultipleOf = append(mock.calls.SetMultipleOf, callInfo) + mock.lockSetMultipleOf.Unlock() + mock.SetMultipleOfFunc(multiple) +} + +// SetMultipleOfCalls gets all the calls that were made to SetMultipleOf. +// Check the length with: +// +// len(mockedValidationBuilder.SetMultipleOfCalls()) +func (mock *MockValidationBuilder) SetMultipleOfCalls() []struct { + Multiple float64 +} { + var calls []struct { + Multiple float64 + } + mock.lockSetMultipleOf.RLock() + calls = mock.calls.SetMultipleOf + mock.lockSetMultipleOf.RUnlock() + return calls +} + +// SetPattern calls SetPatternFunc. +func (mock *MockValidationBuilder) SetPattern(pattern string) { + if mock.SetPatternFunc == nil { + panic("MockValidationBuilder.SetPatternFunc: method is nil but ValidationBuilder.SetPattern was just called") + } + callInfo := struct { + Pattern string + }{ + Pattern: pattern, + } + mock.lockSetPattern.Lock() + mock.calls.SetPattern = append(mock.calls.SetPattern, callInfo) + mock.lockSetPattern.Unlock() + mock.SetPatternFunc(pattern) +} + +// SetPatternCalls gets all the calls that were made to SetPattern. 
+// Check the length with: +// +// len(mockedValidationBuilder.SetPatternCalls()) +func (mock *MockValidationBuilder) SetPatternCalls() []struct { + Pattern string +} { + var calls []struct { + Pattern string + } + mock.lockSetPattern.RLock() + calls = mock.calls.SetPattern + mock.lockSetPattern.RUnlock() + return calls +} + +// SetUnique calls SetUniqueFunc. +func (mock *MockValidationBuilder) SetUnique(isUniqueItems bool) { + if mock.SetUniqueFunc == nil { + panic("MockValidationBuilder.SetUniqueFunc: method is nil but ValidationBuilder.SetUnique was just called") + } + callInfo := struct { + IsUniqueItems bool + }{ + IsUniqueItems: isUniqueItems, + } + mock.lockSetUnique.Lock() + mock.calls.SetUnique = append(mock.calls.SetUnique, callInfo) + mock.lockSetUnique.Unlock() + mock.SetUniqueFunc(isUniqueItems) +} + +// SetUniqueCalls gets all the calls that were made to SetUnique. +// Check the length with: +// +// len(mockedValidationBuilder.SetUniqueCalls()) +func (mock *MockValidationBuilder) SetUniqueCalls() []struct { + IsUniqueItems bool +} { + var calls []struct { + IsUniqueItems bool + } + mock.lockSetUnique.RLock() + calls = mock.calls.SetUnique + mock.lockSetUnique.RUnlock() + return calls +} + +// Ensure that MockValueParser does implement ifaces.ValueParser. +// If this is not the case, regenerate this file with mockery. +var _ ifaces.ValueParser = &MockValueParser{} + +// MockValueParser is a mock implementation of ifaces.ValueParser. +// +// func TestSomethingThatUsesValueParser(t *testing.T) { +// +// // make and configure a mocked ifaces.ValueParser +// mockedValueParser := &MockValueParser{ +// MatchesFunc: func(commentLine string) bool { +// panic("mock out the Matches method") +// }, +// ParseFunc: func(commentlines []string) error { +// panic("mock out the Parse method") +// }, +// } +// +// // use mockedValueParser in code that requires ifaces.ValueParser +// // and then make assertions. 
+// +// } +type MockValueParser struct { + // MatchesFunc mocks the Matches method. + MatchesFunc func(commentLine string) bool + + // ParseFunc mocks the Parse method. + ParseFunc func(commentlines []string) error + + // calls tracks calls to the methods. + calls struct { + // Matches holds details about calls to the Matches method. + Matches []struct { + // CommentLine is the commentLine argument value. + CommentLine string + } + // Parse holds details about calls to the Parse method. + Parse []struct { + // Commentlines is the commentlines argument value. + Commentlines []string + } + } + lockMatches sync.RWMutex + lockParse sync.RWMutex +} + +// Matches calls MatchesFunc. +func (mock *MockValueParser) Matches(commentLine string) bool { + if mock.MatchesFunc == nil { + panic("MockValueParser.MatchesFunc: method is nil but ValueParser.Matches was just called") + } + callInfo := struct { + CommentLine string + }{ + CommentLine: commentLine, + } + mock.lockMatches.Lock() + mock.calls.Matches = append(mock.calls.Matches, callInfo) + mock.lockMatches.Unlock() + return mock.MatchesFunc(commentLine) +} + +// MatchesCalls gets all the calls that were made to Matches. +// Check the length with: +// +// len(mockedValueParser.MatchesCalls()) +func (mock *MockValueParser) MatchesCalls() []struct { + CommentLine string +} { + var calls []struct { + CommentLine string + } + mock.lockMatches.RLock() + calls = mock.calls.Matches + mock.lockMatches.RUnlock() + return calls +} + +// Parse calls ParseFunc. +func (mock *MockValueParser) Parse(commentlines []string) error { + if mock.ParseFunc == nil { + panic("MockValueParser.ParseFunc: method is nil but ValueParser.Parse was just called") + } + callInfo := struct { + Commentlines []string + }{ + Commentlines: commentlines, + } + mock.lockParse.Lock() + mock.calls.Parse = append(mock.calls.Parse, callInfo) + mock.lockParse.Unlock() + return mock.ParseFunc(commentlines) +} + +// ParseCalls gets all the calls that were made to Parse. 
+// Check the length with: +// +// len(mockedValueParser.ParseCalls()) +func (mock *MockValueParser) ParseCalls() []struct { + Commentlines []string +} { + var calls []struct { + Commentlines []string + } + mock.lockParse.RLock() + calls = mock.calls.Parse + mock.lockParse.RUnlock() + return calls +} + +// Ensure that MockOperationValidationBuilder does implement ifaces.OperationValidationBuilder. +// If this is not the case, regenerate this file with mockery. +var _ ifaces.OperationValidationBuilder = &MockOperationValidationBuilder{} + +// MockOperationValidationBuilder is a mock implementation of ifaces.OperationValidationBuilder. +// +// func TestSomethingThatUsesOperationValidationBuilder(t *testing.T) { +// +// // make and configure a mocked ifaces.OperationValidationBuilder +// mockedOperationValidationBuilder := &MockOperationValidationBuilder{ +// SetCollectionFormatFunc: func(collectionFormat string) { +// panic("mock out the SetCollectionFormat method") +// }, +// SetDefaultFunc: func(defaultValue any) { +// panic("mock out the SetDefault method") +// }, +// SetEnumFunc: func(enumValue string) { +// panic("mock out the SetEnum method") +// }, +// SetExampleFunc: func(example any) { +// panic("mock out the SetExample method") +// }, +// SetMaxItemsFunc: func(maxItems int64) { +// panic("mock out the SetMaxItems method") +// }, +// SetMaxLengthFunc: func(maxLength int64) { +// panic("mock out the SetMaxLength method") +// }, +// SetMaximumFunc: func(maxium float64, isExclusive bool) { +// panic("mock out the SetMaximum method") +// }, +// SetMinItemsFunc: func(minItems int64) { +// panic("mock out the SetMinItems method") +// }, +// SetMinLengthFunc: func(minLength int64) { +// panic("mock out the SetMinLength method") +// }, +// SetMinimumFunc: func(minimum float64, isExclusive bool) { +// panic("mock out the SetMinimum method") +// }, +// SetMultipleOfFunc: func(multiple float64) { +// panic("mock out the SetMultipleOf method") +// }, +// SetPatternFunc: 
func(pattern string) { +// panic("mock out the SetPattern method") +// }, +// SetUniqueFunc: func(isUniqueItems bool) { +// panic("mock out the SetUnique method") +// }, +// } +// +// // use mockedOperationValidationBuilder in code that requires ifaces.OperationValidationBuilder +// // and then make assertions. +// +// } +type MockOperationValidationBuilder struct { + // SetCollectionFormatFunc mocks the SetCollectionFormat method. + SetCollectionFormatFunc func(collectionFormat string) + + // SetDefaultFunc mocks the SetDefault method. + SetDefaultFunc func(defaultValue any) + + // SetEnumFunc mocks the SetEnum method. + SetEnumFunc func(enumValue string) + + // SetExampleFunc mocks the SetExample method. + SetExampleFunc func(example any) + + // SetMaxItemsFunc mocks the SetMaxItems method. + SetMaxItemsFunc func(maxItems int64) + + // SetMaxLengthFunc mocks the SetMaxLength method. + SetMaxLengthFunc func(maxLength int64) + + // SetMaximumFunc mocks the SetMaximum method. + SetMaximumFunc func(maxium float64, isExclusive bool) + + // SetMinItemsFunc mocks the SetMinItems method. + SetMinItemsFunc func(minItems int64) + + // SetMinLengthFunc mocks the SetMinLength method. + SetMinLengthFunc func(minLength int64) + + // SetMinimumFunc mocks the SetMinimum method. + SetMinimumFunc func(minimum float64, isExclusive bool) + + // SetMultipleOfFunc mocks the SetMultipleOf method. + SetMultipleOfFunc func(multiple float64) + + // SetPatternFunc mocks the SetPattern method. + SetPatternFunc func(pattern string) + + // SetUniqueFunc mocks the SetUnique method. + SetUniqueFunc func(isUniqueItems bool) + + // calls tracks calls to the methods. + calls struct { + // SetCollectionFormat holds details about calls to the SetCollectionFormat method. + SetCollectionFormat []struct { + // CollectionFormat is the collectionFormat argument value. + CollectionFormat string + } + // SetDefault holds details about calls to the SetDefault method. 
+ SetDefault []struct { + // DefaultValue is the defaultValue argument value. + DefaultValue any + } + // SetEnum holds details about calls to the SetEnum method. + SetEnum []struct { + // EnumValue is the enumValue argument value. + EnumValue string + } + // SetExample holds details about calls to the SetExample method. + SetExample []struct { + // Example is the example argument value. + Example any + } + // SetMaxItems holds details about calls to the SetMaxItems method. + SetMaxItems []struct { + // MaxItems is the maxItems argument value. + MaxItems int64 + } + // SetMaxLength holds details about calls to the SetMaxLength method. + SetMaxLength []struct { + // MaxLength is the maxLength argument value. + MaxLength int64 + } + // SetMaximum holds details about calls to the SetMaximum method. + SetMaximum []struct { + // Maxium is the maxium argument value. + Maxium float64 + // IsExclusive is the isExclusive argument value. + IsExclusive bool + } + // SetMinItems holds details about calls to the SetMinItems method. + SetMinItems []struct { + // MinItems is the minItems argument value. + MinItems int64 + } + // SetMinLength holds details about calls to the SetMinLength method. + SetMinLength []struct { + // MinLength is the minLength argument value. + MinLength int64 + } + // SetMinimum holds details about calls to the SetMinimum method. + SetMinimum []struct { + // Minimum is the minimum argument value. + Minimum float64 + // IsExclusive is the isExclusive argument value. + IsExclusive bool + } + // SetMultipleOf holds details about calls to the SetMultipleOf method. + SetMultipleOf []struct { + // Multiple is the multiple argument value. + Multiple float64 + } + // SetPattern holds details about calls to the SetPattern method. + SetPattern []struct { + // Pattern is the pattern argument value. + Pattern string + } + // SetUnique holds details about calls to the SetUnique method. + SetUnique []struct { + // IsUniqueItems is the isUniqueItems argument value. 
+ IsUniqueItems bool + } + } + lockSetCollectionFormat sync.RWMutex + lockSetDefault sync.RWMutex + lockSetEnum sync.RWMutex + lockSetExample sync.RWMutex + lockSetMaxItems sync.RWMutex + lockSetMaxLength sync.RWMutex + lockSetMaximum sync.RWMutex + lockSetMinItems sync.RWMutex + lockSetMinLength sync.RWMutex + lockSetMinimum sync.RWMutex + lockSetMultipleOf sync.RWMutex + lockSetPattern sync.RWMutex + lockSetUnique sync.RWMutex +} + +// SetCollectionFormat calls SetCollectionFormatFunc. +func (mock *MockOperationValidationBuilder) SetCollectionFormat(collectionFormat string) { + if mock.SetCollectionFormatFunc == nil { + panic("MockOperationValidationBuilder.SetCollectionFormatFunc: method is nil but OperationValidationBuilder.SetCollectionFormat was just called") + } + callInfo := struct { + CollectionFormat string + }{ + CollectionFormat: collectionFormat, + } + mock.lockSetCollectionFormat.Lock() + mock.calls.SetCollectionFormat = append(mock.calls.SetCollectionFormat, callInfo) + mock.lockSetCollectionFormat.Unlock() + mock.SetCollectionFormatFunc(collectionFormat) +} + +// SetCollectionFormatCalls gets all the calls that were made to SetCollectionFormat. +// Check the length with: +// +// len(mockedOperationValidationBuilder.SetCollectionFormatCalls()) +func (mock *MockOperationValidationBuilder) SetCollectionFormatCalls() []struct { + CollectionFormat string +} { + var calls []struct { + CollectionFormat string + } + mock.lockSetCollectionFormat.RLock() + calls = mock.calls.SetCollectionFormat + mock.lockSetCollectionFormat.RUnlock() + return calls +} + +// SetDefault calls SetDefaultFunc. 
+func (mock *MockOperationValidationBuilder) SetDefault(defaultValue any) { + if mock.SetDefaultFunc == nil { + panic("MockOperationValidationBuilder.SetDefaultFunc: method is nil but OperationValidationBuilder.SetDefault was just called") + } + callInfo := struct { + DefaultValue any + }{ + DefaultValue: defaultValue, + } + mock.lockSetDefault.Lock() + mock.calls.SetDefault = append(mock.calls.SetDefault, callInfo) + mock.lockSetDefault.Unlock() + mock.SetDefaultFunc(defaultValue) +} + +// SetDefaultCalls gets all the calls that were made to SetDefault. +// Check the length with: +// +// len(mockedOperationValidationBuilder.SetDefaultCalls()) +func (mock *MockOperationValidationBuilder) SetDefaultCalls() []struct { + DefaultValue any +} { + var calls []struct { + DefaultValue any + } + mock.lockSetDefault.RLock() + calls = mock.calls.SetDefault + mock.lockSetDefault.RUnlock() + return calls +} + +// SetEnum calls SetEnumFunc. +func (mock *MockOperationValidationBuilder) SetEnum(enumValue string) { + if mock.SetEnumFunc == nil { + panic("MockOperationValidationBuilder.SetEnumFunc: method is nil but OperationValidationBuilder.SetEnum was just called") + } + callInfo := struct { + EnumValue string + }{ + EnumValue: enumValue, + } + mock.lockSetEnum.Lock() + mock.calls.SetEnum = append(mock.calls.SetEnum, callInfo) + mock.lockSetEnum.Unlock() + mock.SetEnumFunc(enumValue) +} + +// SetEnumCalls gets all the calls that were made to SetEnum. +// Check the length with: +// +// len(mockedOperationValidationBuilder.SetEnumCalls()) +func (mock *MockOperationValidationBuilder) SetEnumCalls() []struct { + EnumValue string +} { + var calls []struct { + EnumValue string + } + mock.lockSetEnum.RLock() + calls = mock.calls.SetEnum + mock.lockSetEnum.RUnlock() + return calls +} + +// SetExample calls SetExampleFunc. 
+func (mock *MockOperationValidationBuilder) SetExample(example any) { + if mock.SetExampleFunc == nil { + panic("MockOperationValidationBuilder.SetExampleFunc: method is nil but OperationValidationBuilder.SetExample was just called") + } + callInfo := struct { + Example any + }{ + Example: example, + } + mock.lockSetExample.Lock() + mock.calls.SetExample = append(mock.calls.SetExample, callInfo) + mock.lockSetExample.Unlock() + mock.SetExampleFunc(example) +} + +// SetExampleCalls gets all the calls that were made to SetExample. +// Check the length with: +// +// len(mockedOperationValidationBuilder.SetExampleCalls()) +func (mock *MockOperationValidationBuilder) SetExampleCalls() []struct { + Example any +} { + var calls []struct { + Example any + } + mock.lockSetExample.RLock() + calls = mock.calls.SetExample + mock.lockSetExample.RUnlock() + return calls +} + +// SetMaxItems calls SetMaxItemsFunc. +func (mock *MockOperationValidationBuilder) SetMaxItems(maxItems int64) { + if mock.SetMaxItemsFunc == nil { + panic("MockOperationValidationBuilder.SetMaxItemsFunc: method is nil but OperationValidationBuilder.SetMaxItems was just called") + } + callInfo := struct { + MaxItems int64 + }{ + MaxItems: maxItems, + } + mock.lockSetMaxItems.Lock() + mock.calls.SetMaxItems = append(mock.calls.SetMaxItems, callInfo) + mock.lockSetMaxItems.Unlock() + mock.SetMaxItemsFunc(maxItems) +} + +// SetMaxItemsCalls gets all the calls that were made to SetMaxItems. +// Check the length with: +// +// len(mockedOperationValidationBuilder.SetMaxItemsCalls()) +func (mock *MockOperationValidationBuilder) SetMaxItemsCalls() []struct { + MaxItems int64 +} { + var calls []struct { + MaxItems int64 + } + mock.lockSetMaxItems.RLock() + calls = mock.calls.SetMaxItems + mock.lockSetMaxItems.RUnlock() + return calls +} + +// SetMaxLength calls SetMaxLengthFunc. 
+func (mock *MockOperationValidationBuilder) SetMaxLength(maxLength int64) { + if mock.SetMaxLengthFunc == nil { + panic("MockOperationValidationBuilder.SetMaxLengthFunc: method is nil but OperationValidationBuilder.SetMaxLength was just called") + } + callInfo := struct { + MaxLength int64 + }{ + MaxLength: maxLength, + } + mock.lockSetMaxLength.Lock() + mock.calls.SetMaxLength = append(mock.calls.SetMaxLength, callInfo) + mock.lockSetMaxLength.Unlock() + mock.SetMaxLengthFunc(maxLength) +} + +// SetMaxLengthCalls gets all the calls that were made to SetMaxLength. +// Check the length with: +// +// len(mockedOperationValidationBuilder.SetMaxLengthCalls()) +func (mock *MockOperationValidationBuilder) SetMaxLengthCalls() []struct { + MaxLength int64 +} { + var calls []struct { + MaxLength int64 + } + mock.lockSetMaxLength.RLock() + calls = mock.calls.SetMaxLength + mock.lockSetMaxLength.RUnlock() + return calls +} + +// SetMaximum calls SetMaximumFunc. +func (mock *MockOperationValidationBuilder) SetMaximum(maxium float64, isExclusive bool) { + if mock.SetMaximumFunc == nil { + panic("MockOperationValidationBuilder.SetMaximumFunc: method is nil but OperationValidationBuilder.SetMaximum was just called") + } + callInfo := struct { + Maxium float64 + IsExclusive bool + }{ + Maxium: maxium, + IsExclusive: isExclusive, + } + mock.lockSetMaximum.Lock() + mock.calls.SetMaximum = append(mock.calls.SetMaximum, callInfo) + mock.lockSetMaximum.Unlock() + mock.SetMaximumFunc(maxium, isExclusive) +} + +// SetMaximumCalls gets all the calls that were made to SetMaximum. 
+// Check the length with: +// +// len(mockedOperationValidationBuilder.SetMaximumCalls()) +func (mock *MockOperationValidationBuilder) SetMaximumCalls() []struct { + Maxium float64 + IsExclusive bool +} { + var calls []struct { + Maxium float64 + IsExclusive bool + } + mock.lockSetMaximum.RLock() + calls = mock.calls.SetMaximum + mock.lockSetMaximum.RUnlock() + return calls +} + +// SetMinItems calls SetMinItemsFunc. +func (mock *MockOperationValidationBuilder) SetMinItems(minItems int64) { + if mock.SetMinItemsFunc == nil { + panic("MockOperationValidationBuilder.SetMinItemsFunc: method is nil but OperationValidationBuilder.SetMinItems was just called") + } + callInfo := struct { + MinItems int64 + }{ + MinItems: minItems, + } + mock.lockSetMinItems.Lock() + mock.calls.SetMinItems = append(mock.calls.SetMinItems, callInfo) + mock.lockSetMinItems.Unlock() + mock.SetMinItemsFunc(minItems) +} + +// SetMinItemsCalls gets all the calls that were made to SetMinItems. +// Check the length with: +// +// len(mockedOperationValidationBuilder.SetMinItemsCalls()) +func (mock *MockOperationValidationBuilder) SetMinItemsCalls() []struct { + MinItems int64 +} { + var calls []struct { + MinItems int64 + } + mock.lockSetMinItems.RLock() + calls = mock.calls.SetMinItems + mock.lockSetMinItems.RUnlock() + return calls +} + +// SetMinLength calls SetMinLengthFunc. +func (mock *MockOperationValidationBuilder) SetMinLength(minLength int64) { + if mock.SetMinLengthFunc == nil { + panic("MockOperationValidationBuilder.SetMinLengthFunc: method is nil but OperationValidationBuilder.SetMinLength was just called") + } + callInfo := struct { + MinLength int64 + }{ + MinLength: minLength, + } + mock.lockSetMinLength.Lock() + mock.calls.SetMinLength = append(mock.calls.SetMinLength, callInfo) + mock.lockSetMinLength.Unlock() + mock.SetMinLengthFunc(minLength) +} + +// SetMinLengthCalls gets all the calls that were made to SetMinLength. 
+// Check the length with: +// +// len(mockedOperationValidationBuilder.SetMinLengthCalls()) +func (mock *MockOperationValidationBuilder) SetMinLengthCalls() []struct { + MinLength int64 +} { + var calls []struct { + MinLength int64 + } + mock.lockSetMinLength.RLock() + calls = mock.calls.SetMinLength + mock.lockSetMinLength.RUnlock() + return calls +} + +// SetMinimum calls SetMinimumFunc. +func (mock *MockOperationValidationBuilder) SetMinimum(minimum float64, isExclusive bool) { + if mock.SetMinimumFunc == nil { + panic("MockOperationValidationBuilder.SetMinimumFunc: method is nil but OperationValidationBuilder.SetMinimum was just called") + } + callInfo := struct { + Minimum float64 + IsExclusive bool + }{ + Minimum: minimum, + IsExclusive: isExclusive, + } + mock.lockSetMinimum.Lock() + mock.calls.SetMinimum = append(mock.calls.SetMinimum, callInfo) + mock.lockSetMinimum.Unlock() + mock.SetMinimumFunc(minimum, isExclusive) +} + +// SetMinimumCalls gets all the calls that were made to SetMinimum. +// Check the length with: +// +// len(mockedOperationValidationBuilder.SetMinimumCalls()) +func (mock *MockOperationValidationBuilder) SetMinimumCalls() []struct { + Minimum float64 + IsExclusive bool +} { + var calls []struct { + Minimum float64 + IsExclusive bool + } + mock.lockSetMinimum.RLock() + calls = mock.calls.SetMinimum + mock.lockSetMinimum.RUnlock() + return calls +} + +// SetMultipleOf calls SetMultipleOfFunc. 
+func (mock *MockOperationValidationBuilder) SetMultipleOf(multiple float64) { + if mock.SetMultipleOfFunc == nil { + panic("MockOperationValidationBuilder.SetMultipleOfFunc: method is nil but OperationValidationBuilder.SetMultipleOf was just called") + } + callInfo := struct { + Multiple float64 + }{ + Multiple: multiple, + } + mock.lockSetMultipleOf.Lock() + mock.calls.SetMultipleOf = append(mock.calls.SetMultipleOf, callInfo) + mock.lockSetMultipleOf.Unlock() + mock.SetMultipleOfFunc(multiple) +} + +// SetMultipleOfCalls gets all the calls that were made to SetMultipleOf. +// Check the length with: +// +// len(mockedOperationValidationBuilder.SetMultipleOfCalls()) +func (mock *MockOperationValidationBuilder) SetMultipleOfCalls() []struct { + Multiple float64 +} { + var calls []struct { + Multiple float64 + } + mock.lockSetMultipleOf.RLock() + calls = mock.calls.SetMultipleOf + mock.lockSetMultipleOf.RUnlock() + return calls +} + +// SetPattern calls SetPatternFunc. +func (mock *MockOperationValidationBuilder) SetPattern(pattern string) { + if mock.SetPatternFunc == nil { + panic("MockOperationValidationBuilder.SetPatternFunc: method is nil but OperationValidationBuilder.SetPattern was just called") + } + callInfo := struct { + Pattern string + }{ + Pattern: pattern, + } + mock.lockSetPattern.Lock() + mock.calls.SetPattern = append(mock.calls.SetPattern, callInfo) + mock.lockSetPattern.Unlock() + mock.SetPatternFunc(pattern) +} + +// SetPatternCalls gets all the calls that were made to SetPattern. +// Check the length with: +// +// len(mockedOperationValidationBuilder.SetPatternCalls()) +func (mock *MockOperationValidationBuilder) SetPatternCalls() []struct { + Pattern string +} { + var calls []struct { + Pattern string + } + mock.lockSetPattern.RLock() + calls = mock.calls.SetPattern + mock.lockSetPattern.RUnlock() + return calls +} + +// SetUnique calls SetUniqueFunc. 
+func (mock *MockOperationValidationBuilder) SetUnique(isUniqueItems bool) { + if mock.SetUniqueFunc == nil { + panic("MockOperationValidationBuilder.SetUniqueFunc: method is nil but OperationValidationBuilder.SetUnique was just called") + } + callInfo := struct { + IsUniqueItems bool + }{ + IsUniqueItems: isUniqueItems, + } + mock.lockSetUnique.Lock() + mock.calls.SetUnique = append(mock.calls.SetUnique, callInfo) + mock.lockSetUnique.Unlock() + mock.SetUniqueFunc(isUniqueItems) +} + +// SetUniqueCalls gets all the calls that were made to SetUnique. +// Check the length with: +// +// len(mockedOperationValidationBuilder.SetUniqueCalls()) +func (mock *MockOperationValidationBuilder) SetUniqueCalls() []struct { + IsUniqueItems bool +} { + var calls []struct { + IsUniqueItems bool + } + mock.lockSetUnique.RLock() + calls = mock.calls.SetUnique + mock.lockSetUnique.RUnlock() + return calls +} + +// Ensure that MockObjecter does implement ifaces.Objecter. +// If this is not the case, regenerate this file with mockery. +var _ ifaces.Objecter = &MockObjecter{} + +// MockObjecter is a mock implementation of ifaces.Objecter. +// +// func TestSomethingThatUsesObjecter(t *testing.T) { +// +// // make and configure a mocked ifaces.Objecter +// mockedObjecter := &MockObjecter{ +// ObjFunc: func() *types.TypeName { +// panic("mock out the Obj method") +// }, +// } +// +// // use mockedObjecter in code that requires ifaces.Objecter +// // and then make assertions. +// +// } +type MockObjecter struct { + // ObjFunc mocks the Obj method. + ObjFunc func() *types.TypeName + + // calls tracks calls to the methods. + calls struct { + // Obj holds details about calls to the Obj method. + Obj []struct { + } + } + lockObj sync.RWMutex +} + +// Obj calls ObjFunc. 
+func (mock *MockObjecter) Obj() *types.TypeName { + if mock.ObjFunc == nil { + panic("MockObjecter.ObjFunc: method is nil but Objecter.Obj was just called") + } + callInfo := struct { + }{} + mock.lockObj.Lock() + mock.calls.Obj = append(mock.calls.Obj, callInfo) + mock.lockObj.Unlock() + return mock.ObjFunc() +} + +// ObjCalls gets all the calls that were made to Obj. +// Check the length with: +// +// len(mockedObjecter.ObjCalls()) +func (mock *MockObjecter) ObjCalls() []struct { +} { + var calls []struct { + } + mock.lockObj.RLock() + calls = mock.calls.Obj + mock.lockObj.RUnlock() + return calls +} diff --git a/internal/scantest/property.go b/internal/scantest/property.go new file mode 100644 index 0000000..12ea759 --- /dev/null +++ b/internal/scantest/property.go @@ -0,0 +1,65 @@ +package scantest + +import ( + "testing" + + oaispec "github.com/go-openapi/spec" + "github.com/go-openapi/testify/v2/assert" + "github.com/go-openapi/testify/v2/require" +) + +func AssertProperty(t *testing.T, schema *oaispec.Schema, typeName, jsonName, format, goName string) { + t.Helper() + + if typeName == "" { + assert.Empty(t, schema.Properties[jsonName].Type) + } else if assert.NotEmpty(t, schema.Properties[jsonName].Type) { + assert.EqualT(t, typeName, schema.Properties[jsonName].Type[0]) + } + + if goName == "" { + assert.Nil(t, schema.Properties[jsonName].Extensions["x-go-name"]) + } else { + assert.Equal(t, goName, schema.Properties[jsonName].Extensions["x-go-name"]) + } + + assert.EqualT(t, format, schema.Properties[jsonName].Format) +} + +func AssertArrayProperty(t *testing.T, schema *oaispec.Schema, typeName, jsonName, format, goName string) { + t.Helper() + + prop := schema.Properties[jsonName] + assert.NotEmpty(t, prop.Type) + assert.TrueT(t, prop.Type.Contains("array")) + require.NotNil(t, prop.Items) + + if typeName != "" { + require.NotNil(t, prop.Items.Schema) + require.NotEmpty(t, prop.Items.Schema.Type) + assert.EqualT(t, typeName, prop.Items.Schema.Type[0]) + 
} + + assert.Equal(t, goName, prop.Extensions["x-go-name"]) + assert.EqualT(t, format, prop.Items.Schema.Format) +} + +func AssertArrayRef(t *testing.T, schema *oaispec.Schema, jsonName, goName, fragment string) { + t.Helper() + + AssertArrayProperty(t, schema, "", jsonName, "", goName) + prop := schema.Properties[jsonName] + items := prop.Items + require.NotNil(t, items) + + psch := items.Schema + assert.EqualT(t, fragment, psch.Ref.String()) +} + +func AssertRef(t *testing.T, schema *oaispec.Schema, jsonName, _, fragment string) { + t.Helper() + + assert.Empty(t, schema.Properties[jsonName].Type) + psch := schema.Properties[jsonName] + assert.EqualT(t, fragment, psch.Ref.String()) +} diff --git a/meta_test.go b/meta_test.go deleted file mode 100644 index 8cbe944..0000000 --- a/meta_test.go +++ /dev/null @@ -1,183 +0,0 @@ -// SPDX-FileCopyrightText: Copyright 2015-2025 go-swagger maintainers -// SPDX-License-Identifier: Apache-2.0 - -package codescan - -import ( - goparser "go/parser" - "go/token" - "testing" - - "github.com/go-openapi/testify/v2/assert" - "github.com/go-openapi/testify/v2/require" - - "github.com/go-openapi/spec" -) - -func TestSetInfoVersion(t *testing.T) { - info := new(spec.Swagger) - err := setInfoVersion(info, []string{"0.0.1"}) - require.NoError(t, err) - assert.EqualT(t, "0.0.1", info.Info.Version) -} - -func TestSetInfoLicense(t *testing.T) { - info := new(spec.Swagger) - err := setInfoLicense(info, []string{"MIT http://license.org/MIT"}) - require.NoError(t, err) - assert.EqualT(t, "MIT", info.Info.License.Name) - assert.EqualT(t, "http://license.org/MIT", info.Info.License.URL) -} - -func TestSetInfoContact(t *testing.T) { - info := new(spec.Swagger) - err := setInfoContact(info, []string{"Homer J. Simpson http://simpsons.com"}) - require.NoError(t, err) - assert.EqualT(t, "Homer J. 
Simpson", info.Info.Contact.Name) - assert.EqualT(t, "homer@simpsons.com", info.Info.Contact.Email) - assert.EqualT(t, "http://simpsons.com", info.Info.Contact.URL) -} - -func TestParseInfo(t *testing.T) { - swspec := new(spec.Swagger) - parser := newMetaParser(swspec) - docFile := "fixtures/goparsing/classification/doc.go" - fileSet := token.NewFileSet() - fileTree, err := goparser.ParseFile(fileSet, docFile, nil, goparser.ParseComments) - if err != nil { - t.FailNow() - } - - err = parser.Parse(fileTree.Doc) - - require.NoError(t, err) - verifyInfo(t, swspec.Info) -} - -func TestParseSwagger(t *testing.T) { - swspec := new(spec.Swagger) - parser := newMetaParser(swspec) - docFile := "fixtures/goparsing/classification/doc.go" - fileSet := token.NewFileSet() - fileTree, err := goparser.ParseFile(fileSet, docFile, nil, goparser.ParseComments) - if err != nil { - t.FailNow() - } - - err = parser.Parse(fileTree.Doc) - verifyMeta(t, swspec) - - require.NoError(t, err) -} - -func verifyMeta(t *testing.T, doc *spec.Swagger) { - assert.NotNil(t, doc) - verifyInfo(t, doc.Info) - assert.Equal(t, []string{"application/json", "application/xml"}, doc.Consumes) - assert.Equal(t, []string{"application/json", "application/xml"}, doc.Produces) - assert.Equal(t, []string{"http", "https"}, doc.Schemes) - assert.Equal(t, []map[string][]string{{"api_key": {}}}, doc.Security) - expectedSecuritySchemaKey := spec.SecurityScheme{ - SecuritySchemeProps: spec.SecuritySchemeProps{ - Type: "apiKey", - In: "header", - Name: "KEY", - }, - } - expectedSecuritySchemaOAuth := spec.SecurityScheme{ - SecuritySchemeProps: spec.SecuritySchemeProps{ //nolint:gosec // G101: false positive, test fixture not real credentials - Type: "oauth2", - In: "header", - AuthorizationURL: "/oauth2/auth", - TokenURL: "/oauth2/token", - Flow: "accessCode", - Scopes: map[string]string{ - "bla1": "foo1", - "bla2": "foo2", - }, - }, - } - expectedExtensions := spec.Extensions{ - "x-meta-array": []any{ - "value1", - 
"value2", - }, - "x-meta-array-obj": []any{ - map[string]any{ - "name": "obj", - "value": "field", - }, - }, - "x-meta-value": "value", - } - expectedInfoExtensions := spec.Extensions{ - "x-info-array": []any{ - "value1", - "value2", - }, - "x-info-array-obj": []any{ - map[string]any{ - "name": "obj", - "value": "field", - }, - }, - "x-info-value": "value", - } - assert.NotNil(t, doc.SecurityDefinitions["api_key"]) - assert.NotNil(t, doc.SecurityDefinitions["oauth2"]) - assert.Equal(t, spec.SecurityDefinitions{"api_key": &expectedSecuritySchemaKey, "oauth2": &expectedSecuritySchemaOAuth}, doc.SecurityDefinitions) - assert.Equal(t, expectedExtensions, doc.Extensions) - assert.Equal(t, expectedInfoExtensions, doc.Info.Extensions) - assert.EqualT(t, "localhost", doc.Host) - assert.EqualT(t, "/v2", doc.BasePath) -} - -func verifyInfo(t *testing.T, info *spec.Info) { - t.Helper() - - require.NotNil(t, info) - assert.EqualT(t, "0.0.1", info.Version) - assert.EqualT(t, "there are no TOS at this moment, use at your own risk we take no responsibility", info.TermsOfService) - assert.EqualT(t, "Petstore API.", info.Title) - descr := `the purpose of this application is to provide an application -that is using plain go code to define an API - -This should demonstrate all the possible comment annotations -that are available to turn go code into a fully compliant swagger 2.0 spec` - assert.EqualT(t, descr, info.Description) - - require.NotNil(t, info.License) - assert.EqualT(t, "MIT", info.License.Name) - assert.EqualT(t, "http://opensource.org/licenses/MIT", info.License.URL) - - require.NotNil(t, info.Contact) - assert.EqualT(t, "John Doe", info.Contact.Name) - assert.EqualT(t, "john.doe@example.com", info.Contact.Email) - assert.EqualT(t, "http://john.doe.com", info.Contact.URL) -} - -func TestMoreParseMeta(t *testing.T) { - for _, docFile := range []string{ - "fixtures/goparsing/meta/v1/doc.go", - "fixtures/goparsing/meta/v2/doc.go", - "fixtures/goparsing/meta/v3/doc.go", - 
"fixtures/goparsing/meta/v4/doc.go", - } { - swspec := new(spec.Swagger) - parser := newMetaParser(swspec) - fileSet := token.NewFileSet() - fileTree, err := goparser.ParseFile(fileSet, docFile, nil, goparser.ParseComments) - if err != nil { - t.FailNow() - } - - err = parser.Parse(fileTree.Doc) - require.NoError(t, err) - assert.EqualT(t, "there are no TOS at this moment, use at your own risk we take no responsibility", swspec.Info.TermsOfService) - /* - jazon, err := json.MarshalIndent(swspec.Info, "", " ") - require.NoError(t, err) - t.Logf("%v", string(jazon)) - */ - } -} diff --git a/operations.go b/operations.go deleted file mode 100644 index 2254f17..0000000 --- a/operations.go +++ /dev/null @@ -1,128 +0,0 @@ -// SPDX-FileCopyrightText: Copyright 2015-2025 go-swagger maintainers -// SPDX-License-Identifier: Apache-2.0 - -package codescan - -import ( - "fmt" - "go/ast" - "regexp" - "strings" - - "github.com/go-openapi/spec" -) - -type operationsBuilder struct { - ctx *scanCtx - path parsedPathContent - operations map[string]*spec.Operation -} - -func (o *operationsBuilder) Build(tgt *spec.Paths) error { - pthObj := tgt.Paths[o.path.Path] - - op := setPathOperation( - o.path.Method, o.path.ID, - &pthObj, o.operations[o.path.ID]) - - op.Tags = o.path.Tags - - sp := new(yamlSpecScanner) - sp.setTitle = func(lines []string) { op.Summary = joinDropLast(lines) } - sp.setDescription = func(lines []string) { op.Description = joinDropLast(lines) } - - if err := sp.Parse(o.path.Remaining); err != nil { - return fmt.Errorf("operation (%s): %w", op.ID, err) - } - if err := sp.UnmarshalSpec(op.UnmarshalJSON); err != nil { - return fmt.Errorf("operation (%s): %w", op.ID, err) - } - - if tgt.Paths == nil { - tgt.Paths = make(map[string]spec.PathItem) - } - - tgt.Paths[o.path.Path] = pthObj - return nil -} - -type parsedPathContent struct { - Method, Path, ID string - Tags []string - Remaining *ast.CommentGroup -} - -func parsePathAnnotation(annotation *regexp.Regexp, lines 
[]*ast.Comment) (cnt parsedPathContent) { - var justMatched bool - - for _, cmt := range lines { - txt := cmt.Text - for line := range strings.SplitSeq(txt, "\n") { - matches := annotation.FindStringSubmatch(line) - if len(matches) > routeTagsIndex { - cnt.Method, cnt.Path, cnt.ID = matches[1], matches[2], matches[len(matches)-1] - cnt.Tags = rxSpace.Split(matches[3], -1) - if len(matches[3]) == 0 { - cnt.Tags = nil - } - justMatched = true - - continue - } - - if cnt.Method == "" { - continue - } - - if cnt.Remaining == nil { - cnt.Remaining = new(ast.CommentGroup) - } - - if !justMatched || strings.TrimSpace(rxStripComments.ReplaceAllString(line, "")) != "" { - cc := new(ast.Comment) - cc.Slash = cmt.Slash - cc.Text = line - cnt.Remaining.List = append(cnt.Remaining.List, cc) - justMatched = false - } - } - } - - return cnt -} - -// assignOrReuse either reuses an existing operation (if the ID matches) -// or assigns op to the slot. -func assignOrReuse(slot **spec.Operation, op *spec.Operation, id string) *spec.Operation { - if *slot != nil && id == (*slot).ID { - return *slot - } - *slot = op - return op -} - -func setPathOperation(method, id string, pthObj *spec.PathItem, op *spec.Operation) *spec.Operation { - if op == nil { - op = new(spec.Operation) - op.ID = id - } - - switch strings.ToUpper(method) { - case "GET": - op = assignOrReuse(&pthObj.Get, op, id) - case "POST": - op = assignOrReuse(&pthObj.Post, op, id) - case "PUT": - op = assignOrReuse(&pthObj.Put, op, id) - case "PATCH": - op = assignOrReuse(&pthObj.Patch, op, id) - case "HEAD": - op = assignOrReuse(&pthObj.Head, op, id) - case "DELETE": - op = assignOrReuse(&pthObj.Delete, op, id) - case "OPTIONS": - op = assignOrReuse(&pthObj.Options, op, id) - } - - return op -} diff --git a/parameters.go b/parameters.go deleted file mode 100644 index c9adf3a..0000000 --- a/parameters.go +++ /dev/null @@ -1,627 +0,0 @@ -// SPDX-FileCopyrightText: Copyright 2015-2025 go-swagger maintainers -// 
SPDX-License-Identifier: Apache-2.0 - -package codescan - -import ( - "fmt" - "go/types" - "strings" - - "github.com/go-openapi/spec" -) - -type paramTypable struct { - param *spec.Parameter - skipExt bool -} - -func (pt paramTypable) In() string { return pt.param.In } - -func (pt paramTypable) Level() int { return 0 } - -func (pt paramTypable) Typed(tpe, format string) { - pt.param.Typed(tpe, format) -} - -func (pt paramTypable) SetRef(ref spec.Ref) { - pt.param.Ref = ref -} - -func (pt paramTypable) Items() swaggerTypable { //nolint:ireturn // polymorphic by design - bdt, schema := bodyTypable(pt.param.In, pt.param.Schema, pt.skipExt) - if bdt != nil { - pt.param.Schema = schema - return bdt - } - - if pt.param.Items == nil { - pt.param.Items = new(spec.Items) - } - pt.param.Type = typeArray - return itemsTypable{pt.param.Items, 1, pt.param.In} -} - -func (pt paramTypable) Schema() *spec.Schema { - if pt.param.In != bodyTag { - return nil - } - if pt.param.Schema == nil { - pt.param.Schema = new(spec.Schema) - } - return pt.param.Schema -} - -func (pt paramTypable) AddExtension(key string, value any) { - if pt.param.In == bodyTag { - pt.Schema().AddExtension(key, value) - } else { - pt.param.AddExtension(key, value) - } -} - -func (pt paramTypable) WithEnum(values ...any) { - pt.param.WithEnum(values...) 
-} - -func (pt paramTypable) WithEnumDescription(desc string) { - if desc == "" { - return - } - pt.param.AddExtension(extEnumDesc, desc) -} - -type itemsTypable struct { - items *spec.Items - level int - in string -} - -func (pt itemsTypable) In() string { return pt.in } // TODO(fred): inherit from param - -func (pt itemsTypable) Level() int { return pt.level } - -func (pt itemsTypable) Typed(tpe, format string) { - pt.items.Typed(tpe, format) -} - -func (pt itemsTypable) SetRef(ref spec.Ref) { - pt.items.Ref = ref -} - -func (pt itemsTypable) Schema() *spec.Schema { - return nil -} - -func (pt itemsTypable) Items() swaggerTypable { //nolint:ireturn // polymorphic by design - if pt.items.Items == nil { - pt.items.Items = new(spec.Items) - } - pt.items.Type = typeArray - return itemsTypable{pt.items.Items, pt.level + 1, pt.in} -} - -func (pt itemsTypable) AddExtension(key string, value any) { - pt.items.AddExtension(key, value) -} - -func (pt itemsTypable) WithEnum(values ...any) { - pt.items.WithEnum(values...) 
-} - -func (pt itemsTypable) WithEnumDescription(_ string) { - // no -} - -type paramValidations struct { - current *spec.Parameter -} - -func (sv paramValidations) SetMaximum(val float64, exclusive bool) { - sv.current.Maximum = &val - sv.current.ExclusiveMaximum = exclusive -} - -func (sv paramValidations) SetMinimum(val float64, exclusive bool) { - sv.current.Minimum = &val - sv.current.ExclusiveMinimum = exclusive -} -func (sv paramValidations) SetMultipleOf(val float64) { sv.current.MultipleOf = &val } -func (sv paramValidations) SetMinItems(val int64) { sv.current.MinItems = &val } -func (sv paramValidations) SetMaxItems(val int64) { sv.current.MaxItems = &val } -func (sv paramValidations) SetMinLength(val int64) { sv.current.MinLength = &val } -func (sv paramValidations) SetMaxLength(val int64) { sv.current.MaxLength = &val } -func (sv paramValidations) SetPattern(val string) { sv.current.Pattern = val } -func (sv paramValidations) SetUnique(val bool) { sv.current.UniqueItems = val } -func (sv paramValidations) SetCollectionFormat(val string) { sv.current.CollectionFormat = val } -func (sv paramValidations) SetEnum(val string) { - sv.current.Enum = parseEnum(val, &spec.SimpleSchema{Type: sv.current.Type, Format: sv.current.Format}) -} -func (sv paramValidations) SetDefault(val any) { sv.current.Default = val } -func (sv paramValidations) SetExample(val any) { sv.current.Example = val } - -type itemsValidations struct { - current *spec.Items -} - -func (sv itemsValidations) SetMaximum(val float64, exclusive bool) { - sv.current.Maximum = &val - sv.current.ExclusiveMaximum = exclusive -} - -func (sv itemsValidations) SetMinimum(val float64, exclusive bool) { - sv.current.Minimum = &val - sv.current.ExclusiveMinimum = exclusive -} -func (sv itemsValidations) SetMultipleOf(val float64) { sv.current.MultipleOf = &val } -func (sv itemsValidations) SetMinItems(val int64) { sv.current.MinItems = &val } -func (sv itemsValidations) SetMaxItems(val int64) { 
sv.current.MaxItems = &val } -func (sv itemsValidations) SetMinLength(val int64) { sv.current.MinLength = &val } -func (sv itemsValidations) SetMaxLength(val int64) { sv.current.MaxLength = &val } -func (sv itemsValidations) SetPattern(val string) { sv.current.Pattern = val } -func (sv itemsValidations) SetUnique(val bool) { sv.current.UniqueItems = val } -func (sv itemsValidations) SetCollectionFormat(val string) { sv.current.CollectionFormat = val } -func (sv itemsValidations) SetEnum(val string) { - sv.current.Enum = parseEnum(val, &spec.SimpleSchema{Type: sv.current.Type, Format: sv.current.Format}) -} -func (sv itemsValidations) SetDefault(val any) { sv.current.Default = val } -func (sv itemsValidations) SetExample(val any) { sv.current.Example = val } - -type parameterBuilder struct { - ctx *scanCtx - decl *entityDecl - postDecls []*entityDecl -} - -func (p *parameterBuilder) Build(operations map[string]*spec.Operation) error { - // check if there is a swagger:parameters tag that is followed by one or more words, - // these words are the ids of the operations this parameter struct applies to - // once type name is found convert it to a schema, by looking up the schema in the - // parameters dictionary that got passed into this parse method - for _, opid := range p.decl.OperationIDs() { - operation, ok := operations[opid] - if !ok { - operation = new(spec.Operation) - operations[opid] = operation - operation.ID = opid - } - debugLogf(p.ctx.debug, "building parameters for: %s", opid) - - // analyze struct body for fields etc - // each exported struct field: - // * gets a type mapped to a go primitive - // * perhaps gets a format - // * has to document the validations that apply for the type and the field - // * when the struct field points to a model it becomes a ref: #/definitions/ModelName - // * comments that aren't tags is used as the description - if err := p.buildFromType(p.decl.ObjType(), operation, make(map[string]spec.Parameter)); err != nil { - return 
err - } - } - - return nil -} - -func (p *parameterBuilder) buildFromType(otpe types.Type, op *spec.Operation, seen map[string]spec.Parameter) error { - switch tpe := otpe.(type) { - case *types.Pointer: - return p.buildFromType(tpe.Elem(), op, seen) - case *types.Named: - return p.buildNamedType(tpe, op, seen) - case *types.Alias: - debugLogf(p.ctx.debug, "alias(parameters.buildFromType): got alias %v to %v", tpe, tpe.Rhs()) - return p.buildAlias(tpe, op, seen) - default: - return fmt.Errorf("unhandled type (%T): %s: %w", otpe, tpe.String(), ErrCodeScan) - } -} - -func (p *parameterBuilder) buildNamedType(tpe *types.Named, op *spec.Operation, seen map[string]spec.Parameter) error { - o := tpe.Obj() - if isAny(o) || isStdError(o) { - return fmt.Errorf("%s type not supported in the context of a parameters section definition: %w", o.Name(), ErrCodeScan) - } - mustNotBeABuiltinType(o) - - switch stpe := o.Type().Underlying().(type) { - case *types.Struct: - debugLogf(p.ctx.debug, "build from named type %s: %T", o.Name(), tpe) - if decl, found := p.ctx.DeclForType(o.Type()); found { - return p.buildFromStruct(decl, stpe, op, seen) - } - - return p.buildFromStruct(p.decl, stpe, op, seen) - default: - return fmt.Errorf("unhandled type (%T): %s: %w", stpe, o.Type().Underlying().String(), ErrCodeScan) - } -} - -func (p *parameterBuilder) buildAlias(tpe *types.Alias, op *spec.Operation, seen map[string]spec.Parameter) error { - o := tpe.Obj() - if isAny(o) || isStdError(o) { - return fmt.Errorf("%s type not supported in the context of a parameters section definition: %w", o.Name(), ErrCodeScan) - } - mustNotBeABuiltinType(o) - mustHaveRightHandSide(tpe) - - rhs := tpe.Rhs() - - // If transparent aliases are enabled, use the underlying type directly without creating a definition - if p.ctx.app.transparentAliases { - return p.buildFromType(rhs, op, seen) - } - - decl, ok := p.ctx.FindModel(o.Pkg().Path(), o.Name()) - if !ok { - return fmt.Errorf("can't find source file for 
aliased type: %v -> %v: %w", tpe, rhs, ErrCodeScan) - } - p.postDecls = append(p.postDecls, decl) // mark the left-hand side as discovered - - switch rtpe := rhs.(type) { - // load declaration for named unaliased type - case *types.Named: - o := rtpe.Obj() - if o.Pkg() == nil { - break // builtin - } - decl, found := p.ctx.FindModel(o.Pkg().Path(), o.Name()) - if !found { - return fmt.Errorf("can't find source file for target type of alias: %v -> %v: %w", tpe, rtpe, ErrCodeScan) - } - p.postDecls = append(p.postDecls, decl) - case *types.Alias: - o := rtpe.Obj() - if o.Pkg() == nil { - break // builtin - } - decl, found := p.ctx.FindModel(o.Pkg().Path(), o.Name()) - if !found { - return fmt.Errorf("can't find source file for target type of alias: %v -> %v: %w", tpe, rtpe, ErrCodeScan) - } - p.postDecls = append(p.postDecls, decl) - } - - return p.buildFromType(rhs, op, seen) -} - -func (p *parameterBuilder) buildFromField(fld *types.Var, tpe types.Type, typable swaggerTypable, seen map[string]spec.Parameter) error { - debugLogf(p.ctx.debug, "build from field %s: %T", fld.Name(), tpe) - - switch ftpe := tpe.(type) { - case *types.Basic: - return swaggerSchemaForType(ftpe.Name(), typable) - case *types.Struct: - return p.buildFromFieldStruct(ftpe, typable) - case *types.Pointer: - return p.buildFromField(fld, ftpe.Elem(), typable, seen) - case *types.Interface: - return p.buildFromFieldInterface(ftpe, typable) - case *types.Array: - return p.buildFromField(fld, ftpe.Elem(), typable.Items(), seen) - case *types.Slice: - return p.buildFromField(fld, ftpe.Elem(), typable.Items(), seen) - case *types.Map: - return p.buildFromFieldMap(ftpe, typable) - case *types.Named: - return p.buildNamedField(ftpe, typable) - case *types.Alias: - debugLogf(p.ctx.debug, "alias(parameters.buildFromField): got alias %v to %v", ftpe, ftpe.Rhs()) // TODO - return p.buildFieldAlias(ftpe, typable, fld, seen) - default: - return fmt.Errorf("unknown type for %s: %T: %w", fld.String(), 
fld.Type(), ErrCodeScan) - } -} - -func (p *parameterBuilder) buildFromFieldStruct(tpe *types.Struct, typable swaggerTypable) error { - sb := schemaBuilder{ - decl: p.decl, - ctx: p.ctx, - } - - if err := sb.buildFromType(tpe, typable); err != nil { - return err - } - p.postDecls = append(p.postDecls, sb.postDecls...) - - return nil -} - -func (p *parameterBuilder) buildFromFieldMap(ftpe *types.Map, typable swaggerTypable) error { - schema := new(spec.Schema) - typable.Schema().Typed("object", "").AdditionalProperties = &spec.SchemaOrBool{ - Schema: schema, - } - - sb := schemaBuilder{ - decl: p.decl, - ctx: p.ctx, - } - - if err := sb.buildFromType(ftpe.Elem(), schemaTypable{schema, typable.Level() + 1, p.ctx.opts.SkipExtensions}); err != nil { - return err - } - - return nil -} - -func (p *parameterBuilder) buildFromFieldInterface(tpe *types.Interface, typable swaggerTypable) error { - sb := schemaBuilder{ - decl: p.decl, - ctx: p.ctx, - } - - if err := sb.buildFromType(tpe, typable); err != nil { - return err - } - - p.postDecls = append(p.postDecls, sb.postDecls...) - - return nil -} - -func (p *parameterBuilder) buildNamedField(ftpe *types.Named, typable swaggerTypable) error { - o := ftpe.Obj() - if isAny(o) { - // e.g. 
Field interface{} or Field any - return nil - } - if isStdError(o) { - return fmt.Errorf("%s type not supported in the context of a parameter definition: %w", o.Name(), ErrCodeScan) - } - mustNotBeABuiltinType(o) - - decl, found := p.ctx.DeclForType(o.Type()) - if !found { - return fmt.Errorf("unable to find package and source file for: %s: %w", ftpe.String(), ErrCodeScan) - } - - if isStdTime(o) { - typable.Typed("string", "date-time") - return nil - } - - if sfnm, isf := strfmtName(decl.Comments); isf { - typable.Typed("string", sfnm) - return nil - } - - sb := &schemaBuilder{ctx: p.ctx, decl: decl} - sb.inferNames() - if err := sb.buildFromType(decl.ObjType(), typable); err != nil { - return err - } - - p.postDecls = append(p.postDecls, sb.postDecls...) - - return nil -} - -func (p *parameterBuilder) buildFieldAlias(tpe *types.Alias, typable swaggerTypable, fld *types.Var, seen map[string]spec.Parameter) error { - o := tpe.Obj() - if isAny(o) { - // e.g. Field interface{} or Field any - _ = typable.Schema() - - return nil // just leave an empty schema - } - if isStdError(o) { - return fmt.Errorf("%s type not supported in the context of a parameter definition: %w", o.Name(), ErrCodeScan) - } - mustNotBeABuiltinType(o) - mustHaveRightHandSide(tpe) - - rhs := tpe.Rhs() - - // If transparent aliases are enabled, use the underlying type directly without creating a definition - if p.ctx.app.transparentAliases { - sb := schemaBuilder{ - decl: p.decl, - ctx: p.ctx, - } - if err := sb.buildFromType(rhs, typable); err != nil { - return err - } - p.postDecls = append(p.postDecls, sb.postDecls...) 
- return nil - } - - decl, ok := p.ctx.FindModel(o.Pkg().Path(), o.Name()) - if !ok { - return fmt.Errorf("can't find source file for aliased type: %v -> %v: %w", tpe, rhs, ErrCodeScan) - } - p.postDecls = append(p.postDecls, decl) // mark the left-hand side as discovered - - if typable.In() != bodyTag || !p.ctx.app.refAliases { - // if ref option is disabled, and always for non-body parameters: just expand the alias - unaliased := types.Unalias(tpe) - return p.buildFromField(fld, unaliased, typable, seen) - } - - // for parameters that are full-fledged schemas, consider expanding or ref'ing - switch rtpe := rhs.(type) { - // load declaration for named RHS type (might be an alias itself) - case *types.Named: - o := rtpe.Obj() - if o.Pkg() == nil { - break // builtin - } - - decl, found := p.ctx.FindModel(o.Pkg().Path(), o.Name()) - if !found { - return fmt.Errorf("can't find source file for target type of alias: %v -> %v: %w", tpe, rtpe, ErrCodeScan) - } - - return p.makeRef(decl, typable) - case *types.Alias: - o := rtpe.Obj() - if o.Pkg() == nil { - break // builtin - } - - decl, found := p.ctx.FindModel(o.Pkg().Path(), o.Name()) - if !found { - return fmt.Errorf("can't find source file for target type of alias: %v -> %v: %w", tpe, rtpe, ErrCodeScan) - } - - return p.makeRef(decl, typable) - } - - // anonymous type: just expand it - return p.buildFromField(fld, rhs, typable, seen) -} - -func spExtensionsSetter(ps *spec.Parameter, skipExt bool) func(*spec.Extensions) { - return func(exts *spec.Extensions) { - for name, value := range *exts { - addExtension(&ps.VendorExtensible, name, value, skipExt) - } - } -} - -func (p *parameterBuilder) buildFromStruct(decl *entityDecl, tpe *types.Struct, op *spec.Operation, seen map[string]spec.Parameter) error { - numFields := tpe.NumFields() - - if numFields == 0 { - return nil - } - - sequence := make([]string, 0, numFields) - for i := range numFields { - fld := tpe.Field(i) - - if fld.Embedded() { - if err := 
p.buildFromType(fld.Type(), op, seen); err != nil { - return err - } - continue - } - - name, err := p.processParamField(fld, decl, seen) - if err != nil { - return err - } - if name != "" { - sequence = append(sequence, name) - } - } - - for _, k := range sequence { - p := seen[k] - for i, v := range op.Parameters { - if v.Name == k { - op.Parameters = append(op.Parameters[:i], op.Parameters[i+1:]...) - break - } - } - op.Parameters = append(op.Parameters, p) - } - - return nil -} - -// processParamField processes a single non-embedded struct field for parameter building. -// Returns the parameter name if the field was processed, or "" if it was skipped. -func (p *parameterBuilder) processParamField(fld *types.Var, decl *entityDecl, seen map[string]spec.Parameter) (string, error) { - if !fld.Exported() { - debugLogf(p.ctx.debug, "skipping field %s because it's not exported", fld.Name()) - return "", nil - } - - afld := findASTField(decl.File, fld.Pos()) - if afld == nil { - debugLogf(p.ctx.debug, "can't find source associated with %s", fld.String()) - return "", nil - } - - if ignored(afld.Doc) { - return "", nil - } - - name, ignore, _, _, err := parseJSONTag(afld) - if err != nil { - return "", err - } - if ignore { - return "", nil - } - - in := paramInQuery - // scan for param location first, this changes some behavior down the line - if afld.Doc != nil { - for _, cmt := range afld.Doc.List { - for line := range strings.SplitSeq(cmt.Text, "\n") { - matches := rxIn.FindStringSubmatch(line) - if len(matches) > 0 && len(strings.TrimSpace(matches[1])) > 0 { - in = strings.TrimSpace(matches[1]) - } - } - } - } - - ps := seen[name] - ps.In = in - var pty swaggerTypable = paramTypable{&ps, p.ctx.opts.SkipExtensions} - if in == bodyTag { - pty = schemaTypable{pty.Schema(), 0, p.ctx.opts.SkipExtensions} - } - - if in == "formData" && afld.Doc != nil && fileParam(afld.Doc) { - pty.Typed("file", "") - } else if err := p.buildFromField(fld, fld.Type(), pty, seen); err != 
nil { - return "", err - } - - if strfmtName, ok := strfmtName(afld.Doc); ok { - ps.Typed("string", strfmtName) - ps.Ref = spec.Ref{} - ps.Items = nil - } - - sp := new(sectionedParser) - sp.setDescription = func(lines []string) { - ps.Description = joinDropLast(lines) - enumDesc := getEnumDesc(ps.Extensions) - if enumDesc != "" { - ps.Description += "\n" + enumDesc - } - } - - if ps.Ref.String() != "" { - setupRefParamTaggers(sp, &ps, p.ctx.opts.SkipExtensions, p.ctx.debug) - } else { - if err := setupInlineParamTaggers(sp, &ps, name, afld, p.ctx.opts.SkipExtensions, p.ctx.debug); err != nil { - return "", err - } - } - - if err := sp.Parse(afld.Doc); err != nil { - return "", err - } - if ps.In == "path" { - ps.Required = true - } - - if ps.Name == "" { - ps.Name = name - } - - if name != fld.Name() { - addExtension(&ps.VendorExtensible, "x-go-name", fld.Name(), p.ctx.opts.SkipExtensions) - } - - seen[name] = ps - return name, nil -} - -func (p *parameterBuilder) makeRef(decl *entityDecl, prop swaggerTypable) error { - nm, _ := decl.Names() - ref, err := spec.NewRef("#/definitions/" + nm) - if err != nil { - return err - } - - prop.SetRef(ref) - p.postDecls = append(p.postDecls, decl) // mark the $ref target as discovered - - return nil -} diff --git a/parser.go b/parser.go deleted file mode 100644 index a9109b5..0000000 --- a/parser.go +++ /dev/null @@ -1,1808 +0,0 @@ -// SPDX-FileCopyrightText: Copyright 2015-2025 go-swagger maintainers -// SPDX-License-Identifier: Apache-2.0 - -package codescan - -import ( - "encoding/json" - "fmt" - "go/ast" - "go/types" - "log" - "reflect" - "regexp" - "slices" - "strconv" - "strings" - - "go.yaml.in/yaml/v3" - - "github.com/go-openapi/loads/fmts" - "github.com/go-openapi/spec" -) - -const ( - // Go builtin type names used for type-to-schema mapping. 
- goTypeByte = "byte" - goTypeFloat64 = "float64" - goTypeInt = "int" - goTypeInt16 = "int16" - goTypeInt32 = "int32" - goTypeInt64 = "int64" - - // kvParts is the number of parts when splitting key:value pairs. - kvParts = 2 - // logCallerDepth is the caller depth for log.Output. - logCallerDepth = 2 - // minAnnotationMatch is the minimum submatch count for an annotation regex. - minAnnotationMatch = 2 - // routeTagsIndex is the regex submatch index where route tags begin. - routeTagsIndex = 3 -) - -func shouldAcceptTag(tags []string, includeTags map[string]bool, excludeTags map[string]bool) bool { - for _, tag := range tags { - if len(includeTags) > 0 { - if includeTags[tag] { - return true - } - } else if len(excludeTags) > 0 { - if excludeTags[tag] { - return false - } - } - } - - return len(includeTags) == 0 -} - -func shouldAcceptPkg(path string, includePkgs, excludePkgs []string) bool { - if len(includePkgs) == 0 && len(excludePkgs) == 0 { - return true - } - - for _, pkgName := range includePkgs { - matched, _ := regexp.MatchString(pkgName, path) - if matched { - return true - } - } - - for _, pkgName := range excludePkgs { - matched, _ := regexp.MatchString(pkgName, path) - if matched { - return false - } - } - - return len(includePkgs) == 0 -} - -// Many thanks go to https://github.com/yvasiyarov/swagger -// this is loosely based on that implementation but for swagger 2.0 - -func joinDropLast(lines []string) string { - l := len(lines) - lns := lines - if l > 0 && len(strings.TrimSpace(lines[l-1])) == 0 { - lns = lines[:l-1] - } - return strings.Join(lns, "\n") -} - -func removeEmptyLines(lines []string) []string { - notEmpty := make([]string, 0, len(lines)) - - for _, l := range lines { - if len(strings.TrimSpace(l)) > 0 { - notEmpty = append(notEmpty, l) - } - } - - return notEmpty -} - -func rxf(rxp, ar string) *regexp.Regexp { - return regexp.MustCompile(fmt.Sprintf(rxp, ar)) -} - -func allOfMember(comments *ast.CommentGroup) bool { - return 
commentMatcher(rxAllOf)(comments) -} - -func fileParam(comments *ast.CommentGroup) bool { - return commentMatcher(rxFileUpload)(comments) -} - -func strfmtName(comments *ast.CommentGroup) (string, bool) { - return commentSubMatcher(rxStrFmt)(comments) -} - -func ignored(comments *ast.CommentGroup) bool { - return commentMatcher(rxIgnoreOverride)(comments) -} - -func enumName(comments *ast.CommentGroup) (string, bool) { - return commentSubMatcher(rxEnum)(comments) -} - -func aliasParam(comments *ast.CommentGroup) bool { - return commentMatcher(rxAlias)(comments) -} - -func commentMatcher(rx *regexp.Regexp) func(*ast.CommentGroup) bool { - return func(comments *ast.CommentGroup) bool { - if comments == nil { - return false - } - - return slices.ContainsFunc(comments.List, func(cmt *ast.Comment) bool { - for ln := range strings.SplitSeq(cmt.Text, "\n") { - if rx.MatchString(ln) { - return true - } - } - - return false - }) - } -} - -func commentSubMatcher(rx *regexp.Regexp) func(*ast.CommentGroup) (string, bool) { - return func(comments *ast.CommentGroup) (string, bool) { - if comments == nil { - return "", false - } - - for _, cmt := range comments.List { - for ln := range strings.SplitSeq(cmt.Text, "\n") { - matches := rx.FindStringSubmatch(ln) - if len(matches) > 1 && len(strings.TrimSpace(matches[1])) > 0 { - return strings.TrimSpace(matches[1]), true - } - } - } - - return "", false - } -} - -func isAliasParam(prop swaggerTypable) bool { - param, ok := prop.(paramTypable) - if !ok { - return false - } - - return param.param.In == "query" || - param.param.In == "path" || - param.param.In == "formData" -} - -func defaultName(comments *ast.CommentGroup) (string, bool) { - return commentSubMatcher(rxDefault)(comments) -} - -func typeName(comments *ast.CommentGroup) (string, bool) { - return commentSubMatcher(rxType)(comments) -} - -type swaggerTypable interface { - Typed(swaggerType string, format string) - SetRef(ref spec.Ref) - Items() swaggerTypable - Schema() 
*spec.Schema - Level() int - AddExtension(key string, value any) - WithEnum(values ...any) - WithEnumDescription(desc string) - In() string -} - -// Map all Go builtin types that have Json representation to Swagger/Json types. -// See https://golang.org/pkg/builtin/ and http://swagger.io/specification/ -func swaggerSchemaForType(typeName string, prop swaggerTypable) error { - switch typeName { - case typeBool: - prop.Typed("boolean", "") - case goTypeByte: - prop.Typed("integer", "uint8") - case "complex128", "complex64": - return fmt.Errorf("unsupported builtin %q (no JSON marshaller): %w", typeName, ErrCodeScan) - case "error": - // TODO: error is often marshalled into a string but not always (e.g. errors package creates - // errors that are marshalled into an empty object), this could be handled the same way - // custom JSON marshallers are handled (in future) - prop.Typed("string", "") - case "float32": - prop.Typed("number", "float") - case goTypeFloat64: - prop.Typed("number", "double") - case goTypeInt: - prop.Typed("integer", goTypeInt64) - case goTypeInt16: - prop.Typed("integer", goTypeInt16) - case goTypeInt32: - prop.Typed("integer", goTypeInt32) - case goTypeInt64: - prop.Typed("integer", goTypeInt64) - case "int8": - prop.Typed("integer", "int8") - case "rune": - prop.Typed("integer", goTypeInt32) - case typeString: - prop.Typed("string", "") - case "uint": - prop.Typed("integer", "uint64") - case "uint16": - prop.Typed("integer", "uint16") - case "uint32": - prop.Typed("integer", "uint32") - case "uint64": - prop.Typed("integer", "uint64") - case "uint8": - prop.Typed("integer", "uint8") - case "uintptr": - prop.Typed("integer", "uint64") - case "object": - prop.Typed("object", "") - default: - return fmt.Errorf("unsupported type %q: %w", typeName, ErrCodeScan) - } - return nil -} - -func newMultiLineTagParser(name string, parser valueParser, skipCleanUp bool) tagParser { - return tagParser{ - Name: name, - MultiLine: true, - SkipCleanUp: 
skipCleanUp, - Parser: parser, - } -} - -func newSingleLineTagParser(name string, parser valueParser) tagParser { - return tagParser{ - Name: name, - MultiLine: false, - SkipCleanUp: false, - Parser: parser, - } -} - -type tagParser struct { - Name string - MultiLine bool - SkipCleanUp bool - Lines []string - Parser valueParser -} - -func (st *tagParser) Matches(line string) bool { - return st.Parser.Matches(line) -} - -func (st *tagParser) Parse(lines []string) error { - return st.Parser.Parse(lines) -} - -type yamlParser struct { - set func(json.RawMessage) error - rx *regexp.Regexp -} - -func newYamlParser(rx *regexp.Regexp, setter func(json.RawMessage) error) *yamlParser { - return &yamlParser{ - set: setter, - rx: rx, - } -} - -func (y *yamlParser) Parse(lines []string) error { - if len(lines) == 0 || (len(lines) == 1 && len(lines[0]) == 0) { - return nil - } - - uncommented := make([]string, 0, len(lines)) - uncommented = append(uncommented, removeYamlIndent(lines)...) - - yamlContent := strings.Join(uncommented, "\n") - var yamlValue any - err := yaml.Unmarshal([]byte(yamlContent), &yamlValue) - if err != nil { - return err - } - - var jsonValue json.RawMessage - jsonValue, err = fmts.YAMLToJSON(yamlValue) - if err != nil { - return err - } - - return y.set(jsonValue) -} - -func (y *yamlParser) Matches(line string) bool { - return y.rx.MatchString(line) -} - -// aggregates lines in header until it sees `---`, -// the beginning of a YAML spec. 
-type yamlSpecScanner struct { - header []string - yamlSpec []string - setTitle func([]string) - setDescription func([]string) - workedOutTitle bool - title []string - skipHeader bool -} - -func (sp *yamlSpecScanner) Title() []string { - sp.collectTitleDescription() - return sp.title -} - -func (sp *yamlSpecScanner) Description() []string { - sp.collectTitleDescription() - return sp.header -} - -func (sp *yamlSpecScanner) Parse(doc *ast.CommentGroup) error { - if doc == nil { - return nil - } - var startedYAMLSpec bool -COMMENTS: - for _, c := range doc.List { - for line := range strings.SplitSeq(c.Text, "\n") { - if rxSwaggerAnnotation.MatchString(line) { - break COMMENTS // a new swagger: annotation terminates this parser - } - - if !startedYAMLSpec { - if rxBeginYAMLSpec.MatchString(line) { - startedYAMLSpec = true - sp.yamlSpec = append(sp.yamlSpec, line) - continue - } - - if !sp.skipHeader { - sp.header = append(sp.header, line) - } - - // no YAML spec yet, moving on - continue - } - - sp.yamlSpec = append(sp.yamlSpec, line) - } - } - if sp.setTitle != nil { - sp.setTitle(sp.Title()) - } - if sp.setDescription != nil { - sp.setDescription(sp.Description()) - } - return nil -} - -func (sp *yamlSpecScanner) UnmarshalSpec(u func([]byte) error) (err error) { - specYaml := cleanupScannerLines(sp.yamlSpec, rxUncommentYAML) - if len(specYaml) == 0 { - return fmt.Errorf("no spec available to unmarshal: %w", ErrCodeScan) - } - - if !strings.Contains(specYaml[0], "---") { - return fmt.Errorf("yaml spec has to start with `---`: %w", ErrCodeScan) - } - - // remove indentation - specYaml = removeIndent(specYaml) - - // 1. parse yaml lines - yamlValue := make(map[any]any) - - yamlContent := strings.Join(specYaml, "\n") - err = yaml.Unmarshal([]byte(yamlContent), &yamlValue) - if err != nil { - return err - } - - // 2. convert to json - var jsonValue json.RawMessage - jsonValue, err = fmts.YAMLToJSON(yamlValue) - if err != nil { - return err - } - - // 3. 
unmarshal the json into an interface - var data []byte - data, err = jsonValue.MarshalJSON() - if err != nil { - return err - } - err = u(data) - if err != nil { - return err - } - - // all parsed, returning... - sp.yamlSpec = nil // spec is now consumed, so let's erase the parsed lines - - return nil -} - -func (sp *yamlSpecScanner) collectTitleDescription() { - if sp.workedOutTitle { - return - } - if sp.setTitle == nil { - sp.header = cleanupScannerLines(sp.header, rxUncommentHeaders) - return - } - - sp.workedOutTitle = true - sp.title, sp.header = collectScannerTitleDescription(sp.header) -} - -// removes indent based on the first line. -func removeIndent(spec []string) []string { - if len(spec) == 0 { - return spec - } - - loc := rxIndent.FindStringIndex(spec[0]) - if len(loc) < 2 || loc[1] <= 1 { - return spec - } - - s := make([]string, len(spec)) - copy(s, spec) - - for i := range s { - if len(s[i]) < loc[1] { - continue - } - - s[i] = spec[i][loc[1]-1:] //nolint:gosec // G602: bounds already checked on line 445 - start := rxNotIndent.FindStringIndex(s[i]) - if len(start) < 2 || start[1] == 0 { - continue - } - - s[i] = strings.Replace(s[i], "\t", " ", start[1]) - } - - return s -} - -// removes indent base on the first line. -// -// The difference with removeIndent is that lines shorter than the indentation are elided. -func removeYamlIndent(spec []string) []string { - if len(spec) == 0 { - return spec - } - - loc := rxIndent.FindStringIndex(spec[0]) - if len(loc) < 2 || loc[1] <= 1 { - return spec - } - - s := make([]string, 0, len(spec)) - for i := range spec { - if len(spec[i]) >= loc[1] { - s = append(s, spec[i][loc[1]-1:]) - } - } - - return s -} - -// aggregates lines in header until it sees a tag. 
-type sectionedParser struct { - header []string - matched map[string]tagParser - annotation valueParser - - seenTag bool - skipHeader bool - setTitle func([]string) - setDescription func([]string) - workedOutTitle bool - taggers []tagParser - currentTagger *tagParser - title []string - ignored bool -} - -func (st *sectionedParser) Title() []string { - st.collectTitleDescription() - return st.title -} - -func (st *sectionedParser) Description() []string { - st.collectTitleDescription() - return st.header -} - -func (st *sectionedParser) Parse(doc *ast.CommentGroup) error { - if doc == nil { - return nil - } - -COMMENTS: - for _, c := range doc.List { - for line := range strings.SplitSeq(c.Text, "\n") { - if st.parseLine(line) { - break COMMENTS - } - } - } - - if st.setTitle != nil { - st.setTitle(st.Title()) - } - - if st.setDescription != nil { - st.setDescription(st.Description()) - } - - for _, mt := range st.matched { - if !mt.SkipCleanUp { - mt.Lines = cleanupScannerLines(mt.Lines, rxUncommentHeaders) - } - if err := mt.Parse(mt.Lines); err != nil { - return err - } - } - - return nil -} - -// parseLine processes a single comment line. It returns true when the -// caller should stop processing further comments (break COMMENTS). 
-func (st *sectionedParser) parseLine(line string) (stop bool) { - if rxSwaggerAnnotation.MatchString(line) { - if rxIgnoreOverride.MatchString(line) { - st.ignored = true - return true // an explicit ignore terminates this parser - } - if st.annotation == nil || !st.annotation.Matches(line) { - return true // a new swagger: annotation terminates this parser - } - - _ = st.annotation.Parse([]string{line}) - if len(st.header) > 0 { - st.seenTag = true - } - return false - } - - var matched bool - for _, tg := range st.taggers { - tagger := tg - if tagger.Matches(line) { - st.seenTag = true - st.currentTagger = &tagger - matched = true - break - } - } - - if st.currentTagger == nil { - if !st.skipHeader && !st.seenTag { - st.header = append(st.header, line) - } - return false - } - - if st.currentTagger.MultiLine && matched { - // the first line of a multiline tagger doesn't count - return false - } - - ts, ok := st.matched[st.currentTagger.Name] - if !ok { - ts = *st.currentTagger - } - ts.Lines = append(ts.Lines, line) - if st.matched == nil { - st.matched = make(map[string]tagParser) - } - st.matched[st.currentTagger.Name] = ts - - if !st.currentTagger.MultiLine { - st.currentTagger = nil - } - return false -} - -func (st *sectionedParser) collectTitleDescription() { - if st.workedOutTitle { - return - } - if st.setTitle == nil { - st.header = cleanupScannerLines(st.header, rxUncommentHeaders) - return - } - - st.workedOutTitle = true - st.title, st.header = collectScannerTitleDescription(st.header) -} - -type validationBuilder interface { //nolint:interfacebloat // mirrors the full set of Swagger validation properties - SetMaximum(maxium float64, isExclusive bool) - SetMinimum(minimum float64, isExclusive bool) - SetMultipleOf(multiple float64) - - SetMinItems(minItems int64) - SetMaxItems(maxItems int64) - - SetMinLength(minLength int64) - SetMaxLength(maxLength int64) - SetPattern(pattern string) - - SetUnique(isUniqueItems bool) - SetEnum(enumValue string) - 
SetDefault(defaultValue any) - SetExample(example any) -} - -type valueParser interface { - Parse(commentlines []string) error - Matches(commentLine string) bool -} - -type operationValidationBuilder interface { - validationBuilder - SetCollectionFormat(collectionFormat string) -} - -type setMaximum struct { - builder validationBuilder - rx *regexp.Regexp -} - -func (sm *setMaximum) Parse(lines []string) error { - if len(lines) == 0 || (len(lines) == 1 && len(lines[0]) == 0) { - return nil - } - matches := sm.rx.FindStringSubmatch(lines[0]) - if len(matches) > 2 && len(matches[2]) > 0 { - maximum, err := strconv.ParseFloat(matches[2], 64) - if err != nil { - return err - } - sm.builder.SetMaximum(maximum, matches[1] == "<") - } - return nil -} - -func (sm *setMaximum) Matches(line string) bool { - return sm.rx.MatchString(line) -} - -type setMinimum struct { - builder validationBuilder - rx *regexp.Regexp -} - -func (sm *setMinimum) Matches(line string) bool { - return sm.rx.MatchString(line) -} - -func (sm *setMinimum) Parse(lines []string) error { - if len(lines) == 0 || (len(lines) == 1 && len(lines[0]) == 0) { - return nil - } - matches := sm.rx.FindStringSubmatch(lines[0]) - if len(matches) > 2 && len(matches[2]) > 0 { - minimum, err := strconv.ParseFloat(matches[2], 64) - if err != nil { - return err - } - sm.builder.SetMinimum(minimum, matches[1] == ">") - } - return nil -} - -type setMultipleOf struct { - builder validationBuilder - rx *regexp.Regexp -} - -func (sm *setMultipleOf) Matches(line string) bool { - return sm.rx.MatchString(line) -} - -func (sm *setMultipleOf) Parse(lines []string) error { - if len(lines) == 0 || (len(lines) == 1 && len(lines[0]) == 0) { - return nil - } - matches := sm.rx.FindStringSubmatch(lines[0]) - if len(matches) > 1 && len(matches[1]) > 0 { - multipleOf, err := strconv.ParseFloat(matches[1], 64) - if err != nil { - return err - } - sm.builder.SetMultipleOf(multipleOf) - } - return nil -} - -type setMaxItems struct { - 
builder validationBuilder - rx *regexp.Regexp -} - -func (sm *setMaxItems) Matches(line string) bool { - return sm.rx.MatchString(line) -} - -func (sm *setMaxItems) Parse(lines []string) error { - if len(lines) == 0 || (len(lines) == 1 && len(lines[0]) == 0) { - return nil - } - matches := sm.rx.FindStringSubmatch(lines[0]) - if len(matches) > 1 && len(matches[1]) > 0 { - maxItems, err := strconv.ParseInt(matches[1], 10, 64) - if err != nil { - return err - } - sm.builder.SetMaxItems(maxItems) - } - return nil -} - -type setMinItems struct { - builder validationBuilder - rx *regexp.Regexp -} - -func (sm *setMinItems) Matches(line string) bool { - return sm.rx.MatchString(line) -} - -func (sm *setMinItems) Parse(lines []string) error { - if len(lines) == 0 || (len(lines) == 1 && len(lines[0]) == 0) { - return nil - } - matches := sm.rx.FindStringSubmatch(lines[0]) - if len(matches) > 1 && len(matches[1]) > 0 { - minItems, err := strconv.ParseInt(matches[1], 10, 64) - if err != nil { - return err - } - sm.builder.SetMinItems(minItems) - } - return nil -} - -type setMaxLength struct { - builder validationBuilder - rx *regexp.Regexp -} - -func (sm *setMaxLength) Parse(lines []string) error { - if len(lines) == 0 || (len(lines) == 1 && len(lines[0]) == 0) { - return nil - } - matches := sm.rx.FindStringSubmatch(lines[0]) - if len(matches) > 1 && len(matches[1]) > 0 { - maxLength, err := strconv.ParseInt(matches[1], 10, 64) - if err != nil { - return err - } - sm.builder.SetMaxLength(maxLength) - } - return nil -} - -func (sm *setMaxLength) Matches(line string) bool { - return sm.rx.MatchString(line) -} - -type setMinLength struct { - builder validationBuilder - rx *regexp.Regexp -} - -func (sm *setMinLength) Parse(lines []string) error { - if len(lines) == 0 || (len(lines) == 1 && len(lines[0]) == 0) { - return nil - } - matches := sm.rx.FindStringSubmatch(lines[0]) - if len(matches) > 1 && len(matches[1]) > 0 { - minLength, err := strconv.ParseInt(matches[1], 10, 64) - 
if err != nil { - return err - } - sm.builder.SetMinLength(minLength) - } - return nil -} - -func (sm *setMinLength) Matches(line string) bool { - return sm.rx.MatchString(line) -} - -type setPattern struct { - builder validationBuilder - rx *regexp.Regexp -} - -func (sm *setPattern) Parse(lines []string) error { - if len(lines) == 0 || (len(lines) == 1 && len(lines[0]) == 0) { - return nil - } - matches := sm.rx.FindStringSubmatch(lines[0]) - if len(matches) > 1 && len(matches[1]) > 0 { - sm.builder.SetPattern(matches[1]) - } - return nil -} - -func (sm *setPattern) Matches(line string) bool { - return sm.rx.MatchString(line) -} - -type setCollectionFormat struct { - builder operationValidationBuilder - rx *regexp.Regexp -} - -func (sm *setCollectionFormat) Parse(lines []string) error { - if len(lines) == 0 || (len(lines) == 1 && len(lines[0]) == 0) { - return nil - } - matches := sm.rx.FindStringSubmatch(lines[0]) - if len(matches) > 1 && len(matches[1]) > 0 { - sm.builder.SetCollectionFormat(matches[1]) - } - return nil -} - -func (sm *setCollectionFormat) Matches(line string) bool { - return sm.rx.MatchString(line) -} - -type setUnique struct { - builder validationBuilder - rx *regexp.Regexp -} - -func (su *setUnique) Matches(line string) bool { - return su.rx.MatchString(line) -} - -func (su *setUnique) Parse(lines []string) error { - if len(lines) == 0 || (len(lines) == 1 && len(lines[0]) == 0) { - return nil - } - matches := su.rx.FindStringSubmatch(lines[0]) - if len(matches) > 1 && len(matches[1]) > 0 { - req, err := strconv.ParseBool(matches[1]) - if err != nil { - return err - } - su.builder.SetUnique(req) - } - return nil -} - -type setEnum struct { - builder validationBuilder - rx *regexp.Regexp -} - -func (se *setEnum) Matches(line string) bool { - return se.rx.MatchString(line) -} - -func (se *setEnum) Parse(lines []string) error { - if len(lines) == 0 || (len(lines) == 1 && len(lines[0]) == 0) { - return nil - } - matches := 
se.rx.FindStringSubmatch(lines[0]) - if len(matches) > 1 && len(matches[1]) > 0 { - se.builder.SetEnum(matches[1]) - } - return nil -} - -func parseValueFromSchema(s string, schema *spec.SimpleSchema) (any, error) { - if schema != nil { - switch strings.Trim(schema.TypeName(), "\"") { - case "integer", "int", "int64", "int32", "int16": - return strconv.Atoi(s) - case "bool", "boolean": - return strconv.ParseBool(s) - case "number", "float64", "float32": - return strconv.ParseFloat(s, 64) - case "object": - var obj map[string]any - if err := json.Unmarshal([]byte(s), &obj); err != nil { - return s, nil //nolint:nilerr // fallback: return raw string when JSON is invalid - } - return obj, nil - case "array": - var slice []any - if err := json.Unmarshal([]byte(s), &slice); err != nil { - return s, nil //nolint:nilerr // fallback: return raw string when JSON is invalid - } - return slice, nil - default: - return s, nil - } - } else { - return s, nil - } -} - -type setDefault struct { - scheme *spec.SimpleSchema - builder validationBuilder - rx *regexp.Regexp -} - -func (sd *setDefault) Matches(line string) bool { - return sd.rx.MatchString(line) -} - -func (sd *setDefault) Parse(lines []string) error { - if len(lines) == 0 || (len(lines) == 1 && len(lines[0]) == 0) { - return nil - } - matches := sd.rx.FindStringSubmatch(lines[0]) - if len(matches) > 1 && len(matches[1]) > 0 { - d, err := parseValueFromSchema(matches[1], sd.scheme) - if err != nil { - return err - } - sd.builder.SetDefault(d) - } - return nil -} - -type setExample struct { - scheme *spec.SimpleSchema - builder validationBuilder - rx *regexp.Regexp -} - -func (se *setExample) Matches(line string) bool { - return se.rx.MatchString(line) -} - -func (se *setExample) Parse(lines []string) error { - if len(lines) == 0 || (len(lines) == 1 && len(lines[0]) == 0) { - return nil - } - matches := se.rx.FindStringSubmatch(lines[0]) - if len(matches) > 1 && len(matches[1]) > 0 { - d, err := 
parseValueFromSchema(matches[1], se.scheme) - if err != nil { - return err - } - se.builder.SetExample(d) - } - return nil -} - -type matchOnlyParam struct { - tgt *spec.Parameter - rx *regexp.Regexp -} - -func (mo *matchOnlyParam) Matches(line string) bool { - return mo.rx.MatchString(line) -} - -func (mo *matchOnlyParam) Parse(_ []string) error { - return nil -} - -type setRequiredParam struct { - tgt *spec.Parameter -} - -func (su *setRequiredParam) Matches(line string) bool { - return rxRequired.MatchString(line) -} - -func (su *setRequiredParam) Parse(lines []string) error { - if len(lines) == 0 || (len(lines) == 1 && len(lines[0]) == 0) { - return nil - } - matches := rxRequired.FindStringSubmatch(lines[0]) - if len(matches) > 1 && len(matches[1]) > 0 { - req, err := strconv.ParseBool(matches[1]) - if err != nil { - return err - } - su.tgt.Required = req - } - return nil -} - -type setReadOnlySchema struct { - tgt *spec.Schema -} - -func (su *setReadOnlySchema) Matches(line string) bool { - return rxReadOnly.MatchString(line) -} - -func (su *setReadOnlySchema) Parse(lines []string) error { - if len(lines) == 0 || (len(lines) == 1 && len(lines[0]) == 0) { - return nil - } - matches := rxReadOnly.FindStringSubmatch(lines[0]) - if len(matches) > 1 && len(matches[1]) > 0 { - req, err := strconv.ParseBool(matches[1]) - if err != nil { - return err - } - su.tgt.ReadOnly = req - } - return nil -} - -type setDeprecatedOp struct { - tgt *spec.Operation -} - -func (su *setDeprecatedOp) Matches(line string) bool { - return rxDeprecated.MatchString(line) -} - -func (su *setDeprecatedOp) Parse(lines []string) error { - if len(lines) == 0 || (len(lines) == 1 && len(lines[0]) == 0) { - return nil - } - matches := rxDeprecated.FindStringSubmatch(lines[0]) - if len(matches) > 1 && len(matches[1]) > 0 { - req, err := strconv.ParseBool(matches[1]) - if err != nil { - return err - } - su.tgt.Deprecated = req - } - return nil -} - -type setDiscriminator struct { - schema 
*spec.Schema - field string -} - -func (su *setDiscriminator) Matches(line string) bool { - return rxDiscriminator.MatchString(line) -} - -func (su *setDiscriminator) Parse(lines []string) error { - if len(lines) == 0 || (len(lines) == 1 && len(lines[0]) == 0) { - return nil - } - matches := rxDiscriminator.FindStringSubmatch(lines[0]) - if len(matches) > 1 && len(matches[1]) > 0 { - req, err := strconv.ParseBool(matches[1]) - if err != nil { - return err - } - if req { - su.schema.Discriminator = su.field - } else if su.schema.Discriminator == su.field { - su.schema.Discriminator = "" - } - } - return nil -} - -type setRequiredSchema struct { - schema *spec.Schema - field string -} - -func (su *setRequiredSchema) Matches(line string) bool { - return rxRequired.MatchString(line) -} - -func (su *setRequiredSchema) Parse(lines []string) error { - if len(lines) == 0 || (len(lines) == 1 && len(lines[0]) == 0) { - return nil - } - matches := rxRequired.FindStringSubmatch(lines[0]) - if len(matches) <= 1 || len(matches[1]) == 0 { - return nil - } - - req, err := strconv.ParseBool(matches[1]) - if err != nil { - return err - } - midx := -1 - for i, nm := range su.schema.Required { - if nm == su.field { - midx = i - break - } - } - if req { - if midx < 0 { - su.schema.Required = append(su.schema.Required, su.field) - } - } else if midx >= 0 { - su.schema.Required = append(su.schema.Required[:midx], su.schema.Required[midx+1:]...) 
- } - return nil -} - -func newMultilineDropEmptyParser(rx *regexp.Regexp, set func([]string)) *multiLineDropEmptyParser { - return &multiLineDropEmptyParser{ - rx: rx, - set: set, - } -} - -type multiLineDropEmptyParser struct { - set func([]string) - rx *regexp.Regexp -} - -func (m *multiLineDropEmptyParser) Matches(line string) bool { - return m.rx.MatchString(line) -} - -func (m *multiLineDropEmptyParser) Parse(lines []string) error { - m.set(removeEmptyLines(lines)) - return nil -} - -func newSetSchemes(set func([]string)) *setSchemes { - return &setSchemes{ - set: set, - rx: rxSchemes, - } -} - -type setSchemes struct { - set func([]string) - rx *regexp.Regexp -} - -func (ss *setSchemes) Matches(line string) bool { - return ss.rx.MatchString(line) -} - -func (ss *setSchemes) Parse(lines []string) error { - if len(lines) == 0 || (len(lines) == 1 && len(lines[0]) == 0) { - return nil - } - matches := ss.rx.FindStringSubmatch(lines[0]) - if len(matches) > 1 && len(matches[1]) > 0 { - sch := strings.Split(matches[1], ", ") - - schemes := []string{} - for _, s := range sch { - ts := strings.TrimSpace(s) - if ts != "" { - schemes = append(schemes, ts) - } - } - ss.set(schemes) - } - return nil -} - -func newSetSecurity(rx *regexp.Regexp, setter func([]map[string][]string)) *setSecurity { - return &setSecurity{ - set: setter, - rx: rx, - } -} - -type setSecurity struct { - set func([]map[string][]string) - rx *regexp.Regexp -} - -func (ss *setSecurity) Matches(line string) bool { - return ss.rx.MatchString(line) -} - -func (ss *setSecurity) Parse(lines []string) error { - if len(lines) == 0 || (len(lines) == 1 && len(lines[0]) == 0) { - return nil - } - - var result []map[string][]string - for _, line := range lines { - kv := strings.SplitN(line, ":", kvParts) - scopes := []string{} - var key string - - if len(kv) > 1 { - scs := strings.SplitSeq(kv[1], ",") - for scope := range scs { - tr := strings.TrimSpace(scope) - if tr != "" { - tr = strings.SplitAfter(tr, " 
")[0] - scopes = append(scopes, strings.TrimSpace(tr)) - } - } - - key = strings.TrimSpace(kv[0]) - - result = append(result, map[string][]string{key: scopes}) - } - } - ss.set(result) - return nil -} - -func newSetResponses(definitions map[string]spec.Schema, responses map[string]spec.Response, setter func(*spec.Response, map[int]spec.Response)) *setOpResponses { - return &setOpResponses{ - set: setter, - rx: rxResponses, - definitions: definitions, - responses: responses, - } -} - -type setOpResponses struct { - set func(*spec.Response, map[int]spec.Response) - rx *regexp.Regexp - definitions map[string]spec.Schema - responses map[string]spec.Response -} - -func (ss *setOpResponses) Matches(line string) bool { - return ss.rx.MatchString(line) -} - -// responseTag used when specifying a response to point to a defined swagger:response. -const responseTag = "response" - -// bodyTag used when specifying a response to point to a model/schema. -const bodyTag = "body" - -// descriptionTag used when specifying a response that gives a description of the response. 
-const descriptionTag = "description" - -func parseTags(line string) (modelOrResponse string, arrays int, isDefinitionRef bool, description string, err error) { - tags := strings.Split(line, " ") - parsedModelOrResponse := false - - for i, tagAndValue := range tags { - tagValList := strings.SplitN(tagAndValue, ":", kvParts) - var tag, value string - if len(tagValList) > 1 { - tag = tagValList[0] - value = tagValList[1] - } else { - // TODO: Print a warning, and in the long term, do not support not tagged values - // Add a default tag if none is supplied - if i == 0 { - tag = responseTag - } else { - tag = descriptionTag - } - value = tagValList[0] - } - - foundModelOrResponse := false - if !parsedModelOrResponse { - if tag == bodyTag { - foundModelOrResponse = true - isDefinitionRef = true - } - if tag == responseTag { - foundModelOrResponse = true - isDefinitionRef = false - } - } - if foundModelOrResponse { - // Read the model or response tag - parsedModelOrResponse = true - // Check for nested arrays - arrays = 0 - for strings.HasPrefix(value, "[]") { - arrays++ - value = value[2:] - } - // What's left over is the model name - modelOrResponse = value - continue - } - - if tag == descriptionTag { - // Descriptions are special, they read the rest of the line - descriptionWords := []string{value} - if i < len(tags)-1 { - descriptionWords = append(descriptionWords, tags[i+1:]...) 
- } - description = strings.Join(descriptionWords, " ") - break - } - - if tag == responseTag || tag == bodyTag { - err = fmt.Errorf("valid tag %s, but not in a valid position: %w", tag, ErrCodeScan) - } else { - err = fmt.Errorf("invalid tag: %s: %w", tag, ErrCodeScan) - } - // return error - return modelOrResponse, arrays, isDefinitionRef, description, err - } - - // TODO: Maybe do, if !parsedModelOrResponse {return some error} - return modelOrResponse, arrays, isDefinitionRef, description, err -} - -func assignResponse(key string, resp spec.Response, def *spec.Response, scr map[int]spec.Response) (*spec.Response, map[int]spec.Response) { - if strings.EqualFold("default", key) { - if def == nil { - def = &resp - } - return def, scr - } - - if sc, err := strconv.Atoi(key); err == nil { - if scr == nil { - scr = make(map[int]spec.Response) - } - scr[sc] = resp - } - - return def, scr -} - -func (ss *setOpResponses) Parse(lines []string) error { - if len(lines) == 0 || (len(lines) == 1 && len(lines[0]) == 0) { - return nil - } - - var def *spec.Response - var scr map[int]spec.Response - - for _, line := range lines { - var err error - def, scr, err = ss.parseResponseLine(line, def, scr) - if err != nil { - return err - } - } - - ss.set(def, scr) - return nil -} - -func (ss *setOpResponses) parseResponseLine(line string, def *spec.Response, scr map[int]spec.Response) (*spec.Response, map[int]spec.Response, error) { - kv := strings.SplitN(line, ":", kvParts) - if len(kv) <= 1 { - return def, scr, nil - } - - key := strings.TrimSpace(kv[0]) - if key == "" { - return def, scr, nil - } - - value := strings.TrimSpace(kv[1]) - if value == "" { - def, scr = assignResponse(key, spec.Response{}, def, scr) - return def, scr, nil - } - - refTarget, arrays, isDefinitionRef, description, err := parseTags(value) - if err != nil { - return def, scr, err - } - - // A possible exception for having a definition - if _, ok := ss.responses[refTarget]; !ok { - if _, ok := 
ss.definitions[refTarget]; ok { - isDefinitionRef = true - } - } - - var ref spec.Ref - if isDefinitionRef { - if description == "" { - description = refTarget - } - ref, err = spec.NewRef("#/definitions/" + refTarget) - } else { - ref, err = spec.NewRef("#/responses/" + refTarget) - } - if err != nil { - return def, scr, err - } - - // description should used on anyway. - resp := spec.Response{ResponseProps: spec.ResponseProps{Description: description}} - - if isDefinitionRef { - resp.Schema = new(spec.Schema) - resp.Description = description - if arrays == 0 { - resp.Schema.Ref = ref - } else { - cs := resp.Schema - for range arrays { - cs.Typed("array", "") - cs.Items = new(spec.SchemaOrArray) - cs.Items.Schema = new(spec.Schema) - cs = cs.Items.Schema - } - cs.Ref = ref - } - // ref. could be empty while use description tag - } else if len(refTarget) > 0 { - resp.Ref = ref - } - - def, scr = assignResponse(key, resp, def, scr) - return def, scr, nil -} - -func parseEnumOld(val string, s *spec.SimpleSchema) []any { - list := strings.Split(val, ",") - interfaceSlice := make([]any, len(list)) - for i, d := range list { - v, err := parseValueFromSchema(d, s) - if err != nil { - interfaceSlice[i] = d - continue - } - - interfaceSlice[i] = v - } - return interfaceSlice -} - -func parseEnum(val string, s *spec.SimpleSchema) []any { - // obtain the raw elements of the list to latter process them with the parseValueFromSchema - var rawElements []json.RawMessage - if err := json.Unmarshal([]byte(val), &rawElements); err != nil { - log.Print("WARNING: item list for enum is not a valid JSON array, using the old deprecated format") - return parseEnumOld(val, s) - } - - interfaceSlice := make([]any, len(rawElements)) - - for i, d := range rawElements { - ds, err := strconv.Unquote(string(d)) - if err != nil { - ds = string(d) - } - - v, err := parseValueFromSchema(ds, s) - if err != nil { - interfaceSlice[i] = ds - continue - } - - interfaceSlice[i] = v - } - - return 
interfaceSlice -} - -// alphaChars used when parsing for Vendor Extensions. -const alphaChars = "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ" - -func newSetExtensions(setter func(*spec.Extensions), debug bool) *setOpExtensions { - return &setOpExtensions{ - set: setter, - rx: rxExtensions, - debug: debug, - } -} - -type setOpExtensions struct { - set func(*spec.Extensions) - rx *regexp.Regexp - debug bool -} - -type extensionObject struct { - Extension string - Root any -} - -type extensionParsingStack []any - -// Helper function to walk back through extensions until the proper nest level is reached. -func (stack *extensionParsingStack) walkBack(rawLines []string, lineIndex int) { - indent := strings.IndexAny(rawLines[lineIndex], alphaChars) - nextIndent := strings.IndexAny(rawLines[lineIndex+1], alphaChars) - if nextIndent >= indent { - return - } - - // Pop elements off the stack until we're back where we need to be - runbackIndex := 0 - poppedIndent := 1000 - for { - checkIndent := strings.IndexAny(rawLines[lineIndex-runbackIndex], alphaChars) - if nextIndent == checkIndent { - break - } - if checkIndent < poppedIndent { - *stack = (*stack)[:len(*stack)-1] - poppedIndent = checkIndent - } - runbackIndex++ - } -} - -// Recursively parses through the given extension lines, building and adding extension objects as it goes. -// Extensions may be key:value pairs, arrays, or objects. 
-func buildExtensionObjects(rawLines []string, cleanLines []string, lineIndex int, extObjs *[]extensionObject, stack *extensionParsingStack) { - if lineIndex >= len(rawLines) { - if stack != nil { - if ext, ok := (*stack)[0].(extensionObject); ok { - *extObjs = append(*extObjs, ext) - } - } - return - } - - kv := strings.SplitN(cleanLines[lineIndex], ":", kvParts) - key := strings.TrimSpace(kv[0]) - if key == "" { - // Some odd empty line - return - } - - nextIsList := false - if lineIndex < len(rawLines)-1 { - next := strings.SplitAfterN(cleanLines[lineIndex+1], ":", kvParts) - nextIsList = len(next) == 1 - } - - if len(kv) <= 1 { - // Should be a list item - if stack == nil || len(*stack) == 0 { - return - } - stackIndex := len(*stack) - 1 - list, ok := (*stack)[stackIndex].(*[]string) - if !ok { - panic(fmt.Errorf("internal error: expected *[]string, got %T: %w", (*stack)[stackIndex], ErrCodeScan)) - } - *list = append(*list, key) - (*stack)[stackIndex] = list - if lineIndex < len(rawLines)-1 && !rxAllowedExtensions.MatchString(cleanLines[lineIndex+1]) { - stack.walkBack(rawLines, lineIndex) - } - buildExtensionObjects(rawLines, cleanLines, lineIndex+1, extObjs, stack) - return - } - - // Should be the start of a map or a key:value pair - value := strings.TrimSpace(kv[1]) - - if rxAllowedExtensions.MatchString(key) { - buildNewExtension(key, value, nextIsList, stack, rawLines, cleanLines, lineIndex, extObjs) - return - } - - if stack == nil || len(*stack) == 0 { - return - } - - buildStackEntry(key, value, nextIsList, stack, rawLines, cleanLines, lineIndex) - buildExtensionObjects(rawLines, cleanLines, lineIndex+1, extObjs, stack) -} - -// buildNewExtension handles the start of a new x- extension key. 
-func buildNewExtension(key, value string, nextIsList bool, stack *extensionParsingStack, rawLines, cleanLines []string, lineIndex int, extObjs *[]extensionObject) { - // Flush any previous extension on the stack - if stack != nil { - if ext, ok := (*stack)[0].(extensionObject); ok { - *extObjs = append(*extObjs, ext) - } - } - - if value != "" { - ext := extensionObject{ - Extension: key, - } - // Extension is simple key:value pair, no stack - rootMap := make(map[string]string) - rootMap[key] = value - ext.Root = rootMap - *extObjs = append(*extObjs, ext) - buildExtensionObjects(rawLines, cleanLines, lineIndex+1, extObjs, nil) - return - } - - ext := extensionObject{ - Extension: key, - } - if nextIsList { - // Extension is an array - rootMap := make(map[string]*[]string) - rootList := make([]string, 0) - rootMap[key] = &rootList - ext.Root = rootMap - stack = &extensionParsingStack{} - *stack = append(*stack, ext) - rootListMap, ok := ext.Root.(map[string]*[]string) - if !ok { - panic(fmt.Errorf("internal error: expected map[string]*[]string, got %T: %w", ext.Root, ErrCodeScan)) - } - *stack = append(*stack, rootListMap[key]) - } else { - // Extension is an object - rootMap := make(map[string]any) - innerMap := make(map[string]any) - rootMap[key] = innerMap - ext.Root = rootMap - stack = &extensionParsingStack{} - *stack = append(*stack, ext) - *stack = append(*stack, innerMap) - } - buildExtensionObjects(rawLines, cleanLines, lineIndex+1, extObjs, stack) -} - -func assertStackMap(stack *extensionParsingStack, index int) map[string]any { - asMap, ok := (*stack)[index].(map[string]any) - if !ok { - panic(fmt.Errorf("internal error: stack index expected to be map[string]any, but got %T: %w", (*stack)[index], ErrCodeScan)) - } - return asMap -} - -// buildStackEntry adds a key/value, nested list, or nested map to the current stack. 
-func buildStackEntry(key, value string, nextIsList bool, stack *extensionParsingStack, rawLines, cleanLines []string, lineIndex int) { - stackIndex := len(*stack) - 1 - if value == "" { - asMap := assertStackMap(stack, stackIndex) - if nextIsList { - // start of new list - newList := make([]string, 0) - asMap[key] = &newList - *stack = append(*stack, &newList) - } else { - // start of new map - newMap := make(map[string]any) - asMap[key] = newMap - *stack = append(*stack, newMap) - } - return - } - - // key:value - if reflect.TypeOf((*stack)[stackIndex]).Kind() == reflect.Map { - asMap := assertStackMap(stack, stackIndex) - asMap[key] = value - } - if lineIndex < len(rawLines)-1 && !rxAllowedExtensions.MatchString(cleanLines[lineIndex+1]) { - stack.walkBack(rawLines, lineIndex) - } -} - -func (ss *setOpExtensions) Matches(line string) bool { - return ss.rx.MatchString(line) -} - -func (ss *setOpExtensions) Parse(lines []string) error { - if len(lines) == 0 || (len(lines) == 1 && len(lines[0]) == 0) { - return nil - } - - cleanLines := cleanupScannerLines(lines, rxUncommentHeaders) - - exts := new(spec.VendorExtensible) - extList := make([]extensionObject, 0) - buildExtensionObjects(lines, cleanLines, 0, &extList, nil) - - // Extensions can be one of the following: - // key:value pair - // list/array - // object - for _, ext := range extList { - if m, ok := ext.Root.(map[string]string); ok { - exts.AddExtension(ext.Extension, m[ext.Extension]) - } else if m, ok := ext.Root.(map[string]*[]string); ok { - exts.AddExtension(ext.Extension, *m[ext.Extension]) - } else if m, ok := ext.Root.(map[string]any); ok { - exts.AddExtension(ext.Extension, m[ext.Extension]) - } else { - debugLogf(ss.debug, "Unknown Extension type: %s", fmt.Sprint(reflect.TypeOf(ext.Root))) - } - } - - ss.set(&exts.Extensions) - return nil -} - -var unsupportedTypes = map[string]struct{}{ //nolint:gochecknoglobals // immutable lookup table - "complex64": {}, - "complex128": {}, -} - -type objecter 
interface { - Obj() *types.TypeName -} - -func unsupportedBuiltinType(tpe types.Type) bool { - unaliased := types.Unalias(tpe) - - switch ftpe := unaliased.(type) { - case *types.Basic: - return unsupportedBasic(ftpe) - case *types.TypeParam: - return true - case *types.Chan: - return true - case *types.Signature: - return true - case objecter: - return unsupportedBuiltin(ftpe) - default: - return false - } -} - -func unsupportedBuiltin(tpe objecter) bool { - o := tpe.Obj() - if o == nil { - return false - } - - if o.Pkg() != nil { - if o.Pkg().Path() == "unsafe" { - return true - } - - return false // not a builtin type - } - - _, found := unsupportedTypes[o.Name()] - - return found -} - -func unsupportedBasic(tpe *types.Basic) bool { - if tpe.Kind() == types.UnsafePointer { - return true - } - - _, found := unsupportedTypes[tpe.Name()] - - return found -} - -func cleanupScannerLines(lines []string, ur *regexp.Regexp) []string { - // bail early when there is nothing to parse - if len(lines) == 0 { - return lines - } - - seenLine := -1 - var lastContent int - - uncommented := make([]string, 0, len(lines)) - for i, v := range lines { - str := ur.ReplaceAllString(v, "") - uncommented = append(uncommented, str) - if str != "" { - if seenLine < 0 { - seenLine = i - } - lastContent = i - } - } - - // fixes issue #50 - if seenLine == -1 { - return nil - } - - return uncommented[seenLine : lastContent+1] -} diff --git a/responses.go b/responses.go deleted file mode 100644 index b6c0f76..0000000 --- a/responses.go +++ /dev/null @@ -1,548 +0,0 @@ -// SPDX-FileCopyrightText: Copyright 2015-2025 go-swagger maintainers -// SPDX-License-Identifier: Apache-2.0 - -package codescan - -import ( - "fmt" - "go/types" - "strings" - - "github.com/go-openapi/spec" -) - -type responseTypable struct { - in string - header *spec.Header - response *spec.Response - skipExt bool -} - -func (ht responseTypable) In() string { return ht.in } - -func (ht responseTypable) Level() int { return 0 } 
- -func (ht responseTypable) Typed(tpe, format string) { - ht.header.Typed(tpe, format) -} - -func bodyTypable(in string, schema *spec.Schema, skipExt bool) (swaggerTypable, *spec.Schema) { //nolint:ireturn // polymorphic by design - if in == bodyTag { - // get the schema for items on the schema property - if schema == nil { - schema = new(spec.Schema) - } - if schema.Items == nil { - schema.Items = new(spec.SchemaOrArray) - } - if schema.Items.Schema == nil { - schema.Items.Schema = new(spec.Schema) - } - schema.Typed("array", "") - return schemaTypable{schema.Items.Schema, 1, skipExt}, schema - } - return nil, nil -} - -func (ht responseTypable) Items() swaggerTypable { //nolint:ireturn // polymorphic by design - bdt, schema := bodyTypable(ht.in, ht.response.Schema, ht.skipExt) - if bdt != nil { - ht.response.Schema = schema - return bdt - } - - if ht.header.Items == nil { - ht.header.Items = new(spec.Items) - } - ht.header.Type = "array" - return itemsTypable{ht.header.Items, 1, "header"} -} - -func (ht responseTypable) SetRef(ref spec.Ref) { - // having trouble seeing the usefulness of this one here - ht.Schema().Ref = ref -} - -func (ht responseTypable) Schema() *spec.Schema { - if ht.response.Schema == nil { - ht.response.Schema = new(spec.Schema) - } - return ht.response.Schema -} - -func (ht responseTypable) SetSchema(schema *spec.Schema) { - ht.response.Schema = schema -} - -func (ht responseTypable) CollectionOf(items *spec.Items, format string) { - ht.header.CollectionOf(items, format) -} - -func (ht responseTypable) AddExtension(key string, value any) { - ht.response.AddExtension(key, value) -} - -func (ht responseTypable) WithEnum(values ...any) { - ht.header.WithEnum(values) -} - -func (ht responseTypable) WithEnumDescription(_ string) { - // no -} - -type headerValidations struct { - current *spec.Header -} - -func (sv headerValidations) SetMaximum(val float64, exclusive bool) { - sv.current.Maximum = &val - sv.current.ExclusiveMaximum = exclusive -} 
- -func (sv headerValidations) SetMinimum(val float64, exclusive bool) { - sv.current.Minimum = &val - sv.current.ExclusiveMinimum = exclusive -} - -func (sv headerValidations) SetMultipleOf(val float64) { - sv.current.MultipleOf = &val -} - -func (sv headerValidations) SetMinItems(val int64) { - sv.current.MinItems = &val -} - -func (sv headerValidations) SetMaxItems(val int64) { - sv.current.MaxItems = &val -} - -func (sv headerValidations) SetMinLength(val int64) { - sv.current.MinLength = &val -} - -func (sv headerValidations) SetMaxLength(val int64) { - sv.current.MaxLength = &val -} - -func (sv headerValidations) SetPattern(val string) { - sv.current.Pattern = val -} - -func (sv headerValidations) SetUnique(val bool) { - sv.current.UniqueItems = val -} - -func (sv headerValidations) SetCollectionFormat(val string) { - sv.current.CollectionFormat = val -} - -func (sv headerValidations) SetEnum(val string) { - sv.current.Enum = parseEnum(val, &spec.SimpleSchema{Type: sv.current.Type, Format: sv.current.Format}) -} - -func (sv headerValidations) SetDefault(val any) { sv.current.Default = val } - -func (sv headerValidations) SetExample(val any) { sv.current.Example = val } - -type responseBuilder struct { - ctx *scanCtx - decl *entityDecl - postDecls []*entityDecl -} - -func (r *responseBuilder) Build(responses map[string]spec.Response) error { - // check if there is a swagger:response tag that is followed by one or more words, - // these words are the ids of the operations this parameter struct applies to - // once type name is found convert it to a schema, by looking up the schema in the - // parameters dictionary that got passed into this parse method - - name, _ := r.decl.ResponseNames() - response := responses[name] - debugLogf(r.ctx.debug, "building response: %s", name) - - // analyze doc comment for the model - sp := new(sectionedParser) - sp.setDescription = func(lines []string) { response.Description = joinDropLast(lines) } - if err := 
sp.Parse(r.decl.Comments); err != nil { - return err - } - - // analyze struct body for fields etc - // each exported struct field: - // * gets a type mapped to a go primitive - // * perhaps gets a format - // * has to document the validations that apply for the type and the field - // * when the struct field points to a model it becomes a ref: #/definitions/ModelName - // * comments that aren't tags is used as the description - if err := r.buildFromType(r.decl.ObjType(), &response, make(map[string]bool)); err != nil { - return err - } - responses[name] = response - return nil -} - -func (r *responseBuilder) buildFromField(fld *types.Var, tpe types.Type, typable swaggerTypable, seen map[string]bool) error { - debugLogf(r.ctx.debug, "build from field %s: %T", fld.Name(), tpe) - - switch ftpe := tpe.(type) { - case *types.Basic: - return swaggerSchemaForType(ftpe.Name(), typable) - case *types.Struct: - return r.buildFromFieldStruct(ftpe, typable) - case *types.Pointer: - return r.buildFromField(fld, ftpe.Elem(), typable, seen) - case *types.Interface: - return r.buildFromFieldInterface(ftpe, typable) - case *types.Array: - return r.buildFromField(fld, ftpe.Elem(), typable.Items(), seen) - case *types.Slice: - return r.buildFromField(fld, ftpe.Elem(), typable.Items(), seen) - case *types.Map: - return r.buildFromFieldMap(ftpe, typable) - case *types.Named: - return r.buildNamedField(ftpe, typable) - case *types.Alias: - debugLogf(r.ctx.debug, "alias(responses.buildFromField): got alias %v to %v", ftpe, ftpe.Rhs()) - return r.buildFieldAlias(ftpe, typable, fld, seen) - default: - return fmt.Errorf("unknown type for %s: %T: %w", fld.String(), fld.Type(), ErrCodeScan) - } -} - -func (r *responseBuilder) buildFromFieldStruct(ftpe *types.Struct, typable swaggerTypable) error { - sb := schemaBuilder{ - decl: r.decl, - ctx: r.ctx, - } - - if err := sb.buildFromType(ftpe, typable); err != nil { - return err - } - - r.postDecls = append(r.postDecls, sb.postDecls...) 
- - return nil -} - -func (r *responseBuilder) buildFromFieldMap(ftpe *types.Map, typable swaggerTypable) error { - schema := new(spec.Schema) - typable.Schema().Typed("object", "").AdditionalProperties = &spec.SchemaOrBool{ - Schema: schema, - } - - sb := schemaBuilder{ - decl: r.decl, - ctx: r.ctx, - } - - if err := sb.buildFromType(ftpe.Elem(), schemaTypable{schema, typable.Level() + 1, r.ctx.opts.SkipExtensions}); err != nil { - return err - } - - r.postDecls = append(r.postDecls, sb.postDecls...) - - return nil -} - -func (r *responseBuilder) buildFromFieldInterface(tpe types.Type, typable swaggerTypable) error { - sb := schemaBuilder{ - decl: r.decl, - ctx: r.ctx, - } - if err := sb.buildFromType(tpe, typable); err != nil { - return err - } - r.postDecls = append(r.postDecls, sb.postDecls...) - - return nil -} - -func (r *responseBuilder) buildFromType(otpe types.Type, resp *spec.Response, seen map[string]bool) error { - switch tpe := otpe.(type) { - case *types.Pointer: - return r.buildFromType(tpe.Elem(), resp, seen) - case *types.Named: - return r.buildNamedType(tpe, resp, seen) - case *types.Alias: - debugLogf(r.ctx.debug, "alias(responses.buildFromType): got alias %v to %v", tpe, tpe.Rhs()) - return r.buildAlias(tpe, resp, seen) - default: - return fmt.Errorf("anonymous types are currently not supported for responses: %w", ErrCodeScan) - } -} - -func (r *responseBuilder) buildNamedType(tpe *types.Named, resp *spec.Response, seen map[string]bool) error { - o := tpe.Obj() - if isAny(o) || isStdError(o) { - return fmt.Errorf("%s type not supported in the context of a responses section definition: %w", o.Name(), ErrCodeScan) - } - mustNotBeABuiltinType(o) - // ICI - - switch stpe := o.Type().Underlying().(type) { // TODO(fred): this is wrong without checking for aliases? 
- case *types.Struct: - debugLogf(r.ctx.debug, "build from type %s: %T", o.Name(), tpe) - if decl, found := r.ctx.DeclForType(o.Type()); found { - return r.buildFromStruct(decl, stpe, resp, seen) - } - return r.buildFromStruct(r.decl, stpe, resp, seen) - - default: - if decl, found := r.ctx.DeclForType(o.Type()); found { - var schema spec.Schema - typable := schemaTypable{schema: &schema, level: 0, skipExt: r.ctx.opts.SkipExtensions} - - d := decl.Obj() - if isStdTime(d) { - typable.Typed("string", "date-time") - return nil - } - if sfnm, isf := strfmtName(decl.Comments); isf { - typable.Typed("string", sfnm) - return nil - } - sb := &schemaBuilder{ctx: r.ctx, decl: decl} - sb.inferNames() - if err := sb.buildFromType(tpe.Underlying(), typable); err != nil { - return err - } - resp.WithSchema(&schema) - r.postDecls = append(r.postDecls, sb.postDecls...) - return nil - } - return fmt.Errorf("responses can only be structs, did you mean for %s to be the response body?: %w", tpe.String(), ErrCodeScan) - } -} - -func (r *responseBuilder) buildAlias(tpe *types.Alias, resp *spec.Response, seen map[string]bool) error { - // panic("yay") - o := tpe.Obj() - if isAny(o) || isStdError(o) { - // wrong: TODO(fred): see what object exactly we want to build here - figure out with specific tests - return fmt.Errorf("%s type not supported in the context of a responses section definition: %w", o.Name(), ErrCodeScan) - } - mustNotBeABuiltinType(o) - mustHaveRightHandSide(tpe) - - rhs := tpe.Rhs() - - // If transparent aliases are enabled, use the underlying type directly without creating a definition - if r.ctx.app.transparentAliases { - return r.buildFromType(rhs, resp, seen) - } - - decl, ok := r.ctx.FindModel(o.Pkg().Path(), o.Name()) - if !ok { - return fmt.Errorf("can't find source file for aliased type: %v -> %v: %w", tpe, rhs, ErrCodeScan) - } - r.postDecls = append(r.postDecls, decl) // mark the left-hand side as discovered - - if !r.ctx.app.refAliases { - // expand alias - 
unaliased := types.Unalias(tpe) - return r.buildFromType(unaliased.Underlying(), resp, seen) - } - - switch rtpe := rhs.(type) { - // load declaration for named unaliased type - case *types.Named: - o := rtpe.Obj() - if o.Pkg() == nil { - break // builtin - } - - typable := schemaTypable{schema: &spec.Schema{}, level: 0, skipExt: r.ctx.opts.SkipExtensions} - return r.makeRef(decl, typable) - case *types.Alias: - o := rtpe.Obj() - if o.Pkg() == nil { - break // builtin - } - - typable := schemaTypable{schema: &spec.Schema{}, level: 0, skipExt: r.ctx.opts.SkipExtensions} - - return r.makeRef(decl, typable) - } - - return r.buildFromType(rhs, resp, seen) -} - -func (r *responseBuilder) buildNamedField(ftpe *types.Named, typable swaggerTypable) error { - decl, found := r.ctx.DeclForType(ftpe.Obj().Type()) - if !found { - return fmt.Errorf("unable to find package and source file for: %s: %w", ftpe.String(), ErrCodeScan) - } - - d := decl.Obj() - if isStdTime(d) { - typable.Typed("string", "date-time") - return nil - } - - if sfnm, isf := strfmtName(decl.Comments); isf { - typable.Typed("string", sfnm) - return nil - } - - sb := &schemaBuilder{ctx: r.ctx, decl: decl} - sb.inferNames() - if err := sb.buildFromType(decl.ObjType(), typable); err != nil { - return err - } - - r.postDecls = append(r.postDecls, sb.postDecls...) - - return nil -} - -func (r *responseBuilder) buildFieldAlias(tpe *types.Alias, typable swaggerTypable, fld *types.Var, seen map[string]bool) error { - _ = fld - _ = seen - o := tpe.Obj() - if isAny(o) { - // e.g. Field interface{} or Field any - _ = typable.Schema() - - return nil // just leave an empty schema - } - - // If transparent aliases are enabled, use the underlying type directly without creating a definition - if r.ctx.app.transparentAliases { - sb := schemaBuilder{ - decl: r.decl, - ctx: r.ctx, - } - if err := sb.buildFromType(tpe.Rhs(), typable); err != nil { - return err - } - r.postDecls = append(r.postDecls, sb.postDecls...) 
- return nil - } - - decl, ok := r.ctx.FindModel(o.Pkg().Path(), o.Name()) - if !ok { - return fmt.Errorf("can't find source file for aliased type: %v: %w", tpe, ErrCodeScan) - } - r.postDecls = append(r.postDecls, decl) // mark the left-hand side as discovered - - return r.makeRef(decl, typable) -} - -func (r *responseBuilder) buildFromStruct(decl *entityDecl, tpe *types.Struct, resp *spec.Response, seen map[string]bool) error { - if tpe.NumFields() == 0 { - return nil - } - - for fld := range tpe.Fields() { - if fld.Embedded() { - if err := r.buildFromType(fld.Type(), resp, seen); err != nil { - return err - } - continue - } - if fld.Anonymous() { - debugLogf(r.ctx.debug, "skipping anonymous field") - continue - } - - if err := r.processResponseField(fld, decl, resp, seen); err != nil { - return err - } - } - - for k := range resp.Headers { - if !seen[k] { - delete(resp.Headers, k) - } - } - return nil -} - -func (r *responseBuilder) processResponseField(fld *types.Var, decl *entityDecl, resp *spec.Response, seen map[string]bool) error { - if !fld.Exported() { - return nil - } - - afld := findASTField(decl.File, fld.Pos()) - if afld == nil { - debugLogf(r.ctx.debug, "can't find source associated with %s", fld.String()) - return nil - } - - if ignored(afld.Doc) { - debugLogf(r.ctx.debug, "field %v is deliberately ignored", fld) - return nil - } - - name, ignore, _, _, err := parseJSONTag(afld) - if err != nil { - return err - } - if ignore { - return nil - } - - var in string - // scan for param location first, this changes some behavior down the line - if afld.Doc != nil { - for _, cmt := range afld.Doc.List { - for line := range strings.SplitSeq(cmt.Text, "\n") { - matches := rxIn.FindStringSubmatch(line) - if len(matches) > 0 && len(strings.TrimSpace(matches[1])) > 0 { - in = strings.TrimSpace(matches[1]) - } - } - } - } - - ps := resp.Headers[name] - - // support swagger:file for response - // An API operation can return a file, such as an image or PDF. 
In this case, - // define the response schema with type: file and specify the appropriate MIME types in the produces section. - if afld.Doc != nil && fileParam(afld.Doc) { - resp.Schema = &spec.Schema{} - resp.Schema.Typed("file", "") - } else { - debugLogf(r.ctx.debug, "build response %v (%v) (not a file)", fld, fld.Type()) - if err := r.buildFromField(fld, fld.Type(), responseTypable{in, &ps, resp, r.ctx.opts.SkipExtensions}, seen); err != nil { - return err - } - } - - if strfmtName, ok := strfmtName(afld.Doc); ok { - ps.Typed("string", strfmtName) - } - - sp := new(sectionedParser) - sp.setDescription = func(lines []string) { ps.Description = joinDropLast(lines) } - if err := setupResponseHeaderTaggers(sp, &ps, name, afld); err != nil { - return err - } - - if err := sp.Parse(afld.Doc); err != nil { - return err - } - - if in != bodyTag { - seen[name] = true - if resp.Headers == nil { - resp.Headers = make(map[string]spec.Header) - } - resp.Headers[name] = ps - } - return nil -} - -func (r *responseBuilder) makeRef(decl *entityDecl, prop swaggerTypable) error { - nm, _ := decl.Names() - ref, err := spec.NewRef("#/definitions/" + nm) - if err != nil { - return err - } - - prop.SetRef(ref) - r.postDecls = append(r.postDecls, decl) // mark the $ref target as discovered - - return nil -} diff --git a/resume b/resume deleted file mode 100644 index 5edfecf..0000000 --- a/resume +++ /dev/null @@ -1 +0,0 @@ -claude --resume bede006e-f3b0-4a5e-a6e3-3fa30ffed38d diff --git a/routes.go b/routes.go deleted file mode 100644 index 0bbe7ff..0000000 --- a/routes.go +++ /dev/null @@ -1,95 +0,0 @@ -// SPDX-FileCopyrightText: Copyright 2015-2025 go-swagger maintainers -// SPDX-License-Identifier: Apache-2.0 - -package codescan - -import ( - "fmt" - - "github.com/go-openapi/spec" -) - -func opConsumesSetter(op *spec.Operation) func([]string) { - return func(consumes []string) { op.Consumes = consumes } -} - -func opProducesSetter(op *spec.Operation) func([]string) { - return 
func(produces []string) { op.Produces = produces } -} - -func opSchemeSetter(op *spec.Operation) func([]string) { - return func(schemes []string) { op.Schemes = schemes } -} - -func opSecurityDefsSetter(op *spec.Operation) func([]map[string][]string) { - return func(securityDefs []map[string][]string) { op.Security = securityDefs } -} - -func opResponsesSetter(op *spec.Operation) func(*spec.Response, map[int]spec.Response) { - return func(def *spec.Response, scr map[int]spec.Response) { - if op.Responses == nil { - op.Responses = new(spec.Responses) - } - op.Responses.Default = def - op.Responses.StatusCodeResponses = scr - } -} - -func opParamSetter(op *spec.Operation) func([]*spec.Parameter) { - return func(params []*spec.Parameter) { - for _, v := range params { - op.AddParam(v) - } - } -} - -func opExtensionsSetter(op *spec.Operation) func(*spec.Extensions) { - return func(exts *spec.Extensions) { - for name, value := range *exts { - op.AddExtension(name, value) - } - } -} - -type routesBuilder struct { - ctx *scanCtx - route parsedPathContent - definitions map[string]spec.Schema - operations map[string]*spec.Operation - responses map[string]spec.Response - parameters []*spec.Parameter -} - -func (r *routesBuilder) Build(tgt *spec.Paths) error { - pthObj := tgt.Paths[r.route.Path] - op := setPathOperation( - r.route.Method, r.route.ID, - &pthObj, r.operations[r.route.ID]) - - op.Tags = r.route.Tags - - sp := new(sectionedParser) - sp.setTitle = func(lines []string) { op.Summary = joinDropLast(lines) } - sp.setDescription = func(lines []string) { op.Description = joinDropLast(lines) } - sr := newSetResponses(r.definitions, r.responses, opResponsesSetter(op)) - spa := newSetParams(r.parameters, opParamSetter(op)) - sp.taggers = []tagParser{ - newMultiLineTagParser("Consumes", newMultilineDropEmptyParser(rxConsumes, opConsumesSetter(op)), false), - newMultiLineTagParser("Produces", newMultilineDropEmptyParser(rxProduces, opProducesSetter(op)), false), - 
newSingleLineTagParser("Schemes", newSetSchemes(opSchemeSetter(op))), - newMultiLineTagParser("Security", newSetSecurity(rxSecuritySchemes, opSecurityDefsSetter(op)), false), - newMultiLineTagParser("Parameters", spa, false), - newMultiLineTagParser("Responses", sr, false), - newSingleLineTagParser("Deprecated", &setDeprecatedOp{op}), - newMultiLineTagParser("Extensions", newSetExtensions(opExtensionsSetter(op), r.ctx.debug), true), - } - if err := sp.Parse(r.route.Remaining); err != nil { - return fmt.Errorf("operation (%s): %w", op.ID, err) - } - - if tgt.Paths == nil { - tgt.Paths = make(map[string]spec.PathItem) - } - tgt.Paths[r.route.Path] = pthObj - return nil -} diff --git a/schema.go b/schema.go deleted file mode 100644 index 099c1c7..0000000 --- a/schema.go +++ /dev/null @@ -1,1754 +0,0 @@ -// SPDX-FileCopyrightText: Copyright 2015-2025 go-swagger maintainers -// SPDX-License-Identifier: Apache-2.0 - -package codescan - -import ( - "encoding/json" - "fmt" - "go/ast" - "go/importer" - "go/token" - "go/types" - "log" - "reflect" - "strconv" - "strings" - - "golang.org/x/tools/go/ast/astutil" - "golang.org/x/tools/go/packages" - - "github.com/go-openapi/spec" -) - -func addExtension(ve *spec.VendorExtensible, key string, value any, skip bool) { - if skip { - return - } - - ve.AddExtension(key, value) -} - -type schemaTypable struct { - schema *spec.Schema - level int - skipExt bool -} - -func (st schemaTypable) In() string { return "body" } - -func (st schemaTypable) Typed(tpe, format string) { - st.schema.Typed(tpe, format) -} - -func (st schemaTypable) SetRef(ref spec.Ref) { - st.schema.Ref = ref -} - -func (st schemaTypable) Schema() *spec.Schema { - return st.schema -} - -func (st schemaTypable) Items() swaggerTypable { //nolint:ireturn // polymorphic by design - if st.schema.Items == nil { - st.schema.Items = new(spec.SchemaOrArray) - } - if st.schema.Items.Schema == nil { - st.schema.Items.Schema = new(spec.Schema) - } - - st.schema.Typed("array", "") 
- return schemaTypable{st.schema.Items.Schema, st.level + 1, st.skipExt} -} - -func (st schemaTypable) AdditionalProperties() swaggerTypable { //nolint:ireturn // polymorphic by design - if st.schema.AdditionalProperties == nil { - st.schema.AdditionalProperties = new(spec.SchemaOrBool) - } - if st.schema.AdditionalProperties.Schema == nil { - st.schema.AdditionalProperties.Schema = new(spec.Schema) - } - - st.schema.Typed("object", "") - return schemaTypable{st.schema.AdditionalProperties.Schema, st.level + 1, st.skipExt} -} - -func (st schemaTypable) Level() int { return st.level } - -func (st schemaTypable) AddExtension(key string, value any) { - addExtension(&st.schema.VendorExtensible, key, value, st.skipExt) -} - -func (st schemaTypable) WithEnum(values ...any) { - st.schema.WithEnum(values...) -} - -func (st schemaTypable) WithEnumDescription(desc string) { - if desc == "" { - return - } - st.AddExtension(extEnumDesc, desc) -} - -type schemaValidations struct { - current *spec.Schema -} - -func (sv schemaValidations) SetMaximum(val float64, exclusive bool) { - sv.current.Maximum = &val - sv.current.ExclusiveMaximum = exclusive -} - -func (sv schemaValidations) SetMinimum(val float64, exclusive bool) { - sv.current.Minimum = &val - sv.current.ExclusiveMinimum = exclusive -} -func (sv schemaValidations) SetMultipleOf(val float64) { sv.current.MultipleOf = &val } -func (sv schemaValidations) SetMinItems(val int64) { sv.current.MinItems = &val } -func (sv schemaValidations) SetMaxItems(val int64) { sv.current.MaxItems = &val } -func (sv schemaValidations) SetMinLength(val int64) { sv.current.MinLength = &val } -func (sv schemaValidations) SetMaxLength(val int64) { sv.current.MaxLength = &val } -func (sv schemaValidations) SetPattern(val string) { sv.current.Pattern = val } -func (sv schemaValidations) SetUnique(val bool) { sv.current.UniqueItems = val } -func (sv schemaValidations) SetDefault(val any) { sv.current.Default = val } -func (sv schemaValidations) 
SetExample(val any) { sv.current.Example = val } -func (sv schemaValidations) SetEnum(val string) { - var typ string - if len(sv.current.Type) > 0 { - typ = sv.current.Type[0] - } - sv.current.Enum = parseEnum(val, &spec.SimpleSchema{Format: sv.current.Format, Type: typ}) -} - -type schemaBuilder struct { - ctx *scanCtx - decl *entityDecl - GoName string - Name string - annotated bool - discovered []*entityDecl - postDecls []*entityDecl -} - -func (s *schemaBuilder) Build(definitions map[string]spec.Schema) error { - s.inferNames() - - schema := definitions[s.Name] - err := s.buildFromDecl(s.decl, &schema) - if err != nil { - return err - } - definitions[s.Name] = schema - return nil -} - -func (s *schemaBuilder) inferNames() { - if s.GoName != "" { - return - } - - goName := s.decl.Ident.Name - name := goName - - defer func() { - s.GoName = goName - s.Name = name - }() - - if s.decl.Comments == nil { - return - } - -DECLS: - for _, cmt := range s.decl.Comments.List { - for ln := range strings.SplitSeq(cmt.Text, "\n") { - matches := rxModelOverride.FindStringSubmatch(ln) - if len(matches) > 0 { - s.annotated = true - } - if len(matches) > 1 && len(matches[1]) > 0 { - name = matches[1] - break DECLS - } - } - } -} - -func (s *schemaBuilder) buildFromDecl(_ *entityDecl, schema *spec.Schema) error { - // analyze doc comment for the model - // This includes parsing "example", "default" and other validation at the top-level declaration. 
- sp := s.createParser("", schema, schema, nil) - sp.setTitle = func(lines []string) { schema.Title = joinDropLast(lines) } - sp.setDescription = func(lines []string) { - schema.Description = joinDropLast(lines) - enumDesc := getEnumDesc(schema.Extensions) - if enumDesc != "" { - schema.Description += "\n" + enumDesc - } - } - if err := sp.Parse(s.decl.Comments); err != nil { - return err - } - - // if the type is marked to ignore, just return - if sp.ignored { - return nil - } - - defer func() { - if schema.Ref.String() == "" { - // unless this is a $ref, we add traceability of the origin of this schema in source - if s.Name != s.GoName { - addExtension(&schema.VendorExtensible, "x-go-name", s.GoName, s.ctx.opts.SkipExtensions) - } - addExtension(&schema.VendorExtensible, "x-go-package", s.decl.Obj().Pkg().Path(), s.ctx.opts.SkipExtensions) - } - }() - - switch tpe := s.decl.ObjType().(type) { - // TODO(fredbi): we may safely remove all the cases here that are not Named or Alias - case *types.Basic: - debugLogf(s.ctx.debug, "basic: %v", tpe.Name()) - return nil - case *types.Struct: - return s.buildFromStruct(s.decl, tpe, schema, make(map[string]string)) - case *types.Interface: - return s.buildFromInterface(s.decl, tpe, schema, make(map[string]string)) - case *types.Array: - debugLogf(s.ctx.debug, "array: %v -> %v", s.decl.Ident.Name, tpe.Elem().String()) - return nil - case *types.Slice: - debugLogf(s.ctx.debug, "slice: %v -> %v", s.decl.Ident.Name, tpe.Elem().String()) - return nil - case *types.Map: - debugLogf(s.ctx.debug, "map: %v -> [%v]%v", s.decl.Ident.Name, tpe.Key().String(), tpe.Elem().String()) - return nil - case *types.Named: - debugLogf(s.ctx.debug, "named: %v", tpe) - return s.buildDeclNamed(tpe, schema) - case *types.Alias: - debugLogf(s.ctx.debug, "alias: %v -> %v", tpe, tpe.Rhs()) - tgt := schemaTypable{schema, 0, s.ctx.opts.SkipExtensions} - - return s.buildDeclAlias(tpe, tgt) - case *types.TypeParam: - log.Printf("WARNING: generic type 
parameters are not supported yet %[1]v (%[1]T). Skipped", tpe) - return nil - case *types.Chan: - log.Printf("WARNING: channels are not supported %[1]v (%[1]T). Skipped", tpe) - return nil - case *types.Signature: - log.Printf("WARNING: functions are not supported %[1]v (%[1]T). Skipped", tpe) - return nil - default: - log.Printf("WARNING: missing parser for type %T, skipping model: %s\n", tpe, s.Name) - return nil - } -} - -func (s *schemaBuilder) buildDeclNamed(tpe *types.Named, schema *spec.Schema) error { - if unsupportedBuiltin(tpe) { - log.Printf("WARNING: skipped unsupported builtin type: %v", tpe) - - return nil - } - o := tpe.Obj() - - mustNotBeABuiltinType(o) - - debugLogf(s.ctx.debug, "got the named type object: %s.%s | isAlias: %t | exported: %t", o.Pkg().Path(), o.Name(), o.IsAlias(), o.Exported()) - if isStdTime(o) { - schema.Typed("string", "date-time") - return nil - } - - ps := schemaTypable{schema, 0, s.ctx.opts.SkipExtensions} - ti := s.decl.Pkg.TypesInfo.Types[s.decl.Spec.Type] - if !ti.IsType() { - return fmt.Errorf("declaration is not a type: %v: %w", o, ErrCodeScan) - } - - return s.buildFromType(ti.Type, ps) -} - -// buildFromTextMarshal renders a type that marshals as text as a string. 
-func (s *schemaBuilder) buildFromTextMarshal(tpe types.Type, tgt swaggerTypable) error { - if typePtr, ok := tpe.(*types.Pointer); ok { - return s.buildFromTextMarshal(typePtr.Elem(), tgt) - } - - typeNamed, ok := tpe.(*types.Named) - if !ok { - tgt.Typed("string", "") - return nil - } - - tio := typeNamed.Obj() - if isStdError(tio) { - tgt.AddExtension("x-go-type", tio.Name()) - return swaggerSchemaForType(tio.Name(), tgt) - } - - debugLogf(s.ctx.debug, "named refined type %s.%s", tio.Pkg().Path(), tio.Name()) - pkg, found := s.ctx.PkgForType(tpe) - - if strings.ToLower(tio.Name()) == "uuid" { - tgt.Typed("string", "uuid") - return nil - } - - if !found { - // this must be a builtin - debugLogf(s.ctx.debug, "skipping because package is nil: %v", tpe) - return nil - } - - if isStdTime(tio) { - tgt.Typed("string", "date-time") - return nil - } - - if isStdJSONRawMessage(tio) { - tgt.Typed("object", "") // TODO: this should actually be any type - return nil - } - - cmt, hasComments := s.ctx.FindComments(pkg, tio.Name()) - if !hasComments { - cmt = new(ast.CommentGroup) - } - - if sfnm, isf := strfmtName(cmt); isf { - tgt.Typed("string", sfnm) - return nil - } - - tgt.Typed("string", "") - tgt.AddExtension("x-go-type", tio.Pkg().Path()+"."+tio.Name()) - - return nil -} - -func (s *schemaBuilder) buildFromType(tpe types.Type, tgt swaggerTypable) error { - // check if the type implements encoding.TextMarshaler interface - // if so, the type is rendered as a string. 
- debugLogf(s.ctx.debug, "schema buildFromType %v (%T)", tpe, tpe) - - if isTextMarshaler(tpe) { - return s.buildFromTextMarshal(tpe, tgt) - } - - switch titpe := tpe.(type) { - case *types.Basic: - if unsupportedBuiltinType(titpe) { - log.Printf("WARNING: skipped unsupported builtin type: %v", tpe) - return nil - } - return swaggerSchemaForType(titpe.String(), tgt) - case *types.Pointer: - return s.buildFromType(titpe.Elem(), tgt) - case *types.Struct: - return s.buildFromStruct(s.decl, titpe, tgt.Schema(), make(map[string]string)) - case *types.Interface: - return s.buildFromInterface(s.decl, titpe, tgt.Schema(), make(map[string]string)) - case *types.Slice: - // anonymous slice - return s.buildFromType(titpe.Elem(), tgt.Items()) - case *types.Array: - // anonymous array - return s.buildFromType(titpe.Elem(), tgt.Items()) - case *types.Map: - return s.buildFromMap(titpe, tgt) - case *types.Named: - // a named type, e.g. type X struct {} - return s.buildNamedType(titpe, tgt) - case *types.Alias: - // a named alias, e.g. type X = {RHS type}. - debugLogf(s.ctx.debug, "alias(schema.buildFromType): got alias %v to %v", titpe, titpe.Rhs()) - return s.buildAlias(titpe, tgt) - case *types.TypeParam: - log.Printf("WARNING: generic type parameters are not supported yet %[1]v (%[1]T). Skipped", titpe) - return nil - case *types.Chan: - log.Printf("WARNING: channels are not supported %[1]v (%[1]T). Skipped", tpe) - return nil - case *types.Signature: - log.Printf("WARNING: functions are not supported %[1]v (%[1]T). 
Skipped", tpe) - return nil - default: - panic(fmt.Errorf("ERROR: can't determine refined type %[1]v (%[1]T): %w", titpe, errInternal)) - } -} - -func (s *schemaBuilder) buildNamedType(titpe *types.Named, tgt swaggerTypable) error { - tio := titpe.Obj() - if unsupportedBuiltin(titpe) { - log.Printf("WARNING: skipped unsupported builtin type: %v", titpe) - return nil - } - - if isAny(tio) { - // e.g type X any or type X interface{} - _ = tgt.Schema() - - return nil - } - - // special case of the "error" interface. - if isStdError(tio) { - tgt.AddExtension("x-go-type", tio.Name()) - return swaggerSchemaForType(tio.Name(), tgt) - } - - // special case of the "time.Time" type - if isStdTime(tio) { - tgt.Typed("string", "date-time") - return nil - } - - // special case of the "json.RawMessage" type - if isStdJSONRawMessage(tio) { - tgt.Typed("object", "") // TODO: this should actually be any type - return nil - } - - pkg, found := s.ctx.PkgForType(titpe) - debugLogf(s.ctx.debug, "named refined type %s.%s", pkg, tio.Name()) - if !found { - // this must be a builtin - // - // This could happen for example when using unsupported types such as complex64, complex128, uintptr, - // or type constraints such as comparable. - debugLogf(s.ctx.debug, "skipping because package is nil (builtin type): %v", tio) - - return nil - } - - cmt, hasComments := s.ctx.FindComments(pkg, tio.Name()) - if !hasComments { - cmt = new(ast.CommentGroup) - } - - if tn, ok := typeName(cmt); ok { - if err := swaggerSchemaForType(tn, tgt); err == nil { - return nil - } - // For unsupported swagger:type values (e.g., "array"), fall through - // to underlying type resolution so the full schema (including items - // for slices) is properly built. Build directly from the underlying - // type to bypass the named-type $ref creation. 
- return s.buildFromType(titpe.Underlying(), tgt) - } - - if s.decl.Spec.Assign.IsValid() { - debugLogf(s.ctx.debug, "found assignment: %s.%s", tio.Pkg().Path(), tio.Name()) - return s.buildFromType(titpe.Underlying(), tgt) - } - - if titpe.TypeArgs() != nil && titpe.TypeArgs().Len() > 0 { - return s.buildFromType(titpe.Underlying(), tgt) - } - - // invariant: the Underlying cannot be an alias or named type - switch utitpe := titpe.Underlying().(type) { - case *types.Struct: - return s.buildNamedStruct(tio, cmt, tgt) - case *types.Interface: - debugLogf(s.ctx.debug, "found interface: %s.%s", tio.Pkg().Path(), tio.Name()) - - decl, found := s.ctx.FindModel(tio.Pkg().Path(), tio.Name()) - if !found { - return fmt.Errorf("can't find source file for type: %v: %w", utitpe, ErrCodeScan) - } - - return s.makeRef(decl, tgt) - case *types.Basic: - return s.buildNamedBasic(tio, pkg, cmt, utitpe, tgt) - case *types.Array: - return s.buildNamedArray(tio, cmt, utitpe.Elem(), tgt) - case *types.Slice: - return s.buildNamedSlice(tio, cmt, utitpe.Elem(), tgt) - case *types.Map: - debugLogf(s.ctx.debug, "found map type: %s.%s", tio.Pkg().Path(), tio.Name()) - - if decl, ok := s.ctx.FindModel(tio.Pkg().Path(), tio.Name()); ok { - return s.makeRef(decl, tgt) - } - return nil - case *types.TypeParam: - log.Printf("WARNING: generic type parameters are not supported yet %[1]v (%[1]T). Skipped", utitpe) - return nil - case *types.Chan: - log.Printf("WARNING: channels are not supported %[1]v (%[1]T). Skipped", utitpe) - return nil - case *types.Signature: - log.Printf("WARNING: functions are not supported %[1]v (%[1]T). 
Skipped", utitpe) - return nil - default: - log.Printf( - "WARNING: can't figure out object type for named type (%T): %v [alias: %t]", - titpe.Underlying(), titpe.Underlying(), titpe.Obj().IsAlias(), - ) - - return nil - } -} - -func (s *schemaBuilder) buildNamedBasic(tio *types.TypeName, pkg *packages.Package, cmt *ast.CommentGroup, utitpe *types.Basic, tgt swaggerTypable) error { - if unsupportedBuiltinType(utitpe) { - log.Printf("WARNING: skipped unsupported builtin type: %v", utitpe) - return nil - } - - debugLogf(s.ctx.debug, "found primitive type: %s.%s", tio.Pkg().Path(), tio.Name()) - - if sfnm, isf := strfmtName(cmt); isf { - tgt.Typed("string", sfnm) - return nil - } - - if enumName, ok := enumName(cmt); ok { - enumValues, enumDesces, _ := s.ctx.FindEnumValues(pkg, enumName) - if len(enumValues) > 0 { - tgt.WithEnum(enumValues...) - enumTypeName := reflect.TypeOf(enumValues[0]).String() - _ = swaggerSchemaForType(enumTypeName, tgt) - } - if len(enumDesces) > 0 { - tgt.WithEnumDescription(strings.Join(enumDesces, "\n")) - } - return nil - } - - if defaultName, ok := defaultName(cmt); ok { - debugLogf(s.ctx.debug, "default name: %s", defaultName) - return nil - } - - if typeName, ok := typeName(cmt); ok { - _ = swaggerSchemaForType(typeName, tgt) - return nil - } - - if isAliasParam(tgt) || aliasParam(cmt) { - err := swaggerSchemaForType(utitpe.Name(), tgt) - if err == nil { - return nil - } - } - - if decl, ok := s.ctx.FindModel(tio.Pkg().Path(), tio.Name()); ok { - return s.makeRef(decl, tgt) - } - - return swaggerSchemaForType(utitpe.String(), tgt) -} - -func (s *schemaBuilder) buildNamedStruct(tio *types.TypeName, cmt *ast.CommentGroup, tgt swaggerTypable) error { - debugLogf(s.ctx.debug, "found struct: %s.%s", tio.Pkg().Path(), tio.Name()) - - decl, ok := s.ctx.FindModel(tio.Pkg().Path(), tio.Name()) - if !ok { - debugLogf(s.ctx.debug, "could not find model in index: %s.%s", tio.Pkg().Path(), tio.Name()) - return nil - } - - o := decl.Obj() - if 
isStdTime(o) { - tgt.Typed("string", "date-time") - return nil - } - - if sfnm, isf := strfmtName(cmt); isf { - tgt.Typed("string", sfnm) - return nil - } - - if tn, ok := typeName(cmt); ok { - if err := swaggerSchemaForType(tn, tgt); err == nil { - return nil - } - // For unsupported swagger:type values, fall through to makeRef - // rather than silently returning an empty schema. - } - - return s.makeRef(decl, tgt) -} - -func (s *schemaBuilder) buildNamedArray(tio *types.TypeName, cmt *ast.CommentGroup, elem types.Type, tgt swaggerTypable) error { - debugLogf(s.ctx.debug, "found array type: %s.%s", tio.Pkg().Path(), tio.Name()) - - if sfnm, isf := strfmtName(cmt); isf { - if sfnm == goTypeByte { - tgt.Typed("string", sfnm) - return nil - } - if sfnm == "bsonobjectid" { - tgt.Typed("string", sfnm) - return nil - } - - tgt.Items().Typed("string", sfnm) - return nil - } - // When swagger:type is set to an unsupported value (e.g., "array"), - // skip the $ref and inline the array schema with proper items type. - if tn, ok := typeName(cmt); ok { - if err := swaggerSchemaForType(tn, tgt); err != nil { - return s.buildFromType(elem, tgt.Items()) - } - return nil - } - if decl, ok := s.ctx.FindModel(tio.Pkg().Path(), tio.Name()); ok { - return s.makeRef(decl, tgt) - } - return s.buildFromType(elem, tgt.Items()) -} - -func (s *schemaBuilder) buildNamedSlice(tio *types.TypeName, cmt *ast.CommentGroup, elem types.Type, tgt swaggerTypable) error { - debugLogf(s.ctx.debug, "found slice type: %s.%s", tio.Pkg().Path(), tio.Name()) - - if sfnm, isf := strfmtName(cmt); isf { - if sfnm == goTypeByte { - tgt.Typed("string", sfnm) - return nil - } - tgt.Items().Typed("string", sfnm) - return nil - } - // When swagger:type is set to an unsupported value (e.g., "array"), - // skip the $ref and inline the slice schema with proper items type. - // This preserves the field's description that would be lost with $ref. 
- if tn, ok := typeName(cmt); ok { - if err := swaggerSchemaForType(tn, tgt); err != nil { - // Unsupported type name (e.g., "array") — build inline from element type. - return s.buildFromType(elem, tgt.Items()) - } - return nil - } - if decl, ok := s.ctx.FindModel(tio.Pkg().Path(), tio.Name()); ok { - return s.makeRef(decl, tgt) - } - return s.buildFromType(elem, tgt.Items()) -} - -// buildDeclAlias builds a top-level alias declaration. -func (s *schemaBuilder) buildDeclAlias(tpe *types.Alias, tgt swaggerTypable) error { - if unsupportedBuiltinType(tpe) { - log.Printf("WARNING: skipped unsupported builtin type: %v", tpe) - return nil - } - - o := tpe.Obj() - if isAny(o) { - _ = tgt.Schema() // this is mutating tgt to create an empty schema - return nil - } - if isStdError(o) { - tgt.AddExtension("x-go-type", o.Name()) - return swaggerSchemaForType(o.Name(), tgt) - } - mustNotBeABuiltinType(o) - - if isStdTime(o) { - tgt.Typed("string", "date-time") - return nil - } - - mustHaveRightHandSide(tpe) - rhs := tpe.Rhs() - - // If transparent aliases are enabled, use the underlying type directly without creating a definition - if s.ctx.app.transparentAliases { - return s.buildFromType(rhs, tgt) - } - - decl, ok := s.ctx.FindModel(o.Pkg().Path(), o.Name()) - if !ok { - return fmt.Errorf("can't find source file for aliased type: %v -> %v: %w", tpe, rhs, ErrCodeScan) - } - - s.postDecls = append(s.postDecls, decl) // mark the left-hand side as discovered - - if !s.ctx.app.refAliases { - // expand alias - return s.buildFromType(tpe.Underlying(), tgt) - } - - // resolve alias to named type as $ref - switch rtpe := rhs.(type) { - // named declarations: we construct a $ref to the right-hand side target of the alias - case *types.Named: - ro := rtpe.Obj() - rdecl, found := s.ctx.FindModel(ro.Pkg().Path(), ro.Name()) - if !found { - return fmt.Errorf("can't find source file for target type of alias: %v -> %v: %w", tpe, rtpe, ErrCodeScan) - } - - return s.makeRef(rdecl, tgt) - 
case *types.Alias: - ro := rtpe.Obj() - if unsupportedBuiltin(rtpe) { - log.Printf("WARNING: skipped unsupported builtin type: %v", rtpe) - return nil - } - if isAny(ro) { - // e.g. type X = any - _ = tgt.Schema() // this is mutating tgt to create an empty schema - return nil - } - if isStdError(ro) { - // e.g. type X = error - tgt.AddExtension("x-go-type", o.Name()) - return swaggerSchemaForType(o.Name(), tgt) - } - mustNotBeABuiltinType(ro) // TODO(fred): there are a few other cases - - rdecl, found := s.ctx.FindModel(ro.Pkg().Path(), ro.Name()) - if !found { - return fmt.Errorf("can't find source file for target type of alias: %v -> %v: %w", tpe, rtpe, ErrCodeScan) - } - - return s.makeRef(rdecl, tgt) - } - - // alias to anonymous type - return s.buildFromType(rhs, tgt) -} - -func (s *schemaBuilder) buildAnonymousInterface(it *types.Interface, tgt swaggerTypable, decl *entityDecl) error { - tgt.Typed("object", "") - - for fld := range it.ExplicitMethods() { - if err := s.processAnonInterfaceMethod(fld, it, decl, tgt.Schema()); err != nil { - return err - } - } - - return nil -} - -func (s *schemaBuilder) processAnonInterfaceMethod(fld *types.Func, it *types.Interface, decl *entityDecl, schema *spec.Schema) error { - if !fld.Exported() { - return nil - } - sig, isSignature := fld.Type().(*types.Signature) - if !isSignature { - return nil - } - if sig.Params().Len() > 0 { - return nil - } - if sig.Results() == nil || sig.Results().Len() != 1 { - return nil - } - - afld := findASTField(decl.File, fld.Pos()) - if afld == nil { - debugLogf(s.ctx.debug, "can't find source associated with %s for %s", fld.String(), it.String()) - return nil - } - - if ignored(afld.Doc) { - return nil - } - - name := nameOverride(fld.Name(), afld.Doc) - - if schema.Properties == nil { - schema.Properties = make(map[string]spec.Schema) - } - ps := schema.Properties[name] - if err := s.buildFromType(sig.Results().At(0).Type(), schemaTypable{&ps, 0, s.ctx.opts.SkipExtensions}); err != nil { 
- return err - } - if sfName, isStrfmt := strfmtName(afld.Doc); isStrfmt { - ps.Typed("string", sfName) - ps.Ref = spec.Ref{} - ps.Items = nil - } - - if err := s.createParser(name, schema, &ps, afld).Parse(afld.Doc); err != nil { - return err - } - - if ps.Ref.String() == "" && name != fld.Name() { - ps.AddExtension("x-go-name", fld.Name()) - } - - if s.ctx.app.setXNullableForPointers { - _, isPointer := fld.Type().(*types.Signature).Results().At(0).Type().(*types.Pointer) - noNullableExt := ps.Extensions == nil || - (ps.Extensions["x-nullable"] == nil && ps.Extensions["x-isnullable"] == nil) - if isPointer && noNullableExt { - ps.AddExtension("x-nullable", true) - } - } - - schema.Properties[name] = ps - return nil -} - -// buildAlias builds a reference to an alias from another type. -func (s *schemaBuilder) buildAlias(tpe *types.Alias, tgt swaggerTypable) error { - if unsupportedBuiltinType(tpe) { - log.Printf("WARNING: skipped unsupported builtin type: %v", tpe) - - return nil - } - - o := tpe.Obj() - if isAny(o) { - _ = tgt.Schema() - return nil - } - mustNotBeABuiltinType(o) - - // If transparent aliases are enabled, use the underlying type directly - if s.ctx.app.transparentAliases { - return s.buildFromType(tpe.Rhs(), tgt) - } - - decl, ok := s.ctx.FindModel(o.Pkg().Path(), o.Name()) - if !ok { - return fmt.Errorf("can't find source file for aliased type: %v: %w", tpe, ErrCodeScan) - } - - return s.makeRef(decl, tgt) -} - -func (s *schemaBuilder) buildFromMap(titpe *types.Map, tgt swaggerTypable) error { - // check if key is a string type, or knows how to marshall to text. - // If not, print a message and skip the map property. 
- // - // Only maps with string keys can go into additional properties - - sch := tgt.Schema() - if sch == nil { - return fmt.Errorf("items doesn't support maps: %w", ErrCodeScan) - } - - eleProp := schemaTypable{sch, tgt.Level(), s.ctx.opts.SkipExtensions} - key := titpe.Key() - if key.Underlying().String() == "string" || isTextMarshaler(key) { - return s.buildFromType(titpe.Elem(), eleProp.AdditionalProperties()) - } - - return nil -} - -func (s *schemaBuilder) buildFromInterface(decl *entityDecl, it *types.Interface, schema *spec.Schema, seen map[string]string) error { - if it.Empty() { - // return an empty schema for empty interfaces - return nil - } - - var ( - tgt *spec.Schema - hasAllOf bool - ) - - var flist []*ast.Field - if specType, ok := decl.Spec.Type.(*ast.InterfaceType); ok { - flist = make([]*ast.Field, it.NumEmbeddeds()+it.NumExplicitMethods()) - copy(flist, specType.Methods.List) - } - - // First collect the embedded interfaces - // create refs when: - // - // 1. the embedded interface is decorated with an allOf annotation - // 2. 
the embedded interface is an alias - for fld := range it.EmbeddedTypes() { - if tgt == nil { - tgt = &spec.Schema{} - } - - fieldHasAllOf, err := s.processEmbeddedType(fld, flist, decl, schema, seen) - if err != nil { - return err - } - hasAllOf = hasAllOf || fieldHasAllOf - } - - if tgt == nil { - tgt = schema - } - - // We can finally build the actual schema for the struct - if tgt.Properties == nil { - tgt.Properties = make(map[string]spec.Schema) - } - tgt.Typed("object", "") - - for fld := range it.ExplicitMethods() { - if err := s.processInterfaceMethod(fld, it, decl, tgt, seen); err != nil { - return err - } - } - - if tgt == nil { - return nil - } - if hasAllOf && len(tgt.Properties) > 0 { - schema.AllOf = append(schema.AllOf, *tgt) - } - - for k := range tgt.Properties { - if _, ok := seen[k]; !ok { - delete(tgt.Properties, k) - } - } - - return nil -} - -func (s *schemaBuilder) processEmbeddedType(fld types.Type, flist []*ast.Field, decl *entityDecl, schema *spec.Schema, seen map[string]string) (fieldHasAllOf bool, err error) { - debugLogf(s.ctx.debug, "inspecting embedded type in interface: %v", fld) - - switch ftpe := fld.(type) { - case *types.Named: - debugLogf(s.ctx.debug, "embedded named type (buildInterface): %v", ftpe) - o := ftpe.Obj() - if isAny(o) || isStdError(o) { - return false, nil - } - return s.buildNamedInterface(ftpe, flist, decl, schema, seen) - case *types.Interface: - debugLogf(s.ctx.debug, "embedded anonymous interface type (buildInterface): %v", ftpe) - var aliasedSchema spec.Schema - ps := schemaTypable{schema: &aliasedSchema, skipExt: s.ctx.opts.SkipExtensions} - if err = s.buildAnonymousInterface(ftpe, ps, decl); err != nil { - return false, err - } - if aliasedSchema.Ref.String() != "" || len(aliasedSchema.Properties) > 0 || len(aliasedSchema.AllOf) > 0 { - fieldHasAllOf = true - schema.AddToAllOf(aliasedSchema) - } - case *types.Alias: - debugLogf(s.ctx.debug, "embedded alias (buildInterface): %v -> %v", ftpe, ftpe.Rhs()) - 
var aliasedSchema spec.Schema - ps := schemaTypable{schema: &aliasedSchema, skipExt: s.ctx.opts.SkipExtensions} - if err = s.buildAlias(ftpe, ps); err != nil { - return false, err - } - if aliasedSchema.Ref.String() != "" || len(aliasedSchema.Properties) > 0 || len(aliasedSchema.AllOf) > 0 { - fieldHasAllOf = true - schema.AddToAllOf(aliasedSchema) - } - case *types.Union: - log.Printf("WARNING: union type constraints are not supported yet %[1]v (%[1]T). Skipped", ftpe) - case *types.TypeParam: - log.Printf("WARNING: generic type parameters are not supported yet %[1]v (%[1]T). Skipped", ftpe) - case *types.Chan: - log.Printf("WARNING: channels are not supported %[1]v (%[1]T). Skipped", ftpe) - case *types.Signature: - log.Printf("WARNING: functions are not supported %[1]v (%[1]T). Skipped", ftpe) - default: - log.Printf( - "WARNING: can't figure out object type for allOf named type (%T): %v", - ftpe, ftpe.Underlying(), - ) - } - - debugLogf(s.ctx.debug, "got embedded interface: %v {%T}, fieldHasAllOf: %t", fld, fld, fieldHasAllOf) - return fieldHasAllOf, nil -} - -func findASTField(file *ast.File, pos token.Pos) *ast.Field { - ans, _ := astutil.PathEnclosingInterval(file, pos, pos) - for _, an := range ans { - if at, valid := an.(*ast.Field); valid { - return at - } - } - return nil -} - -func nameOverride(defaultName string, doc *ast.CommentGroup) string { - name := defaultName - if doc != nil { - for _, cmt := range doc.List { - for ln := range strings.SplitSeq(cmt.Text, "\n") { - matches := rxName.FindStringSubmatch(ln) - if ml := len(matches); ml > 1 { - name = matches[ml-1] - } - } - } - } - return name -} - -func (s *schemaBuilder) processInterfaceMethod(fld *types.Func, it *types.Interface, decl *entityDecl, tgt *spec.Schema, seen map[string]string) error { - if !fld.Exported() { - return nil - } - sig, isSignature := fld.Type().(*types.Signature) - if !isSignature { - return nil - } - if sig.Params().Len() > 0 { - return nil - } - if sig.Results() == nil || 
sig.Results().Len() != 1 { - return nil - } - - afld := findASTField(decl.File, fld.Pos()) - if afld == nil { - debugLogf(s.ctx.debug, "can't find source associated with %s for %s", fld.String(), it.String()) - return nil - } - - // if the field is annotated with swagger:ignore, ignore it - if ignored(afld.Doc) { - return nil - } - - name := nameOverride(fld.Name(), afld.Doc) - ps := tgt.Properties[name] - if err := s.buildFromType(sig.Results().At(0).Type(), schemaTypable{&ps, 0, s.ctx.opts.SkipExtensions}); err != nil { - return err - } - if sfName, isStrfmt := strfmtName(afld.Doc); isStrfmt { - ps.Typed("string", sfName) - ps.Ref = spec.Ref{} - ps.Items = nil - } - - if err := s.createParser(name, tgt, &ps, afld).Parse(afld.Doc); err != nil { - return err - } - - if ps.Ref.String() == "" && name != fld.Name() { - ps.AddExtension("x-go-name", fld.Name()) - } - - if s.ctx.app.setXNullableForPointers { - _, isPointer := fld.Type().(*types.Signature).Results().At(0).Type().(*types.Pointer) - noNullableExt := ps.Extensions == nil || - (ps.Extensions["x-nullable"] == nil && ps.Extensions["x-isnullable"] == nil) - if isPointer && noNullableExt { - ps.AddExtension("x-nullable", true) - } - } - - seen[name] = fld.Name() - tgt.Properties[name] = ps - return nil -} - -func (s *schemaBuilder) buildNamedInterface(ftpe *types.Named, flist []*ast.Field, decl *entityDecl, schema *spec.Schema, seen map[string]string) (hasAllOf bool, err error) { - o := ftpe.Obj() - var afld *ast.Field - - for _, an := range flist { - if len(an.Names) != 0 { - continue - } - - tpp := decl.Pkg.TypesInfo.Types[an.Type] - if tpp.Type.String() != o.Type().String() { - continue - } - - // decl. 
- debugLogf(s.ctx.debug, "maybe interface field %s: %s(%T)", o.Name(), o.Type().String(), o.Type()) - afld = an - break - } - - if afld == nil { - debugLogf(s.ctx.debug, "can't find source associated with %s", ftpe.String()) - return hasAllOf, nil - } - - // if the field is annotated with swagger:ignore, ignore it - if ignored(afld.Doc) { - return hasAllOf, nil - } - - if !allOfMember(afld.Doc) { - var newSch spec.Schema - if err = s.buildEmbedded(o.Type(), &newSch, seen); err != nil { - return hasAllOf, err - } - schema.AllOf = append(schema.AllOf, newSch) - hasAllOf = true - - return hasAllOf, nil - } - - hasAllOf = true - - var newSch spec.Schema - // when the embedded struct is annotated with swagger:allOf it will be used as allOf property - // otherwise the fields will just be included as normal properties - if err = s.buildAllOf(o.Type(), &newSch); err != nil { - return hasAllOf, err - } - - if afld.Doc != nil { - for _, cmt := range afld.Doc.List { - for ln := range strings.SplitSeq(cmt.Text, "\n") { - matches := rxAllOf.FindStringSubmatch(ln) - ml := len(matches) - if ml <= 1 { - continue - } - - mv := matches[ml-1] - if mv != "" { - schema.AddExtension("x-class", mv) - } - } - } - } - - schema.AllOf = append(schema.AllOf, newSch) - - return hasAllOf, nil -} - -func extractAllOfClass(doc *ast.CommentGroup, schema *spec.Schema) { - if doc == nil { - return - } - for _, cmt := range doc.List { - for ln := range strings.SplitSeq(cmt.Text, "\n") { - matches := rxAllOf.FindStringSubmatch(ln) - if ml := len(matches); ml > 1 { - if mv := matches[ml-1]; mv != "" { - schema.AddExtension("x-class", mv) - } - } - } - } -} - -func (s *schemaBuilder) buildFromStruct(decl *entityDecl, st *types.Struct, schema *spec.Schema, seen map[string]string) error { - cmt, hasComments := s.ctx.FindComments(decl.Pkg, decl.Obj().Name()) - if !hasComments { - cmt = new(ast.CommentGroup) - } - name, ok := typeName(cmt) - if ok { - _ = swaggerSchemaForType(name, schemaTypable{schema: 
schema, skipExt: s.ctx.opts.SkipExtensions}) - return nil - } - // First pass: scan anonymous/embedded fields for allOf composition. - // Returns the target schema for properties (may differ from schema when allOf is used). - tgt, hasAllOf, err := s.scanEmbeddedFields(decl, st, schema, seen) - if err != nil { - return err - } - - if tgt == nil { - if schema != nil { - tgt = schema - } else { - tgt = &spec.Schema{} - } - } - if tgt.Properties == nil { - tgt.Properties = make(map[string]spec.Schema) - } - tgt.Typed("object", "") - - // Second pass: build properties from non-embedded exported fields. - if err := s.buildStructFields(decl, st, tgt, seen); err != nil { - return err - } - - if tgt == nil { - return nil - } - if hasAllOf && len(tgt.Properties) > 0 { - schema.AllOf = append(schema.AllOf, *tgt) - } - for k := range tgt.Properties { - if _, ok := seen[k]; !ok { - delete(tgt.Properties, k) - } - } - return nil -} - -// scanEmbeddedFields iterates over anonymous struct fields to detect allOf composition. 
-// It returns: -// - tgt: the schema that should receive properties (nil if no embedded fields were processed, -// schema itself for plain embeds, or a new schema when allOf is detected) -// - hasAllOf: whether any allOf member was found -func (s *schemaBuilder) scanEmbeddedFields(decl *entityDecl, st *types.Struct, schema *spec.Schema, seen map[string]string) (tgt *spec.Schema, hasAllOf bool, err error) { - for i := range st.NumFields() { - fld := st.Field(i) - if !fld.Anonymous() { - debugLogf(s.ctx.debug, "skipping field %q for allOf scan because not anonymous", fld.Name()) - continue - } - tg := st.Tag(i) - - debugLogf(s.ctx.debug, - "maybe allof field(%t) %s: %s (%T) [%q](anon: %t, embedded: %t)", - fld.IsField(), fld.Name(), fld.Type().String(), fld.Type(), tg, fld.Anonymous(), fld.Embedded(), - ) - afld := findASTField(decl.File, fld.Pos()) - if afld == nil { - debugLogf(s.ctx.debug, "can't find source associated with %s for %s", fld.String(), st.String()) - continue - } - - if ignored(afld.Doc) { - continue - } - - _, ignore, _, _, err := parseJSONTag(afld) - if err != nil { - return nil, false, err - } - if ignore { - continue - } - - _, isAliased := fld.Type().(*types.Alias) - - if !allOfMember(afld.Doc) && !isAliased { - // Plain embed: merge fields into the main schema - if tgt == nil { - tgt = schema - } - if err := s.buildEmbedded(fld.Type(), tgt, seen); err != nil { - return nil, false, err - } - continue - } - - if isAliased { - debugLogf(s.ctx.debug, "alias member in struct: %v", fld) - } - - // allOf member: fields go into a separate schema, embedded struct becomes an allOf entry - hasAllOf = true - if tgt == nil { - tgt = &spec.Schema{} - } - var newSch spec.Schema - if err := s.buildAllOf(fld.Type(), &newSch); err != nil { - return nil, false, err - } - - extractAllOfClass(afld.Doc, schema) - schema.AllOf = append(schema.AllOf, newSch) - } - - return tgt, hasAllOf, nil -} - -func (s *schemaBuilder) buildStructFields(decl *entityDecl, st 
*types.Struct, tgt *spec.Schema, seen map[string]string) error { - for fld := range st.Fields() { - if err := s.processStructField(fld, decl, tgt, seen); err != nil { - return err - } - } - return nil -} - -func (s *schemaBuilder) processStructField(fld *types.Var, decl *entityDecl, tgt *spec.Schema, seen map[string]string) error { - if fld.Embedded() || !fld.Exported() { - return nil - } - - afld := findASTField(decl.File, fld.Pos()) - if afld == nil { - debugLogf(s.ctx.debug, "can't find source associated with %s", fld.String()) - return nil - } - - if ignored(afld.Doc) { - return nil - } - - name, ignore, isString, omitEmpty, err := parseJSONTag(afld) - if err != nil { - return err - } - - if ignore { - for seenTagName, seenFieldName := range seen { - if seenFieldName == fld.Name() { - delete(tgt.Properties, seenTagName) - break - } - } - return nil - } - - ps := tgt.Properties[name] - if err = s.buildFromType(fld.Type(), schemaTypable{&ps, 0, s.ctx.opts.SkipExtensions}); err != nil { - return err - } - if isString { - ps.Typed("string", ps.Format) - ps.Ref = spec.Ref{} - ps.Items = nil - } - - if sfName, isStrfmt := strfmtName(afld.Doc); isStrfmt { - ps.Typed("string", sfName) - ps.Ref = spec.Ref{} - ps.Items = nil - } - - if err = s.createParser(name, tgt, &ps, afld).Parse(afld.Doc); err != nil { - return err - } - - if ps.Ref.String() == "" && name != fld.Name() { - addExtension(&ps.VendorExtensible, "x-go-name", fld.Name(), s.ctx.opts.SkipExtensions) - } - - if s.ctx.app.setXNullableForPointers { - if _, isPointer := fld.Type().(*types.Pointer); isPointer && !omitEmpty && - (ps.Extensions == nil || (ps.Extensions["x-nullable"] == nil && ps.Extensions["x-isnullable"] == nil)) { - ps.AddExtension("x-nullable", true) - } - } - - // we have 2 cases: - // 1. field with different name override tag - // 2. 
field with different name removes tag - // so we need to save both tag&name - seen[name] = fld.Name() - tgt.Properties[name] = ps - return nil -} - -func (s *schemaBuilder) buildAllOf(tpe types.Type, schema *spec.Schema) error { - debugLogf(s.ctx.debug, "allOf %s", tpe.Underlying()) - - switch ftpe := tpe.(type) { - case *types.Pointer: - return s.buildAllOf(ftpe.Elem(), schema) - case *types.Named: - return s.buildNamedAllOf(ftpe, schema) - case *types.Alias: - debugLogf(s.ctx.debug, "allOf member is alias %v => %v", ftpe, ftpe.Rhs()) - tgt := schemaTypable{schema: schema, skipExt: s.ctx.opts.SkipExtensions} - return s.buildAlias(ftpe, tgt) - case *types.TypeParam: - log.Printf("WARNING: generic type parameters are not supported yet %[1]v (%[1]T). Skipped", ftpe) - return nil - case *types.Chan: - log.Printf("WARNING: channels are not supported %[1]v (%[1]T). Skipped", ftpe) - return nil - case *types.Signature: - log.Printf("WARNING: functions are not supported %[1]v (%[1]T). Skipped", ftpe) - return nil - default: - log.Printf("WARNING: missing allOf parser for a %T, skipping field", ftpe) - return fmt.Errorf("unable to resolve allOf member for: %v: %w", ftpe, ErrCodeScan) - } -} - -func (s *schemaBuilder) buildNamedAllOf(ftpe *types.Named, schema *spec.Schema) error { - switch utpe := ftpe.Underlying().(type) { - case *types.Struct: - decl, found := s.ctx.FindModel(ftpe.Obj().Pkg().Path(), ftpe.Obj().Name()) - if !found { - return fmt.Errorf("can't find source file for struct: %s: %w", ftpe.String(), ErrCodeScan) - } - - if isStdTime(ftpe.Obj()) { - schema.Typed("string", "date-time") - return nil - } - - if sfnm, isf := strfmtName(decl.Comments); isf { - schema.Typed("string", sfnm) - return nil - } - - if decl.HasModelAnnotation() { - return s.makeRef(decl, schemaTypable{schema, 0, s.ctx.opts.SkipExtensions}) - } - - return s.buildFromStruct(decl, utpe, schema, make(map[string]string)) - case *types.Interface: - decl, found := 
s.ctx.FindModel(ftpe.Obj().Pkg().Path(), ftpe.Obj().Name()) - if !found { - return fmt.Errorf("can't find source file for interface: %s: %w", ftpe.String(), ErrCodeScan) - } - - if sfnm, isf := strfmtName(decl.Comments); isf { - schema.Typed("string", sfnm) - return nil - } - - if decl.HasModelAnnotation() { - return s.makeRef(decl, schemaTypable{schema, 0, s.ctx.opts.SkipExtensions}) - } - - return s.buildFromInterface(decl, utpe, schema, make(map[string]string)) - case *types.TypeParam: - log.Printf("WARNING: generic type parameters are not supported yet %[1]v (%[1]T). Skipped", ftpe) - return nil - case *types.Chan: - log.Printf("WARNING: channels are not supported %[1]v (%[1]T). Skipped", ftpe) - return nil - case *types.Signature: - log.Printf("WARNING: functions are not supported %[1]v (%[1]T). Skipped", ftpe) - return nil - default: - log.Printf( - "WARNING: can't figure out object type for allOf named type (%T): %v", - ftpe, utpe, - ) - return fmt.Errorf("unable to locate source file for allOf (%T): %v: %w", - ftpe, utpe, ErrCodeScan, - ) - } -} - -func (s *schemaBuilder) buildEmbedded(tpe types.Type, schema *spec.Schema, seen map[string]string) error { - debugLogf(s.ctx.debug, "embedded %v", tpe.Underlying()) - - switch ftpe := tpe.(type) { - case *types.Pointer: - return s.buildEmbedded(ftpe.Elem(), schema, seen) - case *types.Named: - return s.buildNamedEmbedded(ftpe, schema, seen) - case *types.Alias: - debugLogf(s.ctx.debug, "embedded alias %v => %v", ftpe, ftpe.Rhs()) - tgt := schemaTypable{schema, 0, s.ctx.opts.SkipExtensions} - return s.buildAlias(ftpe, tgt) - case *types.Union: // e.g. type X interface{ ~uint16 | ~float32 } - log.Printf("WARNING: union type constraints are not supported yet %[1]v (%[1]T). Skipped", ftpe) - return nil - case *types.TypeParam: - log.Printf("WARNING: generic type parameters are not supported yet %[1]v (%[1]T). 
Skipped", ftpe) - return nil - case *types.Chan: - log.Printf("WARNING: channels are not supported %[1]v (%[1]T). Skipped", ftpe) - return nil - case *types.Signature: - log.Printf("WARNING: functions are not supported %[1]v (%[1]T). Skipped", ftpe) - return nil - default: - log.Printf("WARNING: Missing embedded parser for a %T, skipping model\n", ftpe) - return nil - } -} - -func (s *schemaBuilder) buildNamedEmbedded(ftpe *types.Named, schema *spec.Schema, seen map[string]string) error { - debugLogf(s.ctx.debug, "embedded named type: %T", ftpe.Underlying()) - if unsupportedBuiltin(ftpe) { - log.Printf("WARNING: skipped unsupported builtin type: %v", ftpe) - - return nil - } - - switch utpe := ftpe.Underlying().(type) { - case *types.Struct: - decl, found := s.ctx.FindModel(ftpe.Obj().Pkg().Path(), ftpe.Obj().Name()) - if !found { - return fmt.Errorf("can't find source file for struct: %s: %w", ftpe.String(), ErrCodeScan) - } - - return s.buildFromStruct(decl, utpe, schema, seen) - case *types.Interface: - if utpe.Empty() { - return nil - } - o := ftpe.Obj() - if isAny(o) { - return nil - } - if isStdError(o) { - tgt := schemaTypable{schema: schema, skipExt: s.ctx.opts.SkipExtensions} - tgt.AddExtension("x-go-type", o.Name()) - return swaggerSchemaForType(o.Name(), tgt) - } - mustNotBeABuiltinType(o) - - decl, found := s.ctx.FindModel(o.Pkg().Path(), o.Name()) - if !found { - return fmt.Errorf("can't find source file for struct: %s: %w", ftpe.String(), ErrCodeScan) - } - return s.buildFromInterface(decl, utpe, schema, seen) - case *types.Union: // e.g. type X interface{ ~uint16 | ~float32 } - log.Printf("WARNING: union type constraints are not supported yet %[1]v (%[1]T). Skipped", utpe) - return nil - case *types.TypeParam: - log.Printf("WARNING: generic type parameters are not supported yet %[1]v (%[1]T). Skipped", utpe) - return nil - case *types.Chan: - log.Printf("WARNING: channels are not supported %[1]v (%[1]T). 
Skipped", utpe) - return nil - case *types.Signature: - log.Printf("WARNING: functions are not supported %[1]v (%[1]T). Skipped", utpe) - return nil - default: - log.Printf("WARNING: can't figure out object type for embedded named type (%T): %v", - ftpe, utpe, - ) - return nil - } -} - -func (s *schemaBuilder) makeRef(decl *entityDecl, prop swaggerTypable) error { - nm, _ := decl.Names() - ref, err := spec.NewRef("#/definitions/" + nm) - if err != nil { - return err - } - prop.SetRef(ref) - s.postDecls = append(s.postDecls, decl) - return nil -} - -func (s *schemaBuilder) createParser(nm string, schema, ps *spec.Schema, fld *ast.Field) *sectionedParser { - sp := new(sectionedParser) - - schemeType, err := ps.Type.MarshalJSON() - if err != nil { - return nil - } - - if ps.Ref.String() != "" && !s.ctx.opts.DescWithRef { - // if DescWithRef option is enabled, allow the tagged documentation to flow alongside the $ref - // otherwise behave as expected by jsonschema draft4: $ref predates all sibling keys. 
- sp.taggers = []tagParser{ - newSingleLineTagParser("required", &setRequiredSchema{schema, nm}), - } - - return sp - } - - sp.setDescription = func(lines []string) { - ps.Description = joinDropLast(lines) - enumDesc := getEnumDesc(ps.Extensions) - if enumDesc != "" { - ps.Description += "\n" + enumDesc - } - } - sp.taggers = []tagParser{ - newSingleLineTagParser("maximum", &setMaximum{schemaValidations{ps}, rxf(rxMaximumFmt, "")}), - newSingleLineTagParser("minimum", &setMinimum{schemaValidations{ps}, rxf(rxMinimumFmt, "")}), - newSingleLineTagParser("multipleOf", &setMultipleOf{schemaValidations{ps}, rxf(rxMultipleOfFmt, "")}), - newSingleLineTagParser("minLength", &setMinLength{schemaValidations{ps}, rxf(rxMinLengthFmt, "")}), - newSingleLineTagParser("maxLength", &setMaxLength{schemaValidations{ps}, rxf(rxMaxLengthFmt, "")}), - newSingleLineTagParser("pattern", &setPattern{schemaValidations{ps}, rxf(rxPatternFmt, "")}), - newSingleLineTagParser("minItems", &setMinItems{schemaValidations{ps}, rxf(rxMinItemsFmt, "")}), - newSingleLineTagParser("maxItems", &setMaxItems{schemaValidations{ps}, rxf(rxMaxItemsFmt, "")}), - newSingleLineTagParser("unique", &setUnique{schemaValidations{ps}, rxf(rxUniqueFmt, "")}), - newSingleLineTagParser("enum", &setEnum{schemaValidations{ps}, rxf(rxEnumFmt, "")}), - newSingleLineTagParser("default", &setDefault{&spec.SimpleSchema{Type: string(schemeType)}, schemaValidations{ps}, rxf(rxDefaultFmt, "")}), - newSingleLineTagParser("type", &setDefault{&spec.SimpleSchema{Type: string(schemeType)}, schemaValidations{ps}, rxf(rxDefaultFmt, "")}), - newSingleLineTagParser("example", &setExample{&spec.SimpleSchema{Type: string(schemeType)}, schemaValidations{ps}, rxf(rxExampleFmt, "")}), - newSingleLineTagParser("required", &setRequiredSchema{schema, nm}), - newSingleLineTagParser("readOnly", &setReadOnlySchema{ps}), - newSingleLineTagParser("discriminator", &setDiscriminator{schema, nm}), - newMultiLineTagParser("YAMLExtensionsBlock", 
newYamlParser(rxExtensions, schemaVendorExtensibleSetter(ps)), true), - } - - itemsTaggers := func(items *spec.Schema, level int) []tagParser { - schemeType, err := items.Type.MarshalJSON() - if err != nil { - return nil - } - // the expression is 1-index based not 0-index - itemsPrefix := fmt.Sprintf(rxItemsPrefixFmt, level+1) - return []tagParser{ - newSingleLineTagParser(fmt.Sprintf("items%dMaximum", level), &setMaximum{schemaValidations{items}, rxf(rxMaximumFmt, itemsPrefix)}), - newSingleLineTagParser(fmt.Sprintf("items%dMinimum", level), &setMinimum{schemaValidations{items}, rxf(rxMinimumFmt, itemsPrefix)}), - newSingleLineTagParser(fmt.Sprintf("items%dMultipleOf", level), &setMultipleOf{schemaValidations{items}, rxf(rxMultipleOfFmt, itemsPrefix)}), - newSingleLineTagParser(fmt.Sprintf("items%dMinLength", level), &setMinLength{schemaValidations{items}, rxf(rxMinLengthFmt, itemsPrefix)}), - newSingleLineTagParser(fmt.Sprintf("items%dMaxLength", level), &setMaxLength{schemaValidations{items}, rxf(rxMaxLengthFmt, itemsPrefix)}), - newSingleLineTagParser(fmt.Sprintf("items%dPattern", level), &setPattern{schemaValidations{items}, rxf(rxPatternFmt, itemsPrefix)}), - newSingleLineTagParser(fmt.Sprintf("items%dMinItems", level), &setMinItems{schemaValidations{items}, rxf(rxMinItemsFmt, itemsPrefix)}), - newSingleLineTagParser(fmt.Sprintf("items%dMaxItems", level), &setMaxItems{schemaValidations{items}, rxf(rxMaxItemsFmt, itemsPrefix)}), - newSingleLineTagParser(fmt.Sprintf("items%dUnique", level), &setUnique{schemaValidations{items}, rxf(rxUniqueFmt, itemsPrefix)}), - newSingleLineTagParser(fmt.Sprintf("items%dEnum", level), &setEnum{schemaValidations{items}, rxf(rxEnumFmt, itemsPrefix)}), - newSingleLineTagParser(fmt.Sprintf("items%dDefault", level), - &setDefault{&spec.SimpleSchema{Type: string(schemeType)}, schemaValidations{items}, rxf(rxDefaultFmt, itemsPrefix)}), - newSingleLineTagParser(fmt.Sprintf("items%dExample", level), - 
&setExample{&spec.SimpleSchema{Type: string(schemeType)}, schemaValidations{items}, rxf(rxExampleFmt, itemsPrefix)}), - } - } - - var parseArrayTypes func(expr ast.Expr, items *spec.SchemaOrArray, level int) ([]tagParser, error) - parseArrayTypes = func(expr ast.Expr, items *spec.SchemaOrArray, level int) ([]tagParser, error) { - if items == nil || items.Schema == nil { - return []tagParser{}, nil - } - switch iftpe := expr.(type) { - case *ast.ArrayType: - eleTaggers := itemsTaggers(items.Schema, level) - sp.taggers = append(eleTaggers, sp.taggers...) - otherTaggers, err := parseArrayTypes(iftpe.Elt, items.Schema.Items, level+1) - if err != nil { - return nil, err - } - return otherTaggers, nil - case *ast.Ident: - taggers := []tagParser{} - if iftpe.Obj == nil { - taggers = itemsTaggers(items.Schema, level) - } - otherTaggers, err := parseArrayTypes(expr, items.Schema.Items, level+1) - if err != nil { - return nil, err - } - return append(taggers, otherTaggers...), nil - case *ast.StarExpr: - otherTaggers, err := parseArrayTypes(iftpe.X, items, level) - if err != nil { - return nil, err - } - return otherTaggers, nil - default: - return nil, fmt.Errorf("unknown field type element for %q: %w", nm, ErrCodeScan) - } - } - - if fld == nil { - // the parser may be called outside the context of struct field. - // In that case, just return the outcome of the parsing now. - return sp - } - - // check if this is a primitive, if so parse the validations from the - // doc comments of the slice declaration. - if ftped, ok := fld.Type.(*ast.ArrayType); ok { - taggers, err := parseArrayTypes(ftped.Elt, ps.Items, 0) - if err != nil { - return sp - } - sp.taggers = append(taggers, sp.taggers...) 
- } - - return sp -} - -func schemaVendorExtensibleSetter(meta *spec.Schema) func(json.RawMessage) error { - return func(jsonValue json.RawMessage) error { - var jsonData spec.Extensions - err := json.Unmarshal(jsonValue, &jsonData) - if err != nil { - return err - } - for k := range jsonData { - if !rxAllowedExtensions.MatchString(k) { - return fmt.Errorf("invalid schema extension name, should start from `x-`: %s: %w", k, ErrCodeScan) - } - } - meta.Extensions = jsonData - return nil - } -} - -type tagOptions []string - -func (t tagOptions) Contain(option string) bool { - for i := 1; i < len(t); i++ { - if t[i] == option { - return true - } - } - return false -} - -func (t tagOptions) Name() string { - return t[0] -} - -func parseJSONTag(field *ast.Field) (name string, ignore, isString, omitEmpty bool, err error) { - if len(field.Names) > 0 { - name = field.Names[0].Name - } - if field.Tag == nil || len(strings.TrimSpace(field.Tag.Value)) == 0 { - return name, false, false, false, nil - } - - tv, err := strconv.Unquote(field.Tag.Value) - if err != nil { - return name, false, false, false, err - } - - if strings.TrimSpace(tv) != "" { - st := reflect.StructTag(tv) - jsonParts := tagOptions(strings.Split(st.Get("json"), ",")) - - if jsonParts.Contain("string") { - // Need to check if the field type is a scalar. Otherwise, the - // ",string" directive doesn't apply. - isString = isFieldStringable(field.Type) - } - - omitEmpty = jsonParts.Contain("omitempty") - - switch jsonParts.Name() { - case "-": - return name, true, isString, omitEmpty, nil - case "": - return name, false, isString, omitEmpty, nil - default: - return jsonParts.Name(), false, isString, omitEmpty, nil - } - } - return name, false, false, false, nil -} - -// isFieldStringable check if the field type is a scalar. If the field type is -// *ast.StarExpr and is pointer type, check if it refers to a scalar. -// Otherwise, the ",string" directive doesn't apply. 
-func isFieldStringable(tpe ast.Expr) bool { - if ident, ok := tpe.(*ast.Ident); ok { - switch ident.Name { - case goTypeInt, "int8", goTypeInt16, goTypeInt32, goTypeInt64, - "uint", "uint8", "uint16", "uint32", "uint64", - goTypeFloat64, typeString, typeBool: - return true - } - } else if starExpr, ok := tpe.(*ast.StarExpr); ok { - return isFieldStringable(starExpr.X) - } else { - return false - } - return false -} - -func isTextMarshaler(tpe types.Type) bool { - encodingPkg, err := importer.Default().Import("encoding") - if err != nil { - return false - } - // Proposal for enhancement: there should be a better way to check this than hardcoding the TextMarshaler iface. - obj := encodingPkg.Scope().Lookup("TextMarshaler") - if obj == nil { - return false - } - ifc, ok := obj.Type().Underlying().(*types.Interface) - if !ok { - return false - } - - return types.Implements(tpe, ifc) -} - -func isStdTime(o *types.TypeName) bool { - return o.Pkg() != nil && o.Pkg().Name() == "time" && o.Name() == "Time" -} - -func isStdError(o *types.TypeName) bool { - return o.Pkg() == nil && o.Name() == "error" -} - -func isStdJSONRawMessage(o *types.TypeName) bool { - return o.Pkg() != nil && o.Pkg().Path() == "encoding/json" && o.Name() == "RawMessage" -} - -func isAny(o *types.TypeName) bool { - return o.Pkg() == nil && o.Name() == "any" -} diff --git a/schema_go118_test.go b/schema_go118_test.go deleted file mode 100644 index 4c59f17..0000000 --- a/schema_go118_test.go +++ /dev/null @@ -1,152 +0,0 @@ -// SPDX-FileCopyrightText: Copyright 2015-2025 go-swagger maintainers -// SPDX-License-Identifier: Apache-2.0 - -package codescan - -import ( - "testing" - - "github.com/go-openapi/testify/v2/assert" - "github.com/go-openapi/testify/v2/require" - - "github.com/go-openapi/spec" -) - -var go118ClassificationCtx *scanCtx //nolint:gochecknoglobals // test package cache shared across test functions - -func loadGo118ClassificationPkgsCtx(t *testing.T) *scanCtx { - t.Helper() - - if 
go118ClassificationCtx != nil { - return go118ClassificationCtx - } - - sctx, err := newScanCtx(&Options{ - Packages: []string{ - "./goparsing/go118", - }, - WorkDir: "fixtures", - }) - require.NoError(t, err) - go118ClassificationCtx = sctx - - return go118ClassificationCtx -} - -func getGo118ClassificationModel(sctx *scanCtx, nm string) *entityDecl { - decl, ok := sctx.FindDecl(fixturesModule+"/goparsing/go118", nm) - if !ok { - return nil - } - return decl -} - -func TestGo118SwaggerTypeNamed(t *testing.T) { - sctx := loadGo118ClassificationPkgsCtx(t) - decl := getGo118ClassificationModel(sctx, "NamedWithType") - require.NotNil(t, decl) - prs := &schemaBuilder{ - ctx: sctx, - decl: decl, - } - models := make(map[string]spec.Schema) - require.NoError(t, prs.Build(models)) - schema := models["namedWithType"] - - assertProperty(t, &schema, "object", "some_map", "", "SomeMap") -} - -func TestGo118AliasedModels(t *testing.T) { - sctx := loadGo118ClassificationPkgsCtx(t) - - names := []string{ - "SomeObject", - } - - defs := make(map[string]spec.Schema) - for _, nm := range names { - decl := getGo118ClassificationModel(sctx, nm) - require.NotNil(t, decl) - - prs := &schemaBuilder{ - decl: decl, - ctx: sctx, - } - require.NoError(t, prs.Build(defs)) - } - - for k := range defs { - for i, b := range names { - if b == k { - // remove the entry from the collection - names = append(names[:i], names[i+1:]...) 
- } - } - } - if assert.Empty(t, names) { - // map types - assertMapDefinition(t, defs, "SomeObject", "object", "", "") - } -} - -func TestGo118InterfaceField(t *testing.T) { - sctx := loadGo118ClassificationPkgsCtx(t) - decl := getGo118ClassificationModel(sctx, "Interfaced") - require.NotNil(t, decl) - prs := &schemaBuilder{ - ctx: sctx, - decl: decl, - } - models := make(map[string]spec.Schema) - require.NoError(t, prs.Build(models)) - - schema := models["Interfaced"] - assertProperty(t, &schema, "", "custom_data", "", "CustomData") -} - -func TestGo118ParameterParser_Issue2011(t *testing.T) { - sctx := loadGo118ClassificationPkgsCtx(t) - operations := make(map[string]*spec.Operation) - td := getParameter(sctx, "NumPlates") - prs := ¶meterBuilder{ - ctx: sctx, - decl: td, - } - require.NoError(t, prs.Build(operations)) - - op := operations["putNumPlate"] - require.NotNil(t, op) - require.Len(t, op.Parameters, 1) - sch := op.Parameters[0].Schema - require.NotNil(t, sch) -} - -func TestGo118ParseResponses_Issue2011(t *testing.T) { - sctx := loadGo118ClassificationPkgsCtx(t) - responses := make(map[string]spec.Response) - td := getResponse(sctx, "NumPlatesResp") - prs := &responseBuilder{ - ctx: sctx, - decl: td, - } - require.NoError(t, prs.Build(responses)) - - resp := responses["NumPlatesResp"] - require.Empty(t, resp.Headers) - require.NotNil(t, resp.Schema) -} - -func TestGo118_Issue2809(t *testing.T) { - sctx := loadGo118ClassificationPkgsCtx(t) - decl := getGo118ClassificationModel(sctx, "transportErr") - require.NotNil(t, decl) - prs := &schemaBuilder{ - ctx: sctx, - decl: decl, - } - models := make(map[string]spec.Schema) - require.NoError(t, prs.Build(models)) - - schema := models["transportErr"] - assertProperty(t, &schema, "", "data", "", "Data") -} diff --git a/schema_test.go b/schema_test.go deleted file mode 100644 index 96a44cd..0000000 --- a/schema_test.go +++ /dev/null @@ -1,2661 +0,0 @@ -// SPDX-FileCopyrightText: Copyright 2015-2025 go-swagger 
maintainers -// SPDX-License-Identifier: Apache-2.0 - -package codescan - -import ( - "encoding/json" - "os" - "path/filepath" - "testing" - - "gopkg.in/yaml.v2" - - "github.com/go-openapi/testify/v2/assert" - "github.com/go-openapi/testify/v2/require" - - "github.com/go-openapi/spec" -) - -const ( - epsilon = 1e-9 - - // fixturesModule is the module path of the fixtures nested module. - fixturesModule = "github.com/go-openapi/codescan/fixtures" -) - -func assertHasExtension(t *testing.T, sch spec.Schema, ext string) { - t.Helper() - pkg, hasExt := sch.Extensions.GetString(ext) - assert.TrueT(t, hasExt) - assert.NotEmpty(t, pkg) -} - -func assertHasGoPackageExt(t *testing.T, sch spec.Schema) { - t.Helper() - assertHasExtension(t, sch, "x-go-package") -} - -func assertHasTitle(t *testing.T, sch spec.Schema) { - t.Helper() - assert.NotEmpty(t, sch.Title) -} - -func assertHasNoTitle(t *testing.T, sch spec.Schema) { - t.Helper() - assert.Empty(t, sch.Title) -} - -func TestSchemaBuilder_Struct_Tag(t *testing.T) { - sctx := loadPetstorePkgsCtx(t) - var td *entityDecl - for k := range sctx.app.Models { - if k.Name != "Tag" { - continue - } - td = sctx.app.Models[k] - break - } - require.NotNil(t, td) - - prs := &schemaBuilder{ - ctx: sctx, - decl: td, - } - result := make(map[string]spec.Schema) - require.NoError(t, prs.Build(result)) -} - -func TestSchemaBuilder_Struct_Pet(t *testing.T) { - // Debug = true - // defer func() { Debug = false }() - - sctx := loadPetstorePkgsCtx(t) - var td *entityDecl - for k := range sctx.app.Models { - if k.Name != "Pet" { - continue - } - td = sctx.app.Models[k] - break - } - require.NotNil(t, td) - - prs := &schemaBuilder{ - ctx: sctx, - decl: td, - } - result := make(map[string]spec.Schema) - require.NoError(t, prs.Build(result)) -} - -func TestSchemaBuilder_Struct_Order(t *testing.T) { - // Debug = true - // defer func() { Debug = false }() - - sctx := loadPetstorePkgsCtx(t) - var td *entityDecl - for k := range sctx.app.Models { - if 
k.Name != "Order" { - continue - } - td = sctx.app.Models[k] - break - } - require.NotNil(t, td) - - prs := &schemaBuilder{ - ctx: sctx, - decl: td, - } - result := make(map[string]spec.Schema) - require.NoError(t, prs.Build(result)) -} - -func TestSchemaBuilder(t *testing.T) { - sctx := loadClassificationPkgsCtx(t) - decl := getClassificationModel(sctx, "NoModel") - require.NotNil(t, decl) - prs := &schemaBuilder{ - ctx: sctx, - decl: decl, - } - models := make(map[string]spec.Schema) - require.NoError(t, prs.Build(models)) - schema := models["NoModel"] - - assert.Equal(t, spec.StringOrArray([]string{"object"}), schema.Type) - assert.EqualT(t, "NoModel is a struct without an annotation.", schema.Title) - assert.EqualT(t, "NoModel exists in a package\nbut is not annotated with the swagger model annotations\nso it should now show up in a test.", schema.Description) - assert.Len(t, schema.Required, 3) - assert.Len(t, schema.Properties, 12) - - assertProperty(t, &schema, "integer", "id", "int64", "ID") - prop, ok := schema.Properties["id"] - assert.EqualT(t, "ID of this no model instance.\nids in this application start at 11 and are smaller than 1000", prop.Description) - assert.TrueT(t, ok, "should have had an 'id' property") - assert.InDeltaT(t, 1000.00, *prop.Maximum, epsilon) - assert.TrueT(t, prop.ExclusiveMaximum, "'id' should have had an exclusive maximum") - assert.NotNil(t, prop.Minimum) - assert.InDeltaT(t, 10.00, *prop.Minimum, epsilon) - assert.TrueT(t, prop.ExclusiveMinimum, "'id' should have had an exclusive minimum") - assert.Equal(t, 11, prop.Default, "ID default value is incorrect") - - assertProperty(t, &schema, "string", "NoNameOmitEmpty", "", "") - prop, ok = schema.Properties["NoNameOmitEmpty"] - assert.EqualT(t, "A field which has omitempty set but no name", prop.Description) - assert.TrueT(t, ok, "should have had an 'NoNameOmitEmpty' property") - - assertProperty(t, &schema, "string", "noteb64", "byte", "Note") - prop, ok = 
schema.Properties["noteb64"] - assert.TrueT(t, ok, "should have a 'noteb64' property") - assert.Nil(t, prop.Items) - - assertProperty(t, &schema, "integer", "score", "int32", "Score") - prop, ok = schema.Properties["score"] - assert.EqualT(t, "The Score of this model", prop.Description) - assert.TrueT(t, ok, "should have had a 'score' property") - assert.InDeltaT(t, 45.00, *prop.Maximum, epsilon) - assert.FalseT(t, prop.ExclusiveMaximum, "'score' should not have had an exclusive maximum") - assert.NotNil(t, prop.Minimum) - assert.InDeltaT(t, 3.00, *prop.Minimum, epsilon) - assert.FalseT(t, prop.ExclusiveMinimum, "'score' should not have had an exclusive minimum") - assert.EqualValues(t, 27, prop.Example) - require.NotNil(t, prop.MultipleOf, "'score' should have had a multipleOf") - assert.InDeltaT(t, 3.00, *prop.MultipleOf, epsilon, "'score' should have had multipleOf 3") - - expectedNameExtensions := spec.Extensions{ - "x-go-name": "Name", - "x-property-array": []any{ - "value1", - "value2", - }, - "x-property-array-obj": []any{ - map[string]any{ - "name": "obj", - "value": "field", - }, - }, - "x-property-value": "value", - } - - assertProperty(t, &schema, "string", "name", "", "Name") - prop, ok = schema.Properties["name"] - assert.TrueT(t, ok) - assert.EqualT(t, "Name of this no model instance", prop.Description) - require.NotNil(t, prop.MinLength) - require.NotNil(t, prop.MaxLength) - assert.EqualT(t, int64(4), *prop.MinLength) - assert.EqualT(t, int64(50), *prop.MaxLength) - assert.EqualT(t, "[A-Za-z0-9-.]*", prop.Pattern) - assert.Equal(t, expectedNameExtensions, prop.Extensions) - - assertProperty(t, &schema, "string", "created", "date-time", "Created") - prop, ok = schema.Properties["created"] - assert.EqualT(t, "Created holds the time when this entry was created", prop.Description) - assert.TrueT(t, ok, "should have a 'created' property") - assert.TrueT(t, prop.ReadOnly, "'created' should be read only") - - assertProperty(t, &schema, "string", 
"gocreated", "date-time", "GoTimeCreated") - prop, ok = schema.Properties["gocreated"] - assert.EqualT(t, "GoTimeCreated holds the time when this entry was created in go time.Time", prop.Description) - assert.TrueT(t, ok, "should have a 'gocreated' property") - - assertArrayProperty(t, &schema, "string", "foo_slice", "", "FooSlice") - prop, ok = schema.Properties["foo_slice"] - assert.EqualT(t, "a FooSlice has foos which are strings", prop.Description) - assert.TrueT(t, ok, "should have a 'foo_slice' property") - require.NotNil(t, prop.Items, "foo_slice should have had an items property") - require.NotNil(t, prop.Items.Schema, "foo_slice.items should have had a schema property") - assert.TrueT(t, prop.UniqueItems, "'foo_slice' should have unique items") - assert.EqualT(t, int64(3), *prop.MinItems, "'foo_slice' should have had 3 min items") - assert.EqualT(t, int64(10), *prop.MaxItems, "'foo_slice' should have had 10 max items") - itprop := prop.Items.Schema - assert.EqualT(t, int64(3), *itprop.MinLength, "'foo_slice.items.minLength' should have been 3") - assert.EqualT(t, int64(10), *itprop.MaxLength, "'foo_slice.items.maxLength' should have been 10") - assert.EqualT(t, "\\w+", itprop.Pattern, "'foo_slice.items.pattern' should have \\w+") - - assertArrayProperty(t, &schema, "string", "time_slice", "date-time", "TimeSlice") - prop, ok = schema.Properties["time_slice"] - assert.EqualT(t, "a TimeSlice is a slice of times", prop.Description) - assert.TrueT(t, ok, "should have a 'time_slice' property") - require.NotNil(t, prop.Items, "time_slice should have had an items property") - require.NotNil(t, prop.Items.Schema, "time_slice.items should have had a schema property") - assert.TrueT(t, prop.UniqueItems, "'time_slice' should have unique items") - assert.EqualT(t, int64(3), *prop.MinItems, "'time_slice' should have had 3 min items") - assert.EqualT(t, int64(10), *prop.MaxItems, "'time_slice' should have had 10 max items") - - assertArrayProperty(t, &schema, "array", 
"bar_slice", "", "BarSlice") - prop, ok = schema.Properties["bar_slice"] - assert.EqualT(t, "a BarSlice has bars which are strings", prop.Description) - assert.TrueT(t, ok, "should have a 'bar_slice' property") - require.NotNil(t, prop.Items, "bar_slice should have had an items property") - require.NotNil(t, prop.Items.Schema, "bar_slice.items should have had a schema property") - assert.TrueT(t, prop.UniqueItems, "'bar_slice' should have unique items") - assert.EqualT(t, int64(3), *prop.MinItems, "'bar_slice' should have had 3 min items") - assert.EqualT(t, int64(10), *prop.MaxItems, "'bar_slice' should have had 10 max items") - - itprop = prop.Items.Schema - require.NotNil(t, itprop) - assert.EqualT(t, int64(4), *itprop.MinItems, "'bar_slice.items.minItems' should have been 4") - assert.EqualT(t, int64(9), *itprop.MaxItems, "'bar_slice.items.maxItems' should have been 9") - - itprop2 := itprop.Items.Schema - require.NotNil(t, itprop2) - assert.EqualT(t, int64(5), *itprop2.MinItems, "'bar_slice.items.items.minItems' should have been 5") - assert.EqualT(t, int64(8), *itprop2.MaxItems, "'bar_slice.items.items.maxItems' should have been 8") - - itprop3 := itprop2.Items.Schema - require.NotNil(t, itprop3) - assert.EqualT(t, int64(3), *itprop3.MinLength, "'bar_slice.items.items.items.minLength' should have been 3") - assert.EqualT(t, int64(10), *itprop3.MaxLength, "'bar_slice.items.items.items.maxLength' should have been 10") - assert.EqualT(t, "\\w+", itprop3.Pattern, "'bar_slice.items.items.items.pattern' should have \\w+") - - assertArrayProperty(t, &schema, "array", "deep_time_slice", "", "DeepTimeSlice") - prop, ok = schema.Properties["deep_time_slice"] - assert.EqualT(t, "a DeepSlice has bars which are time", prop.Description) - assert.TrueT(t, ok, "should have a 'deep_time_slice' property") - require.NotNil(t, prop.Items, "deep_time_slice should have had an items property") - require.NotNil(t, prop.Items.Schema, "deep_time_slice.items should have had a schema 
property") - assert.TrueT(t, prop.UniqueItems, "'deep_time_slice' should have unique items") - assert.EqualT(t, int64(3), *prop.MinItems, "'deep_time_slice' should have had 3 min items") - assert.EqualT(t, int64(10), *prop.MaxItems, "'deep_time_slice' should have had 10 max items") - itprop = prop.Items.Schema - require.NotNil(t, itprop) - assert.EqualT(t, int64(4), *itprop.MinItems, "'deep_time_slice.items.minItems' should have been 4") - assert.EqualT(t, int64(9), *itprop.MaxItems, "'deep_time_slice.items.maxItems' should have been 9") - - itprop2 = itprop.Items.Schema - require.NotNil(t, itprop2) - assert.EqualT(t, int64(5), *itprop2.MinItems, "'deep_time_slice.items.items.minItems' should have been 5") - assert.EqualT(t, int64(8), *itprop2.MaxItems, "'deep_time_slice.items.items.maxItems' should have been 8") - - itprop3 = itprop2.Items.Schema - require.NotNil(t, itprop3) - - assertArrayProperty(t, &schema, "object", "items", "", "Items") - prop, ok = schema.Properties["items"] - assert.TrueT(t, ok, "should have an 'items' slice") - assert.NotNil(t, prop.Items, "items should have had an items property") - assert.NotNil(t, prop.Items.Schema, "items.items should have had a schema property") - itprop = prop.Items.Schema - assert.Len(t, itprop.Properties, 5) - assert.Len(t, itprop.Required, 4) - assertProperty(t, itprop, "integer", "id", "int32", "ID") - iprop, ok := itprop.Properties["id"] - assert.TrueT(t, ok) - assert.EqualT(t, "ID of this no model instance.\nids in this application start at 11 and are smaller than 1000", iprop.Description) - require.NotNil(t, iprop.Maximum) - assert.InDeltaT(t, 1000.00, *iprop.Maximum, epsilon) - assert.TrueT(t, iprop.ExclusiveMaximum, "'id' should have had an exclusive maximum") - require.NotNil(t, iprop.Minimum) - assert.InDeltaT(t, 10.00, *iprop.Minimum, epsilon) - assert.TrueT(t, iprop.ExclusiveMinimum, "'id' should have had an exclusive minimum") - assert.Equal(t, 11, iprop.Default, "ID default value is incorrect") - - 
assertRef(t, itprop, "pet", "Pet", "#/definitions/pet") - iprop, ok = itprop.Properties["pet"] - assert.TrueT(t, ok) - if itprop.Ref.String() != "" { - assert.EqualT(t, "The Pet to add to this NoModel items bucket.\nPets can appear more than once in the bucket", iprop.Description) - } - - assertProperty(t, itprop, "integer", "quantity", "int16", "Quantity") - iprop, ok = itprop.Properties["quantity"] - assert.TrueT(t, ok) - assert.EqualT(t, "The amount of pets to add to this bucket.", iprop.Description) - assert.InDeltaT(t, 1.00, *iprop.Minimum, epsilon) - assert.InDeltaT(t, 10.00, *iprop.Maximum, epsilon) - - assertProperty(t, itprop, "string", "expiration", "date-time", "Expiration") - iprop, ok = itprop.Properties["expiration"] - assert.TrueT(t, ok) - assert.EqualT(t, "A dummy expiration date.", iprop.Description) - - assertProperty(t, itprop, "string", "notes", "", "Notes") - iprop, ok = itprop.Properties["notes"] - assert.TrueT(t, ok) - assert.EqualT(t, "Notes to add to this item.\nThis can be used to add special instructions.", iprop.Description) - - decl2 := getClassificationModel(sctx, "StoreOrder") - require.NotNil(t, decl2) - require.NoError(t, (&schemaBuilder{decl: decl2, ctx: sctx}).Build(models)) - msch, ok := models["order"] - pn := fixturesModule + "/goparsing/classification/models" - assert.TrueT(t, ok) - assert.Equal(t, pn, msch.Extensions["x-go-package"]) - assert.Equal(t, "StoreOrder", msch.Extensions["x-go-name"]) -} - -func TestSchemaBuilder_AddExtensions(t *testing.T) { - sctx := loadClassificationPkgsCtx(t) - models := make(map[string]spec.Schema) - decl := getClassificationModel(sctx, "StoreOrder") - require.NotNil(t, decl) - require.NoError(t, (&schemaBuilder{decl: decl, ctx: sctx}).Build(models)) - - msch, ok := models["order"] - pn := fixturesModule + "/goparsing/classification/models" - assert.TrueT(t, ok) - assert.Equal(t, pn, msch.Extensions["x-go-package"]) - assert.Equal(t, "StoreOrder", msch.Extensions["x-go-name"]) - 
assert.EqualT(t, "StoreOrder represents an order in this application.", msch.Title) -} - -func TestTextMarhalCustomType(t *testing.T) { - sctx := loadClassificationPkgsCtx(t) - decl := getClassificationModel(sctx, "TextMarshalModel") - require.NotNil(t, decl) - prs := &schemaBuilder{ - ctx: sctx, - decl: decl, - } - models := make(map[string]spec.Schema) - require.NoError(t, prs.Build(models)) - schema := models["TextMarshalModel"] - assertProperty(t, &schema, "string", "id", "uuid", "ID") - assertArrayProperty(t, &schema, "string", "ids", "uuid", "IDs") - assertProperty(t, &schema, "string", "struct", "", "Struct") - assertProperty(t, &schema, "string", "map", "", "Map") - assertMapProperty(t, &schema, "string", "mapUUID", "uuid", "MapUUID") - assertRef(t, &schema, "url", "URL", "#/definitions/URL") - assertProperty(t, &schema, "string", "time", "date-time", "Time") - assertProperty(t, &schema, "string", "structStrfmt", "date-time", "StructStrfmt") - assertProperty(t, &schema, "string", "structStrfmtPtr", "date-time", "StructStrfmtPtr") - assertProperty(t, &schema, "string", "customUrl", "url", "CustomURL") -} - -func TestEmbeddedTypes(t *testing.T) { - sctx := loadClassificationPkgsCtx(t) - decl := getClassificationModel(sctx, "ComplexerOne") - require.NotNil(t, decl) - prs := &schemaBuilder{ - ctx: sctx, - decl: decl, - } - models := make(map[string]spec.Schema) - require.NoError(t, prs.Build(models)) - schema := models["ComplexerOne"] - assertProperty(t, &schema, "integer", "age", "int32", "Age") - assertProperty(t, &schema, "integer", "id", "int64", "ID") - assertProperty(t, &schema, "string", "createdAt", "date-time", "CreatedAt") - assertProperty(t, &schema, "string", "extra", "", "Extra") - assertProperty(t, &schema, "string", "name", "", "Name") - assertProperty(t, &schema, "string", "notes", "", "Notes") -} - -func TestParsePrimitiveSchemaProperty(t *testing.T) { - sctx := loadClassificationPkgsCtx(t) - decl := getClassificationModel(sctx, "PrimateModel") 
- require.NotNil(t, decl) - prs := &schemaBuilder{ - ctx: sctx, - decl: decl, - } - models := make(map[string]spec.Schema) - require.NoError(t, prs.Build(models)) - - schema := models["PrimateModel"] - assertProperty(t, &schema, "boolean", "a", "", "A") - assertProperty(t, &schema, "integer", "b", "int32", "B") - assertProperty(t, &schema, "string", "c", "", "C") - assertProperty(t, &schema, "integer", "d", "int64", "D") - assertProperty(t, &schema, "integer", "e", "int8", "E") - assertProperty(t, &schema, "integer", "f", "int16", "F") - assertProperty(t, &schema, "integer", "g", "int32", "G") - assertProperty(t, &schema, "integer", "h", "int64", "H") - assertProperty(t, &schema, "integer", "i", "uint64", "I") - assertProperty(t, &schema, "integer", "j", "uint8", "J") - assertProperty(t, &schema, "integer", "k", "uint16", "K") - assertProperty(t, &schema, "integer", "l", "uint32", "L") - assertProperty(t, &schema, "integer", "m", "uint64", "M") - assertProperty(t, &schema, "number", "n", "float", "N") - assertProperty(t, &schema, "number", "o", "double", "O") - assertProperty(t, &schema, "integer", "p", "uint8", "P") - assertProperty(t, &schema, "integer", "q", "uint64", "Q") -} - -func TestParseStringFormatSchemaProperty(t *testing.T) { - sctx := loadClassificationPkgsCtx(t) - decl := getClassificationModel(sctx, "FormattedModel") - require.NotNil(t, decl) - prs := &schemaBuilder{ - ctx: sctx, - decl: decl, - } - models := make(map[string]spec.Schema) - require.NoError(t, prs.Build(models)) - - schema := models["FormattedModel"] - assertProperty(t, &schema, "string", "a", "byte", "A") - assertProperty(t, &schema, "string", "b", "creditcard", "B") - assertProperty(t, &schema, "string", "c", "date", "C") - assertProperty(t, &schema, "string", "d", "date-time", "D") - assertProperty(t, &schema, "string", "e", "duration", "E") - assertProperty(t, &schema, "string", "f", "email", "F") - assertProperty(t, &schema, "string", "g", "hexcolor", "G") - assertProperty(t, 
&schema, "string", "h", "hostname", "H") - assertProperty(t, &schema, "string", "i", "ipv4", "I") - assertProperty(t, &schema, "string", "j", "ipv6", "J") - assertProperty(t, &schema, "string", "k", "isbn", "K") - assertProperty(t, &schema, "string", "l", "isbn10", "L") - assertProperty(t, &schema, "string", "m", "isbn13", "M") - assertProperty(t, &schema, "string", "n", "rgbcolor", "N") - assertProperty(t, &schema, "string", "o", "ssn", "O") - assertProperty(t, &schema, "string", "p", "uri", "P") - assertProperty(t, &schema, "string", "q", "uuid", "Q") - assertProperty(t, &schema, "string", "r", "uuid3", "R") - assertProperty(t, &schema, "string", "s", "uuid4", "S") - assertProperty(t, &schema, "string", "t", "uuid5", "T") - assertProperty(t, &schema, "string", "u", "mac", "U") -} - -func TestStringStructTag(t *testing.T) { - sctx := loadClassificationPkgsCtx(t) - decl := getClassificationModel(sctx, "JSONString") - require.NotNil(t, decl) - prs := &schemaBuilder{ - ctx: sctx, - decl: decl, - } - models := make(map[string]spec.Schema) - require.NoError(t, prs.Build(models)) - - sch := models["jsonString"] - assertProperty(t, &sch, "string", "someInt", "int64", "SomeInt") - assertProperty(t, &sch, "string", "someInt8", "int8", "SomeInt8") - assertProperty(t, &sch, "string", "someInt16", "int16", "SomeInt16") - assertProperty(t, &sch, "string", "someInt32", "int32", "SomeInt32") - assertProperty(t, &sch, "string", "someInt64", "int64", "SomeInt64") - assertProperty(t, &sch, "string", "someUint", "uint64", "SomeUint") - assertProperty(t, &sch, "string", "someUint8", "uint8", "SomeUint8") - assertProperty(t, &sch, "string", "someUint16", "uint16", "SomeUint16") - assertProperty(t, &sch, "string", "someUint32", "uint32", "SomeUint32") - assertProperty(t, &sch, "string", "someUint64", "uint64", "SomeUint64") - assertProperty(t, &sch, "string", "someFloat64", "double", "SomeFloat64") - assertProperty(t, &sch, "string", "someString", "", "SomeString") - assertProperty(t, 
&sch, "string", "someBool", "", "SomeBool") - assertProperty(t, &sch, "string", "SomeDefaultInt", "int64", "") - - prop, ok := sch.Properties["somethingElse"] - if assert.TrueT(t, ok) { - assert.NotEqual(t, "string", prop.Type) - } -} - -func TestPtrFieldStringStructTag(t *testing.T) { - sctx := loadClassificationPkgsCtx(t) - decl := getClassificationModel(sctx, "JSONPtrString") - require.NotNil(t, decl) - prs := &schemaBuilder{ - ctx: sctx, - decl: decl, - } - models := make(map[string]spec.Schema) - require.NoError(t, prs.Build(models)) - - sch := models["jsonPtrString"] - assertProperty(t, &sch, "string", "someInt", "int64", "SomeInt") - assertProperty(t, &sch, "string", "someInt8", "int8", "SomeInt8") - assertProperty(t, &sch, "string", "someInt16", "int16", "SomeInt16") - assertProperty(t, &sch, "string", "someInt32", "int32", "SomeInt32") - assertProperty(t, &sch, "string", "someInt64", "int64", "SomeInt64") - assertProperty(t, &sch, "string", "someUint", "uint64", "SomeUint") - assertProperty(t, &sch, "string", "someUint8", "uint8", "SomeUint8") - assertProperty(t, &sch, "string", "someUint16", "uint16", "SomeUint16") - assertProperty(t, &sch, "string", "someUint32", "uint32", "SomeUint32") - assertProperty(t, &sch, "string", "someUint64", "uint64", "SomeUint64") - assertProperty(t, &sch, "string", "someFloat64", "double", "SomeFloat64") - assertProperty(t, &sch, "string", "someString", "", "SomeString") - assertProperty(t, &sch, "string", "someBool", "", "SomeBool") - - prop, ok := sch.Properties["somethingElse"] - if assert.TrueT(t, ok) { - assert.NotEqual(t, "string", prop.Type) - } -} - -func TestIgnoredStructField(t *testing.T) { - sctx := loadClassificationPkgsCtx(t) - decl := getClassificationModel(sctx, "IgnoredFields") - require.NotNil(t, decl) - prs := &schemaBuilder{ - ctx: sctx, - decl: decl, - } - models := make(map[string]spec.Schema) - require.NoError(t, prs.Build(models)) - - sch := models["ignoredFields"] - assertProperty(t, &sch, "string", 
"someIncludedField", "", "SomeIncludedField") - assertProperty(t, &sch, "string", "someErroneouslyIncludedField", "", "SomeErroneouslyIncludedField") - assert.Len(t, sch.Properties, 2) -} - -func TestParseStructFields(t *testing.T) { - sctx := loadClassificationPkgsCtx(t) - decl := getClassificationModel(sctx, "SimpleComplexModel") - require.NotNil(t, decl) - prs := &schemaBuilder{ - ctx: sctx, - decl: decl, - } - models := make(map[string]spec.Schema) - require.NoError(t, prs.Build(models)) - - schema := models["SimpleComplexModel"] - assertProperty(t, &schema, "object", "emb", "", "Emb") - eSchema := schema.Properties["emb"] - assertProperty(t, &eSchema, "integer", "cid", "int64", "CID") - assertProperty(t, &eSchema, "string", "baz", "", "Baz") - - assertRef(t, &schema, "top", "Top", "#/definitions/Something") - assertRef(t, &schema, "notSel", "NotSel", "#/definitions/NotSelected") -} - -func TestParsePointerFields(t *testing.T) { - sctx := loadClassificationPkgsCtx(t) - decl := getClassificationModel(sctx, "Pointdexter") - require.NotNil(t, decl) - prs := &schemaBuilder{ - ctx: sctx, - decl: decl, - } - models := make(map[string]spec.Schema) - require.NoError(t, prs.Build(models)) - - schema := models["Pointdexter"] - - assertProperty(t, &schema, "integer", "id", "int64", "ID") - assertProperty(t, &schema, "string", "name", "", "Name") - assertProperty(t, &schema, "object", "emb", "", "Emb") - assertProperty(t, &schema, "string", "t", "uuid5", "T") - eSchema := schema.Properties["emb"] - assertProperty(t, &eSchema, "integer", "cid", "int64", "CID") - assertProperty(t, &eSchema, "string", "baz", "", "Baz") - - assertRef(t, &schema, "top", "Top", "#/definitions/Something") - assertRef(t, &schema, "notSel", "NotSel", "#/definitions/NotSelected") -} - -func TestEmbeddedStarExpr(t *testing.T) { - sctx := loadClassificationPkgsCtx(t) - decl := getClassificationModel(sctx, "EmbeddedStarExpr") - require.NotNil(t, decl) - prs := &schemaBuilder{ - ctx: sctx, - decl: decl, 
- } - models := make(map[string]spec.Schema) - require.NoError(t, prs.Build(models)) - - schema := models["EmbeddedStarExpr"] - - assertProperty(t, &schema, "integer", "embeddedMember", "int64", "EmbeddedMember") - assertProperty(t, &schema, "integer", "notEmbedded", "int64", "NotEmbedded") -} - -func TestArrayOfPointers(t *testing.T) { - sctx := loadClassificationPkgsCtx(t) - decl := getClassificationModel(sctx, "Cars") - require.NotNil(t, decl) - prs := &schemaBuilder{ - ctx: sctx, - decl: decl, - } - models := make(map[string]spec.Schema) - require.NoError(t, prs.Build(models)) - - schema := models["cars"] - assertProperty(t, &schema, "array", "cars", "", "Cars") -} - -func TestOverridingOneIgnore(t *testing.T) { - sctx := loadClassificationPkgsCtx(t) - decl := getClassificationModel(sctx, "OverridingOneIgnore") - require.NotNil(t, decl) - prs := &schemaBuilder{ - ctx: sctx, - decl: decl, - } - models := make(map[string]spec.Schema) - require.NoError(t, prs.Build(models)) - - schema := models["OverridingOneIgnore"] - - assertProperty(t, &schema, "integer", "id", "int64", "ID") - assertProperty(t, &schema, "string", "name", "", "Name") - assert.Len(t, schema.Properties, 2) -} - -type collectionAssertions struct { - assertProperty func(t *testing.T, schema *spec.Schema, typeName, jsonName, format, goName string) - assertRef func(t *testing.T, schema *spec.Schema, jsonName, goName, fragment string) - nestedSchema func(prop spec.Schema) *spec.Schema -} - -func testParseCollectionFields( - t *testing.T, - modelName string, - ca collectionAssertions, -) { - t.Helper() - sctx := loadClassificationPkgsCtx(t) - decl := getClassificationModel(sctx, modelName) - require.NotNil(t, decl) - prs := &schemaBuilder{ - ctx: sctx, - decl: decl, - } - models := make(map[string]spec.Schema) - require.NoError(t, prs.Build(models)) - - schema := models[modelName] - - ca.assertProperty(t, &schema, "integer", "ids", "int64", "IDs") - ca.assertProperty(t, &schema, "string", "names", "", 
"Names") - ca.assertProperty(t, &schema, "string", "uuids", "uuid", "UUIDs") - ca.assertProperty(t, &schema, "object", "embs", "", "Embs") - eSchema := ca.nestedSchema(schema.Properties["embs"]) - ca.assertProperty(t, eSchema, "integer", "cid", "int64", "CID") - ca.assertProperty(t, eSchema, "string", "baz", "", "Baz") - - ca.assertRef(t, &schema, "tops", "Tops", "#/definitions/Something") - ca.assertRef(t, &schema, "notSels", "NotSels", "#/definitions/NotSelected") - - ca.assertProperty(t, &schema, "integer", "ptrIds", "int64", "PtrIDs") - ca.assertProperty(t, &schema, "string", "ptrNames", "", "PtrNames") - ca.assertProperty(t, &schema, "string", "ptrUuids", "uuid", "PtrUUIDs") - ca.assertProperty(t, &schema, "object", "ptrEmbs", "", "PtrEmbs") - eSchema = ca.nestedSchema(schema.Properties["ptrEmbs"]) - ca.assertProperty(t, eSchema, "integer", "ptrCid", "int64", "PtrCID") - ca.assertProperty(t, eSchema, "string", "ptrBaz", "", "PtrBaz") - - ca.assertRef(t, &schema, "ptrTops", "PtrTops", "#/definitions/Something") - ca.assertRef(t, &schema, "ptrNotSels", "PtrNotSels", "#/definitions/NotSelected") -} - -func TestParseSliceFields(t *testing.T) { - testParseCollectionFields(t, "SliceAndDice", collectionAssertions{ - assertProperty: assertArrayProperty, - assertRef: assertArrayRef, - nestedSchema: func(prop spec.Schema) *spec.Schema { return prop.Items.Schema }, - }) -} - -func TestParseMapFields(t *testing.T) { - testParseCollectionFields(t, "MapTastic", collectionAssertions{ - assertProperty: assertMapProperty, - assertRef: assertMapRef, - nestedSchema: func(prop spec.Schema) *spec.Schema { return prop.AdditionalProperties.Schema }, - }) -} - -func TestInterfaceField(t *testing.T) { - sctx := loadClassificationPkgsCtx(t) - decl := getClassificationModel(sctx, "Interfaced") - require.NotNil(t, decl) - prs := &schemaBuilder{ - ctx: sctx, - decl: decl, - } - models := make(map[string]spec.Schema) - require.NoError(t, prs.Build(models)) - - schema := 
models["Interfaced"] - assertProperty(t, &schema, "", "custom_data", "", "CustomData") -} - -func TestAliasedTypes(t *testing.T) { - sctx := loadClassificationPkgsCtx(t) - decl := getClassificationModel(sctx, "OtherTypes") - require.NotNil(t, decl) - prs := &schemaBuilder{ - ctx: sctx, - decl: decl, - } - models := make(map[string]spec.Schema) - require.NoError(t, prs.Build(models)) - - schema := models["OtherTypes"] - assertRef(t, &schema, "named", "Named", "#/definitions/SomeStringType") - assertRef(t, &schema, "numbered", "Numbered", "#/definitions/SomeIntType") - assertProperty(t, &schema, "string", "dated", "date-time", "Dated") - assertRef(t, &schema, "timed", "Timed", "#/definitions/SomeTimedType") - assertRef(t, &schema, "petted", "Petted", "#/definitions/SomePettedType") - assertRef(t, &schema, "somethinged", "Somethinged", "#/definitions/SomethingType") - assertRef(t, &schema, "strMap", "StrMap", "#/definitions/SomeStringMap") - assertRef(t, &schema, "strArrMap", "StrArrMap", "#/definitions/SomeArrayStringMap") - - assertRef(t, &schema, "manyNamed", "ManyNamed", "#/definitions/SomeStringsType") - assertRef(t, &schema, "manyNumbered", "ManyNumbered", "#/definitions/SomeIntsType") - assertArrayProperty(t, &schema, "string", "manyDated", "date-time", "ManyDated") - assertRef(t, &schema, "manyTimed", "ManyTimed", "#/definitions/SomeTimedsType") - assertRef(t, &schema, "manyPetted", "ManyPetted", "#/definitions/SomePettedsType") - assertRef(t, &schema, "manySomethinged", "ManySomethinged", "#/definitions/SomethingsType") - - assertArrayRef(t, &schema, "nameds", "Nameds", "#/definitions/SomeStringType") - assertArrayRef(t, &schema, "numbereds", "Numbereds", "#/definitions/SomeIntType") - assertArrayProperty(t, &schema, "string", "dateds", "date-time", "Dateds") - assertArrayRef(t, &schema, "timeds", "Timeds", "#/definitions/SomeTimedType") - assertArrayRef(t, &schema, "petteds", "Petteds", "#/definitions/SomePettedType") - assertArrayRef(t, &schema, 
"somethingeds", "Somethingeds", "#/definitions/SomethingType") - - assertRef(t, &schema, "modsNamed", "ModsNamed", "#/definitions/modsSomeStringType") - assertRef(t, &schema, "modsNumbered", "ModsNumbered", "#/definitions/modsSomeIntType") - assertProperty(t, &schema, "string", "modsDated", "date-time", "ModsDated") - assertRef(t, &schema, "modsTimed", "ModsTimed", "#/definitions/modsSomeTimedType") - assertRef(t, &schema, "modsPetted", "ModsPetted", "#/definitions/modsSomePettedType") - - assertArrayRef(t, &schema, "modsNameds", "ModsNameds", "#/definitions/modsSomeStringType") - assertArrayRef(t, &schema, "modsNumbereds", "ModsNumbereds", "#/definitions/modsSomeIntType") - assertArrayProperty(t, &schema, "string", "modsDateds", "date-time", "ModsDateds") - assertArrayRef(t, &schema, "modsTimeds", "ModsTimeds", "#/definitions/modsSomeTimedType") - assertArrayRef(t, &schema, "modsPetteds", "ModsPetteds", "#/definitions/modsSomePettedType") - - assertRef(t, &schema, "manyModsNamed", "ManyModsNamed", "#/definitions/modsSomeStringsType") - assertRef(t, &schema, "manyModsNumbered", "ManyModsNumbered", "#/definitions/modsSomeIntsType") - assertArrayProperty(t, &schema, "string", "manyModsDated", "date-time", "ManyModsDated") - assertRef(t, &schema, "manyModsTimed", "ManyModsTimed", "#/definitions/modsSomeTimedsType") - assertRef(t, &schema, "manyModsPetted", "ManyModsPetted", "#/definitions/modsSomePettedsType") - assertRef(t, &schema, "manyModsPettedPtr", "ManyModsPettedPtr", "#/definitions/modsSomePettedsPtrType") - - assertProperty(t, &schema, "string", "namedAlias", "", "NamedAlias") - assertProperty(t, &schema, "integer", "numberedAlias", "int64", "NumberedAlias") - assertArrayProperty(t, &schema, "string", "namedsAlias", "", "NamedsAlias") - assertArrayProperty(t, &schema, "integer", "numberedsAlias", "int64", "NumberedsAlias") -} - -func TestAliasedModels(t *testing.T) { - sctx := loadClassificationPkgsCtx(t) - - names := []string{ - "SomeStringType", - 
"SomeIntType", - "SomeTimeType", - "SomeTimedType", - "SomePettedType", - "SomethingType", - "SomeStringsType", - "SomeIntsType", - "SomeTimesType", - "SomeTimedsType", - "SomePettedsType", - "SomethingsType", - "SomeObject", - "SomeStringMap", - "SomeIntMap", - "SomeTimeMap", - "SomeTimedMap", - "SomePettedMap", - "SomeSomethingMap", - } - - defs := make(map[string]spec.Schema) - for _, nm := range names { - decl := getClassificationModel(sctx, nm) - require.NotNil(t, decl) - - prs := &schemaBuilder{ - decl: decl, - ctx: sctx, - } - require.NoError(t, prs.Build(defs)) - } - - for k := range defs { - for i, b := range names { - if b == k { - // remove the entry from the collection - names = append(names[:i], names[i+1:]...) - } - } - } - if assert.Empty(t, names) { - // single value types - assertDefinition(t, defs, "SomeStringType", "string", "") - assertDefinition(t, defs, "SomeIntType", "integer", "int64") - assertDefinition(t, defs, "SomeTimeType", "string", "date-time") - assertDefinition(t, defs, "SomeTimedType", "string", "date-time") - assertRefDefinition(t, defs, "SomePettedType", "#/definitions/pet", "") - assertRefDefinition(t, defs, "SomethingType", "#/definitions/Something", "") - - // slice types - assertArrayDefinition(t, defs, "SomeStringsType", "string", "", "") - assertArrayDefinition(t, defs, "SomeIntsType", "integer", "int64", "") - assertArrayDefinition(t, defs, "SomeTimesType", "string", "date-time", "") - assertArrayDefinition(t, defs, "SomeTimedsType", "string", "date-time", "") - assertArrayWithRefDefinition(t, defs, "SomePettedsType", "#/definitions/pet", "") - assertArrayWithRefDefinition(t, defs, "SomethingsType", "#/definitions/Something", "") - - // map types - assertMapDefinition(t, defs, "SomeObject", "object", "", "") - assertMapDefinition(t, defs, "SomeStringMap", "string", "", "") - assertMapDefinition(t, defs, "SomeIntMap", "integer", "int64", "") - assertMapDefinition(t, defs, "SomeTimeMap", "string", "date-time", "") - 
assertMapDefinition(t, defs, "SomeTimedMap", "string", "date-time", "") - assertMapWithRefDefinition(t, defs, "SomePettedMap", "#/definitions/pet", "") - assertMapWithRefDefinition(t, defs, "SomeSomethingMap", "#/definitions/Something", "") - } -} - -func TestAliasedTopLevelModels(t *testing.T) { - t.Run("with options: no scan models, with aliases as ref", func(t *testing.T) { - t.Run("with goparsing/spec", func(t *testing.T) { - sctx, err := newScanCtx(&Options{ - Packages: []string{ - "./goparsing/spec", - }, - WorkDir: "fixtures", - ScanModels: false, - RefAliases: true, - }) - require.NoError(t, err) - - t.Run("should find User definition in source", func(t *testing.T) { - _, hasUser := sctx.FindDecl(fixturesModule+"/goparsing/spec", "User") - require.TrueT(t, hasUser) - }) - - var decl *entityDecl - t.Run("should find Customer definition in source", func(t *testing.T) { - var hasCustomer bool - decl, hasCustomer = sctx.FindDecl(fixturesModule+"/goparsing/spec", "Customer") - require.TrueT(t, hasCustomer) - }) - - t.Run("with schema builder", func(t *testing.T) { - require.NotNil(t, decl) - builder := &schemaBuilder{ - ctx: sctx, - decl: decl, - } - - t.Run("should build model for Customer", func(t *testing.T) { - models := make(map[string]spec.Schema) - require.NoError(t, builder.Build(models)) - - assertRefDefinition(t, models, "Customer", "#/definitions/User", "") - }) - - t.Run("should have discovered models for User and Customer", func(t *testing.T) { - require.Len(t, builder.postDecls, 2) - foundUserIndex := -1 - foundCustomerIndex := -1 - - for i, discoveredDecl := range builder.postDecls { - switch discoveredDecl.Obj().Name() { - case "User": - foundUserIndex = i - case "Customer": - foundCustomerIndex = i - } - } - require.GreaterOrEqualT(t, foundUserIndex, 0) - require.GreaterOrEqualT(t, foundCustomerIndex, 0) - - userBuilder := &schemaBuilder{ - ctx: sctx, - decl: builder.postDecls[foundUserIndex], - } - - t.Run("should build model for User", func(t 
*testing.T) { - models := make(map[string]spec.Schema) - require.NoError(t, userBuilder.Build(models)) - - require.MapContainsT(t, models, "User") - - user := models["User"] - assert.TrueT(t, user.Type.Contains("object")) - - userProperties := user.Properties - require.MapContainsT(t, userProperties, "name") - }) - }) - }) - }) - }) - - t.Run("with options: no scan models, without aliases as ref", func(t *testing.T) { - t.Run("with goparsing/spec", func(t *testing.T) { - sctx, err := newScanCtx(&Options{ - Packages: []string{ - "./goparsing/spec", - }, - WorkDir: "fixtures", - ScanModels: false, - RefAliases: false, - }) - require.NoError(t, err) - - t.Run("should find User definition in source", func(t *testing.T) { - _, hasUser := sctx.FindDecl(fixturesModule+"/goparsing/spec", "User") - require.TrueT(t, hasUser) - }) - - var decl *entityDecl - t.Run("should find Customer definition in source", func(t *testing.T) { - var hasCustomer bool - decl, hasCustomer = sctx.FindDecl(fixturesModule+"/goparsing/spec", "Customer") - require.TrueT(t, hasCustomer) - }) - - t.Run("with schema builder", func(t *testing.T) { - require.NotNil(t, decl) - builder := &schemaBuilder{ - ctx: sctx, - decl: decl, - } - - t.Run("should build model for Customer", func(t *testing.T) { - models := make(map[string]spec.Schema) - require.NoError(t, builder.Build(models)) - - require.MapContainsT(t, models, "Customer") - customer := models["Customer"] - require.MapNotContainsT(t, models, "User") - - assert.TrueT(t, customer.Type.Contains("object")) - - customerProperties := customer.Properties - assert.MapContainsT(t, customerProperties, "name") - assert.NotEmpty(t, customer.Title) - }) - - t.Run("should have discovered only Customer", func(t *testing.T) { - require.Len(t, builder.postDecls, 1) - discovered := builder.postDecls[0] - assert.EqualT(t, "Customer", discovered.Obj().Name()) - }) - }) - }) - }) -} - -func TestAliasedSchemas(t *testing.T) { - fixturesPath := filepath.Join("fixtures", 
"goparsing", "go123", "aliased", "schema") - var sp *spec.Swagger - t.Run("end-to-end source scan should succeed", func(t *testing.T) { - var err error - sp, err = Run(&Options{ - WorkDir: fixturesPath, - BuildTags: "testscanner", // fixture code is excluded from normal build - ScanModels: true, - RefAliases: true, - }) - require.NoError(t, err) - }) - - if enableSpecOutput { - // for debugging, output the resulting spec as YAML - yml, err := marshalToYAMLFormat(sp) - require.NoError(t, err) - - _, _ = os.Stdout.Write(yml) - } - - t.Run("type aliased to any should yield an empty schema", func(t *testing.T) { - anything, ok := sp.Definitions["Anything"] - require.TrueT(t, ok) - - assertHasGoPackageExt(t, anything) - assertHasTitle(t, anything) - - // after stripping extension and title, should be empty - anything.VendorExtensible = spec.VendorExtensible{} - anything.Title = "" - assert.Equal(t, spec.Schema{}, anything) - }) - - t.Run("type aliased to an empty struct should yield an empty object", func(t *testing.T) { - empty, ok := sp.Definitions["Empty"] - require.TrueT(t, ok) - - assertHasGoPackageExt(t, empty) - assertHasTitle(t, empty) - - // after stripping extension and title, should be empty - empty.VendorExtensible = spec.VendorExtensible{} - empty.Title = "" - emptyObject := &spec.Schema{} - emptyObject = emptyObject.Typed("object", "").WithProperties(map[string]spec.Schema{}) - assert.Equal(t, *emptyObject, empty) - }) - - t.Run("struct fields defined as any or interface{} should yield properties with an empty schema", func(t *testing.T) { - testAliasedExtendedIDAllOf(t, sp) - }) - - t.Run("aliased primitive types remain unaffected", func(t *testing.T) { - uuid, ok := sp.Definitions["UUID"] - require.TrueT(t, ok) - - assertHasGoPackageExt(t, uuid) - assertHasTitle(t, uuid) - - // after strip extension, should be equal to integer with format - uuid.VendorExtensible = spec.VendorExtensible{} - uuid.Title = "" - intSchema := &spec.Schema{} - intSchema = 
intSchema.Typed("integer", "int64") - assert.Equal(t, *intSchema, uuid) - }) - - t.Run("with struct having fields aliased to any or interface{}", func(t *testing.T) { - order, ok := sp.Definitions["order"] - require.TrueT(t, ok) - - t.Run("field defined on an alias should produce a ref", func(t *testing.T) { - t.Run("with alias to any", func(t *testing.T) { - _, ok = order.Properties["DeliveryOption"] - require.TrueT(t, ok) - assertRef(t, &order, "DeliveryOption", "", "#/definitions/Anything") // points to an alias to any - }) - - t.Run("with alias to primitive type", func(t *testing.T) { - _, ok = order.Properties["id"] - require.TrueT(t, ok) - assertRef(t, &order, "id", "", "#/definitions/UUID") // points to an alias to any - }) - - t.Run("with alias to struct type", func(t *testing.T) { - _, ok = order.Properties["extended_id"] - require.TrueT(t, ok) - assertRef(t, &order, "extended_id", "", "#/definitions/ExtendedID") // points to an alias to any - }) - - t.Run("inside anonymous array", func(t *testing.T) { - items, ok := order.Properties["items"] - require.TrueT(t, ok) - - require.NotNil(t, items) - require.NotNil(t, items.Items) - - assert.TrueT(t, items.Type.Contains("array")) - t.Run("field as any should render as empty object", func(t *testing.T) { - require.NotNil(t, items.Items.Schema) - itemsSchema := items.Items.Schema - assert.TrueT(t, itemsSchema.Type.Contains("object")) - - require.MapContainsT(t, itemsSchema.Properties, "extra_options") - extraOptions := itemsSchema.Properties["extra_options"] - assertHasExtension(t, extraOptions, "x-go-name") - - extraOptions.VendorExtensible = spec.VendorExtensible{} - empty := spec.Schema{} - assert.Equal(t, empty, extraOptions) - }) - }) - }) - - t.Run("struct field defined as any should produce an empty schema", func(t *testing.T) { - extras, ok := order.Properties["Extras"] - require.TrueT(t, ok) - assert.Equal(t, spec.Schema{}, extras) - }) - - t.Run("struct field defined as interface{} should produce an 
empty schema", func(t *testing.T) { - extras, ok := order.Properties["MoreExtras"] - require.TrueT(t, ok) - assert.Equal(t, spec.Schema{}, extras) - }) - }) - - t.Run("type redefinitions and syntactic aliases to any should render the same", func(t *testing.T) { - whatnot, ok := sp.Definitions["whatnot"] - require.TrueT(t, ok) - // after strip extension, should be empty - whatnot.VendorExtensible = spec.VendorExtensible{} - assert.Equal(t, spec.Schema{}, whatnot) - - whatnotAlias, ok := sp.Definitions["whatnot_alias"] - require.TrueT(t, ok) - // after strip extension, should be empty - whatnotAlias.VendorExtensible = spec.VendorExtensible{} - assert.Equal(t, spec.Schema{}, whatnotAlias) - - whatnot2, ok := sp.Definitions["whatnot2"] - require.TrueT(t, ok) - // after strip extension, should be empty - whatnot2.VendorExtensible = spec.VendorExtensible{} - assert.Equal(t, spec.Schema{}, whatnot2) - - whatnot2Alias, ok := sp.Definitions["whatnot2_alias"] - require.TrueT(t, ok) - // after strip extension, should be empty - whatnot2Alias.VendorExtensible = spec.VendorExtensible{} - assert.Equal(t, spec.Schema{}, whatnot2Alias) - }) - - t.Run("alias to another alias is resolved as a ref", func(t *testing.T) { - void, ok := sp.Definitions["void"] - require.TrueT(t, ok) - - assertIsRef(t, &void, "#/definitions/Empty") // points to another alias - }) - - t.Run("type redefinition to anonymous is not an alias and is resolved as an object", func(t *testing.T) { - empty, ok := sp.Definitions["empty_redefinition"] - require.TrueT(t, ok) - - assertHasGoPackageExt(t, empty) - assertHasNoTitle(t, empty) - - // after stripping extension and title, should be empty - empty.VendorExtensible = spec.VendorExtensible{} - emptyObject := &spec.Schema{} - emptyObject = emptyObject.Typed("object", "").WithProperties(map[string]spec.Schema{}) - assert.Equal(t, *emptyObject, empty) - }) - - t.Run("alias to a named interface should render as a $ref", func(t *testing.T) { - iface, ok := 
sp.Definitions["iface_alias"] - require.TrueT(t, ok) - - assertIsRef(t, &iface, "#/definitions/iface") // points to an interface - }) - - t.Run("interface redefinition is not an alias and should render as a $ref", func(t *testing.T) { - iface, ok := sp.Definitions["iface_redefinition"] - require.TrueT(t, ok) - - assertIsRef(t, &iface, "#/definitions/iface") // points to an interface - }) - - t.Run("anonymous interface should render a schema", func(t *testing.T) { - iface, ok := sp.Definitions["anonymous_iface"] - require.TrueT(t, ok) - - require.NotEmpty(t, iface.Properties) - require.MapContainsT(t, iface.Properties, "String") - }) - - t.Run("anonymous struct should render as an anonymous schema", func(t *testing.T) { - obj, ok := sp.Definitions["anonymous_struct"] - require.TrueT(t, ok) - - require.NotEmpty(t, obj.Properties) - require.MapContainsT(t, obj.Properties, "A") - - a := obj.Properties["A"] - assert.TrueT(t, a.Type.Contains("object")) - require.MapContainsT(t, a.Properties, "B") - b := a.Properties["B"] - assert.TrueT(t, b.Type.Contains("integer")) - }) - - t.Run("standalone model with a tag should be rendered", func(t *testing.T) { - shouldSee, ok := sp.Definitions["ShouldSee"] - require.TrueT(t, ok) - assert.TrueT(t, shouldSee.Type.Contains("boolean")) - }) - - t.Run("standalone model without a tag should not be rendered", func(t *testing.T) { - _, ok := sp.Definitions["ShouldNotSee"] - require.FalseT(t, ok) - - _, ok = sp.Definitions["ShouldNotSeeSlice"] - require.FalseT(t, ok) - - _, ok = sp.Definitions["ShouldNotSeeMap"] - require.FalseT(t, ok) - }) - - t.Run("with aliases in slices and arrays", func(t *testing.T) { - t.Run("slice redefinition should render as schema", func(t *testing.T) { - t.Run("with anonymous slice", func(t *testing.T) { - slice, ok := sp.Definitions["slice_type"] // []any - require.TrueT(t, ok) - assert.TrueT(t, slice.Type.Contains("array")) - require.NotNil(t, slice.Items) - require.NotNil(t, slice.Items.Schema) - - 
assert.Equal(t, &spec.Schema{}, slice.Items.Schema) - }) - - t.Run("with anonymous struct", func(t *testing.T) { - slice, ok := sp.Definitions["slice_of_structs"] // type X = []struct{} - require.TrueT(t, ok) - assert.TrueT(t, slice.Type.Contains("array")) - - require.NotNil(t, slice.Items) - require.NotNil(t, slice.Items.Schema) - - emptyObject := &spec.Schema{} - emptyObject = emptyObject.Typed("object", "").WithProperties(map[string]spec.Schema{}) - assert.Equal(t, emptyObject, slice.Items.Schema) - }) - }) - - t.Run("alias to anonymous slice should render as schema", func(t *testing.T) { - t.Run("with anonymous slice", func(t *testing.T) { - slice, ok := sp.Definitions["slice_alias"] // type X = []any - require.TrueT(t, ok) - assert.TrueT(t, slice.Type.Contains("array")) - - require.NotNil(t, slice.Items) - require.NotNil(t, slice.Items.Schema) - - assert.Equal(t, &spec.Schema{}, slice.Items.Schema) - }) - - t.Run("with anonymous struct", func(t *testing.T) { - slice, ok := sp.Definitions["slice_of_structs_alias"] // type X = []struct{} - require.TrueT(t, ok) - assert.TrueT(t, slice.Type.Contains("array")) - require.NotNil(t, slice.Items) - require.NotNil(t, slice.Items.Schema) - - emptyObject := &spec.Schema{} - emptyObject = emptyObject.Typed("object", "").WithProperties(map[string]spec.Schema{}) - assert.Equal(t, emptyObject, slice.Items.Schema) - }) - }) - - t.Run("alias to named alias to anonymous slice should render as ref", func(t *testing.T) { - slice, ok := sp.Definitions["slice_to_slice"] // type X = Slice - require.TrueT(t, ok) - assertIsRef(t, &slice, "#/definitions/slice_type") // points to a named alias - }) - }) - - t.Run("with aliases in interfaces", func(t *testing.T) { - testAliasedInterfaceVariants(t, sp) - }) - - t.Run("with aliases in embedded types", func(t *testing.T) { - testAliasedEmbeddedTypes(t, sp) - }) -} - -func testAliasedExtendedIDAllOf(t *testing.T, sp *spec.Swagger) { - t.Helper() - extended, ok := sp.Definitions["ExtendedID"] 
- require.TrueT(t, ok) - - t.Run("struct with an embedded alias should render as allOf", func(t *testing.T) { - require.Len(t, extended.AllOf, 2) - assertHasTitle(t, extended) - - foundAliased := false - foundProps := false - for idx, member := range extended.AllOf { - isProps := len(member.Properties) > 0 - isAlias := member.Ref.String() != "" - - switch { - case isProps: - props := member - t.Run("with property of type any", func(t *testing.T) { - evenMore, ok := props.Properties["EvenMore"] - require.TrueT(t, ok) - assert.Equal(t, spec.Schema{}, evenMore) - }) - - t.Run("with property of type interface{}", func(t *testing.T) { - evenMore, ok := props.Properties["StillMore"] - require.TrueT(t, ok) - assert.Equal(t, spec.Schema{}, evenMore) - }) - - t.Run("non-aliased properties remain unaffected", func(t *testing.T) { - more, ok := props.Properties["more"] - require.TrueT(t, ok) - - assertHasExtension(t, more, "x-go-name") // because we have a struct tag - assertHasNoTitle(t, more) - - // after stripping extension and title, should be empty - more.VendorExtensible = spec.VendorExtensible{} - - strSchema := &spec.Schema{} - strSchema = strSchema.Typed("string", "") - assert.Equal(t, *strSchema, more) - }) - foundProps = true - case isAlias: - assertIsRef(t, &member, "#/definitions/Empty") - foundAliased = true - default: - assert.Failf(t, "embedded members in struct are not as expected", "unexpected member in allOf: %d", idx) - } - } - require.TrueT(t, foundProps) - require.TrueT(t, foundAliased) - }) -} - -func testAliasedInterfaceVariants(t *testing.T, sp *spec.Swagger) { - t.Helper() - - t.Run("should render anonymous interface as a schema", func(t *testing.T) { - iface, ok := sp.Definitions["anonymous_iface"] // e.g. 
type X interface{ String() string} - require.TrueT(t, ok) - - require.TrueT(t, iface.Type.Contains("object")) - require.MapContainsT(t, iface.Properties, "String") - prop := iface.Properties["String"] - require.TrueT(t, prop.Type.Contains("string")) - assert.Len(t, iface.Properties, 1) - }) - - t.Run("alias to an anonymous interface should render as a $ref", func(t *testing.T) { - iface, ok := sp.Definitions["anonymous_iface_alias"] - require.TrueT(t, ok) - - assertIsRef(t, &iface, "#/definitions/anonymous_iface") // points to an anonymous interface - }) - - t.Run("named interface should render as a schema", func(t *testing.T) { - iface, ok := sp.Definitions["iface"] - require.TrueT(t, ok) - - require.TrueT(t, iface.Type.Contains("object")) - require.MapContainsT(t, iface.Properties, "Get") - prop := iface.Properties["Get"] - require.TrueT(t, prop.Type.Contains("string")) - assert.Len(t, iface.Properties, 1) - }) - - t.Run("named interface with embedded types should render as allOf", func(t *testing.T) { - iface, ok := sp.Definitions["iface_embedded"] - require.TrueT(t, ok) - - require.Len(t, iface.AllOf, 2) - foundEmbedded := false - foundMethod := false - for idx, member := range iface.AllOf { - require.TrueT(t, member.Type.Contains("object")) - require.NotEmpty(t, member.Properties) - require.Len(t, member.Properties, 1) - propGet, isEmbedded := member.Properties["Get"] - propMethod, isMethod := member.Properties["Dump"] - - switch { - case isEmbedded: - assert.TrueT(t, propGet.Type.Contains("string")) - foundEmbedded = true - case isMethod: - assert.TrueT(t, propMethod.Type.Contains("array")) - foundMethod = true - default: - assert.Failf(t, "embedded members in interface are not as expected", "unexpected member in allOf: %d", idx) - } - } - require.TrueT(t, foundEmbedded) - require.TrueT(t, foundMethod) - }) - - t.Run("named interface with embedded anonymous interface should render as allOf", func(t *testing.T) { - iface, ok := 
sp.Definitions["iface_embedded_anonymous"] - require.TrueT(t, ok) - - require.Len(t, iface.AllOf, 2) - foundEmbedded := false - foundAnonymous := false - for idx, member := range iface.AllOf { - require.TrueT(t, member.Type.Contains("object")) - require.NotEmpty(t, member.Properties) - require.Len(t, member.Properties, 1) - propGet, isEmbedded := member.Properties["String"] - propAnonymous, isAnonymous := member.Properties["Error"] - - switch { - case isEmbedded: - assert.TrueT(t, propGet.Type.Contains("string")) - foundEmbedded = true - case isAnonymous: - assert.TrueT(t, propAnonymous.Type.Contains("string")) - foundAnonymous = true - default: - assert.Failf(t, "embedded members in interface are not as expected", "unexpected member in allOf: %d", idx) - } - } - require.TrueT(t, foundEmbedded) - require.TrueT(t, foundAnonymous) - }) - - t.Run("composition of empty interfaces is rendered as an empty schema", func(t *testing.T) { - iface, ok := sp.Definitions["iface_embedded_empty"] - require.TrueT(t, ok) - - iface.VendorExtensible = spec.VendorExtensible{} - assert.Equal(t, spec.Schema{}, iface) - }) - - t.Run("interface embedded with an alias should be rendered as allOf, with a ref", func(t *testing.T) { - iface, ok := sp.Definitions["iface_embedded_with_alias"] - require.TrueT(t, ok) - - require.Len(t, iface.AllOf, 3) - foundEmbedded := false - foundEmbeddedAnon := false - foundRef := false - for idx, member := range iface.AllOf { - propGet, isEmbedded := member.Properties["String"] - propAnonymous, isAnonymous := member.Properties["Dump"] - isRef := member.Ref.String() != "" - - switch { - case isEmbedded: - require.TrueT(t, member.Type.Contains("object")) - require.Len(t, member.Properties, 1) - assert.TrueT(t, propGet.Type.Contains("string")) - foundEmbedded = true - case isAnonymous: - require.TrueT(t, member.Type.Contains("object")) - require.Len(t, member.Properties, 1) - assert.TrueT(t, propAnonymous.Type.Contains("array")) - foundEmbeddedAnon = true - 
case isRef: - require.Empty(t, member.Properties) - assertIsRef(t, &member, "#/definitions/iface_alias") - foundRef = true - default: - assert.Failf(t, "embedded members in interface are not as expected", "unexpected member in allOf: %d", idx) - } - } - require.TrueT(t, foundEmbedded) - require.TrueT(t, foundEmbeddedAnon) - require.TrueT(t, foundRef) - }) -} - -func testAliasedEmbeddedTypes(t *testing.T, sp *spec.Swagger) { - t.Helper() - - t.Run("embedded alias should render as a $ref", func(t *testing.T) { - iface, ok := sp.Definitions["embedded_with_alias"] - require.TrueT(t, ok) - - require.Len(t, iface.AllOf, 3) - foundAnything := false - foundUUID := false - foundProps := false - for idx, member := range iface.AllOf { - isProps := len(member.Properties) > 0 - isRef := member.Ref.String() != "" - - switch { - case isProps: - require.TrueT(t, member.Type.Contains("object")) - require.Len(t, member.Properties, 3) - assert.MapContainsT(t, member.Properties, "EvenMore") - foundProps = true - case isRef: - switch member.Ref.String() { - case "#/definitions/Anything": - foundAnything = true - case "#/definitions/UUID": - foundUUID = true - default: - assert.Failf(t, - "embedded members in interface are not as expected", "unexpected $ref for member (%v): %d", - member.Ref, idx, - ) - } - default: - assert.Failf(t, "embedded members in interface are not as expected", "unexpected member in allOf: %d", idx) - } - } - require.TrueT(t, foundAnything) - require.TrueT(t, foundUUID) - require.TrueT(t, foundProps) - }) -} - -func TestSpecialSchemas(t *testing.T) { - fixturesPath := filepath.Join("fixtures", "goparsing", "go123", "special") - var sp *spec.Swagger - - t.Run("end-to-end source scan should succeed", func(t *testing.T) { - var err error - sp, err = Run(&Options{ - WorkDir: fixturesPath, - BuildTags: "testscanner", // fixture code is excluded from normal build - ScanModels: true, - RefAliases: true, - }) - require.NoError(t, err) - }) - - if enableSpecOutput { - // 
for debugging, output the resulting spec as YAML - yml, err := marshalToYAMLFormat(sp) - require.NoError(t, err) - - _, _ = os.Stdout.Write(yml) - } - - t.Run("top-level primitive declaration should render just fine", func(t *testing.T) { - primitive, ok := sp.Definitions["primitive"] - require.TrueT(t, ok) - - require.TrueT(t, primitive.Type.Contains("string")) - }) - - t.Run("alias to unsafe pointer at top level should render empty", func(t *testing.T) { - uptr, ok := sp.Definitions["unsafe_pointer_alias"] - require.TrueT(t, ok) - var empty spec.Schema - uptr.VendorExtensible = spec.VendorExtensible{} - require.Equal(t, empty, uptr) - }) - - t.Run("alias to uintptr at top level should render as integer", func(t *testing.T) { - uptr, ok := sp.Definitions["upointer_alias"] - require.TrueT(t, ok) - require.TrueT(t, uptr.Type.Contains("integer")) - require.EqualT(t, "uint64", uptr.Format) - }) - - t.Run("top-level map[string]... should render just fine", func(t *testing.T) { - gomap, ok := sp.Definitions["go_map"] - require.TrueT(t, ok) - require.TrueT(t, gomap.Type.Contains("object")) - require.NotNil(t, gomap.AdditionalProperties) - - mapSchema := gomap.AdditionalProperties.Schema - require.NotNil(t, mapSchema) - require.TrueT(t, mapSchema.Type.Contains("integer")) - require.EqualT(t, "uint16", mapSchema.Format) - }) - - t.Run("untagged struct referenced by a tagged model should be discovered", func(t *testing.T) { - gostruct, ok := sp.Definitions["GoStruct"] - require.TrueT(t, ok) - require.TrueT(t, gostruct.Type.Contains("object")) - require.NotEmpty(t, gostruct.Properties) - - t.Run("pointer property should render just fine", func(t *testing.T) { - a, ok := gostruct.Properties["A"] - require.TrueT(t, ok) - require.TrueT(t, a.Type.Contains("number")) - require.EqualT(t, "float", a.Format) - }) - }) - - t.Run("tagged unsupported map type should render empty", func(t *testing.T) { - idx, ok := sp.Definitions["index_map"] - require.TrueT(t, ok) - var empty 
spec.Schema - idx.VendorExtensible = spec.VendorExtensible{} - require.Equal(t, empty, idx) - }) - - t.Run("redefinition of the builtin error type should render as a string", func(t *testing.T) { - goerror, ok := sp.Definitions["go_error"] - require.TrueT(t, ok) - require.TrueT(t, goerror.Type.Contains("string")) - - t.Run("a type based on the error builtin should be decorated with a x-go-type: error extension", func(t *testing.T) { - val, hasExt := goerror.Extensions.GetString("x-go-type") - assert.TrueT(t, hasExt) - assert.EqualT(t, "error", val) - }) - }) - - t.Run("with SpecialTypes struct", func(t *testing.T) { - testSpecialTypesStruct(t, sp) - }) - - t.Run("with generic types", func(t *testing.T) { - // NOTE: codescan does not really support generic types. - // This test just makes sure generic definitions don't crash the scanner. - // - // The general approach of the scanner is to make an empty schema out of anything - // it doesn't understand. - - // generic_constraint - t.Run("generic type constraint should render like an interface", func(t *testing.T) { - generic, ok := sp.Definitions["generic_constraint"] - require.TrueT(t, ok) - require.Len(t, generic.AllOf, 1) // scanner only understood one member, and skipped the ~uint16 member is doesn't understand - member := generic.AllOf[0] - require.TrueT(t, member.Type.Contains("object")) - require.Len(t, member.Properties, 1) - prop, ok := member.Properties["Uint"] - require.TrueT(t, ok) - require.TrueT(t, prop.Type.Contains("integer")) - require.EqualT(t, "uint16", prop.Format) - }) - - // numerical_constraint - t.Run("generic type constraint with union type should render an empty schema", func(t *testing.T) { - generic, ok := sp.Definitions["numerical_constraint"] - require.TrueT(t, ok) - var empty spec.Schema - generic.VendorExtensible = spec.VendorExtensible{} - require.Equal(t, empty, generic) - }) - - // generic_map - t.Run("generic map should render an empty schema", func(t *testing.T) { - generic, ok := 
sp.Definitions["generic_map"] - require.TrueT(t, ok) - var empty spec.Schema - generic.VendorExtensible = spec.VendorExtensible{} - require.Equal(t, empty, generic) - }) - - // generic_map_alias - t.Run("generic map alias to an anonymous generic type should render an empty schema", func(t *testing.T) { - generic, ok := sp.Definitions["generic_map_alias"] - require.TrueT(t, ok) - var empty spec.Schema - generic.VendorExtensible = spec.VendorExtensible{} - require.Equal(t, empty, generic) - }) - - // generic_indirect - t.Run("generic map alias to a named generic type should render a ref", func(t *testing.T) { - generic, ok := sp.Definitions["generic_indirect"] - require.TrueT(t, ok) - assertIsRef(t, &generic, "#/definitions/generic_map_alias") - }) - - // generic_slice - t.Run("generic slice should render as an array of empty schemas", func(t *testing.T) { - generic, ok := sp.Definitions["generic_slice"] - require.TrueT(t, ok) - require.TrueT(t, generic.Type.Contains("array")) - require.NotNil(t, generic.Items) - itemsSchema := generic.Items.Schema - require.NotNil(t, itemsSchema) - var empty spec.Schema - require.Equal(t, &empty, itemsSchema) - }) - - // union_alias: - t.Run("alias to type constraint should render a ref", func(t *testing.T) { - generic, ok := sp.Definitions["union_alias"] - require.TrueT(t, ok) - assertIsRef(t, &generic, "#/definitions/numerical_constraint") - }) - }) -} - -func testSpecialTypesStruct(t *testing.T, sp *spec.Swagger) { - t.Helper() - - t.Run("in spite of all the pitfalls, the struct should be rendered", func(t *testing.T) { - special, ok := sp.Definitions["special_types"] - require.TrueT(t, ok) - require.TrueT(t, special.Type.Contains("object")) - props := special.Properties - require.NotEmpty(t, props) - require.Empty(t, special.AllOf) - - t.Run("property pointer to struct should render as a ref", func(t *testing.T) { - ptr, ok := props["PtrStruct"] - require.TrueT(t, ok) - assertIsRef(t, &ptr, "#/definitions/GoStruct") - }) - - 
t.Run("property as time.Time should render as a formatted string", func(t *testing.T) { - str, ok := props["ShouldBeStringTime"] - require.TrueT(t, ok) - require.TrueT(t, str.Type.Contains("string")) - require.EqualT(t, "date-time", str.Format) - }) - - t.Run("property as *time.Time should also render as a formatted string", func(t *testing.T) { - str, ok := props["ShouldAlsoBeStringTime"] - require.TrueT(t, ok) - require.TrueT(t, str.Type.Contains("string")) - require.EqualT(t, "date-time", str.Format) - }) - - t.Run("property as builtin error should render as a string", func(t *testing.T) { - goerror, ok := props["Err"] - require.TrueT(t, ok) - require.TrueT(t, goerror.Type.Contains("string")) - - t.Run("a type based on the error builtin should be decorated with a x-go-type: error extension", func(t *testing.T) { - val, hasExt := goerror.Extensions.GetString("x-go-type") - assert.TrueT(t, hasExt) - assert.EqualT(t, "error", val) - }) - }) - - t.Run("type recognized as a text marshaler should render as a string", func(t *testing.T) { - m, ok := props["Marshaler"] - require.TrueT(t, ok) - require.TrueT(t, m.Type.Contains("string")) - - t.Run("a type based on the encoding.TextMarshaler decorated with a x-go-type extension", func(t *testing.T) { - val, hasExt := m.Extensions.GetString("x-go-type") - assert.TrueT(t, hasExt) - assert.EqualT(t, fixturesModule+"/goparsing/go123/special.IsATextMarshaler", val) - }) - }) - - t.Run("a json.RawMessage should be recognized and render as an object (yes this is wrong)", func(t *testing.T) { - m, ok := props["Message"] - require.TrueT(t, ok) - require.TrueT(t, m.Type.Contains("object")) - }) - - t.Run("type time.Duration is not recognized as a special type and should just render as a ref", func(t *testing.T) { - d, ok := props["Duration"] - require.TrueT(t, ok) - assertIsRef(t, &d, "#/definitions/Duration") - - t.Run("discovered definition should be an integer", func(t *testing.T) { - duration, ok := sp.Definitions["Duration"] - 
require.TrueT(t, ok) - require.TrueT(t, duration.Type.Contains("integer")) - require.EqualT(t, "int64", duration.Format) - - t.Run("time.Duration schema should be decorated with a x-go-package: time", func(t *testing.T) { - val, hasExt := duration.Extensions.GetString("x-go-package") - assert.TrueT(t, hasExt) - assert.EqualT(t, "time", val) - }) - }) - }) - - testSpecialTypesStrfmt(t, props) - - t.Run("a property which is a map should render just fine, with a ref", func(t *testing.T) { - mm, ok := props["Map"] - require.TrueT(t, ok) - require.TrueT(t, mm.Type.Contains("object")) - require.NotNil(t, mm.AdditionalProperties) - mapSchema := mm.AdditionalProperties.Schema - require.NotNil(t, mapSchema) - assertIsRef(t, mapSchema, "#/definitions/GoStruct") - }) - - t.Run("a property which is a named array type should render as a ref", func(t *testing.T) { - na, ok := props["NamedArray"] - require.TrueT(t, ok) - assertIsRef(t, &na, "#/definitions/go_array") - }) - - testSpecialTypesWhatNot(t, sp, props) - }) -} - -func testSpecialTypesStrfmt(t *testing.T, props map[string]spec.Schema) { - t.Helper() - - t.Run("with strfmt types", func(t *testing.T) { - t.Run("a strfmt.Date should be recognized and render as a formatted string", func(t *testing.T) { - d, ok := props["FormatDate"] - require.TrueT(t, ok) - require.TrueT(t, d.Type.Contains("string")) - require.EqualT(t, "date", d.Format) - }) - - t.Run("a strfmt.DateTime should be recognized and render as a formatted string", func(t *testing.T) { - d, ok := props["FormatTime"] - require.TrueT(t, ok) - require.TrueT(t, d.Type.Contains("string")) - require.EqualT(t, "date-time", d.Format) - }) - - t.Run("a strfmt.UUID should be recognized and render as a formatted string", func(t *testing.T) { - u, ok := props["FormatUUID"] - require.TrueT(t, ok) - require.TrueT(t, u.Type.Contains("string")) - require.EqualT(t, "uuid", u.Format) - }) - - t.Run("a pointer to strfmt.UUID should be recognized and render as a formatted string", 
func(t *testing.T) { - u, ok := props["PtrFormatUUID"] - require.TrueT(t, ok) - require.TrueT(t, u.Type.Contains("string")) - require.EqualT(t, "uuid", u.Format) - }) - }) -} - -func testSpecialTypesWhatNot(t *testing.T, sp *spec.Swagger, props map[string]spec.Schema) { - t.Helper() - - t.Run(`with the "WhatNot" anonymous inner struct`, func(t *testing.T) { - t.Run("should render as an anonymous schema, in spite of all the unsupported things", func(t *testing.T) { - wn, ok := props["WhatNot"] - require.TrueT(t, ok) - require.TrueT(t, wn.Type.Contains("object")) - require.NotEmpty(t, wn.Properties) - - markedProps := make([]string, 0) - - for _, unsupportedProp := range []string{ - "AA", // complex128 - "A", // complex64 - "B", // chan int - "C", // func() - "D", // func() string - "E", // unsafe.Pointer - } { - t.Run("with property "+unsupportedProp, func(t *testing.T) { - prop, ok := wn.Properties[unsupportedProp] - require.TrueT(t, ok) - markedProps = append(markedProps, unsupportedProp) - - t.Run("unsupported type in property should render as an empty schema", func(t *testing.T) { - var empty spec.Schema - require.Equal(t, empty, prop) - }) - }) - } - - for _, supportedProp := range []string{ - "F", // uintptr - "G", - "H", - "I", - "J", - "K", - } { - t.Run("with property "+supportedProp, func(t *testing.T) { - prop, ok := wn.Properties[supportedProp] - require.TrueT(t, ok) - markedProps = append(markedProps, supportedProp) - - switch supportedProp { - case "F": - t.Run("uintptr should render as integer", func(t *testing.T) { - require.TrueT(t, prop.Type.Contains("integer")) - require.EqualT(t, "uint64", prop.Format) - }) - case "G", "H": - t.Run( - "math/big types are not recognized as special types and as TextMarshalers they render as string", - func(t *testing.T) { - require.TrueT(t, prop.Type.Contains("string")) - }) - case "I": - t.Run("go array should render as a json array", func(t *testing.T) { - require.TrueT(t, prop.Type.Contains("array")) - 
require.NotNil(t, prop.Items) - itemsSchema := prop.Items.Schema - require.NotNil(t, itemsSchema) - - require.TrueT(t, itemsSchema.Type.Contains("integer")) - // [5]byte is not recognized an array of bytes, but of uint8 - // (internally this is the same for go) - require.EqualT(t, "uint8", itemsSchema.Format) - }) - case "J", "K": - t.Run("reflect types should render just fine", func(t *testing.T) { - var dest string - if supportedProp == "J" { - dest = "Type" - } else { - dest = "Value" - } - assertIsRef(t, &prop, "#/definitions/"+dest) - - t.Run("the $ref should exist", func(t *testing.T) { - deref, ok := sp.Definitions[dest] - require.TrueT(t, ok) - val, hasExt := deref.Extensions.GetString("x-go-package") - assert.TrueT(t, hasExt) - assert.EqualT(t, "reflect", val) - }) - }) - } - }) - } - - t.Run("we should not have any property left in WhatNot", func(t *testing.T) { - for _, key := range markedProps { - delete(wn.Properties, key) - } - - require.Empty(t, wn.Properties) - }) - - t.Run("surprisingly, a tagged unexported top-level definition can be rendered", func(t *testing.T) { - unexported, ok := sp.Definitions["unexported"] - require.TrueT(t, ok) - require.TrueT(t, unexported.Type.Contains("object")) - }) - - t.Run("the IsATextMarshaler type is not identified as a discovered type and is not rendered", func(t *testing.T) { - _, ok := sp.Definitions["IsATextMarshaler"] - require.FalseT(t, ok) - }) - - t.Run("a top-level go array should render just fine", func(t *testing.T) { - // Notice that the semantics of fixed length are lost in this mapping - goarray, ok := sp.Definitions["go_array"] - require.TrueT(t, ok) - require.TrueT(t, goarray.Type.Contains("array")) - require.NotNil(t, goarray.Items) - itemsSchema := goarray.Items.Schema - require.NotNil(t, itemsSchema) - require.TrueT(t, itemsSchema.Type.Contains("integer")) - require.EqualT(t, "int64", itemsSchema.Format) - }) - }) - }) -} - -func TestEmbeddedAllOf(t *testing.T) { - sctx := 
loadClassificationPkgsCtx(t) - decl := getClassificationModel(sctx, "AllOfModel") - require.NotNil(t, decl) - prs := &schemaBuilder{ - ctx: sctx, - decl: decl, - } - models := make(map[string]spec.Schema) - require.NoError(t, prs.Build(models)) - schema := models["AllOfModel"] - - require.Len(t, schema.AllOf, 3) - asch := schema.AllOf[0] - assertProperty(t, &asch, "integer", "age", "int32", "Age") - assertProperty(t, &asch, "integer", "id", "int64", "ID") - assertProperty(t, &asch, "string", "name", "", "Name") - - asch = schema.AllOf[1] - assert.EqualT(t, "#/definitions/withNotes", asch.Ref.String()) - - asch = schema.AllOf[2] - assertProperty(t, &asch, "string", "createdAt", "date-time", "CreatedAt") - assertProperty(t, &asch, "integer", "did", "int64", "DID") - assertProperty(t, &asch, "string", "cat", "", "Cat") -} - -func TestPointersAreNullableByDefaultWhenSetXNullableForPointersIsSet(t *testing.T) { - assertModel := func(sctx *scanCtx, packagePath, modelName string) { - decl, _ := sctx.FindDecl(packagePath, modelName) - require.NotNil(t, decl) - prs := &schemaBuilder{ - ctx: sctx, - decl: decl, - } - models := make(map[string]spec.Schema) - require.NoError(t, prs.Build(models)) - - schema := models[modelName] - require.Len(t, schema.Properties, 5) - - require.MapContainsT(t, schema.Properties, "Value1") - assert.Equal(t, true, schema.Properties["Value1"].Extensions["x-nullable"]) - require.MapContainsT(t, schema.Properties, "Value2") - assert.MapNotContainsT(t, schema.Properties["Value2"].Extensions, "x-nullable") - require.MapContainsT(t, schema.Properties, "Value3") - assert.Equal(t, false, schema.Properties["Value3"].Extensions["x-nullable"]) - require.MapContainsT(t, schema.Properties, "Value4") - assert.MapNotContainsT(t, schema.Properties["Value4"].Extensions, "x-nullable") - assert.Equal(t, false, schema.Properties["Value4"].Extensions["x-isnullable"]) - require.MapContainsT(t, schema.Properties, "Value5") - assert.MapNotContainsT(t, 
schema.Properties["Value5"].Extensions, "x-nullable") - } - - packagePattern := "./enhancements/pointers-nullable-by-default" - packagePath := fixturesModule + "/enhancements/pointers-nullable-by-default" - sctx, err := newScanCtx(&Options{Packages: []string{packagePattern}, WorkDir: "fixtures", SetXNullableForPointers: true}) - require.NoError(t, err) - - assertModel(sctx, packagePath, "Item") - assertModel(sctx, packagePath, "ItemInterface") -} - -func TestPointersAreNotNullableByDefaultWhenSetXNullableForPointersIsNotSet(t *testing.T) { - assertModel := func(sctx *scanCtx, packagePath, modelName string) { - decl, _ := sctx.FindDecl(packagePath, modelName) - require.NotNil(t, decl) - prs := &schemaBuilder{ - ctx: sctx, - decl: decl, - } - models := make(map[string]spec.Schema) - require.NoError(t, prs.Build(models)) - - schema := models[modelName] - require.Len(t, schema.Properties, 5) - - require.MapContainsT(t, schema.Properties, "Value1") - assert.MapNotContainsT(t, schema.Properties["Value1"].Extensions, "x-nullable") - require.MapContainsT(t, schema.Properties, "Value2") - assert.MapNotContainsT(t, schema.Properties["Value2"].Extensions, "x-nullable") - require.MapContainsT(t, schema.Properties, "Value3") - assert.Equal(t, false, schema.Properties["Value3"].Extensions["x-nullable"]) - require.MapContainsT(t, schema.Properties, "Value4") - assert.MapNotContainsT(t, schema.Properties["Value4"].Extensions, "x-nullable") - assert.Equal(t, false, schema.Properties["Value4"].Extensions["x-isnullable"]) - require.MapContainsT(t, schema.Properties, "Value5") - assert.MapNotContainsT(t, schema.Properties["Value5"].Extensions, "x-nullable") - } - - packagePattern := "./enhancements/pointers-nullable-by-default" - packagePath := fixturesModule + "/enhancements/pointers-nullable-by-default" - sctx, err := newScanCtx(&Options{Packages: []string{packagePattern}, WorkDir: "fixtures"}) - require.NoError(t, err) - - assertModel(sctx, packagePath, "Item") - assertModel(sctx, 
packagePath, "ItemInterface") -} - -func TestSwaggerTypeNamed(t *testing.T) { - sctx := loadClassificationPkgsCtx(t) - decl := getClassificationModel(sctx, "NamedWithType") - require.NotNil(t, decl) - prs := &schemaBuilder{ - ctx: sctx, - decl: decl, - } - models := make(map[string]spec.Schema) - require.NoError(t, prs.Build(models)) - schema := models["namedWithType"] - - assertProperty(t, &schema, "object", "some_map", "", "SomeMap") -} - -func TestSwaggerTypeNamedWithGenerics(t *testing.T) { - tests := map[string]func(t *testing.T, models map[string]spec.Schema){ - "NamedStringResults": func(t *testing.T, models map[string]spec.Schema) { - schema := models["namedStringResults"] - assertArrayProperty(t, &schema, "string", "matches", "", "Matches") - }, - "NamedStoreOrderResults": func(t *testing.T, models map[string]spec.Schema) { - schema := models["namedStoreOrderResults"] - assertArrayRef(t, &schema, "matches", "Matches", "#/definitions/order") - }, - "NamedStringSlice": func(t *testing.T, models map[string]spec.Schema) { - assertArrayDefinition(t, models, "namedStringSlice", "string", "", "NamedStringSlice") - }, - "NamedStoreOrderSlice": func(t *testing.T, models map[string]spec.Schema) { - assertArrayWithRefDefinition(t, models, "namedStoreOrderSlice", "#/definitions/order", "NamedStoreOrderSlice") - }, - "NamedStringMap": func(t *testing.T, models map[string]spec.Schema) { - assertMapDefinition(t, models, "namedStringMap", "string", "", "NamedStringMap") - }, - "NamedStoreOrderMap": func(t *testing.T, models map[string]spec.Schema) { - assertMapWithRefDefinition(t, models, "namedStoreOrderMap", "#/definitions/order", "NamedStoreOrderMap") - }, - "NamedMapOfStoreOrderSlices": func(t *testing.T, models map[string]spec.Schema) { - assertMapDefinition(t, models, "namedMapOfStoreOrderSlices", "array", "", "NamedMapOfStoreOrderSlices") - arraySchema := models["namedMapOfStoreOrderSlices"].AdditionalProperties.Schema - assertArrayWithRefDefinition(t, 
map[string]spec.Schema{ - "array": *arraySchema, - }, "array", "#/definitions/order", "") - }, - } - - for testName, testFunc := range tests { - t.Run(testName, func(t *testing.T) { - sctx := loadClassificationPkgsCtx(t) - decl := getClassificationModel(sctx, testName) - require.NotNil(t, decl) - prs := &schemaBuilder{ - ctx: sctx, - decl: decl, - } - models := make(map[string]spec.Schema) - require.NoError(t, prs.Build(models)) - testFunc(t, models) - }) - } -} - -func TestSwaggerTypeStruct(t *testing.T) { - sctx := loadClassificationPkgsCtx(t) - decl := getClassificationModel(sctx, "NullString") - require.NotNil(t, decl) - prs := &schemaBuilder{ - ctx: sctx, - decl: decl, - } - models := make(map[string]spec.Schema) - require.NoError(t, prs.Build(models)) - schema := models["NullString"] - - assert.TrueT(t, schema.Type.Contains("string")) -} - -func TestStructDiscriminators(t *testing.T) { - sctx := loadClassificationPkgsCtx(t) - - models := make(map[string]spec.Schema) - for _, tn := range []string{"BaseStruct", "Giraffe", "Gazelle"} { - decl := getClassificationModel(sctx, tn) - require.NotNil(t, decl) - prs := &schemaBuilder{ - ctx: sctx, - decl: decl, - } - require.NoError(t, prs.Build(models)) - } - - schema := models["animal"] - - assert.Equal(t, "BaseStruct", schema.Extensions["x-go-name"]) - assert.EqualT(t, "jsonClass", schema.Discriminator) - - sch := models["gazelle"] - assert.Len(t, sch.AllOf, 2) - cl, _ := sch.Extensions.GetString("x-class") - assert.EqualT(t, "a.b.c.d.E", cl) - cl, _ = sch.Extensions.GetString("x-go-name") - assert.EqualT(t, "Gazelle", cl) - - sch = models["giraffe"] - assert.Len(t, sch.AllOf, 2) - cl, _ = sch.Extensions.GetString("x-class") - assert.Empty(t, cl) - cl, _ = sch.Extensions.GetString("x-go-name") - assert.EqualT(t, "Giraffe", cl) - - // sch = noModelDefs["lion"] - - // b, _ := json.MarshalIndent(sch, "", " ") - // fmt.Println(string(b)) -} - -func TestInterfaceDiscriminators(t *testing.T) { - sctx := 
loadClassificationPkgsCtx(t) - models := make(map[string]spec.Schema) - for _, tn := range []string{"BaseStruct", "Identifiable", "WaterType", "Fish", "TeslaCar", "ModelS", "ModelX", "ModelA", "Cars"} { - decl := getClassificationModel(sctx, tn) - require.NotNil(t, decl) - - prs := &schemaBuilder{ - ctx: sctx, - decl: decl, - } - require.NoError(t, prs.Build(models)) - } - - schema, ok := models["fish"] - - if assert.TrueT(t, ok) && assert.Len(t, schema.AllOf, 5) { - sch := schema.AllOf[3] - assert.Len(t, sch.Properties, 1) - assertProperty(t, &sch, "string", "colorName", "", "ColorName") - - sch = schema.AllOf[2] - assert.EqualT(t, "#/definitions/extra", sch.Ref.String()) - - sch = schema.AllOf[0] - assert.Len(t, sch.Properties, 1) - assertProperty(t, &sch, "integer", "id", "int64", "ID") - - sch = schema.AllOf[1] - assert.EqualT(t, "#/definitions/water", sch.Ref.String()) - - sch = schema.AllOf[4] - assert.Len(t, sch.Properties, 2) - assertProperty(t, &sch, "string", "name", "", "Name") - assertProperty(t, &sch, "string", "jsonClass", "", "StructType") - assert.EqualT(t, "jsonClass", sch.Discriminator) - } - - schema, ok = models["modelS"] - if assert.TrueT(t, ok) { - assert.Len(t, schema.AllOf, 2) - cl, _ := schema.Extensions.GetString("x-class") - assert.EqualT(t, "com.tesla.models.ModelS", cl) - cl, _ = schema.Extensions.GetString("x-go-name") - assert.EqualT(t, "ModelS", cl) - - sch := schema.AllOf[0] - assert.EqualT(t, "#/definitions/TeslaCar", sch.Ref.String()) - sch = schema.AllOf[1] - assert.Len(t, sch.Properties, 1) - assertProperty(t, &sch, "string", "edition", "", "Edition") - } - - schema, ok = models["modelA"] - if assert.TrueT(t, ok) { - cl, _ := schema.Extensions.GetString("x-go-name") - assert.EqualT(t, "ModelA", cl) - - sch, ok := schema.Properties["Tesla"] - if assert.TrueT(t, ok) { - assert.EqualT(t, "#/definitions/TeslaCar", sch.Ref.String()) - } - - assertProperty(t, &schema, "integer", "doors", "int64", "Doors") - } -} - -func 
TestAddExtension(t *testing.T) { - ve := &spec.VendorExtensible{ - Extensions: make(spec.Extensions), - } - - key := "x-go-name" - value := "Name" - addExtension(ve, key, value, false) - veStr, ok := ve.Extensions[key].(string) - require.TrueT(t, ok) - assert.EqualT(t, value, veStr) - - key2 := "x-go-package" - value2 := "schema" - addExtension(ve, key2, value2, false) - veStr2, ok := ve.Extensions[key2].(string) - require.TrueT(t, ok) - assert.EqualT(t, value2, veStr2) - - key3 := "x-go-class" - value3 := "Spec" - addExtension(ve, key3, value3, true) - assert.Nil(t, ve.Extensions[key3]) -} - -func getClassificationModel(sctx *scanCtx, nm string) *entityDecl { - decl, ok := sctx.FindDecl(fixturesModule+"/goparsing/classification/models", nm) - if !ok { - return nil - } - return decl -} - -func assertArrayProperty(t *testing.T, schema *spec.Schema, typeName, jsonName, format, goName string) { - t.Helper() - - prop := schema.Properties[jsonName] - assert.NotEmpty(t, prop.Type) - assert.TrueT(t, prop.Type.Contains("array")) - require.NotNil(t, prop.Items) - if typeName != "" { - require.NotNil(t, prop.Items.Schema) - require.NotEmpty(t, prop.Items.Schema.Type) - assert.EqualT(t, typeName, prop.Items.Schema.Type[0]) - } - assert.Equal(t, goName, prop.Extensions["x-go-name"]) - assert.EqualT(t, format, prop.Items.Schema.Format) -} - -func assertArrayRef(t *testing.T, schema *spec.Schema, jsonName, goName, fragment string) { - t.Helper() - - assertArrayProperty(t, schema, "", jsonName, "", goName) - psch := schema.Properties[jsonName].Items.Schema - assert.EqualT(t, fragment, psch.Ref.String()) -} - -func assertProperty(t *testing.T, schema *spec.Schema, typeName, jsonName, format, goName string) { - t.Helper() - - if typeName == "" { - assert.Empty(t, schema.Properties[jsonName].Type) - } else if assert.NotEmpty(t, schema.Properties[jsonName].Type) { - assert.EqualT(t, typeName, schema.Properties[jsonName].Type[0]) - } - if goName == "" { - assert.Nil(t, 
schema.Properties[jsonName].Extensions["x-go-name"]) - } else { - assert.Equal(t, goName, schema.Properties[jsonName].Extensions["x-go-name"]) - } - assert.EqualT(t, format, schema.Properties[jsonName].Format) -} - -func assertRef(t *testing.T, schema *spec.Schema, jsonName, _, fragment string) { - t.Helper() - - assert.Empty(t, schema.Properties[jsonName].Type) - psch := schema.Properties[jsonName] - assert.EqualT(t, fragment, psch.Ref.String()) -} - -func assertIsRef(t *testing.T, schema *spec.Schema, fragment string) { - t.Helper() - - assert.EqualT(t, fragment, schema.Ref.String()) -} - -func assertDefinition(t *testing.T, defs map[string]spec.Schema, defName, typeName, formatName string) { - t.Helper() - - schema, ok := defs[defName] - if assert.TrueT(t, ok) { - if assert.NotEmpty(t, schema.Type) { - assert.EqualT(t, typeName, schema.Type[0]) - assert.Nil(t, schema.Extensions["x-go-name"]) - assert.EqualT(t, formatName, schema.Format) - } - } -} - -func assertMapDefinition(t *testing.T, defs map[string]spec.Schema, defName, typeName, formatName, goName string) { - t.Helper() - - schema, ok := defs[defName] - require.TrueT(t, ok) - require.NotEmpty(t, schema.Type) - - assert.EqualT(t, "object", schema.Type[0]) - adl := schema.AdditionalProperties - - require.NotNil(t, adl) - require.NotNil(t, adl.Schema) - - if len(adl.Schema.Type) > 0 { - assert.EqualT(t, typeName, adl.Schema.Type[0]) - } - assert.EqualT(t, formatName, adl.Schema.Format) - - assertExtension(t, schema, goName) -} - -func assertExtension(t *testing.T, schema spec.Schema, goName string) { - t.Helper() - - if goName != "" { - assert.Equal(t, goName, schema.Extensions["x-go-name"]) - - return - } - - assert.Nil(t, schema.Extensions["x-go-name"]) -} - -func assertMapWithRefDefinition(t *testing.T, defs map[string]spec.Schema, defName, refURL, goName string) { - t.Helper() - - schema, ok := defs[defName] - require.TrueT(t, ok) - require.NotEmpty(t, schema.Type) - assert.EqualT(t, "object", 
schema.Type[0]) - adl := schema.AdditionalProperties - require.NotNil(t, adl) - require.NotNil(t, adl.Schema) - require.NotZero(t, adl.Schema.Ref) - assert.EqualT(t, refURL, adl.Schema.Ref.String()) - assertExtension(t, schema, goName) -} - -func assertArrayDefinition(t *testing.T, defs map[string]spec.Schema, defName, typeName, formatName, goName string) { - t.Helper() - - schema, ok := defs[defName] - require.TrueT(t, ok) - require.NotEmpty(t, schema.Type) - assert.EqualT(t, "array", schema.Type[0]) - adl := schema.Items - require.NotNil(t, adl) - require.NotNil(t, adl.Schema) - assert.EqualT(t, typeName, adl.Schema.Type[0]) - assert.EqualT(t, formatName, adl.Schema.Format) - assertExtension(t, schema, goName) -} - -func assertArrayWithRefDefinition(t *testing.T, defs map[string]spec.Schema, defName, refURL, goName string) { - t.Helper() - - schema, ok := defs[defName] - require.TrueT(t, ok) - require.NotEmpty(t, schema.Type) - assert.EqualT(t, "array", schema.Type[0]) - adl := schema.Items - require.NotNil(t, adl) - require.NotNil(t, adl.Schema) - require.NotZero(t, adl.Schema.Ref) - assert.EqualT(t, refURL, adl.Schema.Ref.String()) - assertExtension(t, schema, goName) -} - -func assertRefDefinition(t *testing.T, defs map[string]spec.Schema, defName, refURL, goName string) { - schema, ok := defs[defName] - if assert.TrueT(t, ok) { - if assert.NotZero(t, schema.Ref) { - url := schema.Ref.String() - assert.EqualT(t, refURL, url) - if goName != "" { - assert.Equal(t, goName, schema.Extensions["x-go-name"]) - } else { - assert.Nil(t, schema.Extensions["x-go-name"]) - } - } - } -} - -func assertMapProperty(t *testing.T, schema *spec.Schema, typeName, jsonName, format, goName string) { - prop := schema.Properties[jsonName] - assert.NotEmpty(t, prop.Type) - assert.TrueT(t, prop.Type.Contains("object")) - assert.NotNil(t, prop.AdditionalProperties) - if typeName != "" { - assert.EqualT(t, typeName, prop.AdditionalProperties.Schema.Type[0]) - } - assert.Equal(t, goName, 
prop.Extensions["x-go-name"]) - assert.EqualT(t, format, prop.AdditionalProperties.Schema.Format) -} - -func assertMapRef(t *testing.T, schema *spec.Schema, jsonName, goName, fragment string) { - assertMapProperty(t, schema, "", jsonName, "", goName) - psch := schema.Properties[jsonName].AdditionalProperties.Schema - assert.EqualT(t, fragment, psch.Ref.String()) -} - -func marshalToYAMLFormat(swspec any) ([]byte, error) { - b, err := json.Marshal(swspec) - if err != nil { - return nil, err - } - - var jsonObj any - if err := yaml.Unmarshal(b, &jsonObj); err != nil { - return nil, err - } - - return yaml.Marshal(jsonObj) -} - -func TestEmbeddedDescriptionAndTags(t *testing.T) { - packagePattern := "./bugs/3125/minimal" - packagePath := fixturesModule + "/bugs/3125/minimal" - sctx, err := newScanCtx(&Options{ - Packages: []string{packagePattern}, - WorkDir: "fixtures", - DescWithRef: true, - }) - require.NoError(t, err) - decl, _ := sctx.FindDecl(packagePath, "Item") - require.NotNil(t, decl) - prs := &schemaBuilder{ - ctx: sctx, - decl: decl, - } - models := make(map[string]spec.Schema) - require.NoError(t, prs.Build(models)) - schema := models["Item"] - - assert.Equal(t, []string{"value1", "value2"}, schema.Required) - require.Len(t, schema.Properties, 2) - - require.MapContainsT(t, schema.Properties, "value1") - assert.EqualT(t, "Nullable value", schema.Properties["value1"].Description) - assert.Equal(t, true, schema.Properties["value1"].Extensions["x-nullable"]) - - require.MapContainsT(t, schema.Properties, "value2") - assert.EqualT(t, "Non-nullable value", schema.Properties["value2"].Description) - assert.MapNotContainsT(t, schema.Properties["value2"].Extensions, "x-nullable") - assert.Equal(t, `{"value": 42}`, schema.Properties["value2"].Example) -} - -func TestIssue2540(t *testing.T) { - t.Run("should produce example and default for top level declaration only", - testIssue2540(false, `{ - "Book": { - "description": "At this moment, a book is only described by 
its publishing date\nand author.", - "type": "object", - "title": "Book holds all relevant information about a book.", - "example": "{ \"Published\": 2026, \"Author\": \"Fred\" }", - "default": "{ \"Published\": 1900, \"Author\": \"Unknown\" }", - "properties": { - "Author": { - "$ref": "#/definitions/Author" - }, - "Published": { - "type": "integer", - "format": "int64", - "minimum": 0, - "example": 2021 - } - } - } - }`), - ) - t.Run("should produce example and default for top level declaration and embedded $ref field", - testIssue2540(true, `{ - "Book": { - "description": "At this moment, a book is only described by its publishing date\nand author.", - "type": "object", - "title": "Book holds all relevant information about a book.", - "example": "{ \"Published\": 2026, \"Author\": \"Fred\" }", - "default": "{ \"Published\": 1900, \"Author\": \"Unknown\" }", - "properties": { - "Author": { - "$ref": "#/definitions/Author", - "example": "{ \"Name\": \"Tolkien\" }" - }, - "Published": { - "type": "integer", - "format": "int64", - "minimum": 0, - "example": 2021 - } - } - } - }`), - ) -} - -func testIssue2540(descWithRef bool, expectedJSON string) func(*testing.T) { - return func(t *testing.T) { - packagePattern := "./bugs/2540/foo" - packagePath := fixturesModule + "/bugs/2540/foo" - sctx, err := newScanCtx(&Options{ - Packages: []string{packagePattern}, - WorkDir: "fixtures", - DescWithRef: descWithRef, - SkipExtensions: true, - }) - require.NoError(t, err) - - decl, _ := sctx.FindDecl(packagePath, "Book") - require.NotNil(t, decl) - prs := &schemaBuilder{ - ctx: sctx, - decl: decl, - } - - models := make(map[string]spec.Schema) - require.NoError(t, prs.Build(models)) - - b, err := json.Marshal(models) - require.NoError(t, err) - assert.JSONEqT(t, expectedJSON, string(b)) - } -} - -func TestSetEnumDoesNotPanic(t *testing.T) { - dir := t.TempDir() - - src := ` - package failure - - // swagger:model Order - type Order struct { - State State ` + "`json:\"state\"`" + 
` - } - - // State represents the state of an order. - // enum: ["created","processed"] - type State string - ` - err := os.WriteFile(filepath.Join(dir, "model.go"), []byte(src), 0o600) - require.NoError(t, err) - - goMod := ` - module failure - go 1.23` - err = os.WriteFile(filepath.Join(dir, "go.mod"), []byte(goMod), 0o600) - require.NoError(t, err) - - _, err = Run(&Options{ - WorkDir: dir, - ScanModels: true, - }) - - require.NoError(t, err) -} - -func TestSwaggerTypeNamedArray(t *testing.T) { - sctx := loadClassificationPkgsCtx(t) - decl := getClassificationModel(sctx, "NamedWithArrayType") - require.NotNil(t, decl) - prs := &schemaBuilder{ - ctx: sctx, - decl: decl, - } - models := make(map[string]spec.Schema) - require.NoError(t, prs.Build(models)) - schema := models["namedWithArrayType"] - - // swagger:type array on a named []string type should produce - // an inlined array with string items, not a $ref. - assertArrayProperty(t, &schema, "string", "tags", "", "Tags") -} - -func TestSwaggerTypeNamedFixedArray(t *testing.T) { - sctx := loadClassificationPkgsCtx(t) - decl := getClassificationModel(sctx, "NamedWithFixedArrayType") - require.NotNil(t, decl) - prs := &schemaBuilder{ - ctx: sctx, - decl: decl, - } - models := make(map[string]spec.Schema) - require.NoError(t, prs.Build(models)) - schema := models["namedWithFixedArrayType"] - - // swagger:type array on a named [5]string type should produce - // an inlined array with string items via buildNamedArray. 
- assertArrayProperty(t, &schema, "string", "labels", "", "Labels") -} - -func TestSwaggerTypeBadValueOnStruct(t *testing.T) { - sctx := loadClassificationPkgsCtx(t) - decl := getClassificationModel(sctx, "NamedWithBadStructType") - require.NotNil(t, decl) - prs := &schemaBuilder{ - ctx: sctx, - decl: decl, - } - models := make(map[string]spec.Schema) - require.NoError(t, prs.Build(models)) - schema := models["namedWithBadStructType"] - - // swagger:type with an unsupported value on a struct should not - // produce an empty schema — it should either inline the struct - // or create a $ref. The key assertion is that the property exists - // and is not empty (i.e., the error was not silently swallowed). - prop := schema.Properties["nested"] - hasType := len(prop.Type) > 0 - hasRef := prop.Ref.String() != "" - hasProps := len(prop.Properties) > 0 - assert.TrueT(t, hasType || hasRef || hasProps, - "expected nested property to have type, $ref, or properties — not an empty schema") -} - -func TestSwaggerTypeObjectOnStruct(t *testing.T) { - sctx := loadClassificationPkgsCtx(t) - decl := getClassificationModel(sctx, "NamedWithObjectStructType") - require.NotNil(t, decl) - prs := &schemaBuilder{ - ctx: sctx, - decl: decl, - } - models := make(map[string]spec.Schema) - require.NoError(t, prs.Build(models)) - schema := models["namedWithObjectStructType"] - - // swagger:type object on a struct should inline as type:object, - // preserving the field's description. 
- prop := schema.Properties["headers"] - assert.TrueT(t, prop.Type.Contains("object")) - assert.Empty(t, prop.Ref.String(), "should not have $ref when swagger:type object is set") -} diff --git a/taggers.go b/taggers.go deleted file mode 100644 index 1070cc8..0000000 --- a/taggers.go +++ /dev/null @@ -1,136 +0,0 @@ -// SPDX-FileCopyrightText: Copyright 2015-2025 go-swagger maintainers -// SPDX-License-Identifier: Apache-2.0 - -package codescan - -import ( - "fmt" - "go/ast" - - "github.com/go-openapi/spec" -) - -// itemsTaggers builds tag parsers for array items at a given nesting level. -func itemsTaggers(items *spec.Items, level int) []tagParser { - // the expression is 1-index based not 0-index - itemsPrefix := fmt.Sprintf(rxItemsPrefixFmt, level+1) - - return []tagParser{ - newSingleLineTagParser(fmt.Sprintf("items%dMaximum", level), &setMaximum{itemsValidations{items}, rxf(rxMaximumFmt, itemsPrefix)}), - newSingleLineTagParser(fmt.Sprintf("items%dMinimum", level), &setMinimum{itemsValidations{items}, rxf(rxMinimumFmt, itemsPrefix)}), - newSingleLineTagParser(fmt.Sprintf("items%dMultipleOf", level), &setMultipleOf{itemsValidations{items}, rxf(rxMultipleOfFmt, itemsPrefix)}), - newSingleLineTagParser(fmt.Sprintf("items%dMinLength", level), &setMinLength{itemsValidations{items}, rxf(rxMinLengthFmt, itemsPrefix)}), - newSingleLineTagParser(fmt.Sprintf("items%dMaxLength", level), &setMaxLength{itemsValidations{items}, rxf(rxMaxLengthFmt, itemsPrefix)}), - newSingleLineTagParser(fmt.Sprintf("items%dPattern", level), &setPattern{itemsValidations{items}, rxf(rxPatternFmt, itemsPrefix)}), - newSingleLineTagParser(fmt.Sprintf("items%dCollectionFormat", level), &setCollectionFormat{itemsValidations{items}, rxf(rxCollectionFormatFmt, itemsPrefix)}), - newSingleLineTagParser(fmt.Sprintf("items%dMinItems", level), &setMinItems{itemsValidations{items}, rxf(rxMinItemsFmt, itemsPrefix)}), - newSingleLineTagParser(fmt.Sprintf("items%dMaxItems", level), 
&setMaxItems{itemsValidations{items}, rxf(rxMaxItemsFmt, itemsPrefix)}), - newSingleLineTagParser(fmt.Sprintf("items%dUnique", level), &setUnique{itemsValidations{items}, rxf(rxUniqueFmt, itemsPrefix)}), - newSingleLineTagParser(fmt.Sprintf("items%dEnum", level), &setEnum{itemsValidations{items}, rxf(rxEnumFmt, itemsPrefix)}), - newSingleLineTagParser(fmt.Sprintf("items%dDefault", level), &setDefault{&items.SimpleSchema, itemsValidations{items}, rxf(rxDefaultFmt, itemsPrefix)}), - newSingleLineTagParser(fmt.Sprintf("items%dExample", level), &setExample{&items.SimpleSchema, itemsValidations{items}, rxf(rxExampleFmt, itemsPrefix)}), - } -} - -// parseArrayTypes recursively builds tag parsers for nested array types. -func parseArrayTypes(sp *sectionedParser, name string, expr ast.Expr, items *spec.Items, level int) ([]tagParser, error) { - if items == nil { - return []tagParser{}, nil - } - switch iftpe := expr.(type) { - case *ast.ArrayType: - eleTaggers := itemsTaggers(items, level) - sp.taggers = append(eleTaggers, sp.taggers...) - return parseArrayTypes(sp, name, iftpe.Elt, items.Items, level+1) - case *ast.SelectorExpr: - return parseArrayTypes(sp, name, iftpe.Sel, items.Items, level+1) - case *ast.Ident: - taggers := []tagParser{} - if iftpe.Obj == nil { - taggers = itemsTaggers(items, level) - } - otherTaggers, err := parseArrayTypes(sp, name, expr, items.Items, level+1) - if err != nil { - return nil, err - } - return append(taggers, otherTaggers...), nil - case *ast.StarExpr: - return parseArrayTypes(sp, name, iftpe.X, items, level) - default: - return nil, fmt.Errorf("unknown field type ele for %q: %w", name, ErrCodeScan) - } -} - -// setupRefParamTaggers configures taggers for a parameter that is a $ref. 
-func setupRefParamTaggers(sp *sectionedParser, ps *spec.Parameter, skipExt, debug bool) { - sp.taggers = []tagParser{ - newSingleLineTagParser("in", &matchOnlyParam{ps, rxIn}), - newSingleLineTagParser("required", &matchOnlyParam{ps, rxRequired}), - newMultiLineTagParser("Extensions", newSetExtensions(spExtensionsSetter(ps, skipExt), debug), true), - } -} - -// setupInlineParamTaggers configures taggers for a fully-defined inline parameter. -func setupInlineParamTaggers(sp *sectionedParser, ps *spec.Parameter, name string, afld *ast.Field, skipExt, debug bool) error { - sp.taggers = []tagParser{ - newSingleLineTagParser("in", &matchOnlyParam{ps, rxIn}), - newSingleLineTagParser("maximum", &setMaximum{paramValidations{ps}, rxf(rxMaximumFmt, "")}), - newSingleLineTagParser("minimum", &setMinimum{paramValidations{ps}, rxf(rxMinimumFmt, "")}), - newSingleLineTagParser("multipleOf", &setMultipleOf{paramValidations{ps}, rxf(rxMultipleOfFmt, "")}), - newSingleLineTagParser("minLength", &setMinLength{paramValidations{ps}, rxf(rxMinLengthFmt, "")}), - newSingleLineTagParser("maxLength", &setMaxLength{paramValidations{ps}, rxf(rxMaxLengthFmt, "")}), - newSingleLineTagParser("pattern", &setPattern{paramValidations{ps}, rxf(rxPatternFmt, "")}), - newSingleLineTagParser("collectionFormat", &setCollectionFormat{paramValidations{ps}, rxf(rxCollectionFormatFmt, "")}), - newSingleLineTagParser("minItems", &setMinItems{paramValidations{ps}, rxf(rxMinItemsFmt, "")}), - newSingleLineTagParser("maxItems", &setMaxItems{paramValidations{ps}, rxf(rxMaxItemsFmt, "")}), - newSingleLineTagParser("unique", &setUnique{paramValidations{ps}, rxf(rxUniqueFmt, "")}), - newSingleLineTagParser("enum", &setEnum{paramValidations{ps}, rxf(rxEnumFmt, "")}), - newSingleLineTagParser("default", &setDefault{&ps.SimpleSchema, paramValidations{ps}, rxf(rxDefaultFmt, "")}), - newSingleLineTagParser("example", &setExample{&ps.SimpleSchema, paramValidations{ps}, rxf(rxExampleFmt, "")}), - 
newSingleLineTagParser("required", &setRequiredParam{ps}), - newMultiLineTagParser("Extensions", newSetExtensions(spExtensionsSetter(ps, skipExt), debug), true), - } - - // check if this is a primitive, if so parse the validations from the - // doc comments of the slice declaration. - if ftped, ok := afld.Type.(*ast.ArrayType); ok { - taggers, err := parseArrayTypes(sp, name, ftped.Elt, ps.Items, 0) - if err != nil { - return err - } - sp.taggers = append(taggers, sp.taggers...) - } - - return nil -} - -// setupResponseHeaderTaggers configures taggers for a response header field. -func setupResponseHeaderTaggers(sp *sectionedParser, ps *spec.Header, name string, afld *ast.Field) error { - sp.taggers = []tagParser{ - newSingleLineTagParser("maximum", &setMaximum{headerValidations{ps}, rxf(rxMaximumFmt, "")}), - newSingleLineTagParser("minimum", &setMinimum{headerValidations{ps}, rxf(rxMinimumFmt, "")}), - newSingleLineTagParser("multipleOf", &setMultipleOf{headerValidations{ps}, rxf(rxMultipleOfFmt, "")}), - newSingleLineTagParser("minLength", &setMinLength{headerValidations{ps}, rxf(rxMinLengthFmt, "")}), - newSingleLineTagParser("maxLength", &setMaxLength{headerValidations{ps}, rxf(rxMaxLengthFmt, "")}), - newSingleLineTagParser("pattern", &setPattern{headerValidations{ps}, rxf(rxPatternFmt, "")}), - newSingleLineTagParser("collectionFormat", &setCollectionFormat{headerValidations{ps}, rxf(rxCollectionFormatFmt, "")}), - newSingleLineTagParser("minItems", &setMinItems{headerValidations{ps}, rxf(rxMinItemsFmt, "")}), - newSingleLineTagParser("maxItems", &setMaxItems{headerValidations{ps}, rxf(rxMaxItemsFmt, "")}), - newSingleLineTagParser("unique", &setUnique{headerValidations{ps}, rxf(rxUniqueFmt, "")}), - newSingleLineTagParser("enum", &setEnum{headerValidations{ps}, rxf(rxEnumFmt, "")}), - newSingleLineTagParser("default", &setDefault{&ps.SimpleSchema, headerValidations{ps}, rxf(rxDefaultFmt, "")}), - newSingleLineTagParser("example", 
&setExample{&ps.SimpleSchema, headerValidations{ps}, rxf(rxExampleFmt, "")}), - } - - // check if this is a primitive, if so parse the validations from the - // doc comments of the slice declaration. - if ftped, ok := afld.Type.(*ast.ArrayType); ok { - taggers, err := parseArrayTypes(sp, name, ftped.Elt, ps.Items, 0) - if err != nil { - return err - } - sp.taggers = append(taggers, sp.taggers...) - } - - return nil -} From 04a9b65ff3aba0160f831adc0bcb36e48686365b Mon Sep 17 00:00:00 2001 From: Frederic BIDON Date: Sun, 19 Apr 2026 18:06:19 +0200 Subject: [PATCH 2/2] chore: backport to refactored layout commit "swagger:type array by falling through to underlying type resolution" Signed-off-by: Frederic BIDON --- .../enhancements/swagger-type-array/types.go | 62 +++++++++ .../enhancements_swagger_type_array.json | 54 ++++++++ .../golden/go123_special_spec.json | 124 ------------------ internal/builders/schema/schema.go | 40 +++++- .../integration/coverage_enhancements_test.go | 17 +++ internal/integration/schema_special_test.go | 11 ++ internal/scanner/index.go | 2 +- internal/scanner/index_test.go | 14 +- internal/scanner/scan_context_test.go | 4 +- 9 files changed, 188 insertions(+), 140 deletions(-) create mode 100644 fixtures/enhancements/swagger-type-array/types.go create mode 100644 fixtures/integration/golden/enhancements_swagger_type_array.json diff --git a/fixtures/enhancements/swagger-type-array/types.go b/fixtures/enhancements/swagger-type-array/types.go new file mode 100644 index 0000000..df8c36b --- /dev/null +++ b/fixtures/enhancements/swagger-type-array/types.go @@ -0,0 +1,62 @@ +// SPDX-FileCopyrightText: Copyright 2015-2025 go-swagger maintainers +// SPDX-License-Identifier: Apache-2.0 + +// Package swagger_type_array exercises the schemaBuilder fallthrough path +// added by #11: when swagger:type is set to a value not recognised by +// swaggerSchemaForType (e.g. 
"array"), the builder falls through to the +// underlying type resolution rather than silently emitting an empty schema. +package swagger_type_array + +// NamedStringSlice is a named slice type carrying swagger:type array. +// With the fix it expands to {type: "array", items: {type: "string"}} via +// buildNamedSlice, preserving the field description on the enclosing +// struct instead of dropping it behind a $ref. +// +// swagger:type array +type NamedStringSlice []string + +// NamedFixedArray is a named fixed-length array type carrying swagger:type +// array. The fix makes buildNamedArray inline it rather than fall through +// to $ref. +// +// swagger:type array +type NamedFixedArray [5]string + +// StructWithBadType is a struct whose swagger:type is set to an +// unrecognised value. The fix ensures buildNamedStruct falls through to +// makeRef so the property is still serialisable — the key assertion is +// that the referenced schema is not empty. +// +// swagger:type badvalue +// swagger:model structWithBadType +type StructWithBadType struct { + Name string `json:"name"` +} + +// ObjectStruct carries swagger:type object (unsupported by +// swaggerSchemaForType for structs). The fix inlines the struct as +// type:object rather than producing an empty schema. +// +// swagger:type object +// swagger:model objectStruct +type ObjectStruct struct { + Key string `json:"key"` + Value string `json:"value"` +} + +// Payload aggregates all four cases so a full scan walks every branch. +// +// swagger:model payload +type Payload struct { + // Tags for this item. + Tags NamedStringSlice `json:"tags"` + + // Labels for this item. + Labels NamedFixedArray `json:"labels"` + + // The nested struct with an unsupported swagger:type. + Nested StructWithBadType `json:"nested"` + + // Headers for this request. 
+ Headers ObjectStruct `json:"headers"` +} diff --git a/fixtures/integration/golden/enhancements_swagger_type_array.json b/fixtures/integration/golden/enhancements_swagger_type_array.json new file mode 100644 index 0000000..b9d971a --- /dev/null +++ b/fixtures/integration/golden/enhancements_swagger_type_array.json @@ -0,0 +1,54 @@ +{ + "swagger": "2.0", + "paths": {}, + "definitions": { + "objectStruct": { + "type": "object", + "x-go-name": "ObjectStruct", + "x-go-package": "github.com/go-openapi/codescan/fixtures/enhancements/swagger-type-array" + }, + "payload": { + "type": "object", + "title": "Payload aggregates all four cases so a full scan walks every branch.", + "properties": { + "headers": { + "description": "Headers for this request.", + "type": "object", + "x-go-name": "Headers" + }, + "labels": { + "description": "Labels for this item.", + "type": "array", + "items": { + "type": "string" + }, + "x-go-name": "Labels" + }, + "nested": { + "type": "object", + "properties": { + "name": { + "type": "string", + "x-go-name": "Name" + } + }, + "x-go-name": "Nested" + }, + "tags": { + "description": "Tags for this item.", + "type": "array", + "items": { + "type": "string" + }, + "x-go-name": "Tags" + } + }, + "x-go-name": "Payload", + "x-go-package": "github.com/go-openapi/codescan/fixtures/enhancements/swagger-type-array" + }, + "structWithBadType": { + "x-go-name": "StructWithBadType", + "x-go-package": "github.com/go-openapi/codescan/fixtures/enhancements/swagger-type-array" + } + } +} \ No newline at end of file diff --git a/fixtures/integration/golden/go123_special_spec.json b/fixtures/integration/golden/go123_special_spec.json index 6a50e41..fb2e108 100644 --- a/fixtures/integration/golden/go123_special_spec.json +++ b/fixtures/integration/golden/go123_special_spec.json @@ -2,12 +2,6 @@ "swagger": "2.0", "paths": {}, "definitions": { - "ChanDir": { - "type": "integer", - "format": "int64", - "title": "ChanDir represents a channel type's direction.", - 
"x-go-package": "reflect" - }, "Duration": { "description": "A Duration represents the elapsed time between two instants\nas an int64 nanosecond count. The representation limits the\nlargest representable duration to approximately 290 years.", "type": "integer", @@ -24,124 +18,6 @@ }, "x-go-package": "github.com/go-openapi/codescan/fixtures/goparsing/go123/special" }, - "Kind": { - "description": "The zero Kind is not a valid kind.", - "type": "integer", - "format": "uint64", - "title": "A Kind represents the specific kind of type that a [Type] represents.", - "x-go-package": "reflect" - }, - "Type": { - "description": "Not all methods apply to all kinds of types. Restrictions,\nif any, are noted in the documentation for each method.\nUse the Kind method to find out the kind of type before\ncalling kind-specific methods. Calling a method\ninappropriate to the kind of type causes a run-time panic.\n\nType values are comparable, such as with the == operator,\nso they can be used as map keys.\nTwo Type values are equal if they represent identical types.", - "type": "object", - "title": "Type is the representation of a Go type.", - "properties": { - "Align": { - "description": "Align returns the alignment in bytes of a value of\nthis type when allocated in memory.", - "type": "integer", - "format": "int64" - }, - "Bits": { - "description": "Bits returns the size of the type in bits.\nIt panics if the type's Kind is not one of the\nsized or unsized Int, Uint, Float, or Complex kinds.", - "type": "integer", - "format": "int64" - }, - "CanSeq": { - "description": "CanSeq reports whether a [Value] with this type can be iterated over using [Value.Seq].", - "type": "boolean" - }, - "CanSeq2": { - "description": "CanSeq2 reports whether a [Value] with this type can be iterated over using [Value.Seq2].", - "type": "boolean" - }, - "ChanDir": { - "$ref": "#/definitions/ChanDir" - }, - "Comparable": { - "description": "Comparable reports whether values of this type are 
comparable.\nEven if Comparable returns true, the comparison may still panic.\nFor example, values of interface type are comparable,\nbut the comparison will panic if their dynamic type is not comparable.", - "type": "boolean" - }, - "Elem": { - "$ref": "#/definitions/Type" - }, - "FieldAlign": { - "description": "FieldAlign returns the alignment in bytes of a value of\nthis type when used as a field in a struct.", - "type": "integer", - "format": "int64" - }, - "Fields": { - "description": "Fields returns an iterator over each struct field for struct type t. The sequence is\nequivalent to calling Field successively for each index i in the range [0, NumField()).\nIt panics if the type's Kind is not Struct." - }, - "Ins": { - "description": "Ins returns an iterator over each input parameter of function type t. The sequence\nis equivalent to calling In successively for each index i in the range [0, NumIn()).\nIt panics if the type's Kind is not Func." - }, - "IsVariadic": { - "description": "IsVariadic reports whether a function type's final input parameter\nis a \"...\" parameter. If so, t.In(t.NumIn() - 1) returns the parameter's\nimplicit actual type []T.\n\nFor concreteness, if t represents func(x int, y ... float64), then\n\nt.NumIn() == 2\nt.In(0) is the reflect.Type for \"int\"\nt.In(1) is the reflect.Type for \"[]float64\"\nt.IsVariadic() == true\n\nIsVariadic panics if the type's Kind is not Func.", - "type": "boolean" - }, - "Key": { - "$ref": "#/definitions/Type" - }, - "Kind": { - "$ref": "#/definitions/Kind" - }, - "Len": { - "description": "Len returns an array type's length.\nIt panics if the type's Kind is not Array.", - "type": "integer", - "format": "int64" - }, - "Methods": { - "description": "Methods returns an iterator over each method in the type's method set. The sequence is\nequivalent to calling Method successively for each index i in the range [0, NumMethod())." 
- }, - "Name": { - "description": "Name returns the type's name within its package for a defined type.\nFor other (non-defined) types it returns the empty string.", - "type": "string" - }, - "NumField": { - "description": "NumField returns a struct type's field count.\nIt panics if the type's Kind is not Struct.", - "type": "integer", - "format": "int64" - }, - "NumIn": { - "description": "NumIn returns a function type's input parameter count.\nIt panics if the type's Kind is not Func.", - "type": "integer", - "format": "int64" - }, - "NumMethod": { - "description": "NumMethod returns the number of methods accessible using Method.\n\nFor a non-interface type, it returns the number of exported methods.\n\nFor an interface type, it returns the number of exported and unexported methods.", - "type": "integer", - "format": "int64" - }, - "NumOut": { - "description": "NumOut returns a function type's output parameter count.\nIt panics if the type's Kind is not Func.", - "type": "integer", - "format": "int64" - }, - "Outs": { - "description": "Outs returns an iterator over each output parameter of function type t. The sequence\nis equivalent to calling Out successively for each index i in the range [0, NumOut()).\nIt panics if the type's Kind is not Func." 
- }, - "PkgPath": { - "description": "PkgPath returns a defined type's package path, that is, the import path\nthat uniquely identifies the package, such as \"encoding/base64\".\nIf the type was predeclared (string, error) or not defined (*T, struct{},\n[]int, or A where A is an alias for a non-defined type), the package path\nwill be the empty string.", - "type": "string" - }, - "Size": { - "description": "Size returns the number of bytes needed to store\na value of the given type; it is analogous to unsafe.Sizeof.", - "type": "integer", - "format": "uint64" - }, - "String": { - "description": "String returns a string representation of the type.\nThe string representation may use shortened package names\n(e.g., base64 instead of \"encoding/base64\") and is not\nguaranteed to be unique among types. To test for type identity,\ncompare the Types directly.", - "type": "string" - } - }, - "x-go-package": "reflect" - }, - "Value": { - "description": "Not all methods apply to all kinds of values. Restrictions,\nif any, are noted in the documentation for each method.\nUse the Kind method to find out the kind of value before\ncalling kind-specific methods. 
Calling a method\ninappropriate to the kind of type causes a run time panic.\n\nThe zero Value represents no value.\nIts [Value.IsValid] method returns false, its Kind method returns [Invalid],\nits String method returns \"\u003cinvalid Value\u003e\", and all other methods panic.\nMost functions and methods never return an invalid value.\nIf one does, its documentation states the conditions explicitly.\n\nA Value can be used concurrently by multiple goroutines provided that\nthe underlying Go value can be used concurrently for the equivalent\ndirect operations.\n\nTo compare two Values, compare the results of the Interface method.\nUsing == on two Values does not compare the underlying values\nthey represent.", - "type": "object", - "title": "Value is the reflection interface to a Go value.", - "x-go-package": "reflect" - }, "generic_constraint": { "allOf": [ { diff --git a/internal/builders/schema/schema.go b/internal/builders/schema/schema.go index bf88b7a..ec641e7 100644 --- a/internal/builders/schema/schema.go +++ b/internal/builders/schema/schema.go @@ -346,10 +346,15 @@ func (s *Builder) buildNamedType(titpe *types.Named, tgt ifaces.SwaggerTypable) cmt = new(ast.CommentGroup) } - if typeName, ok := parsers.TypeName(cmt); ok { - _ = resolvers.SwaggerSchemaForType(typeName, tgt) - - return nil + if tn, ok := parsers.TypeName(cmt); ok { + if err := resolvers.SwaggerSchemaForType(tn, tgt); err == nil { + return nil + } + // For unsupported swagger:type values (e.g., "array"), fall through + // to underlying type resolution so the full schema (including items + // for slices) is properly built. Build directly from the underlying + // type to bypass the named-type $ref creation. 
+ return s.buildFromType(titpe.Underlying(), tgt) } if s.decl.Spec.Assign.IsValid() { @@ -478,9 +483,12 @@ func (s *Builder) buildNamedStruct(tio *types.TypeName, cmt *ast.CommentGroup, t return nil } - if typeName, ok := parsers.TypeName(cmt); ok { - _ = resolvers.SwaggerSchemaForType(typeName, tgt) - return nil + if tn, ok := parsers.TypeName(cmt); ok { + if err := resolvers.SwaggerSchemaForType(tn, tgt); err == nil { + return nil + } + // For unsupported swagger:type values, fall through to makeRef + // rather than silently returning an empty schema. } return s.makeRef(decl, tgt) @@ -502,6 +510,14 @@ func (s *Builder) buildNamedArray(tio *types.TypeName, cmt *ast.CommentGroup, el tgt.Items().Typed("string", sfnm) return nil } + // When swagger:type is set to an unsupported value (e.g., "array"), + // skip the $ref and inline the array schema with proper items type. + if tn, ok := parsers.TypeName(cmt); ok { + if err := resolvers.SwaggerSchemaForType(tn, tgt); err != nil { + return s.buildFromType(elem, tgt.Items()) + } + return nil + } if decl, ok := s.ctx.FindModel(tio.Pkg().Path(), tio.Name()); ok { return s.makeRef(decl, tgt) } @@ -519,6 +535,16 @@ func (s *Builder) buildNamedSlice(tio *types.TypeName, cmt *ast.CommentGroup, el tgt.Items().Typed("string", sfnm) return nil } + // When swagger:type is set to an unsupported value (e.g., "array"), + // skip the $ref and inline the slice schema with proper items type. + // This preserves the field's description that would be lost with $ref. + if tn, ok := parsers.TypeName(cmt); ok { + if err := resolvers.SwaggerSchemaForType(tn, tgt); err != nil { + // Unsupported type name (e.g., "array") — build inline from element type. 
+ return s.buildFromType(elem, tgt.Items()) + } + return nil + } if decl, ok := s.ctx.FindModel(tio.Pkg().Path(), tio.Name()); ok { return s.makeRef(decl, tgt) } diff --git a/internal/integration/coverage_enhancements_test.go b/internal/integration/coverage_enhancements_test.go index 9ba5be7..a9b93f6 100644 --- a/internal/integration/coverage_enhancements_test.go +++ b/internal/integration/coverage_enhancements_test.go @@ -164,6 +164,23 @@ func TestCoverage_NamedBasic(t *testing.T) { scantest.CompareOrDumpJSON(t, doc, "enhancements_named_basic.json") } +// TestCoverage_SwaggerTypeArray exercises the fallthrough introduced by +// upstream #11: when swagger:type is set to a value not recognised by +// SwaggerSchemaForType (e.g. "array"), the builder resolves the underlying +// type instead of emitting an empty schema. Covers buildNamedSlice, +// buildNamedArray and buildNamedStruct fallthrough branches. +func TestCoverage_SwaggerTypeArray(t *testing.T) { + doc, err := codescan.Run(&codescan.Options{ + Packages: []string{"./enhancements/swagger-type-array/..."}, + WorkDir: scantest.FixturesDir(), + ScanModels: true, + }) + require.NoError(t, err) + require.NotNil(t, doc) + + scantest.CompareOrDumpJSON(t, doc, "enhancements_swagger_type_array.json") +} + func TestCoverage_RefAliasChain(t *testing.T) { doc, err := codescan.Run(&codescan.Options{ Packages: []string{"./enhancements/ref-alias-chain/..."}, diff --git a/internal/integration/schema_special_test.go b/internal/integration/schema_special_test.go index 89319ec..4848507 100644 --- a/internal/integration/schema_special_test.go +++ b/internal/integration/schema_special_test.go @@ -186,6 +186,17 @@ func TestSpecialSchemas(t *testing.T) { }) }) + // Strip stdlib-dependent definitions whose shape drifts across Go + // versions (e.g. reflect.Type grew iterator methods in Go 1.26). 
The + // sub-tests above already pin the scanner behaviour (the fields resolve + // to a $ref with x-go-package: reflect); the snapshot only needs to + // cover the stable, fixture-local parts of the spec. + for name, def := range sp.Definitions { + if pkg, ok := def.Extensions.GetString("x-go-package"); ok && pkg == "reflect" { + delete(sp.Definitions, name) + } + } + scantest.CompareOrDumpJSON(t, sp, "go123_special_spec.json") } diff --git a/internal/scanner/index.go b/internal/scanner/index.go index 56bf35c..f189893 100644 --- a/internal/scanner/index.go +++ b/internal/scanner/index.go @@ -321,7 +321,7 @@ func (a *TypeIndex) detectNodes(file *ast.File) (node, error) { n |= routeNode case "operation": n |= operationNode - case "model": //nolint:goconst // annotation keyword matched from swagger comment + case "model": // annotation keyword matched from swagger comment. n |= modelNode if err := checkStructConflict(&seenStruct, annotation, cline.Text); err != nil { return 0, err diff --git a/internal/scanner/index_test.go b/internal/scanner/index_test.go index 1d5114d..7e73287 100644 --- a/internal/scanner/index_test.go +++ b/internal/scanner/index_test.go @@ -15,6 +15,8 @@ import ( "golang.org/x/tools/go/packages" ) +const modelAnnotation = "model" + func TestShouldAcceptTag(t *testing.T) { tagTests := []struct { tags []string @@ -206,24 +208,24 @@ func TestDetectNodes_AllAnnotationTypes(t *testing.T) { func TestCheckStructConflict(t *testing.T) { t.Run("no conflict with same annotation", func(t *testing.T) { seen := "" - err := checkStructConflict(&seen, "model", "// swagger:model Foo") + err := checkStructConflict(&seen, modelAnnotation, "// swagger:model Foo") require.NoError(t, err) - assert.EqualT(t, "model", seen) + assert.EqualT(t, modelAnnotation, seen) // Same annotation again: no conflict. 
- err = checkStructConflict(&seen, "model", "// swagger:model Bar") + err = checkStructConflict(&seen, modelAnnotation, "// swagger:model Bar") require.NoError(t, err) }) t.Run("conflict with different struct annotations", func(t *testing.T) { - seen := "model" + seen := modelAnnotation err := checkStructConflict(&seen, "parameters", "// swagger:parameters myOp") require.Error(t, err) assert.True(t, errors.Is(err, ErrScanner)) }) t.Run("model then response conflicts", func(t *testing.T) { - seen := "model" + seen := modelAnnotation err := checkStructConflict(&seen, "response", "// swagger:response myResp") require.Error(t, err) assert.True(t, errors.Is(err, ErrScanner)) @@ -231,7 +233,7 @@ func TestCheckStructConflict(t *testing.T) { t.Run("parameters then model conflicts", func(t *testing.T) { seen := "parameters" - err := checkStructConflict(&seen, "model", "// swagger:model Foo") + err := checkStructConflict(&seen, modelAnnotation, "// swagger:model Foo") require.Error(t, err) assert.True(t, errors.Is(err, ErrScanner)) }) diff --git a/internal/scanner/scan_context_test.go b/internal/scanner/scan_context_test.go index f33a3aa..a4631f8 100644 --- a/internal/scanner/scan_context_test.go +++ b/internal/scanner/scan_context_test.go @@ -19,7 +19,7 @@ func TestApplication_LoadCode(t *testing.T) { sctx := loadClassificationPkgsCtx(t) require.NotNil(t, sctx) require.NotNil(t, sctx.app) - require.Len(t, sctx.app.Models, 39) + require.Len(t, sctx.app.Models, 45) require.Len(t, sctx.app.Meta, 1) require.Len(t, sctx.app.Routes, 7) require.Empty(t, sctx.app.Operations) @@ -118,7 +118,7 @@ func TestScanCtx_Models(t *testing.T) { for range sctx.Models() { count++ } - assert.EqualT(t, 39, count) + assert.EqualT(t, 45, count) } func TestScanCtx_ExtraModels(t *testing.T) {