diff --git a/.protoc_version b/.protoc_version index dfa88c8e..a0d6856d 100644 --- a/.protoc_version +++ b/.protoc_version @@ -1 +1 @@ -26.1 +27.0 diff --git a/Makefile b/Makefile index 52528630..206d9e0e 100644 --- a/Makefile +++ b/Makefile @@ -82,7 +82,7 @@ lintfix: $(BIN)/golangci-lint ## Automatically fix some lint errors cd internal/benchmarks && $(BIN)/golangci-lint run --fix .PHONY: generate -generate: $(BIN)/license-header $(BIN)/goyacc test-descriptors ## Regenerate code and licenses +generate: $(BIN)/license-header $(BIN)/goyacc test-descriptors ext-features-descriptors ## Regenerate code and licenses PATH="$(BIN)$(PATH_SEP)$(PATH)" $(GO) generate ./... @# We want to operate on a list of modified and new files, excluding @# deleted and ignored files. git-ls-files can't do this alone. comm -23 takes @@ -134,6 +134,13 @@ $(PROTOC): $(CACHE)/protoc-$(PROTOC_VERSION).zip unzip -o -q $< -d $(PROTOC_DIR) && \ touch $@ +.PHONY: wellknownimports +wellknownimports: $(PROTOC) $(sort $(wildcard $(PROTOC_DIR)/include/google/protobuf/*.proto)) $(sort $(wildcard $(PROTOC_DIR)/include/google/protobuf/*/*.proto)) + @rm -rf wellknownimports/google 2>/dev/null && true + @mkdir -p wellknownimports/google/protobuf/compiler + cp -R $(PROTOC_DIR)/include/google/protobuf/*.proto wellknownimports/google/protobuf + cp -R $(PROTOC_DIR)/include/google/protobuf/compiler/*.proto wellknownimports/google/protobuf/compiler + internal/testdata/all.protoset: $(PROTOC) $(sort $(wildcard internal/testdata/*.proto)) cd $(@D) && $(PROTOC) --descriptor_set_out=$(@F) --include_imports -I. 
$(filter-out protoc,$(^F)) @@ -149,11 +156,11 @@ internal/testdata/desc_test_proto3_optional.protoset: $(PROTOC) internal/testdat internal/testdata/descriptor_impl_tests.protoset: $(PROTOC) internal/testdata/desc_test2.proto internal/testdata/desc_test_complex.proto internal/testdata/desc_test_defaults.proto internal/testdata/desc_test_proto3.proto internal/testdata/desc_test_proto3_optional.proto cd $(@D) && $(PROTOC) --descriptor_set_out=$(@F) --include_imports -I. $(filter-out protoc,$(^F)) -internal/testdata/descriptor_editions_impl_tests.protoset: $(PROTOC) internal/testdata/editions/all_default_features.proto internal/testdata/editions/features_with_overrides.proto - cd $(@D)/editions && $(PROTOC) --experimental_editions --descriptor_set_out=../$(@F) --include_imports -I. $(filter-out protoc,$(^F)) +internal/testdata/descriptor_editions_impl_tests.protoset: $(PROTOC) internal/testdata/editions/all_default_features.proto internal/testdata/editions/features_with_overrides.proto internal/testdata/editions/file_default_delimited.proto + cd $(@D)/editions && $(PROTOC) --descriptor_set_out=../$(@F) --include_imports -I. $(filter-out protoc,$(^F)) internal/testdata/editions/all.protoset: $(PROTOC) $(sort $(wildcard internal/testdata/editions/*.proto)) - cd $(@D) && $(PROTOC) --experimental_editions --descriptor_set_out=$(@F) --include_imports -I. $(filter-out protoc,$(^F)) + cd $(@D) && $(PROTOC) --descriptor_set_out=$(@F) --include_imports -I. $(filter-out protoc,$(^F)) internal/testdata/source_info.protoset: $(PROTOC) internal/testdata/desc_test_options.proto internal/testdata/desc_test_comments.proto internal/testdata/desc_test_complex.proto cd $(@D) && $(PROTOC) --descriptor_set_out=$(@F) --include_source_info -I. $(filter-out protoc,$(^F)) @@ -168,7 +175,7 @@ internal/testdata/options/test_proto3.protoset: $(PROTOC) internal/testdata/opti cd $(@D) && $(PROTOC) --descriptor_set_out=$(@F) -I. 
$(filter-out protoc,$(^F)) internal/testdata/options/test_editions.protoset: $(PROTOC) internal/testdata/options/test_editions.proto - cd $(@D) && $(PROTOC) --experimental_editions --descriptor_set_out=$(@F) -I. $(filter-out protoc,$(^F)) + cd $(@D) && $(PROTOC) --descriptor_set_out=$(@F) -I. $(filter-out protoc,$(^F)) .PHONY: test-descriptors test-descriptors: internal/testdata/all.protoset @@ -183,3 +190,11 @@ test-descriptors: internal/testdata/options/options.protoset test-descriptors: internal/testdata/options/test.protoset test-descriptors: internal/testdata/options/test_proto3.protoset test-descriptors: internal/testdata/options/test_editions.protoset + +internal/featuresext/cpp_features.protoset: $(PROTOC) + cd $(@D) && $(PROTOC) --descriptor_set_out=$(@F) google/protobuf/cpp_features.proto +internal/featuresext/java_features.protoset: $(PROTOC) + cd $(@D) && $(PROTOC) --descriptor_set_out=$(@F) google/protobuf/java_features.proto + +.PHONY: ext-features-descriptors +ext-features-descriptors: internal/featuresext/cpp_features.protoset internal/featuresext/java_features.protoset diff --git a/editionstesting/editionstesting.go b/editionstesting/editionstesting.go deleted file mode 100644 index 53450625..00000000 --- a/editionstesting/editionstesting.go +++ /dev/null @@ -1,35 +0,0 @@ -// Copyright 2020-2024 Buf Technologies, Inc. -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. 
- -// Package editionstesting is a temporary package that allows users to test -// functionality related to Protobuf editions while that support is not yet -// complete. Once that support is complete, this package will be removed. -package editionstesting - -import "github.com/bufbuild/protocompile/internal/editions" - -// AllowEditions can be called to opt into this repo's support for Protobuf -// editions. This is primarily intended for testing. This repo's support -// for editions is not yet complete. -// -// Once the implementation of editions is complete, this function will be -// REMOVED and editions will be allowed by all usages of this repo. -// -// The internal flag that this touches is not synchronized and calling this -// function is not thread-safe. So this must be called prior to using the -// compiler, ideally from an init() function or as one of the first things -// done from a main() function. -func AllowEditions() { - editions.AllowEditions = true -} diff --git a/go.mod b/go.mod index 02dc0fe2..a2092485 100644 --- a/go.mod +++ b/go.mod @@ -6,7 +6,7 @@ require ( github.com/google/go-cmp v0.6.0 github.com/stretchr/testify v1.9.0 golang.org/x/sync v0.7.0 - google.golang.org/protobuf v1.33.1-0.20240408130810-98873a205002 + google.golang.org/protobuf v1.34.2-0.20240529085009-ca837e5c658b ) require ( diff --git a/go.sum b/go.sum index 9dce9b96..64edbefa 100644 --- a/go.sum +++ b/go.sum @@ -8,8 +8,8 @@ github.com/stretchr/testify v1.9.0 h1:HtqpIVDClZ4nwg75+f6Lvsy/wHu+3BoSGCbBAcpTsT github.com/stretchr/testify v1.9.0/go.mod h1:r2ic/lqez/lEtzL7wO/rwa5dbSLXVDPFyf8C91i36aY= golang.org/x/sync v0.7.0 h1:YsImfSBoP9QPYL0xyKJPq0gcaJdG3rInoqxTWbfQu9M= golang.org/x/sync v0.7.0/go.mod h1:Czt+wKu1gCyEFDUtn0jG5QVvpJ6rzVqr5aXyt9drQfk= -google.golang.org/protobuf v1.33.1-0.20240408130810-98873a205002 h1:V7Da7qt0MkY3noVANIMVBk28nOnijADeOR3i5Hcvpj4= -google.golang.org/protobuf v1.33.1-0.20240408130810-98873a205002/go.mod h1:c6P6GXX6sHbq/GpV6MGZEdwhWPcYBgnhAHhKbcUYpos= 
+google.golang.org/protobuf v1.34.2-0.20240529085009-ca837e5c658b h1:lkoSgiT5AF1nbA/WLFPX1H0fvBiNGQekpG17/3aIsck= +google.golang.org/protobuf v1.34.2-0.20240529085009-ca837e5c658b/go.mod h1:qYOHts0dSfpeUzUFpOMr/WGzszTmLH+DiWniOlNbLDw= gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405 h1:yhCVgyC4o1eVCa2tZl7eS0r+SDo693bJlVdllGtEeKM= gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= gopkg.in/yaml.v3 v3.0.1 h1:fxVm/GzAzEWqLHuvctI91KS9hhNmmWOoWu0XTYJS7CA= diff --git a/go.work.sum b/go.work.sum index 869cf4a6..d977cf05 100644 --- a/go.work.sum +++ b/go.work.sum @@ -20,6 +20,7 @@ github.com/BurntSushi/toml v0.3.1 h1:WXkYYl6Yr3qBf1K79EBnL4mak0OimBfB0XUf9Vl28OQ github.com/OneOfOne/xxhash v1.2.2/go.mod h1:HSdplMjZKSmBqAxg5vPj2TmRDmfkzw+cTzAElWljhcU= github.com/antihax/optional v1.0.0/go.mod h1:uupD/76wgC+ih3iEmQUL+0Ugr19nfwCT1kdvxnR2qWY= github.com/bufbuild/protocompile v0.2.1-0.20230123224550-da57cd758c2f/go.mod h1:tleDrpPTlLUVmgnEoN6qBliKWqJaZFJXqZdFjTd+ocU= +github.com/bufbuild/protocompile v0.13.0/go.mod h1:dr++fGGeMPWHv7jPeT06ZKukm45NJscd7rUxQVzEKRk= github.com/census-instrumentation/opencensus-proto v0.2.1 h1:glEXhBS5PSLLv4IXzLA5yPRVX4bilULVyxxbrfOtDAk= github.com/cespare/xxhash v1.1.0 h1:a6HrQnmkObjyL+Gs60czilIUGqrzKutQD6XZog3p+ko= github.com/cespare/xxhash v1.1.0/go.mod h1:XrSqR1VqqWfGrhpAt58auRo0WTKS1nRRg3ghfAqPWnc= diff --git a/internal/benchmarks/go.mod b/internal/benchmarks/go.mod index e43c2e0d..7257e407 100644 --- a/internal/benchmarks/go.mod +++ b/internal/benchmarks/go.mod @@ -3,11 +3,11 @@ module github.com/bufbuild/protocompile/internal/benchmarks go 1.19 require ( - github.com/bufbuild/protocompile v0.10.0 + github.com/bufbuild/protocompile v0.13.0 github.com/igrmk/treemap/v2 v2.0.1 github.com/jhump/protoreflect v1.14.1 // MUST NOT be updated to v1.15 or higher github.com/stretchr/testify v1.9.0 - google.golang.org/protobuf v1.33.1-0.20240408130810-98873a205002 + google.golang.org/protobuf 
v1.34.1 ) require golang.org/x/sync v0.7.0 diff --git a/internal/benchmarks/go.sum b/internal/benchmarks/go.sum index f64b2a9b..c29746aa 100644 --- a/internal/benchmarks/go.sum +++ b/internal/benchmarks/go.sum @@ -1,7 +1,7 @@ cloud.google.com/go v0.26.0/go.mod h1:aQUYkXzVsufM+DwF1aE+0xfcU+56JwCaLick0ClmMTw= github.com/BurntSushi/toml v0.3.1/go.mod h1:xHWCNGjB5oqiDr8zfno3MHue2Ht5sIBksp03qcyfWMU= -github.com/bufbuild/protocompile v0.10.0 h1:+jW/wnLMLxaCEG8AX9lD0bQ5v9h1RUiMKOBOT5ll9dM= -github.com/bufbuild/protocompile v0.10.0/go.mod h1:G9qQIQo0xZ6Uyj6CMNz0saGmx2so+KONo8/KrELABiY= +github.com/bufbuild/protocompile v0.13.0 h1:6cwUB0Y2tSvmNxsbunwzmIto3xOlJOV7ALALuVOs92M= +github.com/bufbuild/protocompile v0.13.0/go.mod h1:dr++fGGeMPWHv7jPeT06ZKukm45NJscd7rUxQVzEKRk= github.com/census-instrumentation/opencensus-proto v0.2.1/go.mod h1:f6KPmirojxKA12rnyqOA5BBL4O983OfeGPqjHWSTneU= github.com/client9/misspell v0.3.4/go.mod h1:qj6jICC3Q7zFZvVWo7KLAzC3yx5G7kyvSDkc90ppPyw= github.com/cncf/udpa/go v0.0.0-20201120205902-5459f2c99403/go.mod h1:WmhPx2Nbnhtbo57+VJT5O0JRkEi1Wbu0z5j0R8u5Hbk= @@ -114,8 +114,8 @@ google.golang.org/protobuf v1.23.1-0.20200526195155-81db48ad09cc/go.mod h1:EGpAD google.golang.org/protobuf v1.25.0/go.mod h1:9JNX74DMeImyA3h4bdi1ymwjUzf21/xIlbajtzgsN7c= google.golang.org/protobuf v1.26.0-rc.1/go.mod h1:jlhhOSvTdKEhbULTjvd4ARK9grFBp09yW+WbY/TyQbw= google.golang.org/protobuf v1.26.0/go.mod h1:9q0QmTI4eRPtz6boOQmLYwt+qCgq0jsYwAQnmE0givc= -google.golang.org/protobuf v1.33.1-0.20240408130810-98873a205002 h1:V7Da7qt0MkY3noVANIMVBk28nOnijADeOR3i5Hcvpj4= -google.golang.org/protobuf v1.33.1-0.20240408130810-98873a205002/go.mod h1:c6P6GXX6sHbq/GpV6MGZEdwhWPcYBgnhAHhKbcUYpos= +google.golang.org/protobuf v1.34.1 h1:9ddQBjfCyZPOHPUiPxpYESBLc+T8P3E+Vo4IbKZgFWg= +google.golang.org/protobuf v1.34.1/go.mod h1:c6P6GXX6sHbq/GpV6MGZEdwhWPcYBgnhAHhKbcUYpos= gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= 
gopkg.in/check.v1 v1.0.0-20190902080502-41f04d3bba15 h1:YR8cESwS4TdDjEe65xsg0ogRM/Nc3DYOhEAlW+xobZo= gopkg.in/check.v1 v1.0.0-20190902080502-41f04d3bba15/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= diff --git a/internal/editions/editions.go b/internal/editions/editions.go index 53f361cd..850ac369 100644 --- a/internal/editions/editions.go +++ b/internal/editions/editions.go @@ -20,6 +20,7 @@ package editions import ( "fmt" + "strings" "sync" "google.golang.org/protobuf/encoding/prototext" @@ -30,20 +31,22 @@ import ( "google.golang.org/protobuf/types/dynamicpb" ) -var ( - // AllowEditions is set to true in tests to enable editions syntax for testing. - // This will be removed and editions will be allowed by non-test code once the - // implementation is complete. - AllowEditions = false +const ( + // MinSupportedEdition is the earliest edition supported by this module. + // It should be 2023 (the first edition) for the indefinite future. + MinSupportedEdition = descriptorpb.Edition_EDITION_2023 + + // MaxSupportedEdition is the most recent edition supported by this module. + MaxSupportedEdition = descriptorpb.Edition_EDITION_2023 +) +var ( // SupportedEditions is the exhaustive set of editions that protocompile // can support. We don't allow it to compile future/unknown editions, to // make sure we don't generate incorrect descriptors, in the event that // a future edition introduces a change or new feature that requires // new logic in the compiler. - SupportedEditions = map[string]descriptorpb.Edition{ - "2023": descriptorpb.Edition_EDITION_2023, - } + SupportedEditions = computeSupportedEditions(MinSupportedEdition, MaxSupportedEdition) // FeatureSetDescriptor is the message descriptor for the compiled-in // version (in the descriptorpb package) of the google.protobuf.FeatureSet @@ -88,6 +91,12 @@ func ResolveFeature( features = withFeatures.GetFeatures() } + // TODO: adaptFeatureSet is only looking at the first field. 
But if we needed to + // support an extension field inside a custom feature, we'd really need + // to check all fields. That gets particularly complicated if the traversal + // path of fields includes list and map values. Luckily, features are not + // supposed to be repeated and not supposed to themselves have extensions. + // So this should be fine, at least for now. msgRef, err := adaptFeatureSet(features, fields[0]) if err != nil { return protoreflect.Value{}, err @@ -251,45 +260,52 @@ func GetFeatureDefault(edition descriptorpb.Edition, container protoreflect.Mess func adaptFeatureSet(msg *descriptorpb.FeatureSet, field protoreflect.FieldDescriptor) (protoreflect.Message, error) { msgRef := msg.ProtoReflect() - if field.IsExtension() { - // Extensions can always be used directly with the feature set, even if - // field.ContainingMessage() != FeatureSetDescriptor. - if msgRef.Has(field) || len(msgRef.GetUnknown()) == 0 { - return msgRef, nil - } - // The field is not present, but the message has unrecognized values. So - // let's try to parse the unrecognized bytes, just in case they contain - // this extension. - temp := &descriptorpb.FeatureSet{} - unmarshaler := prototext.UnmarshalOptions{ - AllowPartial: true, - Resolver: resolverForExtension{field}, - } - if err := unmarshaler.Unmarshal(msgRef.GetUnknown(), temp); err != nil { - return nil, fmt.Errorf("failed to parse unrecognized fields of FeatureSet: %w", err) + var actualField protoreflect.FieldDescriptor + switch { + case field.IsExtension(): + // Extensions can be used directly with the feature set, even if + // field.ContainingMessage() != FeatureSetDescriptor. But only if + // the value is either not a message or is a message with the + // right descriptor, i.e. val.Descriptor() == field.Message(). 
+ if actualField = actualDescriptor(msgRef, field); actualField == nil || actualField == field { + if msgRef.Has(field) || len(msgRef.GetUnknown()) == 0 { + return msgRef, nil + } + // The field is not present, but the message has unrecognized values. So + // let's try to parse the unrecognized bytes, just in case they contain + // this extension. + temp := &descriptorpb.FeatureSet{} + unmarshaler := proto.UnmarshalOptions{ + AllowPartial: true, + Resolver: resolverForExtension{field}, + } + if err := unmarshaler.Unmarshal(msgRef.GetUnknown(), temp); err != nil { + return nil, fmt.Errorf("failed to parse unrecognized fields of FeatureSet: %w", err) + } + return temp.ProtoReflect(), nil } - return temp.ProtoReflect(), nil - } - - if field.ContainingMessage() == FeatureSetDescriptor { + case field.ContainingMessage() == FeatureSetDescriptor: // Known field, not dynamically generated. Can directly use with the feature set. return msgRef, nil + default: + actualField = FeatureSetDescriptor.Fields().ByNumber(field.Number()) } - // If we get here, we have a dynamic field descriptor. We want to copy its - // value into a dynamic message, which requires marshalling/unmarshalling. - msgField := FeatureSetDescriptor.Fields().ByNumber(field.Number()) + // If we get here, we have a dynamic field descriptor or an extension + // descriptor whose message type does not match the descriptor of the + // stored value. We need to copy its value into a dynamic message, + // which requires marshalling/unmarshalling. // We only need to copy over the unrecognized bytes (if any) // and the same field (if present). 
data := msgRef.GetUnknown() - if msgField != nil && msgRef.Has(msgField) { + if actualField != nil && msgRef.Has(actualField) { subset := &descriptorpb.FeatureSet{} - subset.ProtoReflect().Set(msgField, msgRef.Get(msgField)) - fieldBytes, err := proto.MarshalOptions{AllowPartial: true}.Marshal(subset) + subset.ProtoReflect().Set(actualField, msgRef.Get(actualField)) + var err error + data, err = proto.MarshalOptions{AllowPartial: true}.MarshalAppend(data, subset) if err != nil { return nil, fmt.Errorf("failed to marshal FeatureSet field %s to bytes: %w", field.Name(), err) } - data = append(data, fieldBytes...) } if len(data) == 0 { // No relevant data to copy over, so we can just return @@ -339,3 +355,66 @@ func asExtensionType(ext protoreflect.ExtensionDescriptor) protoreflect.Extensio } return dynamicpb.NewExtensionType(ext) } + +func computeSupportedEditions(min, max descriptorpb.Edition) map[string]descriptorpb.Edition { + supportedEditions := map[string]descriptorpb.Edition{} + for editionNum := range descriptorpb.Edition_name { + edition := descriptorpb.Edition(editionNum) + if edition >= min && edition <= max { + name := strings.TrimPrefix(edition.String(), "EDITION_") + supportedEditions[name] = edition + } + } + return supportedEditions +} + +// actualDescriptor returns the actual field descriptor referenced by msg that +// corresponds to the given ext (i.e. same number). It returns nil if msg has +// no reference, if the actual descriptor is the same as ext, or if ext is +// otherwise safe to use as is. 
+func actualDescriptor(msg protoreflect.Message, ext protoreflect.ExtensionDescriptor) protoreflect.FieldDescriptor { + if !msg.Has(ext) || ext.Message() == nil { + // nothing to match; safe as is + return nil + } + val := msg.Get(ext) + switch { + case ext.IsMap(): // should not actually be possible + expectedDescriptor := ext.MapValue().Message() + if expectedDescriptor == nil { + return nil // nothing to match + } + // We know msg.Has(field) is true, from above, so there's at least one entry. + var matches bool + val.Map().Range(func(_ protoreflect.MapKey, val protoreflect.Value) bool { + matches = val.Message().Descriptor() == expectedDescriptor + return false + }) + if matches { + return nil + } + case ext.IsList(): + // We know msg.Has(field) is true, from above, so there's at least one entry. + if val.List().Get(0).Message().Descriptor() == ext.Message() { + return nil + } + case !ext.IsMap(): + if val.Message().Descriptor() == ext.Message() { + return nil + } + } + // The underlying message descriptors do not match. So we need to return + // the actual field descriptor. Sadly, protoreflect.Message provides no way + // to query the field descriptor in a message by number. For non-extensions, + // one can query the associated message descriptor. But for extensions, we + // have to do the slow thing, and range through all fields looking for it. 
+ var actualField protoreflect.FieldDescriptor + msg.Range(func(fd protoreflect.FieldDescriptor, _ protoreflect.Value) bool { + if fd.Number() == ext.Number() { + actualField = fd + return false + } + return true + }) + return actualField +} diff --git a/internal/editions/editions_test.go b/internal/editions/editions_test.go index f87e0550..b802cf9d 100644 --- a/internal/editions/editions_test.go +++ b/internal/editions/editions_test.go @@ -48,3 +48,20 @@ func TestGetEditionDefaults(t *testing.T) { assert.Equal(t, descriptorpb.FeatureSet_EXPLICIT, features.GetFieldPresence()) assert.Equal(t, descriptorpb.FeatureSet_PACKED, features.GetRepeatedFieldEncoding()) } + +func TestComputeSupportedEditions(t *testing.T) { + t.Parallel() + assert.Equal(t, + map[string]descriptorpb.Edition{ + "2023": descriptorpb.Edition_EDITION_2023, + }, + computeSupportedEditions(descriptorpb.Edition_EDITION_2023, descriptorpb.Edition_EDITION_2023), + ) + assert.Equal(t, + map[string]descriptorpb.Edition{ + "2023": descriptorpb.Edition_EDITION_2023, + "2024": descriptorpb.Edition_EDITION_2024, + }, + computeSupportedEditions(descriptorpb.Edition_EDITION_2023, descriptorpb.Edition_EDITION_2024), + ) +} diff --git a/internal/featuresext/cpp_features.protoset b/internal/featuresext/cpp_features.protoset new file mode 100644 index 00000000..106ad8e4 Binary files /dev/null and b/internal/featuresext/cpp_features.protoset differ diff --git a/internal/featuresext/featuresext.go b/internal/featuresext/featuresext.go new file mode 100644 index 00000000..892524e6 --- /dev/null +++ b/internal/featuresext/featuresext.go @@ -0,0 +1,84 @@ +// Copyright 2020-2024 Buf Technologies, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +// Package featuresext provides file descriptors for the +// "google/protobuf/cpp_features.proto" and "google/protobuf/java_features.proto" +// standard import files. Unlike the other standard/well-known +// imports, these files have no standard Go package in their +// runtime with generated code. So in order to make them available +// as "standard imports" to compiler users, we must embed these +// descriptors into a Go package. +package featuresext + +import ( + _ "embed" + "fmt" + "sync" + + "google.golang.org/protobuf/proto" + "google.golang.org/protobuf/reflect/protodesc" + "google.golang.org/protobuf/reflect/protoreflect" + "google.golang.org/protobuf/reflect/protoregistry" + "google.golang.org/protobuf/types/descriptorpb" +) + +var ( + //go:embed cpp_features.protoset + cppFeatures []byte + + //go:embed java_features.protoset + javaFeatures []byte + + initOnce sync.Once + initCppFeatures protoreflect.FileDescriptor + initCppErr error + initJavaFeatures protoreflect.FileDescriptor + initJavaErr error +) + +func initDescriptors() { + initOnce.Do(func() { + initCppFeatures, initCppErr = buildDescriptor("google/protobuf/cpp_features.proto", cppFeatures) + initJavaFeatures, initJavaErr = buildDescriptor("google/protobuf/java_features.proto", javaFeatures) + }) +} + +func CppFeaturesDescriptor() (protoreflect.FileDescriptor, error) { + initDescriptors() + return initCppFeatures, initCppErr +} + +func JavaFeaturesDescriptor() (protoreflect.FileDescriptor, error) { + initDescriptors() + return initJavaFeatures, initJavaErr +} + +func 
buildDescriptor(name string, data []byte) (protoreflect.FileDescriptor, error) { + var files descriptorpb.FileDescriptorSet + err := proto.Unmarshal(data, &files) + if err != nil { + return nil, fmt.Errorf("failed to load descriptor for %q: %w", name, err) + } + if len(files.File) != 1 { + return nil, fmt.Errorf("failed to load descriptor for %q: expected embedded descriptor set to contain exactly one file but it instead has %d", name, len(files.File)) + } + if files.File[0].GetName() != name { + return nil, fmt.Errorf("failed to load descriptor for %q: embedded descriptor contains wrong file %q", name, files.File[0].GetName()) + } + descriptor, err := protodesc.NewFile(files.File[0], protoregistry.GlobalFiles) + if err != nil { + return nil, fmt.Errorf("failed to load descriptor for %q: %w", name, err) + } + return descriptor, nil +} diff --git a/internal/featuresext/featuresext_test.go b/internal/featuresext/featuresext_test.go new file mode 100644 index 00000000..9cb0d16e --- /dev/null +++ b/internal/featuresext/featuresext_test.go @@ -0,0 +1,37 @@ +// Copyright 2020-2024 Buf Technologies, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +package featuresext + +import ( + "testing" + + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" + "google.golang.org/protobuf/reflect/protoreflect" +) + +func TestFeaturesExt(t *testing.T) { + t.Parallel() + + file, err := CppFeaturesDescriptor() + require.NoError(t, err) + assert.Equal(t, protoreflect.FullName("pb"), file.Package()) + assert.NotNil(t, file.Extensions().ByName("cpp")) + + file, err = JavaFeaturesDescriptor() + require.NoError(t, err) + assert.Equal(t, protoreflect.FullName("pb"), file.Package()) + assert.NotNil(t, file.Extensions().ByName("java")) +} diff --git a/internal/featuresext/java_features.protoset b/internal/featuresext/java_features.protoset new file mode 100644 index 00000000..60de3eb7 Binary files /dev/null and b/internal/featuresext/java_features.protoset differ diff --git a/options/message_sets.go b/internal/messageset/messageset.go similarity index 91% rename from options/message_sets.go rename to internal/messageset/messageset.go index 2096eb58..850a0c66 100644 --- a/options/message_sets.go +++ b/internal/messageset/messageset.go @@ -12,7 +12,7 @@ // See the License for the specific language governing permissions and // limitations under the License. -package options +package messageset import ( "math" @@ -28,7 +28,9 @@ var ( messageSetSupportInit sync.Once ) -func canSerializeMessageSets() bool { +// CanSupportMessageSets returns true if the protobuf-go runtime supports +// serializing messages with the message set wire format. 
+func CanSupportMessageSets() bool { messageSetSupportInit.Do(func() { // We check using the protodesc package, instead of just relying // on protolegacy build tag, in case someone links in a fork of diff --git a/options/message_sets_protolegacy_test.go b/internal/messageset/messageset_protolegacy_test.go similarity index 92% rename from options/message_sets_protolegacy_test.go rename to internal/messageset/messageset_protolegacy_test.go index a09aca93..c5c712cc 100644 --- a/options/message_sets_protolegacy_test.go +++ b/internal/messageset/messageset_protolegacy_test.go @@ -15,7 +15,7 @@ //go:build protolegacy // +build protolegacy -package options +package messageset import ( "testing" @@ -25,5 +25,5 @@ import ( func TestCanSerializeMessageSets(t *testing.T) { t.Parallel() - assert.True(t, canSerializeMessageSets()) + assert.True(t, CanSupportMessageSets()) } diff --git a/options/message_sets_test.go b/internal/messageset/messageset_test.go similarity index 92% rename from options/message_sets_test.go rename to internal/messageset/messageset_test.go index aaca726f..0b3c2bc3 100644 --- a/options/message_sets_test.go +++ b/internal/messageset/messageset_test.go @@ -15,7 +15,7 @@ //go:build !protolegacy // +build !protolegacy -package options +package messageset import ( "testing" @@ -25,5 +25,5 @@ import ( func TestCanSerializeMessageSets(t *testing.T) { t.Parallel() - assert.False(t, canSerializeMessageSets()) + assert.False(t, CanSupportMessageSets()) } diff --git a/internal/protoc/protoc.go b/internal/protoc/protoc.go index 8abdab62..5f59d376 100644 --- a/internal/protoc/protoc.go +++ b/internal/protoc/protoc.go @@ -116,7 +116,7 @@ func writeFileToDisk(files map[string]string) (string, error) { } func invokeProtoc(protoPath string, fileNames []string) (stdout []byte, err error) { - args := []string{"--experimental_editions", "-I", protoPath, "-o", os.DevNull} + args := []string{"-I", protoPath, "-o", os.DevNull} args = append(args, fileNames...) 
protocPath, err := BinaryPath("../") if err != nil { diff --git a/internal/testdata/all.protoset b/internal/testdata/all.protoset index 8b23264b..b8d15622 100644 Binary files a/internal/testdata/all.protoset and b/internal/testdata/all.protoset differ diff --git a/internal/testdata/desc_test_complex.protoset b/internal/testdata/desc_test_complex.protoset index 2a7aa933..72a22ad3 100644 Binary files a/internal/testdata/desc_test_complex.protoset and b/internal/testdata/desc_test_complex.protoset differ diff --git a/internal/testdata/desc_test_proto3_optional.protoset b/internal/testdata/desc_test_proto3_optional.protoset index 036e6173..9e1e8aec 100644 Binary files a/internal/testdata/desc_test_proto3_optional.protoset and b/internal/testdata/desc_test_proto3_optional.protoset differ diff --git a/internal/testdata/descriptor_editions_impl_tests.protoset b/internal/testdata/descriptor_editions_impl_tests.protoset index 481d69b6..780b5d76 100644 Binary files a/internal/testdata/descriptor_editions_impl_tests.protoset and b/internal/testdata/descriptor_editions_impl_tests.protoset differ diff --git a/internal/testdata/descriptor_impl_tests.protoset b/internal/testdata/descriptor_impl_tests.protoset index 09b74b39..cb8327c8 100644 Binary files a/internal/testdata/descriptor_impl_tests.protoset and b/internal/testdata/descriptor_impl_tests.protoset differ diff --git a/internal/testdata/editions/all.protoset b/internal/testdata/editions/all.protoset index 481d69b6..780b5d76 100644 Binary files a/internal/testdata/editions/all.protoset and b/internal/testdata/editions/all.protoset differ diff --git a/internal/testdata/editions/features_with_overrides.proto b/internal/testdata/editions/features_with_overrides.proto index eaf4246c..390f6c7a 100644 --- a/internal/testdata/editions/features_with_overrides.proto +++ b/internal/testdata/editions/features_with_overrides.proto @@ -49,3 +49,10 @@ enum EyeColor { service FooService { rpc DoSomething(Foo) returns (Foo); } + +message 
Baz { + Group group = 1 [features.message_encoding=DELIMITED]; // looks like a group + message Group { + string name = 1; + } +} diff --git a/internal/testdata/editions/file_default_delimited.proto b/internal/testdata/editions/file_default_delimited.proto new file mode 100644 index 00000000..77beaa00 --- /dev/null +++ b/internal/testdata/editions/file_default_delimited.proto @@ -0,0 +1,12 @@ +edition = "2023"; + +package foo.bar.baz; + +option features.message_encoding = DELIMITED; + +message TestMessage { + TestMessage child = 1; + repeated TestMessage descendants = 2; + map string_map = 3; + map message_map = 4; +} diff --git a/internal/tools/go.mod b/internal/tools/go.mod index 70585aca..9a098e80 100644 --- a/internal/tools/go.mod +++ b/internal/tools/go.mod @@ -3,9 +3,9 @@ module github.com/bufbuild/protocompile/internal/tools go 1.18 require ( - github.com/bufbuild/buf v1.30.1 + github.com/bufbuild/buf v1.31.0 github.com/golangci/golangci-lint v1.55.2 - golang.org/x/tools v0.20.0 + golang.org/x/tools v0.21.0 ) require ( @@ -66,7 +66,7 @@ require ( github.com/go-xmlfmt/xmlfmt v1.1.2 // indirect github.com/gobwas/glob v0.2.3 // indirect github.com/gofrs/flock v0.8.1 // indirect - github.com/gofrs/uuid/v5 v5.0.0 // indirect + github.com/gofrs/uuid/v5 v5.1.0 // indirect github.com/golang/protobuf v1.5.4 // indirect github.com/golangci/check v0.0.0-20180506172741-cfe4005ccda2 // indirect github.com/golangci/dupl v0.0.0-20180902072040-3e9179ac440a // indirect @@ -182,11 +182,11 @@ require ( go.uber.org/atomic v1.11.0 // indirect go.uber.org/multierr v1.11.0 // indirect go.uber.org/zap v1.27.0 // indirect - golang.org/x/exp v0.0.0-20240325151524-a685a6edb6d8 // indirect + golang.org/x/exp v0.0.0-20240416160154-fe59bbe5cc7f // indirect golang.org/x/exp/typeparams v0.0.0-20230307190834-24139beb5833 // indirect golang.org/x/mod v0.17.0 // indirect golang.org/x/sync v0.7.0 // indirect - golang.org/x/sys v0.19.0 // indirect + golang.org/x/sys v0.20.0 // indirect 
golang.org/x/text v0.14.0 // indirect google.golang.org/protobuf v1.33.0 // indirect gopkg.in/ini.v1 v1.67.0 // indirect diff --git a/internal/tools/go.sum b/internal/tools/go.sum index 0b99f5ec..81d65ee5 100644 --- a/internal/tools/go.sum +++ b/internal/tools/go.sum @@ -95,8 +95,8 @@ github.com/breml/bidichk v0.2.7 h1:dAkKQPLl/Qrk7hnP6P+E0xOodrq8Us7+U0o4UBOAlQY= github.com/breml/bidichk v0.2.7/go.mod h1:YodjipAGI9fGcYM7II6wFvGhdMYsC5pHDlGzqvEW3tQ= github.com/breml/errchkjson v0.3.6 h1:VLhVkqSBH96AvXEyclMR37rZslRrY2kcyq+31HCsVrA= github.com/breml/errchkjson v0.3.6/go.mod h1:jhSDoFheAF2RSDOlCfhHO9KqhZgAYLyvHe7bRCX8f/U= -github.com/bufbuild/buf v1.30.1 h1:QFtanwsXodoGFAwzXFXGXpzBkb7N2u8ZDyA3jWB4Pbs= -github.com/bufbuild/buf v1.30.1/go.mod h1:7W8DJnj76wQa55EA3z2CmDxS0/nsHh8FqtE00dyDAdA= +github.com/bufbuild/buf v1.31.0 h1:YHLGIr8bjcLaTCIw0+/bCAvJLiR8u46QTwKvn7miSEg= +github.com/bufbuild/buf v1.31.0/go.mod h1:LlxpG2LF33f1Ixw29BTt0pyLriLzg3rXY1K9XQVHSio= github.com/butuzov/ireturn v0.2.2 h1:jWI36dxXwVrI+RnXDwux2IZOewpmfv930OuIRfaBUJ0= github.com/butuzov/ireturn v0.2.2/go.mod h1:RfGHUvvAuFFxoHKf4Z8Yxuh6OjlCw1KvR2zM1NFHeBk= github.com/butuzov/mirror v1.1.0 h1:ZqX54gBVMXu78QLoiqdwpl2mgmoOJTk7s4p4o+0avZI= @@ -196,8 +196,8 @@ github.com/gobwas/glob v0.2.3 h1:A4xDbljILXROh+kObIiy5kIaPYD8e96x1tgBhUI5J+Y= github.com/gobwas/glob v0.2.3/go.mod h1:d3Ez4x06l9bZtSvzIay5+Yzi0fmZzPgnTbPcKjJAkT8= github.com/gofrs/flock v0.8.1 h1:+gYjHKf32LDeiEEFhQaotPbLuUXjY5ZqxKgXy7n59aw= github.com/gofrs/flock v0.8.1/go.mod h1:F1TvTiK9OcQqauNUHlbJvyl9Qa1QvF/gOUDKA14jxHU= -github.com/gofrs/uuid/v5 v5.0.0 h1:p544++a97kEL+svbcFbCQVM9KFu0Yo25UoISXGNNH9M= -github.com/gofrs/uuid/v5 v5.0.0/go.mod h1:CDOjlDMVAtN56jqyRUZh58JT31Tiw7/oQyEXZV+9bD8= +github.com/gofrs/uuid/v5 v5.1.0 h1:S5rqVKIigghZTCBKPCw0Y+bXkn26K3TB5mvQq2Ix8dk= +github.com/gofrs/uuid/v5 v5.1.0/go.mod h1:CDOjlDMVAtN56jqyRUZh58JT31Tiw7/oQyEXZV+9bD8= github.com/gogo/protobuf v1.1.1/go.mod h1:r8qH/GZQm5c6nD/R0oafs1akxWv10x8SbQlK7atdtwQ= 
github.com/golang/glog v0.0.0-20160126235308-23def4e6c14b/go.mod h1:SBH7ygxi8pfUlaOkMMuAQtPIUF8ecWP5IEl/CR7VP2Q= github.com/golang/groupcache v0.0.0-20190702054246-869f871628b6/go.mod h1:cIg4eruTrX1D+g88fzRXU5OdNfaM+9IcxsU14FzY7Hc= @@ -278,7 +278,7 @@ github.com/google/pprof v0.0.0-20200708004538-1a94d8640e99/go.mod h1:ZgVRPoUq/hf github.com/google/pprof v0.0.0-20201023163331-3e6fc7fc9c4c/go.mod h1:kpwsk12EmLew5upagYY7GY0pfYCcupk39gWOCRROcvE= github.com/google/pprof v0.0.0-20201203190320-1bf35d6f28c2/go.mod h1:kpwsk12EmLew5upagYY7GY0pfYCcupk39gWOCRROcvE= github.com/google/pprof v0.0.0-20201218002935-b9804c9f04c2/go.mod h1:kpwsk12EmLew5upagYY7GY0pfYCcupk39gWOCRROcvE= -github.com/google/pprof v0.0.0-20240327155427-868f304927ed h1:n8QtJTrwsv3P7dNxPaMeNkMcxvUpqocsHLr8iDLGlQI= +github.com/google/pprof v0.0.0-20240422182052-72c8669ad3e7 h1:3q13T5NW3mlTJZM6B5UAsf2N5NYFbYWIyI3W8DlvBDU= github.com/google/renameio v0.1.0/go.mod h1:KWCgfxg9yswjAJkECMjeO8J8rahYeXnNhOm40UhjYkI= github.com/google/uuid v1.1.2/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo= github.com/googleapis/gax-go/v2 v2.0.4/go.mod h1:0Wqv26UfaUD9n4G6kQubkQ+KchISgw+vpHVxEJEs9eg= @@ -337,7 +337,7 @@ github.com/kisielk/gotool v1.0.0 h1:AV2c/EiW3KqPNT9ZKl07ehoAGi4C5/01Cfbblndcapg= github.com/kisielk/gotool v1.0.0/go.mod h1:XhKaO+MFFWcvkIS/tQcRk01m1F5IRFswLeQ+oQHNcck= github.com/kkHAIKE/contextcheck v1.1.4 h1:B6zAaLhOEEcjvUgIYEqystmnFk1Oemn8bvJhbt0GMb8= github.com/kkHAIKE/contextcheck v1.1.4/go.mod h1:1+i/gWqokIa+dm31mqGLZhZJ7Uh44DJGZVmr6QRBNJg= -github.com/klauspost/compress v1.17.7 h1:ehO88t2UGzQK66LMdE8tibEd1ErmzZjNEqWkjLAKQQg= +github.com/klauspost/compress v1.17.8 h1:YcnTYrq7MikUT7k0Yb5eceMmALQPYBW/Xltxn0NAMnU= github.com/konsorten/go-windows-terminal-sequences v1.0.1/go.mod h1:T0+1ngSBFLxvqU3pZ+m/2kptfBszLMUkC4ZK/EgS/cQ= github.com/konsorten/go-windows-terminal-sequences v1.0.3/go.mod h1:T0+1ngSBFLxvqU3pZ+m/2kptfBszLMUkC4ZK/EgS/cQ= github.com/kr/fs v0.1.0/go.mod 
h1:FFnZGqtBN9Gxj7eW1uZ42v5BccTP0vu6NEaFoC2HwRg= @@ -614,8 +614,8 @@ golang.org/x/exp v0.0.0-20191227195350-da58074b4299/go.mod h1:2RIsYlXP63K8oxa1u0 golang.org/x/exp v0.0.0-20200119233911-0405dc783f0a/go.mod h1:2RIsYlXP63K8oxa1u096TMicItID8zy7Y6sNkU49FU4= golang.org/x/exp v0.0.0-20200207192155-f17229e696bd/go.mod h1:J/WKrq2StrnmMY6+EHIKF9dgMWnmCNThgcyBT1FY9mM= golang.org/x/exp v0.0.0-20200224162631-6cc2880d07d6/go.mod h1:3jZMyOhIsHpP37uCMkUooju7aAi5cS1Q23tOzKc+0MU= -golang.org/x/exp v0.0.0-20240325151524-a685a6edb6d8 h1:aAcj0Da7eBAtrTp03QXWvm88pSyOt+UgdZw2BFZ+lEw= -golang.org/x/exp v0.0.0-20240325151524-a685a6edb6d8/go.mod h1:CQ1k9gNrJ50XIzaKCRR2hssIjF07kZFEiieALBM/ARQ= +golang.org/x/exp v0.0.0-20240416160154-fe59bbe5cc7f h1:99ci1mjWVBWwJiEKYY6jWa4d2nTQVIEhZIptnrVb1XY= +golang.org/x/exp v0.0.0-20240416160154-fe59bbe5cc7f/go.mod h1:/lliqkxwWAhPjf5oSOIJup2XcqJaw8RGS6k3TGEc7GI= golang.org/x/exp/typeparams v0.0.0-20220428152302-39d4317da171/go.mod h1:AbB0pIl9nAr9wVwH+Z2ZpaocVmF5I4GyWCDIsVjR0bk= golang.org/x/exp/typeparams v0.0.0-20230203172020-98cc5a0785f9/go.mod h1:AbB0pIl9nAr9wVwH+Z2ZpaocVmF5I4GyWCDIsVjR0bk= golang.org/x/exp/typeparams v0.0.0-20230307190834-24139beb5833 h1:jWGQJV4niP+CCmFW9ekjA9Zx8vYORzOUH2/Nl5WPuLQ= @@ -693,7 +693,7 @@ golang.org/x/net v0.1.0/go.mod h1:Cx3nUiGt4eDBEyega/BKRp+/AlGL8hYe7U9odMt2Cco= golang.org/x/net v0.2.0/go.mod h1:KqCZLdyyvdV855qA2rE3GC2aiw5xGR5TEjj8smXukLY= golang.org/x/net v0.5.0/go.mod h1:DivGGAXEgPSlEBzxGzZI+ZLohi+xUj054jfeKui00ws= golang.org/x/net v0.6.0/go.mod h1:2Tu9+aMcznHK/AK1HMvgo6xiTLG5rD5rZLDS+rp2Bjs= -golang.org/x/net v0.24.0 h1:1PcaxkF854Fu3+lvBIx5SYn9wRlBzzcnHZSiaFFAb0w= +golang.org/x/net v0.25.0 h1:d/OCCoBEUq33pjydKrGQhw7IlUPI2Oylr+8qLx49kac= golang.org/x/oauth2 v0.0.0-20180821212333-d2e6202438be/go.mod h1:N/0e6XlmueqKjAGxoOufVs8QHGRruUQn6yWY3a++T0U= golang.org/x/oauth2 v0.0.0-20190226205417-e64efc72b421/go.mod h1:gOpvHmFTYa4IltrdGE7lF6nIHvwfUNPOp7c8zoXwtLw= golang.org/x/oauth2 
v0.0.0-20190604053449-0f29369cfe45/go.mod h1:gOpvHmFTYa4IltrdGE7lF6nIHvwfUNPOp7c8zoXwtLw= @@ -777,8 +777,8 @@ golang.org/x/sys v0.1.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.2.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.4.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.5.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= -golang.org/x/sys v0.19.0 h1:q5f1RH2jigJ1MoAWp2KTp3gm5zAGFUTarQZ5U386+4o= -golang.org/x/sys v0.19.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA= +golang.org/x/sys v0.20.0 h1:Od9JTbYCk261bKm4M/mw7AklTlFYIa0bIp9BgSm1S8Y= +golang.org/x/sys v0.20.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA= golang.org/x/term v0.0.0-20201126162022-7de9c90e9dd1/go.mod h1:bj7SfCRtBDWHUb9snDiAeCFNEtKQo2Wmx5Cou7ajbmo= golang.org/x/term v0.0.0-20210927222741-03fcf44c2211/go.mod h1:jbD1KX2456YbFQfuXm/mYQcufACuNUgVhRMnK/tPxf8= golang.org/x/term v0.1.0/go.mod h1:jbD1KX2456YbFQfuXm/mYQcufACuNUgVhRMnK/tPxf8= @@ -869,8 +869,8 @@ golang.org/x/tools v0.2.0/go.mod h1:y4OqIKeOV/fWJetJ8bXPU1sEVniLMIyDAZWeHdV+NTA= golang.org/x/tools v0.3.0/go.mod h1:/rWhSS2+zyEVwoJf8YAX6L2f0ntZ7Kn/mGgAWcipA5k= golang.org/x/tools v0.5.0/go.mod h1:N+Kgy78s5I24c24dU8OfWNEotWjutIs8SnJvn5IDq+k= golang.org/x/tools v0.6.0/go.mod h1:Xwgl3UAJ/d3gWutnCtw505GrjyAbvKui8lOU390QaIU= -golang.org/x/tools v0.20.0 h1:hz/CVckiOxybQvFw6h7b/q80NTr9IUQb4s1IIzW7KNY= -golang.org/x/tools v0.20.0/go.mod h1:WvitBU7JJf6A4jOdg4S1tviW9bhUxkgeCui/0JHctQg= +golang.org/x/tools v0.21.0 h1:qc0xYgIbsSDt9EyWz05J5wfa7LOVW0YTLOXrqdLAWIw= +golang.org/x/tools v0.21.0/go.mod h1:aiJjzUbINMkxbQROHiO6hDPo2LHcIPhhQsa9DLh0yGk= golang.org/x/xerrors v0.0.0-20190717185122-a985d3407aa7/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= golang.org/x/xerrors v0.0.0-20191011141410-1b5146add898/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= golang.org/x/xerrors v0.0.0-20191204190536-9bdfabe68543/go.mod 
h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= diff --git a/linker/descriptors.go b/linker/descriptors.go index e8ce0ebc..cd43dcce 100644 --- a/linker/descriptors.go +++ b/linker/descriptors.go @@ -1015,10 +1015,9 @@ func (f *fldDescriptors) ByTextName(s string) protoreflect.FieldDescriptor { if fld != nil { return fld } - // Groups use type name instead + // Groups use type name instead, so we fallback to slow search for _, fld := range f.fields { - if fld.proto.GetType() == descriptorpb.FieldDescriptorProto_TYPE_GROUP && - string(fld.Message().Name()) == s { + if fld.TextName() == s { return fld } } @@ -1127,7 +1126,8 @@ func (f *fldDescriptor) Cardinality() protoreflect.Cardinality { } func (f *fldDescriptor) Kind() protoreflect.Kind { - if f.proto.GetType() == descriptorpb.FieldDescriptorProto_TYPE_MESSAGE && f.Syntax() == protoreflect.Editions { + if f.proto.GetType() == descriptorpb.FieldDescriptorProto_TYPE_MESSAGE && f.Syntax() == protoreflect.Editions && + !f.IsMap() && !f.parentIsMap() { // In editions, "group encoding" (aka "delimited encoding") is toggled // via a feature. So we report group kind when that feature is enabled. messageEncoding := resolveFeature(f, messageEncodingField) @@ -1153,9 +1153,21 @@ func (f *fldDescriptor) TextName() string { if f.IsExtension() { return fmt.Sprintf("[%s]", f.FullName()) } + if f.looksLikeGroup() { + // groups use the type name + return string(protoreflect.FullName(f.proto.GetTypeName()).Name()) + } return string(f.Name()) } +func (f *fldDescriptor) looksLikeGroup() bool { + // It looks like a group if it uses group/delimited encoding (checked via f.Kind) + // and the message type is a sibling whose name is a mixed-case version of the field name. 
+ return f.Kind() == protoreflect.GroupKind && + f.Message().FullName().Parent() == f.FullName().Parent() && + string(f.Name()) == strings.ToLower(string(f.Message().Name())) +} + func (f *fldDescriptor) HasPresence() bool { if f.proto.GetLabel() == descriptorpb.FieldDescriptorProto_LABEL_REPEATED { return false @@ -1229,6 +1241,11 @@ func (f *fldDescriptor) isMapEntry() bool { return f.Message().IsMapEntry() } +func (f *fldDescriptor) parentIsMap() bool { + parent, ok := f.parent.(protoreflect.MessageDescriptor) + return ok && parent.IsMapEntry() +} + func (f *fldDescriptor) MapKey() protoreflect.FieldDescriptor { if !f.IsMap() { return nil diff --git a/linker/descriptors_ext_test.go b/linker/descriptors_ext_test.go index e6125fa5..1a03a62b 100644 --- a/linker/descriptors_ext_test.go +++ b/linker/descriptors_ext_test.go @@ -55,6 +55,7 @@ func TestFields(t *testing.T) { {"desc_test_proto3_optional.proto", files}, {"all_default_features.proto", editionFiles}, {"features_with_overrides.proto", editionFiles}, + {"file_default_delimited.proto", editionFiles}, } for _, testCase := range testCases { testCase := testCase // must not capture loop variable below, for thread safety @@ -150,10 +151,17 @@ func checkAttributesInFields(t *testing.T, exp, actual protoreflect.ExtensionDes } assert.Equal(t, expFld.Number(), actFld.Number(), "%s: field number at index %d (%s)", where, i, expFld.Name()) assert.Equal(t, expFld.Cardinality(), actFld.Cardinality(), "%s: field cardinality at index %d (%s)", where, i, expFld.Name()) - assert.Equal(t, expFld.Kind(), actFld.Kind(), "%s: field kind at index %d (%s)", where, i, expFld.Name()) + if isMapOrMapEntryMessageValue(actFld) { + // TODO: Remove this branch and just use the check below once the protobuf-go runtime fixes + // https://github.com/golang/protobuf/issues/1615 + assert.Equal(t, protoreflect.MessageKind, actFld.Kind(), "%s: field kind at index %d (%s)", where, i, expFld.Name()) + } else { + assert.Equal(t, expFld.Kind(), 
actFld.Kind(), "%s: field kind at index %d (%s)", where, i, expFld.Name()) + } assert.Equal(t, expFld.IsList(), actFld.IsList(), "%s: field is list at index %d (%s)", where, i, expFld.Name()) assert.Equal(t, expFld.IsMap(), actFld.IsMap(), "%s: field is map at index %d (%s)", where, i, expFld.Name()) assert.Equal(t, expFld.JSONName(), actFld.JSONName(), "%s: field json name at index %d (%s)", where, i, expFld.Name()) + assert.Equal(t, expFld.TextName(), actFld.TextName(), "%s: field text name at index %d (%s)", where, i, expFld.Name()) assert.Equal(t, expFld.HasJSONName(), actFld.HasJSONName(), "%s: field has json name at index %d (%s)", where, i, expFld.Name()) assert.Equal(t, expFld.IsExtension(), actFld.IsExtension(), "%s: field is extension at index %d (%s)", where, i, expFld.Name()) assert.Equal(t, expFld.IsPacked(), actFld.IsPacked(), "%s: field is packed at index %d (%s)", where, i, expFld.Name()) @@ -240,3 +248,14 @@ func checkAttributesInEnums(t *testing.T, exp, actual protoreflect.EnumDescripto assert.Equal(t, expEnum.IsClosed(), actEnum.IsClosed(), "%s: enum is closed at index %d (%s)", where, i, expEnum.Name()) } } + +func isMapOrMapEntryMessageValue(field protoreflect.FieldDescriptor) bool { + if field.IsMap() { + return true // map field + } + if field.Message() == nil { + return false // not a message field + } + parent, ok := field.Parent().(protoreflect.MessageDescriptor) + return ok && parent.IsMapEntry() +} diff --git a/linker/linker_test.go b/linker/linker_test.go index 42757981..c1f1ce5f 100644 --- a/linker/linker_test.go +++ b/linker/linker_test.go @@ -19,7 +19,6 @@ import ( "errors" "fmt" "io" - "os" "os/exec" "sort" "strings" @@ -30,24 +29,20 @@ import ( "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" "google.golang.org/protobuf/proto" + "google.golang.org/protobuf/reflect/protodesc" + "google.golang.org/protobuf/reflect/protoreflect" "google.golang.org/protobuf/reflect/protoregistry" 
"google.golang.org/protobuf/types/descriptorpb" "github.com/bufbuild/protocompile" - "github.com/bufbuild/protocompile/internal/editions" + "github.com/bufbuild/protocompile/internal/messageset" "github.com/bufbuild/protocompile/internal/protoc" "github.com/bufbuild/protocompile/internal/prototest" "github.com/bufbuild/protocompile/linker" + "github.com/bufbuild/protocompile/protoutil" "github.com/bufbuild/protocompile/reporter" ) -func TestMain(m *testing.M) { - // Enable just for tests. - editions.AllowEditions = true - status := m.Run() - os.Exit(status) -} - func TestSimpleLink(t *testing.T) { t.Parallel() compiler := protocompile.Compiler{ @@ -125,6 +120,7 @@ func TestLinkerValidation(t *testing.T) { // Expected error message - leave empty if input is expected to succeed expectedErr string expectedDiffWithProtoc bool + expectProtodescFail bool }{ "success_multi_namespace": { input: map[string]string{ @@ -475,6 +471,26 @@ func TestLinkerValidation(t *testing.T) { }, expectedErr: "foo.proto:4:1: error in file options: some required fields missing: (f).b", }, + "success_extensions_do_not_inherit_file_field_presence": { + input: map[string]string{ + "test.proto": ` + edition = "2023"; + option features.field_presence = IMPLICIT; + message Foo { + extensions 1 to 100; + } + enum Enum { + option features.enum_type = CLOSED; + ZERO = 0; + ONE = 1; + } + extend Foo { + string s = 1 [default="abc"]; + Enum en = 2; + repeated Enum ens = 3; + }`, + }, + }, "failure_message_set_wire_format_scalar": { input: map[string]string{ "foo.proto": "message Foo { option message_set_wire_format = true; extensions 1 to 100; } extend Foo { optional int32 bar = 1; }", @@ -485,6 +501,7 @@ func TestLinkerValidation(t *testing.T) { input: map[string]string{ "foo.proto": "message Foo { option message_set_wire_format = true; extensions 1 to 100; } extend Foo { optional Foo bar = 1; }", }, + expectProtodescFail: !messageset.CanSupportMessageSets(), }, "failure_tag_out_of_range": { input: 
map[string]string{ @@ -496,6 +513,7 @@ func TestLinkerValidation(t *testing.T) { input: map[string]string{ "foo.proto": "message Foo { option message_set_wire_format = true; extensions 1 to max; } extend Foo { optional Foo bar = 536870912; }", }, + expectProtodescFail: !messageset.CanSupportMessageSets(), }, "failure_message_set_wire_format_repeated": { input: map[string]string{ @@ -572,9 +590,6 @@ func TestLinkerValidation(t *testing.T) { extend google.protobuf.MessageOptions { optional Foo foo = 10001; } message Baz { option (foo) = { bar< name: "abc" > }; }`, }, - // protoc will allow this in v27.0: - // https://github.com/protocolbuffers/protobuf/commit/29c69ff00b58b60e67fcf40fd810009bd39b86c6 - expectedDiffWithProtoc: true, }, "success_group_in_custom_option_msg_literal": { // However, groups MAY be referred by group name (i.e. message name) @@ -652,9 +667,6 @@ func TestLinkerValidation(t *testing.T) { message Baz { option (foo) = { Bar< name: "abc" > }; }`, }, expectedErr: `foo.proto:8:32: message Baz: option (foo): field Bar not found`, - // protoc will fix this in v27.0: - // https://github.com/protocolbuffers/protobuf/commit/29c69ff00b58b60e67fcf40fd810009bd39b86c6 - expectedDiffWithProtoc: true, }, "failure_not_looks_like_group_in_custom_option_msg_literal_wrong_field_name": { input: map[string]string{ @@ -2954,7 +2966,28 @@ func TestLinkerValidation(t *testing.T) { } `, }, - expectedErr: `test.proto:14:25: expected extension with number 1 to be named .foo.bar, not foo.s, per declaration at test.proto:8:25`, + expectedErr: `test.proto:14:25: expected extension with number 1 to be named foo.bar, not foo.s, per declaration at test.proto:8:25`, + }, + "failure_extension_name_does_not_match_declaration2": { + input: map[string]string{ + "test.proto": ` + syntax = "proto2"; + package foo; + message A { + extensions 1 [ + declaration={ + number: 1 + full_name: ".foo.bar" + type: "string" + } + ]; + } + extend A { + optional string s = 1; + } + `, + }, + 
expectedErr: `test.proto:13:25: expected extension with number 1 to be named foo.bar, not foo.s, per declaration at test.proto:7:25`, }, "failure_extension_type_does_not_match_declaration": { input: map[string]string{ @@ -2978,6 +3011,27 @@ func TestLinkerValidation(t *testing.T) { }, expectedErr: `test.proto:14:18: expected extension with number 1 to have type string, not uint32, per declaration at test.proto:9:25`, }, + "failure_extension_type_does_not_match_declaration2": { + input: map[string]string{ + "test.proto": ` + syntax = "proto2"; + package foo; + message A { + extensions 1 [ + declaration={ + number: 1 + full_name: ".foo.bar" + type: ".foo.A" + } + ]; + } + extend A { + optional uint32 bar = 1; + } + `, + }, + expectedErr: `test.proto:13:18: expected extension with number 1 to have type foo.A, not uint32, per declaration at test.proto:8:25`, + }, "failure_extension_label_does_not_match_declaration": { input: map[string]string{ "test.proto": ` @@ -3023,6 +3077,27 @@ func TestLinkerValidation(t *testing.T) { }, expectedErr: `test.proto:14:9: expected extension with number 1 to be optional, not repeated, per declaration at test.proto:6:17`, }, + "failure_extension_label_does_not_match_declaration3": { + input: map[string]string{ + "test.proto": ` + syntax = "proto2"; + package foo; + message A { + extensions 1 [ + declaration={ + number: 1 + full_name: ".foo.bar" + type: "string" + } + ]; + } + extend A { + repeated string bar = 1; + } + `, + }, + expectedErr: `test.proto:13:9: expected extension with number 1 to be optional, not repeated, per declaration at test.proto:5:17`, + }, "failure_extension_matches_no_declaration": { input: map[string]string{ "test.proto": ` @@ -3045,6 +3120,44 @@ func TestLinkerValidation(t *testing.T) { }, expectedErr: `test.proto:14:31: expected extension with number 3 to be declared in type foo.A, but no declaration found at test.proto:5:17`, }, + "failure_extension_matches_no_declaration2": { + input: map[string]string{ + 
"test.proto": ` + syntax = "proto2"; + package foo; + message A { + extensions 1 to 3 [ + declaration={ + number: 1 + full_name: ".foo.bar" + type: "string" + } + ]; + } + extend A { + repeated string bar = 3; + } + `, + }, + expectedErr: `test.proto:13:31: expected extension with number 3 to be declared in type foo.A, but no declaration found at test.proto:4:9`, + }, + "failure_extension_matches_no_declaration3": { + input: map[string]string{ + "test.proto": ` + syntax = "proto2"; + package foo; + message A { + extensions 1 to 3 [ + verification=DECLARATION + ]; + } + extend A { + repeated string bar = 3; + } + `, + }, + expectedErr: `test.proto:9:31: expected extension with number 3 to be declared in type foo.A, but no declaration found at test.proto:5:17`, + }, "success_field_presence": { input: map[string]string{ "test.proto": ` @@ -3340,6 +3453,372 @@ func TestLinkerValidation(t *testing.T) { }, expectedErr: `test.proto:7:26: ctype option cannot be CORD for extension fields`, }, + "success_feature_within_lifetime": { + input: map[string]string{ + "feature.proto": ` + syntax = "proto2"; + package google.protobuf; + message FileOptions { + optional FeatureSet features = 50 [ + // Ignored on "features" itself, only + // validated on fields therein. 
+ feature_support = { + edition_introduced: EDITION_2024 + } + ]; + } + message FeatureSet { + optional bool flag = 20 [ + feature_support = { + edition_introduced: EDITION_PROTO2 + edition_deprecated: EDITION_2024 + edition_removed: EDITION_99997_TEST_ONLY + } + ]; + optional bool other = 21 [ + feature_support = { + edition_introduced: EDITION_PROTO2 + edition_deprecated: EDITION_2024 + edition_removed: EDITION_99997_TEST_ONLY + } + ]; + } + `, + "test.proto": ` + edition = "2023"; + import "feature.proto"; + package foo; + option features = { flag: true }; + option features.other = true; + `, + }, + }, + "success_feature_deprecated": { + input: map[string]string{ + "feature.proto": ` + syntax = "proto2"; + package google.protobuf; + message FileOptions { + optional FeatureSet features = 50 [ + // Ignored on "features" itself, only + // validated on fields therein. + feature_support = { + edition_introduced: EDITION_PROTO2 + edition_removed: EDITION_2023 + } + ]; + } + message FeatureSet { + optional bool flag = 20 [ + feature_support = { + edition_introduced: EDITION_PROTO2 + edition_deprecated: EDITION_PROTO3 + deprecation_warning: "foo" + } + ]; + optional bool other = 21 [ + feature_support = { + edition_introduced: EDITION_PROTO2 + edition_deprecated: EDITION_PROTO3 + deprecation_warning: "foo" + } + ]; + } + `, + "test.proto": ` + edition = "2023"; + import "feature.proto"; + package foo; + option features = { flag: true }; + option features.other = true; + `, + }, + }, + "failure_feature_not_yet_introduced": { + input: map[string]string{ + "feature.proto": ` + syntax = "proto2"; + package google.protobuf; + message FileOptions { + optional FeatureSet features = 50; + } + message FeatureSet { + optional bool flag = 20 [ + feature_support = { + edition_introduced: EDITION_2024 + } + ]; + } + `, + "test.proto": ` + edition = "2023"; + import "feature.proto"; + package foo; + option features.flag = true; + `, + }, + expectedErr: `test.proto:4:1: field 
"google.protobuf.FeatureSet.flag" was not introduced until edition 2024`, + }, + "failure_feature_not_yet_introduced_msg_literal": { + input: map[string]string{ + "feature.proto": ` + syntax = "proto2"; + package google.protobuf; + message FileOptions { + optional FeatureSet features = 50; + } + message FeatureSet { + optional bool flag = 20 [ + feature_support = { + edition_introduced: EDITION_2024 + } + ]; + } + `, + "test.proto": ` + edition = "2023"; + import "feature.proto"; + package foo; + option features = { flag: true }; + `, + }, + expectedErr: `test.proto:4:21: field "google.protobuf.FeatureSet.flag" was not introduced until edition 2024`, + }, + "failure_feature_removed": { + input: map[string]string{ + "feature.proto": ` + syntax = "proto2"; + package google.protobuf; + message FileOptions { + optional FeatureSet features = 50; + } + message FeatureSet { + optional bool flag = 20 [ + feature_support = { + edition_removed: EDITION_PROTO3 + } + ]; + } + `, + "test.proto": ` + edition = "2023"; + import "feature.proto"; + package foo; + option features.flag = true; + `, + }, + expectedErr: `test.proto:4:1: field "google.protobuf.FeatureSet.flag" was removed in edition proto3`, + }, + "failure_feature_removed_msg_literal": { + input: map[string]string{ + "feature.proto": ` + syntax = "proto2"; + package google.protobuf; + message FileOptions { + optional FeatureSet features = 50; + } + message FeatureSet { + optional bool flag = 20 [ + feature_support = { + edition_removed: EDITION_PROTO3 + } + ]; + } + `, + "test.proto": ` + edition = "2023"; + import "feature.proto"; + package foo; + option features = { flag: true }; + `, + }, + expectedErr: `test.proto:4:21: field "google.protobuf.FeatureSet.flag" was removed in edition proto3`, + }, + "success_custom_feature_within_lifetime": { + input: map[string]string{ + "feature.proto": ` + edition = "2023"; + package foo; + import "google/protobuf/descriptor.proto"; + message CustomFeatures { + bool flag = 1 [ + 
feature_support = { + edition_introduced: EDITION_2023 + edition_deprecated: EDITION_2024 + edition_removed: EDITION_2024 + } + ]; + } + extend google.protobuf.FeatureSet { + CustomFeatures custom = 9995; + } + `, + "test.proto": ` + edition = "2023"; + package foo; + import "feature.proto"; + option features.(custom).flag = true; + `, + }, + }, + "failure_custom_feature_in_same_file": { + input: map[string]string{ + "test.proto": ` + edition = "2023"; + import "google/protobuf/descriptor.proto"; + package foo; + message CustomFeatures { + bool flag = 1 [ + feature_support = { + edition_introduced: EDITION_2023 + edition_deprecated: EDITION_2023 + edition_removed: EDITION_2024 + } + ]; + } + extend google.protobuf.FeatureSet { + CustomFeatures custom = 9995; + } + option features.(custom).flag = true; + `, + }, + expectedErr: `test.proto:16:1: custom feature (foo.custom) cannot be used from the same file in which it is defined`, + }, + "success_custom_feature_deprecated": { + input: map[string]string{ + "feature.proto": ` + edition = "2023"; + package foo; + import "google/protobuf/descriptor.proto"; + message CustomFeatures { + bool flag = 1 [ + feature_support = { + edition_introduced: EDITION_2023 + edition_deprecated: EDITION_2023 + edition_removed: EDITION_2024 + } + ]; + } + extend google.protobuf.FeatureSet { + CustomFeatures custom = 9995; + } + `, + "test.proto": ` + edition = "2023"; + package foo; + import "feature.proto"; + option features.(custom).flag = true; + `, + }, + }, + "failure_custom_feature_not_yet_introduced": { + input: map[string]string{ + "feature.proto": ` + edition = "2023"; + package foo; + import "google/protobuf/descriptor.proto"; + message CustomFeatures { + bool flag = 1 [ + feature_support = { + edition_introduced: EDITION_2024 + edition_deprecated: EDITION_2024 + } + ]; + } + extend google.protobuf.FeatureSet { + CustomFeatures custom = 9995; + } + `, + "test.proto": ` + edition = "2023"; + package foo; + import "feature.proto"; 
+ option features.(custom).flag = true; + `, + }, + expectedErr: `test.proto:4:1: field "foo.CustomFeatures.flag" was not introduced until edition 2024`, + }, + "failure_custom_feature_not_yet_introduced_msg_literal": { + input: map[string]string{ + "feature.proto": ` + edition = "2023"; + package foo; + import "google/protobuf/descriptor.proto"; + message CustomFeatures { + bool flag = 1 [ + feature_support = { + edition_introduced: EDITION_2024 + edition_deprecated: EDITION_2024 + } + ]; + } + extend google.protobuf.FeatureSet { + CustomFeatures custom = 9995; + } + `, + "test.proto": ` + edition = "2023"; + package foo; + import "feature.proto"; + option features.(custom) = { flag: true }; + `, + }, + expectedErr: `test.proto:4:30: field "foo.CustomFeatures.flag" was not introduced until edition 2024`, + }, + "failure_custom_feature_not_yet_introduced_msg_literal2": { + input: map[string]string{ + "feature.proto": ` + edition = "2023"; + package foo; + import "google/protobuf/descriptor.proto"; + message CustomFeatures { + bool flag = 1 [ + feature_support = { + edition_introduced: EDITION_2024 + edition_deprecated: EDITION_2024 + } + ]; + } + extend google.protobuf.FeatureSet { + CustomFeatures custom = 9995; + } + `, + "test.proto": ` + edition = "2023"; + package foo; + import "feature.proto"; + option features = { [foo.custom]: { flag: true } }; + `, + }, + expectedErr: `test.proto:4:37: field "foo.CustomFeatures.flag" was not introduced until edition 2024`, + }, + "failure_custom_feature_removed": { + input: map[string]string{ + "feature.proto": ` + edition = "2023"; + package foo; + import "google/protobuf/descriptor.proto"; + message CustomFeatures { + bool flag = 1 [ + feature_support = { + edition_introduced: EDITION_PROTO2 + edition_removed: EDITION_2023 + } + ]; + } + extend google.protobuf.FeatureSet { + CustomFeatures custom = 9995; + } + `, + "test.proto": ` + edition = "2023"; + package foo; + import "feature.proto"; + option 
features.(custom).flag = true; + `, + }, + expectedErr: `test.proto:4:1: field "foo.CustomFeatures.flag" was removed in edition 2023`, + }, } for name, tc := range testCases { @@ -3354,7 +3833,7 @@ func TestLinkerValidation(t *testing.T) { for filename, data := range tc.input { tc.input[filename] = removePrefixIndent(data) } - _, errs := compile(t, tc.input) + files, errs := compile(t, tc.input) actualErrs := make([]string, len(errs)) for i := range errs { @@ -3413,6 +3892,17 @@ func TestLinkerValidation(t *testing.T) { } } + // Make sure protobuf-go can handle resulting files + if len(errs) == 0 && len(files) > 0 { + err := convertToProtoreflectDescriptors(files) + if tc.expectProtodescFail { + // This is a known case where it cannot handle the file. + require.Error(t, err) + } else { + require.NoError(t, err) + } + } + // parse with protoc passProtoc := testByProtoc(t, tc.input, tc.inputOrder) if tc.expectedErr == "" { @@ -3984,3 +4474,28 @@ func testByProtoc(t *testing.T, files map[string]string, fileNames []string) boo require.NoError(t, err) return true } + +func convertToProtoreflectDescriptors(files linker.Files) error { + allFiles := make(map[string]*descriptorpb.FileDescriptorProto, len(files)) + addFileDescriptorsToMap(files, allFiles) + fileSlice := make([]*descriptorpb.FileDescriptorProto, 0, len(allFiles)) + for _, fileProto := range allFiles { + fileSlice = append(fileSlice, fileProto) + } + _, err := protodesc.NewFiles(&descriptorpb.FileDescriptorSet{File: fileSlice}) + return err +} + +func addFileDescriptorsToMap[F protoreflect.FileDescriptor](files []F, allFiles map[string]*descriptorpb.FileDescriptorProto) { + for _, file := range files { + if _, exists := allFiles[file.Path()]; exists { + continue // already added this one + } + allFiles[file.Path()] = protoutil.ProtoFromFileDescriptor(file) + deps := make([]protoreflect.FileDescriptor, file.Imports().Len()) + for i := 0; i < file.Imports().Len(); i++ { + deps[i] = 
file.Imports().Get(i).FileDescriptor + } + addFileDescriptorsToMap(deps, allFiles) + } +} diff --git a/linker/validate.go b/linker/validate.go index 6d621dd2..582272e3 100644 --- a/linker/validate.go +++ b/linker/validate.go @@ -267,7 +267,7 @@ func (r *result) validateExtension(fd *fldDescriptor, handler *reporter.Handler) if extRangeOpts == nil { break } - if extRangeOpts.GetVerification() == descriptorpb.ExtensionRangeOptions_UNVERIFIED { + if len(extRangeOpts.Declaration) == 0 && extRangeOpts.GetVerification() != descriptorpb.ExtensionRangeOptions_DECLARATION { break } var found bool @@ -294,7 +294,7 @@ func (r *result) validateExtension(fd *fldDescriptor, handler *reporter.Handler) span, _ := findExtensionRangeOptionSpan(msg.ParentFile(), msg, i, extRange, internal.ExtensionRangeOptionsDeclarationTag, int32(j), internal.ExtensionRangeOptionsDeclarationFullNameTag) err := handler.HandleErrorf(info, "expected extension with number %d to be named %s, not %s, per declaration at %v", - fd.Number(), extDecl.GetFullName(), fd.FullName(), span.Start()) + fd.Number(), strings.TrimPrefix(extDecl.GetFullName(), "."), fd.FullName(), span.Start()) if err != nil { return err } @@ -305,7 +305,7 @@ func (r *result) validateExtension(fd *fldDescriptor, handler *reporter.Handler) span, _ := findExtensionRangeOptionSpan(msg.ParentFile(), msg, i, extRange, internal.ExtensionRangeOptionsDeclarationTag, int32(j), internal.ExtensionRangeOptionsDeclarationTypeTag) err := handler.HandleErrorf(info, "expected extension with number %d to have type %s, not %s, per declaration at %v", - fd.Number(), extDecl.GetType(), getTypeName(fd), span.Start()) + fd.Number(), strings.TrimPrefix(extDecl.GetType(), "."), getTypeName(fd), span.Start()) if err != nil { return err } @@ -590,17 +590,15 @@ func (r *result) validateExtensionDeclarations(md *msgDescriptor, handler *repor // nothing to check continue } - // Strange that the "has_verification" check is here, but this - // mimics protoc: - // 
https://github.com/protocolbuffers/protobuf/blob/v26.1/src/google/protobuf/descriptor.cc#L8187-L8188 - // The effect is that you can add declarations even when the range - // is in default unverified state, and they just have no effect. ¯\_(ツ)_/¯ - if opts.Verification != nil && opts.GetVerification() == descriptorpb.ExtensionRangeOptions_UNVERIFIED { + // If any declarations are present, verification is assumed to be + // DECLARATION. It's an error for declarations to be present but the + // verification field explicitly set to something other than that. + if opts.Verification != nil && opts.GetVerification() != descriptorpb.ExtensionRangeOptions_DECLARATION { span, ok := findExtensionRangeOptionSpan(r, md, i, extRange, internal.ExtensionRangeOptionsVerificationTag) if !ok { span, _ = findExtensionRangeOptionSpan(r, md, i, extRange, internal.ExtensionRangeOptionsDeclarationTag, 0) } - if err := handler.HandleErrorf(span, "extension range cannot have declarations and have verification of UNVERIFIED"); err != nil { + if err := handler.HandleErrorf(span, "extension range cannot have declarations and have verification of %s", opts.GetVerification()); err != nil { return err } } diff --git a/options/options.go b/options/options.go index 1e822531..33fe538e 100644 --- a/options/options.go +++ b/options/options.go @@ -41,6 +41,7 @@ import ( "github.com/bufbuild/protocompile/ast" "github.com/bufbuild/protocompile/internal" + "github.com/bufbuild/protocompile/internal/messageset" "github.com/bufbuild/protocompile/linker" "github.com/bufbuild/protocompile/parser" "github.com/bufbuild/protocompile/reporter" @@ -532,13 +533,40 @@ func interpretElementOptions[Elem elementType[OptsStruct, Opts], OptsStruct any, customOpts bool, ) error { opts := elem.GetOptions() - uo := opts.GetUninterpretedOption() - if len(uo) > 0 { - remain, err := interp.interpretOptions(fqn, target.t, elem, opts, uo, customOpts) + uninterpreted := opts.GetUninterpretedOption() + if len(uninterpreted) > 0 { 
+ remain, err := interp.interpretOptions(fqn, target.t, elem, opts, uninterpreted, customOpts) if err != nil { return err } target.setUninterpretedOptions(opts, remain) + } else if customOpts { + // If customOpts is true, we are in second pass of interpreting. + // For second pass, even if there are no options to interpret, we still + // need to verify feature usage. + features := opts.GetFeatures() + var msg protoreflect.Message + if len(features.ProtoReflect().GetUnknown()) > 0 { + // We need to first convert to a message that uses the sources' definition + // of FeatureSet. + optsDesc := opts.ProtoReflect().Descriptor() + optsFqn := string(optsDesc.FullName()) + if md := interp.resolveOptionsType(optsFqn); md != nil { + dm := dynamicpb.NewMessage(md) + if err := cloneInto(dm, opts, interp.resolver); err != nil { + node := interp.file.Node(elem) + return interp.reporter.HandleError(reporter.Error(interp.nodeInfo(node), err)) + } + msg = dm + } + } + if msg == nil { + msg = opts.ProtoReflect() + } + err := interp.validateRecursive(false, msg, "", elem, nil, false, false, false) + if err != nil { + return err + } } return nil } @@ -576,7 +604,6 @@ func (interp *interpreter) interpretOptions( ElementType: descriptorType(element), } var remain []*descriptorpb.UninterpretedOption - var optNodes []*ast.OptionNode for _, uo := range uninterpreted { isCustom := uo.Name[0].GetIsExtension() if isCustom != customOpts { @@ -611,14 +638,24 @@ func (interp *interpreter) interpretOptions( } return nil, err } - if optn, ok := node.(*ast.OptionNode); ok { - optNodes = append(optNodes, optn) - if srcInfo != nil { + + if srcInfo != nil { + if optn, ok := node.(*ast.OptionNode); ok { interp.index[optn] = srcInfo } } } + // customOpts is true for the second pass, which is also when we want to validate feature usage. 
+ doValidation := customOpts + if doValidation { + validateRequiredFields := !interp.lenient + err := interp.validateRecursive(validateRequiredFields, msg, "", element, nil, false, false, false) + if err != nil { + return nil, err + } + } + if interp.lenient { // If we're lenient, then we don't want to clobber the passed in message // and leave it partially populated. So we convert into a copy first @@ -629,6 +666,13 @@ func (interp *interpreter) interpretOptions( // the work we've done so far. return uninterpreted, nil } + if doValidation { + if err := proto.CheckInitialized(optsClone); err != nil { + // Conversion from dynamic message failed to set some required fields. + // TODO above applies here as well... + return uninterpreted, nil + } + } // conversion from dynamic message above worked, so now // it is safe to overwrite the passed in message proto.Reset(opts) @@ -637,10 +681,6 @@ func (interp *interpreter) interpretOptions( return remain, nil } - if err := interp.validateRecursive(msg, "", element, nil, false, optNodes); err != nil { - return nil, err - } - // now try to convert into the passed in message and fail if not successful if err := cloneInto(opts, msg.Interface(), interp.resolver); err != nil { node := interp.file.Node(element) @@ -659,7 +699,7 @@ func (interp *interpreter) checkFieldUsage( node ast.Node, ) error { msgOpts, _ := fld.ContainingMessage().Options().(*descriptorpb.MessageOptions) - if msgOpts.GetMessageSetWireFormat() && !canSerializeMessageSets() { + if msgOpts.GetMessageSetWireFormat() && !messageset.CanSupportMessageSets() { err := interp.reporter.HandleErrorf(interp.nodeInfo(node), "field %q may not be used in an option: it uses 'message set wire format' legacy proto1 feature which is not supported", fld.FullName()) if err != nil { return err @@ -693,6 +733,10 @@ func targetTypeString(t descriptorpb.FieldOptions_OptionTargetType) string { return strings.ToLower(strings.ReplaceAll(strings.TrimPrefix(t.String(), "TARGET_TYPE_"), 
"_", " ")) } +func editionString(t descriptorpb.Edition) string { + return strings.ToLower(strings.ReplaceAll(strings.TrimPrefix(t.String(), "EDITION_"), "_", "-")) +} + func cloneInto(dest proto.Message, src proto.Message, res linker.Resolver) error { if dest.ProtoReflect().Descriptor() == src.ProtoReflect().Descriptor() { proto.Reset(dest) @@ -724,35 +768,30 @@ func cloneInto(dest proto.Message, src proto.Message, res linker.Resolver) error } func (interp *interpreter) validateRecursive( + validateRequiredFields bool, msg protoreflect.Message, prefix string, element proto.Message, path []int32, + isFeatures bool, + inFeatures bool, inMap bool, - optNodes []*ast.OptionNode, ) error { - flds := msg.Descriptor().Fields() - var missingFields []string - for i := 0; i < flds.Len(); i++ { - fld := flds.Get(i) - if fld.Cardinality() == protoreflect.Required && !msg.Has(fld) { - missingFields = append(missingFields, fmt.Sprintf("%s%s", prefix, fld.Name())) - } - } - if len(missingFields) > 0 { - node, _ := findOptionNode[*ast.OptionNode]( - path, - optionsRanger(optNodes), - func(n *ast.OptionNode) *sourceinfo.OptionSourceInfo { - return interp.index[n] - }, - ) - if node == nil { - node = interp.file.Node(element) + if validateRequiredFields { + flds := msg.Descriptor().Fields() + var missingFields []string + for i := 0; i < flds.Len(); i++ { + fld := flds.Get(i) + if fld.Cardinality() == protoreflect.Required && !msg.Has(fld) { + missingFields = append(missingFields, fmt.Sprintf("%s%s", prefix, fld.Name())) + } } - err := interp.reporter.HandleErrorf(interp.nodeInfo(node), "error in %s options: some required fields missing: %v", descriptorType(element), strings.Join(missingFields, ", ")) - if err != nil { - return err + if len(missingFields) > 0 { + node := interp.findOptionNode(path, element) + err := interp.reporter.HandleErrorf(interp.nodeInfo(node), "error in %s options: some required fields missing: %v", descriptorType(element), strings.Join(missingFields, ", ")) + 
if err != nil { + return err + } } } @@ -762,11 +801,82 @@ func (interp *interpreter) validateRecursive( if !inMap { chpath = append(chpath, int32(fld.Number())) } + chInFeatures := isFeatures || inFeatures + chIsFeatures := !chInFeatures && len(path) == 0 && fld.Name() == "features" + + if (isFeatures || (inFeatures && fld.IsExtension())) && + interp.file.FileNode().Name() == fld.ParentFile().Path() { + var what, name string + if fld.IsExtension() { + what = "custom feature" + name = "(" + string(fld.FullName()) + ")" + } else { + what = "feature" + name = string(fld.Name()) + } + node := interp.findOptionNode(path, element) + err = interp.reporter.HandleErrorf(interp.nodeInfo(node), "%s %s cannot be used from the same file in which it is defined", what, name) + if err != nil { + return false + } + } + + if chInFeatures { + // Validate feature usage against feature settings. + + // First, check the feature support settings of the field. + opts, _ := fld.Options().(*descriptorpb.FieldOptions) + edition := interp.file.FileDescriptorProto().GetEdition() + if opts != nil && opts.FeatureSupport != nil { + err = interp.validateFeatureSupport(edition, opts.FeatureSupport, "field", string(fld.FullName()), chpath, element) + if err != nil { + return false + } + } + // Then, if it's an enum or has an enum, check the feature support settings of the enum values. + var enum protoreflect.EnumDescriptor + if fld.Enum() != nil { + enum = fld.Enum() + } else if fld.IsMap() && fld.MapValue().Enum() != nil { + enum = fld.MapValue().Enum() + } + if enum != nil { + switch { + case fld.IsMap(): + val.Map().Range(func(k protoreflect.MapKey, v protoreflect.Value) bool { + // Can't construct path to particular map entry since we don't this entry's index. + // So we leave chpath alone, and it will have to point to the whole map value (or + // the first entry if the map is de-structured across multiple option statements). 
+ err = interp.validateEnumValueFeatureSupport(edition, enum, v.Enum(), chpath, element) + return err == nil + }) + if err != nil { + return false + } + case fld.IsList(): + sl := val.List() + for i := 0; i < sl.Len(); i++ { + v := sl.Get(i) + err = interp.validateEnumValueFeatureSupport(edition, enum, v.Enum(), append(chpath, int32(i)), element) + if err != nil { + return false + } + } + default: + err = interp.validateEnumValueFeatureSupport(edition, enum, val.Enum(), chpath, element) + if err != nil { + return false + } + } + } + } + + // If it's a message or contains a message, recursively validate fields in those messages. switch { case fld.IsMap() && fld.MapValue().Message() != nil: val.Map().Range(func(k protoreflect.MapKey, v protoreflect.Value) bool { chprefix := fmt.Sprintf("%s%s[%v].", prefix, fieldName(fld), k) - err = interp.validateRecursive(v.Message(), chprefix, element, chpath, true, optNodes) + err = interp.validateRecursive(validateRequiredFields, v.Message(), chprefix, element, chpath, chIsFeatures, chInFeatures, true) return err == nil }) if err != nil { @@ -780,14 +890,14 @@ func (interp *interpreter) validateRecursive( if !inMap { chpath = append(chpath, int32(i)) } - err = interp.validateRecursive(v.Message(), chprefix, element, chpath, inMap, optNodes) + err = interp.validateRecursive(validateRequiredFields, v.Message(), chprefix, element, chpath, chIsFeatures, chInFeatures, inMap) if err != nil { return false } } case !fld.IsMap() && fld.Message() != nil: chprefix := fmt.Sprintf("%s%s.", prefix, fieldName(fld)) - err = interp.validateRecursive(val.Message(), chprefix, element, chpath, inMap, optNodes) + err = interp.validateRecursive(validateRequiredFields, val.Message(), chprefix, element, chpath, chIsFeatures, chInFeatures, inMap) if err != nil { return false } @@ -797,6 +907,79 @@ func (interp *interpreter) validateRecursive( return err } +func (interp *interpreter) validateEnumValueFeatureSupport( + edition descriptorpb.Edition, + enum 
protoreflect.EnumDescriptor, + number protoreflect.EnumNumber, + path []int32, + element proto.Message, +) error { + enumVal := enum.Values().ByNumber(number) + if enumVal == nil { + return nil + } + enumValOpts, _ := enumVal.Options().(*descriptorpb.EnumValueOptions) + if enumValOpts == nil || enumValOpts.FeatureSupport == nil { + return nil + } + return interp.validateFeatureSupport(edition, enumValOpts.FeatureSupport, "enum value", string(enumVal.Name()), path, element) +} + +func (interp *interpreter) validateFeatureSupport( + edition descriptorpb.Edition, + featureSupport *descriptorpb.FieldOptions_FeatureSupport, + what string, + name string, + path []int32, + element proto.Message, +) error { + if featureSupport.EditionIntroduced != nil && edition < featureSupport.GetEditionIntroduced() { + node := interp.findOptionNode(path, element) + err := interp.reporter.HandleErrorf(interp.nodeInfo(node), "%s %q was not introduced until edition %s", what, name, editionString(featureSupport.GetEditionIntroduced())) + if err != nil { + return err + } + } + if featureSupport.EditionRemoved != nil && edition >= featureSupport.GetEditionRemoved() { + node := interp.findOptionNode(path, element) + err := interp.reporter.HandleErrorf(interp.nodeInfo(node), "%s %q was removed in edition %s", what, name, editionString(featureSupport.GetEditionRemoved())) + if err != nil { + return err + } + } + if featureSupport.EditionDeprecated != nil && edition >= featureSupport.GetEditionDeprecated() { + node := interp.findOptionNode(path, element) + var suffix string + if featureSupport.GetDeprecationWarning() != "" { + suffix = ": " + featureSupport.GetDeprecationWarning() + } + interp.reporter.HandleWarningf(interp.nodeInfo(node), "%s %q is deprecated as of edition %s%s", what, name, editionString(featureSupport.GetEditionDeprecated()), suffix) + } + return nil +} + +func (interp *interpreter) findOptionNode( + path []int32, + element proto.Message, +) ast.Node { + elementNode := 
interp.file.Node(element) + nodeWithOpts, _ := elementNode.(ast.NodeWithOptions) + if nodeWithOpts == nil { + return elementNode + } + node, _ := findOptionNode[*ast.OptionNode]( + path, + optionsRanger{nodeWithOpts}, + func(n *ast.OptionNode) *sourceinfo.OptionSourceInfo { + return interp.index[n] + }, + ) + if node != nil { + return node + } + return elementNode +} + func findOptionNode[N ast.Node]( path []int32, nodes interface { @@ -826,14 +1009,14 @@ func findOptionNode[N ast.Node]( return bestMatch, bestMatchLen } -type optionsRanger []*ast.OptionNode +type optionsRanger struct { + node ast.NodeWithOptions +} func (r optionsRanger) Range(f func(*ast.OptionNode, ast.ValueNode) bool) { - for _, elem := range r { - if !f(elem, elem.Val) { - return - } - } + r.node.RangeOptions(func(optNode *ast.OptionNode) bool { + return f(optNode, optNode.Val) + }) } type valueRanger []ast.ValueNode diff --git a/options/options_test.go b/options/options_test.go index ca70913a..013b8fc6 100644 --- a/options/options_test.go +++ b/options/options_test.go @@ -34,7 +34,6 @@ import ( "google.golang.org/protobuf/types/descriptorpb" "github.com/bufbuild/protocompile" - "github.com/bufbuild/protocompile/internal/editions" "github.com/bufbuild/protocompile/internal/prototest" "github.com/bufbuild/protocompile/linker" "github.com/bufbuild/protocompile/options" @@ -43,13 +42,6 @@ import ( "github.com/bufbuild/protocompile/reporter" ) -func TestMain(m *testing.M) { - // Enable just for tests. 
- editions.AllowEditions = true - status := m.Run() - os.Exit(status) -} - type ident string type aggregate string @@ -509,3 +501,111 @@ func TestInterpretOptionsWithoutASTNoOp(t *testing.T) { prototest.AssertMessagesEqual(t, fd, fdFromNoAST, file.Path()) } } + +func TestInterpretOptionsFeatureLifetimeWarnings(t *testing.T) { + t.Parallel() + sources := map[string]string{ + "features.proto": ` + syntax = "proto2"; + package google.protobuf; + message FileOptions { + optional FeatureSet features = 50; + } + message FeatureSet { + extensions 1000; + optional bool okay = 200 [ + feature_support = { + edition_introduced: EDITION_2023 + } + ]; + optional bool removed = 201 [ + feature_support = { + edition_introduced: EDITION_2023 + edition_removed: EDITION_2024 + } + ]; + optional bool deprecated = 202 [ + feature_support = { + edition_introduced: EDITION_2023 + edition_deprecated: EDITION_2023 + deprecation_warning: "do not use this!" + } + ]; + optional bool deprecated_and_removed = 203 [ + feature_support = { + edition_introduced: EDITION_2023 + edition_deprecated: EDITION_2023 + deprecation_warning: "don't use this either!" 
+ edition_removed: EDITION_2024 + } + ]; + } + `, + "custom_features.proto": ` + edition = "2023"; + import "features.proto"; + extend google.protobuf.FeatureSet { + Custom custom = 1000; + } + message Custom { + bool okay = 200 [ + feature_support = { + edition_introduced: EDITION_2023 + } + ]; + bool removed = 201 [ + feature_support = { + edition_introduced: EDITION_2023 + edition_removed: EDITION_2024 + } + ]; + bool deprecated = 202 [ + feature_support = { + edition_introduced: EDITION_2023 + edition_deprecated: EDITION_2023 + deprecation_warning: "custom feature is not to be used" + } + ]; + bool deprecated_and_removed = 203 [ + feature_support = { + edition_introduced: EDITION_2023 + edition_deprecated: EDITION_2023 + deprecation_warning: "other custom feature is not to be used either" + edition_removed: EDITION_2024 + } + ]; + } + `, + "test.proto": ` + edition = "2023"; + import "features.proto"; + import "custom_features.proto"; + option features.okay = true; + option features.deprecated = true; + option features.deprecated_and_removed = true; + option features.(custom).okay = true; + option features.(custom).deprecated = true; + option features.(custom).deprecated_and_removed = true; + `, + } + var warnings []string + rep := reporter.NewReporter(nil, func(err reporter.ErrorWithPos) { + warnings = append(warnings, err.Error()) + }) + compiler := &protocompile.Compiler{ + Resolver: protocompile.WithStandardImports(&protocompile.SourceResolver{ + Accessor: protocompile.SourceAccessorFromMap(sources), + }), + Reporter: rep, + } + _, err := compiler.Compile(context.Background(), "test.proto") + require.NoError(t, err) + expectedWarnings := []string{ + `test.proto:10:25: field "Custom.deprecated_and_removed" is deprecated as of edition 2023: other custom feature is not to be used either`, + `test.proto:6:25: field "google.protobuf.FeatureSet.deprecated" is deprecated as of edition 2023: do not use this!`, + `test.proto:7:25: field 
"google.protobuf.FeatureSet.deprecated_and_removed" is deprecated as of edition 2023: don't use this either!`, + `test.proto:9:25: field "Custom.deprecated" is deprecated as of edition 2023: custom feature is not to be used`, + } + sort.Strings(warnings) + assert.Equal(t, expectedWarnings, warnings) +} diff --git a/parser/result.go b/parser/result.go index fac2bd6f..6aa922a1 100644 --- a/parser/result.go +++ b/parser/result.go @@ -109,12 +109,6 @@ func (r *result) createFileDescriptor(filename string, file *ast.FileNode, handl fd.Syntax = proto.String(file.Syntax.Syntax.AsString()) } case file.Edition != nil: - if !editions.AllowEditions { - nodeInfo := file.NodeInfo(file.Edition.Edition) - if handler.HandleErrorf(nodeInfo, `editions are not yet supported; use syntax proto2 or proto3 instead`) != nil { - return - } - } edition := file.Edition.Edition.AsString() syntax = protoreflect.Editions diff --git a/parser/validate_test.go b/parser/validate_test.go index 6f68c391..4793c49d 100644 --- a/parser/validate_test.go +++ b/parser/validate_test.go @@ -16,7 +16,6 @@ package parser import ( "errors" - "os" "os/exec" "strings" "testing" @@ -24,18 +23,10 @@ import ( "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" - "github.com/bufbuild/protocompile/internal/editions" "github.com/bufbuild/protocompile/internal/protoc" "github.com/bufbuild/protocompile/reporter" ) -func TestMain(m *testing.M) { - // Enable just for tests. 
- editions.AllowEditions = true - status := m.Run() - os.Exit(status) -} - func TestBasicValidation(t *testing.T) { t.Parallel() testCases := map[string]struct { diff --git a/protoutil/editions_test.go b/protoutil/editions_test.go index 00061f24..2024234e 100644 --- a/protoutil/editions_test.go +++ b/protoutil/editions_test.go @@ -16,29 +16,24 @@ package protoutil_test import ( "context" - "os" "testing" "github.com/stretchr/testify/require" "google.golang.org/protobuf/proto" "google.golang.org/protobuf/reflect/protodesc" "google.golang.org/protobuf/reflect/protoreflect" + "google.golang.org/protobuf/reflect/protoregistry" "google.golang.org/protobuf/types/descriptorpb" "google.golang.org/protobuf/types/dynamicpb" + "google.golang.org/protobuf/types/gofeaturespb" "github.com/bufbuild/protocompile" "github.com/bufbuild/protocompile/internal/editions" "github.com/bufbuild/protocompile/linker" "github.com/bufbuild/protocompile/protoutil" + "github.com/bufbuild/protocompile/walk" ) -func TestMain(m *testing.M) { - // Enable just for tests. 
- editions.AllowEditions = true - status := m.Run() - os.Exit(status) -} - func TestResolveFeature(t *testing.T) { t.Parallel() testResolveFeature(t) @@ -326,30 +321,40 @@ func TestResolveCustomFeature(t *testing.T) { }), } file, _ := compileFile(t, "test.proto", sourceResolver, descriptorProto) - optionsFile := file.FindImportByPath("options.proto") - extType := dynamicpb.NewExtensionType(optionsFile.FindDescriptorByName("test.custom").(protoreflect.ExtensionDescriptor)) - feature := optionsFile.FindDescriptorByName("test.CustomFeatures.encabulate").(protoreflect.FieldDescriptor) //nolint:errcheck - - val, err := protoutil.ResolveCustomFeature(file, extType, feature) - require.NoError(t, err) - require.Equal(t, testCase.expectedEncabulate, val.Bool()) - - // Same value for an element therein - elem := file.FindDescriptorByName("Foo") - require.NotNil(t, elem) - val, err = protoutil.ResolveCustomFeature(elem, extType, feature) - require.NoError(t, err) - require.Equal(t, testCase.expectedEncabulate, val.Bool()) - - // Check the other feature field, too - feature = optionsFile.FindDescriptorByName("test.CustomFeatures.nitz").(protoreflect.FieldDescriptor) //nolint:errcheck - val, err = protoutil.ResolveCustomFeature(file, extType, feature) - require.NoError(t, err) - require.Equal(t, protoreflect.EnumNumber(testCase.expectedNitz), val.Enum()) + // First we resolve the feature with the given file. + // Then we'll do a second pass where we resolve the + // feature, but all extensions are unrecognized. Both + // ways should work. 
+ for _, clearKnownExts := range []bool{false, true} { + if clearKnownExts { + clearKnownExtensionsFromFile(t, protoutil.ProtoFromFileDescriptor(file)) + } - val, err = protoutil.ResolveCustomFeature(elem, extType, feature) - require.NoError(t, err) - require.Equal(t, protoreflect.EnumNumber(testCase.expectedNitz), val.Enum()) + optionsFile := file.FindImportByPath("options.proto") + extType := dynamicpb.NewExtensionType(optionsFile.FindDescriptorByName("test.custom").(protoreflect.ExtensionDescriptor)) + feature := optionsFile.FindDescriptorByName("test.CustomFeatures.encabulate").(protoreflect.FieldDescriptor) //nolint:errcheck + + val, err := protoutil.ResolveCustomFeature(file, extType, feature) + require.NoError(t, err) + require.Equal(t, testCase.expectedEncabulate, val.Bool()) + + // Same value for an element therein + elem := file.FindDescriptorByName("Foo") + require.NotNil(t, elem) + val, err = protoutil.ResolveCustomFeature(elem, extType, feature) + require.NoError(t, err) + require.Equal(t, testCase.expectedEncabulate, val.Bool()) + + // Check the other feature field, too + feature = optionsFile.FindDescriptorByName("test.CustomFeatures.nitz").(protoreflect.FieldDescriptor) //nolint:errcheck + val, err = protoutil.ResolveCustomFeature(file, extType, feature) + require.NoError(t, err) + require.Equal(t, protoreflect.EnumNumber(testCase.expectedNitz), val.Enum()) + + val, err = protoutil.ResolveCustomFeature(elem, extType, feature) + require.NoError(t, err) + require.Equal(t, protoreflect.EnumNumber(testCase.expectedNitz), val.Enum()) + } }) } @@ -373,35 +378,186 @@ func TestResolveCustomFeature(t *testing.T) { }), } file, _ := compileFile(t, "test.proto", sourceResolver, descriptorProto) - optionsFile := file.FindImportByPath("options.proto") - extType := dynamicpb.NewExtensionType(optionsFile.FindDescriptorByName("test.custom").(protoreflect.ExtensionDescriptor)) - feature := 
optionsFile.FindDescriptorByName("test.CustomFeatures.encabulate").(protoreflect.FieldDescriptor) //nolint:errcheck + // First we resolve the feature with the given file. + // Then we'll do a second pass where we resolve the + // feature, but all extensions are unrecognized. Both + // ways should work. + for _, clearKnownExts := range []bool{false, true} { + if clearKnownExts { + clearKnownExtensionsFromFile(t, protoutil.ProtoFromFileDescriptor(file)) + } - val, err := protoutil.ResolveCustomFeature(file, extType, feature) - require.NoError(t, err) - // Default for edition - require.False(t, val.Bool()) + optionsFile := file.FindImportByPath("options.proto") + extType := dynamicpb.NewExtensionType(optionsFile.FindDescriptorByName("test.custom").(protoreflect.ExtensionDescriptor)) + feature := optionsFile.FindDescriptorByName("test.CustomFeatures.encabulate").(protoreflect.FieldDescriptor) //nolint:errcheck - // Override - field := file.FindDescriptorByName("Bar.name") - require.NotNil(t, field) - val, err = protoutil.ResolveCustomFeature(field, extType, feature) - require.NoError(t, err) - require.True(t, val.Bool()) + val, err := protoutil.ResolveCustomFeature(file, extType, feature) + require.NoError(t, err) + // Default for edition + require.False(t, val.Bool()) - // Check the other feature field, too - feature = optionsFile.FindDescriptorByName("test.CustomFeatures.nitz").(protoreflect.FieldDescriptor) //nolint:errcheck - val, err = protoutil.ResolveCustomFeature(file, extType, feature) - require.NoError(t, err) - require.Equal(t, protoreflect.EnumNumber(3), val.Enum()) + // Override + field := file.FindDescriptorByName("Bar.name") + require.NotNil(t, field) + val, err = protoutil.ResolveCustomFeature(field, extType, feature) + require.NoError(t, err) + require.True(t, val.Bool()) - val, err = protoutil.ResolveCustomFeature(field, extType, feature) - require.NoError(t, err) - require.Equal(t, protoreflect.EnumNumber(2), val.Enum()) + // Check the other feature 
field, too + feature = optionsFile.FindDescriptorByName("test.CustomFeatures.nitz").(protoreflect.FieldDescriptor) //nolint:errcheck + val, err = protoutil.ResolveCustomFeature(file, extType, feature) + require.NoError(t, err) + require.Equal(t, protoreflect.EnumNumber(3), val.Enum()) + + val, err = protoutil.ResolveCustomFeature(field, extType, feature) + require.NoError(t, err) + require.Equal(t, protoreflect.EnumNumber(2), val.Enum()) + } }) } -func compileFile(t *testing.T, filename string, sources *protocompile.SourceResolver, deps ...*descriptorpb.FileDescriptorProto) (result linker.File, featureSet protoreflect.MessageDescriptor) { +func TestResolveCustomFeature_Generated(t *testing.T) { + t.Parallel() + descriptorProto := protodesc.ToFileDescriptorProto( + (*descriptorpb.FileDescriptorProto)(nil).ProtoReflect().Descriptor().ParentFile(), + ) + goFeaturesProto := protodesc.ToFileDescriptorProto( + (*gofeaturespb.GoFeatures)(nil).ProtoReflect().Descriptor().ParentFile(), + ) + + // We can do proto2 and proto3 in the same way since they + // can't override feature values. + preEditionsTestCases := []struct { + syntax string + expectedValue bool + }{ + { + syntax: "proto2", + expectedValue: true, + }, + { + syntax: "proto3", + expectedValue: false, + }, + } + for _, testCase := range preEditionsTestCases { + testCase := testCase + t.Run(testCase.syntax, func(t *testing.T) { + t.Parallel() + sourceResolver := &protocompile.SourceResolver{ + Accessor: protocompile.SourceAccessorFromMap(map[string]string{ + "test.proto": ` + syntax = "` + testCase.syntax + `"; + import "google/protobuf/go_features.proto"; + enum Foo { + ZERO = 0; + }`, + }), + } + file, _ := compileFile(t, "test.proto", sourceResolver, descriptorProto, goFeaturesProto) + // First we resolve the feature with the given file. + // Then we'll do a second pass where we resolve the + // feature, but all extensions are unrecognized. Both + // ways should work. 
+ for _, clearKnownExts := range []bool{false, true} { + if clearKnownExts { + clearKnownExtensionsFromFile(t, protoutil.ProtoFromFileDescriptor(file)) + } + + extType := gofeaturespb.E_Go + feature := gofeaturespb.E_Go.TypeDescriptor().Message().Fields().ByName("legacy_unmarshal_json_enum") + require.NotNil(t, feature) + + // Default for edition + val, err := protoutil.ResolveCustomFeature(file, extType, feature) + require.NoError(t, err) + require.Equal(t, testCase.expectedValue, val.Bool()) + + // Same value for an element therein + elem := file.FindDescriptorByName("Foo") + require.NotNil(t, elem) + val, err = protoutil.ResolveCustomFeature(elem, extType, feature) + require.NoError(t, err) + require.Equal(t, testCase.expectedValue, val.Bool()) + } + }) + } + + editionsTestCases := []struct { + name string + source string + exopectedValue bool + }{ + { + name: "editions-2023-default", + source: ` + edition = "2023"; + import "google/protobuf/go_features.proto"; + enum Foo { + ZERO = 0; + }`, + exopectedValue: false, + }, + { + name: "editions-override", + source: ` + edition = "2023"; + import "google/protobuf/go_features.proto"; + enum Foo { + option features.(pb.go).legacy_unmarshal_json_enum = true; + ZERO = 0; + }`, + exopectedValue: true, + }, + } + + for _, testCase := range editionsTestCases { + testCase := testCase + t.Run(testCase.name, func(t *testing.T) { + t.Parallel() + + sourceResolver := &protocompile.SourceResolver{ + Accessor: protocompile.SourceAccessorFromMap(map[string]string{ + "test.proto": testCase.source, + }), + } + file, _ := compileFile(t, "test.proto", sourceResolver, descriptorProto, goFeaturesProto) + // First we resolve the feature with the given file. + // Then we'll do a second pass where we resolve the + // feature, but all extensions are unrecognized. Both + // ways should work. 
+ for _, clearKnownExts := range []bool{false, true} { + if clearKnownExts { + clearKnownExtensionsFromFile(t, protoutil.ProtoFromFileDescriptor(file)) + } + + extType := gofeaturespb.E_Go + feature := gofeaturespb.E_Go.TypeDescriptor().Message().Fields().ByName("legacy_unmarshal_json_enum") + require.NotNil(t, feature) + + val, err := protoutil.ResolveCustomFeature(file, extType, feature) + require.NoError(t, err) + // Edition default is false, and can't be overridden at the file level, + // so this should always be false. + require.False(t, val.Bool()) + + // Override + elem := file.FindDescriptorByName("Foo") + require.NotNil(t, elem) + val, err = protoutil.ResolveCustomFeature(elem, extType, feature) + require.NoError(t, err) + require.Equal(t, testCase.exopectedValue, val.Bool()) + } + }) + } +} + +func compileFile( + t *testing.T, + filename string, + sources *protocompile.SourceResolver, + deps ...*descriptorpb.FileDescriptorProto, +) (result linker.File, featureSet protoreflect.MessageDescriptor) { t.Helper() if sources == nil { sources = &protocompile.SourceResolver{ @@ -449,3 +605,44 @@ func addDepsToResolver(resolver protocompile.Resolver, deps ...*descriptorpb.Fil return resolver.FindFileByPath(path) }) } + +func clearKnownExtensionsFromFile(t *testing.T, file *descriptorpb.FileDescriptorProto) { + t.Helper() + clearKnownExtensionsFromOptions(t, file.GetOptions()) + err := walk.DescriptorProtos(file, func(name protoreflect.FullName, element proto.Message) error { + switch element := element.(type) { + case *descriptorpb.DescriptorProto: + clearKnownExtensionsFromOptions(t, element.GetOptions()) + for _, extRange := range element.GetExtensionRange() { + clearKnownExtensionsFromOptions(t, extRange.GetOptions()) + } + case *descriptorpb.FieldDescriptorProto: + clearKnownExtensionsFromOptions(t, element.GetOptions()) + case *descriptorpb.OneofDescriptorProto: + clearKnownExtensionsFromOptions(t, element.GetOptions()) + case 
*descriptorpb.EnumDescriptorProto: + clearKnownExtensionsFromOptions(t, element.GetOptions()) + case *descriptorpb.EnumValueDescriptorProto: + clearKnownExtensionsFromOptions(t, element.GetOptions()) + case *descriptorpb.ServiceDescriptorProto: + clearKnownExtensionsFromOptions(t, element.GetOptions()) + case *descriptorpb.MethodDescriptorProto: + clearKnownExtensionsFromOptions(t, element.GetOptions()) + } + return nil + }) + require.NoError(t, err) +} + +func clearKnownExtensionsFromOptions(t *testing.T, options proto.Message) { + t.Helper() + if options == nil || !options.ProtoReflect().IsValid() { + return // nothing to do + } + data, err := proto.Marshal(options) + require.NoError(t, err) + // We unmarshal from bytes, with a nil resolver, so all extensions + // will remain unrecognized. + err = proto.UnmarshalOptions{Resolver: (*protoregistry.Types)(nil)}.Unmarshal(data, options) + require.NoError(t, err) +} diff --git a/resolver.go b/resolver.go index 7ff40eb0..400d554b 100644 --- a/resolver.go +++ b/resolver.go @@ -180,6 +180,27 @@ func SourceAccessorFromMap(srcs map[string]string) func(string) (io.ReadCloser, // WithStandardImports returns a new resolver that knows about the same standard // imports that are included with protoc. +// +// Note that this uses the descriptors embedded in generated code in the packages +// of the Protobuf Go module, except for "google/protobuf/cpp_features.proto" and +// "google/protobuf/java_features.proto". For those two files, compiled descriptors +// are embedded in this module because there is no package in the Protobuf Go module +// that contains generated code for those files. This resolver also provides results +// for the "google/protobuf/go_features.proto", which is technically not a standard +// file (it is not included with protoc) but is included in generated code in the +// Protobuf Go module. 
+// +// As of v0.14.0 of this module (and v1.34.2 of the Protobuf Go module and v27.0 of +// Protobuf), the contents of the standard import "google/protobuf/descriptor.proto" +// contain extension declarations which are *absent* from the descriptors that this +// resolver returns. That is because extension declarations are only retained in +// source, not at runtime, which means they are not available in the embedded +// descriptors in generated code. +// +// To use versions of the standard imports that *do* include these extension +// declarations, see wellknownimports.WithStandardImports instead. As of this +// writing, the declarations are only needed to prevent source files from +// illegally re-defining the custom features for C++, Java, and Go. func WithStandardImports(r Resolver) Resolver { return ResolverFunc(func(name string) (SearchResult, error) { res, err := r.FindFileByPath(name) diff --git a/std_imports.go b/std_imports.go index 58c61dac..a31232ac 100644 --- a/std_imports.go +++ b/std_imports.go @@ -17,7 +17,8 @@ package protocompile import ( "google.golang.org/protobuf/reflect/protoreflect" "google.golang.org/protobuf/reflect/protoregistry" - _ "google.golang.org/protobuf/types/known/anypb" // link in packages that include the standard protos included with protoc. + _ "google.golang.org/protobuf/types/gofeaturespb" // link in packages that include the standard protos included with protoc. 
+ _ "google.golang.org/protobuf/types/known/anypb" _ "google.golang.org/protobuf/types/known/apipb" _ "google.golang.org/protobuf/types/known/durationpb" _ "google.golang.org/protobuf/types/known/emptypb" @@ -28,6 +29,8 @@ import ( _ "google.golang.org/protobuf/types/known/typepb" _ "google.golang.org/protobuf/types/known/wrapperspb" _ "google.golang.org/protobuf/types/pluginpb" + + "github.com/bufbuild/protocompile/internal/featuresext" ) // All files that are included with protoc are also included with this package @@ -44,6 +47,7 @@ func init() { "google/protobuf/duration.proto", "google/protobuf/empty.proto", "google/protobuf/field_mask.proto", + "google/protobuf/go_features.proto", "google/protobuf/source_context.proto", "google/protobuf/struct.proto", "google/protobuf/timestamp.proto", @@ -59,4 +63,34 @@ func init() { } standardImports[fn] = fd } + + otherFeatures := []struct { + Name string + GetDescriptor func() (protoreflect.FileDescriptor, error) + }{ + { + Name: "google/protobuf/cpp_features.proto", + GetDescriptor: featuresext.CppFeaturesDescriptor, + }, + { + Name: "google/protobuf/java_features.proto", + GetDescriptor: featuresext.JavaFeaturesDescriptor, + }, + } + for _, feature := range otherFeatures { + // First see if the program has generated Go code for this + // file linked in: + fd, err := protoregistry.GlobalFiles.FindFileByPath(feature.Name) + if err == nil { + standardImports[feature.Name] = fd + continue + } + fd, err = feature.GetDescriptor() + if err != nil { + // For these extensions to FeatureSet, we are lenient. If + // we can't load them, just ignore them. 
+ continue + } + standardImports[feature.Name] = fd + } } diff --git a/std_imports_test.go b/std_imports_test.go index 91242788..fcfc4021 100644 --- a/std_imports_test.go +++ b/std_imports_test.go @@ -29,6 +29,7 @@ func TestStdImports(t *testing.T) { c := Compiler{Resolver: WithStandardImports(&SourceResolver{})} ctx := context.Background() for name, fileProto := range standardImports { + t.Log(name) fds, err := c.Compile(ctx, name) if err != nil { t.Errorf("failed to compile %q: %v", name, err) diff --git a/supported_editions.go b/supported_editions.go new file mode 100644 index 00000000..72bd51f1 --- /dev/null +++ b/supported_editions.go @@ -0,0 +1,30 @@ +// Copyright 2020-2024 Buf Technologies, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package protocompile + +import ( + "google.golang.org/protobuf/types/descriptorpb" + + "github.com/bufbuild/protocompile/internal/editions" +) + +// IsEditionSupported returns true if this module can compile sources for +// the given edition. This returns true for the special EDITION_PROTO2 and +// EDITION_PROTO3 as well as all actual editions supported. 
+func IsEditionSupported(edition descriptorpb.Edition) bool { + return edition == descriptorpb.Edition_EDITION_PROTO2 || + edition == descriptorpb.Edition_EDITION_PROTO3 || + (edition >= editions.MinSupportedEdition && edition <= editions.MaxSupportedEdition) +} diff --git a/supported_editions_test.go b/supported_editions_test.go new file mode 100644 index 00000000..ffcf3df1 --- /dev/null +++ b/supported_editions_test.go @@ -0,0 +1,45 @@ +// Copyright 2020-2024 Buf Technologies, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package protocompile + +import ( + "math" + "testing" + + "github.com/stretchr/testify/assert" + "google.golang.org/protobuf/types/descriptorpb" +) + +func TestIsEditionSupported(t *testing.T) { + t.Parallel() + + var min, max descriptorpb.Edition + min = math.MaxInt32 + + for editionNum := range descriptorpb.Edition_name { + edition := descriptorpb.Edition(editionNum) + if IsEditionSupported(edition) { + if edition < min { + min = edition + } + if edition > max { + max = edition + } + } + } + + assert.Equal(t, descriptorpb.Edition_EDITION_PROTO2, min) + assert.Equal(t, descriptorpb.Edition_EDITION_2023, max) +} diff --git a/wellknownimports/google/protobuf/any.proto b/wellknownimports/google/protobuf/any.proto new file mode 100644 index 00000000..3f2ac23d --- /dev/null +++ b/wellknownimports/google/protobuf/any.proto @@ -0,0 +1,146 @@ +// Copyright 2020-2024 Buf Technologies, Inc. 
+// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +syntax = "proto3"; + +package google.protobuf; + +option go_package = "google.golang.org/protobuf/types/known/anypb"; +option java_package = "com.google.protobuf"; +option java_outer_classname = "AnyProto"; +option java_multiple_files = true; +option objc_class_prefix = "GPB"; +option csharp_namespace = "Google.Protobuf.WellKnownTypes"; + +// `Any` contains an arbitrary serialized protocol buffer message along with a +// URL that describes the type of the serialized message. +// +// Protobuf library provides support to pack/unpack Any values in the form +// of utility functions or additional generated methods of the Any type. +// +// Example 1: Pack and unpack a message in C++. +// +// Foo foo = ...; +// Any any; +// any.PackFrom(foo); +// ... +// if (any.UnpackTo(&foo)) { +// ... +// } +// +// Example 2: Pack and unpack a message in Java. +// +// Foo foo = ...; +// Any any = Any.pack(foo); +// ... +// if (any.is(Foo.class)) { +// foo = any.unpack(Foo.class); +// } +// // or ... +// if (any.isSameTypeAs(Foo.getDefaultInstance())) { +// foo = any.unpack(Foo.getDefaultInstance()); +// } +// +// Example 3: Pack and unpack a message in Python. +// +// foo = Foo(...) +// any = Any() +// any.Pack(foo) +// ... +// if any.Is(Foo.DESCRIPTOR): +// any.Unpack(foo) +// ... +// +// Example 4: Pack and unpack a message in Go +// +// foo := &pb.Foo{...} +// any, err := anypb.New(foo) +// if err != nil { +// ... 
+// } +// ... +// foo := &pb.Foo{} +// if err := any.UnmarshalTo(foo); err != nil { +// ... +// } +// +// The pack methods provided by protobuf library will by default use +// 'type.googleapis.com/full.type.name' as the type URL and the unpack +// methods only use the fully qualified type name after the last '/' +// in the type URL, for example "foo.bar.com/x/y.z" will yield type +// name "y.z". +// +// JSON +// ==== +// The JSON representation of an `Any` value uses the regular +// representation of the deserialized, embedded message, with an +// additional field `@type` which contains the type URL. Example: +// +// package google.profile; +// message Person { +// string first_name = 1; +// string last_name = 2; +// } +// +// { +// "@type": "type.googleapis.com/google.profile.Person", +// "firstName": , +// "lastName": +// } +// +// If the embedded message type is well-known and has a custom JSON +// representation, that representation will be embedded adding a field +// `value` which holds the custom JSON in addition to the `@type` +// field. Example (for message [google.protobuf.Duration][]): +// +// { +// "@type": "type.googleapis.com/google.protobuf.Duration", +// "value": "1.212s" +// } +// +message Any { + // A URL/resource name that uniquely identifies the type of the serialized + // protocol buffer message. This string must contain at least + // one "/" character. The last segment of the URL's path must represent + // the fully qualified name of the type (as in + // `path/google.protobuf.Duration`). The name should be in a canonical form + // (e.g., leading "." is not accepted). + // + // In practice, teams usually precompile into the binary all types that they + // expect it to use in the context of Any. However, for URLs which use the + // scheme `http`, `https`, or no scheme, one can optionally set up a type + // server that maps type URLs to message definitions as follows: + // + // * If no scheme is provided, `https` is assumed. 
+ // * An HTTP GET on the URL must yield a [google.protobuf.Type][] + // value in binary format, or produce an error. + // * Applications are allowed to cache lookup results based on the + // URL, or have them precompiled into a binary to avoid any + // lookup. Therefore, binary compatibility needs to be preserved + // on changes to types. (Use versioned type names to manage + // breaking changes.) + // + // Note: this functionality is not currently available in the official + // protobuf release, and it is not used for type URLs beginning with + // type.googleapis.com. As of May 2023, there are no widely used type server + // implementations and no plans to implement one. + // + // Schemes other than `http`, `https` (or the empty scheme) might be + // used with implementation specific semantics. + // + string type_url = 1; + + // Must be a valid serialized protocol buffer of the above specified type. + bytes value = 2; +} diff --git a/wellknownimports/google/protobuf/api.proto b/wellknownimports/google/protobuf/api.proto new file mode 100644 index 00000000..c454f3b7 --- /dev/null +++ b/wellknownimports/google/protobuf/api.proto @@ -0,0 +1,191 @@ +// Copyright 2020-2024 Buf Technologies, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +syntax = "proto3"; + +package google.protobuf; + +import "google/protobuf/source_context.proto"; +import "google/protobuf/type.proto"; + +option java_package = "com.google.protobuf"; +option java_outer_classname = "ApiProto"; +option java_multiple_files = true; +option objc_class_prefix = "GPB"; +option csharp_namespace = "Google.Protobuf.WellKnownTypes"; +option go_package = "google.golang.org/protobuf/types/known/apipb"; + +// Api is a light-weight descriptor for an API Interface. +// +// Interfaces are also described as "protocol buffer services" in some contexts, +// such as by the "service" keyword in a .proto file, but they are different +// from API Services, which represent a concrete implementation of an interface +// as opposed to simply a description of methods and bindings. They are also +// sometimes simply referred to as "APIs" in other contexts, such as the name of +// this message itself. See https://cloud.google.com/apis/design/glossary for +// detailed terminology. +message Api { + // The fully qualified name of this interface, including package name + // followed by the interface's simple name. + string name = 1; + + // The methods of this interface, in unspecified order. + repeated Method methods = 2; + + // Any metadata attached to the interface. + repeated Option options = 3; + + // A version string for this interface. If specified, must have the form + // `major-version.minor-version`, as in `1.10`. If the minor version is + // omitted, it defaults to zero. If the entire version field is empty, the + // major version is derived from the package name, as outlined below. If the + // field is not empty, the version in the package name will be verified to be + // consistent with what is provided here. + // + // The versioning schema uses [semantic + // versioning](http://semver.org) where the major version number + // indicates a breaking change and the minor version an additive, + // non-breaking change. 
Both version numbers are signals to users + // what to expect from different versions, and should be carefully + // chosen based on the product plan. + // + // The major version is also reflected in the package name of the + // interface, which must end in `v`, as in + // `google.feature.v1`. For major versions 0 and 1, the suffix can + // be omitted. Zero major versions must only be used for + // experimental, non-GA interfaces. + // + string version = 4; + + // Source context for the protocol buffer service represented by this + // message. + SourceContext source_context = 5; + + // Included interfaces. See [Mixin][]. + repeated Mixin mixins = 6; + + // The source syntax of the service. + Syntax syntax = 7; +} + +// Method represents a method of an API interface. +message Method { + // The simple name of this method. + string name = 1; + + // A URL of the input message type. + string request_type_url = 2; + + // If true, the request is streamed. + bool request_streaming = 3; + + // The URL of the output message type. + string response_type_url = 4; + + // If true, the response is streamed. + bool response_streaming = 5; + + // Any metadata attached to the method. + repeated Option options = 6; + + // The source syntax of this method. + Syntax syntax = 7; +} + +// Declares an API Interface to be included in this interface. The including +// interface must redeclare all the methods from the included interface, but +// documentation and options are inherited as follows: +// +// - If after comment and whitespace stripping, the documentation +// string of the redeclared method is empty, it will be inherited +// from the original method. +// +// - Each annotation belonging to the service config (http, +// visibility) which is not set in the redeclared method will be +// inherited. +// +// - If an http annotation is inherited, the path pattern will be +// modified as follows. 
Any version prefix will be replaced by the +// version of the including interface plus the [root][] path if +// specified. +// +// Example of a simple mixin: +// +// package google.acl.v1; +// service AccessControl { +// // Get the underlying ACL object. +// rpc GetAcl(GetAclRequest) returns (Acl) { +// option (google.api.http).get = "/v1/{resource=**}:getAcl"; +// } +// } +// +// package google.storage.v2; +// service Storage { +// rpc GetAcl(GetAclRequest) returns (Acl); +// +// // Get a data record. +// rpc GetData(GetDataRequest) returns (Data) { +// option (google.api.http).get = "/v2/{resource=**}"; +// } +// } +// +// Example of a mixin configuration: +// +// apis: +// - name: google.storage.v2.Storage +// mixins: +// - name: google.acl.v1.AccessControl +// +// The mixin construct implies that all methods in `AccessControl` are +// also declared with same name and request/response types in +// `Storage`. A documentation generator or annotation processor will +// see the effective `Storage.GetAcl` method after inherting +// documentation and annotations as follows: +// +// service Storage { +// // Get the underlying ACL object. +// rpc GetAcl(GetAclRequest) returns (Acl) { +// option (google.api.http).get = "/v2/{resource=**}:getAcl"; +// } +// ... +// } +// +// Note how the version in the path pattern changed from `v1` to `v2`. +// +// If the `root` field in the mixin is specified, it should be a +// relative path under which inherited HTTP paths are placed. Example: +// +// apis: +// - name: google.storage.v2.Storage +// mixins: +// - name: google.acl.v1.AccessControl +// root: acls +// +// This implies the following inherited HTTP annotation: +// +// service Storage { +// // Get the underlying ACL object. +// rpc GetAcl(GetAclRequest) returns (Acl) { +// option (google.api.http).get = "/v2/acls/{resource=**}:getAcl"; +// } +// ... +// } +message Mixin { + // The fully qualified name of the interface which is included. 
+ string name = 1; + + // If non-empty specifies a path under which inherited HTTP paths + // are rooted. + string root = 2; +} diff --git a/wellknownimports/google/protobuf/compiler/plugin.proto b/wellknownimports/google/protobuf/compiler/plugin.proto new file mode 100644 index 00000000..8e0b02af --- /dev/null +++ b/wellknownimports/google/protobuf/compiler/plugin.proto @@ -0,0 +1,187 @@ +// Copyright 2020-2024 Buf Technologies, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +// Author: kenton@google.com (Kenton Varda) +// +// protoc (aka the Protocol Compiler) can be extended via plugins. A plugin is +// just a program that reads a CodeGeneratorRequest from stdin and writes a +// CodeGeneratorResponse to stdout. +// +// Plugins written using C++ can use google/protobuf/compiler/plugin.h instead +// of dealing with the raw protocol defined here. +// +// A plugin executable needs only to be placed somewhere in the path. The +// plugin should be named "protoc-gen-$NAME", and will then be used when the +// flag "--${NAME}_out" is passed to protoc. + +syntax = "proto2"; + +package google.protobuf.compiler; +option java_package = "com.google.protobuf.compiler"; +option java_outer_classname = "PluginProtos"; + +option csharp_namespace = "Google.Protobuf.Compiler"; +option go_package = "google.golang.org/protobuf/types/pluginpb"; + +import "google/protobuf/descriptor.proto"; + +// The version number of protocol compiler. 
+message Version { + optional int32 major = 1; + optional int32 minor = 2; + optional int32 patch = 3; + // A suffix for alpha, beta or rc release, e.g., "alpha-1", "rc2". It should + // be empty for mainline stable releases. + optional string suffix = 4; +} + +// An encoded CodeGeneratorRequest is written to the plugin's stdin. +message CodeGeneratorRequest { + // The .proto files that were explicitly listed on the command-line. The + // code generator should generate code only for these files. Each file's + // descriptor will be included in proto_file, below. + repeated string file_to_generate = 1; + + // The generator parameter passed on the command-line. + optional string parameter = 2; + + // FileDescriptorProtos for all files in files_to_generate and everything + // they import. The files will appear in topological order, so each file + // appears before any file that imports it. + // + // Note: the files listed in files_to_generate will include runtime-retention + // options only, but all other files will include source-retention options. + // The source_file_descriptors field below is available in case you need + // source-retention options for files_to_generate. + // + // protoc guarantees that all proto_files will be written after + // the fields above, even though this is not technically guaranteed by the + // protobuf wire format. This theoretically could allow a plugin to stream + // in the FileDescriptorProtos and handle them one by one rather than read + // the entire set into memory at once. However, as of this writing, this + // is not similarly optimized on protoc's end -- it will store all fields in + // memory at once before sending them to the plugin. + // + // Type names of fields and extensions in the FileDescriptorProto are always + // fully qualified. + repeated FileDescriptorProto proto_file = 15; + + // File descriptors with all options, including source-retention options. 
+ // These descriptors are only provided for the files listed in + // files_to_generate. + repeated FileDescriptorProto source_file_descriptors = 17; + + // The version number of protocol compiler. + optional Version compiler_version = 3; +} + +// The plugin writes an encoded CodeGeneratorResponse to stdout. +message CodeGeneratorResponse { + // Error message. If non-empty, code generation failed. The plugin process + // should exit with status code zero even if it reports an error in this way. + // + // This should be used to indicate errors in .proto files which prevent the + // code generator from generating correct code. Errors which indicate a + // problem in protoc itself -- such as the input CodeGeneratorRequest being + // unparseable -- should be reported by writing a message to stderr and + // exiting with a non-zero status code. + optional string error = 1; + + // A bitmask of supported features that the code generator supports. + // This is a bitwise "or" of values from the Feature enum. + optional uint64 supported_features = 2; + + // Sync with code_generator.h. + enum Feature { + FEATURE_NONE = 0; + FEATURE_PROTO3_OPTIONAL = 1; + FEATURE_SUPPORTS_EDITIONS = 2; + } + + // The minimum edition this plugin supports. This will be treated as an + // Edition enum, but we want to allow unknown values. It should be specified + // according the edition enum value, *not* the edition number. Only takes + // effect for plugins that have FEATURE_SUPPORTS_EDITIONS set. + optional int32 minimum_edition = 3; + + // The maximum edition this plugin supports. This will be treated as an + // Edition enum, but we want to allow unknown values. It should be specified + // according the edition enum value, *not* the edition number. Only takes + // effect for plugins that have FEATURE_SUPPORTS_EDITIONS set. + optional int32 maximum_edition = 4; + + // Represents a single generated file. + message File { + // The file name, relative to the output directory. 
The name must not + // contain "." or ".." components and must be relative, not be absolute (so, + // the file cannot lie outside the output directory). "/" must be used as + // the path separator, not "\". + // + // If the name is omitted, the content will be appended to the previous + // file. This allows the generator to break large files into small chunks, + // and allows the generated text to be streamed back to protoc so that large + // files need not reside completely in memory at one time. Note that as of + // this writing protoc does not optimize for this -- it will read the entire + // CodeGeneratorResponse before writing files to disk. + optional string name = 1; + + // If non-empty, indicates that the named file should already exist, and the + // content here is to be inserted into that file at a defined insertion + // point. This feature allows a code generator to extend the output + // produced by another code generator. The original generator may provide + // insertion points by placing special annotations in the file that look + // like: + // @@protoc_insertion_point(NAME) + // The annotation can have arbitrary text before and after it on the line, + // which allows it to be placed in a comment. NAME should be replaced with + // an identifier naming the point -- this is what other generators will use + // as the insertion_point. Code inserted at this point will be placed + // immediately above the line containing the insertion point (thus multiple + // insertions to the same point will come out in the order they were added). + // The double-@ is intended to make it unlikely that the generated code + // could contain things that look like insertion points by accident. + // + // For example, the C++ code generator places the following line in the + // .pb.h files that it generates: + // // @@protoc_insertion_point(namespace_scope) + // This line appears within the scope of the file's package namespace, but + // outside of any particular class. 
Another plugin can then specify the + // insertion_point "namespace_scope" to generate additional classes or + // other declarations that should be placed in this scope. + // + // Note that if the line containing the insertion point begins with + // whitespace, the same whitespace will be added to every line of the + // inserted text. This is useful for languages like Python, where + // indentation matters. In these languages, the insertion point comment + // should be indented the same amount as any inserted code will need to be + // in order to work correctly in that context. + // + // The code generator that generates the initial file and the one which + // inserts into it must both run as part of a single invocation of protoc. + // Code generators are executed in the order in which they appear on the + // command line. + // + // If |insertion_point| is present, |name| must also be present. + optional string insertion_point = 2; + + // The file contents. + optional string content = 15; + + // Information describing the file content being inserted. If an insertion + // point is used, this information will be appropriately offset and inserted + // into the code generation metadata for the generated files. + optional GeneratedCodeInfo generated_code_info = 16; + } + repeated File file = 15; +} diff --git a/wellknownimports/google/protobuf/cpp_features.proto b/wellknownimports/google/protobuf/cpp_features.proto new file mode 100644 index 00000000..059c9043 --- /dev/null +++ b/wellknownimports/google/protobuf/cpp_features.proto @@ -0,0 +1,65 @@ +// Copyright 2020-2024 Buf Technologies, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +syntax = "proto2"; + +package pb; + +import "google/protobuf/descriptor.proto"; + +extend google.protobuf.FeatureSet { + optional CppFeatures cpp = 1000; +} + +message CppFeatures { + // Whether or not to treat an enum field as closed. This option is only + // applicable to enum fields, and will be removed in the future. It is + // consistent with the legacy behavior of using proto3 enum types for proto2 + // fields. + optional bool legacy_closed_enum = 1 [ + retention = RETENTION_RUNTIME, + targets = TARGET_TYPE_FIELD, + targets = TARGET_TYPE_FILE, + // TODO Enable this in google3 once protoc rolls out. + feature_support = { + edition_introduced: EDITION_2023, + edition_deprecated: EDITION_2023, + deprecation_warning: "The legacy closed enum treatment in C++ is " + "deprecated and is scheduled to be removed in " + "edition 2025. Mark enum type on the enum " + "definitions themselves rather than on fields.", + }, + edition_defaults = { edition: EDITION_PROTO2, value: "true" }, + edition_defaults = { edition: EDITION_PROTO3, value: "false" } + ]; + + enum StringType { + STRING_TYPE_UNKNOWN = 0; + VIEW = 1; + CORD = 2; + STRING = 3; + } + + optional StringType string_type = 2 [ + retention = RETENTION_RUNTIME, + targets = TARGET_TYPE_FIELD, + targets = TARGET_TYPE_FILE, + // TODO Enable this in google3 once protoc rolls out. 
+ feature_support = { + edition_introduced: EDITION_2023, + }, + edition_defaults = { edition: EDITION_PROTO2, value: "STRING" }, + edition_defaults = { edition: EDITION_2024, value: "VIEW" } + ]; +} diff --git a/wellknownimports/google/protobuf/descriptor.proto b/wellknownimports/google/protobuf/descriptor.proto new file mode 100644 index 00000000..318eacf7 --- /dev/null +++ b/wellknownimports/google/protobuf/descriptor.proto @@ -0,0 +1,1285 @@ +// Copyright 2020-2024 Buf Technologies, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +// Author: kenton@google.com (Kenton Varda) +// Based on original Protocol Buffers design by +// Sanjay Ghemawat, Jeff Dean, and others. +// +// The messages in this file describe the definitions found in .proto files. +// A valid .proto file can be translated directly to a FileDescriptorProto +// without any other information (e.g. without reading its imports). + +syntax = "proto2"; + +package google.protobuf; + +option go_package = "google.golang.org/protobuf/types/descriptorpb"; +option java_package = "com.google.protobuf"; +option java_outer_classname = "DescriptorProtos"; +option csharp_namespace = "Google.Protobuf.Reflection"; +option objc_class_prefix = "GPB"; +option cc_enable_arenas = true; + +// descriptor.proto must be optimized for speed because reflection-based +// algorithms don't work during bootstrapping. 
+option optimize_for = SPEED; + +// The protocol compiler can output a FileDescriptorSet containing the .proto +// files it parses. +message FileDescriptorSet { + repeated FileDescriptorProto file = 1; +} + +// The full set of known editions. +enum Edition { + // A placeholder for an unknown edition value. + EDITION_UNKNOWN = 0; + + // A placeholder edition for specifying default behaviors *before* a feature + // was first introduced. This is effectively an "infinite past". + EDITION_LEGACY = 900; + + // Legacy syntax "editions". These pre-date editions, but behave much like + // distinct editions. These can't be used to specify the edition of proto + // files, but feature definitions must supply proto2/proto3 defaults for + // backwards compatibility. + EDITION_PROTO2 = 998; + EDITION_PROTO3 = 999; + + // Editions that have been released. The specific values are arbitrary and + // should not be depended on, but they will always be time-ordered for easy + // comparison. + EDITION_2023 = 1000; + EDITION_2024 = 1001; + + // Placeholder editions for testing feature resolution. These should not be + // used or relyed on outside of tests. + EDITION_1_TEST_ONLY = 1; + EDITION_2_TEST_ONLY = 2; + EDITION_99997_TEST_ONLY = 99997; + EDITION_99998_TEST_ONLY = 99998; + EDITION_99999_TEST_ONLY = 99999; + + // Placeholder for specifying unbounded edition support. This should only + // ever be used by plugins that can expect to never require any changes to + // support a new edition. + EDITION_MAX = 0x7FFFFFFF; +} + +// Describes a complete .proto file. +message FileDescriptorProto { + optional string name = 1; // file name, relative to root of source tree + optional string package = 2; // e.g. "foo", "foo.bar", etc. + + // Names of files imported by this file. + repeated string dependency = 3; + // Indexes of the public imported files in the dependency list above. + repeated int32 public_dependency = 10; + // Indexes of the weak imported files in the dependency list. 
+ // For Google-internal migration only. Do not use. + repeated int32 weak_dependency = 11; + + // All top-level definitions in this file. + repeated DescriptorProto message_type = 4; + repeated EnumDescriptorProto enum_type = 5; + repeated ServiceDescriptorProto service = 6; + repeated FieldDescriptorProto extension = 7; + + optional FileOptions options = 8; + + // This field contains optional information about the original source code. + // You may safely remove this entire field without harming runtime + // functionality of the descriptors -- the information is needed only by + // development tools. + optional SourceCodeInfo source_code_info = 9; + + // The syntax of the proto file. + // The supported values are "proto2", "proto3", and "editions". + // + // If `edition` is present, this value must be "editions". + optional string syntax = 12; + + // The edition of the proto file. + optional Edition edition = 14; +} + +// Describes a message type. +message DescriptorProto { + optional string name = 1; + + repeated FieldDescriptorProto field = 2; + repeated FieldDescriptorProto extension = 6; + + repeated DescriptorProto nested_type = 3; + repeated EnumDescriptorProto enum_type = 4; + + message ExtensionRange { + optional int32 start = 1; // Inclusive. + optional int32 end = 2; // Exclusive. + + optional ExtensionRangeOptions options = 3; + } + repeated ExtensionRange extension_range = 5; + + repeated OneofDescriptorProto oneof_decl = 8; + + optional MessageOptions options = 7; + + // Range of reserved tag numbers. Reserved tag numbers may not be used by + // fields or extension ranges in the same message. Reserved ranges may + // not overlap. + message ReservedRange { + optional int32 start = 1; // Inclusive. + optional int32 end = 2; // Exclusive. + } + repeated ReservedRange reserved_range = 9; + // Reserved field names, which may not be used by fields in the same message. + // A given name may only be reserved once. 
+ repeated string reserved_name = 10; +} + +message ExtensionRangeOptions { + // The parser stores options it doesn't recognize here. See above. + repeated UninterpretedOption uninterpreted_option = 999; + + message Declaration { + // The extension number declared within the extension range. + optional int32 number = 1; + + // The fully-qualified name of the extension field. There must be a leading + // dot in front of the full name. + optional string full_name = 2; + + // The fully-qualified type name of the extension field. Unlike + // Metadata.type, Declaration.type must have a leading dot for messages + // and enums. + optional string type = 3; + + // If true, indicates that the number is reserved in the extension range, + // and any extension field with the number will fail to compile. Set this + // when a declared extension field is deleted. + optional bool reserved = 5; + + // If true, indicates that the extension must be defined as repeated. + // Otherwise the extension must be defined as optional. + optional bool repeated = 6; + + reserved 4; // removed is_repeated + } + + // For external users: DO NOT USE. We are in the process of open sourcing + // extension declaration and executing internal cleanups before it can be + // used externally. + repeated Declaration declaration = 2 [retention = RETENTION_SOURCE]; + + // Any features defined in the specific edition. + optional FeatureSet features = 50; + + // The verification state of the extension range. + enum VerificationState { + // All the extensions of the range must be declared. + DECLARATION = 0; + UNVERIFIED = 1; + } + + // The verification state of the range. + // TODO: flip the default to DECLARATION once all empty ranges + // are marked as UNVERIFIED. + optional VerificationState verification = 3 + [default = UNVERIFIED, retention = RETENTION_SOURCE]; + + // Clients can define custom options in extensions of this message. See above. 
+ extensions 1000 to max; +} + +// Describes a field within a message. +message FieldDescriptorProto { + enum Type { + // 0 is reserved for errors. + // Order is weird for historical reasons. + TYPE_DOUBLE = 1; + TYPE_FLOAT = 2; + // Not ZigZag encoded. Negative numbers take 10 bytes. Use TYPE_SINT64 if + // negative values are likely. + TYPE_INT64 = 3; + TYPE_UINT64 = 4; + // Not ZigZag encoded. Negative numbers take 10 bytes. Use TYPE_SINT32 if + // negative values are likely. + TYPE_INT32 = 5; + TYPE_FIXED64 = 6; + TYPE_FIXED32 = 7; + TYPE_BOOL = 8; + TYPE_STRING = 9; + // Tag-delimited aggregate. + // Group type is deprecated and not supported after google.protobuf. However, Proto3 + // implementations should still be able to parse the group wire format and + // treat group fields as unknown fields. In Editions, the group wire format + // can be enabled via the `message_encoding` feature. + TYPE_GROUP = 10; + TYPE_MESSAGE = 11; // Length-delimited aggregate. + + // New in version 2. + TYPE_BYTES = 12; + TYPE_UINT32 = 13; + TYPE_ENUM = 14; + TYPE_SFIXED32 = 15; + TYPE_SFIXED64 = 16; + TYPE_SINT32 = 17; // Uses ZigZag encoding. + TYPE_SINT64 = 18; // Uses ZigZag encoding. + } + + enum Label { + // 0 is reserved for errors + LABEL_OPTIONAL = 1; + LABEL_REPEATED = 3; + // The required label is only allowed in google.protobuf. In proto3 and Editions + // it's explicitly prohibited. In Editions, the `field_presence` feature + // can be used to get this behavior. + LABEL_REQUIRED = 2; + } + + optional string name = 1; + optional int32 number = 3; + optional Label label = 4; + + // If type_name is set, this need not be set. If both this and type_name + // are set, this must be one of TYPE_ENUM, TYPE_MESSAGE or TYPE_GROUP. + optional Type type = 5; + + // For message and enum types, this is the name of the type. If the name + // starts with a '.', it is fully-qualified. Otherwise, C++-like scoping + // rules are used to find the type (i.e. 
first the nested types within this + // message are searched, then within the parent, on up to the root + // namespace). + optional string type_name = 6; + + // For extensions, this is the name of the type being extended. It is + // resolved in the same manner as type_name. + optional string extendee = 2; + + // For numeric types, contains the original text representation of the value. + // For booleans, "true" or "false". + // For strings, contains the default text contents (not escaped in any way). + // For bytes, contains the C escaped value. All bytes >= 128 are escaped. + optional string default_value = 7; + + // If set, gives the index of a oneof in the containing type's oneof_decl + // list. This field is a member of that oneof. + optional int32 oneof_index = 9; + + // JSON name of this field. The value is set by protocol compiler. If the + // user has set a "json_name" option on this field, that option's value + // will be used. Otherwise, it's deduced from the field's name by converting + // it to camelCase. + optional string json_name = 10; + + optional FieldOptions options = 8; + + // If true, this is a proto3 "optional". When a proto3 field is optional, it + // tracks presence regardless of field type. + // + // When proto3_optional is true, this field must belong to a oneof to signal + // to old proto3 clients that presence is tracked for this field. This oneof + // is known as a "synthetic" oneof, and this field must be its sole member + // (each proto3 optional field gets its own synthetic oneof). Synthetic oneofs + // exist in the descriptor only, and do not generate any API. Synthetic oneofs + // must be ordered after all "real" oneofs. + // + // For message fields, proto3_optional doesn't create any semantic change, + // since non-repeated message fields always track presence. However it still + // indicates the semantic detail of whether the user wrote "optional" or not. + // This can be useful for round-tripping the .proto file. 
For consistency we + // give message fields a synthetic oneof also, even though it is not required + // to track presence. This is especially important because the parser can't + // tell if a field is a message or an enum, so it must always create a + // synthetic oneof. + // + // Proto2 optional fields do not set this flag, because they already indicate + // optional with `LABEL_OPTIONAL`. + optional bool proto3_optional = 17; +} + +// Describes a oneof. +message OneofDescriptorProto { + optional string name = 1; + optional OneofOptions options = 2; +} + +// Describes an enum type. +message EnumDescriptorProto { + optional string name = 1; + + repeated EnumValueDescriptorProto value = 2; + + optional EnumOptions options = 3; + + // Range of reserved numeric values. Reserved values may not be used by + // entries in the same enum. Reserved ranges may not overlap. + // + // Note that this is distinct from DescriptorProto.ReservedRange in that it + // is inclusive such that it can appropriately represent the entire int32 + // domain. + message EnumReservedRange { + optional int32 start = 1; // Inclusive. + optional int32 end = 2; // Inclusive. + } + + // Range of reserved numeric values. Reserved numeric values may not be used + // by enum values in the same enum declaration. Reserved ranges may not + // overlap. + repeated EnumReservedRange reserved_range = 4; + + // Reserved enum value names, which may not be reused. A given name may only + // be reserved once. + repeated string reserved_name = 5; +} + +// Describes a value within an enum. +message EnumValueDescriptorProto { + optional string name = 1; + optional int32 number = 2; + + optional EnumValueOptions options = 3; +} + +// Describes a service. +message ServiceDescriptorProto { + optional string name = 1; + repeated MethodDescriptorProto method = 2; + + optional ServiceOptions options = 3; +} + +// Describes a method of a service. 
+message MethodDescriptorProto { + optional string name = 1; + + // Input and output type names. These are resolved in the same way as + // FieldDescriptorProto.type_name, but must refer to a message type. + optional string input_type = 2; + optional string output_type = 3; + + optional MethodOptions options = 4; + + // Identifies if client streams multiple client messages + optional bool client_streaming = 5 [default = false]; + // Identifies if server streams multiple server messages + optional bool server_streaming = 6 [default = false]; +} + +// =================================================================== +// Options + +// Each of the definitions above may have "options" attached. These are +// just annotations which may cause code to be generated slightly differently +// or may contain hints for code that manipulates protocol messages. +// +// Clients may define custom options as extensions of the *Options messages. +// These extensions may not yet be known at parsing time, so the parser cannot +// store the values in them. Instead it stores them in a field in the *Options +// message called uninterpreted_option. This field must have the same name +// across all *Options messages. We then use this field to populate the +// extensions when we build a descriptor, at which point all protos have been +// parsed and so all extensions are known. +// +// Extension numbers for custom options may be chosen as follows: +// * For options which will only be used within a single application or +// organization, or for experimental options, use field numbers 50000 +// through 99999. It is up to you to ensure that you do not use the +// same number for multiple options. +// * For options which will be published and used publicly by multiple +// independent entities, e-mail protobuf-global-extension-registry@google.com +// to reserve extension numbers. Simply provide your project name (e.g. 
+// Objective-C plugin) and your project website (if available) -- there's no +// need to explain how you intend to use them. Usually you only need one +// extension number. You can declare multiple options with only one extension +// number by putting them in a sub-message. See the Custom Options section of +// the docs for examples: +// https://developers.google.com/protocol-buffers/docs/proto#options +// If this turns out to be popular, a web service will be set up +// to automatically assign option numbers. + +message FileOptions { + + // Sets the Java package where classes generated from this .proto will be + // placed. By default, the proto package is used, but this is often + // inappropriate because proto packages do not normally start with backwards + // domain names. + optional string java_package = 1; + + // Controls the name of the wrapper Java class generated for the .proto file. + // That class will always contain the .proto file's getDescriptor() method as + // well as any top-level extensions defined in the .proto file. + // If java_multiple_files is disabled, then all the other classes from the + // .proto file will be nested inside the single wrapper outer class. + optional string java_outer_classname = 8; + + // If enabled, then the Java code generator will generate a separate .java + // file for each top-level message, enum, and service defined in the .proto + // file. Thus, these types will *not* be nested inside the wrapper class + // named by java_outer_classname. However, the wrapper class will still be + // generated to contain the file's getDescriptor() method as well as any + // top-level extensions defined in the file. + optional bool java_multiple_files = 10 [default = false]; + + // This option does nothing. 
+ optional bool java_generate_equals_and_hash = 20 [deprecated=true]; + + // A proto2 file can set this to true to opt in to UTF-8 checking for Java, + // which will throw an exception if invalid UTF-8 is parsed from the wire or + // assigned to a string field. + // + // TODO: clarify exactly what kinds of field types this option + // applies to, and update these docs accordingly. + // + // Proto3 files already perform these checks. Setting the option explicitly to + // false has no effect: it cannot be used to opt proto3 files out of UTF-8 + // checks. + optional bool java_string_check_utf8 = 27 [default = false]; + + // Generated classes can be optimized for speed or code size. + enum OptimizeMode { + SPEED = 1; // Generate complete code for parsing, serialization, + // etc. + CODE_SIZE = 2; // Use ReflectionOps to implement these methods. + LITE_RUNTIME = 3; // Generate code using MessageLite and the lite runtime. + } + optional OptimizeMode optimize_for = 9 [default = SPEED]; + + // Sets the Go package where structs generated from this .proto will be + // placed. If omitted, the Go package will be derived from the following: + // - The basename of the package import path, if provided. + // - Otherwise, the package statement in the .proto file, if present. + // - Otherwise, the basename of the .proto file, without extension. + optional string go_package = 11; + + // Should generic services be generated in each language? "Generic" services + // are not specific to any particular RPC system. They are generated by the + // main code generators in each language (without additional plugins). + // Generic services were the only kind of service generation supported by + // early versions of google.protobuf. + // + // Generic services are now considered deprecated in favor of using plugins + // that generate code specific to your particular RPC system. Therefore, + // these default to false. 
Old code which depends on generic services should + // explicitly set them to true. + optional bool cc_generic_services = 16 [default = false]; + optional bool java_generic_services = 17 [default = false]; + optional bool py_generic_services = 18 [default = false]; + reserved 42; // removed php_generic_services + reserved "php_generic_services"; + + // Is this file deprecated? + // Depending on the target platform, this can emit Deprecated annotations + // for everything in the file, or it will be completely ignored; in the very + // least, this is a formalization for deprecating files. + optional bool deprecated = 23 [default = false]; + + // Enables the use of arenas for the proto messages in this file. This applies + // only to generated classes for C++. + optional bool cc_enable_arenas = 31 [default = true]; + + // Sets the objective c class prefix which is prepended to all objective c + // generated classes from this .proto. There is no default. + optional string objc_class_prefix = 36; + + // Namespace for generated classes; defaults to the package. + optional string csharp_namespace = 37; + + // By default Swift generators will take the proto package and CamelCase it + // replacing '.' with underscore and use that to prefix the types/symbols + // defined. When this options is provided, they will use this value instead + // to prefix the types/symbols defined. + optional string swift_prefix = 39; + + // Sets the php class prefix which is prepended to all php generated classes + // from this .proto. Default is empty. + optional string php_class_prefix = 40; + + // Use this option to change the namespace of php generated classes. Default + // is empty. When this option is empty, the package name will be used for + // determining the namespace. + optional string php_namespace = 41; + + // Use this option to change the namespace of php generated metadata classes. + // Default is empty. 
When this option is empty, the proto file name will be + // used for determining the namespace. + optional string php_metadata_namespace = 44; + + // Use this option to change the package of ruby generated classes. Default + // is empty. When this option is not set, the package name will be used for + // determining the ruby package. + optional string ruby_package = 45; + + // Any features defined in the specific edition. + optional FeatureSet features = 50; + + // The parser stores options it doesn't recognize here. + // See the documentation for the "Options" section above. + repeated UninterpretedOption uninterpreted_option = 999; + + // Clients can define custom options in extensions of this message. + // See the documentation for the "Options" section above. + extensions 1000 to max; + + reserved 38; +} + +message MessageOptions { + // Set true to use the old proto1 MessageSet wire format for extensions. + // This is provided for backwards-compatibility with the MessageSet wire + // format. You should not use this for any other reason: It's less + // efficient, has fewer features, and is more complicated. + // + // The message must be defined exactly as follows: + // message Foo { + // option message_set_wire_format = true; + // extensions 4 to max; + // } + // Note that the message cannot have any defined fields; MessageSets only + // have extensions. + // + // All extensions of your type must be singular messages; e.g. they cannot + // be int32s, enums, or repeated messages. + // + // Because this is an option, the above two restrictions are not enforced by + // the protocol compiler. + optional bool message_set_wire_format = 1 [default = false]; + + // Disables the generation of the standard "descriptor()" accessor, which can + // conflict with a field of the same name. This is meant to make migration + // from proto1 easier; new code should avoid fields named "descriptor". 
+ optional bool no_standard_descriptor_accessor = 2 [default = false]; + + // Is this message deprecated? + // Depending on the target platform, this can emit Deprecated annotations + // for the message, or it will be completely ignored; in the very least, + // this is a formalization for deprecating messages. + optional bool deprecated = 3 [default = false]; + + reserved 4, 5, 6; + + // Whether the message is an automatically generated map entry type for the + // maps field. + // + // For maps fields: + // map map_field = 1; + // The parsed descriptor looks like: + // message MapFieldEntry { + // option map_entry = true; + // optional KeyType key = 1; + // optional ValueType value = 2; + // } + // repeated MapFieldEntry map_field = 1; + // + // Implementations may choose not to generate the map_entry=true message, but + // use a native map in the target language to hold the keys and values. + // The reflection APIs in such implementations still need to work as + // if the field is a repeated message field. + // + // NOTE: Do not set the option in .proto files. Always use the maps syntax + // instead. The option should only be implicitly set by the proto compiler + // parser. + optional bool map_entry = 7; + + reserved 8; // javalite_serializable + reserved 9; // javanano_as_lite + + // Enable the legacy handling of JSON field name conflicts. This lowercases + // and strips underscored from the fields before comparison in proto3 only. + // The new behavior takes `json_name` into account and applies to proto2 as + // well. + // + // This should only be used as a temporary measure against broken builds due + // to the change in behavior for JSON field name conflicts. + // + // TODO This is legacy behavior we plan to remove once downstream + // teams have had time to migrate. + optional bool deprecated_legacy_json_field_conflicts = 11 [deprecated = true]; + + // Any features defined in the specific edition. 
+ optional FeatureSet features = 12; + + // The parser stores options it doesn't recognize here. See above. + repeated UninterpretedOption uninterpreted_option = 999; + + // Clients can define custom options in extensions of this message. See above. + extensions 1000 to max; +} + +message FieldOptions { + // The ctype option instructs the C++ code generator to use a different + // representation of the field than it normally would. See the specific + // options below. This option is only implemented to support use of + // [ctype=CORD] and [ctype=STRING] (the default) on non-repeated fields of + // type "bytes" in the open source release -- sorry, we'll try to include + // other types in a future version! + optional CType ctype = 1 [default = STRING]; + enum CType { + // Default mode. + STRING = 0; + + // The option [ctype=CORD] may be applied to a non-repeated field of type + // "bytes". It indicates that in C++, the data should be stored in a Cord + // instead of a string. For very large strings, this may reduce memory + // fragmentation. It may also allow better performance when parsing from a + // Cord, or when parsing with aliasing enabled, as the parsed Cord may then + // alias the original buffer. + CORD = 1; + + STRING_PIECE = 2; + } + // The packed option can be enabled for repeated primitive fields to enable + // a more efficient representation on the wire. Rather than repeatedly + // writing the tag and type for each element, the entire array is encoded as + // a single length-delimited blob. In proto3, only explicit setting it to + // false will avoid using packed encoding. This option is prohibited in + // Editions, but the `repeated_field_encoding` feature can be used to control + // the behavior. + optional bool packed = 2; + + // The jstype option determines the JavaScript type used for values of the + // field. The option is permitted only for 64 bit integral and fixed types + // (int64, uint64, sint64, fixed64, sfixed64). 
A field with jstype JS_STRING + // is represented as JavaScript string, which avoids loss of precision that + // can happen when a large value is converted to a floating point JavaScript. + // Specifying JS_NUMBER for the jstype causes the generated JavaScript code to + // use the JavaScript "number" type. The behavior of the default option + // JS_NORMAL is implementation dependent. + // + // This option is an enum to permit additional types to be added, e.g. + // goog.math.Integer. + optional JSType jstype = 6 [default = JS_NORMAL]; + enum JSType { + // Use the default type. + JS_NORMAL = 0; + + // Use JavaScript strings. + JS_STRING = 1; + + // Use JavaScript numbers. + JS_NUMBER = 2; + } + + // Should this field be parsed lazily? Lazy applies only to message-type + // fields. It means that when the outer message is initially parsed, the + // inner message's contents will not be parsed but instead stored in encoded + // form. The inner message will actually be parsed when it is first accessed. + // + // This is only a hint. Implementations are free to choose whether to use + // eager or lazy parsing regardless of the value of this option. However, + // setting this option true suggests that the protocol author believes that + // using lazy parsing on this field is worth the additional bookkeeping + // overhead typically needed to implement it. + // + // This option does not affect the public interface of any generated code; + // all method signatures remain the same. Furthermore, thread-safety of the + // interface is not affected by this option; const methods remain safe to + // call from multiple threads concurrently, while non-const methods continue + // to require exclusive access. + // + // Note that lazy message fields are still eagerly verified to check + // ill-formed wireformat or missing required fields. Calling IsInitialized() + // on the outer message would fail if the inner message has missing required + // fields. 
Failed verification would result in parsing failure (except when + // uninitialized messages are acceptable). + optional bool lazy = 5 [default = false]; + + // unverified_lazy does no correctness checks on the byte stream. This should + // only be used where lazy with verification is prohibitive for performance + // reasons. + optional bool unverified_lazy = 15 [default = false]; + + // Is this field deprecated? + // Depending on the target platform, this can emit Deprecated annotations + // for accessors, or it will be completely ignored; in the very least, this + // is a formalization for deprecating fields. + optional bool deprecated = 3 [default = false]; + + // For Google-internal migration only. Do not use. + optional bool weak = 10 [default = false]; + + // Indicate that the field value should not be printed out when using debug + // formats, e.g. when the field contains sensitive credentials. + optional bool debug_redact = 16 [default = false]; + + // If set to RETENTION_SOURCE, the option will be omitted from the binary. + // Note: as of January 2023, support for this is in progress and does not yet + // have an effect (b/264593489). + enum OptionRetention { + RETENTION_UNKNOWN = 0; + RETENTION_RUNTIME = 1; + RETENTION_SOURCE = 2; + } + + optional OptionRetention retention = 17; + + // This indicates the types of entities that the field may apply to when used + // as an option. If it is unset, then the field may be freely used as an + // option on any kind of entity. Note: as of January 2023, support for this is + // in progress and does not yet have an effect (b/264593489). 
+ enum OptionTargetType { + TARGET_TYPE_UNKNOWN = 0; + TARGET_TYPE_FILE = 1; + TARGET_TYPE_EXTENSION_RANGE = 2; + TARGET_TYPE_MESSAGE = 3; + TARGET_TYPE_FIELD = 4; + TARGET_TYPE_ONEOF = 5; + TARGET_TYPE_ENUM = 6; + TARGET_TYPE_ENUM_ENTRY = 7; + TARGET_TYPE_SERVICE = 8; + TARGET_TYPE_METHOD = 9; + } + + repeated OptionTargetType targets = 19; + + message EditionDefault { + optional Edition edition = 3; + optional string value = 2; // Textproto value. + } + repeated EditionDefault edition_defaults = 20; + + // Any features defined in the specific edition. + optional FeatureSet features = 21; + + // Information about the support window of a feature. + message FeatureSupport { + // The edition that this feature was first available in. In editions + // earlier than this one, the default assigned to EDITION_LEGACY will be + // used, and proto files will not be able to override it. + optional Edition edition_introduced = 1; + + // The edition this feature becomes deprecated in. Using this after this + // edition may trigger warnings. + optional Edition edition_deprecated = 2; + + // The deprecation warning text if this feature is used after the edition it + // was marked deprecated in. + optional string deprecation_warning = 3; + + // The edition this feature is no longer available in. In editions after + // this one, the last default assigned will be used, and proto files will + // not be able to override it. + optional Edition edition_removed = 4; + } + optional FeatureSupport feature_support = 22; + + // The parser stores options it doesn't recognize here. See above. + repeated UninterpretedOption uninterpreted_option = 999; + + // Clients can define custom options in extensions of this message. See above. + extensions 1000 to max; + + reserved 4; // removed jtype + reserved 18; // reserve target, target_obsolete_do_not_use +} + +message OneofOptions { + // Any features defined in the specific edition. 
+ optional FeatureSet features = 1; + + // The parser stores options it doesn't recognize here. See above. + repeated UninterpretedOption uninterpreted_option = 999; + + // Clients can define custom options in extensions of this message. See above. + extensions 1000 to max; +} + +message EnumOptions { + + // Set this option to true to allow mapping different tag names to the same + // value. + optional bool allow_alias = 2; + + // Is this enum deprecated? + // Depending on the target platform, this can emit Deprecated annotations + // for the enum, or it will be completely ignored; in the very least, this + // is a formalization for deprecating enums. + optional bool deprecated = 3 [default = false]; + + reserved 5; // javanano_as_lite + + // Enable the legacy handling of JSON field name conflicts. This lowercases + // and strips underscored from the fields before comparison in proto3 only. + // The new behavior takes `json_name` into account and applies to proto2 as + // well. + // TODO Remove this legacy behavior once downstream teams have + // had time to migrate. + optional bool deprecated_legacy_json_field_conflicts = 6 [deprecated = true]; + + // Any features defined in the specific edition. + optional FeatureSet features = 7; + + // The parser stores options it doesn't recognize here. See above. + repeated UninterpretedOption uninterpreted_option = 999; + + // Clients can define custom options in extensions of this message. See above. + extensions 1000 to max; +} + +message EnumValueOptions { + // Is this enum value deprecated? + // Depending on the target platform, this can emit Deprecated annotations + // for the enum value, or it will be completely ignored; in the very least, + // this is a formalization for deprecating enum values. + optional bool deprecated = 1 [default = false]; + + // Any features defined in the specific edition. 
+ optional FeatureSet features = 2; + + // Indicate that fields annotated with this enum value should not be printed + // out when using debug formats, e.g. when the field contains sensitive + // credentials. + optional bool debug_redact = 3 [default = false]; + + // Information about the support window of a feature value. + optional FieldOptions.FeatureSupport feature_support = 4; + + // The parser stores options it doesn't recognize here. See above. + repeated UninterpretedOption uninterpreted_option = 999; + + // Clients can define custom options in extensions of this message. See above. + extensions 1000 to max; +} + +message ServiceOptions { + + // Any features defined in the specific edition. + optional FeatureSet features = 34; + + // Note: Field numbers 1 through 32 are reserved for Google's internal RPC + // framework. We apologize for hoarding these numbers to ourselves, but + // we were already using them long before we decided to release Protocol + // Buffers. + + // Is this service deprecated? + // Depending on the target platform, this can emit Deprecated annotations + // for the service, or it will be completely ignored; in the very least, + // this is a formalization for deprecating services. + optional bool deprecated = 33 [default = false]; + + // The parser stores options it doesn't recognize here. See above. + repeated UninterpretedOption uninterpreted_option = 999; + + // Clients can define custom options in extensions of this message. See above. + extensions 1000 to max; +} + +message MethodOptions { + + // Note: Field numbers 1 through 32 are reserved for Google's internal RPC + // framework. We apologize for hoarding these numbers to ourselves, but + // we were already using them long before we decided to release Protocol + // Buffers. + + // Is this method deprecated? 
+ // Depending on the target platform, this can emit Deprecated annotations + // for the method, or it will be completely ignored; in the very least, + // this is a formalization for deprecating methods. + optional bool deprecated = 33 [default = false]; + + // Is this method side-effect-free (or safe in HTTP parlance), or idempotent, + // or neither? HTTP based RPC implementation may choose GET verb for safe + // methods, and PUT verb for idempotent methods instead of the default POST. + enum IdempotencyLevel { + IDEMPOTENCY_UNKNOWN = 0; + NO_SIDE_EFFECTS = 1; // implies idempotent + IDEMPOTENT = 2; // idempotent, but may have side effects + } + optional IdempotencyLevel idempotency_level = 34 + [default = IDEMPOTENCY_UNKNOWN]; + + // Any features defined in the specific edition. + optional FeatureSet features = 35; + + // The parser stores options it doesn't recognize here. See above. + repeated UninterpretedOption uninterpreted_option = 999; + + // Clients can define custom options in extensions of this message. See above. + extensions 1000 to max; +} + +// A message representing a option the parser does not recognize. This only +// appears in options protos created by the compiler::Parser class. +// DescriptorPool resolves these when building Descriptor objects. Therefore, +// options protos in descriptor objects (e.g. returned by Descriptor::options(), +// or produced by Descriptor::CopyTo()) will never have UninterpretedOptions +// in them. +message UninterpretedOption { + // The name of the uninterpreted option. Each string represents a segment in + // a dot-separated name. is_extension is true iff a segment represents an + // extension (denoted with parentheses in options specs in .proto files). + // E.g.,{ ["foo", false], ["bar.baz", true], ["moo", false] } represents + // "foo.(bar.baz).moo". 
+ message NamePart { + required string name_part = 1; + required bool is_extension = 2; + } + repeated NamePart name = 2; + + // The value of the uninterpreted option, in whatever type the tokenizer + // identified it as during parsing. Exactly one of these should be set. + optional string identifier_value = 3; + optional uint64 positive_int_value = 4; + optional int64 negative_int_value = 5; + optional double double_value = 6; + optional bytes string_value = 7; + optional string aggregate_value = 8; +} + +// =================================================================== +// Features + +// TODO Enums in C++ gencode (and potentially other languages) are +// not well scoped. This means that each of the feature enums below can clash +// with each other. The short names we've chosen maximize call-site +// readability, but leave us very open to this scenario. A future feature will +// be designed and implemented to handle this, hopefully before we ever hit a +// conflict here. +message FeatureSet { + enum FieldPresence { + FIELD_PRESENCE_UNKNOWN = 0; + EXPLICIT = 1; + IMPLICIT = 2; + LEGACY_REQUIRED = 3; + } + optional FieldPresence field_presence = 1 [ + retention = RETENTION_RUNTIME, + targets = TARGET_TYPE_FIELD, + targets = TARGET_TYPE_FILE, + // TODO Enable this in google3 once protoc rolls out. + feature_support = { + edition_introduced: EDITION_2023, + }, + edition_defaults = { edition: EDITION_PROTO2, value: "EXPLICIT" }, + edition_defaults = { edition: EDITION_PROTO3, value: "IMPLICIT" }, + edition_defaults = { edition: EDITION_2023, value: "EXPLICIT" } + ]; + + enum EnumType { + ENUM_TYPE_UNKNOWN = 0; + OPEN = 1; + CLOSED = 2; + } + optional EnumType enum_type = 2 [ + retention = RETENTION_RUNTIME, + targets = TARGET_TYPE_ENUM, + targets = TARGET_TYPE_FILE, + // TODO Enable this in google3 once protoc rolls out. 
+ feature_support = { + edition_introduced: EDITION_2023, + }, + edition_defaults = { edition: EDITION_PROTO2, value: "CLOSED" }, + edition_defaults = { edition: EDITION_PROTO3, value: "OPEN" } + ]; + + enum RepeatedFieldEncoding { + REPEATED_FIELD_ENCODING_UNKNOWN = 0; + PACKED = 1; + EXPANDED = 2; + } + optional RepeatedFieldEncoding repeated_field_encoding = 3 [ + retention = RETENTION_RUNTIME, + targets = TARGET_TYPE_FIELD, + targets = TARGET_TYPE_FILE, + // TODO Enable this in google3 once protoc rolls out. + feature_support = { + edition_introduced: EDITION_2023, + }, + edition_defaults = { edition: EDITION_PROTO2, value: "EXPANDED" }, + edition_defaults = { edition: EDITION_PROTO3, value: "PACKED" } + ]; + + enum Utf8Validation { + UTF8_VALIDATION_UNKNOWN = 0; + VERIFY = 2; + NONE = 3; + reserved 1; + } + optional Utf8Validation utf8_validation = 4 [ + retention = RETENTION_RUNTIME, + targets = TARGET_TYPE_FIELD, + targets = TARGET_TYPE_FILE, + // TODO Enable this in google3 once protoc rolls out. + feature_support = { + edition_introduced: EDITION_2023, + }, + edition_defaults = { edition: EDITION_PROTO2, value: "NONE" }, + edition_defaults = { edition: EDITION_PROTO3, value: "VERIFY" } + ]; + + enum MessageEncoding { + MESSAGE_ENCODING_UNKNOWN = 0; + LENGTH_PREFIXED = 1; + DELIMITED = 2; + } + optional MessageEncoding message_encoding = 5 [ + retention = RETENTION_RUNTIME, + targets = TARGET_TYPE_FIELD, + targets = TARGET_TYPE_FILE, + // TODO Enable this in google3 once protoc rolls out. + feature_support = { + edition_introduced: EDITION_2023, + }, + edition_defaults = { edition: EDITION_PROTO2, value: "LENGTH_PREFIXED" } + ]; + + enum JsonFormat { + JSON_FORMAT_UNKNOWN = 0; + ALLOW = 1; + LEGACY_BEST_EFFORT = 2; + } + optional JsonFormat json_format = 6 [ + retention = RETENTION_RUNTIME, + targets = TARGET_TYPE_MESSAGE, + targets = TARGET_TYPE_ENUM, + targets = TARGET_TYPE_FILE, + // TODO Enable this in google3 once protoc rolls out. 
+ feature_support = { + edition_introduced: EDITION_2023, + }, + edition_defaults = { edition: EDITION_PROTO2, value: "LEGACY_BEST_EFFORT" }, + edition_defaults = { edition: EDITION_PROTO3, value: "ALLOW" } + ]; + + reserved 999; + + extensions 1000 to 9994 [ + declaration = { + number: 1000, + full_name: ".pb.cpp", + type: ".pb.CppFeatures" + }, + declaration = { + number: 1001, + full_name: ".pb.java", + type: ".pb.JavaFeatures" + }, + declaration = { number: 1002, full_name: ".pb.go", type: ".pb.GoFeatures" }, + declaration = { + number: 9990, + full_name: ".pb.proto1", + type: ".pb.Proto1Features" + } + ]; + + extensions 9995 to 9999; // For internal testing + extensions 10000; // for https://github.com/bufbuild/protobuf-es +} + +// A compiled specification for the defaults of a set of features. These +// messages are generated from FeatureSet extensions and can be used to seed +// feature resolution. The resolution with this object becomes a simple search +// for the closest matching edition, followed by proto merges. +message FeatureSetDefaults { + // A map from every known edition with a unique set of defaults to its + // defaults. Not all editions may be contained here. For a given edition, + // the defaults at the closest matching edition ordered at or before it should + // be used. This field must be in strict ascending order by edition. + message FeatureSetEditionDefault { + optional Edition edition = 3; + + // Defaults of features that can be overridden in this edition. + optional FeatureSet overridable_features = 4; + + // Defaults of features that can't be overridden in this edition. + optional FeatureSet fixed_features = 5; + + reserved 1, 2; + reserved "features"; + } + repeated FeatureSetEditionDefault defaults = 1; + + // The minimum supported edition (inclusive) when this was constructed. + // Editions before this will not have defaults. 
+ optional Edition minimum_edition = 4; + + // The maximum known edition (inclusive) when this was constructed. Editions + // after this will not have reliable defaults. + optional Edition maximum_edition = 5; +} + +// =================================================================== +// Optional source code info + +// Encapsulates information about the original source file from which a +// FileDescriptorProto was generated. +message SourceCodeInfo { + // A Location identifies a piece of source code in a .proto file which + // corresponds to a particular definition. This information is intended + // to be useful to IDEs, code indexers, documentation generators, and similar + // tools. + // + // For example, say we have a file like: + // message Foo { + // optional string foo = 1; + // } + // Let's look at just the field definition: + // optional string foo = 1; + // ^ ^^ ^^ ^ ^^^ + // a bc de f ghi + // We have the following locations: + // span path represents + // [a,i) [ 4, 0, 2, 0 ] The whole field definition. + // [a,b) [ 4, 0, 2, 0, 4 ] The label (optional). + // [c,d) [ 4, 0, 2, 0, 5 ] The type (string). + // [e,f) [ 4, 0, 2, 0, 1 ] The name (foo). + // [g,h) [ 4, 0, 2, 0, 3 ] The number (1). + // + // Notes: + // - A location may refer to a repeated field itself (i.e. not to any + // particular index within it). This is used whenever a set of elements are + // logically enclosed in a single code segment. For example, an entire + // extend block (possibly containing multiple extension definitions) will + // have an outer location whose path refers to the "extensions" repeated + // field without an index. + // - Multiple locations may have the same path. This happens when a single + // logical declaration is spread out across multiple places. The most + // obvious example is the "extend" block again -- there may be multiple + // extend blocks in the same scope, each of which will have the same path. 
+ // - A location's span is not always a subset of its parent's span. For + // example, the "extendee" of an extension declaration appears at the + // beginning of the "extend" block and is shared by all extensions within + // the block. + // - Just because a location's span is a subset of some other location's span + // does not mean that it is a descendant. For example, a "group" defines + // both a type and a field in a single declaration. Thus, the locations + // corresponding to the type and field and their components will overlap. + // - Code which tries to interpret locations should probably be designed to + // ignore those that it doesn't understand, as more types of locations could + // be recorded in the future. + repeated Location location = 1; + message Location { + // Identifies which part of the FileDescriptorProto was defined at this + // location. + // + // Each element is a field number or an index. They form a path from + // the root FileDescriptorProto to the place where the definition appears. + // For example, this path: + // [ 4, 3, 2, 7, 1 ] + // refers to: + // file.message_type(3) // 4, 3 + // .field(7) // 2, 7 + // .name() // 1 + // This is because FileDescriptorProto.message_type has field number 4: + // repeated DescriptorProto message_type = 4; + // and DescriptorProto.field has field number 2: + // repeated FieldDescriptorProto field = 2; + // and FieldDescriptorProto.name has field number 1: + // optional string name = 1; + // + // Thus, the above path gives the location of a field name. If we removed + // the last element: + // [ 4, 3, 2, 7 ] + // this path refers to the whole field declaration (from the beginning + // of the label to the terminating semicolon). + repeated int32 path = 1 [packed = true]; + + // Always has exactly three or four elements: start line, start column, + // end line (optional, otherwise assumed same as start line), end column. + // These are packed into a single field for efficiency. 
Note that line + // and column numbers are zero-based -- typically you will want to add + // 1 to each before displaying to a user. + repeated int32 span = 2 [packed = true]; + + // If this SourceCodeInfo represents a complete declaration, these are any + // comments appearing before and after the declaration which appear to be + // attached to the declaration. + // + // A series of line comments appearing on consecutive lines, with no other + // tokens appearing on those lines, will be treated as a single comment. + // + // leading_detached_comments will keep paragraphs of comments that appear + // before (but not connected to) the current element. Each paragraph, + // separated by empty lines, will be one comment element in the repeated + // field. + // + // Only the comment content is provided; comment markers (e.g. //) are + // stripped out. For block comments, leading whitespace and an asterisk + // will be stripped from the beginning of each line other than the first. + // Newlines are included in the output. + // + // Examples: + // + // optional int32 foo = 1; // Comment attached to foo. + // // Comment attached to bar. + // optional int32 bar = 2; + // + // optional string baz = 3; + // // Comment attached to baz. + // // Another line attached to baz. + // + // // Comment attached to moo. + // // + // // Another line attached to moo. + // optional double moo = 4; + // + // // Detached comment for corge. This is not leading or trailing comments + // // to moo or corge because there are blank lines separating it from + // // both. + // + // // Detached comment for corge paragraph 2. + // + // optional string corge = 5; + // /* Block comment attached + // * to corge. Leading asterisks + // * will be removed. */ + // /* Block comment attached to + // * grault. */ + // optional int32 grault = 6; + // + // // ignored detached comments. 
+ optional string leading_comments = 3; + optional string trailing_comments = 4; + repeated string leading_detached_comments = 6; + } +} + +// Describes the relationship between generated code and its original source +// file. A GeneratedCodeInfo message is associated with only one generated +// source file, but may contain references to different source .proto files. +message GeneratedCodeInfo { + // An Annotation connects some span of text in generated code to an element + // of its generating .proto file. + repeated Annotation annotation = 1; + message Annotation { + // Identifies the element in the original source .proto file. This field + // is formatted the same as SourceCodeInfo.Location.path. + repeated int32 path = 1 [packed = true]; + + // Identifies the filesystem path to the original source .proto. + optional string source_file = 2; + + // Identifies the starting offset in bytes in the generated code + // that relates to the identified object. + optional int32 begin = 3; + + // Identifies the ending offset in bytes in the generated code that + // relates to the identified object. The end offset should be one past + // the last relevant byte (so the length of the text = end - begin). + optional int32 end = 4; + + // Represents the identified object's effect on the element in the original + // .proto file. + enum Semantic { + // There is no effect or the effect is indescribable. + NONE = 0; + // The element is set or otherwise mutated. + SET = 1; + // An alias to the element is returned. + ALIAS = 2; + } + optional Semantic semantic = 5; + } +} diff --git a/wellknownimports/google/protobuf/duration.proto b/wellknownimports/google/protobuf/duration.proto new file mode 100644 index 00000000..42427a7d --- /dev/null +++ b/wellknownimports/google/protobuf/duration.proto @@ -0,0 +1,99 @@ +// Copyright 2020-2024 Buf Technologies, Inc. 
+// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +syntax = "proto3"; + +package google.protobuf; + +option cc_enable_arenas = true; +option go_package = "google.golang.org/protobuf/types/known/durationpb"; +option java_package = "com.google.protobuf"; +option java_outer_classname = "DurationProto"; +option java_multiple_files = true; +option objc_class_prefix = "GPB"; +option csharp_namespace = "Google.Protobuf.WellKnownTypes"; + +// A Duration represents a signed, fixed-length span of time represented +// as a count of seconds and fractions of seconds at nanosecond +// resolution. It is independent of any calendar and concepts like "day" +// or "month". It is related to Timestamp in that the difference between +// two Timestamp values is a Duration and it can be added or subtracted +// from a Timestamp. Range is approximately +-10,000 years. +// +// # Examples +// +// Example 1: Compute Duration from two Timestamps in pseudo code. +// +// Timestamp start = ...; +// Timestamp end = ...; +// Duration duration = ...; +// +// duration.seconds = end.seconds - start.seconds; +// duration.nanos = end.nanos - start.nanos; +// +// if (duration.seconds < 0 && duration.nanos > 0) { +// duration.seconds += 1; +// duration.nanos -= 1000000000; +// } else if (duration.seconds > 0 && duration.nanos < 0) { +// duration.seconds -= 1; +// duration.nanos += 1000000000; +// } +// +// Example 2: Compute Timestamp from Timestamp + Duration in pseudo code. 
+// +// Timestamp start = ...; +// Duration duration = ...; +// Timestamp end = ...; +// +// end.seconds = start.seconds + duration.seconds; +// end.nanos = start.nanos + duration.nanos; +// +// if (end.nanos < 0) { +// end.seconds -= 1; +// end.nanos += 1000000000; +// } else if (end.nanos >= 1000000000) { +// end.seconds += 1; +// end.nanos -= 1000000000; +// } +// +// Example 3: Compute Duration from datetime.timedelta in Python. +// +// td = datetime.timedelta(days=3, minutes=10) +// duration = Duration() +// duration.FromTimedelta(td) +// +// # JSON Mapping +// +// In JSON format, the Duration type is encoded as a string rather than an +// object, where the string ends in the suffix "s" (indicating seconds) and +// is preceded by the number of seconds, with nanoseconds expressed as +// fractional seconds. For example, 3 seconds with 0 nanoseconds should be +// encoded in JSON format as "3s", while 3 seconds and 1 nanosecond should +// be expressed in JSON format as "3.000000001s", and 3 seconds and 1 +// microsecond should be expressed in JSON format as "3.000001s". +// +message Duration { + // Signed seconds of the span of time. Must be from -315,576,000,000 + // to +315,576,000,000 inclusive. Note: these bounds are computed from: + // 60 sec/min * 60 min/hr * 24 hr/day * 365.25 days/year * 10000 years + int64 seconds = 1; + + // Signed fractions of a second at nanosecond resolution of the span + // of time. Durations less than one second are represented with a 0 + // `seconds` field and a positive or negative `nanos` field. For durations + // of one second or more, a non-zero value for the `nanos` field must be + // of the same sign as the `seconds` field. Must be from -999,999,999 + // to +999,999,999 inclusive. 
+ int32 nanos = 2; +} diff --git a/wellknownimports/google/protobuf/empty.proto b/wellknownimports/google/protobuf/empty.proto new file mode 100644 index 00000000..54a3be5d --- /dev/null +++ b/wellknownimports/google/protobuf/empty.proto @@ -0,0 +1,35 @@ +// Copyright 2020-2024 Buf Technologies, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +syntax = "proto3"; + +package google.protobuf; + +option go_package = "google.golang.org/protobuf/types/known/emptypb"; +option java_package = "com.google.protobuf"; +option java_outer_classname = "EmptyProto"; +option java_multiple_files = true; +option objc_class_prefix = "GPB"; +option csharp_namespace = "Google.Protobuf.WellKnownTypes"; +option cc_enable_arenas = true; + +// A generic empty message that you can re-use to avoid defining duplicated +// empty messages in your APIs. A typical example is to use it as the request +// or the response type of an API method. For instance: +// +// service Foo { +// rpc Bar(google.protobuf.Empty) returns (google.protobuf.Empty); +// } +// +message Empty {} diff --git a/wellknownimports/google/protobuf/field_mask.proto b/wellknownimports/google/protobuf/field_mask.proto new file mode 100644 index 00000000..a0d639b4 --- /dev/null +++ b/wellknownimports/google/protobuf/field_mask.proto @@ -0,0 +1,229 @@ +// Copyright 2020-2024 Buf Technologies, Inc. 
+// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +syntax = "proto3"; + +package google.protobuf; + +option java_package = "com.google.protobuf"; +option java_outer_classname = "FieldMaskProto"; +option java_multiple_files = true; +option objc_class_prefix = "GPB"; +option csharp_namespace = "Google.Protobuf.WellKnownTypes"; +option go_package = "google.golang.org/protobuf/types/known/fieldmaskpb"; +option cc_enable_arenas = true; + +// `FieldMask` represents a set of symbolic field paths, for example: +// +// paths: "f.a" +// paths: "f.b.d" +// +// Here `f` represents a field in some root message, `a` and `b` +// fields in the message found in `f`, and `d` a field found in the +// message in `f.b`. +// +// Field masks are used to specify a subset of fields that should be +// returned by a get operation or modified by an update operation. +// Field masks also have a custom JSON encoding (see below). +// +// # Field Masks in Projections +// +// When used in the context of a projection, a response message or +// sub-message is filtered by the API to only contain those fields as +// specified in the mask. 
For example, if the mask in the previous +// example is applied to a response message as follows: +// +// f { +// a : 22 +// b { +// d : 1 +// x : 2 +// } +// y : 13 +// } +// z: 8 +// +// The result will not contain specific values for fields x,y and z +// (their value will be set to the default, and omitted in proto text +// output): +// +// +// f { +// a : 22 +// b { +// d : 1 +// } +// } +// +// A repeated field is not allowed except at the last position of a +// paths string. +// +// If a FieldMask object is not present in a get operation, the +// operation applies to all fields (as if a FieldMask of all fields +// had been specified). +// +// Note that a field mask does not necessarily apply to the +// top-level response message. In case of a REST get operation, the +// field mask applies directly to the response, but in case of a REST +// list operation, the mask instead applies to each individual message +// in the returned resource list. In case of a REST custom method, +// other definitions may be used. Where the mask applies will be +// clearly documented together with its declaration in the API. In +// any case, the effect on the returned resource/resources is required +// behavior for APIs. +// +// # Field Masks in Update Operations +// +// A field mask in update operations specifies which fields of the +// targeted resource are going to be updated. The API is required +// to only change the values of the fields as specified in the mask +// and leave the others untouched. If a resource is passed in to +// describe the updated values, the API ignores the values of all +// fields not covered by the mask. +// +// If a repeated field is specified for an update operation, new values will +// be appended to the existing repeated field in the target resource. Note that +// a repeated field is only allowed in the last position of a `paths` string. 
+// +// If a sub-message is specified in the last position of the field mask for an +// update operation, then new value will be merged into the existing sub-message +// in the target resource. +// +// For example, given the target message: +// +// f { +// b { +// d: 1 +// x: 2 +// } +// c: [1] +// } +// +// And an update message: +// +// f { +// b { +// d: 10 +// } +// c: [2] +// } +// +// then if the field mask is: +// +// paths: ["f.b", "f.c"] +// +// then the result will be: +// +// f { +// b { +// d: 10 +// x: 2 +// } +// c: [1, 2] +// } +// +// An implementation may provide options to override this default behavior for +// repeated and message fields. +// +// In order to reset a field's value to the default, the field must +// be in the mask and set to the default value in the provided resource. +// Hence, in order to reset all fields of a resource, provide a default +// instance of the resource and set all fields in the mask, or do +// not provide a mask as described below. +// +// If a field mask is not present on update, the operation applies to +// all fields (as if a field mask of all fields has been specified). +// Note that in the presence of schema evolution, this may mean that +// fields the client does not know and has therefore not filled into +// the request will be reset to their default. If this is unwanted +// behavior, a specific service may require a client to always specify +// a field mask, producing an error if not. +// +// As with get operations, the location of the resource which +// describes the updated values in the request message depends on the +// operation kind. In any case, the effect of the field mask is +// required to be honored by the API. +// +// ## Considerations for HTTP REST +// +// The HTTP kind of an update operation which uses a field mask must +// be set to PATCH instead of PUT in order to satisfy HTTP semantics +// (PUT must only be used for full updates). 
+// +// # JSON Encoding of Field Masks +// +// In JSON, a field mask is encoded as a single string where paths are +// separated by a comma. Fields name in each path are converted +// to/from lower-camel naming conventions. +// +// As an example, consider the following message declarations: +// +// message Profile { +// User user = 1; +// Photo photo = 2; +// } +// message User { +// string display_name = 1; +// string address = 2; +// } +// +// In proto a field mask for `Profile` may look as such: +// +// mask { +// paths: "user.display_name" +// paths: "photo" +// } +// +// In JSON, the same mask is represented as below: +// +// { +// mask: "user.displayName,photo" +// } +// +// # Field Masks and Oneof Fields +// +// Field masks treat fields in oneofs just as regular fields. Consider the +// following message: +// +// message SampleMessage { +// oneof test_oneof { +// string name = 4; +// SubMessage sub_message = 9; +// } +// } +// +// The field mask can be: +// +// mask { +// paths: "name" +// } +// +// Or: +// +// mask { +// paths: "sub_message" +// } +// +// Note that oneof type names ("test_oneof" in this case) cannot be used in +// paths. +// +// ## Field Mask Verification +// +// The implementation of any API method which has a FieldMask type field in the +// request should verify the included field paths, and return an +// `INVALID_ARGUMENT` error if any path is unmappable. +message FieldMask { + // The set of field mask paths. + repeated string paths = 1; +} diff --git a/wellknownimports/google/protobuf/java_features.proto b/wellknownimports/google/protobuf/java_features.proto new file mode 100644 index 00000000..e9177c70 --- /dev/null +++ b/wellknownimports/google/protobuf/java_features.proto @@ -0,0 +1,78 @@ +// Copyright 2020-2024 Buf Technologies, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +syntax = "proto2"; + +package pb; + +import "google/protobuf/descriptor.proto"; + +option java_package = "com.google.protobuf"; +option java_outer_classname = "JavaFeaturesProto"; + +extend google.protobuf.FeatureSet { + optional JavaFeatures java = 1001; +} + +message JavaFeatures { + // Whether or not to treat an enum field as closed. This option is only + // applicable to enum fields, and will be removed in the future. It is + // consistent with the legacy behavior of using proto3 enum types for proto2 + // fields. + optional bool legacy_closed_enum = 1 [ + retention = RETENTION_RUNTIME, + targets = TARGET_TYPE_FIELD, + targets = TARGET_TYPE_FILE, + // TODO Enable this in google3 once protoc rolls out. + feature_support = { + edition_introduced: EDITION_2023, + edition_deprecated: EDITION_2023, + deprecation_warning: "The legacy closed enum treatment in Java is " + "deprecated and is scheduled to be removed in " + "edition 2025. Mark enum type on the enum " + "definitions themselves rather than on fields.", + }, + edition_defaults = { edition: EDITION_PROTO2, value: "true" }, + edition_defaults = { edition: EDITION_PROTO3, value: "false" } + ]; + + // The UTF8 validation strategy to use. See go/editions-utf8-validation for + // more information on this feature. + enum Utf8Validation { + // Invalid default, which should never be used. + UTF8_VALIDATION_UNKNOWN = 0; + // Respect the UTF8 validation behavior specified by the global + // utf8_validation feature. 
+ DEFAULT = 1; + // Verifies UTF8 validity overriding the global utf8_validation + // feature. This represents the legacy java_string_check_utf8 option. + VERIFY = 2; + } + optional Utf8Validation utf8_validation = 2 [ + retention = RETENTION_RUNTIME, + targets = TARGET_TYPE_FIELD, + targets = TARGET_TYPE_FILE, + // TODO Enable this in google3 once protoc rolls out. + feature_support = { + edition_introduced: EDITION_2023, + edition_deprecated: EDITION_2023, + deprecation_warning: "The Java-specific utf8 validation feature is " + "deprecated and is scheduled to be removed in " + "edition 2025. Utf8 validation behavior should " + "use the global cross-language utf8_validation " + "feature.", + }, + edition_defaults = { edition: EDITION_PROTO2, value: "DEFAULT" } + ]; +} diff --git a/wellknownimports/google/protobuf/source_context.proto b/wellknownimports/google/protobuf/source_context.proto new file mode 100644 index 00000000..0aadb11a --- /dev/null +++ b/wellknownimports/google/protobuf/source_context.proto @@ -0,0 +1,32 @@ +// Copyright 2020-2024 Buf Technologies, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +syntax = "proto3"; + +package google.protobuf; + +option java_package = "com.google.protobuf"; +option java_outer_classname = "SourceContextProto"; +option java_multiple_files = true; +option objc_class_prefix = "GPB"; +option csharp_namespace = "Google.Protobuf.WellKnownTypes"; +option go_package = "google.golang.org/protobuf/types/known/sourcecontextpb"; + +// `SourceContext` represents information about the source of a +// protobuf element, like the file in which it is defined. +message SourceContext { + // The path-qualified name of the .proto file that contained the associated + // protobuf element. For example: `"google/protobuf/source_context.proto"`. + string file_name = 1; +} diff --git a/wellknownimports/google/protobuf/struct.proto b/wellknownimports/google/protobuf/struct.proto new file mode 100644 index 00000000..e34f65ec --- /dev/null +++ b/wellknownimports/google/protobuf/struct.proto @@ -0,0 +1,79 @@ +// Copyright 2020-2024 Buf Technologies, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +syntax = "proto3"; + +package google.protobuf; + +option cc_enable_arenas = true; +option go_package = "google.golang.org/protobuf/types/known/structpb"; +option java_package = "com.google.protobuf"; +option java_outer_classname = "StructProto"; +option java_multiple_files = true; +option objc_class_prefix = "GPB"; +option csharp_namespace = "Google.Protobuf.WellKnownTypes"; + +// `Struct` represents a structured data value, consisting of fields +// which map to dynamically typed values. In some languages, `Struct` +// might be supported by a native representation. For example, in +// scripting languages like JS a struct is represented as an +// object. The details of that representation are described together +// with the proto support for the language. +// +// The JSON representation for `Struct` is JSON object. +message Struct { + // Unordered map of dynamically typed values. + map<string, Value> fields = 1; +} + +// `Value` represents a dynamically typed value which can be either +// null, a number, a string, a boolean, a recursive struct value, or a +// list of values. A producer of value is expected to set one of these +// variants. Absence of any variant indicates an error. +// +// The JSON representation for `Value` is JSON value. +message Value { + // The kind of value. + oneof kind { + // Represents a null value. + NullValue null_value = 1; + // Represents a double value. + double number_value = 2; + // Represents a string value. + string string_value = 3; + // Represents a boolean value. + bool bool_value = 4; + // Represents a structured value. + Struct struct_value = 5; + // Represents a repeated `Value`. + ListValue list_value = 6; + } +} + +// `NullValue` is a singleton enumeration to represent the null value for the +// `Value` type union. +// +// The JSON representation for `NullValue` is JSON `null`. +enum NullValue { + // Null value. + NULL_VALUE = 0; +} + +// `ListValue` is a wrapper around a repeated field of values. 
+// +// The JSON representation for `ListValue` is JSON array. +message ListValue { + // Repeated field of dynamically typed values. + repeated Value values = 1; +} diff --git a/wellknownimports/google/protobuf/timestamp.proto b/wellknownimports/google/protobuf/timestamp.proto new file mode 100644 index 00000000..1ea69a19 --- /dev/null +++ b/wellknownimports/google/protobuf/timestamp.proto @@ -0,0 +1,128 @@ +// Copyright 2020-2024 Buf Technologies, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +syntax = "proto3"; + +package google.protobuf; + +option cc_enable_arenas = true; +option go_package = "google.golang.org/protobuf/types/known/timestamppb"; +option java_package = "com.google.protobuf"; +option java_outer_classname = "TimestampProto"; +option java_multiple_files = true; +option objc_class_prefix = "GPB"; +option csharp_namespace = "Google.Protobuf.WellKnownTypes"; + +// A Timestamp represents a point in time independent of any time zone or local +// calendar, encoded as a count of seconds and fractions of seconds at +// nanosecond resolution. The count is relative to an epoch at UTC midnight on +// January 1, 1970, in the proleptic Gregorian calendar which extends the +// Gregorian calendar backwards to year one. +// +// All minutes are 60 seconds long. Leap seconds are "smeared" so that no leap +// second table is needed for interpretation, using a [24-hour linear +// smear](https://developers.google.com/time/smear). 
+// +// The range is from 0001-01-01T00:00:00Z to 9999-12-31T23:59:59.999999999Z. By +// restricting to that range, we ensure that we can convert to and from [RFC +// 3339](https://www.ietf.org/rfc/rfc3339.txt) date strings. +// +// # Examples +// +// Example 1: Compute Timestamp from POSIX `time()`. +// +// Timestamp timestamp; +// timestamp.set_seconds(time(NULL)); +// timestamp.set_nanos(0); +// +// Example 2: Compute Timestamp from POSIX `gettimeofday()`. +// +// struct timeval tv; +// gettimeofday(&tv, NULL); +// +// Timestamp timestamp; +// timestamp.set_seconds(tv.tv_sec); +// timestamp.set_nanos(tv.tv_usec * 1000); +// +// Example 3: Compute Timestamp from Win32 `GetSystemTimeAsFileTime()`. +// +// FILETIME ft; +// GetSystemTimeAsFileTime(&ft); +// UINT64 ticks = (((UINT64)ft.dwHighDateTime) << 32) | ft.dwLowDateTime; +// +// // A Windows tick is 100 nanoseconds. Windows epoch 1601-01-01T00:00:00Z +// // is 11644473600 seconds before Unix epoch 1970-01-01T00:00:00Z. +// Timestamp timestamp; +// timestamp.set_seconds((INT64) ((ticks / 10000000) - 11644473600LL)); +// timestamp.set_nanos((INT32) ((ticks % 10000000) * 100)); +// +// Example 4: Compute Timestamp from Java `System.currentTimeMillis()`. +// +// long millis = System.currentTimeMillis(); +// +// Timestamp timestamp = Timestamp.newBuilder().setSeconds(millis / 1000) +// .setNanos((int) ((millis % 1000) * 1000000)).build(); +// +// Example 5: Compute Timestamp from Java `Instant.now()`. +// +// Instant now = Instant.now(); +// +// Timestamp timestamp = +// Timestamp.newBuilder().setSeconds(now.getEpochSecond()) +// .setNanos(now.getNano()).build(); +// +// Example 6: Compute Timestamp from current time in Python. +// +// timestamp = Timestamp() +// timestamp.GetCurrentTime() +// +// # JSON Mapping +// +// In JSON format, the Timestamp type is encoded as a string in the +// [RFC 3339](https://www.ietf.org/rfc/rfc3339.txt) format. 
That is, the +// format is "{year}-{month}-{day}T{hour}:{min}:{sec}[.{frac_sec}]Z" +// where {year} is always expressed using four digits while {month}, {day}, +// {hour}, {min}, and {sec} are zero-padded to two digits each. The fractional +// seconds, which can go up to 9 digits (i.e. up to 1 nanosecond resolution), +// are optional. The "Z" suffix indicates the timezone ("UTC"); the timezone +// is required. A proto3 JSON serializer should always use UTC (as indicated by +// "Z") when printing the Timestamp type and a proto3 JSON parser should be +// able to accept both UTC and other timezones (as indicated by an offset). +// +// For example, "2017-01-15T01:30:15.01Z" encodes 15.01 seconds past +// 01:30 UTC on January 15, 2017. +// +// In JavaScript, one can convert a Date object to this format using the +// standard +// [toISOString()](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Date/toISOString) +// method. In Python, a standard `datetime.datetime` object can be converted +// to this format using +// [`strftime`](https://docs.python.org/2/library/time.html#time.strftime) with +// the time format spec '%Y-%m-%dT%H:%M:%S.%fZ'. Likewise, in Java, one can use +// the Joda Time's [`ISODateTimeFormat.dateTime()`]( +// http://joda-time.sourceforge.net/apidocs/org/joda/time/format/ISODateTimeFormat.html#dateTime() +// ) to obtain a formatter capable of generating timestamps in this format. +// +message Timestamp { + // Represents seconds of UTC time since Unix epoch + // 1970-01-01T00:00:00Z. Must be from 0001-01-01T00:00:00Z to + // 9999-12-31T23:59:59Z inclusive. + int64 seconds = 1; + + // Non-negative fractions of a second at nanosecond resolution. Negative + // second values with fractions must still have non-negative nanos values + // that count forward in time. Must be from 0 to 999,999,999 + // inclusive. 
+ int32 nanos = 2; +} diff --git a/wellknownimports/google/protobuf/type.proto b/wellknownimports/google/protobuf/type.proto new file mode 100644 index 00000000..b0cf1c72 --- /dev/null +++ b/wellknownimports/google/protobuf/type.proto @@ -0,0 +1,177 @@ +// Copyright 2020-2024 Buf Technologies, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +syntax = "proto3"; + +package google.protobuf; + +import "google/protobuf/any.proto"; +import "google/protobuf/source_context.proto"; + +option cc_enable_arenas = true; +option java_package = "com.google.protobuf"; +option java_outer_classname = "TypeProto"; +option java_multiple_files = true; +option objc_class_prefix = "GPB"; +option csharp_namespace = "Google.Protobuf.WellKnownTypes"; +option go_package = "google.golang.org/protobuf/types/known/typepb"; + +// A protocol buffer message type. +message Type { + // The fully qualified message name. + string name = 1; + // The list of fields. + repeated Field fields = 2; + // The list of types appearing in `oneof` definitions in this type. + repeated string oneofs = 3; + // The protocol buffer options. + repeated Option options = 4; + // The source context. + SourceContext source_context = 5; + // The source syntax. + Syntax syntax = 6; + // The source edition string, only valid when syntax is SYNTAX_EDITIONS. + string edition = 7; +} + +// A single field of a message type. +message Field { + // Basic field types. + enum Kind { + // Field type unknown. 
+ TYPE_UNKNOWN = 0; + // Field type double. + TYPE_DOUBLE = 1; + // Field type float. + TYPE_FLOAT = 2; + // Field type int64. + TYPE_INT64 = 3; + // Field type uint64. + TYPE_UINT64 = 4; + // Field type int32. + TYPE_INT32 = 5; + // Field type fixed64. + TYPE_FIXED64 = 6; + // Field type fixed32. + TYPE_FIXED32 = 7; + // Field type bool. + TYPE_BOOL = 8; + // Field type string. + TYPE_STRING = 9; + // Field type group. Proto2 syntax only, and deprecated. + TYPE_GROUP = 10; + // Field type message. + TYPE_MESSAGE = 11; + // Field type bytes. + TYPE_BYTES = 12; + // Field type uint32. + TYPE_UINT32 = 13; + // Field type enum. + TYPE_ENUM = 14; + // Field type sfixed32. + TYPE_SFIXED32 = 15; + // Field type sfixed64. + TYPE_SFIXED64 = 16; + // Field type sint32. + TYPE_SINT32 = 17; + // Field type sint64. + TYPE_SINT64 = 18; + } + + // Whether a field is optional, required, or repeated. + enum Cardinality { + // For fields with unknown cardinality. + CARDINALITY_UNKNOWN = 0; + // For optional fields. + CARDINALITY_OPTIONAL = 1; + // For required fields. Proto2 syntax only. + CARDINALITY_REQUIRED = 2; + // For repeated fields. + CARDINALITY_REPEATED = 3; + } + + // The field type. + Kind kind = 1; + // The field cardinality. + Cardinality cardinality = 2; + // The field number. + int32 number = 3; + // The field name. + string name = 4; + // The field type URL, without the scheme, for message or enumeration + // types. Example: `"type.googleapis.com/google.protobuf.Timestamp"`. + string type_url = 6; + // The index of the field type in `Type.oneofs`, for message or enumeration + // types. The first type has index 1; zero means the type is not in the list. + int32 oneof_index = 7; + // Whether to use alternative packed wire representation. + bool packed = 8; + // The protocol buffer options. + repeated Option options = 9; + // The field JSON name. + string json_name = 10; + // The string value of the default value of this field. Proto2 syntax only. 
+ string default_value = 11; +} + +// Enum type definition. +message Enum { + // Enum type name. + string name = 1; + // Enum value definitions. + repeated EnumValue enumvalue = 2; + // Protocol buffer options. + repeated Option options = 3; + // The source context. + SourceContext source_context = 4; + // The source syntax. + Syntax syntax = 5; + // The source edition string, only valid when syntax is SYNTAX_EDITIONS. + string edition = 6; +} + +// Enum value definition. +message EnumValue { + // Enum value name. + string name = 1; + // Enum value number. + int32 number = 2; + // Protocol buffer options. + repeated Option options = 3; +} + +// A protocol buffer option, which can be attached to a message, field, +// enumeration, etc. +message Option { + // The option's name. For protobuf built-in options (options defined in + // descriptor.proto), this is the short name. For example, `"map_entry"`. + // For custom options, it should be the fully-qualified name. For example, + // `"google.api.http"`. + string name = 1; + // The option's value packed in an Any message. If the value is a primitive, + // the corresponding wrapper type defined in google/protobuf/wrappers.proto + // should be used. If the value is an enum, it should be stored as an int32 + // value using the google.protobuf.Int32Value type. + Any value = 2; +} + +// The syntax in which a protocol buffer element is defined. +enum Syntax { + // Syntax `proto2`. + SYNTAX_PROTO2 = 0; + // Syntax `proto3`. + SYNTAX_PROTO3 = 1; + // Syntax `editions`. + SYNTAX_EDITIONS = 2; +} diff --git a/wellknownimports/google/protobuf/wrappers.proto b/wellknownimports/google/protobuf/wrappers.proto new file mode 100644 index 00000000..609e9163 --- /dev/null +++ b/wellknownimports/google/protobuf/wrappers.proto @@ -0,0 +1,97 @@ +// Copyright 2020-2024 Buf Technologies, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +syntax = "proto3"; + +package google.protobuf; + +option cc_enable_arenas = true; +option go_package = "google.golang.org/protobuf/types/known/wrapperspb"; +option java_package = "com.google.protobuf"; +option java_outer_classname = "WrappersProto"; +option java_multiple_files = true; +option objc_class_prefix = "GPB"; +option csharp_namespace = "Google.Protobuf.WellKnownTypes"; + +// Wrapper message for `double`. +// +// The JSON representation for `DoubleValue` is JSON number. +message DoubleValue { + // The double value. + double value = 1; +} + +// Wrapper message for `float`. +// +// The JSON representation for `FloatValue` is JSON number. +message FloatValue { + // The float value. + float value = 1; +} + +// Wrapper message for `int64`. +// +// The JSON representation for `Int64Value` is JSON string. +message Int64Value { + // The int64 value. + int64 value = 1; +} + +// Wrapper message for `uint64`. +// +// The JSON representation for `UInt64Value` is JSON string. +message UInt64Value { + // The uint64 value. + uint64 value = 1; +} + +// Wrapper message for `int32`. +// +// The JSON representation for `Int32Value` is JSON number. +message Int32Value { + // The int32 value. + int32 value = 1; +} + +// Wrapper message for `uint32`. +// +// The JSON representation for `UInt32Value` is JSON number. +message UInt32Value { + // The uint32 value. + uint32 value = 1; +} + +// Wrapper message for `bool`. +// +// The JSON representation for `BoolValue` is JSON `true` and `false`. +message BoolValue { + // The bool value. 
+ bool value = 1; +} + +// Wrapper message for `string`. +// +// The JSON representation for `StringValue` is JSON string. +message StringValue { + // The string value. + string value = 1; +} + +// Wrapper message for `bytes`. +// +// The JSON representation for `BytesValue` is JSON string. +message BytesValue { + // The bytes value. + bytes value = 1; +} diff --git a/wellknownimports/wellknownimports.go b/wellknownimports/wellknownimports.go new file mode 100644 index 00000000..7d0c8f74 --- /dev/null +++ b/wellknownimports/wellknownimports.go @@ -0,0 +1,53 @@ +// Copyright 2020-2024 Buf Technologies, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +// Package wellknownimports provides source code for the well-known import +// files for use with a protocompile.Compiler. +package wellknownimports + +import ( + "embed" + "io" + + "github.com/bufbuild/protocompile" +) + +//go:embed google/protobuf/*.proto google/protobuf/*/*.proto +var files embed.FS + +// WithStandardImports returns a new resolver that can provide the source code for the +// standard imports that are included with protoc. This differs from +// protocompile.WithStandardImports, which uses descriptors embedded in generated +// code in the Protobuf Go module. That function is lighter weight, and does not need +// to bring in additional embedded data outside the Protobuf Go runtime. This version +// includes its own embedded versions of the source files. 
+// +// Unlike protocompile.WithStandardImports, this resolver does not provide results for +// "google/protobuf/go_features.proto" file. This resolver is backed by source files +// that are shipped with the Protobuf installation, which does not include that file. +// +// It is possible that the source code provided by this resolver differs from the +// source code used to create the descriptors provided by protocompile.WithStandardImports. +// That is because that other function depends on the Protobuf Go module, which could +// resolve in user programs to a different version than was used to build this package. +func WithStandardImports(resolver protocompile.Resolver) protocompile.Resolver { + return protocompile.CompositeResolver{ + resolver, + &protocompile.SourceResolver{ + Accessor: func(path string) (io.ReadCloser, error) { + return files.Open(path) + }, + }, + } +} diff --git a/wellknownimports/wellknownimports_test.go b/wellknownimports/wellknownimports_test.go new file mode 100644 index 00000000..b7b0bf38 --- /dev/null +++ b/wellknownimports/wellknownimports_test.go @@ -0,0 +1,114 @@ +// Copyright 2020-2024 Buf Technologies, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +package wellknownimports + +import ( + "context" + "io" + "os" + "testing" + + "github.com/stretchr/testify/require" + "google.golang.org/protobuf/reflect/protoreflect" + "google.golang.org/protobuf/types/descriptorpb" + + "github.com/bufbuild/protocompile" + "github.com/bufbuild/protocompile/linker" +) + +func TestWithStandardImports(t *testing.T) { + t.Parallel() + wellKnownImports := []string{ + "google/protobuf/any.proto", + "google/protobuf/api.proto", + "google/protobuf/compiler/plugin.proto", + "google/protobuf/cpp_features.proto", + "google/protobuf/descriptor.proto", + "google/protobuf/duration.proto", + "google/protobuf/empty.proto", + "google/protobuf/field_mask.proto", + "google/protobuf/java_features.proto", + "google/protobuf/source_context.proto", + "google/protobuf/struct.proto", + "google/protobuf/timestamp.proto", + "google/protobuf/type.proto", + "google/protobuf/wrappers.proto", + } + // make sure we can successfully compile them all + c := protocompile.Compiler{ + Resolver: WithStandardImports(&protocompile.SourceResolver{ + Accessor: func(path string) (io.ReadCloser, error) { + return nil, os.ErrNotExist + }, + }), + RetainASTs: true, + } + ctx := context.Background() + for _, name := range wellKnownImports { + t.Log(name) + fds, err := c.Compile(ctx, name) + if err != nil { + t.Errorf("failed to compile %q: %v", name, err) + continue + } + if len(fds) != 1 { + t.Errorf("Compile returned wrong number of descriptors: expecting 1, got %d", len(fds)) + continue + } + // Make sure they were built from source + result, ok := fds[0].(linker.Result) + require.True(t, ok) + require.NotNil(t, result.AST()) + + if name == "google/protobuf/descriptor.proto" { + // verify the extension declarations are present + d := fds[0].FindDescriptorByName("google.protobuf.FeatureSet") + require.NotNil(t, d) + md, ok := d.(protoreflect.MessageDescriptor) + require.True(t, ok) + var extRangeCount int + for i := 0; i < md.ExtensionRanges().Len(); i++ { + opts, ok := 
md.ExtensionRangeOptions(i).(*descriptorpb.ExtensionRangeOptions) + require.True(t, ok) + extRangeCount += len(opts.GetDeclaration()) + } + require.Greater(t, extRangeCount, 0, "no declarations found for FeatureSet for %q", name) + } + } +} + +func TestCantRedefineWellKnownCustomFeature(t *testing.T) { + t.Parallel() + c := protocompile.Compiler{ + Resolver: WithStandardImports(&protocompile.SourceResolver{ + Accessor: protocompile.SourceAccessorFromMap(map[string]string{ + "features.proto": ` + edition = "2023"; + import "google/protobuf/descriptor.proto"; + message Custom { + bool flag = 1; + } + extend google.protobuf.FeatureSet { + // tag 1000 is declared by pb.cpp so shouldn't be allowed + Custom custom = 1000; + } + `, + }), + }), + } + ctx := context.Background() + _, err := c.Compile(ctx, "features.proto") + require.ErrorContains(t, err, `features.proto:9:56: expected extension with number 1000 to be named pb.cpp, not custom, per declaration at google/protobuf/descriptor.proto`) +}