Sync from Piper @480194141

PROTOBUF_SYNC_PIPER
pull/10760/head
Mike Kruskal 2022-10-10 21:05:53 -07:00
parent eb6ebf9dd0
commit 23f1481386
100 changed files with 2079 additions and 10473 deletions

.gitmodules (vendored): 3 changes
View File

@@ -6,3 +6,6 @@
path = third_party/abseil-cpp
url = https://github.com/abseil/abseil-cpp.git
branch = lts_2022_06_23
[submodule "third_party/jsoncpp"]
path = third_party/jsoncpp
url = https://github.com/open-source-parsers/jsoncpp.git

View File

@@ -26,6 +26,17 @@
* Save code space by avoiding inlining of the MessageLite::~MessageLite destructor, which is large in aggregate code space.
* Breaking change: delete Arena::Init
* Add PROTOBUF_POISON/UNPOISON macros to reduce noise in the source
* Put alignment functions in "arena_align.h"
* Split off `cleanup` arena functions into "arena_cleanup.h"
* Fix signed / unsigned mismatch in CHECK_EQ
* Kill Atomic<>; it's not pulling its weight
* Move AllocationPolicy out of arena_impl, and unify arena_config for bazel
* Fix failure case in table-driven parser.
* Add a new JSON parser.
* Removed old JSON parsing code.
* Introduce the Printer::{SetRedactDebugString,SetRandomizeDebugString} private flags.
* Introduce global flags to control Printer::{SetRedactDebugString, SetRandomizeDebugString}.
* proto3 string fields no longer trigger clang-tidy warning bugprone-branch-clone.
Kotlin
@@ -41,11 +52,17 @@
* More thoroughly annotate public generated code in Java lite protocol buffers.
* Fixed a bug in proto3 Java lite repeated enum fields where copyOnWrite was not called before modifying a previously built message, causing modification of already "built" messages that should be immutable.
* Fix Java reflection serialization of empty packed fields.
* Refactoring java full runtime to reuse sub-message builders and prepare to migrate parsing logic from parse constructor to builder.
* Fix TextFormat parser to build up recurring (but supposedly not repeated) sub-messages directly from text rather than building a new sub-message and merging the fully formed message into the existing field.
* Make message-type extensions merge from wire-format instead of building up instances and merging afterwards. This has much better performance.
* Change the Lite runtime to prefer merging from the wire format into mutable messages rather than building up a new immutable object before merging. This results in fewer allocations and copy operations.
* Move proto wire-format parsing functionality from the private "parsing constructor" to the Builder class.
Python
* Changes ordering of printed fields in .pyi files from lexicographic to the same ordering found in the proto descriptor.
* Adds GeneratedCodeInfo annotations to python proto .pyi outputs as a base64 encoded docstring in the last line of the .pyi file for code analysis tools.
* Fix the message factory's behavior in the Python C++ extension to return the same message class for the same descriptor, even if the factories are different (see the sketch after this list).
* Add type annotation for enum value fields in enum classes.
Compiler
* Print full path name of source .proto file on error
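A minimal sketch of the message-factory fix noted in the Python section above (the item referencing this sketch). It uses only public descriptor_pool / message_factory APIs; the example.proto content is invented for illustration, and the final identity check describes the intended behavior under the Python C++ extension after this change, not a guaranteed result elsewhere:

from google.protobuf import descriptor_pb2
from google.protobuf import descriptor_pool
from google.protobuf import message_factory

# Build a throwaway descriptor dynamically (names are made up for the example).
pool = descriptor_pool.DescriptorPool()
file_proto = descriptor_pb2.FileDescriptorProto()
file_proto.name = 'example.proto'
file_proto.package = 'example'
msg_proto = file_proto.message_type.add()
msg_proto.name = 'Thing'
field_proto = msg_proto.field.add()
field_proto.name = 'id'
field_proto.number = 1
field_proto.type = descriptor_pb2.FieldDescriptorProto.TYPE_INT32
field_proto.label = descriptor_pb2.FieldDescriptorProto.LABEL_OPTIONAL
pool.Add(file_proto)
desc = pool.FindMessageTypeByName('example.Thing')

# Two distinct factories sharing one descriptor: per the changelog entry,
# the C++ extension should now return the same class for both.
cls_a = message_factory.MessageFactory(pool).GetPrototype(desc)
cls_b = message_factory.MessageFactory(pool).GetPrototype(desc)
print(cls_a is cls_b)  # expected: True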

File diff suppressed because it is too large.

File diff suppressed because it is too large.

View File

@@ -31,7 +31,6 @@
package com.google.protobuf;
import static java.lang.annotation.ElementType.METHOD;
import static java.lang.annotation.ElementType.TYPE;
import static java.lang.annotation.RetentionPolicy.CLASS;
import java.lang.annotation.Documented;
@@ -45,6 +44,6 @@ import java.lang.annotation.Target;
* annotated with {@code @CheckReturnValue} to exempt specific methods from the default.
*/
@Documented
@Target({METHOD, TYPE})
@Target(METHOD) // TODO(kak): consider adding CONSTRUCTOR later if necessary
@Retention(CLASS)
@interface CanIgnoreReturnValue {}

View File

@@ -36,7 +36,6 @@ package protobuf.experimental;
option java_package = "com.google.protobuf.testing";
option java_outer_classname = "Proto2Testing";
message Proto2SpecialFieldName {
optional double regular_name = 1;
optional int32 cached_size = 2;

View File

@@ -36,7 +36,6 @@ package protobuf.experimental.lite;
option java_package = "com.google.protobuf.testing";
option java_outer_classname = "Proto2TestingLite";
message Proto2MessageLite {
enum TestEnum {

View File

@@ -36,7 +36,7 @@ build_php_c() {
test_php_c
}
mkdir build
mkdir -p build
pushd build
cmake ..
cmake --build . -- -j20

View File

@@ -1,7 +1 @@
BasedOnStyle: Google
# Ignore pddm directives.
CommentPragmas: '^%'
# Following the rest of the protobuf code.
ColumnLimit: 80

View File

@@ -340,6 +340,10 @@ cc_dist_library(
tags = ["manual"],
deps = [
"//src/google/protobuf:arena",
"//src/google/protobuf:arena_align",
"//src/google/protobuf:arena_allocation_policy",
"//src/google/protobuf:arena_cleanup",
"//src/google/protobuf:arena_config",
"//src/google/protobuf:protobuf_lite",
"//src/google/protobuf/io",
"//src/google/protobuf/io:io_win32",
@@ -361,6 +365,10 @@ cc_dist_library(
"//src/google/protobuf:wkt_cc_proto",
"//src/google/protobuf:protobuf_nowkt",
"//src/google/protobuf:arena",
"//src/google/protobuf:arena_align",
"//src/google/protobuf:arena_allocation_policy",
"//src/google/protobuf:arena_cleanup",
"//src/google/protobuf:arena_config",
"//src/google/protobuf:protobuf_lite",
"//src/google/protobuf:port_def",
"//src/google/protobuf/compiler:importer",
@@ -371,6 +379,14 @@ cc_dist_library(
"//src/google/protobuf/io:tokenizer",
"//src/google/protobuf/io:zero_copy_sink",
"//src/google/protobuf/json",
"//src/google/protobuf/json:descriptor_traits",
"//src/google/protobuf/json:lexer",
"//src/google/protobuf/json:message_path",
"//src/google/protobuf/json:parser",
"//src/google/protobuf/json:unparser",
"//src/google/protobuf/json:untyped_message",
"//src/google/protobuf/json:writer",
"//src/google/protobuf/json:zero_copy_buffered_stream",
"//src/google/protobuf/stubs",
"//src/google/protobuf/stubs:lite",
"//src/google/protobuf/util:delimited_message_util",

View File

@@ -1,8 +1,7 @@
load("@bazel_skylib//lib:versions.bzl", "versions")
load("@bazel_skylib//lib:collections.bzl", "collections")
load("@rules_cc//cc:defs.bzl", "cc_library", "objc_library")
load("@rules_cc//cc:defs.bzl", "objc_library")
load("@rules_proto//proto:defs.bzl", "ProtoInfo")
load("@rules_python//python:defs.bzl", "py_library", "py_test")
load("@rules_python//python:defs.bzl", "py_library")
def _GetPath(ctx, path):
if ctx.label.workspace_root:
@@ -41,13 +40,14 @@ def _SourceDir(ctx):
def _ObjcBase(srcs):
return [
"".join([token.capitalize() for token in src[:-len(".proto")].split("_")])
for src in srcs]
for src in srcs
]
def _ObjcHdrs(srcs):
return[src + ".pbobjc.h" for src in _ObjcBase(srcs)]
return [src + ".pbobjc.h" for src in _ObjcBase(srcs)]
def _ObjcSrcs(srcs):
return[src + ".pbobjc.m" for src in _ObjcBase(srcs)]
return [src + ".pbobjc.m" for src in _ObjcBase(srcs)]
def _ObjcOuts(srcs, out_type):
if out_type == "hdrs":
@@ -68,7 +68,8 @@ def _RubyOuts(srcs):
def _CsharpOuts(srcs):
return [
"".join([token.capitalize() for token in src[:-len(".proto")].split("_")]) + ".cs"
for src in srcs]
for src in srcs
]
ProtoGenInfo = provider(
fields = ["srcs", "import_flags", "deps"],
@@ -97,7 +98,7 @@ def _proto_gen_impl(ctx):
if ctx.attr.includes:
for include in ctx.attr.includes:
import_flags += ["-I"+_GetPath(ctx,include)]
import_flags += ["-I" + _GetPath(ctx, include)]
import_flags = depset(direct = import_flags)
@@ -150,6 +151,7 @@ def _proto_gen_impl(ctx):
outs.extend(_PyOuts([src.basename], use_grpc_plugin = use_grpc_plugin))
elif lang == "ruby":
outs.extend(_RubyOuts([src.basename]))
# Otherwise, rely on user-supplied outs.
args += [("--%s_out=" + path_tpl) % (lang, gen_dir)]
@@ -262,7 +264,7 @@ _proto_gen = rule(
"langs": attr.string_list(),
"outs": attr.string_list(),
"out_type": attr.string(
default = "all"
default = "all",
),
},
output_to_genfiles = True,
@@ -642,7 +644,7 @@ def _source_proto_library(
native.filegroup(
name = name,
srcs = [":%s_genproto"%name],
srcs = [":%s_genproto" % name],
testonly = testonly,
visibility = visibility,
**kwargs

View File

@@ -275,6 +275,13 @@ py_test(
deps = [":python_test_lib"],
)
py_test(
name = "field_mask_test",
srcs = ["google/protobuf/internal/field_mask_test.py"],
imports = ["."],
deps = [":python_test_lib"],
)
py_test(
name = "generator_test",
srcs = ["google/protobuf/internal/generator_test.py"],
@@ -453,12 +460,10 @@ pkg_files(
"google/protobuf/pyext/README",
"google/protobuf/python_protobuf.h",
"internal.bzl",
"mox.py",
"python_version.py",
"release.sh",
"setup.cfg",
"setup.py",
"stubout.py",
"tox.ini",
],
strip_prefix = strip_prefix.from_root(""),

View File

@@ -0,0 +1,333 @@
# Protocol Buffers - Google's data interchange format
# Copyright 2008 Google Inc. All rights reserved.
# https://developers.google.com/protocol-buffers/
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
# * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""Contains FieldMask class."""
from google.protobuf.descriptor import FieldDescriptor
class FieldMask(object):
"""Class for FieldMask message type."""
__slots__ = ()
def ToJsonString(self):
"""Converts FieldMask to string according to proto3 JSON spec."""
camelcase_paths = []
for path in self.paths:
camelcase_paths.append(_SnakeCaseToCamelCase(path))
return ','.join(camelcase_paths)
def FromJsonString(self, value):
"""Converts string to FieldMask according to proto3 JSON spec."""
if not isinstance(value, str):
raise ValueError('FieldMask JSON value not a string: {!r}'.format(value))
self.Clear()
if value:
for path in value.split(','):
self.paths.append(_CamelCaseToSnakeCase(path))
def IsValidForDescriptor(self, message_descriptor):
"""Checks whether the FieldMask is valid for Message Descriptor."""
for path in self.paths:
if not _IsValidPath(message_descriptor, path):
return False
return True
def AllFieldsFromDescriptor(self, message_descriptor):
"""Gets all direct fields of Message Descriptor to FieldMask."""
self.Clear()
for field in message_descriptor.fields:
self.paths.append(field.name)
def CanonicalFormFromMask(self, mask):
"""Converts a FieldMask to the canonical form.
Removes paths that are covered by another path. For example,
"foo.bar" is covered by "foo" and will be removed if "foo"
is also in the FieldMask. Then sorts all paths in alphabetical order.
Args:
mask: The original FieldMask to be converted.
"""
tree = _FieldMaskTree(mask)
tree.ToFieldMask(self)
def Union(self, mask1, mask2):
"""Merges mask1 and mask2 into this FieldMask."""
_CheckFieldMaskMessage(mask1)
_CheckFieldMaskMessage(mask2)
tree = _FieldMaskTree(mask1)
tree.MergeFromFieldMask(mask2)
tree.ToFieldMask(self)
def Intersect(self, mask1, mask2):
"""Intersects mask1 and mask2 into this FieldMask."""
_CheckFieldMaskMessage(mask1)
_CheckFieldMaskMessage(mask2)
tree = _FieldMaskTree(mask1)
intersection = _FieldMaskTree()
for path in mask2.paths:
tree.IntersectPath(path, intersection)
intersection.ToFieldMask(self)
def MergeMessage(
self, source, destination,
replace_message_field=False, replace_repeated_field=False):
"""Merges fields specified in FieldMask from source to destination.
Args:
source: Source message.
destination: The destination message to be merged into.
replace_message_field: Replace message field if True. Merge message
field if False.
replace_repeated_field: Replace repeated field if True. Append
elements of repeated field if False.
"""
tree = _FieldMaskTree(self)
tree.MergeMessage(
source, destination, replace_message_field, replace_repeated_field)
def _IsValidPath(message_descriptor, path):
"""Checks whether the path is valid for Message Descriptor."""
parts = path.split('.')
last = parts.pop()
for name in parts:
field = message_descriptor.fields_by_name.get(name)
if (field is None or
field.label == FieldDescriptor.LABEL_REPEATED or
field.type != FieldDescriptor.TYPE_MESSAGE):
return False
message_descriptor = field.message_type
return last in message_descriptor.fields_by_name
def _CheckFieldMaskMessage(message):
"""Raises ValueError if message is not a FieldMask."""
message_descriptor = message.DESCRIPTOR
if (message_descriptor.name != 'FieldMask' or
message_descriptor.file.name != 'google/protobuf/field_mask.proto'):
raise ValueError('Message {0} is not a FieldMask.'.format(
message_descriptor.full_name))
def _SnakeCaseToCamelCase(path_name):
"""Converts a path name from snake_case to camelCase."""
result = []
after_underscore = False
for c in path_name:
if c.isupper():
raise ValueError(
'Fail to print FieldMask to Json string: Path name '
'{0} must not contain uppercase letters.'.format(path_name))
if after_underscore:
if c.islower():
result.append(c.upper())
after_underscore = False
else:
raise ValueError(
'Fail to print FieldMask to Json string: The '
'character after a "_" must be a lowercase letter '
'in path name {0}.'.format(path_name))
elif c == '_':
after_underscore = True
else:
result += c
if after_underscore:
raise ValueError('Fail to print FieldMask to Json string: Trailing "_" '
'in path name {0}.'.format(path_name))
return ''.join(result)
def _CamelCaseToSnakeCase(path_name):
"""Converts a field name from camelCase to snake_case."""
result = []
for c in path_name:
if c == '_':
raise ValueError('Fail to parse FieldMask: Path name '
'{0} must not contain "_"s.'.format(path_name))
if c.isupper():
result += '_'
result += c.lower()
else:
result += c
return ''.join(result)
class _FieldMaskTree(object):
"""Represents a FieldMask in a tree structure.
For example, given a FieldMask "foo.bar,foo.baz,bar.baz",
the FieldMaskTree will be:
[_root] -+- foo -+- bar
| |
| +- baz
|
+- bar --- baz
In the tree, each leaf node represents a field path.
"""
__slots__ = ('_root',)
def __init__(self, field_mask=None):
"""Initializes the tree by FieldMask."""
self._root = {}
if field_mask:
self.MergeFromFieldMask(field_mask)
def MergeFromFieldMask(self, field_mask):
"""Merges a FieldMask to the tree."""
for path in field_mask.paths:
self.AddPath(path)
def AddPath(self, path):
"""Adds a field path into the tree.
If the field path to add is a sub-path of an existing field path
in the tree (i.e., a leaf node), it means the tree already matches
the given path so nothing will be added to the tree. If the path
matches an existing non-leaf node in the tree, that non-leaf node
will be turned into a leaf node with all its children removed because
the path matches all the node's children. Otherwise, a new path will
be added.
Args:
path: The field path to add.
"""
node = self._root
for name in path.split('.'):
if name not in node:
node[name] = {}
elif not node[name]:
# Pre-existing empty node implies we already have this entire tree.
return
node = node[name]
# Remove any sub-trees we might have had.
node.clear()
def ToFieldMask(self, field_mask):
"""Converts the tree to a FieldMask."""
field_mask.Clear()
_AddFieldPaths(self._root, '', field_mask)
def IntersectPath(self, path, intersection):
"""Calculates the intersection part of a field path with this tree.
Args:
path: The field path to calculates.
intersection: The out tree to record the intersection part.
"""
node = self._root
for name in path.split('.'):
if name not in node:
return
elif not node[name]:
intersection.AddPath(path)
return
node = node[name]
intersection.AddLeafNodes(path, node)
def AddLeafNodes(self, prefix, node):
"""Adds leaf nodes begin with prefix to this tree."""
if not node:
self.AddPath(prefix)
for name in node:
child_path = prefix + '.' + name
self.AddLeafNodes(child_path, node[name])
def MergeMessage(
self, source, destination,
replace_message, replace_repeated):
"""Merge all fields specified by this tree from source to destination."""
_MergeMessage(
self._root, source, destination, replace_message, replace_repeated)
def _StrConvert(value):
"""Converts value to str if it is not."""
# This file is imported by c extension and some methods like ClearField
# requires string for the field name. py2/py3 has different text
# type and may use unicode.
if not isinstance(value, str):
return value.encode('utf-8')
return value
def _MergeMessage(
node, source, destination, replace_message, replace_repeated):
"""Merge all fields specified by a sub-tree from source to destination."""
source_descriptor = source.DESCRIPTOR
for name in node:
child = node[name]
field = source_descriptor.fields_by_name[name]
if field is None:
raise ValueError('Error: Can\'t find field {0} in message {1}.'.format(
name, source_descriptor.full_name))
if child:
# Sub-paths are only allowed for singular message fields.
if (field.label == FieldDescriptor.LABEL_REPEATED or
field.cpp_type != FieldDescriptor.CPPTYPE_MESSAGE):
raise ValueError('Error: Field {0} in message {1} is not a singular '
'message field and cannot have sub-fields.'.format(
name, source_descriptor.full_name))
if source.HasField(name):
_MergeMessage(
child, getattr(source, name), getattr(destination, name),
replace_message, replace_repeated)
continue
if field.label == FieldDescriptor.LABEL_REPEATED:
if replace_repeated:
destination.ClearField(_StrConvert(name))
repeated_source = getattr(source, name)
repeated_destination = getattr(destination, name)
repeated_destination.MergeFrom(repeated_source)
else:
if field.cpp_type == FieldDescriptor.CPPTYPE_MESSAGE:
if replace_message:
destination.ClearField(_StrConvert(name))
if source.HasField(name):
getattr(destination, name).MergeFrom(getattr(source, name))
else:
setattr(destination, name, getattr(source, name))
def _AddFieldPaths(node, prefix, field_mask):
"""Adds the field paths descended from node to field_mask."""
if not node and prefix:
field_mask.paths.append(prefix)
return
for name in sorted(node):
if prefix:
child_path = prefix + '.' + name
else:
child_path = name
_AddFieldPaths(node[name], child_path, field_mask)
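For orientation, a brief, hedged usage sketch of the helpers defined above. It mirrors the tests in the next file and assumes protobuf's generated field_mask_pb2 module, whose FieldMask exposes these methods (as those tests show):

from google.protobuf import field_mask_pb2

mask = field_mask_pb2.FieldMask()
mask.FromJsonString('fooBar,child.payload')  # proto3 JSON form (camelCase)
print(list(mask.paths))                      # ['foo_bar', 'child.payload']
print(mask.ToJsonString())                   # 'fooBar,child.payload'

# Canonical form: paths are sorted and redundant sub-paths are dropped.
canonical = field_mask_pb2.FieldMask()
canonical.CanonicalFormFromMask(mask)
print(canonical.ToJsonString())              # 'child.payload,fooBar'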

View File

@@ -0,0 +1,400 @@
# Protocol Buffers - Google's data interchange format
# Copyright 2008 Google Inc. All rights reserved.
# https://developers.google.com/protocol-buffers/
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
# * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""Test for google.protobuf.internal.well_known_types."""
import unittest
from google.protobuf import field_mask_pb2
from google.protobuf import map_unittest_pb2
from google.protobuf import unittest_pb2
from google.protobuf.internal import field_mask
from google.protobuf.internal import test_util
from google.protobuf import descriptor
class FieldMaskTest(unittest.TestCase):
def testStringFormat(self):
mask = field_mask_pb2.FieldMask()
self.assertEqual('', mask.ToJsonString())
mask.paths.append('foo')
self.assertEqual('foo', mask.ToJsonString())
mask.paths.append('bar')
self.assertEqual('foo,bar', mask.ToJsonString())
mask.FromJsonString('')
self.assertEqual('', mask.ToJsonString())
mask.FromJsonString('foo')
self.assertEqual(['foo'], mask.paths)
mask.FromJsonString('foo,bar')
self.assertEqual(['foo', 'bar'], mask.paths)
# Test camel case
mask.Clear()
mask.paths.append('foo_bar')
self.assertEqual('fooBar', mask.ToJsonString())
mask.paths.append('bar_quz')
self.assertEqual('fooBar,barQuz', mask.ToJsonString())
mask.FromJsonString('')
self.assertEqual('', mask.ToJsonString())
self.assertEqual([], mask.paths)
mask.FromJsonString('fooBar')
self.assertEqual(['foo_bar'], mask.paths)
mask.FromJsonString('fooBar,barQuz')
self.assertEqual(['foo_bar', 'bar_quz'], mask.paths)
def testDescriptorToFieldMask(self):
mask = field_mask_pb2.FieldMask()
msg_descriptor = unittest_pb2.TestAllTypes.DESCRIPTOR
mask.AllFieldsFromDescriptor(msg_descriptor)
self.assertEqual(76, len(mask.paths))
self.assertTrue(mask.IsValidForDescriptor(msg_descriptor))
for field in msg_descriptor.fields:
self.assertTrue(field.name in mask.paths)
def testIsValidForDescriptor(self):
msg_descriptor = unittest_pb2.TestAllTypes.DESCRIPTOR
# Empty mask
mask = field_mask_pb2.FieldMask()
self.assertTrue(mask.IsValidForDescriptor(msg_descriptor))
# All fields from descriptor
mask.AllFieldsFromDescriptor(msg_descriptor)
self.assertTrue(mask.IsValidForDescriptor(msg_descriptor))
# Child under optional message
mask.paths.append('optional_nested_message.bb')
self.assertTrue(mask.IsValidForDescriptor(msg_descriptor))
# Repeated field is only allowed in the last position of path
mask.paths.append('repeated_nested_message.bb')
self.assertFalse(mask.IsValidForDescriptor(msg_descriptor))
# Invalid top level field
mask = field_mask_pb2.FieldMask()
mask.paths.append('xxx')
self.assertFalse(mask.IsValidForDescriptor(msg_descriptor))
# Invalid field in root
mask = field_mask_pb2.FieldMask()
mask.paths.append('xxx.zzz')
self.assertFalse(mask.IsValidForDescriptor(msg_descriptor))
# Invalid field in internal node
mask = field_mask_pb2.FieldMask()
mask.paths.append('optional_nested_message.xxx.zzz')
self.assertFalse(mask.IsValidForDescriptor(msg_descriptor))
# Invalid field in leaf
mask = field_mask_pb2.FieldMask()
mask.paths.append('optional_nested_message.xxx')
self.assertFalse(mask.IsValidForDescriptor(msg_descriptor))
def testCanonicalFrom(self):
mask = field_mask_pb2.FieldMask()
out_mask = field_mask_pb2.FieldMask()
# Paths will be sorted.
mask.FromJsonString('baz.quz,bar,foo')
out_mask.CanonicalFormFromMask(mask)
self.assertEqual('bar,baz.quz,foo', out_mask.ToJsonString())
# Duplicated paths will be removed.
mask.FromJsonString('foo,bar,foo')
out_mask.CanonicalFormFromMask(mask)
self.assertEqual('bar,foo', out_mask.ToJsonString())
# Sub-paths of other paths will be removed.
mask.FromJsonString('foo.b1,bar.b1,foo.b2,bar')
out_mask.CanonicalFormFromMask(mask)
self.assertEqual('bar,foo.b1,foo.b2', out_mask.ToJsonString())
# Test more deeply nested cases.
mask.FromJsonString(
'foo.bar.baz1,foo.bar.baz2.quz,foo.bar.baz2')
out_mask.CanonicalFormFromMask(mask)
self.assertEqual('foo.bar.baz1,foo.bar.baz2',
out_mask.ToJsonString())
mask.FromJsonString(
'foo.bar.baz1,foo.bar.baz2,foo.bar.baz2.quz')
out_mask.CanonicalFormFromMask(mask)
self.assertEqual('foo.bar.baz1,foo.bar.baz2',
out_mask.ToJsonString())
mask.FromJsonString(
'foo.bar.baz1,foo.bar.baz2,foo.bar.baz2.quz,foo.bar')
out_mask.CanonicalFormFromMask(mask)
self.assertEqual('foo.bar', out_mask.ToJsonString())
mask.FromJsonString(
'foo.bar.baz1,foo.bar.baz2,foo.bar.baz2.quz,foo')
out_mask.CanonicalFormFromMask(mask)
self.assertEqual('foo', out_mask.ToJsonString())
def testUnion(self):
mask1 = field_mask_pb2.FieldMask()
mask2 = field_mask_pb2.FieldMask()
out_mask = field_mask_pb2.FieldMask()
mask1.FromJsonString('foo,baz')
mask2.FromJsonString('bar,quz')
out_mask.Union(mask1, mask2)
self.assertEqual('bar,baz,foo,quz', out_mask.ToJsonString())
# Overlap with duplicated paths.
mask1.FromJsonString('foo,baz.bb')
mask2.FromJsonString('baz.bb,quz')
out_mask.Union(mask1, mask2)
self.assertEqual('baz.bb,foo,quz', out_mask.ToJsonString())
# Overlap with paths covering some other paths.
mask1.FromJsonString('foo.bar.baz,quz')
mask2.FromJsonString('foo.bar,bar')
out_mask.Union(mask1, mask2)
self.assertEqual('bar,foo.bar,quz', out_mask.ToJsonString())
src = unittest_pb2.TestAllTypes()
with self.assertRaises(ValueError):
out_mask.Union(src, mask2)
def testIntersect(self):
mask1 = field_mask_pb2.FieldMask()
mask2 = field_mask_pb2.FieldMask()
out_mask = field_mask_pb2.FieldMask()
# Test cases without overlapping.
mask1.FromJsonString('foo,baz')
mask2.FromJsonString('bar,quz')
out_mask.Intersect(mask1, mask2)
self.assertEqual('', out_mask.ToJsonString())
self.assertEqual(len(out_mask.paths), 0)
self.assertEqual(out_mask.paths, [])
# Overlap with duplicated paths.
mask1.FromJsonString('foo,baz.bb')
mask2.FromJsonString('baz.bb,quz')
out_mask.Intersect(mask1, mask2)
self.assertEqual('baz.bb', out_mask.ToJsonString())
# Overlap with paths covering some other paths.
mask1.FromJsonString('foo.bar.baz,quz')
mask2.FromJsonString('foo.bar,bar')
out_mask.Intersect(mask1, mask2)
self.assertEqual('foo.bar.baz', out_mask.ToJsonString())
mask1.FromJsonString('foo.bar,bar')
mask2.FromJsonString('foo.bar.baz,quz')
out_mask.Intersect(mask1, mask2)
self.assertEqual('foo.bar.baz', out_mask.ToJsonString())
# Intersect '' with ''
mask1.Clear()
mask2.Clear()
mask1.paths.append('')
mask2.paths.append('')
self.assertEqual(mask1.paths, [''])
self.assertEqual('', mask1.ToJsonString())
out_mask.Intersect(mask1, mask2)
self.assertEqual(out_mask.paths, [])
def testMergeMessageWithoutMapFields(self):
# Test merge one field.
src = unittest_pb2.TestAllTypes()
test_util.SetAllFields(src)
for field in src.DESCRIPTOR.fields:
if field.containing_oneof:
continue
field_name = field.name
dst = unittest_pb2.TestAllTypes()
# Only set one path to mask.
mask = field_mask_pb2.FieldMask()
mask.paths.append(field_name)
mask.MergeMessage(src, dst)
# The expected result message.
msg = unittest_pb2.TestAllTypes()
if field.label == descriptor.FieldDescriptor.LABEL_REPEATED:
repeated_src = getattr(src, field_name)
repeated_msg = getattr(msg, field_name)
if field.cpp_type == descriptor.FieldDescriptor.CPPTYPE_MESSAGE:
for item in repeated_src:
repeated_msg.add().CopyFrom(item)
else:
repeated_msg.extend(repeated_src)
elif field.cpp_type == descriptor.FieldDescriptor.CPPTYPE_MESSAGE:
getattr(msg, field_name).CopyFrom(getattr(src, field_name))
else:
setattr(msg, field_name, getattr(src, field_name))
# Only field specified in mask is merged.
self.assertEqual(msg, dst)
# Test merge nested fields.
nested_src = unittest_pb2.NestedTestAllTypes()
nested_dst = unittest_pb2.NestedTestAllTypes()
nested_src.child.payload.optional_int32 = 1234
nested_src.child.child.payload.optional_int32 = 5678
mask = field_mask_pb2.FieldMask()
mask.FromJsonString('child.payload')
mask.MergeMessage(nested_src, nested_dst)
self.assertEqual(1234, nested_dst.child.payload.optional_int32)
self.assertEqual(0, nested_dst.child.child.payload.optional_int32)
mask.FromJsonString('child.child.payload')
mask.MergeMessage(nested_src, nested_dst)
self.assertEqual(1234, nested_dst.child.payload.optional_int32)
self.assertEqual(5678, nested_dst.child.child.payload.optional_int32)
nested_dst.Clear()
mask.FromJsonString('child.child.payload')
mask.MergeMessage(nested_src, nested_dst)
self.assertEqual(0, nested_dst.child.payload.optional_int32)
self.assertEqual(5678, nested_dst.child.child.payload.optional_int32)
nested_dst.Clear()
mask.FromJsonString('child')
mask.MergeMessage(nested_src, nested_dst)
self.assertEqual(1234, nested_dst.child.payload.optional_int32)
self.assertEqual(5678, nested_dst.child.child.payload.optional_int32)
# Test MergeOptions.
nested_dst.Clear()
nested_dst.child.payload.optional_int64 = 4321
# Message fields will be merged by default.
mask.FromJsonString('child.payload')
mask.MergeMessage(nested_src, nested_dst)
self.assertEqual(1234, nested_dst.child.payload.optional_int32)
self.assertEqual(4321, nested_dst.child.payload.optional_int64)
# Change the behavior to replace message fields.
mask.FromJsonString('child.payload')
mask.MergeMessage(nested_src, nested_dst, True, False)
self.assertEqual(1234, nested_dst.child.payload.optional_int32)
self.assertEqual(0, nested_dst.child.payload.optional_int64)
# By default, fields missing in source are not cleared in destination.
nested_dst.payload.optional_int32 = 1234
self.assertTrue(nested_dst.HasField('payload'))
mask.FromJsonString('payload')
mask.MergeMessage(nested_src, nested_dst)
self.assertTrue(nested_dst.HasField('payload'))
# But they are cleared when replacing message fields.
nested_dst.Clear()
nested_dst.payload.optional_int32 = 1234
mask.FromJsonString('payload')
mask.MergeMessage(nested_src, nested_dst, True, False)
self.assertFalse(nested_dst.HasField('payload'))
nested_src.payload.repeated_int32.append(1234)
nested_dst.payload.repeated_int32.append(5678)
# Repeated fields will be appended by default.
mask.FromJsonString('payload.repeatedInt32')
mask.MergeMessage(nested_src, nested_dst)
self.assertEqual(2, len(nested_dst.payload.repeated_int32))
self.assertEqual(5678, nested_dst.payload.repeated_int32[0])
self.assertEqual(1234, nested_dst.payload.repeated_int32[1])
# Change the behavior to replace repeated fields.
mask.FromJsonString('payload.repeatedInt32')
mask.MergeMessage(nested_src, nested_dst, False, True)
self.assertEqual(1, len(nested_dst.payload.repeated_int32))
self.assertEqual(1234, nested_dst.payload.repeated_int32[0])
# Test Merge oneof field.
new_msg = unittest_pb2.TestOneof2()
dst = unittest_pb2.TestOneof2()
dst.foo_message.moo_int = 1
mask = field_mask_pb2.FieldMask()
mask.FromJsonString('fooMessage,fooLazyMessage.mooInt')
mask.MergeMessage(new_msg, dst)
self.assertTrue(dst.HasField('foo_message'))
self.assertFalse(dst.HasField('foo_lazy_message'))
def testMergeMessageWithMapField(self):
empty_map = map_unittest_pb2.TestRecursiveMapMessage()
src_level_2 = map_unittest_pb2.TestRecursiveMapMessage()
src_level_2.a['src level 2'].CopyFrom(empty_map)
src = map_unittest_pb2.TestRecursiveMapMessage()
src.a['common key'].CopyFrom(src_level_2)
src.a['src level 1'].CopyFrom(src_level_2)
dst_level_2 = map_unittest_pb2.TestRecursiveMapMessage()
dst_level_2.a['dst level 2'].CopyFrom(empty_map)
dst = map_unittest_pb2.TestRecursiveMapMessage()
dst.a['common key'].CopyFrom(dst_level_2)
dst.a['dst level 1'].CopyFrom(empty_map)
mask = field_mask_pb2.FieldMask()
mask.FromJsonString('a')
mask.MergeMessage(src, dst)
# map from dst is replaced with map from src.
self.assertEqual(dst.a['common key'], src_level_2)
self.assertEqual(dst.a['src level 1'], src_level_2)
self.assertEqual(dst.a['dst level 1'], empty_map)
def testMergeErrors(self):
src = unittest_pb2.TestAllTypes()
dst = unittest_pb2.TestAllTypes()
mask = field_mask_pb2.FieldMask()
test_util.SetAllFields(src)
mask.FromJsonString('optionalInt32.field')
with self.assertRaises(ValueError) as e:
mask.MergeMessage(src, dst)
self.assertEqual('Error: Field optional_int32 in message '
'protobuf_unittest.TestAllTypes is not a singular '
'message field and cannot have sub-fields.',
str(e.exception))
def testSnakeCaseToCamelCase(self):
self.assertEqual('fooBar',
field_mask._SnakeCaseToCamelCase('foo_bar'))
self.assertEqual('FooBar',
field_mask._SnakeCaseToCamelCase('_foo_bar'))
self.assertEqual('foo3Bar',
field_mask._SnakeCaseToCamelCase('foo3_bar'))
# No uppercase letter is allowed.
self.assertRaisesRegex(
ValueError,
'Fail to print FieldMask to Json string: Path name Foo must '
'not contain uppercase letters.',
field_mask._SnakeCaseToCamelCase, 'Foo')
# Any character after a "_" must be a lowercase letter.
# 1. "_" cannot be followed by another "_".
# 2. "_" cannot be followed by a digit.
# 3. "_" cannot appear as the last character.
self.assertRaisesRegex(
ValueError,
'Fail to print FieldMask to Json string: The character after a '
'"_" must be a lowercase letter in path name foo__bar.',
field_mask._SnakeCaseToCamelCase, 'foo__bar')
self.assertRaisesRegex(
ValueError,
'Fail to print FieldMask to Json string: The character after a '
'"_" must be a lowercase letter in path name foo_3bar.',
field_mask._SnakeCaseToCamelCase, 'foo_3bar')
self.assertRaisesRegex(
ValueError,
'Fail to print FieldMask to Json string: Trailing "_" in path '
'name foo_bar_.', field_mask._SnakeCaseToCamelCase, 'foo_bar_')
def testCamelCaseToSnakeCase(self):
self.assertEqual('foo_bar',
field_mask._CamelCaseToSnakeCase('fooBar'))
self.assertEqual('_foo_bar',
field_mask._CamelCaseToSnakeCase('FooBar'))
self.assertEqual('foo3_bar',
field_mask._CamelCaseToSnakeCase('foo3Bar'))
self.assertRaisesRegex(
ValueError,
'Fail to parse FieldMask: Path name foo_bar must not contain "_"s.',
field_mask._CamelCaseToSnakeCase, 'foo_bar')
if __name__ == '__main__':
unittest.main()

View File

@@ -30,7 +30,6 @@
syntax = "proto2";
package google.protobuf.python.internal;
message TestEnumValues {
@@ -53,4 +52,3 @@ message TestMissingEnumValues {
message JustString {
required string dummy = 1;
}

View File

@@ -44,7 +44,9 @@ import calendar
import collections.abc
import datetime
from google.protobuf.descriptor import FieldDescriptor
from google.protobuf.internal import field_mask
FieldMask = field_mask.FieldMask
_TIMESTAMPFOMAT = '%Y-%m-%dT%H:%M:%S'
_NANOS_PER_SECOND = 1000000000
@@ -430,306 +432,6 @@ def _RoundTowardZero(value, divider):
return result
class FieldMask(object):
"""Class for FieldMask message type."""
__slots__ = ()
def ToJsonString(self):
"""Converts FieldMask to string according to proto3 JSON spec."""
camelcase_paths = []
for path in self.paths:
camelcase_paths.append(_SnakeCaseToCamelCase(path))
return ','.join(camelcase_paths)
def FromJsonString(self, value):
"""Converts string to FieldMask according to proto3 JSON spec."""
if not isinstance(value, str):
raise ValueError('FieldMask JSON value not a string: {!r}'.format(value))
self.Clear()
if value:
for path in value.split(','):
self.paths.append(_CamelCaseToSnakeCase(path))
def IsValidForDescriptor(self, message_descriptor):
"""Checks whether the FieldMask is valid for Message Descriptor."""
for path in self.paths:
if not _IsValidPath(message_descriptor, path):
return False
return True
def AllFieldsFromDescriptor(self, message_descriptor):
"""Gets all direct fields of Message Descriptor to FieldMask."""
self.Clear()
for field in message_descriptor.fields:
self.paths.append(field.name)
def CanonicalFormFromMask(self, mask):
"""Converts a FieldMask to the canonical form.
Removes paths that are covered by another path. For example,
"foo.bar" is covered by "foo" and will be removed if "foo"
is also in the FieldMask. Then sorts all paths in alphabetical order.
Args:
mask: The original FieldMask to be converted.
"""
tree = _FieldMaskTree(mask)
tree.ToFieldMask(self)
def Union(self, mask1, mask2):
"""Merges mask1 and mask2 into this FieldMask."""
_CheckFieldMaskMessage(mask1)
_CheckFieldMaskMessage(mask2)
tree = _FieldMaskTree(mask1)
tree.MergeFromFieldMask(mask2)
tree.ToFieldMask(self)
def Intersect(self, mask1, mask2):
"""Intersects mask1 and mask2 into this FieldMask."""
_CheckFieldMaskMessage(mask1)
_CheckFieldMaskMessage(mask2)
tree = _FieldMaskTree(mask1)
intersection = _FieldMaskTree()
for path in mask2.paths:
tree.IntersectPath(path, intersection)
intersection.ToFieldMask(self)
def MergeMessage(
self, source, destination,
replace_message_field=False, replace_repeated_field=False):
"""Merges fields specified in FieldMask from source to destination.
Args:
source: Source message.
destination: The destination message to be merged into.
replace_message_field: Replace message field if True. Merge message
field if False.
replace_repeated_field: Replace repeated field if True. Append
elements of repeated field if False.
"""
tree = _FieldMaskTree(self)
tree.MergeMessage(
source, destination, replace_message_field, replace_repeated_field)
def _IsValidPath(message_descriptor, path):
"""Checks whether the path is valid for Message Descriptor."""
parts = path.split('.')
last = parts.pop()
for name in parts:
field = message_descriptor.fields_by_name.get(name)
if (field is None or
field.label == FieldDescriptor.LABEL_REPEATED or
field.type != FieldDescriptor.TYPE_MESSAGE):
return False
message_descriptor = field.message_type
return last in message_descriptor.fields_by_name
def _CheckFieldMaskMessage(message):
"""Raises ValueError if message is not a FieldMask."""
message_descriptor = message.DESCRIPTOR
if (message_descriptor.name != 'FieldMask' or
message_descriptor.file.name != 'google/protobuf/field_mask.proto'):
raise ValueError('Message {0} is not a FieldMask.'.format(
message_descriptor.full_name))
def _SnakeCaseToCamelCase(path_name):
"""Converts a path name from snake_case to camelCase."""
result = []
after_underscore = False
for c in path_name:
if c.isupper():
raise ValueError(
'Fail to print FieldMask to Json string: Path name '
'{0} must not contain uppercase letters.'.format(path_name))
if after_underscore:
if c.islower():
result.append(c.upper())
after_underscore = False
else:
raise ValueError(
'Fail to print FieldMask to Json string: The '
'character after a "_" must be a lowercase letter '
'in path name {0}.'.format(path_name))
elif c == '_':
after_underscore = True
else:
result += c
if after_underscore:
raise ValueError('Fail to print FieldMask to Json string: Trailing "_" '
'in path name {0}.'.format(path_name))
return ''.join(result)
def _CamelCaseToSnakeCase(path_name):
"""Converts a field name from camelCase to snake_case."""
result = []
for c in path_name:
if c == '_':
raise ValueError('Fail to parse FieldMask: Path name '
'{0} must not contain "_"s.'.format(path_name))
if c.isupper():
result += '_'
result += c.lower()
else:
result += c
return ''.join(result)
class _FieldMaskTree(object):
"""Represents a FieldMask in a tree structure.
For example, given a FieldMask "foo.bar,foo.baz,bar.baz",
the FieldMaskTree will be:
[_root] -+- foo -+- bar
| |
| +- baz
|
+- bar --- baz
In the tree, each leaf node represents a field path.
"""
__slots__ = ('_root',)
def __init__(self, field_mask=None):
"""Initializes the tree by FieldMask."""
self._root = {}
if field_mask:
self.MergeFromFieldMask(field_mask)
def MergeFromFieldMask(self, field_mask):
"""Merges a FieldMask to the tree."""
for path in field_mask.paths:
self.AddPath(path)
def AddPath(self, path):
"""Adds a field path into the tree.
If the field path to add is a sub-path of an existing field path
in the tree (i.e., a leaf node), it means the tree already matches
the given path so nothing will be added to the tree. If the path
matches an existing non-leaf node in the tree, that non-leaf node
will be turned into a leaf node with all its children removed because
the path matches all the node's children. Otherwise, a new path will
be added.
Args:
path: The field path to add.
"""
node = self._root
for name in path.split('.'):
if name not in node:
node[name] = {}
elif not node[name]:
# Pre-existing empty node implies we already have this entire tree.
return
node = node[name]
# Remove any sub-trees we might have had.
node.clear()
def ToFieldMask(self, field_mask):
"""Converts the tree to a FieldMask."""
field_mask.Clear()
_AddFieldPaths(self._root, '', field_mask)
def IntersectPath(self, path, intersection):
"""Calculates the intersection part of a field path with this tree.
Args:
path: The field path to calculates.
intersection: The out tree to record the intersection part.
"""
node = self._root
for name in path.split('.'):
if name not in node:
return
elif not node[name]:
intersection.AddPath(path)
return
node = node[name]
intersection.AddLeafNodes(path, node)
def AddLeafNodes(self, prefix, node):
"""Adds leaf nodes begin with prefix to this tree."""
if not node:
self.AddPath(prefix)
for name in node:
child_path = prefix + '.' + name
self.AddLeafNodes(child_path, node[name])
def MergeMessage(
self, source, destination,
replace_message, replace_repeated):
"""Merge all fields specified by this tree from source to destination."""
_MergeMessage(
self._root, source, destination, replace_message, replace_repeated)
def _StrConvert(value):
"""Converts value to str if it is not."""
# This file is imported by c extension and some methods like ClearField
# requires string for the field name. py2/py3 has different text
# type and may use unicode.
if not isinstance(value, str):
return value.encode('utf-8')
return value
def _MergeMessage(
node, source, destination, replace_message, replace_repeated):
"""Merge all fields specified by a sub-tree from source to destination."""
source_descriptor = source.DESCRIPTOR
for name in node:
child = node[name]
field = source_descriptor.fields_by_name[name]
if field is None:
raise ValueError('Error: Can\'t find field {0} in message {1}.'.format(
name, source_descriptor.full_name))
if child:
# Sub-paths are only allowed for singular message fields.
if (field.label == FieldDescriptor.LABEL_REPEATED or
field.cpp_type != FieldDescriptor.CPPTYPE_MESSAGE):
raise ValueError('Error: Field {0} in message {1} is not a singular '
'message field and cannot have sub-fields.'.format(
name, source_descriptor.full_name))
if source.HasField(name):
_MergeMessage(
child, getattr(source, name), getattr(destination, name),
replace_message, replace_repeated)
continue
if field.label == FieldDescriptor.LABEL_REPEATED:
if replace_repeated:
destination.ClearField(_StrConvert(name))
repeated_source = getattr(source, name)
repeated_destination = getattr(destination, name)
repeated_destination.MergeFrom(repeated_source)
else:
if field.cpp_type == FieldDescriptor.CPPTYPE_MESSAGE:
if replace_message:
destination.ClearField(_StrConvert(name))
if source.HasField(name):
getattr(destination, name).MergeFrom(getattr(source, name))
else:
setattr(destination, name, getattr(source, name))
def _AddFieldPaths(node, prefix, field_mask):
"""Adds the field paths descended from node to field_mask."""
if not node and prefix:
field_mask.paths.append(prefix)
return
for name in sorted(node):
if prefix:
child_path = prefix + '.' + name
else:
child_path = name
_AddFieldPaths(node[name], child_path, field_mask)
def _SetStructValue(struct_value, value):
if value is None:
struct_value.null_value = 0
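Aside: the well_known_types.py hunks above move the FieldMask implementation into the new field_mask module while keeping a backward-compatible alias (FieldMask = field_mask.FieldMask). A one-line sanity check of that aliasing, assuming both internal modules are importable:

from google.protobuf.internal import field_mask
from google.protobuf.internal import well_known_types

# The moved class and the legacy re-export refer to the same object.
assert well_known_types.FieldMask is field_mask.FieldMask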

View File

@@ -38,15 +38,11 @@ import unittest
from google.protobuf import any_pb2
from google.protobuf import duration_pb2
from google.protobuf import field_mask_pb2
from google.protobuf import struct_pb2
from google.protobuf import timestamp_pb2
from google.protobuf import map_unittest_pb2
from google.protobuf import unittest_pb2
from google.protobuf.internal import any_test_pb2
from google.protobuf.internal import test_util
from google.protobuf.internal import well_known_types
from google.protobuf import descriptor
from google.protobuf import text_format
from google.protobuf.internal import _parameterized
@@ -390,362 +386,6 @@ class TimeUtilTest(TimeUtilTestBase):
message.ToJsonString)
class FieldMaskTest(unittest.TestCase):
def testStringFormat(self):
mask = field_mask_pb2.FieldMask()
self.assertEqual('', mask.ToJsonString())
mask.paths.append('foo')
self.assertEqual('foo', mask.ToJsonString())
mask.paths.append('bar')
self.assertEqual('foo,bar', mask.ToJsonString())
mask.FromJsonString('')
self.assertEqual('', mask.ToJsonString())
mask.FromJsonString('foo')
self.assertEqual(['foo'], mask.paths)
mask.FromJsonString('foo,bar')
self.assertEqual(['foo', 'bar'], mask.paths)
# Test camel case
mask.Clear()
mask.paths.append('foo_bar')
self.assertEqual('fooBar', mask.ToJsonString())
mask.paths.append('bar_quz')
self.assertEqual('fooBar,barQuz', mask.ToJsonString())
mask.FromJsonString('')
self.assertEqual('', mask.ToJsonString())
self.assertEqual([], mask.paths)
mask.FromJsonString('fooBar')
self.assertEqual(['foo_bar'], mask.paths)
mask.FromJsonString('fooBar,barQuz')
self.assertEqual(['foo_bar', 'bar_quz'], mask.paths)
def testDescriptorToFieldMask(self):
mask = field_mask_pb2.FieldMask()
msg_descriptor = unittest_pb2.TestAllTypes.DESCRIPTOR
mask.AllFieldsFromDescriptor(msg_descriptor)
self.assertEqual(76, len(mask.paths))
self.assertTrue(mask.IsValidForDescriptor(msg_descriptor))
for field in msg_descriptor.fields:
self.assertTrue(field.name in mask.paths)
def testIsValidForDescriptor(self):
msg_descriptor = unittest_pb2.TestAllTypes.DESCRIPTOR
# Empty mask
mask = field_mask_pb2.FieldMask()
self.assertTrue(mask.IsValidForDescriptor(msg_descriptor))
# All fields from descriptor
mask.AllFieldsFromDescriptor(msg_descriptor)
self.assertTrue(mask.IsValidForDescriptor(msg_descriptor))
# Child under optional message
mask.paths.append('optional_nested_message.bb')
self.assertTrue(mask.IsValidForDescriptor(msg_descriptor))
# Repeated field is only allowed in the last position of path
mask.paths.append('repeated_nested_message.bb')
self.assertFalse(mask.IsValidForDescriptor(msg_descriptor))
# Invalid top level field
mask = field_mask_pb2.FieldMask()
mask.paths.append('xxx')
self.assertFalse(mask.IsValidForDescriptor(msg_descriptor))
# Invalid field in root
mask = field_mask_pb2.FieldMask()
mask.paths.append('xxx.zzz')
self.assertFalse(mask.IsValidForDescriptor(msg_descriptor))
# Invalid field in internal node
mask = field_mask_pb2.FieldMask()
mask.paths.append('optional_nested_message.xxx.zzz')
self.assertFalse(mask.IsValidForDescriptor(msg_descriptor))
# Invalid field in leaf
mask = field_mask_pb2.FieldMask()
mask.paths.append('optional_nested_message.xxx')
self.assertFalse(mask.IsValidForDescriptor(msg_descriptor))
def testCanonicalFrom(self):
mask = field_mask_pb2.FieldMask()
out_mask = field_mask_pb2.FieldMask()
# Paths will be sorted.
mask.FromJsonString('baz.quz,bar,foo')
out_mask.CanonicalFormFromMask(mask)
self.assertEqual('bar,baz.quz,foo', out_mask.ToJsonString())
# Duplicated paths will be removed.
mask.FromJsonString('foo,bar,foo')
out_mask.CanonicalFormFromMask(mask)
self.assertEqual('bar,foo', out_mask.ToJsonString())
# Sub-paths of other paths will be removed.
mask.FromJsonString('foo.b1,bar.b1,foo.b2,bar')
out_mask.CanonicalFormFromMask(mask)
self.assertEqual('bar,foo.b1,foo.b2', out_mask.ToJsonString())
# Test more deeply nested cases.
mask.FromJsonString(
'foo.bar.baz1,foo.bar.baz2.quz,foo.bar.baz2')
out_mask.CanonicalFormFromMask(mask)
self.assertEqual('foo.bar.baz1,foo.bar.baz2',
out_mask.ToJsonString())
mask.FromJsonString(
'foo.bar.baz1,foo.bar.baz2,foo.bar.baz2.quz')
out_mask.CanonicalFormFromMask(mask)
self.assertEqual('foo.bar.baz1,foo.bar.baz2',
out_mask.ToJsonString())
mask.FromJsonString(
'foo.bar.baz1,foo.bar.baz2,foo.bar.baz2.quz,foo.bar')
out_mask.CanonicalFormFromMask(mask)
self.assertEqual('foo.bar', out_mask.ToJsonString())
mask.FromJsonString(
'foo.bar.baz1,foo.bar.baz2,foo.bar.baz2.quz,foo')
out_mask.CanonicalFormFromMask(mask)
self.assertEqual('foo', out_mask.ToJsonString())
def testUnion(self):
mask1 = field_mask_pb2.FieldMask()
mask2 = field_mask_pb2.FieldMask()
out_mask = field_mask_pb2.FieldMask()
mask1.FromJsonString('foo,baz')
mask2.FromJsonString('bar,quz')
out_mask.Union(mask1, mask2)
self.assertEqual('bar,baz,foo,quz', out_mask.ToJsonString())
# Overlap with duplicated paths.
mask1.FromJsonString('foo,baz.bb')
mask2.FromJsonString('baz.bb,quz')
out_mask.Union(mask1, mask2)
self.assertEqual('baz.bb,foo,quz', out_mask.ToJsonString())
# Overlap with paths covering some other paths.
mask1.FromJsonString('foo.bar.baz,quz')
mask2.FromJsonString('foo.bar,bar')
out_mask.Union(mask1, mask2)
self.assertEqual('bar,foo.bar,quz', out_mask.ToJsonString())
src = unittest_pb2.TestAllTypes()
with self.assertRaises(ValueError):
out_mask.Union(src, mask2)
def testIntersect(self):
mask1 = field_mask_pb2.FieldMask()
mask2 = field_mask_pb2.FieldMask()
out_mask = field_mask_pb2.FieldMask()
# Test cases without overlapping.
mask1.FromJsonString('foo,baz')
mask2.FromJsonString('bar,quz')
out_mask.Intersect(mask1, mask2)
self.assertEqual('', out_mask.ToJsonString())
self.assertEqual(len(out_mask.paths), 0)
self.assertEqual(out_mask.paths, [])
# Overlap with duplicated paths.
mask1.FromJsonString('foo,baz.bb')
mask2.FromJsonString('baz.bb,quz')
out_mask.Intersect(mask1, mask2)
self.assertEqual('baz.bb', out_mask.ToJsonString())
# Overlap with paths covering some other paths.
mask1.FromJsonString('foo.bar.baz,quz')
mask2.FromJsonString('foo.bar,bar')
out_mask.Intersect(mask1, mask2)
self.assertEqual('foo.bar.baz', out_mask.ToJsonString())
mask1.FromJsonString('foo.bar,bar')
mask2.FromJsonString('foo.bar.baz,quz')
out_mask.Intersect(mask1, mask2)
self.assertEqual('foo.bar.baz', out_mask.ToJsonString())
# Intersect '' with ''
mask1.Clear()
mask2.Clear()
mask1.paths.append('')
mask2.paths.append('')
self.assertEqual(mask1.paths, [''])
self.assertEqual('', mask1.ToJsonString())
out_mask.Intersect(mask1, mask2)
self.assertEqual(out_mask.paths, [])
def testMergeMessageWithoutMapFields(self):
# Test merge one field.
src = unittest_pb2.TestAllTypes()
test_util.SetAllFields(src)
for field in src.DESCRIPTOR.fields:
if field.containing_oneof:
continue
field_name = field.name
dst = unittest_pb2.TestAllTypes()
# Only set one path to mask.
mask = field_mask_pb2.FieldMask()
mask.paths.append(field_name)
mask.MergeMessage(src, dst)
# The expected result message.
msg = unittest_pb2.TestAllTypes()
if field.label == descriptor.FieldDescriptor.LABEL_REPEATED:
repeated_src = getattr(src, field_name)
repeated_msg = getattr(msg, field_name)
if field.cpp_type == descriptor.FieldDescriptor.CPPTYPE_MESSAGE:
for item in repeated_src:
repeated_msg.add().CopyFrom(item)
else:
repeated_msg.extend(repeated_src)
elif field.cpp_type == descriptor.FieldDescriptor.CPPTYPE_MESSAGE:
getattr(msg, field_name).CopyFrom(getattr(src, field_name))
else:
setattr(msg, field_name, getattr(src, field_name))
# Only field specified in mask is merged.
self.assertEqual(msg, dst)
# Test merge nested fields.
nested_src = unittest_pb2.NestedTestAllTypes()
nested_dst = unittest_pb2.NestedTestAllTypes()
nested_src.child.payload.optional_int32 = 1234
nested_src.child.child.payload.optional_int32 = 5678
mask = field_mask_pb2.FieldMask()
mask.FromJsonString('child.payload')
mask.MergeMessage(nested_src, nested_dst)
self.assertEqual(1234, nested_dst.child.payload.optional_int32)
self.assertEqual(0, nested_dst.child.child.payload.optional_int32)
mask.FromJsonString('child.child.payload')
mask.MergeMessage(nested_src, nested_dst)
self.assertEqual(1234, nested_dst.child.payload.optional_int32)
self.assertEqual(5678, nested_dst.child.child.payload.optional_int32)
nested_dst.Clear()
mask.FromJsonString('child.child.payload')
mask.MergeMessage(nested_src, nested_dst)
self.assertEqual(0, nested_dst.child.payload.optional_int32)
self.assertEqual(5678, nested_dst.child.child.payload.optional_int32)
nested_dst.Clear()
mask.FromJsonString('child')
mask.MergeMessage(nested_src, nested_dst)
self.assertEqual(1234, nested_dst.child.payload.optional_int32)
self.assertEqual(5678, nested_dst.child.child.payload.optional_int32)
# Test MergeOptions.
nested_dst.Clear()
nested_dst.child.payload.optional_int64 = 4321
# Message fields will be merged by default.
mask.FromJsonString('child.payload')
mask.MergeMessage(nested_src, nested_dst)
self.assertEqual(1234, nested_dst.child.payload.optional_int32)
self.assertEqual(4321, nested_dst.child.payload.optional_int64)
# Change the behavior to replace message fields.
mask.FromJsonString('child.payload')
mask.MergeMessage(nested_src, nested_dst, True, False)
self.assertEqual(1234, nested_dst.child.payload.optional_int32)
self.assertEqual(0, nested_dst.child.payload.optional_int64)
# By default, fields missing in source are not cleared in destination.
nested_dst.payload.optional_int32 = 1234
self.assertTrue(nested_dst.HasField('payload'))
mask.FromJsonString('payload')
mask.MergeMessage(nested_src, nested_dst)
self.assertTrue(nested_dst.HasField('payload'))
# But they are cleared when replacing message fields.
nested_dst.Clear()
nested_dst.payload.optional_int32 = 1234
mask.FromJsonString('payload')
mask.MergeMessage(nested_src, nested_dst, True, False)
self.assertFalse(nested_dst.HasField('payload'))
nested_src.payload.repeated_int32.append(1234)
nested_dst.payload.repeated_int32.append(5678)
# Repeated fields will be appended by default.
mask.FromJsonString('payload.repeatedInt32')
mask.MergeMessage(nested_src, nested_dst)
self.assertEqual(2, len(nested_dst.payload.repeated_int32))
self.assertEqual(5678, nested_dst.payload.repeated_int32[0])
self.assertEqual(1234, nested_dst.payload.repeated_int32[1])
# Change the behavior to replace repeated fields.
mask.FromJsonString('payload.repeatedInt32')
mask.MergeMessage(nested_src, nested_dst, False, True)
self.assertEqual(1, len(nested_dst.payload.repeated_int32))
self.assertEqual(1234, nested_dst.payload.repeated_int32[0])
# Test Merge oneof field.
new_msg = unittest_pb2.TestOneof2()
dst = unittest_pb2.TestOneof2()
dst.foo_message.moo_int = 1
mask = field_mask_pb2.FieldMask()
mask.FromJsonString('fooMessage,fooLazyMessage.mooInt')
mask.MergeMessage(new_msg, dst)
self.assertTrue(dst.HasField('foo_message'))
self.assertFalse(dst.HasField('foo_lazy_message'))
def testMergeMessageWithMapField(self):
empty_map = map_unittest_pb2.TestRecursiveMapMessage()
src_level_2 = map_unittest_pb2.TestRecursiveMapMessage()
src_level_2.a['src level 2'].CopyFrom(empty_map)
src = map_unittest_pb2.TestRecursiveMapMessage()
src.a['common key'].CopyFrom(src_level_2)
src.a['src level 1'].CopyFrom(src_level_2)
dst_level_2 = map_unittest_pb2.TestRecursiveMapMessage()
dst_level_2.a['dst level 2'].CopyFrom(empty_map)
dst = map_unittest_pb2.TestRecursiveMapMessage()
dst.a['common key'].CopyFrom(dst_level_2)
dst.a['dst level 1'].CopyFrom(empty_map)
mask = field_mask_pb2.FieldMask()
mask.FromJsonString('a')
mask.MergeMessage(src, dst)
# map from dst is replaced with map from src.
self.assertEqual(dst.a['common key'], src_level_2)
self.assertEqual(dst.a['src level 1'], src_level_2)
self.assertEqual(dst.a['dst level 1'], empty_map)
def testMergeErrors(self):
src = unittest_pb2.TestAllTypes()
dst = unittest_pb2.TestAllTypes()
mask = field_mask_pb2.FieldMask()
test_util.SetAllFields(src)
mask.FromJsonString('optionalInt32.field')
with self.assertRaises(ValueError) as e:
mask.MergeMessage(src, dst)
self.assertEqual('Error: Field optional_int32 in message '
'protobuf_unittest.TestAllTypes is not a singular '
'message field and cannot have sub-fields.',
str(e.exception))
def testSnakeCaseToCamelCase(self):
self.assertEqual('fooBar',
well_known_types._SnakeCaseToCamelCase('foo_bar'))
self.assertEqual('FooBar',
well_known_types._SnakeCaseToCamelCase('_foo_bar'))
self.assertEqual('foo3Bar',
well_known_types._SnakeCaseToCamelCase('foo3_bar'))
# No uppercase letter is allowed.
self.assertRaisesRegex(
ValueError,
'Fail to print FieldMask to Json string: Path name Foo must '
'not contain uppercase letters.',
well_known_types._SnakeCaseToCamelCase, 'Foo')
# Any character after a "_" must be a lowercase letter.
# 1. "_" cannot be followed by another "_".
# 2. "_" cannot be followed by a digit.
# 3. "_" cannot appear as the last character.
self.assertRaisesRegex(
ValueError,
'Fail to print FieldMask to Json string: The character after a '
'"_" must be a lowercase letter in path name foo__bar.',
well_known_types._SnakeCaseToCamelCase, 'foo__bar')
self.assertRaisesRegex(
ValueError,
'Fail to print FieldMask to Json string: The character after a '
'"_" must be a lowercase letter in path name foo_3bar.',
well_known_types._SnakeCaseToCamelCase, 'foo_3bar')
self.assertRaisesRegex(
ValueError,
'Fail to print FieldMask to Json string: Trailing "_" in path '
'name foo_bar_.', well_known_types._SnakeCaseToCamelCase, 'foo_bar_')
def testCamelCaseToSnakeCase(self):
self.assertEqual('foo_bar',
well_known_types._CamelCaseToSnakeCase('fooBar'))
self.assertEqual('_foo_bar',
well_known_types._CamelCaseToSnakeCase('FooBar'))
self.assertEqual('foo3_bar',
well_known_types._CamelCaseToSnakeCase('foo3Bar'))
self.assertRaisesRegex(
ValueError,
'Fail to parse FieldMask: Path name foo_bar must not contain "_"s.',
well_known_types._CamelCaseToSnakeCase, 'foo_bar')
class StructTest(unittest.TestCase):
def testStruct(self):


@ -1,143 +0,0 @@
#!/usr/bin/python2.4
#
# Copyright 2008 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# This file is used for testing. The original is at:
# http://code.google.com/p/pymox/
import inspect
class StubOutForTesting:
"""Sample Usage:
You want os.path.exists() to always return true during testing.
stubs = StubOutForTesting()
stubs.Set(os.path, 'exists', lambda x: 1)
...
stubs.UnsetAll()
The above changes os.path.exists into a lambda that returns 1. Once
the ... part of the code finishes, the UnsetAll() looks up the old value
of os.path.exists and restores it.
"""
def __init__(self):
self.cache = []
self.stubs = []
def __del__(self):
self.SmartUnsetAll()
self.UnsetAll()
def SmartSet(self, obj, attr_name, new_attr):
"""Replace obj.attr_name with new_attr. This method is smart and works
at the module, class, and instance level while preserving proper
inheritance. It will not stub out C types however unless that has been
explicitly allowed by the type.
This method supports the case where attr_name is a staticmethod or a
classmethod of obj.
Notes:
- If obj is an instance, then it is its class that will actually be
stubbed. Note that the method Set() does not do that: if obj is
an instance, it (and not its class) will be stubbed.
- The stubbing is using the builtin getattr and setattr. So, the __get__
and __set__ will be called when stubbing (TODO: A better idea would
probably be to manipulate obj.__dict__ instead of getattr() and
setattr()).
Raises AttributeError if the attribute cannot be found.
"""
if (inspect.ismodule(obj) or
(not inspect.isclass(obj) and obj.__dict__.has_key(attr_name))):
orig_obj = obj
orig_attr = getattr(obj, attr_name)
else:
if not inspect.isclass(obj):
mro = list(inspect.getmro(obj.__class__))
else:
mro = list(inspect.getmro(obj))
mro.reverse()
orig_attr = None
for cls in mro:
try:
orig_obj = cls
orig_attr = getattr(obj, attr_name)
except AttributeError:
continue
if orig_attr is None:
raise AttributeError("Attribute not found.")
# Calling getattr() on a staticmethod transforms it to a 'normal' function.
# We need to ensure that we put it back as a staticmethod.
old_attribute = obj.__dict__.get(attr_name)
if old_attribute is not None and isinstance(old_attribute, staticmethod):
orig_attr = staticmethod(orig_attr)
self.stubs.append((orig_obj, attr_name, orig_attr))
setattr(orig_obj, attr_name, new_attr)
def SmartUnsetAll(self):
"""Reverses all the SmartSet() calls, restoring things to their original
definition. Its okay to call SmartUnsetAll() repeatedly, as later calls
have no effect if no SmartSet() calls have been made.
"""
self.stubs.reverse()
for args in self.stubs:
setattr(*args)
self.stubs = []
def Set(self, parent, child_name, new_child):
"""Replace child_name's old definition with new_child, in the context
of the given parent. The parent could be a module when the child is a
function at module scope. Or the parent could be a class when a class'
method is being replaced. The named child is set to new_child, while
the prior definition is saved away for later, when UnsetAll() is called.
This method supports the case where child_name is a staticmethod or a
classmethod of parent.
"""
old_child = getattr(parent, child_name)
old_attribute = parent.__dict__.get(child_name)
if old_attribute is not None and isinstance(old_attribute, staticmethod):
old_child = staticmethod(old_child)
self.cache.append((parent, old_child, child_name))
setattr(parent, child_name, new_child)
def UnsetAll(self):
"""Reverses all the Set() calls, restoring things to their original
definition. Its okay to call UnsetAll() repeatedly, as later calls have
no effect if no Set() calls have been made.
"""
# Undo calls to Set() in reverse order, in case Set() was called on the
# same arguments repeatedly (want the original call to be last one undone)
self.cache.reverse()
for (parent, old_child, child_name) in self.cache:
setattr(parent, child_name, old_child)
self.cache = []
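For context, a short usage sketch of the helper this commit removes (the stubbed target, os.path.exists, is an arbitrary example and not part of the original tests):

import os.path

stubs = StubOutForTesting()
stubs.Set(os.path, 'exists', lambda path: True)   # patch the attribute on the exact object passed in
assert os.path.exists('/no/such/path')
stubs.UnsetAll()                                   # restores originals in reverse order of the Set() calls

stubs.SmartSet(os.path, 'exists', lambda path: False)  # module/class/instance aware variant
assert not os.path.exists(os.sep)
stubs.SmartUnsetAll()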


@ -24,6 +24,8 @@ set(libprotobuf_srcs
${protobuf_SOURCE_DIR}/src/google/protobuf/any.cc
${protobuf_SOURCE_DIR}/src/google/protobuf/any_lite.cc
${protobuf_SOURCE_DIR}/src/google/protobuf/arena.cc
${protobuf_SOURCE_DIR}/src/google/protobuf/arena_align.cc
${protobuf_SOURCE_DIR}/src/google/protobuf/arena_config.cc
${protobuf_SOURCE_DIR}/src/google/protobuf/arenastring.cc
${protobuf_SOURCE_DIR}/src/google/protobuf/arenaz_sampler.cc
${protobuf_SOURCE_DIR}/src/google/protobuf/compiler/importer.cc
@ -53,6 +55,13 @@ set(libprotobuf_srcs
${protobuf_SOURCE_DIR}/src/google/protobuf/io/zero_copy_stream.cc
${protobuf_SOURCE_DIR}/src/google/protobuf/io/zero_copy_stream_impl.cc
${protobuf_SOURCE_DIR}/src/google/protobuf/io/zero_copy_stream_impl_lite.cc
${protobuf_SOURCE_DIR}/src/google/protobuf/json/internal/lexer.cc
${protobuf_SOURCE_DIR}/src/google/protobuf/json/internal/message_path.cc
${protobuf_SOURCE_DIR}/src/google/protobuf/json/internal/parser.cc
${protobuf_SOURCE_DIR}/src/google/protobuf/json/internal/unparser.cc
${protobuf_SOURCE_DIR}/src/google/protobuf/json/internal/untyped_message.cc
${protobuf_SOURCE_DIR}/src/google/protobuf/json/internal/writer.cc
${protobuf_SOURCE_DIR}/src/google/protobuf/json/internal/zero_copy_buffered_stream.cc
${protobuf_SOURCE_DIR}/src/google/protobuf/json/json.cc
${protobuf_SOURCE_DIR}/src/google/protobuf/map.cc
${protobuf_SOURCE_DIR}/src/google/protobuf/map_field.cc
@ -93,6 +102,9 @@ set(libprotobuf_hdrs
${protobuf_SOURCE_DIR}/src/google/protobuf/wrappers.pb.h
${protobuf_SOURCE_DIR}/src/google/protobuf/any.h
${protobuf_SOURCE_DIR}/src/google/protobuf/arena.h
${protobuf_SOURCE_DIR}/src/google/protobuf/arena_align.h
${protobuf_SOURCE_DIR}/src/google/protobuf/arena_allocation_policy.h
${protobuf_SOURCE_DIR}/src/google/protobuf/arena_cleanup.h
${protobuf_SOURCE_DIR}/src/google/protobuf/arena_config.h
${protobuf_SOURCE_DIR}/src/google/protobuf/arena_impl.h
${protobuf_SOURCE_DIR}/src/google/protobuf/arenastring.h
@ -129,6 +141,16 @@ set(libprotobuf_hdrs
${protobuf_SOURCE_DIR}/src/google/protobuf/io/zero_copy_stream.h
${protobuf_SOURCE_DIR}/src/google/protobuf/io/zero_copy_stream_impl.h
${protobuf_SOURCE_DIR}/src/google/protobuf/io/zero_copy_stream_impl_lite.h
${protobuf_SOURCE_DIR}/src/google/protobuf/json/internal/descriptor_traits.h
${protobuf_SOURCE_DIR}/src/google/protobuf/json/internal/lexer.h
${protobuf_SOURCE_DIR}/src/google/protobuf/json/internal/message_path.h
${protobuf_SOURCE_DIR}/src/google/protobuf/json/internal/parser.h
${protobuf_SOURCE_DIR}/src/google/protobuf/json/internal/parser_traits.h
${protobuf_SOURCE_DIR}/src/google/protobuf/json/internal/unparser.h
${protobuf_SOURCE_DIR}/src/google/protobuf/json/internal/unparser_traits.h
${protobuf_SOURCE_DIR}/src/google/protobuf/json/internal/untyped_message.h
${protobuf_SOURCE_DIR}/src/google/protobuf/json/internal/writer.h
${protobuf_SOURCE_DIR}/src/google/protobuf/json/internal/zero_copy_buffered_stream.h
${protobuf_SOURCE_DIR}/src/google/protobuf/json/json.h
${protobuf_SOURCE_DIR}/src/google/protobuf/map.h
${protobuf_SOURCE_DIR}/src/google/protobuf/map_entry.h
@ -159,7 +181,6 @@ set(libprotobuf_hdrs
${protobuf_SOURCE_DIR}/src/google/protobuf/stubs/platform_macros.h
${protobuf_SOURCE_DIR}/src/google/protobuf/stubs/port.h
${protobuf_SOURCE_DIR}/src/google/protobuf/stubs/status_macros.h
${protobuf_SOURCE_DIR}/src/google/protobuf/stubs/stl_util.h
${protobuf_SOURCE_DIR}/src/google/protobuf/stubs/strutil.h
${protobuf_SOURCE_DIR}/src/google/protobuf/text_format.h
${protobuf_SOURCE_DIR}/src/google/protobuf/unknown_field_set.h
@ -179,6 +200,8 @@ set(libprotobuf_hdrs
set(libprotobuf_lite_srcs
${protobuf_SOURCE_DIR}/src/google/protobuf/any_lite.cc
${protobuf_SOURCE_DIR}/src/google/protobuf/arena.cc
${protobuf_SOURCE_DIR}/src/google/protobuf/arena_align.cc
${protobuf_SOURCE_DIR}/src/google/protobuf/arena_config.cc
${protobuf_SOURCE_DIR}/src/google/protobuf/arenastring.cc
${protobuf_SOURCE_DIR}/src/google/protobuf/arenaz_sampler.cc
${protobuf_SOURCE_DIR}/src/google/protobuf/extension_set.cc
@ -208,6 +231,9 @@ set(libprotobuf_lite_srcs
set(libprotobuf_lite_hdrs
${protobuf_SOURCE_DIR}/src/google/protobuf/any.h
${protobuf_SOURCE_DIR}/src/google/protobuf/arena.h
${protobuf_SOURCE_DIR}/src/google/protobuf/arena_align.h
${protobuf_SOURCE_DIR}/src/google/protobuf/arena_allocation_policy.h
${protobuf_SOURCE_DIR}/src/google/protobuf/arena_cleanup.h
${protobuf_SOURCE_DIR}/src/google/protobuf/arena_config.h
${protobuf_SOURCE_DIR}/src/google/protobuf/arena_impl.h
${protobuf_SOURCE_DIR}/src/google/protobuf/arenastring.h
@ -246,7 +272,6 @@ set(libprotobuf_lite_hdrs
${protobuf_SOURCE_DIR}/src/google/protobuf/stubs/platform_macros.h
${protobuf_SOURCE_DIR}/src/google/protobuf/stubs/port.h
${protobuf_SOURCE_DIR}/src/google/protobuf/stubs/status_macros.h
${protobuf_SOURCE_DIR}/src/google/protobuf/stubs/stl_util.h
${protobuf_SOURCE_DIR}/src/google/protobuf/stubs/strutil.h
${protobuf_SOURCE_DIR}/src/google/protobuf/wire_format_lite.h
)


@ -604,6 +604,7 @@ proto_library(
visibility = [
"//:__pkg__",
"//conformance:__pkg__",
"@upb//:__subpackages__",
],
)
@ -614,6 +615,7 @@ proto_library(
visibility = [
"//:__pkg__",
"//conformance:__pkg__",
"@upb//:__subpackages__",
],
deps = [
":any_proto",


@ -93,7 +93,6 @@ option objc_class_prefix = "GPB";
// in the type URL, for example "foo.bar.com/x/y.z" will yield type
// name "y.z".
//
//
// JSON
//
// The JSON representation of an `Any` value uses the regular


@ -82,7 +82,6 @@ message Api {
// be omitted. Zero major versions must only be used for
// experimental, non-GA interfaces.
//
//
string version = 4;
// Source context for the protocol buffer service represented by this


@ -39,7 +39,6 @@
#include "absl/synchronization/mutex.h"
#include "google/protobuf/message_lite.h"
#include "google/protobuf/parse_context.h"
#include "google/protobuf/stubs/stl_util.h"
// clang-format off
#include "google/protobuf/port_def.inc"


@ -89,7 +89,6 @@
#include "google/protobuf/io/printer.h"
#include "google/protobuf/io/zero_copy_stream_impl.h"
#include "google/protobuf/text_format.h"
#include "google/protobuf/stubs/stl_util.h"
// Must be included last.
@ -872,7 +871,7 @@ CommandLineInterface::MemoryOutputStream::~MemoryOutputStream() {
// Now copy in the data.
std::string::size_type data_pos = 0;
char* target_ptr = ::google::protobuf::string_as_array(target) + pos;
char* target_ptr = &(*target)[pos];
while (data_pos < data_.size()) {
// Copy indent.
memcpy(target_ptr, indent_.data(), indent_.size());
@ -889,8 +888,7 @@ CommandLineInterface::MemoryOutputStream::~MemoryOutputStream() {
}
UpdateMetadata(data_, pos, data_.size() + indent_size, indent_.size());
GOOGLE_CHECK_EQ(target_ptr,
::google::protobuf::string_as_array(target) + pos + data_.size() + indent_size);
GOOGLE_CHECK_EQ(target_ptr, &(*target)[pos] + data_.size() + indent_size);
}
}
}


@ -56,7 +56,6 @@
#include "absl/container/flat_hash_map.h"
#include "absl/strings/substitute.h"
#include "google/protobuf/compiler/cpp/helpers.h"
#include "google/protobuf/stubs/stl_util.h"
namespace google {
namespace protobuf {


@ -1,3 +1,33 @@
// Protocol Buffers - Google's data interchange format
// Copyright 2008 Google Inc. All rights reserved.
// https://developers.google.com/protocol-buffers/
//
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
// * Redistributions of source code must retain the above copyright
// notice, this list of conditions and the following disclaimer.
// * Redistributions in binary form must reproduce the above
// copyright notice, this list of conditions and the following disclaimer
// in the documentation and/or other materials provided with the
// distribution.
// * Neither the name of Google Inc. nor the names of its
// contributors may be used to endorse or promote products derived from
// this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#ifndef GOOGLE_PROTOBUF_COMPILER_CPP_CPP_GENERATOR_H_
#define GOOGLE_PROTOBUF_COMPILER_CPP_CPP_GENERATOR_H_


@ -271,6 +271,12 @@ struct NumToEntryTable {
static NumToEntryTable MakeNumToEntryTable(
const std::vector<const FieldDescriptor*>& field_descriptors);
static int FieldNameDataSize(const std::vector<uint8_t>& data) {
// We add a +1 here to allow for a NUL termination character. It makes the
// codegen nicer.
return data.empty() ? 0 : data.size() + 1;
}
void ParseFunctionGenerator::GenerateDataDecls(io::Printer* printer) {
if (!should_generate_tctable()) {
return;
@ -288,9 +294,7 @@ void ParseFunctionGenerator::GenerateDataDecls(io::Printer* printer) {
"TcParseTable<$1$, $2$, $3$, $4$, $5$> _table_;\n",
tc_table_info_->table_size_log2, ordered_fields_.size(),
tc_table_info_->aux_entries.size(),
// We add a +1 here to allow for a NUL termination character. It makes the
// codegen nicer.
tc_table_info_->field_name_data.size() + 1,
FieldNameDataSize(tc_table_info_->field_name_data),
field_num_to_entry_table.size16());
if (should_generate_guarded_tctable()) {
format.Outdent();
@ -450,7 +454,7 @@ void ParseFunctionGenerator::GenerateTailCallTable(Formatter& format) {
"{\n",
tc_table_info_->table_size_log2, ordered_fields_.size(),
tc_table_info_->aux_entries.size(),
tc_table_info_->field_name_data.size() + 1, // See above for why +1
FieldNameDataSize(tc_table_info_->field_name_data),
field_num_to_entry_table.size16());
{
auto table_scope = format.ScopedIndent();
@ -608,12 +612,12 @@ void ParseFunctionGenerator::GenerateTailCallTable(Formatter& format) {
format("}}, {{\n");
}
} // ordered_fields_.empty()
{
// field_names[]
auto field_name_scope = format.ScopedIndent();
GenerateFieldNames(format);
}
format("}},\n");
{
// field_names[]
auto field_name_scope = format.ScopedIndent();
GenerateFieldNames(format);
}
format("}},\n");
}
format("};\n\n"); // _table_
}
@ -832,6 +836,11 @@ void ParseFunctionGenerator::GenerateFieldEntries(Formatter& format) {
}
void ParseFunctionGenerator::GenerateFieldNames(Formatter& format) {
if (tc_table_info_->field_name_data.empty()) {
// No names to output.
return;
}
// We could just output the bytes directly, but we want it to look better than
// that in the source code. Also, it is more efficient for compilation time to
// have a literal string than an initializer list of chars.
@ -854,8 +863,8 @@ void ParseFunctionGenerator::GenerateFieldNames(Formatter& format) {
format("\"\n");
// Then print each name in a line of its own
for (; sizes < sizes_end && sizes[0] != 0; p += *sizes++) {
format("\"$1$\"\n", std::string(p, p + *sizes));
for (; sizes < sizes_end; p += *sizes++) {
if (*sizes != 0) format("\"$1$\"\n", std::string(p, p + *sizes));
}
}
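A rough Python model of the rewritten loop, assuming (as the surrounding code reads) that the field-name blob is a run of per-entry lengths followed by the concatenated names; the point of the change is that a zero-length entry is now skipped rather than terminating the output early:

def emit_field_name_lines(sizes, names_blob):
  lines, pos = [], 0
  for size in sizes:
    if size:
      lines.append('"%s"' % names_blob[pos:pos + size])
    pos += size
  return lines

assert emit_field_name_lines([7, 0, 3], 'messagefoo') == ['"message"', '"foo"']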


@ -76,7 +76,6 @@
#include "google/protobuf/io/coded_stream.h"
#include "google/protobuf/io/zero_copy_stream_impl.h"
#include "google/protobuf/test_util2.h"
#include "google/protobuf/stubs/stl_util.h"
// Must be included last.
#include "google/protobuf/port_def.inc"
@ -664,7 +663,7 @@ TEST(GENERATED_MESSAGE_TEST_NAME, SerializationToArray) {
TestUtil::SetAllFields(&message1);
int size = message1.ByteSizeLong();
data.resize(size);
uint8_t* start = reinterpret_cast<uint8_t*>(::google::protobuf::string_as_array(&data));
uint8_t* start = reinterpret_cast<uint8_t*>(&data[0]);
uint8_t* end = message1.SerializeWithCachedSizesToArray(start);
EXPECT_EQ(size, end - start);
EXPECT_TRUE(message2.ParseFromString(data));
@ -678,8 +677,7 @@ TEST(GENERATED_MESSAGE_TEST_NAME, PackedFieldsSerializationToArray) {
TestUtil::SetPackedFields(&packed_message1);
int packed_size = packed_message1.ByteSizeLong();
packed_data.resize(packed_size);
uint8_t* start =
reinterpret_cast<uint8_t*>(::google::protobuf::string_as_array(&packed_data));
uint8_t* start = reinterpret_cast<uint8_t*>(&packed_data[0]);
uint8_t* end = packed_message1.SerializeWithCachedSizesToArray(start);
EXPECT_EQ(packed_size, end - start);
EXPECT_TRUE(packed_message2.ParseFromString(packed_data));
@ -696,7 +694,7 @@ TEST(GENERATED_MESSAGE_TEST_NAME, SerializationToStream) {
data.resize(size);
{
// Allow the output stream to buffer only one byte at a time.
io::ArrayOutputStream array_stream(::google::protobuf::string_as_array(&data), size, 1);
io::ArrayOutputStream array_stream(&data[0], size, 1);
io::CodedOutputStream output_stream(&array_stream);
message1.SerializeWithCachedSizes(&output_stream);
EXPECT_FALSE(output_stream.HadError());
@ -715,7 +713,7 @@ TEST(GENERATED_MESSAGE_TEST_NAME, PackedFieldsSerializationToStream) {
data.resize(size);
{
// Allow the output stream to buffer only one byte at a time.
io::ArrayOutputStream array_stream(::google::protobuf::string_as_array(&data), size, 1);
io::ArrayOutputStream array_stream(&data[0], size, 1);
io::CodedOutputStream output_stream(&array_stream);
message1.SerializeWithCachedSizes(&output_stream);
EXPECT_FALSE(output_stream.HadError());
@ -1841,7 +1839,7 @@ std::string data;
message1.set_foo_int(123);
int size = message1.ByteSizeLong();
data.resize(size);
uint8_t* start = reinterpret_cast<uint8_t*>(::google::protobuf::string_as_array(&data));
uint8_t* start = reinterpret_cast<uint8_t*>(&data[0]);
uint8_t* end = message1.SerializeWithCachedSizesToArray(start);
EXPECT_EQ(size, end - start);
EXPECT_TRUE(message2.ParseFromString(data));
@ -1855,7 +1853,7 @@ EXPECT_EQ(message2.foo_int(), 123);
message1.set_foo_string("foo");
int size = message1.ByteSizeLong();
data.resize(size);
uint8_t* start = reinterpret_cast<uint8_t*>(::google::protobuf::string_as_array(&data));
uint8_t* start = reinterpret_cast<uint8_t*>(&data[0]);
uint8_t* end = message1.SerializeWithCachedSizesToArray(start);
EXPECT_EQ(size, end - start);
EXPECT_TRUE(message2.ParseFromString(data));
@ -1870,7 +1868,7 @@ EXPECT_EQ(message2.foo_int(), 123);
message1.set_foo_bytes("moo");
int size = message1.ByteSizeLong();
data.resize(size);
uint8_t* start = reinterpret_cast<uint8_t*>(::google::protobuf::string_as_array(&data));
uint8_t* start = reinterpret_cast<uint8_t*>(&data[0]);
uint8_t* end = message1.SerializeWithCachedSizesToArray(start);
EXPECT_EQ(size, end - start);
EXPECT_TRUE(message2.ParseFromString(data));
@ -1884,7 +1882,7 @@ EXPECT_EQ(message2.foo_int(), 123);
message1.set_foo_enum(UNITTEST::TestOneof2::FOO);
int size = message1.ByteSizeLong();
data.resize(size);
uint8_t* start = reinterpret_cast<uint8_t*>(::google::protobuf::string_as_array(&data));
uint8_t* start = reinterpret_cast<uint8_t*>(&data[0]);
uint8_t* end = message1.SerializeWithCachedSizesToArray(start);
EXPECT_EQ(size, end - start);
EXPECT_TRUE(message2.ParseFromString(data));
@ -1898,7 +1896,7 @@ EXPECT_EQ(message2.foo_int(), 123);
message1.mutable_foo_message()->set_moo_int(234);
int size = message1.ByteSizeLong();
data.resize(size);
uint8_t* start = reinterpret_cast<uint8_t*>(::google::protobuf::string_as_array(&data));
uint8_t* start = reinterpret_cast<uint8_t*>(&data[0]);
uint8_t* end = message1.SerializeWithCachedSizesToArray(start);
EXPECT_EQ(size, end - start);
EXPECT_TRUE(message2.ParseFromString(data));
@ -1912,7 +1910,7 @@ EXPECT_EQ(message2.foo_int(), 123);
message1.mutable_foogroup()->set_a(345);
int size = message1.ByteSizeLong();
data.resize(size);
uint8_t* start = reinterpret_cast<uint8_t*>(::google::protobuf::string_as_array(&data));
uint8_t* start = reinterpret_cast<uint8_t*>(&data[0]);
uint8_t* end = message1.SerializeWithCachedSizesToArray(start);
EXPECT_EQ(size, end - start);
EXPECT_TRUE(message2.ParseFromString(data));
@ -1937,11 +1935,11 @@ data.resize(size);
{
// Allow the output stream to buffer only one byte at a time.
io::ArrayOutputStream array_stream(::google::protobuf::string_as_array(&data), size, 1);
io::CodedOutputStream output_stream(&array_stream);
message1.SerializeWithCachedSizes(&output_stream);
EXPECT_FALSE(output_stream.HadError());
EXPECT_EQ(size, output_stream.ByteCount());
io::ArrayOutputStream array_stream(&data[0], size, 1);
io::CodedOutputStream output_stream(&array_stream);
message1.SerializeWithCachedSizes(&output_stream);
EXPECT_FALSE(output_stream.HadError());
EXPECT_EQ(size, output_stream.ByteCount());
}
EXPECT_TRUE(message2.ParseFromString(data));
@ -1958,11 +1956,11 @@ data.resize(size);
{
// Allow the output stream to buffer only one byte at a time.
io::ArrayOutputStream array_stream(::google::protobuf::string_as_array(&data), size, 1);
io::CodedOutputStream output_stream(&array_stream);
message1.SerializeWithCachedSizes(&output_stream);
EXPECT_FALSE(output_stream.HadError());
EXPECT_EQ(size, output_stream.ByteCount());
io::ArrayOutputStream array_stream(&data[0], size, 1);
io::CodedOutputStream output_stream(&array_stream);
message1.SerializeWithCachedSizes(&output_stream);
EXPECT_FALSE(output_stream.HadError());
EXPECT_EQ(size, output_stream.ByteCount());
}
EXPECT_TRUE(message2.ParseFromString(data));
@ -1980,11 +1978,11 @@ data.resize(size);
{
// Allow the output stream to buffer only one byte at a time.
io::ArrayOutputStream array_stream(::google::protobuf::string_as_array(&data), size, 1);
io::CodedOutputStream output_stream(&array_stream);
message1.SerializeWithCachedSizes(&output_stream);
EXPECT_FALSE(output_stream.HadError());
EXPECT_EQ(size, output_stream.ByteCount());
io::ArrayOutputStream array_stream(&data[0], size, 1);
io::CodedOutputStream output_stream(&array_stream);
message1.SerializeWithCachedSizes(&output_stream);
EXPECT_FALSE(output_stream.HadError());
EXPECT_EQ(size, output_stream.ByteCount());
}
EXPECT_TRUE(message2.ParseFromString(data));
@ -2001,11 +1999,11 @@ data.resize(size);
{
// Allow the output stream to buffer only one byte at a time.
io::ArrayOutputStream array_stream(::google::protobuf::string_as_array(&data), size, 1);
io::CodedOutputStream output_stream(&array_stream);
message1.SerializeWithCachedSizes(&output_stream);
EXPECT_FALSE(output_stream.HadError());
EXPECT_EQ(size, output_stream.ByteCount());
io::ArrayOutputStream array_stream(&data[0], size, 1);
io::CodedOutputStream output_stream(&array_stream);
message1.SerializeWithCachedSizes(&output_stream);
EXPECT_FALSE(output_stream.HadError());
EXPECT_EQ(size, output_stream.ByteCount());
}
EXPECT_TRUE(message2.ParseFromString(data));
@ -2022,11 +2020,11 @@ data.resize(size);
{
// Allow the output stream to buffer only one byte at a time.
io::ArrayOutputStream array_stream(::google::protobuf::string_as_array(&data), size, 1);
io::CodedOutputStream output_stream(&array_stream);
message1.SerializeWithCachedSizes(&output_stream);
EXPECT_FALSE(output_stream.HadError());
EXPECT_EQ(size, output_stream.ByteCount());
io::ArrayOutputStream array_stream(&data[0], size, 1);
io::CodedOutputStream output_stream(&array_stream);
message1.SerializeWithCachedSizes(&output_stream);
EXPECT_FALSE(output_stream.HadError());
EXPECT_EQ(size, output_stream.ByteCount());
}
EXPECT_TRUE(message2.ParseFromString(data));
@ -2043,11 +2041,11 @@ data.resize(size);
{
// Allow the output stream to buffer only one byte at a time.
io::ArrayOutputStream array_stream(::google::protobuf::string_as_array(&data), size, 1);
io::CodedOutputStream output_stream(&array_stream);
message1.SerializeWithCachedSizes(&output_stream);
EXPECT_FALSE(output_stream.HadError());
EXPECT_EQ(size, output_stream.ByteCount());
io::ArrayOutputStream array_stream(&data[0], size, 1);
io::CodedOutputStream output_stream(&array_stream);
message1.SerializeWithCachedSizes(&output_stream);
EXPECT_FALSE(output_stream.HadError());
EXPECT_EQ(size, output_stream.ByteCount());
}
EXPECT_TRUE(message2.ParseFromString(data));


@ -51,7 +51,6 @@
#include "google/protobuf/compiler/importer.h"
#include "google/protobuf/descriptor.h"
#include "google/protobuf/io/zero_copy_stream_impl.h"
#include "google/protobuf/stubs/stl_util.h"
namespace google {
namespace protobuf {


@ -1,3 +1,33 @@
// Protocol Buffers - Google's data interchange format
// Copyright 2008 Google Inc. All rights reserved.
// https://developers.google.com/protocol-buffers/
//
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
// * Redistributions of source code must retain the above copyright
// notice, this list of conditions and the following disclaimer.
// * Redistributions in binary form must reproduce the above
// copyright notice, this list of conditions and the following disclaimer
// in the documentation and/or other materials provided with the
// distribution.
// * Neither the name of Google Inc. nor the names of its
// contributors may be used to endorse or promote products derived from
// this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#ifndef GOOGLE_PROTOBUF_COMPILER_JAVA_JAVA_GENERATOR_H_
#define GOOGLE_PROTOBUF_COMPILER_JAVA_JAVA_GENERATOR_H_


@ -36,8 +36,8 @@
#include "absl/strings/escaping.h"
#include "absl/strings/str_cat.h"
#include "google/protobuf/compiler/objectivec/names.h"
#include "google/protobuf/compiler/objectivec/helpers.h"
#include "google/protobuf/compiler/objectivec/names.h"
#include "google/protobuf/compiler/objectivec/text_format_decode_data.h"
#include "google/protobuf/io/printer.h"
@ -58,8 +58,7 @@ std::string SafelyPrintIntToCode(int v) {
} // namespace
EnumGenerator::EnumGenerator(const EnumDescriptor* descriptor)
: descriptor_(descriptor),
name_(EnumName(descriptor_)) {
: descriptor_(descriptor), name_(EnumName(descriptor_)) {
// Track the names for the enum values, and if an alias overlaps a base
// value, skip making a name for it. Likewise if two alias overlap, the
// first one wins.
@ -121,25 +120,29 @@ void EnumGenerator::GenerateHeader(io::Printer* printer) {
// doesn't have to bother with the `enum_extensibility` attribute, as the
// default will be what is needed.
printer->Print("$comments$typedef$deprecated_attribute$ GPB_ENUM($name$) {\n",
"comments", enum_comments,
"deprecated_attribute", GetOptionalDeprecatedAttribute(descriptor_, descriptor_->file()),
"name", name_);
printer->Print(
"$comments$typedef$deprecated_attribute$ GPB_ENUM($name$) {\n",
"comments", enum_comments, "deprecated_attribute",
GetOptionalDeprecatedAttribute(descriptor_, descriptor_->file()), "name",
name_);
printer->Indent();
if (HasPreservingUnknownEnumSemantics(descriptor_->file())) {
// Include the unknown value.
printer->Print(
"/**\n"
" * Value used if any message's field encounters a value that is not defined\n"
" * by this enum. The message will also have C functions to get/set the rawValue\n"
" * of the field.\n"
" **/\n"
"$name$_GPBUnrecognizedEnumeratorValue = kGPBUnrecognizedEnumeratorValue,\n",
"name", name_);
// clang-format off
"/**\n"
" * Value used if any message's field encounters a value that is not defined\n"
" * by this enum. The message will also have C functions to get/set the rawValue\n"
" * of the field.\n"
" **/\n"
"$name$_GPBUnrecognizedEnumeratorValue = kGPBUnrecognizedEnumeratorValue,\n",
// clang-format on
"name", name_);
}
for (int i = 0; i < all_values_.size(); i++) {
if (alias_values_to_skip_.find(all_values_[i]) != alias_values_to_skip_.end()) {
if (alias_values_to_skip_.find(all_values_[i]) !=
alias_values_to_skip_.end()) {
continue;
}
if (all_values_[i]->GetSourceLocation(&location)) {
@ -152,14 +155,14 @@ void EnumGenerator::GenerateHeader(io::Printer* printer) {
}
}
printer->Print(
"$name$$deprecated_attribute$ = $value$,\n",
"name", EnumValueName(all_values_[i]),
"deprecated_attribute", GetOptionalDeprecatedAttribute(all_values_[i]),
"value", SafelyPrintIntToCode(all_values_[i]->number()));
printer->Print("$name$$deprecated_attribute$ = $value$,\n", "name",
EnumValueName(all_values_[i]), "deprecated_attribute",
GetOptionalDeprecatedAttribute(all_values_[i]), "value",
SafelyPrintIntToCode(all_values_[i]->number()));
}
printer->Outdent();
printer->Print(
// clang-format off
"};\n"
"\n"
"GPBEnumDescriptor *$name$_EnumDescriptor(void);\n"
@ -169,6 +172,7 @@ void EnumGenerator::GenerateHeader(io::Printer* printer) {
" * the time this source was generated.\n"
" **/\n"
"BOOL $name$_IsValidValue(int32_t value);\n"
// clang-format on
"\n",
"name", name_);
}
@ -199,38 +203,42 @@ void EnumGenerator::GenerateSource(io::Printer* printer) {
}
printer->Print(
// clang-format off
"GPBEnumDescriptor *$name$_EnumDescriptor(void) {\n"
" static _Atomic(GPBEnumDescriptor*) descriptor = nil;\n"
" if (!descriptor) {\n",
// clang-format on
"name", name_);
static const int kBytesPerLine = 40; // allow for escaping
printer->Print(
" static const char *valueNames =");
printer->Print(" static const char *valueNames =");
for (int i = 0; i < text_blob.size(); i += kBytesPerLine) {
printer->Print(
"\n \"$data$\"",
"data", EscapeTrigraphs(absl::CEscape(text_blob.substr(i, kBytesPerLine))));
"\n \"$data$\"", "data",
EscapeTrigraphs(absl::CEscape(text_blob.substr(i, kBytesPerLine))));
}
printer->Print(
";\n"
" static const int32_t values[] = {\n");
for (int i = 0; i < all_values_.size(); i++) {
printer->Print(" $name$,\n", "name", EnumValueName(all_values_[i]));
printer->Print(" $name$,\n", "name", EnumValueName(all_values_[i]));
}
printer->Print(" };\n");
if (text_format_decode_data.num_entries() == 0) {
printer->Print(
// clang-format off
" GPBEnumDescriptor *worker =\n"
" [GPBEnumDescriptor allocDescriptorForName:GPBNSStringifySymbol($name$)\n"
" valueNames:valueNames\n"
" values:values\n"
" count:(uint32_t)(sizeof(values) / sizeof(int32_t))\n"
" enumVerifier:$name$_IsValidValue];\n",
// clang-format on
"name", name_);
} else {
printer->Print(
} else {
printer->Print(
// clang-format off
" static const char *extraTextFormatInfo = \"$extraTextFormatInfo$\";\n"
" GPBEnumDescriptor *worker =\n"
" [GPBEnumDescriptor allocDescriptorForName:GPBNSStringifySymbol($name$)\n"
@ -239,35 +247,41 @@ void EnumGenerator::GenerateSource(io::Printer* printer) {
" count:(uint32_t)(sizeof(values) / sizeof(int32_t))\n"
" enumVerifier:$name$_IsValidValue\n"
" extraTextFormatInfo:extraTextFormatInfo];\n",
"name", name_,
"extraTextFormatInfo", absl::CEscape(text_format_decode_data.Data()));
}
printer->Print(
" GPBEnumDescriptor *expected = nil;\n"
" if (!atomic_compare_exchange_strong(&descriptor, &expected, worker)) {\n"
" [worker release];\n"
" }\n"
" }\n"
" return descriptor;\n"
"}\n\n");
// clang-format on
"name", name_, "extraTextFormatInfo",
absl::CEscape(text_format_decode_data.Data()));
}
// clang-format off
printer->Print(
" GPBEnumDescriptor *expected = nil;\n"
" if (!atomic_compare_exchange_strong(&descriptor, &expected, worker)) {\n"
" [worker release];\n"
" }\n"
" }\n"
" return descriptor;\n"
"}\n\n");
// clang-format on
printer->Print(
// clang-format off
"BOOL $name$_IsValidValue(int32_t value__) {\n"
" switch (value__) {\n",
// clang-format on
"name", name_);
for (int i = 0; i < base_values_.size(); i++) {
printer->Print(
" case $name$:\n",
"name", EnumValueName(base_values_[i]));
printer->Print(" case $name$:\n", "name",
EnumValueName(base_values_[i]));
}
// clang-format off
printer->Print(
" return YES;\n"
" default:\n"
" return NO;\n"
" }\n"
"}\n\n");
// clang-format on
}
} // namespace objectivec
} // namespace compiler


@ -31,9 +31,10 @@
#ifndef GOOGLE_PROTOBUF_COMPILER_OBJECTIVEC_ENUM_H__
#define GOOGLE_PROTOBUF_COMPILER_OBJECTIVEC_ENUM_H__
#include <string>
#include <set>
#include <string>
#include <vector>
#include "google/protobuf/descriptor.h"
#include "google/protobuf/io/printer.h"


@ -28,12 +28,13 @@
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#include "google/protobuf/compiler/objectivec/enum_field.h"
#include <map>
#include <string>
#include "google/protobuf/compiler/objectivec/enum_field.h"
#include "google/protobuf/compiler/objectivec/names.h"
#include "google/protobuf/compiler/objectivec/helpers.h"
#include "google/protobuf/compiler/objectivec/names.h"
#include "google/protobuf/io/printer.h"
namespace google {
@ -78,6 +79,7 @@ void EnumFieldGenerator::GenerateCFunctionDeclarations(
return;
}
// clang-format off
printer->Print(
variables_,
"/**\n"
@ -92,12 +94,14 @@ void EnumFieldGenerator::GenerateCFunctionDeclarations(
" **/\n"
"void Set$owning_message_class$_$capitalized_name$_RawValue($owning_message_class$ *message, int32_t value);\n"
"\n");
// clang-format on
}
void EnumFieldGenerator::GenerateCFunctionImplementations(
io::Printer* printer) const {
if (!HasPreservingUnknownEnumSemantics(descriptor_->file())) return;
// clang-format off
printer->Print(
variables_,
"int32_t $owning_message_class$_$capitalized_name$_RawValue($owning_message_class$ *message) {\n"
@ -112,13 +116,13 @@ void EnumFieldGenerator::GenerateCFunctionImplementations(
" GPBSetMessageRawEnumField(message, field, value);\n"
"}\n"
"\n");
// clang-format on
}
void EnumFieldGenerator::DetermineForwardDeclarations(
std::set<std::string>* fwd_decls,
bool include_external_types) const {
SingleFieldGenerator::DetermineForwardDeclarations(
fwd_decls, include_external_types);
std::set<std::string>* fwd_decls, bool include_external_types) const {
SingleFieldGenerator::DetermineForwardDeclarations(fwd_decls,
include_external_types);
// If it is an enum defined in a different file (and not a WKT), then we'll
// need a forward declaration for it. When it is in our file, all the enums
// are output before the message, so it will be declared before it is needed.
@ -142,8 +146,8 @@ RepeatedEnumFieldGenerator::~RepeatedEnumFieldGenerator() {}
void RepeatedEnumFieldGenerator::FinishInitialization(void) {
RepeatedFieldGenerator::FinishInitialization();
variables_["array_comment"] =
"// |" + variables_["name"] + "| contains |" + variables_["storage_type"] + "|\n";
variables_["array_comment"] = "// |" + variables_["name"] + "| contains |" +
variables_["storage_type"] + "|\n";
}
} // namespace objectivec


@ -33,6 +33,7 @@
#include <map>
#include <string>
#include "google/protobuf/compiler/objectivec/field.h"
namespace google {


@ -33,8 +33,8 @@
#include <iostream>
#include "absl/strings/str_cat.h"
#include "google/protobuf/compiler/objectivec/names.h"
#include "google/protobuf/compiler/objectivec/helpers.h"
#include "google/protobuf/compiler/objectivec/names.h"
#include "google/protobuf/descriptor.pb.h"
#include "google/protobuf/io/printer.h"
@ -52,7 +52,7 @@ ExtensionGenerator::ExtensionGenerator(const std::string& root_class_name,
// NOTE: src/google/protobuf/compiler/plugin.cc makes use of cerr for some
// error cases, so it seems to be ok to use as a back door for errors.
std::cerr << "error: Extension is a map<>!"
<< " That used to be blocked by the compiler." << std::endl;
<< " That used to be blocked by the compiler." << std::endl;
std::cerr.flush();
abort();
}
@ -76,10 +76,14 @@ void ExtensionGenerator::GenerateMembersHeader(io::Printer* printer) {
}
// Unlike normal message fields, check if the file for the extension was
// deprecated.
vars["deprecated_attribute"] = GetOptionalDeprecatedAttribute(descriptor_, descriptor_->file());
printer->Print(vars,
"$comments$"
"+ (GPBExtensionDescriptor *)$method_name$$storage_attribute$$deprecated_attribute$;\n");
vars["deprecated_attribute"] =
GetOptionalDeprecatedAttribute(descriptor_, descriptor_->file());
// clang-format off
printer->Print(
vars,
"$comments$"
"+ (GPBExtensionDescriptor *)$method_name$$storage_attribute$$deprecated_attribute$;\n");
// clang-format on
}
void ExtensionGenerator::GenerateStaticVariablesInitialization(
@ -117,22 +121,25 @@ void ExtensionGenerator::GenerateStaticVariablesInitialization(
if (objc_type == OBJECTIVECTYPE_ENUM) {
vars["enum_desc_func_name"] =
EnumName(descriptor_->enum_type()) + "_EnumDescriptor";
EnumName(descriptor_->enum_type()) + "_EnumDescriptor";
} else {
vars["enum_desc_func_name"] = "NULL";
}
printer->Print(vars,
"{\n"
" .defaultValue.$default_name$ = $default$,\n"
" .singletonName = GPBStringifySymbol($root_class_and_method_name$),\n"
" .extendedClass.clazz = $extended_type$,\n"
" .messageOrGroupClass.clazz = $type$,\n"
" .enumDescriptorFunc = $enum_desc_func_name$,\n"
" .fieldNumber = $number$,\n"
" .dataType = $extension_type$,\n"
" .options = $options$,\n"
"},\n");
// clang-format off
printer->Print(
vars,
"{\n"
" .defaultValue.$default_name$ = $default$,\n"
" .singletonName = GPBStringifySymbol($root_class_and_method_name$),\n"
" .extendedClass.clazz = $extended_type$,\n"
" .messageOrGroupClass.clazz = $type$,\n"
" .enumDescriptorFunc = $enum_desc_func_name$,\n"
" .fieldNumber = $number$,\n"
" .dataType = $extension_type$,\n"
" .options = $options$,\n"
"},\n");
// clang-format on
}
void ExtensionGenerator::DetermineObjectiveCClassDefinitions(
@ -147,9 +154,11 @@ void ExtensionGenerator::DetermineObjectiveCClassDefinitions(
}
void ExtensionGenerator::GenerateRegistrationSource(io::Printer* printer) {
// clang-format off
printer->Print(
"[registry addExtension:$root_class_and_method_name$];\n",
"root_class_and_method_name", root_class_and_method_name_);
// clang-format on
}
} // namespace objectivec


@ -34,10 +34,10 @@
#include "absl/strings/str_cat.h"
#include "google/protobuf/compiler/objectivec/enum_field.h"
#include "google/protobuf/compiler/objectivec/names.h"
#include "google/protobuf/compiler/objectivec/helpers.h"
#include "google/protobuf/compiler/objectivec/map_field.h"
#include "google/protobuf/compiler/objectivec/message_field.h"
#include "google/protobuf/compiler/objectivec/names.h"
#include "google/protobuf/compiler/objectivec/primitive_field.h"
#include "google/protobuf/io/printer.h"
@ -78,7 +78,8 @@ void SetCommonFieldVariables(const FieldDescriptor* descriptor,
classname + "_FieldNumber_" + capitalized_name;
(*variables)["field_number"] = absl::StrCat(descriptor->number());
(*variables)["field_type"] = GetCapitalizedType(descriptor);
(*variables)["deprecated_attribute"] = GetOptionalDeprecatedAttribute(descriptor);
(*variables)["deprecated_attribute"] =
GetOptionalDeprecatedAttribute(descriptor);
std::vector<std::string> field_flags;
if (descriptor->is_repeated()) field_flags.push_back("GPBFieldRepeated");
if (descriptor->is_required()) field_flags.push_back("GPBFieldRequired");
@ -109,8 +110,8 @@ void SetCommonFieldVariables(const FieldDescriptor* descriptor,
(*variables)["dataTypeSpecific_name"] = "clazz";
(*variables)["dataTypeSpecific_value"] = "Nil";
(*variables)["storage_offset_value"] =
"(uint32_t)offsetof(" + classname + "__storage_, " + camel_case_name + ")";
(*variables)["storage_offset_value"] = "(uint32_t)offsetof(" + classname +
"__storage_, " + camel_case_name + ")";
(*variables)["storage_offset_comment"] = "";
// Clear some common things so they can be set just when needed.
@ -162,29 +163,6 @@ bool HasNonZeroDefaultValue(const FieldDescriptor* field) {
return false;
}
bool IsPrimitiveType(const FieldDescriptor* field) {
ObjectiveCType type = GetObjectiveCType(field);
switch (type) {
case OBJECTIVECTYPE_INT32:
case OBJECTIVECTYPE_UINT32:
case OBJECTIVECTYPE_INT64:
case OBJECTIVECTYPE_UINT64:
case OBJECTIVECTYPE_FLOAT:
case OBJECTIVECTYPE_DOUBLE:
case OBJECTIVECTYPE_BOOLEAN:
case OBJECTIVECTYPE_ENUM:
return true;
break;
default:
return false;
}
}
bool IsReferenceType(const FieldDescriptor* field) {
return !IsPrimitiveType(field);
}
} // namespace
FieldGenerator* FieldGenerator::Make(const FieldDescriptor* field) {
@ -236,13 +214,10 @@ FieldGenerator::FieldGenerator(const FieldDescriptor* descriptor)
FieldGenerator::~FieldGenerator() {}
void FieldGenerator::GenerateFieldNumberConstant(io::Printer* printer) const {
printer->Print(
variables_,
"$field_number_name$ = $field_number$,\n");
printer->Print(variables_, "$field_number_name$ = $field_number$,\n");
}
void FieldGenerator::GenerateCFunctionDeclarations(
io::Printer* printer) const {
void FieldGenerator::GenerateCFunctionDeclarations(io::Printer* printer) const {
// Nothing
}
@ -252,8 +227,7 @@ void FieldGenerator::GenerateCFunctionImplementations(
}
void FieldGenerator::DetermineForwardDeclarations(
std::set<std::string>* fwd_decls,
bool include_external_types) const {
std::set<std::string>* fwd_decls, bool include_external_types) const {
// Nothing
}
@ -262,10 +236,11 @@ void FieldGenerator::DetermineObjectiveCClassDefinitions(
// Nothing
}
void FieldGenerator::GenerateFieldDescription(
io::Printer* printer, bool include_default) const {
void FieldGenerator::GenerateFieldDescription(io::Printer* printer,
bool include_default) const {
// Printed in the same order as the structure decl.
if (include_default) {
// clang-format off
printer->Print(
variables_,
"{\n"
@ -278,7 +253,9 @@ void FieldGenerator::GenerateFieldDescription(
" .core.flags = $fieldflags$,\n"
" .core.dataType = GPBDataType$field_type$,\n"
"},\n");
// clang-format on
} else {
// clang-format off
printer->Print(
variables_,
"{\n"
@ -290,6 +267,7 @@ void FieldGenerator::GenerateFieldDescription(
" .flags = $fieldflags$,\n"
" .dataType = GPBDataType$field_type$,\n"
"},\n");
// clang-format on
}
}
@ -301,14 +279,13 @@ void FieldGenerator::SetNoHasBit(void) {
variables_["has_index"] = "GPBNoHasBit";
}
int FieldGenerator::ExtraRuntimeHasBitsNeeded(void) const {
return 0;
}
int FieldGenerator::ExtraRuntimeHasBitsNeeded(void) const { return 0; }
void FieldGenerator::SetExtraRuntimeHasBitsBase(int index_base) {
// NOTE: src/google/protobuf/compiler/plugin.cc makes use of cerr for some
// error cases, so it seems to be ok to use as a back door for errors.
std::cerr << "Error: should have overridden SetExtraRuntimeHasBitsBase()." << std::endl;
std::cerr << "Error: should have overridden SetExtraRuntimeHasBitsBase()."
<< std::endl;
std::cerr.flush();
abort();
}
@ -349,14 +326,18 @@ void SingleFieldGenerator::GenerateFieldStorageDeclaration(
void SingleFieldGenerator::GeneratePropertyDeclaration(
io::Printer* printer) const {
printer->Print(variables_, "$comments$");
// clang-format off
printer->Print(
variables_,
"@property(nonatomic, readwrite) $property_type$ $name$$deprecated_attribute$;\n"
"\n");
// clang-format on
if (WantsHasProperty()) {
// clang-format off
printer->Print(
variables_,
"@property(nonatomic, readwrite) BOOL has$capitalized_name$$deprecated_attribute$;\n");
// clang-format on
}
}
@ -394,26 +375,30 @@ void ObjCObjFieldGenerator::GenerateFieldStorageDeclaration(
void ObjCObjFieldGenerator::GeneratePropertyDeclaration(
io::Printer* printer) const {
// Differs from SingleFieldGenerator::GeneratePropertyDeclaration() in that
// it uses pointers and deals with Objective C's rules around storage name
// conventions (init*, new*, etc.)
printer->Print(variables_, "$comments$");
// clang-format off
printer->Print(
variables_,
"@property(nonatomic, readwrite, $property_storage_attribute$, null_resettable) $property_type$ *$name$$storage_attribute$$deprecated_attribute$;\n");
// clang-format on
if (WantsHasProperty()) {
// clang-format off
printer->Print(
variables_,
"/** Test to see if @c $name$ has been set. */\n"
"@property(nonatomic, readwrite) BOOL has$capitalized_name$$deprecated_attribute$;\n");
// clang-format on
}
if (IsInitName(variables_.find("name")->second)) {
// If property name starts with init we need to annotate it to get past ARC.
// http://stackoverflow.com/questions/18723226/how-do-i-annotate-an-objective-c-property-with-an-objc-method-family/18723227#18723227
printer->Print(variables_,
"- ($property_type$ *)$name$ GPB_METHOD_FAMILY_NONE$deprecated_attribute$;\n");
"- ($property_type$ *)$name$ "
"GPB_METHOD_FAMILY_NONE$deprecated_attribute$;\n");
}
printer->Print("\n");
}
@ -446,13 +431,13 @@ void RepeatedFieldGenerator::GeneratePropertyImplementation(
void RepeatedFieldGenerator::GeneratePropertyDeclaration(
io::Printer* printer) const {
// Repeated fields don't need the has* properties, but they do expose a
// *Count (to check without autocreation). So for the field property we need
// the same logic as ObjCObjFieldGenerator::GeneratePropertyDeclaration() for
// dealing with needing Objective C's rules around storage name conventions
// (init*, new*, etc.)
// clang-format off
printer->Print(
variables_,
"$comments$"
@ -460,11 +445,14 @@ void RepeatedFieldGenerator::GeneratePropertyDeclaration(
"@property(nonatomic, readwrite, strong, null_resettable) $array_property_type$ *$name$$storage_attribute$$deprecated_attribute$;\n"
"/** The number of items in @c $name$ without causing the container to be created. */\n"
"@property(nonatomic, readonly) NSUInteger $name$_Count$deprecated_attribute$;\n");
// clang-format on
if (IsInitName(variables_.find("name")->second)) {
// If property name starts with init we need to annotate it to get past ARC.
// http://stackoverflow.com/questions/18723226/how-do-i-annotate-an-objective-c-property-with-an-objc-method-family/18723227#18723227
// clang-format off
printer->Print(variables_,
"- ($array_property_type$ *)$name$ GPB_METHOD_FAMILY_NONE$deprecated_attribute$;\n");
// clang-format on
}
printer->Print("\n");
}
@ -479,8 +467,7 @@ FieldGeneratorMap::FieldGeneratorMap(const Descriptor* descriptor)
extension_generators_(descriptor->extension_count()) {
// Construct all the FieldGenerators.
for (int i = 0; i < descriptor->field_count(); i++) {
field_generators_[i].reset(
FieldGenerator::Make(descriptor->field(i)));
field_generators_[i].reset(FieldGenerator::Make(descriptor->field(i)));
}
for (int i = 0; i < descriptor->extension_count(); i++) {
extension_generators_[i].reset(


@ -33,6 +33,7 @@
#include <map>
#include <string>
#include "google/protobuf/descriptor.h"
#include "google/protobuf/io/printer.h"
@ -63,15 +64,14 @@ class FieldGenerator {
virtual void GenerateCFunctionImplementations(io::Printer* printer) const;
// Exposed for subclasses, should always call it on the parent class also.
virtual void DetermineForwardDeclarations(
std::set<std::string>* fwd_decls,
bool include_external_types) const;
virtual void DetermineForwardDeclarations(std::set<std::string>* fwd_decls,
bool include_external_types) const;
virtual void DetermineObjectiveCClassDefinitions(
std::set<std::string>* fwd_decls) const;
// Used during generation, not intended to be extended by subclasses.
void GenerateFieldDescription(
io::Printer* printer, bool include_default) const;
void GenerateFieldDescription(io::Printer* printer,
bool include_default) const;
void GenerateFieldNumberConstant(io::Printer* printer) const;
// Exposed to get and set the has bits information.
@ -111,10 +111,12 @@ class SingleFieldGenerator : public FieldGenerator {
SingleFieldGenerator(const SingleFieldGenerator&) = delete;
SingleFieldGenerator& operator=(const SingleFieldGenerator&) = delete;
virtual void GenerateFieldStorageDeclaration(io::Printer* printer) const override;
virtual void GenerateFieldStorageDeclaration(
io::Printer* printer) const override;
virtual void GeneratePropertyDeclaration(io::Printer* printer) const override;
virtual void GeneratePropertyImplementation(io::Printer* printer) const override;
virtual void GeneratePropertyImplementation(
io::Printer* printer) const override;
virtual bool RuntimeUsesHasBit(void) const override;
@ -130,7 +132,8 @@ class ObjCObjFieldGenerator : public SingleFieldGenerator {
ObjCObjFieldGenerator(const ObjCObjFieldGenerator&) = delete;
ObjCObjFieldGenerator& operator=(const ObjCObjFieldGenerator&) = delete;
virtual void GenerateFieldStorageDeclaration(io::Printer* printer) const override;
virtual void GenerateFieldStorageDeclaration(
io::Printer* printer) const override;
virtual void GeneratePropertyDeclaration(io::Printer* printer) const override;
protected:
@ -144,10 +147,12 @@ class RepeatedFieldGenerator : public ObjCObjFieldGenerator {
RepeatedFieldGenerator(const RepeatedFieldGenerator&) = delete;
RepeatedFieldGenerator& operator=(const RepeatedFieldGenerator&) = delete;
virtual void GenerateFieldStorageDeclaration(io::Printer* printer) const override;
virtual void GenerateFieldStorageDeclaration(
io::Printer* printer) const override;
virtual void GeneratePropertyDeclaration(io::Printer* printer) const override;
virtual void GeneratePropertyImplementation(io::Printer* printer) const override;
virtual void GeneratePropertyImplementation(
io::Printer* printer) const override;
virtual bool RuntimeUsesHasBit(void) const override;


@ -34,14 +34,14 @@
#include <iostream>
#include <sstream>
#include "google/protobuf/compiler/code_generator.h"
#include "absl/strings/str_cat.h"
#include "google/protobuf/compiler/code_generator.h"
#include "google/protobuf/compiler/objectivec/enum.h"
#include "google/protobuf/compiler/objectivec/extension.h"
#include "google/protobuf/compiler/objectivec/names.h"
#include "google/protobuf/compiler/objectivec/helpers.h"
#include "google/protobuf/compiler/objectivec/import_writer.h"
#include "google/protobuf/compiler/objectivec/message.h"
#include "google/protobuf/compiler/objectivec/names.h"
#include "google/protobuf/io/printer.h"
// NOTE: src/google/protobuf/compiler/plugin.cc makes use of cerr for some
@ -137,7 +137,7 @@ struct FileDescriptorsOrderedByName {
} // namespace
FileGenerator::CommonState::CommonState() { }
FileGenerator::CommonState::CommonState() {}
const FileGenerator::CommonState::MinDepsEntry&
FileGenerator::CommonState::CollectMinimalFileDepsContainingExtensionsInternal(
@ -156,20 +156,24 @@ FileGenerator::CommonState::CollectMinimalFileDepsContainingExtensionsInternal(
CollectMinimalFileDepsContainingExtensionsInternal(dep);
// Everything the dep covered, this file will also cover.
covered_deps_collector.insert(dep_info.covered_deps.begin(), dep_info.covered_deps.end());
covered_deps_collector.insert(dep_info.covered_deps.begin(),
dep_info.covered_deps.end());
// Prune everything from the dep's covered list in case another dep lists it
// as a min dep.
to_prune.insert(dep_info.covered_deps.begin(), dep_info.covered_deps.end());
// Does the dep have any extensions...
if (dep_info.has_extensions) {
// Yes -> Add this file, prune its min_deps and add them to the covered deps.
// Yes -> Add this file, prune its min_deps and add them to the covered
// deps.
min_deps_collector.insert(dep);
to_prune.insert(dep_info.min_deps.begin(), dep_info.min_deps.end());
covered_deps_collector.insert(dep_info.min_deps.begin(), dep_info.min_deps.end());
covered_deps_collector.insert(dep_info.min_deps.begin(),
dep_info.min_deps.end());
} else {
// No -> Just use its min_deps.
min_deps_collector.insert(dep_info.min_deps.begin(), dep_info.min_deps.end());
min_deps_collector.insert(dep_info.min_deps.begin(),
dep_info.min_deps.end());
}
}
@ -178,22 +182,25 @@ FileGenerator::CommonState::CollectMinimalFileDepsContainingExtensionsInternal(
// Fast path: if nothing to prune or there was only one dep, the prune work is
// a waste, skip it.
if (to_prune.empty() || file->dependency_count() == 1) {
return deps_info_cache_.insert(
{file, {file_has_exts, min_deps_collector, covered_deps_collector}}).first->second;
return deps_info_cache_
.insert(
{file, {file_has_exts, min_deps_collector, covered_deps_collector}})
.first->second;
}
std::unordered_set<const FileDescriptor*> min_deps;
std::copy_if(min_deps_collector.begin(), min_deps_collector.end(),
std::inserter(min_deps, min_deps.end()),
[&](const FileDescriptor* value){
return to_prune.find(value) == to_prune.end();
});
return deps_info_cache_.insert(
{file, {file_has_exts, min_deps, covered_deps_collector}}).first->second;
[&](const FileDescriptor* value) {
return to_prune.find(value) == to_prune.end();
});
return deps_info_cache_
.insert({file, {file_has_exts, min_deps, covered_deps_collector}})
.first->second;
}
// Collect the deps of the given file that contain extensions. This can be used to
// create the chain of roots that need to be wired together.
// Collect the deps of the given file that contain extensions. This can be used
// to create the chain of roots that need to be wired together.
//
// NOTE: If any changes are made to this and the supporting functions, you will
// need to manually validate what the generated code is for the test files:
@ -205,7 +212,7 @@ const std::vector<const FileDescriptor*>
FileGenerator::CommonState::CollectMinimalFileDepsContainingExtensions(
const FileDescriptor* file) {
std::unordered_set<const FileDescriptor*> min_deps =
CollectMinimalFileDepsContainingExtensionsInternal(file).min_deps;
CollectMinimalFileDepsContainingExtensionsInternal(file).min_deps;
// Sort the list since pointer order isn't stable across runs.
std::vector<const FileDescriptor*> result(min_deps.begin(), min_deps.end());
std::sort(result.begin(), result.end(), FileDescriptorsOrderedByName());
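A loose Python model of the pruning idea in this dependency walk (an illustration under simplified assumptions, not the generator code): a dependency that itself has extensions stands in for its own minimal deps, and anything covered by some dependency is pruned from the final set.

def minimal_extension_deps(file, deps_of, has_extensions, cache=None):
  if cache is None:
    cache = {}
  if file in cache:
    return cache[file]
  min_deps, covered, to_prune = set(), set(), set()
  for dep in deps_of(file):
    dep_has_exts, dep_min, dep_covered = minimal_extension_deps(
        dep, deps_of, has_extensions, cache)
    covered |= dep_covered
    to_prune |= dep_covered
    if dep_has_exts:
      min_deps.add(dep)      # the dep itself covers its own min deps
      to_prune |= dep_min
      covered |= dep_min
    else:
      min_deps |= dep_min
  entry = (has_extensions(file), min_deps - to_prune, covered)
  cache[file] = entry
  return entry

deps = {'a': ['b', 'c'], 'b': ['c'], 'c': []}
exts = {'a': False, 'b': True, 'c': True}
# 'b' has extensions and already covers 'c', so only 'b' remains minimal for 'a'.
assert minimal_extension_deps('a', lambda f: deps[f], lambda f: exts[f])[1] == {'b'}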
@ -261,6 +268,7 @@ void FileGenerator::GenerateHeader(io::Printer* printer) {
// compiled, since that will be the versions for the ObjC runtime at that
// time. The constants in the generated code will then get their values at
// at compile time (so checking against the headers being used to compile).
// clang-format off
printer->Print(
"#if GOOGLE_PROTOBUF_OBJC_VERSION < $google_protobuf_objc_version$\n"
"#error This file was generated by a newer version of protoc which is incompatible with your Protocol Buffer library sources.\n"
@ -270,10 +278,12 @@ void FileGenerator::GenerateHeader(io::Printer* printer) {
"#endif\n"
"\n",
"google_protobuf_objc_version", absl::StrCat(GOOGLE_PROTOBUF_OBJC_VERSION));
// clang-format on
// The bundled protos (WKTs) don't use of forward declarations.
bool headers_use_forward_declarations =
generation_options_.headers_use_forward_declarations && !is_bundled_proto_;
generation_options_.headers_use_forward_declarations &&
!is_bundled_proto_;
{
ImportWriter import_writer(
@ -299,6 +309,7 @@ void FileGenerator::GenerateHeader(io::Printer* printer) {
// deprecated-declarations suppression is only needed if some place in this
// proto file is something deprecated or if it references something from
// another file that is deprecated.
// clang-format off
printer->Print(
"// @@protoc_insertion_point(imports)\n"
"\n"
@ -307,6 +318,7 @@ void FileGenerator::GenerateHeader(io::Printer* printer) {
"\n"
"CF_EXTERN_C_BEGIN\n"
"\n");
// clang-format on
std::set<std::string> fwd_decls;
for (const auto& generator : message_generators_) {
@ -338,6 +350,7 @@ void FileGenerator::GenerateHeader(io::Printer* printer) {
// For extensions to chain together, the Root gets created even if there
// are no extensions.
printer->Print(
// clang-format off
"#pragma mark - $root_class_name$\n"
"\n"
"/**\n"
@ -353,6 +366,7 @@ void FileGenerator::GenerateHeader(io::Printer* printer) {
"GPB_FINAL @interface $root_class_name$ : GPBRootObject\n"
"@end\n"
"\n",
// clang-format on
"root_class_name", root_class_name_);
if (!extension_generators_.empty()) {
@ -372,6 +386,7 @@ void FileGenerator::GenerateHeader(io::Printer* printer) {
generator->GenerateMessageHeader(printer);
}
// clang-format off
printer->Print(
"NS_ASSUME_NONNULL_END\n"
"\n"
@ -382,6 +397,7 @@ void FileGenerator::GenerateHeader(io::Printer* printer) {
"// @@protoc_insertion_point(global_scope)\n"
"\n"
"// clange-format on\n");
// clang-format on
}
void FileGenerator::GenerateSource(io::Printer* printer) {
@ -401,11 +417,12 @@ void FileGenerator::GenerateSource(io::Printer* printer) {
}
std::vector<const FileDescriptor*> deps_with_extensions =
common_state_.CollectMinimalFileDepsContainingExtensions(file_);
common_state_.CollectMinimalFileDepsContainingExtensions(file_);
// The bundled protos (WKTs) don't use forward declarations.
bool headers_use_forward_declarations =
generation_options_.headers_use_forward_declarations && !is_bundled_proto_;
generation_options_.headers_use_forward_declarations &&
!is_bundled_proto_;
{
ImportWriter import_writer(
@ -426,7 +443,7 @@ void FileGenerator::GenerateSource(io::Printer* printer) {
public_import_names.insert(file_->public_dependency(i)->name());
}
for (int i = 0; i < file_->dependency_count(); i++) {
const FileDescriptor *dep = file_->dependency(i);
const FileDescriptor* dep = file_->dependency(i);
bool public_import = (public_import_names.count(dep->name()) != 0);
if (!public_import) {
import_writer.AddFile(dep, header_extension);
@ -471,11 +488,13 @@ void FileGenerator::GenerateSource(io::Printer* printer) {
// another file that is deprecated.
// dollar-in-identifier-extension is needed because we use references to
// objc class names that have $ in identifiers.
// clang-format off
printer->Print(
"// @@protoc_insertion_point(imports)\n"
"\n"
"#pragma clang diagnostic push\n"
"#pragma clang diagnostic ignored \"-Wdeprecated-declarations\"\n");
// clang-format on
if (includes_oneof) {
// The generated code for oneof's uses direct ivar access, suppress the
// warning in case developer turn that on in the context they compile the
@ -484,17 +503,20 @@ void FileGenerator::GenerateSource(io::Printer* printer) {
"#pragma clang diagnostic ignored \"-Wdirect-ivar-access\"\n");
}
if (!fwd_decls.empty()) {
// clang-format off
printer->Print(
"#pragma clang diagnostic ignored \"-Wdollar-in-identifier-extension\"\n");
"#pragma clang diagnostic ignored \"-Wdollar-in-identifier-extension\"\n");
// clang-format on
}
printer->Print(
"\n");
printer->Print("\n");
if (!fwd_decls.empty()) {
// clang-format off
printer->Print(
"#pragma mark - Objective C Class declarations\n"
"// Forward declarations of Objective C classes that we can use as\n"
"// static values in struct initializers.\n"
"// We don't use [Foo class] because it is not a static value.\n");
// clang-format on
}
for (const auto& i : fwd_decls) {
printer->Print("$value$\n", "value", i);
@ -503,9 +525,11 @@ void FileGenerator::GenerateSource(io::Printer* printer) {
printer->Print("\n");
}
printer->Print(
// clang-format off
"#pragma mark - $root_class_name$\n"
"\n"
"@implementation $root_class_name$\n\n",
// clang-format on
"root_class_name", root_class_name_);
const bool file_contains_extensions = FileContainsExtensions(file_);
@ -513,6 +537,7 @@ void FileGenerator::GenerateSource(io::Printer* printer) {
// If there were any extensions or this file has any dependencies, output
// a registry to override to create the file specific registry.
if (file_contains_extensions || !deps_with_extensions.empty()) {
// clang-format off
printer->Print(
"+ (GPBExtensionRegistry*)extensionRegistry {\n"
" // This is called by +initialize so there is no need to worry\n"
@ -521,13 +546,13 @@ void FileGenerator::GenerateSource(io::Printer* printer) {
" if (!registry) {\n"
" GPB_DEBUG_CHECK_RUNTIME_VERSIONS();\n"
" registry = [[GPBExtensionRegistry alloc] init];\n");
// clang-format on
printer->Indent();
printer->Indent();
if (file_contains_extensions) {
printer->Print(
"static GPBExtensionDescription descriptions[] = {\n");
printer->Print("static GPBExtensionDescription descriptions[] = {\n");
printer->Indent();
for (const auto& generator : extension_generators_) {
generator->GenerateStaticVariablesInitialization(printer);
@ -536,6 +561,7 @@ void FileGenerator::GenerateSource(io::Printer* printer) {
generator->GenerateStaticVariablesInitialization(printer);
}
printer->Outdent();
// clang-format off
printer->Print(
"};\n"
"for (size_t i = 0; i < sizeof(descriptions) / sizeof(descriptions[0]); ++i) {\n"
@ -546,15 +572,20 @@ void FileGenerator::GenerateSource(io::Printer* printer) {
" [self globallyRegisterExtension:extension];\n"
" [extension release];\n"
"}\n");
// clang-format on
}
if (deps_with_extensions.empty()) {
// clang-format off
printer->Print(
"// None of the imports (direct or indirect) defined extensions, so no need to add\n"
"// them to this registry.\n");
// clang-format on
} else {
// clang-format off
printer->Print(
"// Merge in the imports (direct or indirect) that defined extensions.\n");
// clang-format on
for (std::vector<const FileDescriptor*>::iterator iter =
deps_with_extensions.begin();
iter != deps_with_extensions.end(); ++iter) {
@ -568,19 +599,25 @@ void FileGenerator::GenerateSource(io::Printer* printer) {
printer->Outdent();
printer->Outdent();
// clang-format off
printer->Print(
" }\n"
" return registry;\n"
"}\n");
// clang-format on
} else {
if (file_->dependency_count() > 0) {
// clang-format off
printer->Print(
"// No extensions in the file and none of the imports (direct or indirect)\n"
"// defined extensions, so no need to generate +extensionRegistry.\n");
// clang-format on
} else {
// clang-format off
printer->Print(
"// No extensions in the file and no imports, so no need to generate\n"
"// +extensionRegistry.\n");
// clang-format on
}
}
@ -603,7 +640,9 @@ void FileGenerator::GenerateSource(io::Printer* printer) {
vars["syntax"] = "GPBFileSyntaxProto3";
break;
}
printer->Print(vars,
// clang-format off
printer->Print(
vars,
"#pragma mark - $root_class_name$_FileDescriptor\n"
"\n"
"static GPBFileDescriptor *$root_class_name$_FileDescriptor(void) {\n"
@ -612,23 +651,30 @@ void FileGenerator::GenerateSource(io::Printer* printer) {
" static GPBFileDescriptor *descriptor = NULL;\n"
" if (!descriptor) {\n"
" GPB_DEBUG_CHECK_RUNTIME_VERSIONS();\n");
// clang-format on
if (!vars["objc_prefix"].empty()) {
// clang-format off
printer->Print(
vars,
" descriptor = [[GPBFileDescriptor alloc] initWithPackage:@\"$package$\"\n"
" objcPrefix:@\"$objc_prefix$\"\n"
" syntax:$syntax$];\n");
// clang-format on
} else {
// clang-format off
printer->Print(
vars,
" descriptor = [[GPBFileDescriptor alloc] initWithPackage:@\"$package$\"\n"
" syntax:$syntax$];\n");
// clang-format on
}
// clang-format off
printer->Print(
" }\n"
" return descriptor;\n"
"}\n"
"\n");
// clang-format on
}
for (const auto& generator : enum_generators_) {
@ -638,13 +684,15 @@ void FileGenerator::GenerateSource(io::Printer* printer) {
generator->GenerateSource(printer);
}
// clang-format off
printer->Print(
"\n"
"#pragma clang diagnostic pop\n"
"\n"
"// @@protoc_insertion_point(global_scope)\n"
"\n"
"// clange-format on\n");
"\n"
"#pragma clang diagnostic pop\n"
"\n"
"// @@protoc_insertion_point(global_scope)\n"
"\n"
"// clang-format on\n");
// clang-format on
}
// Helper to print the import of the runtime support at the top of generated
@ -655,9 +703,8 @@ void FileGenerator::PrintFileRuntimePreamble(
const std::vector<std::string>& headers_to_import) const {
printer->Print(
"// Generated by the protocol buffer compiler. DO NOT EDIT!\n"
"// source: $filename$\n"
"\n"
"// clang-format off\n"
"// source: $filename$\n"
"\n",
"filename", file_->name());
@ -670,14 +717,13 @@ void FileGenerator::PrintFileRuntimePreamble(
import_prefix += "/";
}
for (const auto& header : headers_to_import) {
printer->Print(
"#import \"$import_prefix$$header$\"\n",
"import_prefix", import_prefix,
"header", header);
printer->Print("#import \"$import_prefix$$header$\"\n", "import_prefix",
import_prefix, "header", header);
}
} else {
ImportWriter::PrintRuntimeImports(
printer, headers_to_import, generation_options_.runtime_import_prefix, true);
ImportWriter::PrintRuntimeImports(printer, headers_to_import,
generation_options_.runtime_import_prefix,
true);
}
printer->Print("\n");
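Nearly every block in this file funnels through io::Printer's $variable$ substitution. A small self-contained sketch of that mechanism, assuming the classic Printer(stream, '$') API used throughout this file; the import prefix and header name are example values, not anything this generator emits verbatim:

#include <iostream>
#include <string>

#include "google/protobuf/io/printer.h"
#include "google/protobuf/io/zero_copy_stream_impl_lite.h"

int main() {
  std::string output;
  {
    google::protobuf::io::StringOutputStream stream(&output);
    google::protobuf::io::Printer printer(&stream, '$');
    // "$name$" placeholders are replaced by the trailing name/value pairs,
    // the same substitution used by the Print calls above.
    printer.Print("#import \"$import_prefix$$header$\"\n",
                  "import_prefix", "runtime/",
                  "header", "GPBProtocolBuffers.h");
  }  // The Printer flushes into `output` when it goes out of scope.
  std::cout << output;
}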

View File

@ -35,6 +35,7 @@
#include <unordered_map>
#include <unordered_set>
#include <vector>
#include "google/protobuf/compiler/objectivec/options.h"
#include "google/protobuf/descriptor.h"
#include "google/protobuf/io/printer.h"
@ -66,7 +67,8 @@ class FileGenerator {
// have extensions.
std::unordered_set<const FileDescriptor*> covered_deps;
};
const MinDepsEntry& CollectMinimalFileDepsContainingExtensionsInternal(const FileDescriptor* file);
const MinDepsEntry& CollectMinimalFileDepsContainingExtensionsInternal(
const FileDescriptor* file);
std::unordered_map<const FileDescriptor*, MinDepsEntry> deps_info_cache_;
};

View File

@ -39,8 +39,8 @@
#include "absl/strings/str_split.h"
#include "absl/strings/strip.h"
#include "google/protobuf/compiler/objectivec/file.h"
#include "google/protobuf/compiler/objectivec/names.h"
#include "google/protobuf/compiler/objectivec/helpers.h"
#include "google/protobuf/compiler/objectivec/names.h"
#include "google/protobuf/io/printer.h"
#include "google/protobuf/io/zero_copy_stream.h"
@ -75,9 +75,7 @@ ObjectiveCGenerator::ObjectiveCGenerator() {}
ObjectiveCGenerator::~ObjectiveCGenerator() {}
bool ObjectiveCGenerator::HasGenerateAll() const {
return true;
}
bool ObjectiveCGenerator::HasGenerateAll() const { return true; }
bool ObjectiveCGenerator::Generate(const FileDescriptor* file,
const std::string& parameter,
@ -97,7 +95,8 @@ bool ObjectiveCGenerator::GenerateAll(
// options along with their values. If the option appears multiple times, only
// the last value will be considered.
//
// e.g. protoc ... --objc_opt=expected_prefixes=file.txt,generate_for_named_framework=MyFramework
// e.g. protoc ...
// --objc_opt=expected_prefixes=file.txt,generate_for_named_framework=MyFramework
Options validation_options;
GenerationOptions generation_options;
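For illustration, the option string documented above is a comma-separated list of key=value pairs in which the last occurrence of a key wins. A hedged sketch of that shape using absl::StrSplit; this is not the generator's actual parameter parser, and the option values are placeholders:

#include <iostream>
#include <map>
#include <string>
#include <utility>

#include "absl/strings/str_split.h"

int main() {
  // Mirrors the documented form:
  //   --objc_opt=expected_prefixes=file.txt,generate_for_named_framework=MyFramework
  const std::string raw =
      "expected_prefixes=file.txt,generate_for_named_framework=MyFramework";

  std::map<std::string, std::string> options;
  for (absl::string_view piece : absl::StrSplit(raw, ',', absl::SkipEmpty())) {
    // Split each piece into key and value at the first '='.
    std::pair<std::string, std::string> kv =
        absl::StrSplit(piece, absl::MaxSplits('=', 1));
    options[kv.first] = kv.second;  // later duplicates overwrite earlier ones
  }
  for (const auto& kv : options)
    std::cout << kv.first << " -> " << kv.second << "\n";
}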
@ -127,8 +126,8 @@ bool ObjectiveCGenerator::GenerateAll(
// A semicolon delimited string that lists the paths of .proto files to
// exclude from the package prefix validations (expected_prefixes_path).
// This is provided as an "out", to skip some files being checked.
for (absl::string_view split_piece : absl::StrSplit(
options[i].second, ";", absl::SkipEmpty())) {
for (absl::string_view split_piece :
absl::StrSplit(options[i].second, ";", absl::SkipEmpty())) {
validation_options.expected_prefixes_suppressions.push_back(
std::string(split_piece));
}
@ -142,7 +141,8 @@ bool ObjectiveCGenerator::GenerateAll(
// Default is "no".
if (!StringToBool(options[i].second,
&validation_options.prefixes_must_be_registered)) {
*error = "error: Unknown value for prefixes_must_be_registered: " + options[i].second;
*error = "error: Unknown value for prefixes_must_be_registered: " +
options[i].second;
return false;
}
} else if (options[i].first == "require_prefixes") {
@ -154,7 +154,8 @@ bool ObjectiveCGenerator::GenerateAll(
// Default is "no".
if (!StringToBool(options[i].second,
&validation_options.require_prefixes)) {
*error = "error: Unknown value for require_prefixes: " + options[i].second;
*error =
"error: Unknown value for require_prefixes: " + options[i].second;
return false;
}
} else if (options[i].first == "generate_for_named_framework") {
@ -167,7 +168,8 @@ bool ObjectiveCGenerator::GenerateAll(
// the "default" framework name used for everything that wasn't mapped by
// the mapping file.
generation_options.generate_for_named_framework = options[i].second;
} else if (options[i].first == "named_framework_to_proto_path_mappings_path") {
} else if (options[i].first ==
"named_framework_to_proto_path_mappings_path") {
// Path to find a file containing the list of framework names and proto
// files. The generator uses this to decide if a proto file
// referenced should use a framework style import vs. a user level import
@ -188,7 +190,8 @@ bool ObjectiveCGenerator::GenerateAll(
// mappings file, it will use the default framework name if one was passed
// with generate_for_named_framework, or the relative path to its include
// path otherwise.
generation_options.named_framework_to_proto_path_mappings_path = options[i].second;
generation_options.named_framework_to_proto_path_mappings_path =
options[i].second;
} else if (options[i].first == "runtime_import_prefix") {
// Path to use as a prefix on #imports of runtime provided headers in the
// generated files. When integrating ObjC protos into a build system,
@ -198,8 +201,9 @@ bool ObjectiveCGenerator::GenerateAll(
std::string(absl::StripSuffix(options[i].second, "/"));
} else if (options[i].first == "package_to_prefix_mappings_path") {
// Path to use for when loading the objc class prefix mappings to use.
// The `objc_class_prefix` file option is always honored first if one is present.
// This option also has precedent over the use_package_as_prefix option.
// The `objc_class_prefix` file option is always honored first if one is
// present. This option also has precedent over the use_package_as_prefix
// option.
//
// The format of the file is:
// - An entry is a line of "package=prefix".
@ -243,7 +247,8 @@ bool ObjectiveCGenerator::GenerateAll(
} else if (options[i].first == "headers_use_forward_declarations") {
if (!StringToBool(options[i].second,
&generation_options.headers_use_forward_declarations)) {
*error = "error: Unknown value for headers_use_forward_declarations: " + options[i].second;
*error = "error: Unknown value for headers_use_forward_declarations: " +
options[i].second;
return false;
}
} else {

View File

@ -34,6 +34,7 @@
#define GOOGLE_PROTOBUF_COMPILER_OBJECTIVEC_GENERATOR_H__
#include <string>
#include "google/protobuf/compiler/code_generator.h"
#include "google/protobuf/descriptor.h"

View File

@ -28,16 +28,17 @@
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#include "google/protobuf/compiler/objectivec/helpers.h"
#include "google/protobuf/compiler/code_generator.h"
#include "google/protobuf/compiler/objectivec/names.h"
#include "google/protobuf/io/printer.h"
#include "google/protobuf/io/zero_copy_stream_impl.h"
#include "google/protobuf/stubs/strutil.h"
#include "absl/strings/ascii.h"
#include "absl/strings/escaping.h"
#include "absl/strings/str_split.h"
#include "absl/strings/str_replace.h"
#include "google/protobuf/compiler/objectivec/helpers.h"
#include "google/protobuf/compiler/objectivec/names.h"
#include "google/protobuf/io/printer.h"
#include "google/protobuf/io/zero_copy_stream_impl.h"
#include "google/protobuf/stubs/common.h"
// NOTE: src/google/protobuf/compiler/plugin.cc makes use of cerr for some
@ -55,7 +56,7 @@ std::string EscapeTrigraphs(absl::string_view to_escape) {
namespace {
std::string GetZeroEnumNameForFlagType(const FlagType flag_type) {
switch(flag_type) {
switch (flag_type) {
case FLAGTYPE_DESCRIPTOR_INITIALIZATION:
return "GPBDescriptorInitializationFlag_None";
case FLAGTYPE_EXTENSION:
@ -69,7 +70,7 @@ std::string GetZeroEnumNameForFlagType(const FlagType flag_type) {
}
std::string GetEnumNameForFlagType(const FlagType flag_type) {
switch(flag_type) {
switch (flag_type) {
case FLAGTYPE_DESCRIPTOR_INITIALIZATION:
return "GPBDescriptorInitializationFlags";
case FLAGTYPE_EXTENSION:
@ -82,8 +83,7 @@ std::string GetEnumNameForFlagType(const FlagType flag_type) {
}
}
std::string HandleExtremeFloatingPoint(std::string val,
bool add_float_suffix) {
std::string HandleExtremeFloatingPoint(std::string val, bool add_float_suffix) {
if (val == "nan") {
return "NAN";
} else if (val == "inf") {
@ -202,7 +202,7 @@ std::string GPBGenericValueFieldName(const FieldDescriptor* field) {
// Returns the field within the GPBGenericValue union to use for the given
// field.
if (field->is_repeated()) {
return "valueMessage";
return "valueMessage";
}
switch (field->cpp_type()) {
case FieldDescriptor::CPPTYPE_INT32:
@ -237,7 +237,6 @@ std::string GPBGenericValueFieldName(const FieldDescriptor* field) {
return std::string();
}
std::string DefaultValue(const FieldDescriptor* field) {
// Repeated fields don't have defaults.
if (field->is_repeated()) {
@ -336,10 +335,10 @@ std::string ObjCClassDeclaration(const std::string& class_name) {
}
std::string BuildCommentsString(const SourceLocation& location,
bool prefer_single_line) {
bool prefer_single_line) {
const std::string& comments = location.leading_comments.empty()
? location.trailing_comments
: location.leading_comments;
? location.trailing_comments
: location.leading_comments;
std::vector<std::string> lines;
lines = absl::StrSplit(comments, "\n", absl::AllowEmpty());
while (!lines.empty() && lines.back().empty()) {
@ -388,7 +387,6 @@ std::string BuildCommentsString(const SourceLocation& location,
return final_comments;
}
} // namespace objectivec
} // namespace compiler
} // namespace protobuf

View File

@ -79,10 +79,33 @@ inline ObjectiveCType GetObjectiveCType(const FieldDescriptor* field) {
return GetObjectiveCType(field->type());
}
inline bool IsPrimitiveType(const FieldDescriptor* field) {
ObjectiveCType type = GetObjectiveCType(field);
switch (type) {
case OBJECTIVECTYPE_INT32:
case OBJECTIVECTYPE_UINT32:
case OBJECTIVECTYPE_INT64:
case OBJECTIVECTYPE_UINT64:
case OBJECTIVECTYPE_FLOAT:
case OBJECTIVECTYPE_DOUBLE:
case OBJECTIVECTYPE_BOOLEAN:
case OBJECTIVECTYPE_ENUM:
return true;
break;
default:
return false;
}
}
inline bool IsReferenceType(const FieldDescriptor* field) {
return !IsPrimitiveType(field);
}
std::string GPBGenericValueFieldName(const FieldDescriptor* field);
std::string DefaultValue(const FieldDescriptor* field);
std::string BuildFlagsString(const FlagType type, const std::vector<std::string>& strings);
std::string BuildFlagsString(const FlagType type,
const std::vector<std::string>& strings);
// Returns a symbol that can be used in C code to refer to an Objective C
// class without initializing the class.
@ -134,7 +157,6 @@ std::string GetOptionalDeprecatedAttribute(const TDescriptor* descriptor,
}
}
} // namespace objectivec
} // namespace compiler
} // namespace protobuf

View File

@ -29,10 +29,11 @@
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#include "google/protobuf/compiler/objectivec/import_writer.h"
#include "absl/strings/ascii.h"
#include "google/protobuf/compiler/objectivec/line_consumer.h"
#include "google/protobuf/compiler/objectivec/names.h"
#include "google/protobuf/io/printer.h"
#include "absl/strings/ascii.h"
// NOTE: src/google/protobuf/compiler/plugin.cc makes use of cerr for some
// error cases, so it seems to be ok to use as a back door for errors.
@ -46,17 +47,19 @@ namespace {
class ProtoFrameworkCollector : public LineConsumer {
public:
ProtoFrameworkCollector(std::map<std::string, std::string>* inout_proto_file_to_framework_name)
ProtoFrameworkCollector(
std::map<std::string, std::string>* inout_proto_file_to_framework_name)
: map_(inout_proto_file_to_framework_name) {}
virtual bool ConsumeLine(const absl::string_view& line, std::string* out_error) override;
virtual bool ConsumeLine(const absl::string_view& line,
std::string* out_error) override;
private:
std::map<std::string, std::string>* map_;
};
bool ProtoFrameworkCollector::ConsumeLine(
const absl::string_view& line, std::string* out_error) {
bool ProtoFrameworkCollector::ConsumeLine(const absl::string_view& line,
std::string* out_error) {
int offset = line.find(':');
if (offset == absl::string_view::npos) {
*out_error =
@ -64,8 +67,10 @@ bool ProtoFrameworkCollector::ConsumeLine(
std::string(line) + "'.";
return false;
}
absl::string_view framework_name = absl::StripAsciiWhitespace(line.substr(0, offset));
absl::string_view proto_file_list = absl::StripAsciiWhitespace(line.substr(offset + 1));
absl::string_view framework_name =
absl::StripAsciiWhitespace(line.substr(0, offset));
absl::string_view proto_file_list =
absl::StripAsciiWhitespace(line.substr(offset + 1));
int start = 0;
while (start < proto_file_list.length()) {
@ -74,16 +79,17 @@ bool ProtoFrameworkCollector::ConsumeLine(
offset = proto_file_list.length();
}
absl::string_view proto_file =
absl::StripAsciiWhitespace(proto_file_list.substr(start, offset - start));
absl::string_view proto_file = absl::StripAsciiWhitespace(
proto_file_list.substr(start, offset - start));
if (!proto_file.empty()) {
std::map<std::string, std::string>::iterator existing_entry =
map_->find(std::string(proto_file));
if (existing_entry != map_->end()) {
std::cerr << "warning: duplicate proto file reference, replacing "
"framework entry for '"
<< std::string(proto_file) << "' with '" << std::string(framework_name)
<< "' (was '" << existing_entry->second << "')." << std::endl;
<< std::string(proto_file) << "' with '"
<< std::string(framework_name) << "' (was '"
<< existing_entry->second << "')." << std::endl;
std::cerr.flush();
}
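ConsumeLine above accepts mapping lines of the form "FrameworkName: file.proto, other.proto": everything before the first ':' is the framework name, the rest is a comma-separated proto file list, with surrounding whitespace trimmed. A small standalone sketch of that line format (the framework and file names are made up):

#include <iostream>
#include <map>
#include <string>

#include "absl/strings/ascii.h"
#include "absl/strings/str_split.h"
#include "absl/strings/string_view.h"

int main() {
  // One line of a named_framework_to_proto_path_mappings file.
  absl::string_view line = "CoolFramework: cool/thing.proto, cool/other.proto";

  std::map<std::string, std::string> proto_file_to_framework_name;

  size_t colon = line.find(':');
  if (colon == absl::string_view::npos) {
    std::cerr << "expected 'Framework: file,file' format\n";
    return 1;
  }
  absl::string_view framework =
      absl::StripAsciiWhitespace(line.substr(0, colon));
  absl::string_view file_list =
      absl::StripAsciiWhitespace(line.substr(colon + 1));

  for (absl::string_view file :
       absl::StrSplit(file_list, ',', absl::SkipEmpty())) {
    file = absl::StripAsciiWhitespace(file);
    proto_file_to_framework_name[std::string(file)] = std::string(framework);
  }

  for (const auto& entry : proto_file_to_framework_name) {
    std::cout << entry.first << " -> " << entry.second << "\n";
  }
}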
@ -141,15 +147,14 @@ void ImportWriter::AddFile(const FileDescriptor* file,
proto_file_to_framework_name_.find(file->name());
if (proto_lookup != proto_file_to_framework_name_.end()) {
other_framework_imports_.push_back(
proto_lookup->second + "/" +
FilePathBasename(file) + header_extension);
proto_lookup->second + "/" + FilePathBasename(file) + header_extension);
return;
}
if (!generate_for_named_framework_.empty()) {
other_framework_imports_.push_back(
generate_for_named_framework_ + "/" +
FilePathBasename(file) + header_extension);
other_framework_imports_.push_back(generate_for_named_framework_ + "/" +
FilePathBasename(file) +
header_extension);
return;
}
@ -172,9 +177,7 @@ void ImportWriter::Print(io::Printer* printer) const {
for (std::vector<std::string>::const_iterator iter =
other_framework_imports_.begin();
iter != other_framework_imports_.end(); ++iter) {
printer->Print(
"#import <$header$>\n",
"header", *iter);
printer->Print("#import <$header$>\n", "header", *iter);
}
add_blank_line = true;
@ -187,9 +190,7 @@ void ImportWriter::Print(io::Printer* printer) const {
for (std::vector<std::string>::const_iterator iter = other_imports_.begin();
iter != other_imports_.end(); ++iter) {
printer->Print(
"#import \"$header$\"\n",
"header", *iter);
printer->Print("#import \"$header$\"\n", "header", *iter);
}
}
}
@ -200,10 +201,8 @@ void ImportWriter::PrintRuntimeImports(
// Given an override, use that.
if (!runtime_import_prefix.empty()) {
for (const auto& header : header_to_import) {
printer->Print(
" #import \"$import_prefix$/$header$\"\n",
"import_prefix", runtime_import_prefix,
"header", header);
printer->Print(" #import \"$import_prefix$/$header$\"\n", "import_prefix",
runtime_import_prefix, "header", header);
}
return;
}
@ -213,33 +212,27 @@ void ImportWriter::PrintRuntimeImports(
if (default_cpp_symbol) {
printer->Print(
// clang-format off
"// This CPP symbol can be defined to use imports that match up to the framework\n"
"// imports needed when using CocoaPods.\n"
"#if !defined($cpp_symbol$)\n"
" #define $cpp_symbol$ 0\n"
"#endif\n"
"\n",
// clang-format on
"cpp_symbol", cpp_symbol);
}
printer->Print(
"#if $cpp_symbol$\n",
"cpp_symbol", cpp_symbol);
printer->Print("#if $cpp_symbol$\n", "cpp_symbol", cpp_symbol);
for (const auto& header : header_to_import) {
printer->Print(
" #import <$framework_name$/$header$>\n",
"framework_name", framework_name,
"header", header);
printer->Print(" #import <$framework_name$/$header$>\n", "framework_name",
framework_name, "header", header);
}
printer->Print(
"#else\n");
printer->Print("#else\n");
for (const auto& header : header_to_import) {
printer->Print(
" #import \"$header$\"\n",
"header", header);
printer->Print(" #import \"$header$\"\n", "header", header);
}
printer->Print(
"#endif\n");
printer->Print("#endif\n");
}
void ImportWriter::ParseFrameworkMappings() {
@ -250,10 +243,11 @@ void ImportWriter::ParseFrameworkMappings() {
ProtoFrameworkCollector collector(&proto_file_to_framework_name_);
std::string parse_error;
if (!ParseSimpleFile(named_framework_to_proto_path_mappings_path_,
&collector, &parse_error)) {
std::cerr << "error parsing " << named_framework_to_proto_path_mappings_path_
<< " : " << parse_error << std::endl;
if (!ParseSimpleFile(named_framework_to_proto_path_mappings_path_, &collector,
&parse_error)) {
std::cerr << "error parsing "
<< named_framework_to_proto_path_mappings_path_ << " : "
<< parse_error << std::endl;
std::cerr.flush();
}
}

View File

@ -55,10 +55,10 @@ class ImportWriter {
void AddFile(const FileDescriptor* file, const std::string& header_extension);
void Print(io::Printer* printer) const;
static void PrintRuntimeImports(io::Printer* printer,
const std::vector<std::string>& header_to_import,
const std::string& runtime_import_prefix,
bool default_cpp_symbol = false);
static void PrintRuntimeImports(
io::Printer* printer, const std::vector<std::string>& header_to_import,
const std::string& runtime_import_prefix,
bool default_cpp_symbol = false);
private:
void ParseFrameworkMappings();
@ -75,7 +75,6 @@ class ImportWriter {
std::vector<std::string> other_imports_;
};
} // namespace objectivec
} // namespace compiler
} // namespace protobuf

View File

@ -61,16 +61,14 @@ namespace objectivec {
namespace posix {
#ifdef _WIN32
using google::protobuf::io::win32::open;
#else // !_WIN32
#else // !_WIN32
using ::open;
#endif // _WIN32
} // namespace posix
namespace {
bool ascii_isnewline(char c) {
return c == '\n' || c == '\r';
}
bool ascii_isnewline(char c) { return c == '\n' || c == '\r'; }
bool ReadLine(absl::string_view* input, absl::string_view* line) {
for (int len = 0; len < input->size(); ++len) {
@ -149,7 +147,8 @@ bool Parser::Finish(std::string* out_error) {
if (!leftover_.empty() && !ParseChunk("\n", out_error)) {
return false;
}
// This really should never fail if ParseChunk succeeded, but check to be sure.
// This really should never fail if ParseChunk succeeded, but check to be
// sure.
if (!leftover_.empty()) {
*out_error = "ParseSimple Internal error: finished with pending data.";
return false;
@ -157,8 +156,10 @@ bool Parser::Finish(std::string* out_error) {
return true;
}
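Finish() above exists because data reaches the parser in arbitrary chunks, so a trailing line may never see its newline. A stripped-down sketch of that chunk-plus-leftover pattern; the class and member names below are illustrative, not the real Parser, and comment/whitespace handling is omitted:

#include <iostream>
#include <string>
#include <vector>

#include "absl/strings/string_view.h"

// Data arrives in arbitrary-sized chunks; complete lines are collected and a
// trailing partial line is carried over in `leftover_` until more data (or
// Finish()) arrives.
class ChunkedLineSplitter {
 public:
  explicit ChunkedLineSplitter(std::vector<std::string>* out) : out_(out) {}

  void ParseChunk(absl::string_view chunk) {
    std::string data = leftover_ + std::string(chunk);
    size_t start = 0;
    size_t newline;
    while ((newline = data.find('\n', start)) != std::string::npos) {
      out_->push_back(data.substr(start, newline - start));
      start = newline + 1;
    }
    leftover_ = data.substr(start);  // partial line, waits for more data
  }

  void Finish() {
    // Like Parser::Finish(): flush any pending partial line as a final line.
    if (!leftover_.empty()) ParseChunk("\n");
  }

 private:
  std::vector<std::string>* out_;
  std::string leftover_;
};

int main() {
  std::vector<std::string> lines;
  ChunkedLineSplitter splitter(&lines);
  splitter.ParseChunk("abc\nde");  // "abc" is complete, "de" stays pending
  splitter.ParseChunk("f\n");      // completes "def"
  splitter.Finish();
  for (const auto& l : lines) std::cout << l << "\n";  // abc, def
}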
std::string FullErrorString(const std::string& name, int line_num, const std::string& msg) {
return std::string("error: ") + name + " Line " + absl::StrCat(line_num) + ", " + msg;
std::string FullErrorString(const std::string& name, int line_num,
const std::string& msg) {
return std::string("error: ") + name + " Line " + absl::StrCat(line_num) +
", " + msg;
}
} // namespace
@ -186,8 +187,7 @@ bool ParseSimpleFile(const std::string& path, LineConsumer* line_consumer,
bool ParseSimpleStream(io::ZeroCopyInputStream& input_stream,
const std::string& stream_name,
LineConsumer* line_consumer,
std::string* out_error) {
LineConsumer* line_consumer, std::string* out_error) {
std::string local_error;
Parser parser(line_consumer);
const void* buf;
@ -197,9 +197,11 @@ bool ParseSimpleStream(io::ZeroCopyInputStream& input_stream,
continue;
}
if (!parser.ParseChunk(absl::string_view(static_cast<const char*>(buf), buf_len),
&local_error)) {
*out_error = FullErrorString(stream_name, parser.last_line(), local_error);
if (!parser.ParseChunk(
absl::string_view(static_cast<const char*>(buf), buf_len),
&local_error)) {
*out_error =
FullErrorString(stream_name, parser.last_line(), local_error);
return false;
}
}

View File

@ -52,7 +52,8 @@ class PROTOC_EXPORT LineConsumer {
public:
LineConsumer();
virtual ~LineConsumer();
virtual bool ConsumeLine(const absl::string_view& line, std::string* out_error) = 0;
virtual bool ConsumeLine(const absl::string_view& line,
std::string* out_error) = 0;
};
bool PROTOC_EXPORT ParseSimpleFile(const std::string& path,

View File

@ -29,11 +29,12 @@
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#include "google/protobuf/compiler/objectivec/line_consumer.h"
#include "google/protobuf/io/zero_copy_stream_impl_lite.h"
#include "absl/strings/str_cat.h"
#include <gtest/gtest.h>
#include "absl/strings/str_cat.h"
#include "google/protobuf/io/zero_copy_stream_impl_lite.h"
namespace google {
namespace protobuf {
namespace compiler {
@ -43,11 +44,12 @@ namespace {
class TestLineCollector : public LineConsumer {
public:
explicit TestLineCollector(std::vector<std::string>* inout_lines,
const std::string* reject_line = nullptr,
bool skip_msg = false)
: lines_(inout_lines), reject_(reject_line), skip_msg_(skip_msg) {}
const std::string* reject_line = nullptr,
bool skip_msg = false)
: lines_(inout_lines), reject_(reject_line), skip_msg_(skip_msg) {}
bool ConsumeLine(const absl::string_view& line, std::string* out_error) override {
bool ConsumeLine(const absl::string_view& line,
std::string* out_error) override {
if (reject_ && *reject_ == line) {
if (!skip_msg_) {
*out_error = std::string("Rejected '") + *reject_ + "'";
@ -71,19 +73,20 @@ const int kBlockSizeCount = ABSL_ARRAYSIZE(kBlockSizes);
TEST(ObjCHelper, ParseSimple_BasicsSuccess) {
const std::vector<std::pair<std::string, std::vector<std::string>>> tests = {
{"", {}},
{"a", {"a"}},
{"a c", {"a c"}},
{" a c ", {"a c"}},
{"\ta c ", {"a c"}},
{"abc\n", {"abc"}},
{"abc\nd f", {"abc", "d f"}},
{"\n abc \n def \n\n", {"abc", "def"}},
{"", {}},
{"a", {"a"}},
{"a c", {"a c"}},
{" a c ", {"a c"}},
{"\ta c ", {"a c"}},
{"abc\n", {"abc"}},
{"abc\nd f", {"abc", "d f"}},
{"\n abc \n def \n\n", {"abc", "def"}},
};
for (const auto& test : tests) {
for (int i = 0; i < kBlockSizeCount; i++) {
io::ArrayInputStream input(test.first.data(), test.first.size(), kBlockSizes[i]);
io::ArrayInputStream input(test.first.data(), test.first.size(),
kBlockSizes[i]);
std::string err_str;
std::vector<std::string> lines;
TestLineCollector collector(&lines);
@ -96,22 +99,23 @@ TEST(ObjCHelper, ParseSimple_BasicsSuccess) {
TEST(ObjCHelper, ParseSimple_DropsComments) {
const std::vector<std::pair<std::string, std::vector<std::string>>> tests = {
{"# nothing", {}},
{"#", {}},
{"##", {}},
{"\n# nothing\n", {}},
{"a # same line", {"a"}},
{"a # same line\n", {"a"}},
{"a\n# line\nc", {"a", "c"}},
{"# n o t # h i n g #", {}},
{"## n o # t h i n g #", {}},
{"a# n o t # h i n g #", {"a"}},
{"a\n## n o # t h i n g #", {"a"}},
{"# nothing", {}},
{"#", {}},
{"##", {}},
{"\n# nothing\n", {}},
{"a # same line", {"a"}},
{"a # same line\n", {"a"}},
{"a\n# line\nc", {"a", "c"}},
{"# n o t # h i n g #", {}},
{"## n o # t h i n g #", {}},
{"a# n o t # h i n g #", {"a"}},
{"a\n## n o # t h i n g #", {"a"}},
};
for (const auto& test : tests) {
for (int i = 0; i < kBlockSizeCount; i++) {
io::ArrayInputStream input(test.first.data(), test.first.size(), kBlockSizes[i]);
io::ArrayInputStream input(test.first.data(), test.first.size(),
kBlockSizes[i]);
std::string err_str;
std::vector<std::string> lines;
TestLineCollector collector(&lines);
@ -124,21 +128,22 @@ TEST(ObjCHelper, ParseSimple_DropsComments) {
TEST(ObjCHelper, ParseSimple_RejectLines) {
const std::vector<std::tuple<std::string, std::string, int>> tests = {
std::make_tuple("a\nb\nc", "a", 1),
std::make_tuple("a\nb\nc", "b", 2),
std::make_tuple("a\nb\nc", "c", 3),
std::make_tuple("a\nb\nc\n", "c", 3),
std::make_tuple("a\nb\nc", "a", 1),
std::make_tuple("a\nb\nc", "b", 2),
std::make_tuple("a\nb\nc", "c", 3),
std::make_tuple("a\nb\nc\n", "c", 3),
};
for (const auto& test : tests) {
for (int i = 0; i < kBlockSizeCount; i++) {
io::ArrayInputStream input(std::get<0>(test).data(), std::get<0>(test).size(),
kBlockSizes[i]);
io::ArrayInputStream input(std::get<0>(test).data(),
std::get<0>(test).size(), kBlockSizes[i]);
std::string err_str;
TestLineCollector collector(nullptr, &std::get<1>(test));
EXPECT_FALSE(ParseSimpleStream(input, "dummy", &collector, &err_str));
std::string expected_err =
absl::StrCat("error: dummy Line ", std::get<2>(test), ", Rejected '", std::get<1>(test), "'");
absl::StrCat("error: dummy Line ", std::get<2>(test), ", Rejected '",
std::get<1>(test), "'");
EXPECT_EQ(err_str, expected_err);
}
}
@ -146,22 +151,23 @@ TEST(ObjCHelper, ParseSimple_RejectLines) {
TEST(ObjCHelper, ParseSimple_RejectLinesNoMessage) {
const std::vector<std::tuple<std::string, std::string, int>> tests = {
std::make_tuple("a\nb\nc", "a", 1),
std::make_tuple("a\nb\nc", "b", 2),
std::make_tuple("a\nb\nc", "c", 3),
std::make_tuple("a\nb\nc\n", "c", 3),
std::make_tuple("a\nb\nc", "a", 1),
std::make_tuple("a\nb\nc", "b", 2),
std::make_tuple("a\nb\nc", "c", 3),
std::make_tuple("a\nb\nc\n", "c", 3),
};
for (const auto& test : tests) {
for (int i = 0; i < kBlockSizeCount; i++) {
io::ArrayInputStream input(std::get<0>(test).data(), std::get<0>(test).size(),
kBlockSizes[i]);
io::ArrayInputStream input(std::get<0>(test).data(),
std::get<0>(test).size(), kBlockSizes[i]);
std::string err_str;
TestLineCollector collector(nullptr, &std::get<1>(test), true /* skip msg */);
TestLineCollector collector(nullptr, &std::get<1>(test),
true /* skip msg */);
EXPECT_FALSE(ParseSimpleStream(input, "dummy", &collector, &err_str));
std::string expected_err =
absl::StrCat("error: dummy Line ", std::get<2>(test),
", ConsumeLine failed without setting an error.");
absl::StrCat("error: dummy Line ", std::get<2>(test),
", ConsumeLine failed without setting an error.");
EXPECT_EQ(err_str, expected_err);
}
}

View File

@ -28,12 +28,13 @@
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#include "google/protobuf/compiler/objectivec/map_field.h"
#include <map>
#include <string>
#include "google/protobuf/compiler/objectivec/map_field.h"
#include "google/protobuf/compiler/objectivec/names.h"
#include "google/protobuf/compiler/objectivec/helpers.h"
#include "google/protobuf/compiler/objectivec/names.h"
#include "google/protobuf/io/printer.h"
namespace google {
@ -84,8 +85,7 @@ const char* MapEntryTypeName(const FieldDescriptor* descriptor, bool isKey) {
MapFieldGenerator::MapFieldGenerator(const FieldDescriptor* descriptor)
: RepeatedFieldGenerator(descriptor) {
const FieldDescriptor* key_descriptor =
descriptor->message_type()->map_key();
const FieldDescriptor* key_descriptor = descriptor->message_type()->map_key();
const FieldDescriptor* value_descriptor =
descriptor->message_type()->map_value();
value_field_generator_.reset(FieldGenerator::Make(value_descriptor));
@ -135,8 +135,8 @@ MapFieldGenerator::MapFieldGenerator(const FieldDescriptor* descriptor)
variables_["array_storage_type"] = class_name;
if (value_is_object_type) {
variables_["array_property_type"] =
class_name + "<" +
value_field_generator_->variable("storage_type") + "*>";
class_name + "<" + value_field_generator_->variable("storage_type") +
"*>";
}
}
@ -156,15 +156,15 @@ void MapFieldGenerator::FinishInitialization(void) {
descriptor_->message_type()->map_value();
if (GetObjectiveCType(value_descriptor) == OBJECTIVECTYPE_ENUM) {
variables_["array_comment"] =
"// |" + variables_["name"] + "| values are |" + value_field_generator_->variable("storage_type") + "|\n";
"// |" + variables_["name"] + "| values are |" +
value_field_generator_->variable("storage_type") + "|\n";
}
}
void MapFieldGenerator::DetermineForwardDeclarations(
std::set<std::string>* fwd_decls,
bool include_external_types) const {
RepeatedFieldGenerator::DetermineForwardDeclarations(
fwd_decls, include_external_types);
std::set<std::string>* fwd_decls, bool include_external_types) const {
RepeatedFieldGenerator::DetermineForwardDeclarations(fwd_decls,
include_external_types);
const FieldDescriptor* value_descriptor =
descriptor_->message_type()->map_value();
// Within a file there is no requirement on the order of the messages, so
@ -186,8 +186,8 @@ void MapFieldGenerator::DetermineObjectiveCClassDefinitions(
const FieldDescriptor* value_descriptor =
descriptor_->message_type()->map_value();
if (GetObjectiveCType(value_descriptor) == OBJECTIVECTYPE_MESSAGE) {
fwd_decls->insert(ObjCClassDeclaration(
value_field_generator_->variable("storage_type")));
fwd_decls->insert(
ObjCClassDeclaration(value_field_generator_->variable("storage_type")));
}
}

View File

@ -38,8 +38,8 @@
#include "absl/strings/str_cat.h"
#include "google/protobuf/compiler/objectivec/enum.h"
#include "google/protobuf/compiler/objectivec/extension.h"
#include "google/protobuf/compiler/objectivec/names.h"
#include "google/protobuf/compiler/objectivec/helpers.h"
#include "google/protobuf/compiler/objectivec/names.h"
#include "google/protobuf/compiler/objectivec/text_format_decode_data.h"
#include "google/protobuf/descriptor.pb.h"
#include "google/protobuf/io/printer.h"
@ -152,11 +152,12 @@ struct ExtensionRangeOrdering {
// and return it.
const FieldDescriptor** SortFieldsByNumber(const Descriptor* descriptor) {
const FieldDescriptor** fields =
new const FieldDescriptor* [descriptor->field_count()];
new const FieldDescriptor*[descriptor->field_count()];
for (int i = 0; i < descriptor->field_count(); i++) {
fields[i] = descriptor->field(i);
}
std::sort(fields, fields + descriptor->field_count(), FieldOrderingByNumber());
std::sort(fields, fields + descriptor->field_count(),
FieldOrderingByNumber());
return fields;
}
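SortFieldsByNumber above copies the descriptor's field pointers into an array and sorts only that copy, so the original declaration order stays available for other passes. A minimal standalone version of the pattern, with a toy Field struct standing in for FieldDescriptor:

#include <algorithm>
#include <iostream>
#include <vector>

struct Field {
  int number;
  const char* name;
};

int main() {
  // Stand-ins for descriptor->field(i); declaration order != number order.
  std::vector<Field> fields = {{3, "baz"}, {1, "foo"}, {10, "qux"}, {2, "bar"}};

  // Sort an array of pointers by field number, leaving `fields` untouched.
  std::vector<const Field*> sorted;
  sorted.reserve(fields.size());
  for (const Field& f : fields) sorted.push_back(&f);
  std::sort(sorted.begin(), sorted.end(),
            [](const Field* a, const Field* b) { return a->number < b->number; });

  for (const Field* f : sorted) std::cout << f->number << " " << f->name << "\n";
}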
@ -164,12 +165,12 @@ const FieldDescriptor** SortFieldsByNumber(const Descriptor* descriptor) {
// array and return it.
const FieldDescriptor** SortFieldsByStorageSize(const Descriptor* descriptor) {
const FieldDescriptor** fields =
new const FieldDescriptor* [descriptor->field_count()];
new const FieldDescriptor*[descriptor->field_count()];
for (int i = 0; i < descriptor->field_count(); i++) {
fields[i] = descriptor->field(i);
}
std::sort(fields, fields + descriptor->field_count(),
FieldOrderingByStorageSize());
FieldOrderingByStorageSize());
return fields;
}
@ -200,8 +201,7 @@ MessageGenerator::MessageGenerator(const std::string& root_classname,
for (int i = 0; i < descriptor_->nested_type_count(); i++) {
MessageGenerator* generator =
new MessageGenerator(root_classname_,
descriptor_->nested_type(i));
new MessageGenerator(root_classname_, descriptor_->nested_type(i));
nested_message_generators_.emplace_back(generator);
}
}
@ -220,8 +220,7 @@ void MessageGenerator::GenerateStaticVariablesInitialization(
}
void MessageGenerator::DetermineForwardDeclarations(
std::set<std::string>* fwd_decls,
bool include_external_types) {
std::set<std::string>* fwd_decls, bool include_external_types) {
if (!IsMapEntryMessage(descriptor_)) {
for (int i = 0; i < descriptor_->field_count(); i++) {
const FieldDescriptor* fieldDescriptor = descriptor_->field(i);
@ -305,16 +304,18 @@ void MessageGenerator::GenerateMessageHeader(io::Printer* printer) {
}
printer->Print(
// clang-format off
"#pragma mark - $classname$\n"
"\n",
// clang-format on
"classname", class_name_);
if (descriptor_->field_count()) {
std::unique_ptr<const FieldDescriptor*[]> sorted_fields(
SortFieldsByNumber(descriptor_));
printer->Print("typedef GPB_ENUM($classname$_FieldNumber) {\n",
"classname", class_name_);
printer->Print("typedef GPB_ENUM($classname$_FieldNumber) {\n", "classname",
class_name_);
printer->Indent();
for (int i = 0; i < descriptor_->field_count(); i++) {
@ -339,9 +340,10 @@ void MessageGenerator::GenerateMessageHeader(io::Printer* printer) {
}
printer->Print(
// clang-format off
"$comments$$deprecated_attribute$GPB_FINAL @interface $classname$ : GPBMessage\n\n",
"classname", class_name_,
"deprecated_attribute", deprecated_attribute_,
// clang-format on
"classname", class_name_, "deprecated_attribute", deprecated_attribute_,
"comments", message_comments);
std::vector<char> seen_oneofs(oneof_generators_.size(), 0);
@ -374,8 +376,8 @@ void MessageGenerator::GenerateMessageHeader(io::Printer* printer) {
}
if (descriptor_->extension_count() > 0) {
printer->Print("@interface $classname$ (DynamicMethods)\n\n",
"classname", class_name_);
printer->Print("@interface $classname$ (DynamicMethods)\n\n", "classname",
class_name_);
for (const auto& generator : extension_generators_) {
generator->GenerateMembersHeader(printer);
}
@ -390,20 +392,23 @@ void MessageGenerator::GenerateMessageHeader(io::Printer* printer) {
void MessageGenerator::GenerateSource(io::Printer* printer) {
if (!IsMapEntryMessage(descriptor_)) {
printer->Print(
// clang-format off
"#pragma mark - $classname$\n"
"\n",
// clang-format on
"classname", class_name_);
if (!deprecated_attribute_.empty()) {
// No warnings when compiling the impl of this deprecated class.
// clang-format off
printer->Print(
"#pragma clang diagnostic push\n"
"#pragma clang diagnostic ignored \"-Wdeprecated-implementations\"\n"
"\n");
// clang-format on
}
printer->Print("@implementation $classname$\n\n",
"classname", class_name_);
printer->Print("@implementation $classname$\n\n", "classname", class_name_);
for (const auto& generator : oneof_generators_) {
generator->GeneratePropertyImplementation(printer);
@ -426,7 +431,7 @@ void MessageGenerator::GenerateSource(io::Printer* printer) {
}
std::sort(sorted_extensions.begin(), sorted_extensions.end(),
ExtensionRangeOrdering());
ExtensionRangeOrdering());
// Assign has bits:
// 1. FieldGeneratorMap::CalculateHasBits() loops through the fields seeing
@ -452,11 +457,13 @@ void MessageGenerator::GenerateSource(io::Printer* printer) {
sizeof_has_storage += oneof_generators_.size();
printer->Print(
// clang-format off
"\n"
"typedef struct $classname$__storage_ {\n"
" uint32_t _has_storage_[$sizeof_has_storage$];\n",
"classname", class_name_,
"sizeof_has_storage", absl::StrCat(sizeof_has_storage));
// clang-format on
"classname", class_name_, "sizeof_has_storage",
absl::StrCat(sizeof_has_storage));
printer->Indent();
for (int i = 0; i < descriptor_->field_count(); i++) {
@ -467,13 +474,14 @@ void MessageGenerator::GenerateSource(io::Printer* printer) {
printer->Print("} $classname$__storage_;\n\n", "classname", class_name_);
// clang-format off
printer->Print(
"// This method is threadsafe because it is initially called\n"
"// in +initialize for each subclass.\n"
"+ (GPBDescriptor *)descriptor {\n"
" static GPBDescriptor *descriptor = nil;\n"
" if (!descriptor) {\n");
// clang-format on
TextFormatDecodeData text_format_decode_data;
bool has_fields = descriptor_->field_count() > 0;
@ -487,23 +495,21 @@ void MessageGenerator::GenerateSource(io::Printer* printer) {
if (has_fields) {
printer->Indent();
printer->Indent();
printer->Print(
"static $field_description_type$ fields[] = {\n",
"field_description_type", field_description_type);
printer->Print("static $field_description_type$ fields[] = {\n",
"field_description_type", field_description_type);
printer->Indent();
for (int i = 0; i < descriptor_->field_count(); ++i) {
const FieldGenerator& field_generator =
field_generators_.get(sorted_fields[i]);
field_generator.GenerateFieldDescription(printer, need_defaults);
if (field_generator.needs_textformat_name_support()) {
text_format_decode_data.AddString(sorted_fields[i]->number(),
field_generator.generated_objc_name(),
field_generator.raw_field_name());
text_format_decode_data.AddString(
sorted_fields[i]->number(), field_generator.generated_objc_name(),
field_generator.raw_field_name());
}
}
printer->Outdent();
printer->Print(
"};\n");
printer->Print("};\n");
printer->Outdent();
printer->Outdent();
}
@ -528,9 +534,10 @@ void MessageGenerator::GenerateSource(io::Printer* printer) {
if (descriptor_->options().message_set_wire_format()) {
init_flags.push_back("GPBDescriptorInitializationFlag_WireFormat");
}
vars["init_flags"] = BuildFlagsString(FLAGTYPE_DESCRIPTOR_INITIALIZATION,
init_flags);
vars["init_flags"] =
BuildFlagsString(FLAGTYPE_DESCRIPTOR_INITIALIZATION, init_flags);
// clang-format off
printer->Print(
vars,
" GPBDescriptor *localDescriptor =\n"
@ -541,55 +548,63 @@ void MessageGenerator::GenerateSource(io::Printer* printer) {
" fieldCount:$fields_count$\n"
" storageSize:sizeof($classname$__storage_)\n"
" flags:$init_flags$];\n");
// clang-format on
if (!oneof_generators_.empty()) {
printer->Print(
" static const char *oneofs[] = {\n");
printer->Print(" static const char *oneofs[] = {\n");
for (const auto& generator : oneof_generators_) {
printer->Print(" \"$name$\",\n", "name",
generator->DescriptorName());
}
printer->Print(
// clang-format off
" };\n"
" [localDescriptor setupOneofs:oneofs\n"
" count:(uint32_t)(sizeof(oneofs) / sizeof(char*))\n"
" firstHasIndex:$first_has_index$];\n",
// clang-format on
"first_has_index", oneof_generators_[0]->HasIndexAsString());
}
if (text_format_decode_data.num_entries() != 0) {
const std::string text_format_data_str(text_format_decode_data.Data());
// clang-format off
printer->Print(
"#if !GPBOBJC_SKIP_MESSAGE_TEXTFORMAT_EXTRAS\n"
" static const char *extraTextFormatInfo =");
// clang-format on
static const int kBytesPerLine = 40; // allow for escaping
for (int i = 0; i < text_format_data_str.size(); i += kBytesPerLine) {
printer->Print(
"\n \"$data$\"",
"data", EscapeTrigraphs(
absl::CEscape(text_format_data_str.substr(i, kBytesPerLine))));
printer->Print("\n \"$data$\"", "data",
EscapeTrigraphs(absl::CEscape(
text_format_data_str.substr(i, kBytesPerLine))));
}
// clang-format off
printer->Print(
";\n"
" [localDescriptor setupExtraTextInfo:extraTextFormatInfo];\n"
"#endif // !GPBOBJC_SKIP_MESSAGE_TEXTFORMAT_EXTRAS\n");
// clang-format on
}
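The extraTextFormatInfo block above embeds a binary blob as adjacent C string literals, escaping each 40-byte slice so no generated line becomes unreasonably long. A standalone sketch of that chunk-and-escape emission using absl::CEscape; EscapeTrigraphs is omitted here and the blob is a made-up stand-in for the packed decode data:

#include <iostream>
#include <string>

#include "absl/strings/escaping.h"

int main() {
  // Stand-in for the packed decode data the generator embeds.
  std::string blob(100, '\0');
  for (int i = 0; i < 100; ++i) blob[i] = static_cast<char>(i);

  const int kBytesPerLine = 40;  // keep each emitted literal short
  std::cout << "static const char *extraTextFormatInfo =";
  for (size_t i = 0; i < blob.size(); i += kBytesPerLine) {
    std::cout << "\n    \"" << absl::CEscape(blob.substr(i, kBytesPerLine))
              << "\"";
  }
  std::cout << ";\n";
}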
if (!sorted_extensions.empty()) {
printer->Print(
" static const GPBExtensionRange ranges[] = {\n");
printer->Print(" static const GPBExtensionRange ranges[] = {\n");
for (int i = 0; i < sorted_extensions.size(); i++) {
printer->Print(" { .start = $start$, .end = $end$ },\n",
"start", absl::StrCat(sorted_extensions[i]->start),
"end", absl::StrCat(sorted_extensions[i]->end));
printer->Print(" { .start = $start$, .end = $end$ },\n", "start",
absl::StrCat(sorted_extensions[i]->start), "end",
absl::StrCat(sorted_extensions[i]->end));
}
// clang-format off
printer->Print(
" };\n"
" [localDescriptor setupExtensionRanges:ranges\n"
" count:(uint32_t)(sizeof(ranges) / sizeof(GPBExtensionRange))];\n");
// clang-format on
}
if (descriptor_->containing_type() != NULL) {
std::string containing_class = ClassName(descriptor_->containing_type());
std::string parent_class_ref = ObjCClass(containing_class);
printer->Print(
// clang-format off
" [localDescriptor setupContainingMessageClass:$parent_class_ref$];\n",
// clang-format on
"parent_class_ref", parent_class_ref);
}
std::string suffix_added;
@ -599,6 +614,7 @@ void MessageGenerator::GenerateSource(io::Printer* printer) {
" [localDescriptor setupMessageClassNameSuffix:@\"$suffix$\"];\n",
"suffix", suffix_added);
}
// clang-format off
printer->Print(
" #if defined(DEBUG) && DEBUG\n"
" NSAssert(descriptor == nil, @\"Startup recursed!\");\n"
@ -608,11 +624,14 @@ void MessageGenerator::GenerateSource(io::Printer* printer) {
" return descriptor;\n"
"}\n\n"
"@end\n\n");
// clang-format on
if (!deprecated_attribute_.empty()) {
// clang-format off
printer->Print(
"#pragma clang diagnostic pop\n"
"\n");
// clang-format on
}
for (int i = 0; i < descriptor_->field_count(); i++) {

View File

@ -31,9 +31,10 @@
#ifndef GOOGLE_PROTOBUF_COMPILER_OBJECTIVEC_MESSAGE_H__
#define GOOGLE_PROTOBUF_COMPILER_OBJECTIVEC_MESSAGE_H__
#include <string>
#include <set>
#include <string>
#include <vector>
#include "google/protobuf/compiler/objectivec/field.h"
#include "google/protobuf/compiler/objectivec/oneof.h"
#include "google/protobuf/descriptor.h"

View File

@ -28,12 +28,13 @@
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#include "google/protobuf/compiler/objectivec/message_field.h"
#include <map>
#include <string>
#include "google/protobuf/compiler/objectivec/message_field.h"
#include "google/protobuf/compiler/objectivec/names.h"
#include "google/protobuf/compiler/objectivec/helpers.h"
#include "google/protobuf/compiler/objectivec/names.h"
#include "google/protobuf/io/printer.h"
namespace google {
@ -66,15 +67,14 @@ MessageFieldGenerator::MessageFieldGenerator(const FieldDescriptor* descriptor)
MessageFieldGenerator::~MessageFieldGenerator() {}
void MessageFieldGenerator::DetermineForwardDeclarations(
std::set<std::string>* fwd_decls,
bool include_external_types) const {
ObjCObjFieldGenerator::DetermineForwardDeclarations(
fwd_decls, include_external_types);
std::set<std::string>* fwd_decls, bool include_external_types) const {
ObjCObjFieldGenerator::DetermineForwardDeclarations(fwd_decls,
include_external_types);
// Within a file there is no requirement on the order of the messages, so
// local references need a forward declaration. External files (not WKTs),
// need one when requested.
if ((include_external_types &&
!IsProtobufLibraryBundledProtoFile(descriptor_->message_type()->file())) ||
if ((include_external_types && !IsProtobufLibraryBundledProtoFile(
descriptor_->message_type()->file())) ||
descriptor_->file() == descriptor_->message_type()->file()) {
// Class name is already in "storage_type".
fwd_decls->insert("@class " + variable("storage_type"));
@ -98,15 +98,14 @@ RepeatedMessageFieldGenerator::RepeatedMessageFieldGenerator(
RepeatedMessageFieldGenerator::~RepeatedMessageFieldGenerator() {}
void RepeatedMessageFieldGenerator::DetermineForwardDeclarations(
std::set<std::string>* fwd_decls,
bool include_external_types) const {
RepeatedFieldGenerator::DetermineForwardDeclarations(
fwd_decls, include_external_types);
std::set<std::string>* fwd_decls, bool include_external_types) const {
RepeatedFieldGenerator::DetermineForwardDeclarations(fwd_decls,
include_external_types);
// Within a file there is no requirement on the order of the messages, so
// local references need a forward declaration. External files (not WKTs),
// need one when requested.
if ((include_external_types &&
!IsProtobufLibraryBundledProtoFile(descriptor_->message_type()->file())) ||
if ((include_external_types && !IsProtobufLibraryBundledProtoFile(
descriptor_->message_type()->file())) ||
descriptor_->file() == descriptor_->message_type()->file()) {
// Class name is already in "storage_type".
fwd_decls->insert("@class " + variable("storage_type"));

View File

@ -68,7 +68,8 @@ class RepeatedMessageFieldGenerator : public RepeatedFieldGenerator {
virtual ~RepeatedMessageFieldGenerator();
RepeatedMessageFieldGenerator(const RepeatedMessageFieldGenerator&) = delete;
RepeatedMessageFieldGenerator operator=(const RepeatedMessageFieldGenerator&) = delete;
RepeatedMessageFieldGenerator operator=(
const RepeatedMessageFieldGenerator&) = delete;
public:
virtual void DetermineForwardDeclarations(

View File

@ -28,6 +28,8 @@
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#include "google/protobuf/compiler/objectivec/names.h"
#include <climits>
#include <fstream>
#include <iostream>
@ -35,11 +37,10 @@
#include <unordered_set>
#include <vector>
#include "google/protobuf/compiler/code_generator.h"
#include "absl/strings/ascii.h"
#include "absl/strings/str_split.h"
#include "google/protobuf/compiler/code_generator.h"
#include "google/protobuf/compiler/objectivec/line_consumer.h"
#include "google/protobuf/compiler/objectivec/names.h"
#include "google/protobuf/compiler/objectivec/nsobject_methods.h"
#include "google/protobuf/descriptor.pb.h"
#include "google/protobuf/io/zero_copy_stream_impl.h"
@ -67,7 +68,8 @@ class SimpleLineCollector : public LineConsumer {
explicit SimpleLineCollector(std::unordered_set<std::string>* inout_set)
: set_(inout_set) {}
virtual bool ConsumeLine(const absl::string_view& line, std::string* out_error) override {
virtual bool ConsumeLine(const absl::string_view& line,
std::string* out_error) override {
set_->insert(std::string(line));
return true;
}
@ -78,11 +80,13 @@ class SimpleLineCollector : public LineConsumer {
class PackageToPrefixesCollector : public LineConsumer {
public:
PackageToPrefixesCollector(const std::string &usage,
std::map<std::string, std::string>* inout_package_to_prefix_map)
PackageToPrefixesCollector(
const std::string& usage,
std::map<std::string, std::string>* inout_package_to_prefix_map)
: usage_(usage), prefix_map_(inout_package_to_prefix_map) {}
virtual bool ConsumeLine(const absl::string_view& line, std::string* out_error) override;
virtual bool ConsumeLine(const absl::string_view& line,
std::string* out_error) override;
private:
const std::string usage_;
@ -93,7 +97,9 @@ class PrefixModeStorage {
public:
PrefixModeStorage();
std::string package_to_prefix_mappings_path() const { return package_to_prefix_mappings_path_; }
std::string package_to_prefix_mappings_path() const {
return package_to_prefix_mappings_path_;
}
void set_package_to_prefix_mappings_path(const std::string& path) {
package_to_prefix_mappings_path_ = path;
package_to_prefix_map_.clear();
@ -115,7 +121,9 @@ class PrefixModeStorage {
// When using a proto package as the prefix, this should be added as the
// prefix in front of it.
const std::string& forced_package_prefix() const { return forced_prefix_; }
void set_forced_package_prefix(const std::string& prefix) { forced_prefix_ = prefix; }
void set_forced_package_prefix(const std::string& prefix) {
forced_prefix_ = prefix;
}
private:
bool use_package_name_;
@ -132,7 +140,8 @@ PrefixModeStorage::PrefixModeStorage() {
use_package_name_ = BoolFromEnvVar("GPB_OBJC_USE_PACKAGE_AS_PREFIX", false);
const char* exception_path = getenv("GPB_OBJC_PACKAGE_PREFIX_EXCEPTIONS_PATH");
const char* exception_path =
getenv("GPB_OBJC_PACKAGE_PREFIX_EXCEPTIONS_PATH");
if (exception_path) {
exception_path_ = exception_path;
}
@ -143,21 +152,25 @@ PrefixModeStorage::PrefixModeStorage() {
}
}
std::string PrefixModeStorage::prefix_from_proto_package_mappings(const FileDescriptor* file) {
std::string PrefixModeStorage::prefix_from_proto_package_mappings(
const FileDescriptor* file) {
if (!file) {
return "";
}
if (package_to_prefix_map_.empty() && !package_to_prefix_mappings_path_.empty()) {
if (package_to_prefix_map_.empty() &&
!package_to_prefix_mappings_path_.empty()) {
std::string error_str;
// Re use the same collector as we use for expected_prefixes_path since the file
// format is the same.
PackageToPrefixesCollector collector("Package to prefixes", &package_to_prefix_map_);
if (!ParseSimpleFile(package_to_prefix_mappings_path_, &collector, &error_str)) {
// Re use the same collector as we use for expected_prefixes_path since the
// file format is the same.
PackageToPrefixesCollector collector("Package to prefixes",
&package_to_prefix_map_);
if (!ParseSimpleFile(package_to_prefix_mappings_path_, &collector,
&error_str)) {
if (error_str.empty()) {
error_str = std::string("protoc:0: warning: Failed to parse")
+ std::string(" prefix to proto package mappings file: ")
+ package_to_prefix_mappings_path_;
error_str = std::string("protoc:0: warning: Failed to parse") +
std::string(" prefix to proto package mappings file: ") +
package_to_prefix_mappings_path_;
}
std::cerr << error_str << std::endl;
std::cerr.flush();
@ -169,7 +182,8 @@ std::string PrefixModeStorage::prefix_from_proto_package_mappings(const FileDesc
// For files without packages, they can be registered as "no_package:PATH",
// allowing the expected prefixes file.
static const std::string no_package_prefix("no_package:");
const std::string lookup_key = package.empty() ? no_package_prefix + file->name() : package;
const std::string lookup_key =
package.empty() ? no_package_prefix + file->name() : package;
std::map<std::string, std::string>::const_iterator prefix_lookup =
package_to_prefix_map_.find(lookup_key);
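The lookup above keys the mapping by proto package and falls back to a "no_package:<file path>" key for files that declare no package. A hedged standalone sketch of that convention; LookupPrefix and the sample mappings are illustrative, not the real API:

#include <iostream>
#include <map>
#include <string>

// Packages map to ObjC class prefixes; package-less files are keyed as
// "no_package:<path>" so they can still appear in the mappings file.
std::string LookupPrefix(
    const std::map<std::string, std::string>& package_to_prefix,
    const std::string& package, const std::string& file_name) {
  const std::string no_package_prefix = "no_package:";
  const std::string lookup_key =
      package.empty() ? no_package_prefix + file_name : package;
  auto it = package_to_prefix.find(lookup_key);
  return it == package_to_prefix.end() ? "" : it->second;
}

int main() {
  std::map<std::string, std::string> mappings = {
      {"cool.app", "CLA"},
      {"no_package:stuff/loose.proto", "LSE"},
  };
  std::cout << LookupPrefix(mappings, "cool.app", "cool/app.proto") << "\n";  // CLA
  std::cout << LookupPrefix(mappings, "", "stuff/loose.proto") << "\n";       // LSE
}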
@ -187,9 +201,9 @@ bool PrefixModeStorage::is_package_exempted(const std::string& package) {
SimpleLineCollector collector(&exceptions_);
if (!ParseSimpleFile(exception_path_, &collector, &error_str)) {
if (error_str.empty()) {
error_str = std::string("protoc:0: warning: Failed to parse")
+ std::string(" package prefix exceptions file: ")
+ exception_path_;
error_str = std::string("protoc:0: warning: Failed to parse") +
std::string(" package prefix exceptions file: ") +
exception_path_;
}
std::cerr << error_str << std::endl;
std::cerr.flush();
@ -320,8 +334,7 @@ std::string UnderscoresToCamelCase(const std::string& input,
}
result += value;
}
if ((result.length() != 0) &&
!first_capitalized &&
if ((result.length() != 0) && !first_capitalized &&
!first_segment_forces_upper) {
result[0] = absl::ascii_tolower(result[0]);
}
@ -329,72 +342,193 @@ std::string UnderscoresToCamelCase(const std::string& input,
}
const char* const kReservedWordList[] = {
// Note NSObject Methods:
// These are brought in from nsobject_methods.h that is generated
// using method_dump.sh. See kNSObjectMethods below.
// Note NSObject Methods:
// These are brought in from nsobject_methods.h that is generated
// using method_dump.sh. See kNSObjectMethods below.
// Objective C "keywords" that aren't in C
// From
// http://stackoverflow.com/questions/1873630/reserved-keywords-in-objective-c
// with some others added on.
"id", "_cmd", "super", "in", "out", "inout", "bycopy", "byref", "oneway",
"self", "instancetype", "nullable", "nonnull", "nil", "Nil",
"YES", "NO", "weak",
// Objective C "keywords" that aren't in C
// From
// http://stackoverflow.com/questions/1873630/reserved-keywords-in-objective-c
// with some others added on.
"id",
"_cmd",
"super",
"in",
"out",
"inout",
"bycopy",
"byref",
"oneway",
"self",
"instancetype",
"nullable",
"nonnull",
"nil",
"Nil",
"YES",
"NO",
"weak",
// C/C++ keywords (Incl C++ 0x11)
// From http://en.cppreference.com/w/cpp/keywords
"and", "and_eq", "alignas", "alignof", "asm", "auto", "bitand", "bitor",
"bool", "break", "case", "catch", "char", "char16_t", "char32_t", "class",
"compl", "const", "constexpr", "const_cast", "continue", "decltype",
"default", "delete", "double", "dynamic_cast", "else", "enum", "explicit",
"export", "extern ", "false", "float", "for", "friend", "goto", "if",
"inline", "int", "long", "mutable", "namespace", "new", "noexcept", "not",
"not_eq", "nullptr", "operator", "or", "or_eq", "private", "protected",
"public", "register", "reinterpret_cast", "return", "short", "signed",
"sizeof", "static", "static_assert", "static_cast", "struct", "switch",
"template", "this", "thread_local", "throw", "true", "try", "typedef",
"typeid", "typename", "union", "unsigned", "using", "virtual", "void",
"volatile", "wchar_t", "while", "xor", "xor_eq",
// C/C++ keywords (Incl C++ 0x11)
// From http://en.cppreference.com/w/cpp/keywords
"and",
"and_eq",
"alignas",
"alignof",
"asm",
"auto",
"bitand",
"bitor",
"bool",
"break",
"case",
"catch",
"char",
"char16_t",
"char32_t",
"class",
"compl",
"const",
"constexpr",
"const_cast",
"continue",
"decltype",
"default",
"delete",
"double",
"dynamic_cast",
"else",
"enum",
"explicit",
"export",
"extern ",
"false",
"float",
"for",
"friend",
"goto",
"if",
"inline",
"int",
"long",
"mutable",
"namespace",
"new",
"noexcept",
"not",
"not_eq",
"nullptr",
"operator",
"or",
"or_eq",
"private",
"protected",
"public",
"register",
"reinterpret_cast",
"return",
"short",
"signed",
"sizeof",
"static",
"static_assert",
"static_cast",
"struct",
"switch",
"template",
"this",
"thread_local",
"throw",
"true",
"try",
"typedef",
"typeid",
"typename",
"union",
"unsigned",
"using",
"virtual",
"void",
"volatile",
"wchar_t",
"while",
"xor",
"xor_eq",
// C99 keywords
// From
// http://publib.boulder.ibm.com/infocenter/lnxpcomp/v8v101/index.jsp?topic=%2Fcom.ibm.xlcpp8l.doc%2Flanguage%2Fref%2Fkeyw.htm
"restrict",
// C99 keywords
// From
// http://publib.boulder.ibm.com/infocenter/lnxpcomp/v8v101/index.jsp?topic=%2Fcom.ibm.xlcpp8l.doc%2Flanguage%2Fref%2Fkeyw.htm
"restrict",
// GCC/Clang extension
"typeof",
// GCC/Clang extension
"typeof",
// Not a keyword, but will break you
"NULL",
// Not a keyword, but will break you
"NULL",
// C88+ specs call for these to be macros, so depending on what they are
// defined to be it can lead to odd errors for some Xcode/SDK versions.
"stdin", "stdout", "stderr",
// C88+ specs call for these to be macros, so depending on what they are
// defined to be it can lead to odd errors for some Xcode/SDK versions.
"stdin",
"stdout",
"stderr",
// Objective-C Runtime typedefs
// From <objc/runtime.h>
"Category", "Ivar", "Method", "Protocol",
// Objective-C Runtime typedefs
// From <objc/runtime.h>
"Category",
"Ivar",
"Method",
"Protocol",
// GPBMessage Methods
// Only need to add instance methods that may conflict with
// method declared in protos. The main cases are methods
// that take no arguments, or setFoo:/hasFoo: type methods.
"clear", "data", "delimitedData", "descriptor", "extensionRegistry",
"extensionsCurrentlySet", "initialized", "isInitialized", "serializedSize",
"sortedExtensionsInUse", "unknownFields",
// GPBMessage Methods
// Only need to add instance methods that may conflict with
// method declared in protos. The main cases are methods
// that take no arguments, or setFoo:/hasFoo: type methods.
"clear",
"data",
"delimitedData",
"descriptor",
"extensionRegistry",
"extensionsCurrentlySet",
"initialized",
"isInitialized",
"serializedSize",
"sortedExtensionsInUse",
"unknownFields",
// MacTypes.h names
"Fixed", "Fract", "Size", "LogicalAddress", "PhysicalAddress", "ByteCount",
"ByteOffset", "Duration", "AbsoluteTime", "OptionBits", "ItemCount",
"PBVersion", "ScriptCode", "LangCode", "RegionCode", "OSType",
"ProcessSerialNumber", "Point", "Rect", "FixedPoint", "FixedRect", "Style",
"StyleParameter", "StyleField", "TimeScale", "TimeBase", "TimeRecord",
// MacTypes.h names
"Fixed",
"Fract",
"Size",
"LogicalAddress",
"PhysicalAddress",
"ByteCount",
"ByteOffset",
"Duration",
"AbsoluteTime",
"OptionBits",
"ItemCount",
"PBVersion",
"ScriptCode",
"LangCode",
"RegionCode",
"OSType",
"ProcessSerialNumber",
"Point",
"Rect",
"FixedPoint",
"FixedRect",
"Style",
"StyleParameter",
"StyleField",
"TimeScale",
"TimeBase",
"TimeRecord",
};
// Returns true if input starts with __ or _[A-Z], which are reserved identifiers
// in C/ C++. All calls should go through UnderscoresToCamelCase before getting here
// but this verifies and allows for future expansion if we decide to redefine what a
// reserved C identifier is (for example the GNU list
// in C/ C++. All calls should go through UnderscoresToCamelCase before getting
// here but this verifies and allows for future expansion if we decide to
// redefine what a reserved C identifier is (for example the GNU list
// https://www.gnu.org/software/libc/manual/html_node/Reserved-Names.html )
bool IsReservedCIdentifier(const std::string& input) {
if (input.length() > 2) {
@ -422,7 +556,8 @@ std::string SanitizeNameForObjC(const std::string& prefix,
// b) Isn't equivalent to the prefix or
// c) Has the prefix, but the letter after the prefix is lowercase
if (absl::StartsWith(input, prefix)) {
if (input.length() == prefix.length() || !absl::ascii_isupper(input[prefix.length()])) {
if (input.length() == prefix.length() ||
!absl::ascii_isupper(input[prefix.length()])) {
sanitized = prefix + input;
} else {
sanitized = input;
@ -469,8 +604,7 @@ void PathSplit(const std::string& path, std::string* directory,
}
bool IsSpecialNamePrefix(const std::string& name,
const std::string* special_names,
size_t count) {
const std::string* special_names, size_t count) {
for (size_t i = 0; i < count; ++i) {
const size_t length = special_names[i].length();
if (name.compare(0, length, special_names[i]) == 0) {
@ -503,8 +637,8 @@ bool IsRetainedName(const std::string& name) {
// http://developer.apple.com/library/mac/#documentation/Cocoa/Conceptual/MemoryMgmt/Articles/mmRules.html
static const std::string retained_names[] = {"new", "alloc", "copy",
"mutableCopy"};
return IsSpecialNamePrefix(name, retained_names,
sizeof(retained_names) / sizeof(retained_names[0]));
return IsSpecialNamePrefix(
name, retained_names, sizeof(retained_names) / sizeof(retained_names[0]));
}
bool IsInitName(const std::string& name) {
@ -557,8 +691,10 @@ std::string FileClassPrefix(const FileDescriptor* file) {
return file->options().objc_class_prefix();
}
// If package prefix is specified in a prefix to proto mappings file then use that.
std::string objc_class_prefix = g_prefix_mode.prefix_from_proto_package_mappings(file);
// If package prefix is specified in a prefix to proto mappings file then use
// that.
std::string objc_class_prefix =
g_prefix_mode.prefix_from_proto_package_mappings(file);
if (!objc_class_prefix.empty()) {
return objc_class_prefix;
}
@ -577,7 +713,8 @@ std::string FileClassPrefix(const FileDescriptor* file) {
// camelcase each one and then join them with underscores, and add an
// underscore at the end.
std::string result;
const std::vector<std::string> segments = absl::StrSplit(file->package(), ".", absl::SkipEmpty());
const std::vector<std::string> segments =
absl::StrSplit(file->package(), ".", absl::SkipEmpty());
for (const auto& segment : segments) {
const std::string part = UnderscoresToCamelCase(segment, true);
if (part.empty()) {
@ -780,7 +917,8 @@ std::string OneofNameCapitalized(const OneofDescriptor* descriptor) {
return result;
}
std::string UnCamelCaseFieldName(const std::string& name, const FieldDescriptor* field) {
std::string UnCamelCaseFieldName(const std::string& name,
const FieldDescriptor* field) {
absl::string_view worker(name);
if (absl::EndsWith(worker, "_p")) {
worker = absl::StripSuffix(worker, "_p");
@ -850,15 +988,18 @@ bool IsProtobufLibraryBundledProtoFile(const FileDescriptor* file) {
namespace {
bool PackageToPrefixesCollector::ConsumeLine(
const absl::string_view& line, std::string* out_error) {
bool PackageToPrefixesCollector::ConsumeLine(const absl::string_view& line,
std::string* out_error) {
int offset = line.find('=');
if (offset == absl::string_view::npos) {
*out_error = usage_ + " file line without equal sign: '" + absl::StrCat(line) + "'.";
*out_error =
usage_ + " file line without equal sign: '" + absl::StrCat(line) + "'.";
return false;
}
absl::string_view package = absl::StripAsciiWhitespace(line.substr(0, offset));
absl::string_view prefix = absl::StripAsciiWhitespace(line.substr(offset + 1));
absl::string_view package =
absl::StripAsciiWhitespace(line.substr(0, offset));
absl::string_view prefix =
absl::StripAsciiWhitespace(line.substr(offset + 1));
MaybeUnQuote(&prefix);
// Don't really worry about error checking the package/prefix for
// being valid. Assume the file is validated when it is created/edited.
@ -874,8 +1015,7 @@ bool LoadExpectedPackagePrefixes(const std::string& expected_prefixes_path,
}
PackageToPrefixesCollector collector("Expected prefixes", prefix_map);
return ParseSimpleFile(
expected_prefixes_path, &collector, out_error);
return ParseSimpleFile(expected_prefixes_path, &collector, out_error);
}
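For orientation, the expected-prefixes file and the package-to-prefix mappings file share the line format parsed by the collector above: one 'package = prefix' pair per line, whitespace-trimmed, with the prefix optionally quoted, and a 'no_package:PATH' key for files that declare no package. The entries below are invented, purely as an illustration of the format:

    google.protobuf = GPB
    my.company.product = MCP
    another.package = "XYZ"
    no_package:src/no_package_file.proto = NPF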
bool ValidateObjCClassPrefix(
@ -929,9 +1069,9 @@ bool ValidateObjCClassPrefix(
// If there was no prefix option, we're done at this point.
if (!has_prefix) {
if (require_prefixes) {
*out_error =
"error: '" + file->name() + "' does not have a required 'option" +
" objc_class_prefix'.";
*out_error = "error: '" + file->name() +
"' does not have a required 'option" +
" objc_class_prefix'.";
return false;
}
return true;
@ -958,9 +1098,9 @@ bool ValidateObjCClassPrefix(
// package (overlap is allowed, but it has to be listed as an expected
// overlap).
if (!other_package_for_prefix.empty()) {
*out_error =
"error: Found 'option objc_class_prefix = \"" + prefix +
"\";' in '" + file->name() + "'; that prefix is already used for ";
*out_error = "error: Found 'option objc_class_prefix = \"" + prefix +
"\";' in '" + file->name() +
"'; that prefix is already used for ";
if (absl::StartsWith(other_package_for_prefix, no_package_prefix)) {
absl::StrAppend(
out_error, "file '",
@ -976,26 +1116,24 @@ bool ValidateObjCClassPrefix(
expected_prefixes_path, ").");
return false; // Only report first usage of the prefix.
}
} // !prefix.empty() && have_expected_prefix_file
} // !prefix.empty() && have_expected_prefix_file
// Check: Warning - Make sure the prefix is a reasonable value according
// to Apple's rules (the checks above implicitly whitelist anything that
// doesn't meet these rules).
if (!prefix.empty() && !absl::ascii_isupper(prefix[0])) {
std::cerr
<< "protoc:0: warning: Invalid 'option objc_class_prefix = \""
<< prefix << "\";' in '" << file->name() << "';"
<< " it should start with a capital letter." << std::endl;
std::cerr << "protoc:0: warning: Invalid 'option objc_class_prefix = \""
<< prefix << "\";' in '" << file->name() << "';"
<< " it should start with a capital letter." << std::endl;
std::cerr.flush();
}
if (!prefix.empty() && prefix.length() < 3) {
// Apple reserves 2 character prefixes for themselves. They do use some
// 3 character prefixes, but they haven't updated the rules/docs.
std::cerr
<< "protoc:0: warning: Invalid 'option objc_class_prefix = \""
<< prefix << "\";' in '" << file->name() << "';"
<< " Apple recommends they should be at least 3 characters long."
<< std::endl;
std::cerr << "protoc:0: warning: Invalid 'option objc_class_prefix = \""
<< prefix << "\";' in '" << file->name() << "';"
<< " Apple recommends they should be at least 3 characters long."
<< std::endl;
std::cerr.flush();
}
@ -1004,19 +1142,19 @@ bool ValidateObjCClassPrefix(
if (have_expected_prefix_file) {
if (prefixes_must_be_registered) {
*out_error =
"error: '" + file->name() + "' has 'option objc_class_prefix = \"" +
prefix + "\";', but it is not registered. Add '" + lookup_key + " = " +
(prefix.empty() ? "\"\"" : prefix) +
"' to the expected prefixes file (" + expected_prefixes_path + ").";
"error: '" + file->name() + "' has 'option objc_class_prefix = \"" +
prefix + "\";', but it is not registered. Add '" + lookup_key +
" = " + (prefix.empty() ? "\"\"" : prefix) +
"' to the expected prefixes file (" + expected_prefixes_path + ").";
return false;
}
std::cerr
<< "protoc:0: warning: Found unexpected 'option objc_class_prefix = \""
<< prefix << "\";' in '" << file->name() << "'; consider adding '"
<< lookup_key << " = " << (prefix.empty() ? "\"\"" : prefix)
<< "' to the expected prefixes file (" << expected_prefixes_path
<< ")." << std::endl;
<< "protoc:0: warning: Found unexpected 'option objc_class_prefix = \""
<< prefix << "\";' in '" << file->name() << "'; consider adding '"
<< lookup_key << " = " << (prefix.empty() ? "\"\"" : prefix)
<< "' to the expected prefixes file (" << expected_prefixes_path << ")."
<< std::endl;
std::cerr.flush();
}
@ -1033,7 +1171,8 @@ Options::Options() {
if (file_path) {
expected_prefixes_path = file_path;
}
const char* suppressions = getenv("GPB_OBJC_EXPECTED_PACKAGE_PREFIXES_SUPPRESSIONS");
const char* suppressions =
getenv("GPB_OBJC_EXPECTED_PACKAGE_PREFIXES_SUPPRESSIONS");
if (suppressions) {
expected_prefixes_suppressions =
absl::StrSplit(suppressions, ";", absl::SkipEmpty());
@ -1045,9 +1184,9 @@ Options::Options() {
bool ValidateObjCClassPrefixes(const std::vector<const FileDescriptor*>& files,
std::string* out_error) {
// Options's ctor loads from the environment.
Options options;
return ValidateObjCClassPrefixes(files, options, out_error);
// Options's ctor loads from the environment.
Options options;
return ValidateObjCClassPrefixes(files, options, out_error);
}
bool ValidateObjCClassPrefixes(const std::vector<const FileDescriptor*>& files,
@ -1062,28 +1201,25 @@ bool ValidateObjCClassPrefixes(const std::vector<const FileDescriptor*>& files,
// Load the expected package prefixes, if available, to validate against.
std::map<std::string, std::string> expected_package_prefixes;
if (!LoadExpectedPackagePrefixes(generation_options.expected_prefixes_path,
&expected_package_prefixes,
out_error)) {
&expected_package_prefixes, out_error)) {
return false;
}
for (int i = 0; i < files.size(); i++) {
bool should_skip =
(std::find(generation_options.expected_prefixes_suppressions.begin(),
generation_options.expected_prefixes_suppressions.end(),
files[i]->name())
!= generation_options.expected_prefixes_suppressions.end());
(std::find(generation_options.expected_prefixes_suppressions.begin(),
generation_options.expected_prefixes_suppressions.end(),
files[i]->name()) !=
generation_options.expected_prefixes_suppressions.end());
if (should_skip) {
continue;
}
bool is_valid =
ValidateObjCClassPrefix(files[i],
generation_options.expected_prefixes_path,
expected_package_prefixes,
generation_options.prefixes_must_be_registered,
generation_options.require_prefixes,
out_error);
bool is_valid = ValidateObjCClassPrefix(
files[i], generation_options.expected_prefixes_path,
expected_package_prefixes,
generation_options.prefixes_must_be_registered,
generation_options.require_prefixes, out_error);
if (!is_valid) {
return false;
}

View File

@ -49,8 +49,7 @@ namespace objectivec {
// Get/Set the path to a file to load for objc class prefix lookups.
std::string PROTOC_EXPORT GetPackageToPrefixMappingsPath();
void PROTOC_EXPORT SetPackageToPrefixMappingsPath(
const std::string& file_path);
void PROTOC_EXPORT SetPackageToPrefixMappingsPath(const std::string& file_path);
// Get/Set if the proto package should be used to make the default prefix for
// symbols. This will then impact most of the type naming apis below. It is done
// as a global to not break any other generator reusing the methods since they
@ -61,8 +60,8 @@ void PROTOC_EXPORT SetUseProtoPackageAsDefaultPrefix(bool on_or_off);
// `UseProtoPackageAsDefaultPrefix()` is `true`. An empty string means there
// should be no exceptions.
std::string PROTOC_EXPORT GetProtoPackagePrefixExceptionList();
void PROTOC_EXPORT SetProtoPackagePrefixExceptionList(
const std::string& file_path);
void PROTOC_EXPORT
SetProtoPackagePrefixExceptionList(const std::string& file_path);
// Get/Set a prefix to add before the prefix generated from the package name.
// This is only used when UseProtoPackageAsDefaultPrefix() is True.
std::string PROTOC_EXPORT GetForcedPackagePrefix();
@ -108,14 +107,16 @@ std::string PROTOC_EXPORT EnumName(const EnumDescriptor* descriptor);
std::string PROTOC_EXPORT EnumValueName(const EnumValueDescriptor* descriptor);
// Returns the name of the enum value corresponding to the descriptor.
std::string PROTOC_EXPORT EnumValueShortName(const EnumValueDescriptor* descriptor);
std::string PROTOC_EXPORT
EnumValueShortName(const EnumValueDescriptor* descriptor);
// Reverse what an enum does.
std::string PROTOC_EXPORT UnCamelCaseEnumShortName(const std::string& name);
// Returns the name to use for the extension (used as the method off the file's
// Root class).
std::string PROTOC_EXPORT ExtensionMethodName(const FieldDescriptor* descriptor);
std::string PROTOC_EXPORT
ExtensionMethodName(const FieldDescriptor* descriptor);
// Returns the transformed field name.
std::string PROTOC_EXPORT FieldName(const FieldDescriptor* field);
@ -124,7 +125,8 @@ std::string PROTOC_EXPORT FieldNameCapitalized(const FieldDescriptor* field);
// Returns the transformed oneof name.
std::string PROTOC_EXPORT OneofEnumName(const OneofDescriptor* descriptor);
std::string PROTOC_EXPORT OneofName(const OneofDescriptor* descriptor);
std::string PROTOC_EXPORT OneofNameCapitalized(const OneofDescriptor* descriptor);
std::string PROTOC_EXPORT
OneofNameCapitalized(const OneofDescriptor* descriptor);
// Reverse of the above.
std::string PROTOC_EXPORT UnCamelCaseFieldName(const std::string& name,

View File

@ -29,10 +29,11 @@
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#include "google/protobuf/compiler/objectivec/names.h"
#include "google/protobuf/io/zero_copy_stream_impl_lite.h"
#include <gtest/gtest.h>
#include "google/protobuf/io/zero_copy_stream_impl_lite.h"
namespace google {
namespace protobuf {
namespace compiler {

View File

@ -35,193 +35,193 @@
// iOS: iPhoneSimulator12.1.sdk
const char* const kNSObjectMethodsList[] = {
"CAMLType",
"CA_copyRenderValue",
"CA_prepareRenderValue",
"NS_copyCGImage",
"NS_tiledLayerVisibleRect",
"___tryRetain_OA",
"__autorelease_OA",
"__dealloc_zombie",
"__release_OA",
"__retain_OA",
"_accessibilityFinalize",
"_accessibilityIsTableViewDescendant",
"_accessibilityUIElementSpecifier",
"_accessibilityUseConvenienceAPI",
"_allowsDirectEncoding",
"_asScriptTerminologyNameArray",
"_asScriptTerminologyNameString",
"_bindingAdaptor",
"_cfTypeID",
"_copyDescription",
"_destroyObserverList",
"_didEndKeyValueObserving",
"_implicitObservationInfo",
"_internalAccessibilityAttributedHint",
"_internalAccessibilityAttributedLabel",
"_internalAccessibilityAttributedValue",
"_isAXConnector",
"_isAccessibilityContainerSectionCandidate",
"_isAccessibilityContentNavigatorSectionCandidate",
"_isAccessibilityContentSectionCandidate",
"_isAccessibilityTopLevelNavigatorSectionCandidate",
"_isDeallocating",
"_isKVOA",
"_isToManyChangeInformation",
"_ivarDescription",
"_localClassNameForClass",
"_methodDescription",
"_observerStorage",
"_overrideUseFastBlockObservers",
"_propertyDescription",
"_releaseBindingAdaptor",
"_scriptingCount",
"_scriptingCountNonrecursively",
"_scriptingDebugDescription",
"_scriptingExists",
"_scriptingShouldCheckObjectIndexes",
"_shortMethodDescription",
"_shouldSearchChildrenForSection",
"_traitStorageList",
"_tryRetain",
"_ui_descriptionBuilder",
"_uikit_variesByTraitCollections",
"_web_description",
"_webkit_invokeOnMainThread",
"_willBeginKeyValueObserving",
"accessibilityActivate",
"accessibilityActivationPoint",
"accessibilityAllowsOverriddenAttributesWhenIgnored",
"accessibilityAssistiveTechnologyFocusedIdentifiers",
"accessibilityAttributedHint",
"accessibilityAttributedLabel",
"accessibilityAttributedValue",
"accessibilityContainer",
"accessibilityContainerType",
"accessibilityCustomActions",
"accessibilityCustomRotors",
"accessibilityDecrement",
"accessibilityDragSourceDescriptors",
"accessibilityDropPointDescriptors",
"accessibilityElementCount",
"accessibilityElementDidBecomeFocused",
"accessibilityElementDidLoseFocus",
"accessibilityElementIsFocused",
"accessibilityElements",
"accessibilityElementsHidden",
"accessibilityFrame",
"accessibilityHeaderElements",
"accessibilityHint",
"accessibilityIdentification",
"accessibilityIdentifier",
"accessibilityIncrement",
"accessibilityLabel",
"accessibilityLanguage",
"accessibilityLocalizedStringKey",
"accessibilityNavigationStyle",
"accessibilityOverriddenAttributes",
"accessibilityParameterizedAttributeNames",
"accessibilityPath",
"accessibilityPerformEscape",
"accessibilityPerformMagicTap",
"accessibilityPresenterProcessIdentifier",
"accessibilityShouldUseUniqueId",
"accessibilitySupportsNotifications",
"accessibilitySupportsOverriddenAttributes",
"accessibilityTemporaryChildren",
"accessibilityTraits",
"accessibilityValue",
"accessibilityViewIsModal",
"accessibilityVisibleArea",
"allPropertyKeys",
"allowsWeakReference",
"attributeKeys",
"autoContentAccessingProxy",
"autorelease",
"awakeFromNib",
"boolValueSafe",
"bs_encoded",
"bs_isPlistableType",
"bs_secureEncoded",
"cl_json_serializeKey",
"class",
"classCode",
"classDescription",
"classForArchiver",
"classForCoder",
"classForKeyedArchiver",
"classForPortCoder",
"className",
"clearProperties",
"copy",
"dealloc",
"debugDescription",
"defaultAccessibilityTraits",
"description",
"doubleValueSafe",
"entityName",
"exposedBindings",
"finalize",
"finishObserving",
"flushKeyBindings",
"hash",
"init",
"int64ValueSafe",
"isAccessibilityElement",
"isAccessibilityElementByDefault",
"isElementAccessibilityExposedToInterfaceBuilder",
"isFault",
"isNSArray__",
"isNSCFConstantString__",
"isNSData__",
"isNSDate__",
"isNSDictionary__",
"isNSNumber__",
"isNSObject__",
"isNSOrderedSet__",
"isNSSet__",
"isNSString__",
"isNSTimeZone__",
"isNSValue__",
"isProxy",
"mutableCopy",
"nilValueForKey",
"objectSpecifier",
"observationInfo",
"pep_onDetachedThread",
"pep_onMainThread",
"pep_onMainThreadIfNecessary",
"prepareForInterfaceBuilder",
"release",
"releaseOnMainThread",
"retain",
"retainCount",
"retainWeakReference",
"scriptingProperties",
"self",
"shouldGroupAccessibilityChildren",
"storedAccessibilityActivationPoint",
"storedAccessibilityContainerType",
"storedAccessibilityElementsHidden",
"storedAccessibilityFrame",
"storedAccessibilityNavigationStyle",
"storedAccessibilityTraits",
"storedAccessibilityViewIsModal",
"storedIsAccessibilityElement",
"storedShouldGroupAccessibilityChildren",
"stringValueSafe",
"superclass",
"toManyRelationshipKeys",
"toOneRelationshipKeys",
"traitStorageList",
"un_safeBoolValue",
"userInterfaceItemIdentifier",
"utf8ValueSafe",
"valuesForKeysWithDictionary",
"zone",
// Protocol: CAAnimatableValue
// Protocol: CARenderValue
// Protocol: NSObject
// Protocol: ROCKRemoteInvocationInterface
"CAMLType",
"CA_copyRenderValue",
"CA_prepareRenderValue",
"NS_copyCGImage",
"NS_tiledLayerVisibleRect",
"___tryRetain_OA",
"__autorelease_OA",
"__dealloc_zombie",
"__release_OA",
"__retain_OA",
"_accessibilityFinalize",
"_accessibilityIsTableViewDescendant",
"_accessibilityUIElementSpecifier",
"_accessibilityUseConvenienceAPI",
"_allowsDirectEncoding",
"_asScriptTerminologyNameArray",
"_asScriptTerminologyNameString",
"_bindingAdaptor",
"_cfTypeID",
"_copyDescription",
"_destroyObserverList",
"_didEndKeyValueObserving",
"_implicitObservationInfo",
"_internalAccessibilityAttributedHint",
"_internalAccessibilityAttributedLabel",
"_internalAccessibilityAttributedValue",
"_isAXConnector",
"_isAccessibilityContainerSectionCandidate",
"_isAccessibilityContentNavigatorSectionCandidate",
"_isAccessibilityContentSectionCandidate",
"_isAccessibilityTopLevelNavigatorSectionCandidate",
"_isDeallocating",
"_isKVOA",
"_isToManyChangeInformation",
"_ivarDescription",
"_localClassNameForClass",
"_methodDescription",
"_observerStorage",
"_overrideUseFastBlockObservers",
"_propertyDescription",
"_releaseBindingAdaptor",
"_scriptingCount",
"_scriptingCountNonrecursively",
"_scriptingDebugDescription",
"_scriptingExists",
"_scriptingShouldCheckObjectIndexes",
"_shortMethodDescription",
"_shouldSearchChildrenForSection",
"_traitStorageList",
"_tryRetain",
"_ui_descriptionBuilder",
"_uikit_variesByTraitCollections",
"_web_description",
"_webkit_invokeOnMainThread",
"_willBeginKeyValueObserving",
"accessibilityActivate",
"accessibilityActivationPoint",
"accessibilityAllowsOverriddenAttributesWhenIgnored",
"accessibilityAssistiveTechnologyFocusedIdentifiers",
"accessibilityAttributedHint",
"accessibilityAttributedLabel",
"accessibilityAttributedValue",
"accessibilityContainer",
"accessibilityContainerType",
"accessibilityCustomActions",
"accessibilityCustomRotors",
"accessibilityDecrement",
"accessibilityDragSourceDescriptors",
"accessibilityDropPointDescriptors",
"accessibilityElementCount",
"accessibilityElementDidBecomeFocused",
"accessibilityElementDidLoseFocus",
"accessibilityElementIsFocused",
"accessibilityElements",
"accessibilityElementsHidden",
"accessibilityFrame",
"accessibilityHeaderElements",
"accessibilityHint",
"accessibilityIdentification",
"accessibilityIdentifier",
"accessibilityIncrement",
"accessibilityLabel",
"accessibilityLanguage",
"accessibilityLocalizedStringKey",
"accessibilityNavigationStyle",
"accessibilityOverriddenAttributes",
"accessibilityParameterizedAttributeNames",
"accessibilityPath",
"accessibilityPerformEscape",
"accessibilityPerformMagicTap",
"accessibilityPresenterProcessIdentifier",
"accessibilityShouldUseUniqueId",
"accessibilitySupportsNotifications",
"accessibilitySupportsOverriddenAttributes",
"accessibilityTemporaryChildren",
"accessibilityTraits",
"accessibilityValue",
"accessibilityViewIsModal",
"accessibilityVisibleArea",
"allPropertyKeys",
"allowsWeakReference",
"attributeKeys",
"autoContentAccessingProxy",
"autorelease",
"awakeFromNib",
"boolValueSafe",
"bs_encoded",
"bs_isPlistableType",
"bs_secureEncoded",
"cl_json_serializeKey",
"class",
"classCode",
"classDescription",
"classForArchiver",
"classForCoder",
"classForKeyedArchiver",
"classForPortCoder",
"className",
"clearProperties",
"copy",
"dealloc",
"debugDescription",
"defaultAccessibilityTraits",
"description",
"doubleValueSafe",
"entityName",
"exposedBindings",
"finalize",
"finishObserving",
"flushKeyBindings",
"hash",
"init",
"int64ValueSafe",
"isAccessibilityElement",
"isAccessibilityElementByDefault",
"isElementAccessibilityExposedToInterfaceBuilder",
"isFault",
"isNSArray__",
"isNSCFConstantString__",
"isNSData__",
"isNSDate__",
"isNSDictionary__",
"isNSNumber__",
"isNSObject__",
"isNSOrderedSet__",
"isNSSet__",
"isNSString__",
"isNSTimeZone__",
"isNSValue__",
"isProxy",
"mutableCopy",
"nilValueForKey",
"objectSpecifier",
"observationInfo",
"pep_onDetachedThread",
"pep_onMainThread",
"pep_onMainThreadIfNecessary",
"prepareForInterfaceBuilder",
"release",
"releaseOnMainThread",
"retain",
"retainCount",
"retainWeakReference",
"scriptingProperties",
"self",
"shouldGroupAccessibilityChildren",
"storedAccessibilityActivationPoint",
"storedAccessibilityContainerType",
"storedAccessibilityElementsHidden",
"storedAccessibilityFrame",
"storedAccessibilityNavigationStyle",
"storedAccessibilityTraits",
"storedAccessibilityViewIsModal",
"storedIsAccessibilityElement",
"storedShouldGroupAccessibilityChildren",
"stringValueSafe",
"superclass",
"toManyRelationshipKeys",
"toOneRelationshipKeys",
"traitStorageList",
"un_safeBoolValue",
"userInterfaceItemIdentifier",
"utf8ValueSafe",
"valuesForKeysWithDictionary",
"zone",
// Protocol: CAAnimatableValue
// Protocol: CARenderValue
// Protocol: NSObject
// Protocol: ROCKRemoteInvocationInterface
};

View File

@ -34,8 +34,8 @@
#include <string>
#include "absl/strings/str_cat.h"
#include "google/protobuf/compiler/objectivec/names.h"
#include "google/protobuf/compiler/objectivec/helpers.h"
#include "google/protobuf/compiler/objectivec/names.h"
#include "google/protobuf/io/printer.h"
namespace google {
@ -71,54 +71,53 @@ void OneofGenerator::SetOneofIndexBase(int index_base) {
}
void OneofGenerator::GenerateCaseEnum(io::Printer* printer) {
printer->Print(
variables_,
"typedef GPB_ENUM($enum_name$) {\n");
printer->Print(variables_, "typedef GPB_ENUM($enum_name$) {\n");
printer->Indent();
printer->Print(
variables_,
"$enum_name$_GPBUnsetOneOfCase = 0,\n");
printer->Print(variables_, "$enum_name$_GPBUnsetOneOfCase = 0,\n");
std::string enum_name = variables_["enum_name"];
for (int j = 0; j < descriptor_->field_count(); j++) {
const FieldDescriptor* field = descriptor_->field(j);
std::string field_name = FieldNameCapitalized(field);
printer->Print(
"$enum_name$_$field_name$ = $field_number$,\n",
"enum_name", enum_name,
"field_name", field_name,
"field_number", absl::StrCat(field->number()));
printer->Print("$enum_name$_$field_name$ = $field_number$,\n", "enum_name",
enum_name, "field_name", field_name, "field_number",
absl::StrCat(field->number()));
}
printer->Outdent();
// clang-format off
printer->Print(
"};\n"
"\n");
// clang-format on
}
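Substituting the printer variables above, the emitted case enum has roughly this shape; the exact composition of $enum_name$ is defined elsewhere, and the message, oneof, and field names below are invented for illustration:

    typedef GPB_ENUM(MyMessage_Value_OneOfCase) {
      MyMessage_Value_OneOfCase_GPBUnsetOneOfCase = 0,
      MyMessage_Value_OneOfCase_Name = 1,
      MyMessage_Value_OneOfCase_Id = 2,
    };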
void OneofGenerator::GeneratePublicCasePropertyDeclaration(
io::Printer* printer) {
// clang-format off
printer->Print(
variables_,
"$comments$"
"@property(nonatomic, readonly) $enum_name$ $name$OneOfCase;\n"
"\n");
// clang-format on
}
void OneofGenerator::GenerateClearFunctionDeclaration(io::Printer* printer) {
// clang-format off
printer->Print(
variables_,
"/**\n"
" * Clears whatever value was set for the oneof '$name$'.\n"
" **/\n"
"void $owning_message_class$_Clear$capitalized_name$OneOfCase($owning_message_class$ *message);\n");
// clang-format on
}
void OneofGenerator::GeneratePropertyImplementation(io::Printer* printer) {
printer->Print(
variables_,
"@dynamic $name$OneOfCase;\n");
printer->Print(variables_, "@dynamic $name$OneOfCase;\n");
}
void OneofGenerator::GenerateClearFunctionImplementation(io::Printer* printer) {
// clang-format off
printer->Print(
variables_,
"void $owning_message_class$_Clear$capitalized_name$OneOfCase($owning_message_class$ *message) {\n"
@ -126,6 +125,7 @@ void OneofGenerator::GenerateClearFunctionImplementation(io::Printer* printer) {
" GPBOneofDescriptor *oneof = [descriptor.oneofs objectAtIndex:$raw_index$];\n"
" GPBClearOneof(message, oneof);\n"
"}\n");
// clang-format on
}
std::string OneofGenerator::DescriptorName(void) const {

View File

@ -34,8 +34,8 @@
#include <string>
#include "absl/strings/str_cat.h"
#include "google/protobuf/compiler/objectivec/names.h"
#include "google/protobuf/compiler/objectivec/helpers.h"
#include "google/protobuf/compiler/objectivec/names.h"
#include "google/protobuf/io/printer.h"
namespace google {

View File

@ -48,7 +48,8 @@ class PrimitiveFieldGenerator : public SingleFieldGenerator {
PrimitiveFieldGenerator(const PrimitiveFieldGenerator&) = delete;
PrimitiveFieldGenerator& operator=(const PrimitiveFieldGenerator&) = delete;
virtual void GenerateFieldStorageDeclaration(io::Printer* printer) const override;
virtual void GenerateFieldStorageDeclaration(
io::Printer* printer) const override;
virtual int ExtraRuntimeHasBitsNeeded(void) const override;
virtual void SetExtraRuntimeHasBitsBase(int index_base) override;

View File

@ -28,12 +28,13 @@
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#include "google/protobuf/compiler/code_generator.h"
#include "google/protobuf/compiler/objectivec/text_format_decode_data.h"
#include "absl/strings/ascii.h"
#include "absl/strings/escaping.h"
#include "absl/strings/str_split.h"
#include "absl/strings/str_replace.h"
#include "google/protobuf/compiler/objectivec/text_format_decode_data.h"
#include "absl/strings/str_split.h"
#include "google/protobuf/compiler/code_generator.h"
#include "google/protobuf/compiler/objectivec/names.h"
#include "google/protobuf/io/coded_stream.h"
#include "google/protobuf/io/printer.h"
@ -166,9 +167,9 @@ std::string DirectDecodeString(const std::string& str) {
} // namespace
TextFormatDecodeData::TextFormatDecodeData() { }
TextFormatDecodeData::TextFormatDecodeData() {}
TextFormatDecodeData::~TextFormatDecodeData() { }
TextFormatDecodeData::~TextFormatDecodeData() {}
void TextFormatDecodeData::AddString(int32_t key,
const std::string& input_for_decode,
@ -177,8 +178,8 @@ void TextFormatDecodeData::AddString(int32_t key,
i != entries_.end(); ++i) {
if (i->first == key) {
std::cerr << "error: duplicate key (" << key
<< ") making TextFormat data, input: \"" << input_for_decode
<< "\", desired: \"" << desired_output << "\"." << std::endl;
<< ") making TextFormat data, input: \"" << input_for_decode
<< "\", desired: \"" << desired_output << "\"." << std::endl;
std::cerr.flush();
abort();
}
@ -213,16 +214,17 @@ std::string TextFormatDecodeData::DecodeDataForString(
const std::string& input_for_decode, const std::string& desired_output) {
if (input_for_decode.empty() || desired_output.empty()) {
std::cerr << "error: got empty string for making TextFormat data, input: \""
<< input_for_decode << "\", desired: \"" << desired_output << "\"."
<< std::endl;
<< input_for_decode << "\", desired: \"" << desired_output
<< "\"." << std::endl;
std::cerr.flush();
abort();
}
if ((input_for_decode.find('\0') != std::string::npos) ||
(desired_output.find('\0') != std::string::npos)) {
std::cerr << "error: got a null char in a string for making TextFormat data,"
<< " input: \"" << absl::CEscape(input_for_decode) << "\", desired: \""
<< absl::CEscape(desired_output) << "\"." << std::endl;
std::cerr
<< "error: got a null char in a string for making TextFormat data,"
<< " input: \"" << absl::CEscape(input_for_decode) << "\", desired: \""
<< absl::CEscape(desired_output) << "\"." << std::endl;
std::cerr.flush();
abort();
}
@ -259,7 +261,6 @@ std::string TextFormatDecodeData::DecodeDataForString(
return builder.Finish() + (char)'\0';
}
} // namespace objectivec
} // namespace compiler
} // namespace protobuf
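A hedged usage sketch of this helper, using only the AddString()/Data() calls exercised in this change; the keys and strings are invented, and the preconditions mirror the checks above (non-empty strings, no embedded NUL bytes, no duplicate keys):

    // Assumes the generator's TextFormatDecodeData declared in the matching header.
    TextFormatDecodeData decode_data;
    decode_data.AddString(1, "field_foo", "FieldFoo");  // key, input_for_decode, desired_output
    decode_data.AddString(2, "bar_baz", "BarBaz");
    const std::string blob = decode_data.Data();        // compact byte-coded table embedded in generated code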

View File

@ -68,7 +68,6 @@ class PROTOC_EXPORT TextFormatDecodeData {
std::vector<DataEntry> entries_;
};
} // namespace objectivec
} // namespace compiler
} // namespace protobuf

View File

@ -212,7 +212,6 @@ TEST(ObjCHelper, TextFormatDecodeData_ByteCodes) {
EXPECT_EQ(expected, decode_data.Data());
}
// Death tests do not work on Windows as of yet.
#ifdef PROTOBUF_HAS_DEATH_TEST
TEST(ObjCHelperDeathTest, TextFormatDecodeData_Failures) {

View File

@ -89,7 +89,6 @@ message CodeGeneratorRequest {
// The version number of protocol compiler.
optional Version compiler_version = 3;
}
// The plugin writes an encoded CodeGeneratorResponse to stdout.

View File

@ -1,3 +1,33 @@
// Protocol Buffers - Google's data interchange format
// Copyright 2008 Google Inc. All rights reserved.
// https://developers.google.com/protocol-buffers/
//
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
// * Redistributions of source code must retain the above copyright
// notice, this list of conditions and the following disclaimer.
// * Redistributions in binary form must reproduce the above
// copyright notice, this list of conditions and the following disclaimer
// in the documentation and/or other materials provided with the
// distribution.
// * Neither the name of Google Inc. nor the names of its
// contributors may be used to endorse or promote products derived from
// this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#ifndef GOOGLE_PROTOBUF_COMPILER_PYTHON_PYTHON_GENERATOR_H_
#define GOOGLE_PROTOBUF_COMPILER_PYTHON_PYTHON_GENERATOR_H_

View File

@ -36,7 +36,6 @@
// A valid .proto file can be translated directly to a FileDescriptorProto
// without any other information (e.g. without reading its imports).
syntax = "proto2";
package google.protobuf;
@ -134,7 +133,6 @@ message ExtensionRangeOptions {
// The parser stores options it doesn't recognize here. See above.
repeated UninterpretedOption uninterpreted_option = 999;
// Clients can define custom options in extensions of this message. See above.
extensions 1000 to max;
}
@ -310,7 +308,6 @@ message MethodDescriptorProto {
optional bool server_streaming = 6 [default = false];
}
// ===================================================================
// Options
@ -351,7 +348,6 @@ message FileOptions {
// domain names.
optional string java_package = 1;
// Controls the name of the wrapper Java class generated for the .proto file.
// That class will always contain the .proto file's getDescriptor() method as
// well as any top-level extensions defined in the .proto file.
@ -378,7 +374,6 @@ message FileOptions {
// This option has no effect on when used with the lite runtime.
optional bool java_string_check_utf8 = 27 [default = false];
// Generated classes can be optimized for speed or code size.
enum OptimizeMode {
SPEED = 1; // Generate complete code for parsing, serialization,
@ -395,9 +390,6 @@ message FileOptions {
// - Otherwise, the basename of the .proto file, without extension.
optional string go_package = 11;
// Should generic services be generated in each language? "Generic" services
// are not specific to any particular RPC system. They are generated by the
// main code generators in each language (without additional plugins).
@ -423,7 +415,6 @@ message FileOptions {
// only to generated classes for C++.
optional bool cc_enable_arenas = 31 [default = true];
// Sets the objective c class prefix which is prepended to all objective c
// generated classes from this .proto. There is no default.
optional string objc_class_prefix = 36;
@ -456,7 +447,6 @@ message FileOptions {
// determining the ruby package.
optional string ruby_package = 45;
// The parser stores options it doesn't recognize here.
// See the documentation for the "Options" section above.
repeated UninterpretedOption uninterpreted_option = 999;
@ -528,7 +518,6 @@ message MessageOptions {
reserved 8; // javalite_serializable
reserved 9; // javanano_as_lite
// The parser stores options it doesn't recognize here. See above.
repeated UninterpretedOption uninterpreted_option = 999;
@ -597,7 +586,6 @@ message FieldOptions {
// call from multiple threads concurrently, while non-const methods continue
// to require exclusive access.
//
//
// Note that implementations may choose not to check required fields within
// a lazy sub-message. That is, calling IsInitialized() on the outer message
// may return true even if the inner message has missing required fields.
@ -627,7 +615,6 @@ message FieldOptions {
// For Google-internal migration only. Do not use.
optional bool weak = 10 [default = false];
// The parser stores options it doesn't recognize here. See above.
repeated UninterpretedOption uninterpreted_option = 999;
@ -731,7 +718,6 @@ message MethodOptions {
extensions 1000 to max;
}
// A message representing a option the parser does not recognize. This only
// appears in options protos created by the compiler::Parser class.
// DescriptorPool resolves these when building Descriptor objects. Therefore,

View File

@ -42,7 +42,6 @@
#include "absl/strings/match.h"
#include "absl/strings/str_replace.h"
#include "google/protobuf/descriptor.pb.h"
#include "google/protobuf/stubs/stl_util.h"
namespace google {

View File

@ -99,7 +99,6 @@ option objc_class_prefix = "GPB";
// be expressed in JSON format as "3.000000001s", and 3 seconds and 1
// microsecond should be expressed in JSON format as "3.000001s".
//
//
message Duration {
// Signed seconds of the span of time. Must be from -315,576,000,000
// to +315,576,000,000 inclusive. Note: these bounds are computed from:

View File

@ -53,7 +53,6 @@
#include "absl/strings/match.h"
#include "google/protobuf/test_util.h"
#include "google/protobuf/test_util2.h"
#include "google/protobuf/stubs/stl_util.h"
#include "google/protobuf/stubs/strutil.h"
@ -552,7 +551,7 @@ TEST(ExtensionSetTest, SerializationToArray) {
size_t size = source.ByteSizeLong();
std::string data;
data.resize(size);
uint8_t* target = reinterpret_cast<uint8_t*>(::google::protobuf::string_as_array(&data));
uint8_t* target = reinterpret_cast<uint8_t*>(&data[0]);
uint8_t* end = source.SerializeWithCachedSizesToArray(target);
EXPECT_EQ(size, end - target);
EXPECT_TRUE(destination.ParseFromString(data));
@ -574,7 +573,7 @@ TEST(ExtensionSetTest, SerializationToStream) {
std::string data;
data.resize(size);
{
io::ArrayOutputStream array_stream(::google::protobuf::string_as_array(&data), size, 1);
io::ArrayOutputStream array_stream(&data[0], size, 1);
io::CodedOutputStream output_stream(&array_stream);
source.SerializeWithCachedSizes(&output_stream);
ASSERT_FALSE(output_stream.HadError());
@ -596,7 +595,7 @@ TEST(ExtensionSetTest, PackedSerializationToArray) {
size_t size = source.ByteSizeLong();
std::string data;
data.resize(size);
uint8_t* target = reinterpret_cast<uint8_t*>(::google::protobuf::string_as_array(&data));
uint8_t* target = reinterpret_cast<uint8_t*>(&data[0]);
uint8_t* end = source.SerializeWithCachedSizesToArray(target);
EXPECT_EQ(size, end - target);
EXPECT_TRUE(destination.ParseFromString(data));
@ -618,7 +617,7 @@ TEST(ExtensionSetTest, PackedSerializationToStream) {
std::string data;
data.resize(size);
{
io::ArrayOutputStream array_stream(::google::protobuf::string_as_array(&data), size, 1);
io::ArrayOutputStream array_stream(&data[0], size, 1);
io::CodedOutputStream output_stream(&array_stream);
source.SerializeWithCachedSizes(&output_stream);
ASSERT_FALSE(output_stream.HadError());

View File

@ -3095,7 +3095,7 @@ const internal::TcParseTableBase* Reflection::CreateTcParseTableForMessageSet()
// Create a dummy table that only exists to make TcParser::ParseLoop jump
// into the reflective parse loop.
using Table = internal::TcParseTable<0, 0, 0, 1, 1>;
using Table = internal::TcParseTable<0, 0, 0, 0, 1>;
// We use `operator new` here because the destruction will be done with
// `operator delete` unconditionally.
void* p = ::operator new(sizeof(Table));
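A minimal, self-contained sketch of the allocate-with-::operator new / construct-in-place pattern referenced by the comment above; the type here is illustrative, not the real parse table:

    #include <new>

    struct DummyTable { int value; };

    DummyTable* MakeTable() {
      // Raw storage obtained with ::operator new can later be released with
      // ::operator delete, regardless of the object's type.
      void* p = ::operator new(sizeof(DummyTable));
      // Placement-new constructs the object in that storage.
      return ::new (p) DummyTable{42};
    }

    void DestroyTable(DummyTable* t) {
      t->~DummyTable();
      ::operator delete(t);
    }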

View File

@ -347,7 +347,7 @@ struct TcParseTable {
// Entries for all fields:
std::array<TcParseTableBase::FieldEntry, kNumFieldEntries> field_entries;
std::array<TcParseTableBase::FieldAux, kNumFieldAux> aux_entries;
std::array<char, kNameTableSize> field_names;
std::array<char, kNameTableSize == 0 ? 1 : kNameTableSize> field_names;
};
// Partial specialization: if there are no aux entries, there will be no array.
@ -363,7 +363,7 @@ struct TcParseTable<kFastTableSizeLog2, kNumFieldEntries, 0, kNameTableSize,
fast_entries;
std::array<uint16_t, kFieldLookupSize> field_lookup_table;
std::array<TcParseTableBase::FieldEntry, kNumFieldEntries> field_entries;
std::array<char, kNameTableSize> field_names;
std::array<char, kNameTableSize == 0 ? 1 : kNameTableSize> field_names;
};
// Partial specialization: if there are no fields at all, then we can save space
@ -375,7 +375,7 @@ struct TcParseTable<0, 0, 0, kNameTableSize, kFieldLookupSize> {
// The fast parsing loop will always use this entry, so it must be present.
std::array<TcParseTableBase::FastFieldEntry, 1> fast_entries;
std::array<uint16_t, kFieldLookupSize> field_lookup_table;
std::array<char, kNameTableSize> field_names;
std::array<char, kNameTableSize == 0 ? 1 : kNameTableSize> field_names;
};
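The 'kNameTableSize == 0 ? 1 : kNameTableSize' edits above pair with the new option of omitting the field-name table entirely: sizing the trailing array to at least one byte keeps it from ever being a zero-sized member. A standalone sketch of the same pattern, with illustrative names:

    #include <array>
    #include <cstddef>

    template <std::size_t kNameTableSize>
    struct NameTable {
      // Never declare a zero-sized array member; reserve one placeholder byte
      // when the table is empty.
      std::array<char, kNameTableSize == 0 ? 1 : kNameTableSize> field_names;
    };

    static_assert(sizeof(NameTable<0>) >= 1, "placeholder byte is always present");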
static_assert(std::is_standard_layout<TcParseTable<1>>::value,

View File

@ -420,10 +420,9 @@ absl::string_view FieldNameForTable(
case field_layout::kTvUtf8:
case field_layout::kTvUtf8Debug:
return field->name();
break;
}
}
return "?";
return "";
}
std::vector<uint8_t> GenerateFieldNames(
@ -431,6 +430,20 @@ std::vector<uint8_t> GenerateFieldNames(
const std::vector<TailCallTableInfo::FieldEntryInfo>& entries) {
static constexpr int kMaxNameLength = 255;
std::vector<uint8_t> out;
bool found_needed_name = false;
for (const auto& entry : entries) {
if (!FieldNameForTable(entry).empty()) {
found_needed_name = true;
break;
}
}
// No names needed. Omit the whole table.
if (!found_needed_name) {
return out;
}
// First, we output the size of each string, as an unsigned byte. The first
// string is the message name.
int count = 1;

View File

@ -610,6 +610,7 @@ class PROTOBUF_EXPORT TcParser final {
// For FindFieldEntry tests:
friend class FindFieldEntryTest;
friend struct ParseFunctionGeneratorTestPeer;
static constexpr const uint32_t kMtSmallScanSize = 4;
// Mini parsing:

View File

@ -96,7 +96,6 @@
#include "absl/strings/str_format.h"
#include "google/protobuf/io/strtod.h"
#include "google/protobuf/io/zero_copy_stream.h"
#include "google/protobuf/stubs/stl_util.h"
// Must be included last.
#include "google/protobuf/port_def.inc"

View File

@ -47,7 +47,6 @@
#include "google/protobuf/stubs/logging.h"
#include "google/protobuf/io/io_win32.h"
#include "google/protobuf/io/zero_copy_stream_impl.h"
#include "google/protobuf/stubs/stl_util.h"
namespace google {

View File

@ -53,7 +53,6 @@
#include "google/protobuf/stubs/common.h"
#include "google/protobuf/io/zero_copy_stream.h"
#include "google/protobuf/port.h"
#include "google/protobuf/stubs/stl_util.h"
// Must be included last.
@ -387,24 +386,10 @@ class PROTOBUF_EXPORT LimitingInputStream PROTOBUF_FUTURE_FINAL
// ===================================================================
// mutable_string_data() and as_string_data() are workarounds to improve
// the performance of writing new data to an existing string. Unfortunately
// the methods provided by the string class are suboptimal, and using memcpy()
// is mildly annoying because it requires its pointer args to be non-NULL even
// if we ask it to copy 0 bytes. Furthermore, string_as_array() has the
// property that it always returns NULL if its arg is the empty string, exactly
// what we want to avoid if we're using it in conjunction with memcpy()!
// With C++11, the desired memcpy() boils down to memcpy(..., &(*s)[0], size),
// where s is a string*. Without C++11, &(*s)[0] is not guaranteed to be safe,
// so we use string_as_array(), and live with the extra logic that tests whether
// *s is empty.
// Return a pointer to mutable characters underlying the given string. The
// return value is valid until the next time the string is resized. We
// trust the caller to treat the return value as an array of length s->size().
inline char* mutable_string_data(std::string* s) {
// This should be simpler & faster than string_as_array() because the latter
// is guaranteed to return NULL when *s is empty, so it has to check for that.
return &(*s)[0];
}
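A minimal sketch of the pattern that replaces string_as_array() throughout this change, assuming C++11 contiguous std::string storage; the helper name is invented:

    #include <cstddef>
    #include <cstring>
    #include <string>

    // Resize first, then write through &s[0]. Forming &(*s)[0] is valid even for
    // an empty string in C++11; only write through it when there is something to
    // copy, and treat the pointer as invalidated by the next resize.
    std::string CopyBytesToString(const void* data, std::size_t size) {
      std::string out;
      out.resize(size);
      if (size != 0) {
        std::memcpy(&out[0], data, size);
      }
      return out;
    }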

View File

@ -2,7 +2,8 @@ load("@rules_cc//cc:defs.bzl", "cc_library", "cc_test")
load("//build_defs:cpp_opts.bzl", "COPTS")
package(default_visibility = [
"//src/google/protobuf/util:__subpackages__",
"//src/google/protobuf/json:__pkg__",
"//pkg:__pkg__",
])
licenses(["notice"])
@ -70,7 +71,6 @@ cc_library(
hdrs = ["internal/zero_copy_buffered_stream.h"],
copts = COPTS,
strip_include_prefix = "/src",
visibility = ["//visibility:private"],
deps = [
"//src/google/protobuf:port_def",
"//src/google/protobuf/io",
@ -101,7 +101,6 @@ cc_library(
hdrs = ["internal/untyped_message.h"],
copts = COPTS,
strip_include_prefix = "/src",
visibility = ["//visibility:private"],
deps = [
"//src/google/protobuf",
"//src/google/protobuf:port_def",
@ -124,7 +123,6 @@ cc_library(
hdrs = ["internal/lexer.h"],
copts = COPTS,
strip_include_prefix = "/src",
visibility = ["//visibility:private"],
deps = [
":message_path",
":zero_copy_buffered_stream",
@ -164,7 +162,6 @@ cc_library(
hdrs = ["internal/writer.h"],
copts = COPTS,
strip_include_prefix = "/src",
visibility = ["//visibility:private"],
deps = [
"//src/google/protobuf:port_def",
"//src/google/protobuf/io",
@ -180,7 +177,6 @@ cc_library(
hdrs = ["internal/descriptor_traits.h"],
copts = COPTS,
strip_include_prefix = "/src",
visibility = ["//visibility:private"],
deps = [
":lexer",
":untyped_message",
@ -207,7 +203,6 @@ cc_library(
],
copts = COPTS,
strip_include_prefix = "/src",
visibility = ["//visibility:private"],
deps = [
":descriptor_traits",
":lexer",
@ -242,7 +237,6 @@ cc_library(
],
copts = COPTS,
strip_include_prefix = "/src",
visibility = ["//visibility:private"],
deps = [
":descriptor_traits",
":untyped_message",
@ -266,7 +260,6 @@ cc_library(
hdrs = ["internal/message_path.h"],
copts = COPTS,
strip_include_prefix = "/src",
visibility = ["//visibility:private"],
deps = [
"//src/google/protobuf",
"@com_google_absl//absl/cleanup",

View File

@ -675,8 +675,7 @@ TEST(Lite, AllLite28) {
MapLiteTestUtil::SetMapFields(&message1);
size_t size = message1.ByteSizeLong();
data.resize(size);
::uint8_t* start =
reinterpret_cast<::uint8_t*>(::google::protobuf::string_as_array(&data));
::uint8_t* start = reinterpret_cast<::uint8_t*>(&data[0]);
::uint8_t* end = message1.SerializeWithCachedSizesToArray(start);
EXPECT_EQ(size, end - start);
EXPECT_TRUE(message2.ParseFromString(data));
@ -696,7 +695,7 @@ TEST(Lite, AllLite29) {
data.resize(size);
{
// Allow the output stream to buffer only one byte at a time.
io::ArrayOutputStream array_stream(::google::protobuf::string_as_array(&data), size, 1);
io::ArrayOutputStream array_stream(&data[0], size, 1);
io::CodedOutputStream output_stream(&array_stream);
message1.SerializeWithCachedSizes(&output_stream);
EXPECT_FALSE(output_stream.HadError());

View File

@ -2692,7 +2692,7 @@ TEST(GeneratedMapFieldTest, SerializationToArray) {
MapTestUtil::SetMapFields(&message1);
size_t size = message1.ByteSizeLong();
data.resize(size);
uint8_t* start = reinterpret_cast<uint8_t*>(::google::protobuf::string_as_array(&data));
uint8_t* start = reinterpret_cast<uint8_t*>(&data[0]);
uint8_t* end = message1.SerializeWithCachedSizesToArray(start);
EXPECT_EQ(size, end - start);
EXPECT_TRUE(message2.ParseFromString(data));
@ -2708,7 +2708,7 @@ TEST(GeneratedMapFieldTest, SerializationToStream) {
data.resize(size);
{
// Allow the output stream to buffer only one byte at a time.
io::ArrayOutputStream array_stream(::google::protobuf::string_as_array(&data), size, 1);
io::ArrayOutputStream array_stream(&data[0], size, 1);
io::CodedOutputStream output_stream(&array_stream);
message1.SerializeWithCachedSizes(&output_stream);
EXPECT_FALSE(output_stream.HadError());
@ -3739,7 +3739,7 @@ static std::string DeterministicSerializationWithSerializePartialToCodedStream(
const T& t) {
const size_t size = t.ByteSizeLong();
std::string result(size, '\0');
io::ArrayOutputStream array_stream(::google::protobuf::string_as_array(&result), size);
io::ArrayOutputStream array_stream(&result[0], size);
io::CodedOutputStream output_stream(&array_stream);
output_stream.SetSerializationDeterministic(true);
t.SerializePartialToCodedStream(&output_stream);
@ -3753,7 +3753,7 @@ static std::string DeterministicSerializationWithSerializeToCodedStream(
const T& t) {
const size_t size = t.ByteSizeLong();
std::string result(size, '\0');
io::ArrayOutputStream array_stream(::google::protobuf::string_as_array(&result), size);
io::ArrayOutputStream array_stream(&result[0], size);
io::CodedOutputStream output_stream(&array_stream);
output_stream.SetSerializationDeterministic(true);
t.SerializeToCodedStream(&output_stream);
@ -3766,7 +3766,7 @@ template <typename T>
static std::string DeterministicSerialization(const T& t) {
const size_t size = t.ByteSizeLong();
std::string result(size, '\0');
io::ArrayOutputStream array_stream(::google::protobuf::string_as_array(&result), size);
io::ArrayOutputStream array_stream(&result[0], size);
{
io::CodedOutputStream output_stream(&array_stream);
output_stream.SetSerializationDeterministic(true);
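For reference, the deterministic-serialization flow used by these test helpers, written out as a self-contained sketch; the message type is a template parameter, and any generated protobuf message should fit:

    #include <string>
    #include "google/protobuf/io/coded_stream.h"
    #include "google/protobuf/io/zero_copy_stream_impl_lite.h"

    // Serialize msg with deterministic output (e.g. stable map ordering) into a
    // string pre-sized from ByteSizeLong().
    template <typename MessageT>
    std::string SerializeDeterministically(const MessageT& msg) {
      const size_t size = msg.ByteSizeLong();
      std::string result(size, '\0');
      google::protobuf::io::ArrayOutputStream array_stream(&result[0],
                                                           static_cast<int>(size));
      {
        google::protobuf::io::CodedOutputStream output_stream(&array_stream);
        output_stream.SetSerializationDeterministic(true);
        msg.SerializePartialToCodedStream(&output_stream);
      }
      return result;
    }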

View File

@ -76,7 +76,6 @@ message TestSameTypeMap {
map<int32, int32> map2 = 2;
}
enum MapEnum {
MAP_ENUM_FOO = 0;
MAP_ENUM_BAR = 1;

View File

@ -61,7 +61,6 @@
#include "google/protobuf/unknown_field_set.h"
#include "google/protobuf/wire_format.h"
#include "google/protobuf/wire_format_lite.h"
#include "google/protobuf/stubs/stl_util.h"
// Must be included last.

View File

@ -56,7 +56,6 @@
#include "absl/strings/cord.h"
#include "absl/strings/str_cat.h"
#include "google/protobuf/stubs/strutil.h"
#include "google/protobuf/stubs/stl_util.h"
// Must be included last.
#include "google/protobuf/port_def.inc"

View File

@ -26,7 +26,6 @@ cc_library(
"platform_macros.h",
"port.h",
"status_macros.h",
"stl_util.h",
"strutil.h",
],
copts = COPTS,
@ -56,7 +55,6 @@ cc_library(
"platform_macros.h",
"port.h",
"status_macros.h",
"stl_util.h",
"strutil.h",
],
deps = [

View File

@ -1,3 +1,33 @@
// Protocol Buffers - Google's data interchange format
// Copyright 2008 Google Inc. All rights reserved.
// https://developers.google.com/protocol-buffers/
//
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
// * Redistributions of source code must retain the above copyright
// notice, this list of conditions and the following disclaimer.
// * Redistributions in binary form must reproduce the above
// copyright notice, this list of conditions and the following disclaimer
// in the documentation and/or other materials provided with the
// distribution.
// * Neither the name of Google Inc. nor the names of its
// contributors may be used to endorse or promote products derived from
// this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#ifndef GOOGLE_PROTOBUF_STUBS_CALLBACK_H_
#define GOOGLE_PROTOBUF_STUBS_CALLBACK_H_

View File

@ -1,68 +0,0 @@
// Protocol Buffers - Google's data interchange format
// Copyright 2008 Google Inc. All rights reserved.
// https://developers.google.com/protocol-buffers/
//
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
// * Redistributions of source code must retain the above copyright
// notice, this list of conditions and the following disclaimer.
// * Redistributions in binary form must reproduce the above
// copyright notice, this list of conditions and the following disclaimer
// in the documentation and/or other materials provided with the
// distribution.
// * Neither the name of Google Inc. nor the names of its
// contributors may be used to endorse or promote products derived from
// this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
// from google3/util/gtl/stl_util.h
#ifndef GOOGLE_PROTOBUF_STUBS_STL_UTIL_H__
#define GOOGLE_PROTOBUF_STUBS_STL_UTIL_H__
#include <algorithm>
#include "google/protobuf/stubs/common.h"
// Must be last.
#include "google/protobuf/port_def.inc" // NOLINT
namespace google {
namespace protobuf {
// Return a mutable char* pointing to a string's internal buffer,
// which may not be null-terminated. Writing through this pointer will
// modify the string.
//
// string_as_array(&str)[i] is valid for 0 <= i < str.size() until the
// next call to a string method that invalidates iterators.
//
// As of 2006-04, there is no standard-blessed way of getting a
// mutable reference to a string's internal buffer. However, issue 530
// (http://www.open-std.org/JTC1/SC22/WG21/docs/lwg-active.html#530)
// proposes this as the method. According to Matt Austern, this should
// already work on all current implementations.
inline char* string_as_array(std::string* str) {
// DO NOT USE const_cast<char*>(str->data())! See the unittest for why.
return str->empty() ? nullptr : &*str->begin();
}
} // namespace protobuf
} // namespace google
#include "google/protobuf/port_undef.inc" // NOLINT
#endif // GOOGLE_PROTOBUF_STUBS_STL_UTIL_H__
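
The header removed above existed to paper over pre-C++11 std::string, where there was no standard-blessed way to get a writable pointer into the buffer. Since C++11 the storage is guaranteed contiguous, so the call sites updated in this commit simply index the string. A minimal sketch of the equivalence, assuming a C++17 toolchain for the non-const data() overload:

    #include <cassert>
    #include <string>

    int main() {
      std::string buf(8, '\0');
      // Former spelling:  char* p = google::protobuf::string_as_array(&buf);
      char* p = buf.empty() ? nullptr : &buf[0];  // what the updated call sites do
      char* q = buf.data();                       // C++17 non-const overload, same pointer
      assert(p == q);
      p[0] = 'x';                                 // writes are visible through the string
      assert(buf[0] == 'x');
      return 0;
    }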

View File

@ -44,7 +44,6 @@
#include "absl/strings/ascii.h"
#include "absl/strings/string_view.h"
#include "google/protobuf/stubs/logging.h"
#include "google/protobuf/stubs/stl_util.h"
#ifdef _WIN32
// MSVC has only _snprintf, not snprintf.
@ -671,9 +670,8 @@ void Base64EscapeInternal(const unsigned char *src, int szsrc,
const absl::string_view base64_chars) {
const int calc_escaped_size = CalculateBase64EscapedLen(szsrc, do_padding);
dest->resize(calc_escaped_size);
const int escaped_len =
Base64EscapeInternal(src, szsrc, string_as_array(dest), dest->size(),
base64_chars, do_padding);
const int escaped_len = Base64EscapeInternal(
src, szsrc, &(*dest)[0], dest->size(), base64_chars, do_padding);
GOOGLE_DCHECK_EQ(calc_escaped_size, escaped_len);
dest->erase(escaped_len);
}
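
The updated Base64EscapeInternal keeps its resize, write through a raw pointer, then trim shape; only the way the destination pointer is obtained changes. A rough standalone sketch of that pattern, where encode_into is a hypothetical stand-in for the real encoder:

    #include <cstring>
    #include <string>

    // Hypothetical encoder: writes at most cap bytes into dest, returns bytes written.
    static int encode_into(char* dest, int cap) {
      static const char kOut[] = "Zm9v";
      int n = static_cast<int>(sizeof(kOut) - 1);
      if (n > cap) n = cap;
      std::memcpy(dest, kOut, n);
      return n;
    }

    int main() {
      std::string out;
      out.resize(16);                                  // the computed escaped size above
      const int written = encode_into(&out[0], static_cast<int>(out.size()));
      out.erase(written);                              // drop anything past the real output
      return out == "Zm9v" ? 0 : 1;
    }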

View File

@ -35,7 +35,6 @@
#include <gtest/gtest.h>
#include <locale.h>
#include "google/protobuf/stubs/stl_util.h"
#include "google/protobuf/testing/googletest.h"
#ifdef _WIN32

View File

@ -67,7 +67,6 @@
#include "google/protobuf/repeated_field.h"
#include "google/protobuf/unknown_field_set.h"
#include "google/protobuf/wire_format_lite.h"
#include "google/protobuf/stubs/stl_util.h"
// Must be included last.
#include "google/protobuf/port_def.inc"

View File

@ -90,7 +90,6 @@ option objc_class_prefix = "GPB";
// Timestamp timestamp = Timestamp.newBuilder().setSeconds(millis / 1000)
// .setNanos((int) ((millis % 1000) * 1000000)).build();
//
//
// Example 5: Compute Timestamp from Java `Instant.now()`.
//
// Instant now = Instant.now();
@ -99,7 +98,6 @@ option objc_class_prefix = "GPB";
// Timestamp.newBuilder().setSeconds(now.getEpochSecond())
// .setNanos(now.getNano()).build();
//
//
// Example 6: Compute Timestamp from current time in Python.
//
// timestamp = Timestamp()
@ -132,7 +130,6 @@ option objc_class_prefix = "GPB";
// http://www.joda.org/joda-time/apidocs/org/joda/time/format/ISODateTimeFormat.html#dateTime%2D%2D
// ) to obtain a formatter capable of generating timestamps in this format.
//
//
message Timestamp {
// Represents seconds of UTC time since Unix epoch
// 1970-01-01T00:00:00Z. Must be from 0001-01-01T00:00:00Z to

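The comment block above shows how to fill a Timestamp from the current time in Java and Python; for completeness, a comparable C++ sketch using std::chrono with the generated Timestamp class (the chrono-based conversion is this sketch's choice, not something taken from the diff):

    #include <chrono>
    #include <cstdint>
    #include "google/protobuf/timestamp.pb.h"

    google::protobuf::Timestamp TimestampNow() {
      const auto since_epoch = std::chrono::system_clock::now().time_since_epoch();
      const auto secs = std::chrono::duration_cast<std::chrono::seconds>(since_epoch);
      const auto nanos =
          std::chrono::duration_cast<std::chrono::nanoseconds>(since_epoch - secs);
      google::protobuf::Timestamp ts;
      ts.set_seconds(secs.count());                       // int64 seconds since the Unix epoch
      ts.set_nanos(static_cast<int32_t>(nanos.count()));  // nanos within the current second
      return ts;
    }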
View File

@ -889,7 +889,6 @@ message TestRequiredOneof {
}
}
// Test messages for packed fields
message TestPackedTypes {
@ -1070,7 +1069,6 @@ message TestMessageSize {
optional int64 m6 = 6;
}
// Test that RPC services work.
message FooRequest {}
message FooResponse {}
@ -1083,7 +1081,6 @@ service TestService {
rpc Bar(BarRequest) returns (BarResponse);
}
message BarRequest {}
message BarResponse {}
@ -1437,7 +1434,6 @@ message TestVerifyBigFieldNumberUint32 {
optional Nested optional_nested = 1;
}
// This message contains different kinds of enums to exercise the different
// parsers in the table-driven implementation.
message EnumParseTester {
@ -1565,4 +1561,3 @@ message StringParseTester {
repeated string repeated_string_midfield = 1002;
repeated string repeated_string_hifield = 1000002;
};

View File

@ -64,7 +64,6 @@ enum ImportEnum {
IMPORT_BAZ = 9;
}
// To use an enum in a map, it must have 0 as its first value.
enum ImportEnumForMap {
UNKNOWN = 0;

View File

@ -59,7 +59,6 @@
#include "absl/time/clock.h"
#include "absl/time/time.h"
#include "google/protobuf/test_util.h"
#include "google/protobuf/stubs/stl_util.h"
namespace google {
@ -207,15 +206,13 @@ TEST_F(UnknownFieldSetTest, SerializeFastAndSlowAreEquivalent) {
slow_buffer.resize(size);
fast_buffer.resize(size);
uint8_t* target =
reinterpret_cast<uint8_t*>(::google::protobuf::string_as_array(&fast_buffer));
uint8_t* target = reinterpret_cast<uint8_t*>(&fast_buffer[0]);
uint8_t* result = WireFormat::SerializeUnknownFieldsToArray(
empty_message_.unknown_fields(), target);
EXPECT_EQ(size, result - target);
{
io::ArrayOutputStream raw_stream(::google::protobuf::string_as_array(&slow_buffer), size,
1);
io::ArrayOutputStream raw_stream(&slow_buffer[0], size, 1);
io::CodedOutputStream output_stream(&raw_stream);
WireFormat::SerializeUnknownFields(empty_message_.unknown_fields(),
&output_stream);

View File

@ -41,7 +41,6 @@ package protobuf_unittest;
import "google/protobuf/any.proto";
import "google/protobuf/struct.proto";
message TestFlagsAndStrings {
required int32 A = 1;
repeated group RepeatedGroup = 2 {
@ -96,7 +95,6 @@ message TestNumbers {
optional uint32 f = 6;
}
message TestCamelCase {
optional string normal_field = 1;
optional int32 CAPITAL_FIELD = 2;

View File

@ -330,17 +330,6 @@ class PROTOBUF_EXPORT MessageDifferencer {
MapKeyComparator& operator=(const MapKeyComparator&) = delete;
virtual ~MapKeyComparator();
// This method is DEPRECATED. It is never called directly by
// MessageDifferencer. New code should implement only the next form of
// IsMatch.
//
// TODO(b/248337479) Remove this method.
virtual bool IsMatch(
const Message& /* message1 */, const Message& /* message2 */,
const std::vector<SpecificField>& /* parent_fields */) const {
GOOGLE_CHECK(false) << "IsMatch() is not implemented.";
return false;
}
// This method should be overridden by every implementation. The arg
// unmapped_any is nonzero if the original messages provided by the user are of
@ -354,7 +343,8 @@ class PROTOBUF_EXPORT MessageDifferencer {
virtual bool IsMatch(const Message& message1, const Message& message2,
int /* unmapped_any */,
const std::vector<SpecificField>& fields) const {
return IsMatch(message1, message2, fields);
GOOGLE_CHECK(false) << "IsMatch() is not implemented.";
return false;
}
};
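
With the parent-fields-only overload deleted, custom comparators must override the form that takes the unpacked_any argument, as the updated test comparators below already do. A minimal sketch of such an override; the lookup of a string field named "key" is purely illustrative:

    #include <string>
    #include <vector>
    #include "google/protobuf/descriptor.h"
    #include "google/protobuf/message.h"
    #include "google/protobuf/util/message_differencer.h"

    // Matches two map entries when a string field named "key" compares equal.
    class KeyFieldComparator
        : public google::protobuf::util::MessageDifferencer::MapKeyComparator {
     public:
      using SpecificField = google::protobuf::util::MessageDifferencer::SpecificField;

      bool IsMatch(const google::protobuf::Message& message1,
                   const google::protobuf::Message& message2,
                   int /* unpacked_any */,
                   const std::vector<SpecificField>& /* parent_fields */) const override {
        const auto* f1 = message1.GetDescriptor()->FindFieldByName("key");
        const auto* f2 = message2.GetDescriptor()->FindFieldByName("key");
        if (f1 == nullptr || f2 == nullptr) return false;
        return message1.GetReflection()->GetString(message1, f1) ==
               message2.GetReflection()->GetString(message2, f2);
      }
    };

A comparator like this is then registered via MessageDifferencer::TreatAsMapUsingKeyComparator() for the repeated field it should govern.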

View File

@ -1907,6 +1907,7 @@ class ValueProductMapKeyComparator
public:
typedef util::MessageDifferencer::SpecificField SpecificField;
bool IsMatch(const Message& message1, const Message& message2,
int unpacked_any,
const std::vector<SpecificField>& parent_fields) const override {
const Reflection* reflection1 = message1.GetReflection();
const Reflection* reflection2 = message2.GetReflection();
@ -1968,6 +1969,7 @@ class OffsetByOneMapKeyComparator
public:
typedef util::MessageDifferencer::SpecificField SpecificField;
bool IsMatch(const Message& message1, const Message& message2,
int unpacked_any,
const std::vector<SpecificField>& parent_fields) const override {
return parent_fields.back().index + 1 == parent_fields.back().new_index;
}
@ -3402,6 +3404,7 @@ class LengthMapKeyComparator
public:
typedef util::MessageDifferencer::SpecificField SpecificField;
bool IsMatch(const Message& message1, const Message& message2,
int unpacked_any,
const std::vector<SpecificField>& parent_fields) const override {
const Reflection* reflection1 = message1.GetReflection();
const Reflection* reflection2 = message2.GetReflection();

View File

@ -30,7 +30,6 @@
#include <gtest/gtest.h>
#include "google/protobuf/stubs/common.h"
#include "google/protobuf/stubs/stl_util.h"
#include "google/protobuf/testing/googletest.h"
#include "google/protobuf/unittest_well_known_types.pb.h"

View File

@ -49,7 +49,6 @@
#include "absl/strings/match.h"
#include "google/protobuf/dynamic_message.h"
#include "google/protobuf/test_util2.h"
#include "google/protobuf/stubs/stl_util.h"
// clang-format off
#include "google/protobuf/port_def.inc"
@ -494,16 +493,14 @@ TEST(WireFormatTest, SerializeMessageSetVariousWaysAreEqual) {
// Serialize to flat array
{
uint8_t* target =
reinterpret_cast<uint8_t*>(::google::protobuf::string_as_array(&flat_data));
uint8_t* target = reinterpret_cast<uint8_t*>(&flat_data[0]);
uint8_t* end = message_set.SerializeWithCachedSizesToArray(target);
EXPECT_EQ(size, end - target);
}
// Serialize to buffer
{
io::ArrayOutputStream array_stream(::google::protobuf::string_as_array(&stream_data), size,
1);
io::ArrayOutputStream array_stream(&stream_data[0], size, 1);
io::CodedOutputStream output_stream(&array_stream);
message_set.SerializeWithCachedSizes(&output_stream);
ASSERT_FALSE(output_stream.HadError());

1
third_party/jsoncpp vendored Submodule

@ -0,0 +1 @@
Subproject commit 9059f5cad030ba11d37818847443a53918c327b1

37
third_party/jsoncpp.BUILD vendored Normal file
View File

@ -0,0 +1,37 @@
licenses(["unencumbered"]) # Public Domain or MIT
exports_files(["LICENSE"])
cc_library(
name = "jsoncpp",
srcs = [
"src/lib_json/json_reader.cpp",
"src/lib_json/json_tool.h",
"src/lib_json/json_value.cpp",
"src/lib_json/json_writer.cpp",
],
hdrs = [
"include/json/allocator.h",
"include/json/assertions.h",
"include/json/config.h",
"include/json/json_features.h",
"include/json/forwards.h",
"include/json/json.h",
"include/json/reader.h",
"include/json/value.h",
"include/json/version.h",
"include/json/writer.h",
],
copts = [
"-DJSON_USE_EXCEPTION=0",
"-DJSON_HAS_INT64",
],
includes = ["include"],
visibility = ["//visibility:public"],
deps = [":private"],
)
cc_library(
name = "private",
textual_hdrs = ["src/lib_json/json_valueiterator.inl"],
)
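
The vendored jsoncpp target presumably supports the JSON work noted in the release notes. A small consumer-side sketch of the API it exposes; the @jsoncpp//:jsoncpp label is an assumption about how a workspace would reference it:

    #include <memory>
    #include <string>

    #include <json/json.h>

    // Parses text into *out; on failure returns false and fills *errs.
    bool ParseJson(const std::string& text, Json::Value* out, std::string* errs) {
      Json::CharReaderBuilder builder;
      std::unique_ptr<Json::CharReader> reader(builder.newCharReader());
      return reader->parse(text.data(), text.data() + text.size(), out, errs);
    }

Parse failures are reported through the boolean return and errs, which is consistent with the -DJSON_USE_EXCEPTION=0 copt above.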