Remove all autotools usage (#10132)

* Bazelfying conformance tests

Adding infrastructure to "Bazelify" languages other than Java and C++

* Delete benchmarks for languages supported by other repositories

* Bazelfying benchmark tests

* Bazelfying Python

Use upb's system_python rule instead of branching TensorFlow
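For reference, this is how the rule is wired up in the WORKSPACE changes later in this commit: upb's system_python repository rule configures a repository for the locally installed Python, and the python_headers bind point is repointed at it.

    load("@upb//bazel:workspace_deps.bzl", "upb_deps")
    upb_deps()

    load("@upb//bazel:system_python.bzl", "system_python")
    system_python(name = "local_config_python")

    bind(
        name = "python_headers",
        actual = "@local_config_python//:python_headers",
    )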

* Bazelfying Ruby

* Bazelfying C#

* Bazelfying Objective-C

* Bazelfying Kokoro Mac builds

* Bazelfying Kokoro Linux builds

* Deleting all deprecated files from autotools cleanup

This boils down to Makefile.am, tests.sh, and all of their remaining references.

* Cleanup after PR reorganization

- Enable 32-bit tests
- Move conformance tests back
- Use select statements to choose alternate runtimes (see the sketch after this list)
- Add internal prefixes to proto library macros
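A minimal, hypothetical sketch of that select() pattern (the config_setting and target names here are illustrative, not the ones introduced by this PR): an alias resolves to one runtime or another based on a build flag.

    config_setting(
        name = "use_upb_runtime",
        values = {"define": "runtime=upb"},
    )

    alias(
        name = "runtime",
        actual = select({
            ":use_upb_runtime": "//ruby:upb_runtime",
            "//conditions:default": "//ruby:native_runtime",
        }),
    )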

* Updating READMEs to use Bazel instead of autotools.

* Bazelfying Kokoro release builds

* First round of review fixes

* Second round of review fixes

* Third round of review fixes

* Filtering out conformance tests from Bazel on Windows (b/241484899)
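One way such a platform filter is commonly expressed in Bazel, shown here only as an assumed sketch rather than the exact mechanism in this change, is to mark the affected test targets incompatible on Windows so Bazel skips them automatically:

    load("@rules_cc//cc:defs.bzl", "cc_test")

    cc_test(
        name = "conformance_test",  # hypothetical target name
        srcs = ["conformance_test.cc"],
        target_compatible_with = select({
            "@platforms//os:windows": ["@platforms//:incompatible"],
            "//conditions:default": [],
        }),
    )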

* Add version metadata that was previously scraped from configure.ac

* Fixing typo from previous fix

* Adding Ruby version tests

* Bumping pinned upb version and adding tests to Python CI
Mike Kruskal 2022-08-10 22:51:29 -07:00 committed by GitHub
parent 13b3647016
commit ed5c57a574
245 changed files with 3981 additions and 10337 deletions

.gitignore

@ -1,24 +1,3 @@
# autogen.sh-generated files
Makefile.in
src/Makefile.in
config.guess
config.h.in
config.sub
configure
depcomp
install-sh
ltmain.sh
missing
configure~
aclocal.m4
m4/libtool.m4
m4/ltoptions.m4
m4/ltsugar.m4
m4/ltversion.m4
m4/lt~obsolete.m4
autom4te.cache
# CMake-generated files
.ninja_deps
.ninja_logs
@ -40,8 +19,6 @@ Testing/Temporary/*
/gmock
# in-tree configure-generated files
Makefile
src/Makefile
/config.h
config.log
config.status
@ -56,7 +33,6 @@ stamp-h1
*.o
*.lo
*.la
src/.libs
*.so
*.a
@ -79,8 +55,6 @@ python/build/
python/docs/_build/
src/js_embed
src/protoc
src/unittest_proto_middleman
# vim generated
*.swp


@ -6,7 +6,7 @@ load("@rules_pkg//:mappings.bzl", "pkg_files", "strip_prefix")
load("@rules_proto//proto:defs.bzl", "proto_lang_toolchain", "proto_library")
load("@rules_java//java:defs.bzl", "java_binary", "java_lite_proto_library", "java_proto_library")
load("//build_defs:cpp_opts.bzl", "COPTS", "LINK_OPTS", "PROTOC_LINK_OPTS")
load(":protobuf.bzl", "py_proto_library")
load(":protobuf.bzl", "internal_py_proto_library", "internal_php_proto_library", "internal_ruby_proto_library", "internal_objc_proto_library")
licenses(["notice"])
@ -145,6 +145,16 @@ filegroup(
visibility = ["//visibility:public"],
)
internal_ruby_proto_library(
name = "well_known_ruby_protos",
srcs = [":well_known_protos"],
includes = ["src"],
visibility = [
"//conformance:__pkg__",
"//ruby:__subpackages__",
],
)
################################################################################
# Protocol Buffers Compiler
################################################################################
@ -302,6 +312,10 @@ proto_lang_toolchain(
visibility = ["//visibility:public"],
)
################################################################################
# Objective-C support
################################################################################
alias(
name = "objectivec",
actual = "//objectivec",
@ -355,7 +369,7 @@ proto_library(
deps = ["//src/google/protobuf:generic_test_protos"],
)
py_proto_library(
internal_py_proto_library(
name = "generated_protos_py",
testonly = 1,
srcs = [":generated_protos"],
@ -368,22 +382,22 @@ py_proto_library(
# Conformance tests
################################################################################
alias(
name = "conformance_test_runner",
actual = "//conformance:conformance_test_runner",
visibility = ["//visibility:public"],
cc_proto_library(
name = "test_messages_proto2_cc_proto",
visibility = [
"//conformance:__pkg__",
"//src:__subpackages__",
],
deps = ["//src/google/protobuf:test_messages_proto2_proto"],
)
alias(
name = "test_messages_proto2_proto",
actual = "//src/google/protobuf:test_messages_proto2_proto", # proto_library
visibility = ["//visibility:public"],
)
alias(
name = "test_messages_proto3_proto",
actual = "//src/google/protobuf:test_messages_proto3_proto", # proto_library
visibility = ["//visibility:public"],
cc_proto_library(
name = "test_messages_proto3_cc_proto",
visibility = [
"//conformance:__pkg__",
"//src:__subpackages__",
],
deps = ["//src/google/protobuf:test_messages_proto3_proto"],
)
java_proto_library(
@ -392,7 +406,7 @@ java_proto_library(
"//conformance:__pkg__",
"//java:__subpackages__",
],
deps = [":test_messages_proto2_proto"],
deps = ["//src/google/protobuf:test_messages_proto2_proto"],
)
java_proto_library(
@ -401,7 +415,7 @@ java_proto_library(
"//conformance:__pkg__",
"//java:__subpackages__",
],
deps = [":test_messages_proto3_proto"],
deps = ["//src/google/protobuf:test_messages_proto3_proto"],
)
java_lite_proto_library(
@ -410,7 +424,7 @@ java_lite_proto_library(
"//conformance:__pkg__",
"//java:__subpackages__",
],
deps = [":test_messages_proto2_proto"],
deps = ["//src/google/protobuf:test_messages_proto2_proto"],
)
java_lite_proto_library(
@ -419,7 +433,77 @@ java_lite_proto_library(
"//conformance:__pkg__",
"//java:__subpackages__",
],
deps = [":test_messages_proto3_proto"],
deps = ["//src/google/protobuf:test_messages_proto3_proto"],
)
internal_objc_proto_library(
name = "test_messages_proto2_objc_proto",
srcs = ["//src/google/protobuf:test_messages_proto2.proto"],
includes = ["src/google/protobuf"],
testonly = 1,
visibility = [
"//conformance:__pkg__",
"//objectivec:__subpackages__",
],
)
internal_objc_proto_library(
name = "test_messages_proto3_objc_proto",
srcs = ["//src/google/protobuf:test_messages_proto3.proto"],
proto_deps = [":well_known_protos"],
includes = ["src/google/protobuf", "src"],
testonly = 1,
visibility = [
"//conformance:__pkg__",
"//objectivec:__subpackages__",
],
)
internal_php_proto_library(
name = "test_messages_proto3_php_proto",
srcs = ["//src/google/protobuf:test_messages_proto3.proto"],
proto_deps = [":well_known_protos"],
includes = ["src/google/protobuf", "src"],
outs = [
"Protobuf_test_messages/Proto3/EnumOnlyProto3.php",
"Protobuf_test_messages/Proto3/EnumOnlyProto3/PBBool.php",
"Protobuf_test_messages/Proto3/ForeignEnum.php",
"Protobuf_test_messages/Proto3/ForeignMessage.php",
"Protobuf_test_messages/Proto3/NullHypothesisProto3.php",
"Protobuf_test_messages/Proto3/TestAllTypesProto3.php",
"Protobuf_test_messages/Proto3/TestAllTypesProto3/AliasedEnum.php",
"Protobuf_test_messages/Proto3/TestAllTypesProto3/NestedEnum.php",
"Protobuf_test_messages/Proto3/TestAllTypesProto3/NestedMessage.php",
"GPBMetadata/TestMessagesProto3.php",
],
testonly = 1,
visibility = [
"//conformance:__pkg__",
"//php:__subpackages__",
],
)
internal_ruby_proto_library(
name = "test_messages_proto2_ruby_proto",
srcs = ["//src/google/protobuf:test_messages_proto2.proto"],
includes = ["src/google/protobuf"],
testonly = 1,
visibility = [
"//conformance:__pkg__",
"//ruby:__subpackages__",
],
)
internal_ruby_proto_library(
name = "test_messages_proto3_ruby_proto",
srcs = ["//src/google/protobuf:test_messages_proto3.proto"],
proto_deps = [":well_known_protos"],
includes = ["src/google/protobuf", "src"],
testonly = 1,
visibility = [
"//conformance:__pkg__",
"//ruby:__subpackages__",
],
)
filegroup(
@ -441,22 +525,6 @@ pkg_files(
"cmake/*.cmake",
"cmake/*.in",
"editors/*",
# Several of these files are generated by autogen.sh, so using
# glob() lets us ignore them if they are missing. (This is not good
# practice, though.)
"Makefile.in",
"aclocal.m4",
"ar-lib",
"compile",
"config*",
"depcomp",
"install-sh",
"ltmain.sh",
"m4/*.m4",
"missing",
"protobuf*.pc.in",
"test-driver",
],
allow_empty = True,
) + [
@ -465,10 +533,8 @@ pkg_files(
"CMakeLists.txt",
"CONTRIBUTORS.txt",
"LICENSE",
"Makefile.am",
"README.md",
"WORKSPACE",
"autogen.sh",
"cmake/CMakeLists.txt",
"cmake/README.md",
"cmake/update_file_lists.sh",
@ -476,7 +542,6 @@ pkg_files(
"maven_install.json",
"//third_party:BUILD.bazel",
"//third_party:zlib.BUILD",
"//util/python:BUILD.bazel",
],
strip_prefix = strip_prefix.from_root(""),
visibility = ["//pkg:__pkg__"],


@ -79,6 +79,11 @@ mark_as_advanced(protobuf_DEBUG_POSTFIX)
# User options
include(${protobuf_SOURCE_DIR}/cmake/protobuf-options.cmake)
# Version metadata
set(protobuf_VERSION_STRING "3.21.4")
set(protobuf_DESCRIPTION "Protocol Buffers")
set(protobuf_CONTACT "protobuf@googlegroups.com")
# Overrides for option dependencies
if (protobuf_BUILD_PROTOC_BINARIES OR protobuf_BUILD_TESTS)
set(protobuf_BUILD_LIBPROTOC ON)
@ -86,23 +91,6 @@ endif ()
if (NOT protobuf_BUILD_PROTOBUF_BINARIES)
set(protobuf_INSTALL OFF)
endif()
# Path to main configure script
set(protobuf_CONFIGURE_SCRIPT "${protobuf_SOURCE_DIR}/configure.ac")
# Parse configure script
set(protobuf_AC_INIT_REGEX
"^AC_INIT\\(\\[([^]]+)\\],\\[([^]]+)\\],\\[([^]]+)\\],\\[([^]]+)\\]\\)$")
file(STRINGS "${protobuf_CONFIGURE_SCRIPT}" protobuf_AC_INIT_LINE
LIMIT_COUNT 1 REGEX "^AC_INIT")
# Description
string(REGEX REPLACE "${protobuf_AC_INIT_REGEX}" "\\1"
protobuf_DESCRIPTION "${protobuf_AC_INIT_LINE}")
# Version
string(REGEX REPLACE "${protobuf_AC_INIT_REGEX}" "\\2"
protobuf_VERSION_STRING "${protobuf_AC_INIT_LINE}")
# Contact
string(REGEX REPLACE "${protobuf_AC_INIT_REGEX}" "\\3"
protobuf_CONTACT "${protobuf_AC_INIT_LINE}")
# Parse version tweaks
set(protobuf_VERSION_REGEX "^([0-9]+)\\.([0-9]+)\\.([0-9]+)([-]rc[-]|\\.)?([0-9]*)$")
string(REGEX REPLACE "${protobuf_VERSION_REGEX}" "\\1"

File diff suppressed because it is too large.


@ -42,8 +42,7 @@ to use the github main version at HEAD, or you need to modify protobuf code,
or you are using C++, it's recommended to build your own protoc binary from
source.
If you would like to build protoc binary from source, see the [C++ Installation
Instructions](src/README.md).
If you would like to build protoc binary from source, see the [C++ Installation Instructions](src/README.md).
Protobuf Runtime Installation
-----------------------------


@ -30,11 +30,6 @@ http_archive(
load("//:protobuf_deps.bzl", "PROTOBUF_MAVEN_ARTIFACTS", "protobuf_deps")
protobuf_deps()
bind(
name = "python_headers",
actual = "//util/python:python_headers",
)
load("@rules_jvm_external//:defs.bzl", "maven_install")
maven_install(
@ -69,3 +64,11 @@ kt_register_toolchains()
load("@upb//bazel:workspace_deps.bzl", "upb_deps")
upb_deps()
load("@upb//bazel:system_python.bzl", "system_python")
system_python(name = "local_config_python")
bind(
name = "python_headers",
actual = "@local_config_python//:python_headers",
)


@ -1,44 +0,0 @@
#!/bin/sh
# Run this script to generate the configure script and other files that will
# be included in the distribution. These files are not checked in because they
# are automatically generated.
set -e
if [ ! -z "$@" ]; then
for argument in "$@"; do
case $argument in
# make curl silent
"-s")
curlopts="-s"
;;
esac
done
fi
# Check that we're being run from the right directory.
if test ! -f src/google/protobuf/stubs/common.h; then
cat >&2 << __EOF__
Could not find source code. Make sure you are running this script from the
root of the distribution tree.
__EOF__
exit 1
fi
set -ex
# The absence of a m4 directory in googletest causes autoreconf to fail when
# building under the CentOS docker image. It's a warning in regular build on
# Ubuntu/gLinux as well. (This is only needed if git submodules have been
# initialized, which is typically only needed for testing; see the installation
# instructions for details.)
if test -d third_party/googletest; then
mkdir -p third_party/googletest/m4
fi
# TODO(kenton): Remove the ",no-obsolete" part and fix the resulting warnings.
autoreconf -f -i -Wall,no-obsolete
rm -rf autom4te.cache config.h.in~
exit 0


@ -2,6 +2,11 @@ load("@rules_cc//cc:defs.bzl", "cc_proto_library")
load("@rules_java//java:defs.bzl", "java_proto_library")
load("@rules_pkg//:mappings.bzl", "pkg_filegroup", "pkg_files", "strip_prefix")
load("@rules_proto//proto:defs.bzl", "proto_library")
load("//:protobuf.bzl", "internal_py_proto_library", "internal_php_proto_library")
################################################################################
# Benchmark Protos
################################################################################
proto_library(
name = "benchmarks_proto",
@ -34,6 +39,26 @@ java_proto_library(
],
)
internal_php_proto_library(
name = "benchmarks_php_proto",
srcs = ["benchmarks.proto"],
outs = [
"Benchmarks/BenchmarkDataset.php",
"GPBMetadata/Benchmarks.php",
],
visibility = [
"//benchmarks:__subpackages__",
],
)
internal_py_proto_library(
name = "benchmarks_py_proto",
srcs = ["benchmarks.proto"],
visibility = [
"//benchmarks:__subpackages__",
],
)
proto_library(
name = "google_size_proto",
srcs = [
@ -65,19 +90,13 @@ java_proto_library(
],
)
################################################################################
# Distribution files
################################################################################
pkg_files(
name = "dist_files",
srcs = glob(
[
"*",
"python/*.cc",
"util/*.cc",
],
exclude = [
"__init__.py", # not in autotools dist
"go/*",
],
),
srcs = glob(["*"]),
strip_prefix = strip_prefix.from_root(""),
)
@ -93,6 +112,10 @@ pkg_filegroup(
"//benchmarks/datasets/google_message2:dist_files",
"//benchmarks/datasets/google_message3:dist_files",
"//benchmarks/datasets/google_message4:dist_files",
"//benchmarks/java:dist_files",
"//benchmarks/php:dist_files",
"//benchmarks/python:dist_files",
"//benchmarks/util:dist_files",
],
visibility = ["//pkg:__pkg__"],
)


@ -1,664 +0,0 @@
benchmarks_protoc_inputs_benchmark_wrapper = \
benchmarks.proto
benchmarks_protoc_inputs = \
datasets/google_message1/proto3/benchmark_message1_proto3.proto
benchmarks_protoc_inputs_proto2 = \
datasets/google_message1/proto2/benchmark_message1_proto2.proto \
datasets/google_message2/benchmark_message2.proto \
datasets/google_message3/benchmark_message3.proto \
datasets/google_message3/benchmark_message3_1.proto \
datasets/google_message3/benchmark_message3_2.proto \
datasets/google_message3/benchmark_message3_3.proto \
datasets/google_message3/benchmark_message3_4.proto \
datasets/google_message3/benchmark_message3_5.proto \
datasets/google_message3/benchmark_message3_6.proto \
datasets/google_message3/benchmark_message3_7.proto \
datasets/google_message3/benchmark_message3_8.proto \
datasets/google_message4/benchmark_message4.proto \
datasets/google_message4/benchmark_message4_1.proto \
datasets/google_message4/benchmark_message4_2.proto \
datasets/google_message4/benchmark_message4_3.proto
make_tmp_dir:
mkdir -p 'tmp/java/src/main/java'
touch make_tmp_dir
# We have to cd to $(srcdir) before executing protoc because $(protoc_inputs) is
# relative to srcdir, which may not be the same as the current directory when
# building out-of-tree.
protoc_middleman: make_tmp_dir $(top_srcdir)/src/protoc$(EXEEXT) $(benchmarks_protoc_inputs) $(well_known_type_protoc_inputs) $(benchmarks_protoc_inputs_benchmark_wrapper)
oldpwd=`pwd` && ( cd $(srcdir) && $$oldpwd/../src/protoc$(EXEEXT) -I. -I$(top_srcdir)/src --cpp_out=$$oldpwd/cpp --java_out=$$oldpwd/tmp/java/src/main/java --python_out=$$oldpwd/tmp $(benchmarks_protoc_inputs) $(benchmarks_protoc_inputs_benchmark_wrapper) )
touch protoc_middleman
protoc_middleman2: make_tmp_dir $(top_srcdir)/src/protoc$(EXEEXT) $(benchmarks_protoc_inputs_proto2) $(well_known_type_protoc_inputs)
oldpwd=`pwd` && ( cd $(srcdir) && $$oldpwd/../src/protoc$(EXEEXT) -I. -I$(top_srcdir)/src --cpp_out=$$oldpwd/cpp --java_out=$$oldpwd/tmp/java/src/main/java --python_out=$$oldpwd/tmp $(benchmarks_protoc_inputs_proto2) )
touch protoc_middleman2
all_data = $$(find $$(cd $(srcdir) && pwd) -type f -name "dataset.*.pb" -not -path "$$(cd $(srcdir) && pwd)/tmp/*")
############# CPP RULES ##############
benchmarks_protoc_outputs = \
cpp/benchmarks.pb.cc \
cpp/datasets/google_message1/proto3/benchmark_message1_proto3.pb.cc
benchmarks_protoc_outputs_header = \
cpp/benchmarks.pb.h \
cpp/datasets/google_message1/proto3/benchmark_message1_proto3.pb.h
benchmarks_protoc_outputs_proto2_header = \
cpp/datasets/google_message1/proto2/benchmark_message1_proto2.pb.h \
cpp/datasets/google_message2/benchmark_message2.pb.h \
cpp/datasets/google_message3/benchmark_message3.pb.h \
cpp/datasets/google_message3/benchmark_message3_1.pb.h \
cpp/datasets/google_message3/benchmark_message3_2.pb.h \
cpp/datasets/google_message3/benchmark_message3_3.pb.h \
cpp/datasets/google_message3/benchmark_message3_4.pb.h \
cpp/datasets/google_message3/benchmark_message3_5.pb.h \
cpp/datasets/google_message3/benchmark_message3_6.pb.h \
cpp/datasets/google_message3/benchmark_message3_7.pb.h \
cpp/datasets/google_message3/benchmark_message3_8.pb.h \
cpp/datasets/google_message4/benchmark_message4.pb.h \
cpp/datasets/google_message4/benchmark_message4_1.pb.h \
cpp/datasets/google_message4/benchmark_message4_2.pb.h \
cpp/datasets/google_message4/benchmark_message4_3.pb.h
benchmarks_protoc_outputs_proto2 = \
cpp/datasets/google_message1/proto2/benchmark_message1_proto2.pb.cc \
cpp/datasets/google_message2/benchmark_message2.pb.cc \
cpp/datasets/google_message3/benchmark_message3.pb.cc \
cpp/datasets/google_message3/benchmark_message3_1.pb.cc \
cpp/datasets/google_message3/benchmark_message3_2.pb.cc \
cpp/datasets/google_message3/benchmark_message3_3.pb.cc \
cpp/datasets/google_message3/benchmark_message3_4.pb.cc \
cpp/datasets/google_message3/benchmark_message3_5.pb.cc \
cpp/datasets/google_message3/benchmark_message3_6.pb.cc \
cpp/datasets/google_message3/benchmark_message3_7.pb.cc \
cpp/datasets/google_message3/benchmark_message3_8.pb.cc \
cpp/datasets/google_message4/benchmark_message4.pb.cc \
cpp/datasets/google_message4/benchmark_message4_1.pb.cc \
cpp/datasets/google_message4/benchmark_message4_2.pb.cc \
cpp/datasets/google_message4/benchmark_message4_3.pb.cc
$(benchmarks_protoc_outputs): protoc_middleman
$(benchmarks_protoc_outputs_header): protoc_middleman
$(benchmarks_protoc_outputs_proto2): protoc_middleman2
$(benchmarks_protoc_outputs_proto2_header): protoc_middleman2
initialize_submodule:
oldpwd=`pwd`
cd $(top_srcdir) && git submodule update --init -r third_party/benchmark && \
cd third_party/benchmark && cmake -DCMAKE_BUILD_TYPE=Release && make
cd $$oldpwd
touch initialize_submodule
$(top_srcdir)/third_party/benchmark/src/libbenchmark.a: initialize_submodule
AM_CXXFLAGS = $(NO_OPT_CXXFLAGS) $(PROTOBUF_OPT_FLAG) -Wall -Wwrite-strings -Woverloaded-virtual -Wno-sign-compare
bin_PROGRAMS = cpp-benchmark
cpp_benchmark_LDADD = $(top_srcdir)/src/libprotobuf.la $(top_srcdir)/third_party/benchmark/src/libbenchmark.a
cpp_benchmark_SOURCES = cpp/cpp_benchmark.cc
cpp_benchmark_CPPFLAGS = -I$(top_srcdir)/src -I$(srcdir)/cpp -I$(top_srcdir)/third_party/benchmark/include
# Explicit deps because BUILT_SOURCES are only done before a "make all/check"
# so a direct "make test_cpp" could fail if parallel enough.
# See: https://www.gnu.org/software/automake/manual/html_node/Built-Sources-Example.html#Recording-Dependencies-manually
cpp/cpp_benchmark-cpp_benchmark.$(OBJEXT): $(benchmarks_protoc_outputs) $(benchmarks_protoc_outputs_proto2) $(benchmarks_protoc_outputs_header) $(benchmarks_protoc_outputs_proto2_header) $(top_srcdir)/src/libprotobuf.la $(top_srcdir)/third_party/benchmark/src/libbenchmark.a
cpp/benchmark-cpp_benchmark.$(OBJEXT): $(benchmarks_protoc_outputs) $(benchmarks_protoc_outputs_proto2) $(benchmarks_protoc_outputs_header) $(benchmarks_protoc_outputs_proto2_header) $(top_srcdir)/src/libprotobuf.la $(top_srcdir)/third_party/benchmark/src/libbenchmark.a
nodist_cpp_benchmark_SOURCES = \
$(benchmarks_protoc_outputs) \
$(benchmarks_protoc_outputs_proto2) \
$(benchmarks_protoc_outputs_proto2_header) \
$(benchmarks_protoc_outputs_header)
cpp: protoc_middleman protoc_middleman2 cpp-benchmark initialize_submodule
./cpp-benchmark $(all_data)
############ CPP RULES END ############
############# JAVA RULES ##############
java_benchmark_testing_files = \
java/src/main/java/com/google/protobuf/ProtoCaliperBenchmark.java
javac_middleman: $(java_benchmark_testing_files) protoc_middleman protoc_middleman2
cp -r $(srcdir)/java tmp
mkdir -p tmp/java/lib
cp $(top_srcdir)/java/core/target/*.jar tmp/java/lib/protobuf-java.jar
cd tmp/java && mvn clean compile assembly:single -Dprotobuf.version=$(PACKAGE_VERSION) && cd ../..
@touch javac_middleman
java-benchmark: javac_middleman
@echo "Writing shortcut script java-benchmark..."
@echo '#! /bin/bash' > java-benchmark
@echo 'all_data=""' >> java-benchmark
@echo 'conf=()' >> java-benchmark
@echo 'data_files=""' >> java-benchmark
@echo 'for arg in $$@; do if [[ $${arg:0:1} == "-" ]]; then conf+=($$arg); else data_files+="$$arg,"; fi; done' >> java-benchmark
@echo 'java -cp '\"tmp/java/target/*:$(top_srcdir)/java/core/target/*:$(top_srcdir)/java/util/target/*\"" \\" >>java-benchmark
@echo ' com.google.caliper.runner.CaliperMain com.google.protobuf.ProtoCaliperBenchmark -i runtime '"\\" >> java-benchmark
@echo ' -b serializeToByteArray,serializeToMemoryStream,deserializeFromByteArray,deserializeFromMemoryStream '"\\" >> java-benchmark
@echo ' -DdataFile=$${data_files:0:-1} $${conf[*]}' >> java-benchmark
@chmod +x java-benchmark
java: protoc_middleman protoc_middleman2 java-benchmark
./java-benchmark $(all_data)
############# JAVA RULES END ##############
############# PYTHON RULES ##############
python_add_init: protoc_middleman protoc_middleman2
all_file=`find tmp -type f -regex '.*\.py'` && \
for file in $${all_file[@]}; do \
path="$${file%/*}"; \
while true; do \
touch "$$path/__init__.py" && chmod +x "$$path/__init__.py"; \
if [[ $$path != *"/"* ]]; then break; fi; \
path=$${path%/*}; \
done \
done
python_cpp_pkg_flags = `pkg-config --cflags --libs python3`
lib_LTLIBRARIES = libbenchmark_messages.la
libbenchmark_messages_la_SOURCES = python/python_benchmark_messages.cc
libbenchmark_messages_la_LIBADD = $(top_srcdir)/src/.libs/libprotobuf.la
libbenchmark_messages_la_LDFLAGS = -version-info 1:0:0 -export-dynamic
libbenchmark_messages_la_CPPFLAGS = -I$(top_srcdir)/src -I$(srcdir)/cpp $(python_cpp_pkg_flags)
libbenchmark_messages_la-python_benchmark_messages.$(OBJEXT): $(benchmarks_protoc_outputs_header) $(benchmarks_protoc_outputs_proto2_header) $(benchmarks_protoc_outputs) $(benchmarks_protoc_outputs_proto2)
nodist_libbenchmark_messages_la_SOURCES = \
$(benchmarks_protoc_outputs) \
$(benchmarks_protoc_outputs_proto2) \
$(benchmarks_protoc_outputs_proto2_header) \
$(benchmarks_protoc_outputs_header)
python-pure-python-benchmark: python_add_init
@echo "Writing shortcut script python-pure-python-benchmark..."
@echo '#! /bin/bash' > python-pure-python-benchmark
@echo export LD_LIBRARY_PATH=$(top_srcdir)/src/.libs >> python-pure-python-benchmark
@echo export DYLD_LIBRARY_PATH=$(top_srcdir)/src/.libs >> python-pure-python-benchmark
@echo export PROTOCOL_BUFFERS_PYTHON_IMPLEMENTATION=\'python\' >> python-pure-python-benchmark
@echo cp $(srcdir)/python/py_benchmark.py tmp >> python-pure-python-benchmark
@echo python3 tmp/py_benchmark.py '$$@' >> python-pure-python-benchmark
@chmod +x python-pure-python-benchmark
python-cpp-reflection-benchmark: python_add_init
@echo "Writing shortcut script python-cpp-reflection-benchmark..."
@echo '#! /bin/bash' > python-cpp-reflection-benchmark
@echo export LD_LIBRARY_PATH=$(top_srcdir)/src/.libs >> python-cpp-reflection-benchmark
@echo export DYLD_LIBRARY_PATH=$(top_srcdir)/src/.libs >> python-cpp-reflection-benchmark
@echo export PROTOCOL_BUFFERS_PYTHON_IMPLEMENTATION=\'cpp\' >> python-cpp-reflection-benchmark
@echo cp $(srcdir)/python/py_benchmark.py tmp >> python-cpp-reflection-benchmark
@echo python3 tmp/py_benchmark.py '$$@' >> python-cpp-reflection-benchmark
@chmod +x python-cpp-reflection-benchmark
python-cpp-generated-code-benchmark: python_add_init libbenchmark_messages.la
@echo "Writing shortcut script python-cpp-generated-code-benchmark..."
@echo '#! /bin/bash' > python-cpp-generated-code-benchmark
@echo export LD_LIBRARY_PATH=$(top_srcdir)/src/.libs >> python-cpp-generated-code-benchmark
@echo export DYLD_LIBRARY_PATH=$(top_srcdir)/src/.libs >> python-cpp-generated-code-benchmark
@echo export PROTOCOL_BUFFERS_PYTHON_IMPLEMENTATION=\'cpp\' >> python-cpp-generated-code-benchmark
@echo cp $(srcdir)/python/py_benchmark.py tmp >> python-cpp-generated-code-benchmark
@echo python3 tmp/py_benchmark.py --cpp_generated '$$@' >> python-cpp-generated-code-benchmark
@chmod +x python-cpp-generated-code-benchmark
python-pure-python: python-pure-python-benchmark
./python-pure-python-benchmark $(all_data)
python-cpp-reflection: python-cpp-reflection-benchmark
./python-cpp-reflection-benchmark $(all_data)
python-cpp-generated-code: python-cpp-generated-code-benchmark
./python-cpp-generated-code-benchmark $(all_data)
############# PYTHON RULES END ##############
############# GO RULES BEGIN ##############
benchmarks_protoc_inputs_proto2_message1 = \
datasets/google_message1/proto2/benchmark_message1_proto2.proto
benchmarks_protoc_inputs_proto2_message2 = \
datasets/google_message2/benchmark_message2.proto
benchmarks_protoc_inputs_proto2_message3 = \
datasets/google_message3/benchmark_message3.proto \
datasets/google_message3/benchmark_message3_1.proto \
datasets/google_message3/benchmark_message3_2.proto \
datasets/google_message3/benchmark_message3_3.proto \
datasets/google_message3/benchmark_message3_4.proto \
datasets/google_message3/benchmark_message3_5.proto \
datasets/google_message3/benchmark_message3_6.proto \
datasets/google_message3/benchmark_message3_7.proto \
datasets/google_message3/benchmark_message3_8.proto
benchmarks_protoc_inputs_proto2_message4 = \
datasets/google_message4/benchmark_message4.proto \
datasets/google_message4/benchmark_message4_1.proto \
datasets/google_message4/benchmark_message4_2.proto \
datasets/google_message4/benchmark_message4_3.proto
go_protoc_middleman: make_tmp_dir $(top_srcdir)/src/protoc$(EXEEXT) $(benchmarks_protoc_inputs) $(well_known_type_protoc_inputs) $(benchmarks_protoc_inputs_proto2_message1) $(benchmarks_protoc_inputs_proto2_message2) $(benchmarks_protoc_inputs_proto2_message3) $(benchmarks_protoc_inputs_proto2_message4) $(well_known_type_protoc_inputs)
oldpwd=`pwd` && ( cd $(srcdir) && $$oldpwd/../src/protoc$(EXEEXT) -I. -I$(top_srcdir)/src --go_out=$$oldpwd/tmp $(benchmarks_protoc_inputs) )
oldpwd=`pwd` && ( cd $(srcdir) && $$oldpwd/../src/protoc$(EXEEXT) -I. -I$(top_srcdir)/src --go_out=$$oldpwd/tmp $(benchmarks_protoc_inputs_benchmark_wrapper) )
oldpwd=`pwd` && ( cd $(srcdir) && $$oldpwd/../src/protoc$(EXEEXT) -I. -I$(top_srcdir)/src --go_out=$$oldpwd/tmp $(benchmarks_protoc_inputs_proto2_message1) )
oldpwd=`pwd` && ( cd $(srcdir) && $$oldpwd/../src/protoc$(EXEEXT) -I. -I$(top_srcdir)/src --go_out=$$oldpwd/tmp $(benchmarks_protoc_inputs_proto2_message2) )
oldpwd=`pwd` && ( cd $(srcdir) && $$oldpwd/../src/protoc$(EXEEXT) -I. -I$(top_srcdir)/src --go_out=$$oldpwd/tmp $(benchmarks_protoc_inputs_proto2_message3) )
oldpwd=`pwd` && ( cd $(srcdir) && $$oldpwd/../src/protoc$(EXEEXT) -I. -I$(top_srcdir)/src --go_out=$$oldpwd/tmp $(benchmarks_protoc_inputs_proto2_message4) )
touch go_protoc_middleman
go-benchmark: go_protoc_middleman
@echo "Writing shortcut script go-benchmark..."
@echo '#! /bin/bash' > go-benchmark
@echo 'cd $(srcdir)/go' >> go-benchmark
@echo 'all_data=""' >> go-benchmark
@echo 'conf=()' >> go-benchmark
@echo 'data_files=()' >> go-benchmark
@echo 'for arg in $$@; do if [[ $${arg:0:1} == "-" ]]; then conf+=($$arg); else data_files+=("$$arg"); fi; done' >> go-benchmark
@echo 'go test -bench=. $${conf[*]} -- $${data_files[*]}' >> go-benchmark
@echo 'cd ..' >> go-benchmark
@chmod +x go-benchmark
go: go_protoc_middleman go-benchmark
./go-benchmark $(all_data)
############# GO RULES END ##############
############# GOGO RULES BEGIN ############
cpp_no_group_benchmarks_protoc_outputs_header = \
gogo/cpp_no_group/benchmarks.pb.h \
gogo/cpp_no_group/datasets/google_message1/proto3/benchmark_message1_proto3.pb.h
cpp_no_group_benchmarks_protoc_outputs = \
gogo/cpp_no_group/benchmarks.pb.cc \
gogo/cpp_no_group/datasets/google_message1/proto3/benchmark_message1_proto3.pb.cc
cpp_no_group_benchmarks_protoc_outputs_proto2_header = \
gogo/cpp_no_group/datasets/google_message1/proto2/benchmark_message1_proto2.pb.h \
gogo/cpp_no_group/datasets/google_message2/benchmark_message2.pb.h \
gogo/cpp_no_group/datasets/google_message3/benchmark_message3.pb.h \
gogo/cpp_no_group/datasets/google_message3/benchmark_message3_1.pb.h \
gogo/cpp_no_group/datasets/google_message3/benchmark_message3_2.pb.h \
gogo/cpp_no_group/datasets/google_message3/benchmark_message3_3.pb.h \
gogo/cpp_no_group/datasets/google_message3/benchmark_message3_4.pb.h \
gogo/cpp_no_group/datasets/google_message3/benchmark_message3_5.pb.h \
gogo/cpp_no_group/datasets/google_message3/benchmark_message3_6.pb.h \
gogo/cpp_no_group/datasets/google_message3/benchmark_message3_7.pb.h \
gogo/cpp_no_group/datasets/google_message3/benchmark_message3_8.pb.h \
gogo/cpp_no_group/datasets/google_message4/benchmark_message4.pb.h \
gogo/cpp_no_group/datasets/google_message4/benchmark_message4_1.pb.h \
gogo/cpp_no_group/datasets/google_message4/benchmark_message4_2.pb.h \
gogo/cpp_no_group/datasets/google_message4/benchmark_message4_3.pb.h
cpp_no_group_benchmarks_protoc_outputs_proto2 = \
gogo/cpp_no_group/datasets/google_message1/proto2/benchmark_message1_proto2.pb.cc \
gogo/cpp_no_group/datasets/google_message2/benchmark_message2.pb.cc \
gogo/cpp_no_group/datasets/google_message3/benchmark_message3.pb.cc \
gogo/cpp_no_group/datasets/google_message3/benchmark_message3_1.pb.cc \
gogo/cpp_no_group/datasets/google_message3/benchmark_message3_2.pb.cc \
gogo/cpp_no_group/datasets/google_message3/benchmark_message3_3.pb.cc \
gogo/cpp_no_group/datasets/google_message3/benchmark_message3_4.pb.cc \
gogo/cpp_no_group/datasets/google_message3/benchmark_message3_5.pb.cc \
gogo/cpp_no_group/datasets/google_message3/benchmark_message3_6.pb.cc \
gogo/cpp_no_group/datasets/google_message3/benchmark_message3_7.pb.cc \
gogo/cpp_no_group/datasets/google_message3/benchmark_message3_8.pb.cc \
gogo/cpp_no_group/datasets/google_message4/benchmark_message4.pb.cc \
gogo/cpp_no_group/datasets/google_message4/benchmark_message4_1.pb.cc \
gogo/cpp_no_group/datasets/google_message4/benchmark_message4_2.pb.cc \
gogo/cpp_no_group/datasets/google_message4/benchmark_message4_3.pb.cc
$(cpp_no_group_benchmarks_protoc_outputs): cpp_no_group_protoc_middleman
$(cpp_no_group_benchmarks_protoc_outputs_header): cpp_no_group_protoc_middleman
$(cpp_no_group_benchmarks_protoc_outputs_proto2): cpp_no_group_protoc_middleman
$(cpp_no_group_benchmarks_protoc_outputs_proto2_header): cpp_no_group_protoc_middleman
generate_cpp_no_group_benchmark_code:
cp $(srcdir)/cpp/cpp_benchmark.cc gogo/cpp_no_group/cpp_benchmark.cc
sed -i -e "s/\#include \"datasets/\#include \"gogo\/cpp_no_group\/datasets/g" gogo/cpp_no_group/cpp_benchmark.cc
sed -i -e "s/\#include \"benchmarks.pb.h/\#include \"gogo\/cpp_no_group\/benchmarks.pb.h/g" gogo/cpp_no_group/cpp_benchmark.cc
touch generate_cpp_no_group_benchmark_code
bin_PROGRAMS += cpp-no-group-benchmark
cpp_no_group_benchmark_LDADD = $(top_srcdir)/src/libprotobuf.la $(top_srcdir)/third_party/benchmark/src/libbenchmark.a
cpp_no_group_benchmark_SOURCES = gogo/cpp_no_group/cpp_benchmark.cc
cpp_no_group_benchmark_CPPFLAGS = -I$(top_srcdir)/src -I$(srcdir)/gogo/cpp_no_group -I$(top_srcdir)/third_party/benchmark/include
# Explicit deps because BUILT_SOURCES are only done before a "make all/check"
# so a direct "make test_cpp" could fail if parallel enough.
# See: https://www.gnu.org/software/automake/manual/html_node/Built-Sources-Example.html#Recording-Dependencies-manually
gogo/cpp_no_group/cpp_no_group_benchmark-cpp_benchmark.$(OBJEXT): $(cpp_no_group_benchmarks_protoc_outputs) $(cpp_no_group_benchmarks_protoc_outputs_proto2) $(cpp_no_group_benchmarks_protoc_outputs_header) \
$(cpp_no_group_benchmarks_protoc_outputs_proto2_header) $(top_srcdir)/third_party/benchmark/src/libbenchmark.a generate_cpp_no_group_benchmark_code
gogo/cpp_no_group/cpp_benchmark.cc: generate_cpp_no_group_benchmark_code
nodist_cpp_no_group_benchmark_SOURCES = \
$(cpp_no_group_benchmarks_protoc_outputs_proto2) \
$(cpp_no_group_benchmarks_protoc_outputs) \
$(cpp_no_group_benchmarks_protoc_outputs_header) \
$(cpp_no_group_benchmarks_protoc_outputs_proto2_header)
cpp_no_group: cpp_no_group_protoc_middleman generate_gogo_data cpp-no-group-benchmark
./cpp-no-group-benchmark $(gogo_data)
gogo_proto_middleman: protoc-gen-gogoproto
mkdir -p "tmp/gogo_proto"
oldpwd=`pwd` && ( cd $(srcdir) && $$oldpwd/../src/protoc$(EXEEXT) -I$(srcdir) -I$(top_srcdir) --plugin=protoc-gen-gogoproto --gogoproto_out=$$oldpwd/tmp/gogo_proto $(benchmarks_protoc_inputs) $(benchmarks_protoc_inputs_benchmark_wrapper) $(benchmarks_protoc_inputs_proto2) )
touch gogo_proto_middleman
gogo_data = $$(for data in $(all_data); do echo "tmp/gogo_data$${data\#$(srcdir)}"; done | xargs)
generate_gogo_data: protoc_middleman protoc_middleman2 gogo-data-scrubber
mkdir -p `dirname $(gogo_data)`
./gogo-data-scrubber $(all_data) $(gogo_data)
touch generate_gogo_data
make_tmp_dir_gogo:
mkdir -p tmp/go_no_group/benchmark_code
mkdir -p tmp/gogofast/benchmark_code
mkdir -p tmp/gogofaster/benchmark_code
mkdir -p tmp/gogoslick/benchmark_code
touch make_tmp_dir_gogo
go_no_group_protoc_middleman: make_tmp_dir_gogo $(top_srcdir)/src/protoc$(EXEEXT) gogo_proto_middleman $(well_known_type_protoc_inputs)
oldpwd=`pwd` && ( cd $(srcdir)/tmp/gogo_proto && $$oldpwd/../src/protoc$(EXEEXT) -I. -I$$oldpwd/$(top_srcdir)/src --go_out=$$oldpwd/tmp/go_no_group $(benchmarks_protoc_inputs) )
oldpwd=`pwd` && ( cd $(srcdir)/tmp/gogo_proto && $$oldpwd/../src/protoc$(EXEEXT) -I. -I$$oldpwd/$(top_srcdir)/src --go_out=$$oldpwd/tmp/go_no_group $(benchmarks_protoc_inputs_benchmark_wrapper) )
oldpwd=`pwd` && ( cd $(srcdir)/tmp/gogo_proto && $$oldpwd/../src/protoc$(EXEEXT) -I. -I$$oldpwd/$(top_srcdir)/src --go_out=$$oldpwd/tmp/go_no_group $(benchmarks_protoc_inputs_proto2_message1) )
oldpwd=`pwd` && ( cd $(srcdir)/tmp/gogo_proto && $$oldpwd/../src/protoc$(EXEEXT) -I. -I$$oldpwd/$(top_srcdir)/src --go_out=$$oldpwd/tmp/go_no_group $(benchmarks_protoc_inputs_proto2_message2) )
oldpwd=`pwd` && ( cd $(srcdir)/tmp/gogo_proto && $$oldpwd/../src/protoc$(EXEEXT) -I. -I$$oldpwd/$(top_srcdir)/src --go_out=$$oldpwd/tmp/go_no_group $(benchmarks_protoc_inputs_proto2_message3) )
oldpwd=`pwd` && ( cd $(srcdir)/tmp/gogo_proto && $$oldpwd/../src/protoc$(EXEEXT) -I. -I$$oldpwd/$(top_srcdir)/src --go_out=$$oldpwd/tmp/go_no_group $(benchmarks_protoc_inputs_proto2_message4) )
touch go_no_group_protoc_middleman
cpp_no_group_protoc_middleman: make_tmp_dir_gogo $(top_srcdir)/src/protoc$(EXEEXT) gogo_proto_middleman $(well_known_type_protoc_inputs)
oldpwd=`pwd` && ( cd $(srcdir)/tmp/gogo_proto && $$oldpwd/../src/protoc$(EXEEXT) -I. -I$$oldpwd/$(top_srcdir)/src --cpp_out=$$oldpwd/gogo/cpp_no_group $(benchmarks_protoc_inputs) )
oldpwd=`pwd` && ( cd $(srcdir)/tmp/gogo_proto && $$oldpwd/../src/protoc$(EXEEXT) -I. -I$$oldpwd/$(top_srcdir)/src --cpp_out=$$oldpwd/gogo/cpp_no_group $(benchmarks_protoc_inputs_benchmark_wrapper) )
oldpwd=`pwd` && ( cd $(srcdir)/tmp/gogo_proto && $$oldpwd/../src/protoc$(EXEEXT) -I. -I$$oldpwd/$(top_srcdir)/src --cpp_out=$$oldpwd/gogo/cpp_no_group $(benchmarks_protoc_inputs_proto2_message1) )
oldpwd=`pwd` && ( cd $(srcdir)/tmp/gogo_proto && $$oldpwd/../src/protoc$(EXEEXT) -I. -I$$oldpwd/$(top_srcdir)/src --cpp_out=$$oldpwd/gogo/cpp_no_group $(benchmarks_protoc_inputs_proto2_message2) )
oldpwd=`pwd` && ( cd $(srcdir)/tmp/gogo_proto && $$oldpwd/../src/protoc$(EXEEXT) -I. -I$$oldpwd/$(top_srcdir)/src --cpp_out=$$oldpwd/gogo/cpp_no_group $(benchmarks_protoc_inputs_proto2_message3) )
oldpwd=`pwd` && ( cd $(srcdir)/tmp/gogo_proto && $$oldpwd/../src/protoc$(EXEEXT) -I. -I$$oldpwd/$(top_srcdir)/src --cpp_out=$$oldpwd/gogo/cpp_no_group $(benchmarks_protoc_inputs_proto2_message4) )
touch cpp_no_group_protoc_middleman
gogofast_protoc_middleman: make_tmp_dir_gogo $(top_srcdir)/src/protoc$(EXEEXT) gogo_proto_middleman $(well_known_type_protoc_inputs)
oldpwd=`pwd` && ( cd $(srcdir)/tmp/gogo_proto && $$oldpwd/../src/protoc$(EXEEXT) -I. -I$$oldpwd/$(top_srcdir)/src --gogofast_out=$$oldpwd/tmp/gogofast $(benchmarks_protoc_inputs) )
oldpwd=`pwd` && ( cd $(srcdir)/tmp/gogo_proto && $$oldpwd/../src/protoc$(EXEEXT) -I. -I$$oldpwd/$(top_srcdir)/src --gogofast_out=$$oldpwd/tmp/gogofast $(benchmarks_protoc_inputs_benchmark_wrapper) )
oldpwd=`pwd` && ( cd $(srcdir)/tmp/gogo_proto && $$oldpwd/../src/protoc$(EXEEXT) -I. -I$$oldpwd/$(top_srcdir)/src --gogofast_out=$$oldpwd/tmp/gogofast $(benchmarks_protoc_inputs_proto2_message1) )
oldpwd=`pwd` && ( cd $(srcdir)/tmp/gogo_proto && $$oldpwd/../src/protoc$(EXEEXT) -I. -I$$oldpwd/$(top_srcdir)/src --gogofast_out=$$oldpwd/tmp/gogofast $(benchmarks_protoc_inputs_proto2_message2) )
oldpwd=`pwd` && ( cd $(srcdir)/tmp/gogo_proto && $$oldpwd/../src/protoc$(EXEEXT) -I. -I$$oldpwd/$(top_srcdir)/src --gogofast_out=$$oldpwd/tmp/gogofast $(benchmarks_protoc_inputs_proto2_message3) )
oldpwd=`pwd` && ( cd $(srcdir)/tmp/gogo_proto && $$oldpwd/../src/protoc$(EXEEXT) -I. -I$$oldpwd/$(top_srcdir)/src --gogofast_out=$$oldpwd/tmp/gogofast $(benchmarks_protoc_inputs_proto2_message4) )
touch gogofast_protoc_middleman
gogofaster_protoc_middleman: make_tmp_dir_gogo $(top_srcdir)/src/protoc$(EXEEXT) gogo_proto_middleman $(well_known_type_protoc_inputs)
oldpwd=`pwd` && ( cd $(srcdir)/tmp/gogo_proto && $$oldpwd/../src/protoc$(EXEEXT) -I. -I$$oldpwd/$(top_srcdir)/src --gogofaster_out=$$oldpwd/tmp/gogofaster $(benchmarks_protoc_inputs) )
oldpwd=`pwd` && ( cd $(srcdir)/tmp/gogo_proto && $$oldpwd/../src/protoc$(EXEEXT) -I. -I$$oldpwd/$(top_srcdir)/src --gogofaster_out=$$oldpwd/tmp/gogofaster $(benchmarks_protoc_inputs_benchmark_wrapper) )
oldpwd=`pwd` && ( cd $(srcdir)/tmp/gogo_proto && $$oldpwd/../src/protoc$(EXEEXT) -I. -I$$oldpwd/$(top_srcdir)/src --gogofaster_out=$$oldpwd/tmp/gogofaster $(benchmarks_protoc_inputs_proto2_message1) )
oldpwd=`pwd` && ( cd $(srcdir)/tmp/gogo_proto && $$oldpwd/../src/protoc$(EXEEXT) -I. -I$$oldpwd/$(top_srcdir)/src --gogofaster_out=$$oldpwd/tmp/gogofaster $(benchmarks_protoc_inputs_proto2_message2) )
oldpwd=`pwd` && ( cd $(srcdir)/tmp/gogo_proto && $$oldpwd/../src/protoc$(EXEEXT) -I. -I$$oldpwd/$(top_srcdir)/src --gogofaster_out=$$oldpwd/tmp/gogofaster $(benchmarks_protoc_inputs_proto2_message3) )
oldpwd=`pwd` && ( cd $(srcdir)/tmp/gogo_proto && $$oldpwd/../src/protoc$(EXEEXT) -I. -I$$oldpwd/$(top_srcdir)/src --gogofaster_out=$$oldpwd/tmp/gogofaster $(benchmarks_protoc_inputs_proto2_message4) )
touch gogofaster_protoc_middleman
gogoslick_protoc_middleman: make_tmp_dir_gogo $(top_srcdir)/src/protoc$(EXEEXT) gogo_proto_middleman $(well_known_type_protoc_inputs)
oldpwd=`pwd` && ( cd $(srcdir)/tmp/gogo_proto && $$oldpwd/../src/protoc$(EXEEXT) -I. -I$$oldpwd/$(top_srcdir)/src --gogoslick_out=$$oldpwd/tmp/gogoslick $(benchmarks_protoc_inputs) )
oldpwd=`pwd` && ( cd $(srcdir)/tmp/gogo_proto && $$oldpwd/../src/protoc$(EXEEXT) -I. -I$$oldpwd/$(top_srcdir)/src --gogoslick_out=$$oldpwd/tmp/gogoslick $(benchmarks_protoc_inputs_benchmark_wrapper) )
oldpwd=`pwd` && ( cd $(srcdir)/tmp/gogo_proto && $$oldpwd/../src/protoc$(EXEEXT) -I. -I$$oldpwd/$(top_srcdir)/src --gogoslick_out=$$oldpwd/tmp/gogoslick $(benchmarks_protoc_inputs_proto2_message1) )
oldpwd=`pwd` && ( cd $(srcdir)/tmp/gogo_proto && $$oldpwd/../src/protoc$(EXEEXT) -I. -I$$oldpwd/$(top_srcdir)/src --gogoslick_out=$$oldpwd/tmp/gogoslick $(benchmarks_protoc_inputs_proto2_message2) )
oldpwd=`pwd` && ( cd $(srcdir)/tmp/gogo_proto && $$oldpwd/../src/protoc$(EXEEXT) -I. -I$$oldpwd/$(top_srcdir)/src --gogoslick_out=$$oldpwd/tmp/gogoslick $(benchmarks_protoc_inputs_proto2_message3) )
oldpwd=`pwd` && ( cd $(srcdir)/tmp/gogo_proto && $$oldpwd/../src/protoc$(EXEEXT) -I. -I$$oldpwd/$(top_srcdir)/src --gogoslick_out=$$oldpwd/tmp/gogoslick $(benchmarks_protoc_inputs_proto2_message4) )
touch gogoslick_protoc_middleman
generate-gogo-benchmark-code:
@echo '#! /bin/bash' > generate-gogo-benchmark-code
@echo 'cp $(srcdir)/go/go_benchmark_test.go tmp/$$1/benchmark_code/$$1_benchmark1_test.go' >> generate-gogo-benchmark-code
@echo 'sed -i -e "s/\.\.\/tmp/../g" tmp/$$1/benchmark_code/$$1_benchmark1_test.go' >> generate-gogo-benchmark-code
@echo 'sed -i -e "s/b\.Run(\"\(.*\)\"/b.Run(\"\1\_$$1\"/g" tmp/$$1/benchmark_code/$$1_benchmark1_test.go' >> generate-gogo-benchmark-code
@echo 'if [[ $$2 == 1 ]]; then sed -i -e "s/github\.com\/golang/github.com\/gogo/g" tmp/$$1/benchmark_code/$$1_benchmark1_test.go; fi ' >> generate-gogo-benchmark-code
@chmod +x generate-gogo-benchmark-code
generate_all_gogo_benchmark_code: generate-gogo-benchmark-code make_tmp_dir_gogo
./generate-gogo-benchmark-code go_no_group 0
./generate-gogo-benchmark-code gogofast 1
./generate-gogo-benchmark-code gogofaster 1
./generate-gogo-benchmark-code gogoslick 1
gogo-benchmark:
@echo "Writing shortcut script gogo-benchmark..."
@echo '#! /bin/bash' > gogo-benchmark
@echo 'cd tmp/$$1/benchmark_code' >> gogo-benchmark
@echo 'shift' >> gogo-benchmark
@echo 'all_data=""' >> gogo-benchmark
@echo 'for data_file in $$@; do all_data="$$all_data ../../../$$data_file"; done' >> gogo-benchmark
@echo 'go test -bench=. -- $$all_data' >> gogo-benchmark
@echo 'cd ../..' >> gogo-benchmark
@chmod +x gogo-benchmark
go_no_group: go_no_group_protoc_middleman generate_gogo_data generate_all_gogo_benchmark_code gogo-benchmark
./gogo-benchmark go_no_group $(gogo_data)
gogofast: gogofast_protoc_middleman generate_gogo_data gogo-benchmark generate_all_gogo_benchmark_code
./gogo-benchmark gogofast $(gogo_data)
gogofaster: gogofaster_protoc_middleman generate_gogo_data gogo-benchmark generate_all_gogo_benchmark_code
./gogo-benchmark gogofaster $(gogo_data)
gogoslick: gogoslick_protoc_middleman generate_gogo_data gogo-benchmark generate_all_gogo_benchmark_code
./gogo-benchmark gogoslick $(gogo_data)
############# GOGO RULES END ############
############ UTIL RULES BEGIN ############
bin_PROGRAMS += protoc-gen-gogoproto gogo-data-scrubber protoc-gen-proto2_to_proto3 proto3-data-stripper
protoc_gen_gogoproto_LDADD = $(top_srcdir)/src/libprotobuf.la $(top_srcdir)/src/libprotoc.la
protoc_gen_gogoproto_SOURCES = util/protoc-gen-gogoproto.cc
protoc_gen_gogoproto_CPPFLAGS = -I$(top_srcdir)/src -I$(srcdir)/cpp -I$(srcdir)/util
gogo_data_scrubber_LDADD = $(top_srcdir)/src/libprotobuf.la
gogo_data_scrubber_SOURCES = util/gogo_data_scrubber.cc
gogo_data_scrubber_CPPFLAGS = -I$(top_srcdir)/src -I$(srcdir)/cpp -I$(srcdir)/util
util/gogo_data_scrubber-gogo_data_scrubber.$(OBJEXT): $(benchmarks_protoc_outputs) $(benchmarks_protoc_outputs_proto2) $(benchmarks_protoc_outputs_header) $(benchmarks_protoc_outputs_proto2_header)
nodist_gogo_data_scrubber_SOURCES = \
$(benchmarks_protoc_outputs) \
$(benchmarks_protoc_outputs_proto2) \
$(benchmarks_protoc_outputs_proto2_header) \
$(benchmarks_protoc_outputs_header)
protoc_gen_proto2_to_proto3_LDADD = $(top_srcdir)/src/libprotobuf.la $(top_srcdir)/src/libprotoc.la
protoc_gen_proto2_to_proto3_SOURCES = util/protoc-gen-proto2_to_proto3.cc
protoc_gen_proto2_to_proto3_CPPFLAGS = -I$(top_srcdir)/src -I$(srcdir)/cpp -I$(srcdir)/util
proto3_data_stripper_LDADD = $(top_srcdir)/src/libprotobuf.la
proto3_data_stripper_SOURCES = util/proto3_data_stripper.cc
proto3_data_stripper_CPPFLAGS = -I$(top_srcdir)/src -I$(srcdir)/cpp -I$(srcdir)/util
util/proto3_data_stripper-proto3_data_stripper.$(OBJEXT): $(benchmarks_protoc_outputs) $(benchmarks_protoc_outputs_proto2) $(benchmarks_protoc_outputs_header) $(benchmarks_protoc_outputs_proto2_header)
nodist_proto3_data_stripper_SOURCES = \
$(benchmarks_protoc_outputs) \
$(benchmarks_protoc_outputs_proto2) \
$(benchmarks_protoc_outputs_proto2_header) \
$(benchmarks_protoc_outputs_header)
############ UTIL RULES END ############
############ PROTO3 PREPARATION BEGIN #############
proto3_proto_middleman: protoc-gen-proto2_to_proto3
mkdir -p "tmp/proto3_proto"
oldpwd=`pwd` && ( cd $(srcdir) && $$oldpwd/../src/protoc$(EXEEXT) -I$(srcdir) -I$(top_srcdir) --plugin=protoc-gen-proto2_to_proto3 --proto2_to_proto3_out=$$oldpwd/tmp/proto3_proto $(benchmarks_protoc_inputs) $(benchmarks_protoc_inputs_benchmark_wrapper) $(benchmarks_protoc_inputs_proto2) )
touch proto3_proto_middleman
full_srcdir = $$(cd $(srcdir) && pwd)
proto3_data = $$(for data in $(all_data); do echo $(full_srcdir)"/tmp/proto3_data$${data\#$(full_srcdir)}"; done | xargs)
generate_proto3_data: protoc_middleman protoc_middleman2 proto3-data-stripper
mkdir -p `dirname $(proto3_data)`
./proto3-data-stripper $(all_data) $(proto3_data)
touch generate_proto3_data
############ PROTO3 PREPARATION END #############
############ PHP RULES BEGIN #################
proto3_middleman_php: proto3_proto_middleman
mkdir -p "tmp/php"
oldpwd=`pwd` && ( cd tmp/proto3_proto && $$oldpwd/../src/protoc$(EXEEXT) -I$(srcdir) -I$(top_srcdir) --php_out=$$oldpwd/tmp/php $(benchmarks_protoc_inputs) $(benchmarks_protoc_inputs_benchmark_wrapper) $(benchmarks_protoc_inputs_proto2) )
touch proto3_middleman_php
php-benchmark: proto3_middleman_php generate_proto3_data
mkdir -p "tmp/php/Google/Protobuf/Benchmark" && cp php/PhpBenchmark.php "tmp/php/Google/Protobuf/Benchmark"
cp php/autoload.php "tmp/php"
@echo "Writing shortcut script php-benchmark..."
@echo '#! /bin/bash' > php-benchmark
@echo 'export PROTOBUF_PHP_SRCDIR="$$(cd $(top_srcdir) && pwd)/php/src"' >> php-benchmark
@echo 'cd tmp/php' >> php-benchmark
@echo 'export CURRENT_DIR=$$(pwd)' >> php-benchmark
@echo 'php -d auto_prepend_file="autoload.php" -d include_path="$$(pwd)" Google/Protobuf/Benchmark/PhpBenchmark.php $$@' >> php-benchmark
@echo 'cd ../..' >> php-benchmark
@chmod +x php-benchmark
php: php-benchmark proto3_middleman_php
./php-benchmark --behavior_prefix="php" $(proto3_data)
php_c_extension:
cd $(top_srcdir)/php/ext/google/protobuf && phpize && ./configure CFLAGS='-O3' && make -j8
php-c-benchmark: proto3_middleman_php generate_proto3_data php_c_extension php_c_extension
mkdir -p "tmp/php/Google/Protobuf/Benchmark" && cp php/PhpBenchmark.php "tmp/php/Google/Protobuf/Benchmark"
cp php/autoload.php "tmp/php"
@echo "Writing shortcut script php-c-benchmark..."
@echo '#! /bin/bash' > php-c-benchmark
@echo 'export PROTOBUF_PHP_SRCDIR="$$(cd $(top_srcdir) && pwd)/php/src"' >> php-c-benchmark
@echo 'export PROTOBUF_PHP_EXTDIR="$$PROTOBUF_PHP_SRCDIR/../ext/google/protobuf/modules"' >> php-c-benchmark
@echo 'cd tmp/php' >> php-c-benchmark
@echo 'export CURRENT_DIR=$$(pwd)' >> php-c-benchmark
@echo 'php -d auto_prepend_file="autoload.php" -d include_path="$$(pwd)" -d extension="$$PROTOBUF_PHP_EXTDIR/protobuf.so" Google/Protobuf/Benchmark/PhpBenchmark.php $$@' >> php-c-benchmark
@echo 'cd ../..' >> php-c-benchmark
@chmod +x php-c-benchmark
php_c: php-c-benchmark proto3_middleman_php
./php-c-benchmark --behavior_prefix="php_c" $(proto3_data)
############ PHP RULES END #################
############ protobuf.js RULE BEGIN #############
pbjs_preparation:
mkdir -p tmp/protobuf.js
cd tmp/protobuf.js && git clone https://github.com/dcodeIO/protobuf.js.git && \
cd protobuf.js && npm install && npm run build
cd tmp/protobuf.js && npm install benchmark
cp protobuf.js/* tmp/protobuf.js
cp js/benchmark_suite.js tmp/protobuf.js
touch pbjs_preparation
pbjs_middleman: pbjs_preparation
export OLDDIR=$$(pwd) && cd tmp/protobuf.js && node generate_pbjs_files.js --target static-module --include_path=$$OLDDIR -o generated_bundle_code.js $(benchmarks_protoc_inputs) $(benchmarks_protoc_inputs_benchmark_wrapper) $(benchmarks_protoc_inputs_proto2)
touch pbjs_middleman
pbjs-benchmark: pbjs_middleman
@echo '#! /bin/bash' > pbjs-benchmark
@echo 'cd tmp/protobuf.js' >> pbjs-benchmark
@echo 'sed -i "s/protobufjs/.\/protobuf.js/g" generated_bundle_code.js' >> pbjs-benchmark
@echo 'env NODE_PATH=".:./node_modules:$$NODE_PATH" node protobufjs_benchmark.js $$@' >> pbjs-benchmark
@chmod +x pbjs-benchmark
pbjs: pbjs-benchmark
./pbjs-benchmark $(all_data)
############ protobuf.js RULE END #############
############ JS RULE BEGIN #############
js_preparation:
mkdir -p tmp/js
oldpwd=$$(pwd) && cd $(top_srcdir)/js && npm install && npm test
cd tmp/js && npm install benchmark
cp js/* tmp/js
touch js_preparation
js_middleman: js_preparation
oldpwd=`pwd` && ( cd $(srcdir) && $$oldpwd/../src/protoc$(EXEEXT) -I. -I$(top_srcdir)/src --js_out=import_style=commonjs,binary:$$oldpwd/tmp/js $(benchmarks_protoc_inputs) $(benchmarks_protoc_inputs_benchmark_wrapper) $(benchmarks_protoc_inputs_proto2))
touch js_middleman
js-benchmark: js_middleman
@echo '#! /bin/bash' > js-benchmark
@echo 'export TOP_JS_SRCDIR=$$(cd $(top_srcdir)/js && pwd)' >> js-benchmark
@echo 'cd tmp/js' >> js-benchmark
@echo 'env NODE_PATH="$$TOP_JS_SRCDIR:.:./node_modules:$$NODE_PATH" node --max-old-space-size=4096 js_benchmark.js $$@' >> js-benchmark
@chmod +x js-benchmark
js: js-benchmark
./js-benchmark $(all_data)
############ JS RULE END #############
EXTRA_DIST = \
$(benchmarks_protoc_inputs_benchmark_wrapper) \
$(benchmarks_protoc_inputs) \
$(benchmarks_protoc_inputs_proto2) \
google_size.proto
MAINTAINERCLEANFILES = \
Makefile.in
CLEANFILES = \
$(benchmarks_protoc_outputs) \
$(benchmarks_protoc_outputs_header) \
$(benchmarks_protoc_outputs_proto2) \
$(benchmarks_protoc_outputs_proto2_header) \
initialize_submodule \
make_tmp_dir \
protoc_middleman \
protoc_middleman2 \
javac_middleman \
java-benchmark \
python_cpp_proto_library \
python-pure-python-benchmark \
python-cpp-reflection-benchmark \
python-cpp-generated-code-benchmark \
go-benchmark \
go_protoc_middleman \
make_tmp_dir_gogo \
gogo_proto_middleman \
generate_gogo_data \
go_no_group_protoc_middleman \
go_no_group \
go-no-group-benchmark \
$(cpp_no_group_benchmarks_protoc_outputs_header) \
$(cpp_no_group_benchmarks_protoc_outputs) \
$(cpp_no_group_benchmarks_protoc_outputs_proto2_header) \
$(cpp_no_group_benchmarks_protoc_outputs_proto2) \
generate_all_gogo_benchmark_code \
generate-gogo-benchmark-code \
cpp_no_group_protoc_middleman \
generate_cpp_no_group_benchmark_code \
generate_gogo_benchmark_code \
gogofast_protoc_middleman \
gogofast \
gogofaster_protoc_middleman \
gogofaster \
gogoslick_protoc_middleman \
gogoslick \
gogo-benchmark \
gogo/cpp_no_group/cpp_benchmark.* \
proto3_proto_middleman \
generate_proto3_data \
php-benchmark \
php-c-benchmark \
proto3_middleman_php \
pbjs_preparation \
pbjs_middleman \
pbjs-benchmark \
js_preparation \
js_middleman \
js-benchmark
clean-local:
-rm -rf tmp/*


@ -1,12 +1,13 @@
load("@rules_cc//cc:defs.bzl", "cc_binary")
load("@rules_pkg//:mappings.bzl", "pkg_files", "strip_prefix")
load("//benchmarks:internal.bzl", "internal_benchmark_test")
# The benchmark binary which can be run over any dataset.
cc_binary(
name = "cpp",
name = "cpp_benchmark",
srcs = [
"cpp_benchmark.cc",
],
tags = ["benchmark"],
deps = [
"//:protobuf",
"//benchmarks:benchmarks_cc_proto",
@ -15,6 +16,15 @@ cc_binary(
],
)
# A pre-configured binary using the checked in datasets.
internal_benchmark_test(
name = "cpp",
binary = ":cpp_benchmark",
datasets = [
"//benchmarks/datasets",
],
)
pkg_files(
name = "dist_files",
srcs = [


@ -32,11 +32,11 @@
#include <iostream>
#include "benchmark/benchmark.h"
#include "benchmarks.pb.h"
#include "datasets/google_message1/proto2/benchmark_message1_proto2.pb.h"
#include "datasets/google_message1/proto3/benchmark_message1_proto3.pb.h"
#include "datasets/google_message2/benchmark_message2.pb.h"
#include "datasets/google_message3/benchmark_message3.pb.h"
#include "datasets/google_message4/benchmark_message4.pb.h"
#include "benchmarks/datasets/google_message1/proto2/benchmark_message1_proto2.pb.h"
#include "benchmarks/datasets/google_message1/proto3/benchmark_message1_proto3.pb.h"
#include "benchmarks/datasets/google_message2/benchmark_message2.pb.h"
#include "benchmarks/datasets/google_message3/benchmark_message3.pb.h"
#include "benchmarks/datasets/google_message4/benchmark_message4.pb.h"
#define PREFIX "dataset."


@ -14,6 +14,21 @@ filegroup(
],
visibility = [
"//benchmarks:__subpackages__",
"//conformance:__subpackages__",
],
)
filegroup(
name = "proto3_datasets",
srcs = [
"//benchmarks/datasets/google_message1/proto2:proto3_datasets",
"//benchmarks/datasets/google_message1/proto3:datasets",
"//benchmarks/datasets/google_message2:proto3_datasets",
],
visibility = [
"//benchmarks:__subpackages__",
"//conformance:__subpackages__",
"//csharp:__pkg__",
],
)
@ -59,6 +74,46 @@ java_library(
],
)
py_library(
name = "py_protos",
visibility = [
"//benchmarks:__subpackages__",
],
deps = [
"//benchmarks/datasets/google_message1/proto2:benchmark_message1_proto2_py_proto",
"//benchmarks/datasets/google_message1/proto3:benchmark_message1_proto3_py_proto",
"//benchmarks/datasets/google_message2:benchmark_message2_py_proto",
"//benchmarks/datasets/google_message3:benchmark_message3_py_proto",
"//benchmarks/datasets/google_message4:benchmark_message4_py_proto",
],
)
filegroup(
name = "php_protos",
visibility = [
"//benchmarks:__subpackages__",
],
srcs = [
"//benchmarks/datasets/google_message1/proto2:benchmark_message1_proto2_php_proto",
"//benchmarks/datasets/google_message1/proto3:benchmark_message1_proto3_php_proto",
"//benchmarks/datasets/google_message2:benchmark_message2_php_proto",
],
)
filegroup(
name = "csharp_protos",
visibility = [
"//benchmarks:__subpackages__",
],
srcs = [
"//benchmarks/datasets/google_message1/proto2:benchmark_message1_proto2_csharp_proto",
"//benchmarks/datasets/google_message1/proto3:benchmark_message1_proto3_csharp_proto",
"//benchmarks/datasets/google_message2:benchmark_message2_csharp_proto",
"//benchmarks/datasets/google_message3:benchmark_message3_csharp_proto",
"//benchmarks/datasets/google_message4:benchmark_message4_csharp_proto",
],
)
pkg_files(
name = "dist_files",
srcs = ["BUILD.bazel"],


@ -2,6 +2,8 @@ load("@rules_cc//cc:defs.bzl", "cc_proto_library")
load("@rules_java//java:defs.bzl", "java_proto_library")
load("@rules_pkg//:mappings.bzl", "pkg_files", "strip_prefix")
load("@rules_proto//proto:defs.bzl", "proto_library")
load("//:protobuf.bzl", "internal_py_proto_library", "internal_csharp_proto_library")
load("//benchmarks/util:compatibility.bzl", "proto3_from_proto2_data", "php_proto3_from_proto2_library")
filegroup(
name = "datasets",
@ -13,12 +15,21 @@ filegroup(
],
)
proto3_from_proto2_data(
name = "proto3_datasets",
srcs = [
"dataset.google_message1_proto2.pb",
],
visibility = [
"//benchmarks/datasets:__pkg__",
],
)
proto_library(
name = "benchmark_message1_proto2_proto",
srcs = [
"benchmark_message1_proto2.proto",
],
strip_import_prefix = "/benchmarks",
visibility = [
"//benchmarks/datasets:__pkg__",
],
@ -34,6 +45,14 @@ cc_proto_library(
],
)
internal_csharp_proto_library(
name = "benchmark_message1_proto2_csharp_proto",
srcs = ["benchmark_message1_proto2.proto"],
visibility = [
"//benchmarks/datasets:__pkg__",
],
)
java_proto_library(
name = "benchmark_message1_proto2_java_proto",
visibility = [
@ -44,6 +63,27 @@ java_proto_library(
],
)
php_proto3_from_proto2_library(
name = "benchmark_message1_proto2_php_proto",
src = "benchmark_message1_proto2.proto",
outs = [
"Benchmarks/Proto2/GoogleMessage1.php",
"Benchmarks/Proto2/GoogleMessage1SubMessage.php",
"GPBMetadata/BenchmarkMessage1Proto2.php",
],
visibility = [
"//benchmarks/datasets:__pkg__",
],
)
internal_py_proto_library(
name = "benchmark_message1_proto2_py_proto",
srcs = ["benchmark_message1_proto2.proto"],
visibility = [
"//benchmarks/datasets:__pkg__",
],
)
pkg_files(
name = "dist_files",
srcs = glob(["*"]),


@ -2,6 +2,7 @@ load("@rules_cc//cc:defs.bzl", "cc_proto_library")
load("@rules_java//java:defs.bzl", "java_proto_library")
load("@rules_proto//proto:defs.bzl", "proto_library")
load("@rules_pkg//:mappings.bzl", "pkg_files", "strip_prefix")
load("//:protobuf.bzl", "internal_py_proto_library", "internal_php_proto_library", "internal_csharp_proto_library")
filegroup(
name = "datasets",
@ -18,7 +19,6 @@ proto_library(
srcs = [
"benchmark_message1_proto3.proto",
],
strip_import_prefix = "/benchmarks",
visibility = [
"//benchmarks/datasets:__pkg__",
],
@ -34,6 +34,14 @@ cc_proto_library(
],
)
internal_csharp_proto_library(
name = "benchmark_message1_proto3_csharp_proto",
srcs = ["benchmark_message1_proto3.proto"],
visibility = [
"//benchmarks/datasets:__pkg__",
],
)
java_proto_library(
name = "benchmark_message1_proto3_java_proto",
visibility = [
@ -44,6 +52,27 @@ java_proto_library(
],
)
internal_php_proto_library(
name = "benchmark_message1_proto3_php_proto",
srcs = ["benchmark_message1_proto3.proto"],
outs = [
"Benchmarks/Proto3/GoogleMessage1.php",
"Benchmarks/Proto3/GoogleMessage1SubMessage.php",
"GPBMetadata/BenchmarkMessage1Proto3.php",
],
visibility = [
"//benchmarks/datasets:__pkg__",
],
)
internal_py_proto_library(
name = "benchmark_message1_proto3_py_proto",
srcs = ["benchmark_message1_proto3.proto"],
visibility = [
"//benchmarks/datasets:__pkg__",
],
)
pkg_files(
name = "dist_files",
srcs = glob(["*"]),

View File

@ -2,6 +2,8 @@ load("@rules_cc//cc:defs.bzl", "cc_proto_library")
load("@rules_java//java:defs.bzl", "java_proto_library")
load("@rules_pkg//:mappings.bzl", "pkg_files", "strip_prefix")
load("@rules_proto//proto:defs.bzl", "proto_library")
load("//:protobuf.bzl", "internal_py_proto_library", "internal_csharp_proto_library")
load("//benchmarks/util:compatibility.bzl", "proto3_from_proto2_data", "php_proto3_from_proto2_library")
filegroup(
name = "datasets",
@ -13,12 +15,21 @@ filegroup(
],
)
proto3_from_proto2_data(
name = "proto3_datasets",
srcs = [
"dataset.google_message2.pb",
],
visibility = [
"//benchmarks/datasets:__pkg__",
],
)
proto_library(
name = "benchmark_message2_proto",
srcs = [
"benchmark_message2.proto",
],
strip_import_prefix = "/benchmarks",
visibility = [
"//benchmarks/datasets:__pkg__",
],
@ -34,6 +45,14 @@ cc_proto_library(
],
)
internal_csharp_proto_library(
name = "benchmark_message2_csharp_proto",
srcs = ["benchmark_message2.proto"],
visibility = [
"//benchmarks/datasets:__pkg__",
],
)
java_proto_library(
name = "benchmark_message2_java_proto",
visibility = [
@ -44,6 +63,27 @@ java_proto_library(
],
)
php_proto3_from_proto2_library(
name = "benchmark_message2_php_proto",
src = "benchmark_message2.proto",
outs = [
"Benchmarks/Proto2/GoogleMessage2.php",
"Benchmarks/Proto2/GoogleMessage2GroupedMessage.php",
"GPBMetadata/BenchmarkMessage2.php",
],
visibility = [
"//benchmarks/datasets:__pkg__",
],
)
internal_py_proto_library(
name = "benchmark_message2_py_proto",
srcs = ["benchmark_message2.proto"],
visibility = [
"//benchmarks/datasets:__pkg__",
],
)
pkg_files(
name = "dist_files",
srcs = glob(["*"]),

View File

@ -2,6 +2,7 @@ load("@rules_cc//cc:defs.bzl", "cc_proto_library")
load("@rules_java//java:defs.bzl", "java_proto_library")
load("@rules_pkg//:mappings.bzl", "pkg_files", "strip_prefix")
load("@rules_proto//proto:defs.bzl", "proto_library")
load("//:protobuf.bzl", "internal_py_proto_library", "internal_csharp_proto_library")
filegroup(
name = "datasets",
@ -24,7 +25,6 @@ proto_library(
"benchmark_message3_7.proto",
"benchmark_message3_8.proto",
],
strip_import_prefix = "/benchmarks",
visibility = [
"//benchmarks/datasets:__pkg__",
],
@ -40,6 +40,24 @@ cc_proto_library(
],
)
internal_csharp_proto_library(
name = "benchmark_message3_csharp_proto",
srcs = [
"benchmark_message3.proto",
"benchmark_message3_1.proto",
"benchmark_message3_2.proto",
"benchmark_message3_3.proto",
"benchmark_message3_4.proto",
"benchmark_message3_5.proto",
"benchmark_message3_6.proto",
"benchmark_message3_7.proto",
"benchmark_message3_8.proto",
],
visibility = [
"//benchmarks/datasets:__pkg__",
],
)
java_proto_library(
name = "benchmark_message3_java_proto",
visibility = [
@ -50,6 +68,24 @@ java_proto_library(
],
)
internal_py_proto_library(
name = "benchmark_message3_py_proto",
srcs = [
"benchmark_message3.proto",
"benchmark_message3_1.proto",
"benchmark_message3_2.proto",
"benchmark_message3_3.proto",
"benchmark_message3_4.proto",
"benchmark_message3_5.proto",
"benchmark_message3_6.proto",
"benchmark_message3_7.proto",
"benchmark_message3_8.proto",
],
visibility = [
"//benchmarks/datasets:__pkg__",
],
)
pkg_files(
name = "dist_files",
srcs = glob(["*"]),

View File

@ -34,13 +34,13 @@ syntax = "proto2";
package benchmarks.google_message3;
import "datasets/google_message3/benchmark_message3_1.proto";
import "datasets/google_message3/benchmark_message3_2.proto";
import "datasets/google_message3/benchmark_message3_3.proto";
import "datasets/google_message3/benchmark_message3_4.proto";
import "datasets/google_message3/benchmark_message3_5.proto";
import "datasets/google_message3/benchmark_message3_7.proto";
import "datasets/google_message3/benchmark_message3_8.proto";
import "benchmarks/datasets/google_message3/benchmark_message3_1.proto";
import "benchmarks/datasets/google_message3/benchmark_message3_2.proto";
import "benchmarks/datasets/google_message3/benchmark_message3_3.proto";
import "benchmarks/datasets/google_message3/benchmark_message3_4.proto";
import "benchmarks/datasets/google_message3/benchmark_message3_5.proto";
import "benchmarks/datasets/google_message3/benchmark_message3_7.proto";
import "benchmarks/datasets/google_message3/benchmark_message3_8.proto";
option cc_enable_arenas = true;
option java_package = "com.google.protobuf.benchmarks";

View File

@ -34,11 +34,11 @@ syntax = "proto2";
package benchmarks.google_message3;
import "datasets/google_message3/benchmark_message3_2.proto";
import "datasets/google_message3/benchmark_message3_3.proto";
import "datasets/google_message3/benchmark_message3_5.proto";
import "datasets/google_message3/benchmark_message3_7.proto";
import "datasets/google_message3/benchmark_message3_8.proto";
import "benchmarks/datasets/google_message3/benchmark_message3_2.proto";
import "benchmarks/datasets/google_message3/benchmark_message3_3.proto";
import "benchmarks/datasets/google_message3/benchmark_message3_5.proto";
import "benchmarks/datasets/google_message3/benchmark_message3_7.proto";
import "benchmarks/datasets/google_message3/benchmark_message3_8.proto";
option cc_enable_arenas = true;
option java_package = "com.google.protobuf.benchmarks";

View File

@ -34,11 +34,11 @@ syntax = "proto2";
package benchmarks.google_message3;
import "datasets/google_message3/benchmark_message3_3.proto";
import "datasets/google_message3/benchmark_message3_4.proto";
import "datasets/google_message3/benchmark_message3_5.proto";
import "datasets/google_message3/benchmark_message3_7.proto";
import "datasets/google_message3/benchmark_message3_8.proto";
import "benchmarks/datasets/google_message3/benchmark_message3_3.proto";
import "benchmarks/datasets/google_message3/benchmark_message3_4.proto";
import "benchmarks/datasets/google_message3/benchmark_message3_5.proto";
import "benchmarks/datasets/google_message3/benchmark_message3_7.proto";
import "benchmarks/datasets/google_message3/benchmark_message3_8.proto";
option cc_enable_arenas = true;
option java_package = "com.google.protobuf.benchmarks";

View File

@ -34,10 +34,10 @@ syntax = "proto2";
package benchmarks.google_message3;
import "datasets/google_message3/benchmark_message3_4.proto";
import "datasets/google_message3/benchmark_message3_5.proto";
import "datasets/google_message3/benchmark_message3_7.proto";
import "datasets/google_message3/benchmark_message3_8.proto";
import "benchmarks/datasets/google_message3/benchmark_message3_4.proto";
import "benchmarks/datasets/google_message3/benchmark_message3_5.proto";
import "benchmarks/datasets/google_message3/benchmark_message3_7.proto";
import "benchmarks/datasets/google_message3/benchmark_message3_8.proto";
option cc_enable_arenas = true;
option java_package = "com.google.protobuf.benchmarks";

View File

@ -34,10 +34,10 @@ syntax = "proto2";
package benchmarks.google_message3;
import "datasets/google_message3/benchmark_message3_5.proto";
import "datasets/google_message3/benchmark_message3_6.proto";
import "datasets/google_message3/benchmark_message3_7.proto";
import "datasets/google_message3/benchmark_message3_8.proto";
import "benchmarks/datasets/google_message3/benchmark_message3_5.proto";
import "benchmarks/datasets/google_message3/benchmark_message3_6.proto";
import "benchmarks/datasets/google_message3/benchmark_message3_7.proto";
import "benchmarks/datasets/google_message3/benchmark_message3_8.proto";
option cc_enable_arenas = true;
option java_package = "com.google.protobuf.benchmarks";

View File

@ -34,9 +34,9 @@ syntax = "proto2";
package benchmarks.google_message3;
import "datasets/google_message3/benchmark_message3_6.proto";
import "datasets/google_message3/benchmark_message3_7.proto";
import "datasets/google_message3/benchmark_message3_8.proto";
import "benchmarks/datasets/google_message3/benchmark_message3_6.proto";
import "benchmarks/datasets/google_message3/benchmark_message3_7.proto";
import "benchmarks/datasets/google_message3/benchmark_message3_8.proto";
option cc_enable_arenas = true;
option java_package = "com.google.protobuf.benchmarks";

View File

@ -34,8 +34,8 @@ syntax = "proto2";
package benchmarks.google_message3;
import "datasets/google_message3/benchmark_message3_7.proto";
import "datasets/google_message3/benchmark_message3_8.proto";
import "benchmarks/datasets/google_message3/benchmark_message3_7.proto";
import "benchmarks/datasets/google_message3/benchmark_message3_8.proto";
option cc_enable_arenas = true;
option java_package = "com.google.protobuf.benchmarks";

View File

@ -2,6 +2,7 @@ load("@rules_cc//cc:defs.bzl", "cc_proto_library")
load("@rules_java//java:defs.bzl", "java_proto_library")
load("@rules_pkg//:mappings.bzl", "pkg_files", "strip_prefix")
load("@rules_proto//proto:defs.bzl", "proto_library")
load("//:protobuf.bzl", "internal_py_proto_library", "internal_csharp_proto_library")
filegroup(
name = "datasets",
@ -19,7 +20,6 @@ proto_library(
"benchmark_message4_2.proto",
"benchmark_message4_3.proto",
],
strip_import_prefix = "/benchmarks",
visibility = [
"//benchmarks/datasets:__pkg__",
],
@ -35,6 +35,19 @@ cc_proto_library(
],
)
internal_csharp_proto_library(
name = "benchmark_message4_csharp_proto",
srcs = [
"benchmark_message4.proto",
"benchmark_message4_1.proto",
"benchmark_message4_2.proto",
"benchmark_message4_3.proto",
],
visibility = [
"//benchmarks/datasets:__pkg__",
],
)
java_proto_library(
name = "benchmark_message4_java_proto",
visibility = [
@ -45,6 +58,19 @@ java_proto_library(
],
)
internal_py_proto_library(
name = "benchmark_message4_py_proto",
srcs = [
"benchmark_message4.proto",
"benchmark_message4_1.proto",
"benchmark_message4_2.proto",
"benchmark_message4_3.proto",
],
visibility = [
"//benchmarks/datasets:__pkg__",
],
)
pkg_files(
name = "dist_files",
srcs = glob(["*"]),

View File

@ -34,9 +34,9 @@ syntax = "proto2";
package benchmarks.google_message4;
import "datasets/google_message4/benchmark_message4_1.proto";
import "datasets/google_message4/benchmark_message4_2.proto";
import "datasets/google_message4/benchmark_message4_3.proto";
import "benchmarks/datasets/google_message4/benchmark_message4_1.proto";
import "benchmarks/datasets/google_message4/benchmark_message4_2.proto";
import "benchmarks/datasets/google_message4/benchmark_message4_3.proto";
option cc_enable_arenas = true;
option java_package = "com.google.protobuf.benchmarks";

View File

@ -34,8 +34,8 @@ syntax = "proto2";
package benchmarks.google_message4;
import "datasets/google_message4/benchmark_message4_2.proto";
import "datasets/google_message4/benchmark_message4_3.proto";
import "benchmarks/datasets/google_message4/benchmark_message4_2.proto";
import "benchmarks/datasets/google_message4/benchmark_message4_3.proto";
option cc_enable_arenas = true;
option java_package = "com.google.protobuf.benchmarks";

View File

@ -32,7 +32,7 @@ syntax = "proto2";
package benchmarks.google_message4;
import "datasets/google_message4/benchmark_message4_3.proto";
import "benchmarks/datasets/google_message4/benchmark_message4_3.proto";
option cc_enable_arenas = true;
option java_package = "com.google.protobuf.benchmarks";

View File

@ -1,124 +0,0 @@
package main
import (
benchmarkWrapper "../tmp"
googleMessage1Proto2 "../tmp/datasets/google_message1/proto2"
googleMessage1Proto3 "../tmp/datasets/google_message1/proto3"
googleMessage2 "../tmp/datasets/google_message2"
googleMessage3 "../tmp/datasets/google_message3"
googleMessage4 "../tmp/datasets/google_message4"
"flag"
"github.com/golang/protobuf/proto"
"io/ioutil"
"testing"
)
// Data is returned by the Load function.
type Dataset struct {
name string
newMessage func() proto.Message
marshaled [][]byte
unmarshaled []proto.Message
}
var datasets []Dataset
// This is used to getDefaultInstance for a message type.
func generateNewMessageFunction(dataset benchmarkWrapper.BenchmarkDataset) func() proto.Message {
switch dataset.MessageName {
case "benchmarks.proto3.GoogleMessage1":
return func() proto.Message { return new(googleMessage1Proto3.GoogleMessage1) }
case "benchmarks.proto2.GoogleMessage1":
return func() proto.Message { return new(googleMessage1Proto2.GoogleMessage1) }
case "benchmarks.proto2.GoogleMessage2":
return func() proto.Message { return new(googleMessage2.GoogleMessage2) }
case "benchmarks.google_message3.GoogleMessage3":
return func() proto.Message { return new(googleMessage3.GoogleMessage3) }
case "benchmarks.google_message4.GoogleMessage4":
return func() proto.Message { return new(googleMessage4.GoogleMessage4) }
default:
panic("Unknown message type: " + dataset.MessageName)
}
}
func init() {
flag.Parse()
for _, f := range flag.Args() {
// Load the benchmark.
b, err := ioutil.ReadFile(f)
if err != nil {
panic(err)
}
// Parse the benchmark.
var dm benchmarkWrapper.BenchmarkDataset
if err := proto.Unmarshal(b, &dm); err != nil {
panic(err)
}
// Determine the concrete protobuf message type to use.
var ds Dataset
ds.newMessage = generateNewMessageFunction(dm)
// Unmarshal each test message.
for _, payload := range dm.Payload {
ds.marshaled = append(ds.marshaled, payload)
m := ds.newMessage()
if err := proto.Unmarshal(payload, m); err != nil {
panic(err)
}
ds.unmarshaled = append(ds.unmarshaled, m)
}
ds.name = f
datasets = append(datasets, ds)
}
}
func Benchmark(b *testing.B) {
for _, ds := range datasets {
b.Run(ds.name, func(b *testing.B) {
b.Run("Unmarshal", func(b *testing.B) {
for i := 0; i < b.N; i++ {
for j, payload := range ds.marshaled {
out := ds.newMessage()
if err := proto.Unmarshal(payload, out); err != nil {
b.Fatalf("can't unmarshal message %d %v", j, err)
}
}
}
})
b.Run("Marshal", func(b *testing.B) {
for i := 0; i < b.N; i++ {
for j, m := range ds.unmarshaled {
if _, err := proto.Marshal(m); err != nil {
b.Fatalf("can't marshal message %d %+v: %v", j, m, err)
}
}
}
})
b.Run("Size", func(b *testing.B) {
for i := 0; i < b.N; i++ {
for _, m := range ds.unmarshaled {
proto.Size(m)
}
}
})
b.Run("Clone", func(b *testing.B) {
for i := 0; i < b.N; i++ {
for _, m := range ds.unmarshaled {
proto.Clone(m)
}
}
})
b.Run("Merge", func(b *testing.B) {
for i := 0; i < b.N; i++ {
for _, m := range ds.unmarshaled {
out := ds.newMessage()
proto.Merge(out, m)
}
}
})
})
}
}

37
benchmarks/internal.bzl Normal file
View File

@ -0,0 +1,37 @@
"""Starlark definitions for Protobuf benchmark tests.
PLEASE DO NOT DEPEND ON THE CONTENTS OF THIS FILE, IT IS UNSTABLE.
"""
load("//build_defs:internal_shell.bzl", "inline_sh_binary")
def internal_benchmark_test(
name,
binary,
datasets,
args = [],
env_vars = []):
"""Benchmark test runner.
Args:
name: the name for the test.
binary: a benchmark test binary.
datasets: a set of datasets to benchmark.
args: optional arguments to pass to the binary.
env_vars: environment variables to set in the test.
"""
dataset_labels = []
for dataset in datasets:
dataset_labels.append("$(rootpaths %s)" % dataset)
inline_sh_binary(
name = name,
srcs = datasets,
tools = [binary],
cmd = "%s $(rootpath %s) %s %s" % (
" ".join(env_vars),
binary,
" ".join(args),
" ".join(dataset_labels)),
tags = ["benchmark"],
testonly = 1,
)
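A minimal usage sketch for this macro, with hypothetical target names (the real callers are the per-language BUILD files later in this change):
internal_benchmark_test(
    name = "example_lang",
    binary = ":example_lang_benchmark",    # an inline_sh_binary or *_binary target
    datasets = ["//benchmarks/datasets"],
    env_vars = ["EXAMPLE_ENV=1"],          # prepended to the generated command line
    args = ["--example_flag"],             # forwarded to the binary
)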

View File

@ -0,0 +1,55 @@
load("@rules_java//java:defs.bzl", "java_library", "java_binary")
load("@rules_pkg//:mappings.bzl", "pkg_files", "strip_prefix")
load("//build_defs:internal_shell.bzl", "inline_sh_binary")
load("//benchmarks:internal.bzl", "internal_benchmark_test")
java_binary(
name = "java_benchmark_jar",
srcs = [
"src/main/java/com/google/protobuf/ProtoCaliperBenchmark.java",
],
main_class = "com.google.caliper.runner.CaliperMain",
deps = [
"//:protobuf_java",
"//benchmarks:benchmarks_java_proto",
"//benchmarks/datasets:java_protos",
"@maven//:com_google_caliper_caliper",
"@maven//:com_google_caliper_caliper_api",
],
)
# The benchmark binary which can be run over any dataset.
inline_sh_binary(
name = "java_benchmark",
srcs = ["//benchmarks/datasets"],
tools = [":java_benchmark_jar"],
cmd = """
data_files=""
conf=()
for arg in "$${@:1}"; do
if [[ $${arg:0:1} == "-" ]]; then
conf+=($$arg)
else
data_files+="$$arg,"
fi
done
$(rootpath :java_benchmark_jar) com.google.protobuf.ProtoCaliperBenchmark \
-i runtime -DdataFile=$${data_files:0:-1} $${conf[*]}
"""
)
# A pre-configured binary using the checked in datasets.
internal_benchmark_test(
name = "java",
binary = ":java_benchmark",
datasets = ["//benchmarks/datasets"],
)
pkg_files(
name = "dist_files",
srcs = [
"BUILD.bazel",
] + glob(["**/*.java"]),
strip_prefix = strip_prefix.from_root(""),
visibility = ["//benchmarks:__pkg__"],
)

View File

@ -1,98 +0,0 @@
<?xml version="1.0" encoding="UTF-8"?>
<project xmlns="http://maven.apache.org/POM/4.0.0"
xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
<modelVersion>4.0.0</modelVersion>
<artifactId>protobuf-java-benchmark</artifactId>
<groupId>com.google.protobuf</groupId>
<version>1.0.0</version>
<name>Protocol Buffers [Benchmark]</name>
<description>The benchmark tools for Protobuf Java.</description>
<dependencies>
<dependency>
<groupId>com.google.protobuf</groupId>
<artifactId>protobuf-java</artifactId>
<version>${protobuf.version}</version>
<type>jar</type>
<scope>system</scope>
<systemPath>${project.basedir}/lib/protobuf-java.jar</systemPath>
</dependency>
<dependency>
<groupId>com.google.caliper</groupId>
<artifactId>caliper</artifactId>
<version>1.0-beta-3</version>
</dependency>
</dependencies>
<build>
<pluginManagement>
<plugins>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-assembly-plugin</artifactId>
<version>2.4.1</version>
<configuration>
<!-- get all project dependencies -->
<descriptorRefs>
<descriptorRef>jar-with-dependencies</descriptorRef>
</descriptorRefs>
<!-- MainClass in mainfest make a executable jar -->
<archive>
<manifest>
<mainClass>com.mkyong.core.utils.App</mainClass>
</manifest>
</archive>
</configuration>
<executions>
<execution>
<id>make-assembly</id>
<!-- bind to the packaging phase -->
<phase>package</phase>
<goals>
<goal>single</goal>
</goals>
</execution>
</executions>
</plugin>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-compiler-plugin</artifactId>
<version>3.5.1</version>
<configuration>
<source>1.8</source>
<target>1.8</target>
</configuration>
</plugin>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-jar-plugin</artifactId>
<version>2.5</version>
<configuration>
<archive>
<manifest>
<addClasspath>true</addClasspath>
<mainClass>com.google.protocolbuffers.ProtoBench</mainClass>
</manifest>
</archive>
</configuration>
</plugin>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-source-plugin</artifactId>
<version>2.4</version>
<executions>
<execution>
<id>attach-sources</id>
<goals>
<goal>jar-no-fork</goal>
</goals>
</execution>
</executions>
</plugin>
</plugins>
</pluginManagement>
</build>
</project>

View File

@ -1,33 +0,0 @@
var benchmark = require("benchmark");
function newBenchmark(messageName, filename, language) {
var benches = [];
return {
suite: new benchmark.Suite(messageName + filename + language )
.on("add", function(event) {
benches.push(event.target);
})
.on("start", function() {
process.stdout.write(
"benchmarking message " + messageName
+ " of dataset file " + filename
+ "'s performance ..." + "\n\n");
})
.on("cycle", function(event) {
process.stdout.write(String(event.target) + "\n");
})
.on("complete", function() {
var getHz = function(bench) {
return 1 / (bench.stats.mean + bench.stats.moe);
}
benches.forEach(function(val, index) {
benches[index] = getHz(val);
});
}),
benches: benches
}
}
module.exports = {
newBenchmark: newBenchmark
}

View File

@ -1,82 +0,0 @@
require('./datasets/google_message1/proto2/benchmark_message1_proto2_pb.js');
require('./datasets/google_message1/proto3/benchmark_message1_proto3_pb.js');
require('./datasets/google_message2/benchmark_message2_pb.js');
require('./datasets/google_message3/benchmark_message3_pb.js');
require('./datasets/google_message4/benchmark_message4_pb.js');
require('./benchmarks_pb.js');
var fs = require('fs');
var benchmarkSuite = require("./benchmark_suite.js");
function getNewPrototype(name) {
var message = eval("proto." + name);
if (typeof(message) == "undefined") {
throw "type " + name + " is undefined";
}
return message;
}
var results = [];
var json_file = "";
console.log("#####################################################");
console.log("Js Benchmark: ");
process.argv.forEach(function(filename, index) {
if (index < 2) {
return;
}
if (filename.indexOf("--json_output") != -1) {
json_file = filename.replace(/^--json_output=/, '');
return;
}
var benchmarkDataset =
proto.benchmarks.BenchmarkDataset.deserializeBinary(fs.readFileSync(filename));
var messageList = [];
var totalBytes = 0;
benchmarkDataset.getPayloadList().forEach(function(onePayload) {
var message = getNewPrototype(benchmarkDataset.getMessageName());
messageList.push(message.deserializeBinary(onePayload));
totalBytes += onePayload.length;
});
var scenarios = benchmarkSuite.newBenchmark(
benchmarkDataset.getMessageName(), filename, "js");
scenarios.suite
.add("js deserialize", function() {
benchmarkDataset.getPayloadList().forEach(function(onePayload) {
var protoType = getNewPrototype(benchmarkDataset.getMessageName());
protoType.deserializeBinary(onePayload);
});
})
.add("js serialize", function() {
var protoType = getNewPrototype(benchmarkDataset.getMessageName());
messageList.forEach(function(message) {
message.serializeBinary();
});
})
.run({"Async": false});
results.push({
filename: filename,
benchmarks: {
protobufjs_decoding: scenarios.benches[0] * totalBytes / 1024 / 1024,
protobufjs_encoding: scenarios.benches[1] * totalBytes / 1024 / 1024
}
})
console.log("Throughput for deserialize: "
+ scenarios.benches[0] * totalBytes / 1024 / 1024 + "MB/s" );
console.log("Throughput for serialize: "
+ scenarios.benches[1] * totalBytes / 1024 / 1024 + "MB/s" );
console.log("");
});
console.log("#####################################################");
if (json_file != "") {
fs.writeFile(json_file, JSON.stringify(results), (err) => {
if (err) throw err;
});
}

View File

@ -0,0 +1,69 @@
load("//benchmarks:internal.bzl", "internal_benchmark_test")
load("//build_defs:internal_shell.bzl", "inline_sh_binary")
load("@rules_pkg//:mappings.bzl", "pkg_files", "strip_prefix")
# The benchmark binary which can be run over any dataset.
inline_sh_binary(
name = "php_benchmark",
srcs = [
"PhpBenchmark.php",
"autoload.php",
],
deps = [
"//benchmarks:benchmarks_php_proto",
"//benchmarks/datasets:php_protos",
"//php:source_files",
],
cmd = """
php -d include_path=benchmarks:php/src \\
-d auto_prepend_file=$(rootpath autoload.php) \\
$(rootpath PhpBenchmark.php) $$@
""",
testonly = 1,
)
# A pre-configured binary using the checked in datasets.
internal_benchmark_test(
name = "php",
binary = ":php_benchmark",
datasets = ["//benchmarks/datasets:proto3_datasets"],
env_vars = ["PROTOBUF_PHP_SRCDIR=php/src"],
args = ["--behavior_prefix='php'"],
)
# The benchmark binary which can be run over any dataset.
inline_sh_binary(
name = "php_c_benchmark",
srcs = [
"PhpBenchmark.php",
"//php:extension",
],
deps = [
"//benchmarks:benchmarks_php_proto",
"//benchmarks/datasets:php_protos",
],
cmd = """
php -d include_path=benchmarks:php/src \\
-dextension=$(rootpath //php:extension) \\
$(rootpath PhpBenchmark.php) $$@
""",
testonly = 1,
)
# A pre-configured binary using the checked in datasets.
internal_benchmark_test(
name = "php_c",
binary = ":php_c_benchmark",
datasets = ["//benchmarks/datasets:proto3_datasets"],
env_vars = ["PROTOBUF_PHP_SRCDIR=php/src"],
args = ["--behavior_prefix='php_c'"],
)
pkg_files(
name = "dist_files",
srcs = glob(["*.php"]) + [
"BUILD.bazel",
],
strip_prefix = strip_prefix.from_root(""),
visibility = ["//benchmarks:__pkg__"],
)

View File

@ -1,25 +0,0 @@
var pbjs = require("./protobuf.js/cli").pbjs
var argv = [];
var protoFiles = [];
var prefix = "";
process.argv.forEach(function(val, index) {
var arg = val;
if (arg.length > 6 && arg.substring(arg.length - 6) == ".proto") {
protoFiles.push(arg);
} else if (arg.length > 15 && arg.substring(0, 15) == "--include_path=") {
prefix = arg.substring(15);
} else if (index >= 2) {
argv.push(arg);
}
});
protoFiles.forEach(function(val) {
argv.push(prefix + "/" + val);
});
pbjs.main(argv, function(err, output){
if (err) {
console.log(err);
}
});

View File

@ -1,66 +0,0 @@
var root = require("./generated_bundle_code.js");
var fs = require('fs');
var benchmark = require("./node_modules/benchmark");
var benchmarkSuite = require("./benchmark_suite.js");
function getNewPrototype(name) {
var message = eval("root." + name);
if (typeof(message) == "undefined") {
throw "type " + name + " is undefined";
}
return message;
}
var results = [];
console.log("#####################################################");
console.log("ProtobufJs Benchmark: ");
process.argv.forEach(function(filename, index) {
if (index < 2) {
return;
}
var benchmarkDataset =
root.benchmarks.BenchmarkDataset.decode(fs.readFileSync(filename));
var messageList = [];
var totalBytes = 0;
benchmarkDataset.payload.forEach(function(onePayload) {
var message = getNewPrototype(benchmarkDataset.messageName);
messageList.push(message.decode(onePayload));
totalBytes += onePayload.length;
});
var scenarios = benchmarkSuite.newBenchmark(
benchmarkDataset.messageName, filename, "protobufjs");
scenarios.suite
.add("protobuf.js static decoding", function() {
benchmarkDataset.payload.forEach(function(onePayload) {
var protoType = getNewPrototype(benchmarkDataset.messageName);
protoType.decode(onePayload);
});
})
.add("protobuf.js static encoding", function() {
var protoType = getNewPrototype(benchmarkDataset.messageName);
messageList.forEach(function(message) {
protoType.encode(message).finish();
});
})
.run({"Async": false});
results.push({
filename: filename,
benchmarks: {
protobufjs_decoding: scenarios.benches[0] * totalBytes,
protobufjs_encoding: scenarios.benches[1] * totalBytes
}
})
console.log("Throughput for decoding: "
+ scenarios.benches[0] * totalBytes / 1024 / 1024 + "MB/s" );
console.log("Throughput for encoding: "
+ scenarios.benches[1] * totalBytes / 1024 / 1024 + "MB/s" );
console.log("");
});
console.log("#####################################################");

View File

@ -0,0 +1,71 @@
load("@rules_cc//cc:defs.bzl", "cc_binary")
load("@rules_python//python:defs.bzl", "py_library", "py_binary")
load("@rules_pkg//:mappings.bzl", "pkg_files", "strip_prefix")
load("//benchmarks:internal.bzl", "internal_benchmark_test")
cc_binary(
name = "libbenchmark_messages.so",
srcs = ["python_benchmark_messages.cc"],
deps = [
"@local_config_python//:python_headers",
"//external:python_headers",
"//benchmarks:benchmarks_cc_proto",
"//benchmarks/datasets:cc_protos",
],
linkshared = True,
linkstatic = True,
visibility = ["//visibility:private"],
)
# The benchmark binary which can be run over any dataset.
py_binary(
name = "python_benchmark",
srcs = ["py_benchmark.py"],
main = "py_benchmark.py",
data = ["libbenchmark_messages.so"],
deps = [
"//:protobuf_python",
"//benchmarks:benchmarks_py_proto",
"//benchmarks/datasets:py_protos",
],
env = select({
"//python:use_fast_cpp_protos": {"PROTOCOL_BUFFERS_PYTHON_IMPLEMENTATION": "cpp"},
"//conditions:default": {"PROTOCOL_BUFFERS_PYTHON_IMPLEMENTATION": "python"},
}),
)
# Pre-configured binaries using the checked in datasets.
internal_benchmark_test(
name = "python",
binary = ":python_benchmark",
datasets = ["//benchmarks/datasets"],
env_vars = ["PROTOCOL_BUFFERS_PYTHON_IMPLEMENTATION=python"],
)
# Note: this requires --define=use_fast_cpp_protos=true
internal_benchmark_test(
name = "cpp_reflection",
binary = ":python_benchmark",
datasets = ["//benchmarks/datasets"],
env_vars = ["PROTOCOL_BUFFERS_PYTHON_IMPLEMENTATION=cpp"],
)
# Note: this requires --define=use_fast_cpp_protos=true
internal_benchmark_test(
name = "cpp_generated_code",
binary = ":python_benchmark",
datasets = ["//benchmarks/datasets"],
env_vars = ["PROTOCOL_BUFFERS_PYTHON_IMPLEMENTATION=cpp"],
args = ["--cpp_generated"],
)
pkg_files(
name = "dist_files",
srcs = glob(["*.py"]) + [
"BUILD.bazel",
"python_benchmark_messages.cc",
],
strip_prefix = strip_prefix.from_root(""),
visibility = ["//benchmarks:__pkg__"],
)

View File

@ -27,17 +27,17 @@ args = parser.parse_args()
# for the descriptor can be found in the pool
if args.cpp_generated != "no":
sys.path.append( os.path.dirname( os.path.dirname( os.path.abspath(__file__) ) ) + "/.libs" )
import libbenchmark_messages
from benchmarks.python import libbenchmark_messages
sys.path.append( os.path.dirname( os.path.dirname( os.path.abspath(__file__) ) ) + "/tmp" )
# END CPP GENERATED MESSAGE
import datasets.google_message1.proto2.benchmark_message1_proto2_pb2 as benchmark_message1_proto2_pb2
import datasets.google_message1.proto3.benchmark_message1_proto3_pb2 as benchmark_message1_proto3_pb2
import datasets.google_message2.benchmark_message2_pb2 as benchmark_message2_pb2
import datasets.google_message3.benchmark_message3_pb2 as benchmark_message3_pb2
import datasets.google_message4.benchmark_message4_pb2 as benchmark_message4_pb2
import benchmarks_pb2 as benchmarks_pb2
import benchmarks.datasets.google_message1.proto2.benchmark_message1_proto2_pb2 as benchmark_message1_proto2_pb2
import benchmarks.datasets.google_message1.proto3.benchmark_message1_proto3_pb2 as benchmark_message1_proto3_pb2
import benchmarks.datasets.google_message2.benchmark_message2_pb2 as benchmark_message2_pb2
import benchmarks.datasets.google_message3.benchmark_message3_pb2 as benchmark_message3_pb2
import benchmarks.datasets.google_message4.benchmark_message4_pb2 as benchmark_message4_pb2
import benchmarks.benchmarks_pb2 as benchmarks_pb2
def run_one_test(filename):

View File

@ -1,11 +1,11 @@
#include <Python.h>
#include "benchmarks.pb.h"
#include "datasets/google_message1/proto2/benchmark_message1_proto2.pb.h"
#include "datasets/google_message1/proto3/benchmark_message1_proto3.pb.h"
#include "datasets/google_message2/benchmark_message2.pb.h"
#include "datasets/google_message3/benchmark_message3.pb.h"
#include "datasets/google_message4/benchmark_message4.pb.h"
#include "benchmarks/datasets/google_message1/proto2/benchmark_message1_proto2.pb.h"
#include "benchmarks/datasets/google_message1/proto3/benchmark_message1_proto3.pb.h"
#include "benchmarks/datasets/google_message2/benchmark_message2.pb.h"
#include "benchmarks/datasets/google_message3/benchmark_message3.pb.h"
#include "benchmarks/datasets/google_message4/benchmark_message4.pb.h"
static struct PyModuleDef _module = {PyModuleDef_HEAD_INIT,
"libbenchmark_messages",

View File

@ -0,0 +1,50 @@
load("@rules_cc//cc:defs.bzl", "cc_binary")
load("@rules_pkg//:mappings.bzl", "pkg_files", "strip_prefix")
load("@rules_python//python:defs.bzl", "py_binary")
package(default_visibility = ["//benchmarks:__subpackages__"])
cc_binary(
name = "protoc-gen-proto2_to_proto3",
srcs = [
"schema_proto2_to_proto3_util.h",
"protoc-gen-proto2_to_proto3.cc",
],
deps = [
"//:protobuf",
"//src/google/protobuf/compiler:code_generator",
"//benchmarks:benchmarks_cc_proto",
],
visibility = ["//benchmarks:__subpackages__"],
)
cc_binary(
name = "proto3_data_stripper",
srcs = [
"data_proto2_to_proto3_util.h",
"proto3_data_stripper.cc",
],
deps = [
"//:protobuf",
"//benchmarks/datasets:cc_protos",
"//benchmarks:benchmarks_cc_proto",
],
)
py_binary(
name = "result_parser",
srcs = ["result_parser.py"],
deps = [
"//benchmarks:benchmarks_py_proto",
],
)
################################################################################
# Distribution files
################################################################################
pkg_files(
name = "dist_files",
srcs = glob(["*"]),
strip_prefix = strip_prefix.from_root(""),
)

View File

@ -0,0 +1,103 @@
"""Starlark definitions for converting proto2 to proto3.
PLEASE DO NOT DEPEND ON THE CONTENTS OF THIS FILE, IT IS UNSTABLE.
"""
load("//:protobuf.bzl", "internal_php_proto_library")
def proto3_from_proto2_data(
name,
srcs,
**kwargs):
"""Transforms proto2 binary data into a proto3-compatible format,
Args:
name: the name of the target representing the generated proto files.
srcs: the source binary protobuf data files.
**kwargs: standard arguments to forward on
"""
outs = []
out_files = []
src_files = []
for src in srcs:
outs.append("proto3/" + src)
out_files.append("$(RULEDIR)/proto3/" + src)
src_files.append("$(rootpath %s)" % src);
native.genrule(
name = name + "_genrule",
srcs = srcs,
exec_tools = [
"//benchmarks/util:proto3_data_stripper",
],
outs = outs,
cmd = "$(execpath //benchmarks/util:proto3_data_stripper) %s %s" % (
" ".join(src_files), " ".join(out_files)),
)
native.filegroup(
name = name,
srcs = outs,
**kwargs,
)
def _proto3_from_proto2_library(
name,
srcs,
**kwargs):
"""Create a proto3 library from a proto2 source.
Args:
name: the name of the target representing the generated proto files.
srcs: the source proto2 files. Note: these must be raw sources.
**kwargs: standard arguments to forward on
"""
outs = []
src_files = []
for src in srcs:
outs.append(src + "3")
src_files.append("$(rootpath %s)" % src);
native.genrule(
name = name,
srcs = srcs,
exec_tools = [
"//:protoc",
"//benchmarks/util:protoc-gen-proto2_to_proto3",
],
outs = outs,
cmd = """
$(execpath //:protoc) \
--plugin=$(execpath //benchmarks/util:protoc-gen-proto2_to_proto3) \
--proto_path=. \
--proto_path=$(GENDIR) \
--proto2_to_proto3_out=$(GENDIR) \
%s
""" % (" ".join(src_files)),
**kwargs,
)
def php_proto3_from_proto2_library(
name,
src,
outs = [],
**kwargs):
"""Create a proto3 php library from a proto2 source.
Args:
name: the name of the target representing the generated proto files.
src: the source proto2 file.
outs: the expected php outputs.
**kwargs: standard arguments to forward on
"""
_proto3_from_proto2_library(
name = name + "_genrule",
srcs = [src],
)
internal_php_proto_library(
name = name,
srcs = [name + "_genrule"],
outs = outs,
**kwargs,
)
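A minimal usage sketch from a BUILD file, with hypothetical file names (the real callers are the dataset BUILD files earlier in this change):
proto3_from_proto2_data(
    name = "proto3_datasets",
    srcs = ["dataset.example_message.pb"],    # a proto2-encoded benchmark payload
)
php_proto3_from_proto2_library(
    name = "example_php_proto",
    src = "example_message.proto",            # a proto2 schema rewritten as proto3
    outs = ["GPBMetadata/ExampleMessage.php"],
)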

View File

@ -1,74 +0,0 @@
#include "benchmarks.pb.h"
#include "datasets/google_message1/proto2/benchmark_message1_proto2.pb.h"
#include "datasets/google_message1/proto3/benchmark_message1_proto3.pb.h"
#include "datasets/google_message2/benchmark_message2.pb.h"
#include "datasets/google_message3/benchmark_message3.pb.h"
#include "datasets/google_message4/benchmark_message4.pb.h"
#include "data_proto2_to_proto3_util.h"
#include <fstream>
using google::protobuf::util::GogoDataStripper;
std::string ReadFile(const std::string& name) {
std::ifstream file(name.c_str());
GOOGLE_CHECK(file.is_open()) << "Couldn't find file '"
<< name
<< "', please make sure you are running this command from the benchmarks"
<< " directory.\n";
return std::string((std::istreambuf_iterator<char>(file)),
std::istreambuf_iterator<char>());
}
int main(int argc, char *argv[]) {
if (argc % 2 == 0 || argc == 1) {
std::cerr << "Usage: [input_files] [output_file_names] where " <<
"input_files are one to one mapping to output_file_names." <<
std::endl;
return 1;
}
for (int i = argc / 2; i > 0; i--) {
const std::string &input_file = argv[i];
const std::string &output_file = argv[i + argc / 2];
std::cerr << "Generating " << input_file
<< " to " << output_file << std::endl;
benchmarks::BenchmarkDataset dataset;
Message* message;
std::string dataset_payload = ReadFile(input_file);
GOOGLE_CHECK(dataset.ParseFromString(dataset_payload))
<< "Can' t parse data file " << input_file;
if (dataset.message_name() == "benchmarks.proto3.GoogleMessage1") {
message = new benchmarks::proto3::GoogleMessage1;
} else if (dataset.message_name() == "benchmarks.proto2.GoogleMessage1") {
message = new benchmarks::proto2::GoogleMessage1;
} else if (dataset.message_name() == "benchmarks.proto2.GoogleMessage2") {
message = new benchmarks::proto2::GoogleMessage2;
} else if (dataset.message_name() ==
"benchmarks.google_message3.GoogleMessage3") {
message = new benchmarks::google_message3::GoogleMessage3;
} else if (dataset.message_name() ==
"benchmarks.google_message4.GoogleMessage4") {
message = new benchmarks::google_message4::GoogleMessage4;
} else {
std::cerr << "Unknown message type: " << dataset.message_name();
exit(1);
}
for (int i = 0; i < dataset.payload_size(); i++) {
message->ParseFromString(dataset.payload(i));
GogoDataStripper stripper;
stripper.StripMessage(message);
dataset.set_payload(i, message->SerializeAsString());
}
std::ofstream ofs(output_file);
ofs << dataset.SerializeAsString();
ofs.close();
}
return 0;
}

View File

@ -1,9 +1,9 @@
#include "benchmarks.pb.h"
#include "datasets/google_message1/proto2/benchmark_message1_proto2.pb.h"
#include "datasets/google_message1/proto3/benchmark_message1_proto3.pb.h"
#include "datasets/google_message2/benchmark_message2.pb.h"
#include "datasets/google_message3/benchmark_message3.pb.h"
#include "datasets/google_message4/benchmark_message4.pb.h"
#include "benchmarks/datasets/google_message1/proto2/benchmark_message1_proto2.pb.h"
#include "benchmarks/datasets/google_message1/proto3/benchmark_message1_proto3.pb.h"
#include "benchmarks/datasets/google_message2/benchmark_message2.pb.h"
#include "benchmarks/datasets/google_message3/benchmark_message3.pb.h"
#include "benchmarks/datasets/google_message4/benchmark_message4.pb.h"
#include "data_proto2_to_proto3_util.h"
#include <fstream>

View File

@ -1,103 +0,0 @@
#include "google/protobuf/compiler/code_generator.h"
#include "google/protobuf/io/zero_copy_stream.h"
#include "google/protobuf/io/printer.h"
#include "google/protobuf/descriptor.h"
#include "google/protobuf/descriptor.pb.h"
#include "schema_proto2_to_proto3_util.h"
#include "google/protobuf/compiler/plugin.h"
using google::protobuf::FileDescriptorProto;
using google::protobuf::FileDescriptor;
using google::protobuf::DescriptorPool;
using google::protobuf::io::Printer;
using google::protobuf::util::SchemaGroupStripper;
using google::protobuf::util::EnumScrubber;
namespace google {
namespace protobuf {
namespace compiler {
namespace {
string StripProto(string filename) {
if (filename.substr(filename.size() - 11) == ".protodevel") {
// .protodevel
return filename.substr(0, filename.size() - 11);
} else {
// .proto
return filename.substr(0, filename.size() - 6);
}
}
DescriptorPool new_pool_;
} // namespace
class GoGoProtoGenerator : public CodeGenerator {
public:
virtual bool GenerateAll(const std::vector<const FileDescriptor*>& files,
const string& parameter,
GeneratorContext* context,
string* error) const {
for (int i = 0; i < files.size(); i++) {
for (auto file : files) {
bool can_generate =
(new_pool_.FindFileByName(file->name()) == nullptr);
for (int j = 0; j < file->dependency_count(); j++) {
can_generate &= (new_pool_.FindFileByName(
file->dependency(j)->name()) != nullptr);
}
for (int j = 0; j < file->public_dependency_count(); j++) {
can_generate &= (new_pool_.FindFileByName(
file->public_dependency(j)->name()) != nullptr);
}
for (int j = 0; j < file->weak_dependency_count(); j++) {
can_generate &= (new_pool_.FindFileByName(
file->weak_dependency(j)->name()) != nullptr);
}
if (can_generate) {
Generate(file, parameter, context, error);
break;
}
}
}
return true;
}
virtual bool Generate(const FileDescriptor* file,
const string& parameter,
GeneratorContext* context,
string* error) const {
FileDescriptorProto new_file;
file->CopyTo(&new_file);
SchemaGroupStripper::StripFile(file, &new_file);
EnumScrubber enum_scrubber;
enum_scrubber.ScrubFile(&new_file);
string filename = file->name();
string basename = StripProto(filename);
std::vector<std::pair<string,string>> option_pairs;
ParseGeneratorParameter(parameter, &option_pairs);
std::unique_ptr<google::protobuf::io::ZeroCopyOutputStream> output(
context->Open(basename + ".proto"));
string content = new_pool_.BuildFile(new_file)->DebugString();
Printer printer(output.get(), '$');
printer.WriteRaw(content.c_str(), content.size());
return true;
}
};
} // namespace compiler
} // namespace protobuf
} // namespace google
int main(int argc, char* argv[]) {
google::protobuf::compiler::GoGoProtoGenerator generator;
return google::protobuf::compiler::PluginMain(argc, argv, &generator);
}

View File

@ -15,6 +15,7 @@ using google::protobuf::util::SchemaGroupStripper;
using google::protobuf::util::EnumScrubber;
using google::protobuf::util::ExtensionStripper;
using google::protobuf::util::FieldScrubber;
using google::protobuf::util::ImportScrubber;
namespace google {
namespace protobuf {
@ -22,7 +23,7 @@ namespace compiler {
namespace {
string StripProto(string filename) {
std::string StripProtoExt(const std::string& filename) {
return filename.substr(0, filename.rfind(".proto"));
}
@ -36,9 +37,9 @@ DescriptorPool* GetPool() {
class Proto2ToProto3Generator final : public CodeGenerator {
public:
bool GenerateAll(const std::vector<const FileDescriptor*>& files,
const string& parameter,
const std::string& parameter,
GeneratorContext* context,
string* error) const {
std::string* error) const {
for (int i = 0; i < files.size(); i++) {
for (auto file : files) {
if (CanGenerate(file)) {
@ -52,28 +53,30 @@ class Proto2ToProto3Generator final : public CodeGenerator {
}
bool Generate(const FileDescriptor* file,
const string& parameter,
const std::string& parameter,
GeneratorContext* context,
string* error) const {
std::string* error) const {
FileDescriptorProto new_file;
file->CopyTo(&new_file);
new_file.set_name(ImportScrubber::ScrubFilename(file->name()));
SchemaGroupStripper::StripFile(file, &new_file);
EnumScrubber enum_scrubber;
enum_scrubber.ScrubFile(&new_file);
ExtensionStripper::StripFile(&new_file);
FieldScrubber::ScrubFile(&new_file);
ImportScrubber::ScrubFile(&new_file);
new_file.set_syntax("proto3");
string filename = file->name();
string basename = StripProto(filename);
std::string filename = file->name();
std::string basename = StripProtoExt(filename);
std::vector<std::pair<string,string>> option_pairs;
std::vector<std::pair<std::string,std::string>> option_pairs;
ParseGeneratorParameter(parameter, &option_pairs);
std::unique_ptr<google::protobuf::io::ZeroCopyOutputStream> output(
context->Open(basename + ".proto"));
string content = GetPool()->BuildFile(new_file)->DebugString();
context->Open(basename + ".proto3"));
std::string content = GetPool()->BuildFile(new_file)->DebugString();
Printer printer(output.get(), '$');
printer.WriteRaw(content.c_str(), content.size());
@ -81,11 +84,12 @@ class Proto2ToProto3Generator final : public CodeGenerator {
}
private:
bool CanGenerate(const FileDescriptor* file) const {
if (GetPool()->FindFileByName(file->name()) != nullptr) {
if (GetPool()->FindFileByName(ImportScrubber::ScrubFilename(file->name())) != nullptr) {
return false;
}
for (int j = 0; j < file->dependency_count(); j++) {
if (GetPool()->FindFileByName(file->dependency(j)->name()) == nullptr) {
if (GetPool()->FindFileByName(ImportScrubber::ScrubFilename(
file->dependency(j)->name())) == nullptr) {
return false;
}
}

View File

@ -1,5 +1,3 @@
# This import depends on the automake rule protoc_middleman, please make sure
# protoc_middleman has been built before run this file.
import argparse
import json
import re
@ -8,7 +6,7 @@ import os.path
import sys
sys.path.append(os.path.join(os.path.dirname(os.path.realpath(__file__)), os.pardir))
# END OPENSOURCE
import tmp.benchmarks_pb2 as benchmarks_pb2
from benchmarks import benchmarks_pb2
__file_size_map = {}

View File

@ -187,6 +187,18 @@ class FieldScrubber {
}
};
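// Rewrites schema imports so they point at the generated proto3 variants,
// which are emitted with a trailing "3" (e.g. "foo.proto" -> "foo.proto3").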
class ImportScrubber {
public:
static std::string ScrubFilename(const std::string& filename) {
return filename + "3";
}
static void ScrubFile(FileDescriptorProto *file) {
for(std::string& import: *file->mutable_dependency()) {
import += "3";
}
}
};
} // namespace util
} // namespace protobuf
} // namespace google

View File

@ -17,6 +17,16 @@ create_compiler_config_setting(
value = "msvc-cl",
)
config_setting(
name = "aarch64",
values = {"cpu": "linux-aarch_64"},
)
config_setting(
name = "x86_64",
values = {"cpu": "linux-x86_64"},
)
# Android NDK builds can specify different crosstool_top flags to choose which
# STL they use for C++. We need these multiple variants to catch all of those
# versions of crosstool_top and reliably detect Android.

56
build_defs/arch_tests.bzl Normal file
View File

@ -0,0 +1,56 @@
"""Generated unittests to verify that a binary is built for the expected architecture."""
load("//build_defs:internal_shell.bzl", "inline_sh_test")
def _arch_test_impl(
name,
platform,
file_platform,
bazel_binaries = [],
system_binaries = [],
**kwargs):
"""
Bazel rule to verify that a Bazel or system binary is built for the
expected architecture.
Args:
name: the name of the test.
platform: a diagnostic name for this architecture.
file_platform: the expected output of `file`.
bazel_binaries: a set of binary targets to inspect.
system_binaries: a set of paths to system executables to inspect.
**kwargs: other keyword arguments that are passed to the test.
"""
inline_sh_test(
name = name,
tools = bazel_binaries,
cmd = """
for binary in "$(rootpaths %s) %s"; do
(file -L $$binary | grep -q "%s") \
|| (echo "Test binary is not an %s binary: "; file -L $$binary; exit 1)
done
""" % (" ".join(bazel_binaries),
" ".join(system_binaries),
file_platform,
platform),
target_compatible_with = select({
"//build_defs:"+platform: [],
"//conditions:default": ["@platforms//:incompatible"],
}),
)
def aarch64_test(**kwargs):
_arch_test_impl(
platform = "aarch64",
file_platform = "ELF 64-bit LSB executable, ARM aarch64",
**kwargs,
)
def x86_64_test(**kwargs):
_arch_test_impl(
platform = "x86_64",
file_platform = "ELF 64-bit LSB executable, ARM x86_64",
**kwargs,
)
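A minimal usage sketch with hypothetical target names; on other CPUs the generated test is skipped via the target_compatible_with select above:
aarch64_test(
    name = "protoc_is_aarch64",
    bazel_binaries = ["//:protoc"],
    system_binaries = ["/usr/bin/python3"],   # assumed system path
)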

View File

@ -0,0 +1,85 @@
def inline_sh_binary(
name,
srcs = [],
tools = [],
deps = [],
cmd = "",
testonly = None,
**kwargs):
"""Bazel rule to wrap up an inline bash script in a binary. This is most
useful as a stop-gap solution for migrating off Autotools. These binaries
are likely to be non-hermetic, with implicit system dependencies.
NOTE: the rule is only an internal workaround. The interface may change and
the rule may be removed when everything is properly "Bazelified".
Args:
name: the name of the inline_sh_binary.
srcs: the files used directly by the script.
tools: the executable tools used directly by the script. Any target used
with rootpath/execpath/location must be declared here or in `srcs`.
deps: a list of dependency labels that are required to run this binary.
**kwargs: other keyword arguments that are passed to sh_binary.
testonly: common rule attribute (see:
https://bazel.build/reference/be/common-definitions#common-attributes)
"""
native.genrule(
name = name + "_genrule",
srcs = srcs,
exec_tools = tools,
outs = [name + ".sh"],
cmd = "cat <<'EOF' >$(OUTS)\n#!/bin/bash -exu\n%s\nEOF\n" % cmd,
testonly = testonly,
visibility = ["//visibility:private"],
)
native.sh_binary(
name = name,
srcs = [name + "_genrule"],
data = srcs + tools + deps,
testonly = testonly,
**kwargs
)
def inline_sh_test(
name,
srcs = [],
tools = [],
deps = [],
cmd = "",
**kwargs):
"""Bazel rule to wrap up an inline bash script in a test. This is most
useful as a stop-gap solution for migrating off Autotools. These tests
are likely to be non-hermetic, with implicit system dependencies.
NOTE: the rule is only an internal workaround. The interface may change and
the rule may be removed when everything is properly "Bazelified".
Args:
name: the name of the inline_sh_test.
srcs: the files used directly by the script.
tools: the executable tools used directly by the script. Any target used
with rootpath/execpath/location must be declared here or in `srcs`.
deps: a list of dependency labels that are required to run this binary.
**kwargs: other keyword arguments that are passed to sh_test.
"""
native.genrule(
name = name + "_genrule",
srcs = srcs,
exec_tools = tools,
outs = [name + ".sh"],
cmd = "cat <<'EOF' >$(OUTS)\n#!/bin/bash -exu\n%s\nEOF\n" % cmd,
visibility = ["//visibility:private"],
)
native.sh_test(
name = name,
srcs = [name + "_genrule"],
data = srcs + tools + deps,
**kwargs
)
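A minimal usage sketch with hypothetical target names (any label referenced via rootpath must be listed in srcs or tools):
inline_sh_binary(
    name = "print_protoc_version",
    tools = ["//:protoc"],
    cmd = "$(rootpath //:protoc) --version",
)
inline_sh_test(
    name = "protoc_version_test",
    tools = ["//:protoc"],
    cmd = "$(rootpath //:protoc) --version | grep -q libprotoc",
)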

View File

@ -42,11 +42,11 @@ add_executable(conformance_cpp
target_include_directories(
conformance_test_runner
PUBLIC ${protobuf_SOURCE_DIR}/conformance)
PUBLIC ${protobuf_SOURCE_DIR} ${protobuf_SOURCE_DIR}/conformance)
target_include_directories(
conformance_cpp
PUBLIC ${protobuf_SOURCE_DIR}/conformance)
PUBLIC ${protobuf_SOURCE_DIR})
target_link_libraries(conformance_test_runner ${protobuf_LIB_PROTOBUF})
target_link_libraries(conformance_cpp ${protobuf_LIB_PROTOBUF})

View File

@ -62,43 +62,6 @@ foreach(_header ${protobuf_HEADERS})
RENAME "${_extract_name}")
endforeach()
# Internal function for parsing auto tools scripts
function(_protobuf_auto_list FILE_NAME VARIABLE)
file(STRINGS ${FILE_NAME} _strings)
set(_list)
foreach(_string ${_strings})
set(_found)
string(REGEX MATCH "^[ \t]*${VARIABLE}[ \t]*=[ \t]*" _found "${_string}")
if(_found)
string(LENGTH "${_found}" _length)
string(SUBSTRING "${_string}" ${_length} -1 _draft_list)
foreach(_item ${_draft_list})
string(STRIP "${_item}" _item)
list(APPEND _list "${_item}")
endforeach()
endif()
endforeach()
set(${VARIABLE} ${_list} PARENT_SCOPE)
endfunction()
# Install well-known type proto files
_protobuf_auto_list("${protobuf_SOURCE_DIR}/src/Makefile.am" nobase_dist_proto_DATA)
foreach(_file ${nobase_dist_proto_DATA})
get_filename_component(_file_from "${protobuf_SOURCE_DIR}/src/${_file}" ABSOLUTE)
get_filename_component(_file_name ${_file} NAME)
get_filename_component(_dir ${_file} DIRECTORY)
if(EXISTS "${_file_from}")
install(FILES "${_file_from}"
DESTINATION "${CMAKE_INSTALL_INCLUDEDIR}/${_dir}"
COMPONENT protobuf-protos
RENAME "${_file_name}")
else()
message(AUTHOR_WARNING "The file \"${_file_from}\" is listed in "
"\"${protobuf_SOURCE_DIR}/src/Makefile.am\" as nobase_dist_proto_DATA "
"but there not exists. The file will not be installed.")
endif()
endforeach()
# Install configuration
set(_install_cmakedir_desc "Directory relative to CMAKE_INSTALL to install the cmake configuration files")
set(_build_cmakedir_desc "Directory relative to CMAKE_CURRENT_BINARY_DIR for cmake configuration files")

View File

@ -1,247 +0,0 @@
## Process this file with autoconf to produce configure.
## In general, the safest way to proceed is to run ./autogen.sh
AC_PREREQ(2.59)
# Note: If you change the version, you must also update it in:
# * Protobuf.podspec
# * csharp/Google.Protobuf.Tools.nuspec
# * csharp/src/*/AssemblyInfo.cs
# * csharp/src/Google.Protobuf/Google.Protobuf.nuspec
# * java/*/pom.xml
# * python/google/protobuf/__init__.py
# * protoc-artifacts/pom.xml
# * src/google/protobuf/stubs/common.h
# * src/Makefile.am (Update -version-info for LDFLAGS if needed)
#
# In the SVN trunk, the version should always be the next anticipated release
# version with the "-pre" suffix. (We used to use "-SNAPSHOT" but this pushed
# the size of one file name in the dist tarfile over the 99-char limit.)
AC_INIT([Protocol Buffers],[3.21.5],[protobuf@googlegroups.com],[protobuf])
AM_MAINTAINER_MODE([enable])
AC_CONFIG_SRCDIR(src/google/protobuf/message.cc)
# The config file is generated but not used by the source code, since we only
# need very few of them, e.g. HAVE_PTHREAD and HAVE_ZLIB. Those macros are
# passed down in CXXFLAGS manually in src/Makefile.am
AC_CONFIG_HEADERS([config.h])
AC_CONFIG_MACRO_DIR([m4])
AC_ARG_VAR(DIST_LANG, [language to include in the distribution package (i.e., make dist)])
case "$DIST_LANG" in
"") DIST_LANG=all ;;
all | cpp | csharp | java | python | javanano | objectivec | ruby | php) ;;
*) AC_MSG_FAILURE([unknown language: $DIST_LANG]) ;;
esac
AC_SUBST(DIST_LANG)
# autoconf's default CXXFLAGS are usually "-g -O2". These aren't necessarily
# the best choice for libprotobuf.
AS_IF([test "x${ac_cv_env_CFLAGS_set}" = "x"],
[CFLAGS=""])
AS_IF([test "x${ac_cv_env_CXXFLAGS_set}" = "x"],
[CXXFLAGS=""])
AC_CANONICAL_TARGET
AM_INIT_AUTOMAKE([1.9 tar-ustar subdir-objects])
# Silent rules enabled: the output is minimal but informative.
# In particular, the warnings from the compiler stick out very clearly.
# To see all logs, use the --disable-silent-rules on configure or via make V=1
AM_SILENT_RULES([yes])
AC_ARG_WITH([zlib],
[AS_HELP_STRING([--with-zlib],
[include classes for streaming compressed data in and out @<:@default=check@:>@])],
[],[with_zlib=check])
AC_ARG_WITH([zlib-include],
[AS_HELP_STRING([--with-zlib-include=PATH],
[zlib include directory])],
[CPPFLAGS="-I$withval $CPPFLAGS"])
AC_ARG_WITH([zlib-lib],
[AS_HELP_STRING([--with-zlib-lib=PATH],
[zlib lib directory])],
[LDFLAGS="-L$withval $LDFLAGS"])
AC_ARG_WITH([protoc],
[AS_HELP_STRING([--with-protoc=COMMAND],
[use the given protoc command instead of building a new one when building tests (useful for cross-compiling)])],
[],[with_protoc=no])
# Checks for programs.
AC_PROG_CC
AC_PROG_CXX
AC_PROG_CXX_FOR_BUILD
AC_LANG([C++])
ACX_USE_SYSTEM_EXTENSIONS
m4_ifdef([AM_PROG_AR], [AM_PROG_AR])
AM_CONDITIONAL(GCC, test "$GCC" = yes) # let the Makefile know if we're gcc
AS_CASE([$target_os], [darwin*], [AC_PROG_OBJC], [AM_CONDITIONAL([am__fastdepOBJC], [false])])
# test_util.cc takes forever to compile with GCC and optimization turned on.
AC_MSG_CHECKING([C++ compiler flags...])
AS_IF([test "x${ac_cv_env_CXXFLAGS_set}" = "x"],[
AS_IF([test "$GCC" = "yes"],[
PROTOBUF_OPT_FLAG="-O2"
CXXFLAGS="${CXXFLAGS} -g"
])
# Protocol Buffers contains several checks that are intended to be used only
# for debugging and which might hurt performance. Most users are probably
# end users who don't want these checks, so add -DNDEBUG by default.
CXXFLAGS="$CXXFLAGS -std=c++11 -DNDEBUG"
AC_MSG_RESULT([use default: $PROTOBUF_OPT_FLAG $CXXFLAGS])
],[
AC_MSG_RESULT([use user-supplied: $CXXFLAGS])
])
AC_SUBST(PROTOBUF_OPT_FLAG)
ACX_CHECK_SUNCC
# Have to do libtool after SUNCC, other wise it "helpfully" adds Crun Cstd
# to the link
AC_PROG_LIBTOOL
# Check whether the linker supports version scripts
AC_MSG_CHECKING([whether the linker supports version scripts])
save_LDFLAGS=$LDFLAGS
LDFLAGS="$LDFLAGS -Wl,--version-script=conftest.map"
cat > conftest.map <<EOF
{
global:
main;
local:
*;
};
EOF
AC_LINK_IFELSE(
[AC_LANG_SOURCE([int main() { return 0; }])],
[have_ld_version_script=yes; AC_MSG_RESULT(yes)],
[have_ld_version_script=no; AC_MSG_RESULT(no)])
LDFLAGS=$save_LDFLAGS
AM_CONDITIONAL([HAVE_LD_VERSION_SCRIPT], [test "$have_ld_version_script" = "yes"])
# Checks for header files.
AC_HEADER_STDC
AC_CHECK_HEADERS([fcntl.h inttypes.h limits.h stdlib.h unistd.h])
# Checks for library functions.
AC_FUNC_MEMCMP
AC_FUNC_STRTOD
AC_CHECK_FUNCS([ftruncate memset mkdir strchr strerror strtol])
# Check for zlib.
HAVE_ZLIB=0
AS_IF([test "$with_zlib" != no], [
AC_MSG_CHECKING([zlib version])
# First check the zlib header version.
AC_COMPILE_IFELSE(
[AC_LANG_PROGRAM([[
#include <zlib.h>
#if !defined(ZLIB_VERNUM) || (ZLIB_VERNUM < 0x1204)
# error zlib version too old
#endif
]], [])], [
AC_MSG_RESULT([ok (1.2.0.4 or later)])
# Also need to add -lz to the linker flags and make sure this succeeds.
AC_SEARCH_LIBS([zlibVersion], [z], [
AC_DEFINE([HAVE_ZLIB], [1], [Enable classes using zlib compression.])
HAVE_ZLIB=1
], [
AS_IF([test "$with_zlib" != check], [
AC_MSG_FAILURE([--with-zlib was given, but no working zlib library was found])
])
])
], [
AS_IF([test "$with_zlib" = check], [
AC_MSG_RESULT([headers missing or too old (requires 1.2.0.4)])
], [
AC_MSG_FAILURE([--with-zlib was given, but zlib headers were not present or were too old (requires 1.2.0.4)])
])
])
])
AM_CONDITIONAL([HAVE_ZLIB], [test $HAVE_ZLIB = 1])
# Add -std=c++11 if necesssary. It is important for us to do this before the
# libatomic check below, since that also depends on C++11.
AX_CXX_COMPILE_STDCXX([11], [noext], [mandatory])
dnl On some platforms, std::atomic needs a helper library
AC_MSG_CHECKING(whether -latomic is needed)
AC_LINK_IFELSE([AC_LANG_SOURCE([[
#include <atomic>
#include <cstdint>
std::atomic<std::int64_t> v;
int main() {
return v;
}
]])], STD_ATOMIC_NEED_LIBATOMIC=no, STD_ATOMIC_NEED_LIBATOMIC=yes)
AC_MSG_RESULT($STD_ATOMIC_NEED_LIBATOMIC)
if test "x$STD_ATOMIC_NEED_LIBATOMIC" = xyes; then
LIBATOMIC_LIBS="-latomic"
fi
AC_SUBST([LIBATOMIC_LIBS])
AS_IF([test "$with_protoc" != "no"], [
PROTOC=$with_protoc
AS_IF([test "$with_protoc" = "yes"], [
# No argument given. Use system protoc.
PROTOC=protoc
])
AS_IF([echo "$PROTOC" | grep -q '^@<:@^/@:>@.*/'], [
# Does not start with a slash, but contains a slash. So, it's a relative
# path (as opposed to an absolute path or an executable in $PATH).
# Since it will actually be executed from the src directory, prefix with
# the current directory. We also insert $ac_top_build_prefix in case this
# is a nested package and --with-protoc was actually given on the outer
# package's configure script.
PROTOC=`pwd`/${ac_top_build_prefix}$PROTOC
])
AC_SUBST([PROTOC])
])
AM_CONDITIONAL([USE_EXTERNAL_PROTOC], [test "$with_protoc" != "no"])
AX_PTHREAD
AM_CONDITIONAL([HAVE_PTHREAD], [test "x$ax_pthread_ok" = "xyes"])
# We still keep this for improving pbconfig.h for unsupported platforms.
AC_CXX_STL_HASH
# Enable ObjC support for conformance directory on OS X.
OBJC_CONFORMANCE_TEST=0
case "$target_os" in
darwin*)
OBJC_CONFORMANCE_TEST=1
;;
esac
AM_CONDITIONAL([OBJC_CONFORMANCE_TEST], [test $OBJC_CONFORMANCE_TEST = 1])
AC_MSG_CHECKING(whether -llog is needed)
ANDROID_TEST=no
case "$target_os" in
*android*)
ANDROID_TEST=yes
;;
esac
AC_MSG_RESULT($ANDROID_TEST)
if test "x$ANDROID_TEST" = xyes; then
LIBLOG_LIBS="-llog"
fi
AC_SUBST([LIBLOG_LIBS])
# HACK: Make gmock's configure script pick up our copy of CFLAGS and CXXFLAGS,
# since the flags added by ACX_CHECK_SUNCC must be used when compiling gmock
# too.
export CFLAGS
export CXXFLAGS
AC_CONFIG_SUBDIRS([third_party/googletest])
AC_CONFIG_FILES([Makefile src/Makefile benchmarks/Makefile conformance/Makefile protobuf.pc protobuf-lite.pc])
AC_OUTPUT

View File

@ -1,6 +1,9 @@
# Conformance testing for Protobuf.
load("@rules_cc//cc:defs.bzl", "cc_binary", "cc_library", "cc_proto_library")
load("@rules_cc//cc:defs.bzl", "cc_binary", "cc_library", "cc_proto_library", "objc_library")
load("@rules_python//python:defs.bzl", "py_library")
load("//:protobuf.bzl", "internal_py_proto_library", "internal_php_proto_library", "internal_csharp_proto_library", "internal_ruby_proto_library", "internal_objc_proto_library")
load("//build_defs:internal_shell.bzl", "inline_sh_binary")
load(
"@rules_pkg//:mappings.bzl",
"pkg_attributes",
@ -11,20 +14,37 @@ load(
exports_files([
"conformance_test_runner.sh",
"failure_list_cpp.txt",
"failure_list_csharp.txt",
"failure_list_java.txt",
"failure_list_java_lite.txt",
"failure_list_objc.txt",
"failure_list_php.txt",
"failure_list_php_c.txt",
"failure_list_python.txt",
"failure_list_python_cpp.txt",
"failure_list_ruby.txt",
"failure_list_jruby.txt",
"text_format_failure_list_cpp.txt",
"text_format_failure_list_csharp.txt",
"text_format_failure_list_java.txt",
"text_format_failure_list_java_lite.txt",
"text_format_failure_list_php.txt",
"text_format_failure_list_php_c.txt",
"text_format_failure_list_python.txt",
"text_format_failure_list_python_cpp.txt",
"text_format_failure_list_ruby.txt",
"text_format_failure_list_jruby.txt",
])
cc_proto_library(
name = "test_messages_proto2_proto_cc",
deps = ["//:test_messages_proto2_proto"],
deps = ["//src/google/protobuf:test_messages_proto2_proto"],
)
cc_proto_library(
name = "test_messages_proto3_proto_cc",
deps = ["//:test_messages_proto3_proto"],
deps = ["//src/google/protobuf:test_messages_proto3_proto"],
)
proto_library(
@ -34,10 +54,18 @@ proto_library(
)
cc_proto_library(
name = "conformance_proto_cc",
name = "conformance_cc_proto",
deps = [":conformance_proto"],
)
internal_csharp_proto_library(
name = "conformance_csharp_proto",
srcs = ["conformance.proto"],
visibility = [
"//csharp:__subpackages__",
],
)
java_proto_library(
name = "conformance_java_proto",
visibility = [
@ -54,6 +82,51 @@ java_lite_proto_library(
deps = [":conformance_proto"],
)
internal_objc_proto_library(
name = "conformance_objc_proto",
srcs = ["conformance.proto"],
visibility = [
"//conformance:__pkg__",
"//objc:__pkg__",
],
)
internal_py_proto_library(
name = "conformance_py_proto",
srcs = ["conformance.proto"],
visibility = [
"//python:__subpackages__",
],
srcs_version = "PY2AND3",
)
internal_php_proto_library(
name = "conformance_php_proto",
srcs = ["conformance.proto"],
outs = [
"Conformance/ConformanceRequest.php",
"Conformance/ConformanceResponse.php",
"Conformance/FailureSet.php",
"Conformance/JspbEncodingConfig.php",
"Conformance/TestCategory.php",
"Conformance/WireFormat.php",
"GPBMetadata/Conformance.php",
],
visibility = [
"//conformance:__pkg__",
"//php:__pkg__",
],
)
internal_ruby_proto_library(
name = "conformance_ruby_proto",
srcs = ["conformance.proto"],
visibility = [
"//conformance:__pkg__",
"//ruby:__pkg__",
],
)
cc_library(
name = "jsoncpp",
srcs = ["third_party/jsoncpp/jsoncpp.cpp"],
@ -71,7 +144,7 @@ cc_library(
"conformance_test.h",
],
includes = ["."],
deps = [":conformance_proto_cc"],
deps = [":conformance_cc_proto"],
)
cc_library(
@ -108,13 +181,23 @@ cc_binary(
],
)
cc_binary(
name = "conformance_cpp",
srcs = ["conformance_cpp.cc"],
deps = [
":conformance_cc_proto",
"//:protobuf",
"//:test_messages_proto2_cc_proto",
"//:test_messages_proto3_cc_proto",
],
testonly = 1,
visibility = ["//src:__subpackages__"],
)
java_binary(
name = "conformance_java",
srcs = ["ConformanceJava.java"],
main_class = "ConformanceJava",
visibility = [
"//java:__subpackages__",
],
deps = [
":conformance_java_proto",
"//:protobuf_java",
@ -122,11 +205,14 @@ java_binary(
"//:test_messages_proto2_java_proto",
"//:test_messages_proto3_java_proto",
],
testonly = 1,
visibility = [
"//java:__subpackages__",
],
)
java_binary(
name = "conformance_java_lite",
srcs = ["ConformanceJavaLite.java"],
main_class = "ConformanceJavaLite",
visibility = [
"//java:__subpackages__",
@ -138,8 +224,116 @@ java_binary(
"//:test_messages_proto2_java_proto_lite",
"//:test_messages_proto3_java_proto_lite",
],
testonly = 1,
srcs = ["ConformanceJavaLite.java"],
)
py_binary(
name = "conformance_python",
srcs = ["conformance_python.py"],
deps = [
":conformance_py_proto",
"//:protobuf_python",
"//python:test_messages_proto2_py_proto",
"//python:test_messages_proto3_py_proto",
],
imports = [".", "../python"],
srcs_version = "PY2AND3",
testonly = 1,
visibility = ["//python:__subpackages__"],
)
inline_sh_binary(
name = "conformance_php",
srcs = [
"conformance_php.php",
"autoload.php",
],
deps = [
":conformance_php_proto",
"//php:source_files",
"//:test_messages_proto3_php_proto",
],
cmd = """
php -d include_path=conformance:src/google/protobuf \\
-d auto_prepend_file=$(rootpath autoload.php) \\
$(rootpath conformance_php.php)
""",
testonly = 1,
visibility = ["//php:__subpackages__"],
)
inline_sh_binary(
name = "conformance_php_c",
srcs = [
"conformance_php.php",
"//php:extension",
],
deps = [
":conformance_php_proto",
"//:test_messages_proto3_php_proto",
],
cmd = """
php -dextension=$(rootpath //php:extension) \\
-d include_path=conformance:src/google/protobuf \\
$(rootpath conformance_php.php)
""",
testonly = 1,
visibility = ["//php:__subpackages__"],
)
inline_sh_binary(
name = "conformance_csharp",
srcs = ["//csharp/src/Google.Protobuf.Conformance:conformance_dll"],
deps = [
"//csharp/src/Google.Protobuf.Conformance:conformance_runfiles",
],
cmd = "dotnet $(rootpath //csharp/src/Google.Protobuf.Conformance:conformance_dll)",
testonly = 1,
visibility = ["//csharp:__subpackages__"],
)
objc_library(
name = "conformance_objc_lib",
non_arc_srcs = ["conformance_objc.m"],
deps = [
":conformance_objc_proto",
"//:test_messages_proto2_objc_proto",
"//:test_messages_proto3_objc_proto",
],
testonly = 1,
# See https://github.com/bazelbuild/bazel/issues/12897.
tags = ["manual"],
)
cc_binary(
name = "conformance_objc",
deps = [":conformance_objc_lib"],
testonly = 1,
# See https://github.com/bazelbuild/bazel/issues/12897.
tags = ["manual"],
visibility = ["//objectivec:__subpackages__"],
)
inline_sh_binary(
name = "conformance_ruby",
srcs = ["conformance_ruby.rb"],
deps = [
":conformance_ruby_proto",
"//:well_known_ruby_protos",
"//:test_messages_proto2_ruby_proto",
"//:test_messages_proto3_ruby_proto",
"//ruby:protobuf",
],
cmd = "RUBYLIB=ruby/lib:conformance:src $(rootpath conformance_ruby.rb)",
testonly = 1,
visibility = ["//ruby:__subpackages__"],
)
################################################################################
# Distribution files
################################################################################
filegroup(
name = "all_files",
srcs = glob(["**/*"]),
@ -156,7 +350,6 @@ pkg_files(
# The following are not in autotools dist:
"autoload.php",
"conformance_nodejs.js",
"failure_list_jruby.txt",
"update_failure_list.py",
],

View File

@ -1,372 +0,0 @@
## Process this file with automake to produce Makefile.in
conformance_protoc_inputs = \
conformance.proto \
$(top_srcdir)/src/google/protobuf/test_messages_proto3.proto
# proto2 input files, which should be kept separate from proto3, as we
# can't generate proto2 files for php.
conformance_proto2_protoc_inputs = \
$(top_srcdir)/src/google/protobuf/test_messages_proto2.proto
well_known_type_protoc_inputs = \
$(top_srcdir)/src/google/protobuf/any.proto \
$(top_srcdir)/src/google/protobuf/duration.proto \
$(top_srcdir)/src/google/protobuf/field_mask.proto \
$(top_srcdir)/src/google/protobuf/struct.proto \
$(top_srcdir)/src/google/protobuf/timestamp.proto \
$(top_srcdir)/src/google/protobuf/wrappers.proto
protoc_outputs = \
conformance.pb.cc \
conformance.pb.h
other_language_protoc_outputs = \
conformance_pb2.py \
Conformance.pbobjc.h \
Conformance.pbobjc.m \
conformance_pb.js \
conformance_pb.rb \
com/google/protobuf/Any.java \
com/google/protobuf/AnyOrBuilder.java \
com/google/protobuf/AnyProto.java \
com/google/protobuf/BoolValue.java \
com/google/protobuf/BoolValueOrBuilder.java \
com/google/protobuf/BytesValue.java \
com/google/protobuf/BytesValueOrBuilder.java \
com/google/protobuf/conformance/Conformance.java \
com/google/protobuf/DoubleValue.java \
com/google/protobuf/DoubleValueOrBuilder.java \
com/google/protobuf/Duration.java \
com/google/protobuf/DurationOrBuilder.java \
com/google/protobuf/DurationProto.java \
com/google/protobuf/FieldMask.java \
com/google/protobuf/FieldMaskOrBuilder.java \
com/google/protobuf/FieldMaskProto.java \
com/google/protobuf/FloatValue.java \
com/google/protobuf/FloatValueOrBuilder.java \
com/google/protobuf/Int32Value.java \
com/google/protobuf/Int32ValueOrBuilder.java \
com/google/protobuf/Int64Value.java \
com/google/protobuf/Int64ValueOrBuilder.java \
com/google/protobuf/ListValue.java \
com/google/protobuf/ListValueOrBuilder.java \
com/google/protobuf/NullValue.java \
com/google/protobuf/StringValue.java \
com/google/protobuf/StringValueOrBuilder.java \
com/google/protobuf/Struct.java \
com/google/protobuf/StructOrBuilder.java \
com/google/protobuf/StructProto.java \
com/google/protobuf/Timestamp.java \
com/google/protobuf/TimestampOrBuilder.java \
com/google/protobuf/TimestampProto.java \
com/google/protobuf/UInt32Value.java \
com/google/protobuf/UInt32ValueOrBuilder.java \
com/google/protobuf/UInt64Value.java \
com/google/protobuf/UInt64ValueOrBuilder.java \
com/google/protobuf/Value.java \
com/google/protobuf/ValueOrBuilder.java \
com/google/protobuf/WrappersProto.java \
com/google/protobuf_test_messages/proto3/TestMessagesProto3.java \
com/google/protobuf_test_messages/proto2/TestMessagesProto2.java \
google/protobuf/any.pb.cc \
google/protobuf/any.pb.h \
google/protobuf/any.rb \
google/protobuf/any_pb2.py \
google/protobuf/duration.pb.cc \
google/protobuf/duration.pb.h \
google/protobuf/duration.rb \
google/protobuf/duration_pb2.py \
google/protobuf/field_mask.pb.cc \
google/protobuf/field_mask.pb.h \
google/protobuf/field_mask.rb \
google/protobuf/field_mask_pb2.py \
google/protobuf/struct.pb.cc \
google/protobuf/struct.pb.h \
google/protobuf/struct.rb \
google/protobuf/struct_pb2.py \
google/protobuf/TestMessagesProto2.pbobjc.h \
google/protobuf/TestMessagesProto2.pbobjc.m \
google/protobuf/TestMessagesProto3.pbobjc.h \
google/protobuf/TestMessagesProto3.pbobjc.m \
google/protobuf/test_messages_proto3.pb.cc \
google/protobuf/test_messages_proto3.pb.h \
google/protobuf/test_messages_proto2.pb.cc \
google/protobuf/test_messages_proto2.pb.h \
google/protobuf/test_messages_proto3_pb.rb \
google/protobuf/test_messages_proto3_pb2.py \
google/protobuf/test_messages_proto2_pb2.py \
google/protobuf/timestamp.pb.cc \
google/protobuf/timestamp.pb.h \
google/protobuf/timestamp.rb \
google/protobuf/timestamp_pb2.py \
google/protobuf/wrappers.pb.cc \
google/protobuf/wrappers.pb.h \
google/protobuf/wrappers.rb \
google/protobuf/wrappers_pb2.py \
Conformance/ConformanceRequest.php \
Conformance/ConformanceResponse.php \
Conformance/FailureSet.php \
Conformance/WireFormat.php \
GPBMetadata/Conformance.php \
GPBMetadata/Google/Protobuf/Any.php \
GPBMetadata/Google/Protobuf/Duration.php \
GPBMetadata/Google/Protobuf/FieldMask.php \
GPBMetadata/Google/Protobuf/Struct.php \
GPBMetadata/Google/Protobuf/TestMessagesProto3.php \
GPBMetadata/Google/Protobuf/Timestamp.php \
GPBMetadata/Google/Protobuf/Wrappers.php \
Google/Protobuf/Any.php \
Google/Protobuf/BoolValue.php \
Google/Protobuf/BytesValue.php \
Google/Protobuf/DoubleValue.php \
Google/Protobuf/Duration.php \
Google/Protobuf/FieldMask.php \
Google/Protobuf/FloatValue.php \
Google/Protobuf/Int32Value.php \
Google/Protobuf/Int64Value.php \
Google/Protobuf/ListValue.php \
Google/Protobuf/NullValue.php \
Google/Protobuf/StringValue.php \
Google/Protobuf/Struct.php \
Google/Protobuf/Timestamp.php \
Google/Protobuf/UInt32Value.php \
Google/Protobuf/UInt64Value.php \
Google/Protobuf/Value.php \
Protobuf_test_messages/Proto3/ForeignEnum.php \
Protobuf_test_messages/Proto3/ForeignMessage.php \
Protobuf_test_messages/Proto3/TestAllTypes_NestedEnum.php \
Protobuf_test_messages/Proto3/TestAllTypes_NestedMessage.php \
Protobuf_test_messages/Proto3/TestAllTypes.php
# lite/com/google/protobuf/Any.java \
# lite/com/google/protobuf/AnyOrBuilder.java \
# lite/com/google/protobuf/AnyProto.java \
# lite/com/google/protobuf/BoolValue.java \
# lite/com/google/protobuf/BoolValueOrBuilder.java \
# lite/com/google/protobuf/BytesValue.java \
# lite/com/google/protobuf/BytesValueOrBuilder.java \
# lite/com/google/protobuf/conformance/Conformance.java \
# lite/com/google/protobuf/DoubleValue.java \
# lite/com/google/protobuf/DoubleValueOrBuilder.java \
# lite/com/google/protobuf/Duration.java \
# lite/com/google/protobuf/DurationOrBuilder.java \
# lite/com/google/protobuf/DurationProto.java \
# lite/com/google/protobuf/FieldMask.java \
# lite/com/google/protobuf/FieldMaskOrBuilder.java \
# lite/com/google/protobuf/FieldMaskProto.java \
# lite/com/google/protobuf/FloatValue.java \
# lite/com/google/protobuf/FloatValueOrBuilder.java \
# lite/com/google/protobuf/Int32Value.java \
# lite/com/google/protobuf/Int32ValueOrBuilder.java \
# lite/com/google/protobuf/Int64Value.java \
# lite/com/google/protobuf/Int64ValueOrBuilder.java \
# lite/com/google/protobuf/ListValue.java \
# lite/com/google/protobuf/ListValueOrBuilder.java \
# lite/com/google/protobuf/NullValue.java \
# lite/com/google/protobuf/StringValue.java \
# lite/com/google/protobuf/StringValueOrBuilder.java \
# lite/com/google/protobuf/Struct.java \
# lite/com/google/protobuf/StructOrBuilder.java \
# lite/com/google/protobuf/StructProto.java \
# lite/com/google/protobuf/Timestamp.java \
# lite/com/google/protobuf/TimestampOrBuilder.java \
# lite/com/google/protobuf/TimestampProto.java \
# lite/com/google/protobuf/UInt32Value.java \
# lite/com/google/protobuf/UInt32ValueOrBuilder.java \
# lite/com/google/protobuf/UInt64Value.java \
# lite/com/google/protobuf/UInt64ValueOrBuilder.java \
# lite/com/google/protobuf/Value.java \
# lite/com/google/protobuf/ValueOrBuilder.java \
# lite/com/google/protobuf/WrappersProto.java
bin_PROGRAMS = conformance-test-runner conformance-cpp
# All source files except C++/Objective-C ones should be explicitly listed
# here because the autoconf tools don't include files of other languages
# automatically.
EXTRA_DIST = \
ConformanceJava.java \
ConformanceJavaLite.java \
README.md \
conformance.proto \
conformance_python.py \
conformance_ruby.rb \
conformance_php.php \
failure_list_cpp.txt \
failure_list_csharp.txt \
failure_list_java.txt \
failure_list_js.txt \
failure_list_objc.txt \
failure_list_python.txt \
failure_list_python_cpp.txt \
failure_list_python-post26.txt \
failure_list_ruby.txt \
failure_list_php.txt \
failure_list_php_c.txt
conformance_test_runner_LDADD = $(top_srcdir)/src/libprotobuf.la
conformance_test_runner_SOURCES = conformance_test.h conformance_test.cc \
conformance_test_main.cc \
binary_json_conformance_suite.h \
binary_json_conformance_suite.cc \
text_format_conformance_suite.h \
text_format_conformance_suite.cc \
conformance_test_runner.cc \
third_party/jsoncpp/json.h \
third_party/jsoncpp/jsoncpp.cpp
nodist_conformance_test_runner_SOURCES = conformance.pb.cc google/protobuf/test_messages_proto3.pb.cc google/protobuf/test_messages_proto2.pb.cc
conformance_test_runner_CPPFLAGS = -I$(top_srcdir)/src -I$(srcdir)
conformance_test_runner_CXXFLAGS = -std=c++11
# Explicit deps because BUILT_SOURCES are only done before a "make all/check"
# so a direct "make test_cpp" could fail if parallel enough.
conformance_test_runner-conformance_test.$(OBJEXT): conformance.pb.h
conformance_test_runner-conformance_test_runner.$(OBJEXT): conformance.pb.h
conformance_cpp_LDADD = $(top_srcdir)/src/libprotobuf.la
conformance_cpp_SOURCES = conformance_cpp.cc
nodist_conformance_cpp_SOURCES = conformance.pb.cc google/protobuf/test_messages_proto3.pb.cc google/protobuf/test_messages_proto2.pb.cc
conformance_cpp_CPPFLAGS = -I$(top_srcdir)/src
# Explicit dep because BUILT_SOURCES are only done before a "make all/check"
# so a direct "make test_cpp" could fail if parallel enough.
conformance_cpp-conformance_cpp.$(OBJEXT): conformance.pb.h
if OBJC_CONFORMANCE_TEST
bin_PROGRAMS += conformance-objc
conformance_objc_SOURCES = conformance_objc.m ../objectivec/GPBProtocolBuffers.m
nodist_conformance_objc_SOURCES = Conformance.pbobjc.m google/protobuf/TestMessagesProto2.pbobjc.m google/protobuf/TestMessagesProto3.pbobjc.m
# On travis, the build fails without the isysroot because whatever system
# headers are being found don't include generics support for
# NSArray/NSDictionary; the only guess is that their image at one time had an odd
# setup for Xcode and old frameworks are being found.
conformance_objc_CPPFLAGS = -I$(top_srcdir)/objectivec -isysroot `xcrun --sdk macosx --show-sdk-path`
conformance_objc_LDFLAGS = -framework Foundation
# Explicit dep because BUILT_SOURCES are only done before a "make all/check"
# so a direct "make test_objc" could fail if parallel enough.
conformance_objc-conformance_objc.$(OBJEXT): Conformance.pbobjc.h google/protobuf/TestMessagesProto2.pbobjc.h google/protobuf/TestMessagesProto3.pbobjc.h
endif
# JavaScript well-known types are expected to be in a directory called
# google-protobuf, because they are usually in the google-protobuf npm
# package. But we want to use the sources from our tree, so we recreate
# that directory structure here.
google-protobuf:
mkdir google-protobuf
if USE_EXTERNAL_PROTOC
# Some implementations include pre-generated versions of well-known types.
protoc_middleman: $(conformance_protoc_inputs) $(conformance_proto2_protoc_inputs) $(well_known_type_protoc_inputs) google-protobuf
$(PROTOC) -I$(srcdir) -I$(top_srcdir) --cpp_out=. --java_out=. --ruby_out=. --objc_out=. --python_out=. --php_out=. $(conformance_protoc_inputs)
$(PROTOC) -I$(srcdir) -I$(top_srcdir) --cpp_out=. --java_out=. --ruby_out=. --objc_out=. --python_out=. $(conformance_proto2_protoc_inputs)
$(PROTOC) -I$(srcdir) -I$(top_srcdir) --cpp_out=. --java_out=. --ruby_out=. --python_out=. $(well_known_type_protoc_inputs)
## $(PROTOC) -I$(srcdir) -I$(top_srcdir) --java_out=lite:lite $(conformance_protoc_inputs) $(well_known_type_protoc_inputs)
touch protoc_middleman
else
# We have to cd to $(srcdir) before executing protoc because $(protoc_inputs) is
# relative to srcdir, which may not be the same as the current directory when
# building out-of-tree.
protoc_middleman: $(top_srcdir)/src/protoc$(EXEEXT) $(conformance_protoc_inputs) $(conformance_proto2_protoc_inputs) $(well_known_type_protoc_inputs) google-protobuf
oldpwd=`pwd` && ( cd $(srcdir) && $$oldpwd/../src/protoc$(EXEEXT) -I. -I$(top_srcdir)/src --cpp_out=$$oldpwd --java_out=$$oldpwd --ruby_out=$$oldpwd --objc_out=$$oldpwd --python_out=$$oldpwd --php_out=$$oldpwd $(conformance_protoc_inputs) )
oldpwd=`pwd` && ( cd $(srcdir) && $$oldpwd/../src/protoc$(EXEEXT) -I. -I$(top_srcdir)/src --cpp_out=$$oldpwd --java_out=$$oldpwd --ruby_out=$$oldpwd --objc_out=$$oldpwd --python_out=$$oldpwd $(conformance_proto2_protoc_inputs) )
oldpwd=`pwd` && ( cd $(srcdir) && $$oldpwd/../src/protoc$(EXEEXT) -I. -I$(top_srcdir)/src --cpp_out=$$oldpwd --java_out=$$oldpwd --ruby_out=$$oldpwd --python_out=$$oldpwd $(well_known_type_protoc_inputs) )
## @mkdir -p lite
## oldpwd=`pwd` && ( cd $(srcdir) && $$oldpwd/../src/protoc$(EXEEXT) -I. -I$(top_srcdir)/src --java_out=lite:$$oldpwd/lite $(conformance_protoc_inputs) $(well_known_type_protoc_inputs) )
touch protoc_middleman
endif
$(protoc_outputs): protoc_middleman
$(other_language_protoc_outputs): protoc_middleman
CLEANFILES = $(protoc_outputs) protoc_middleman javac_middleman conformance-java javac_middleman_lite conformance-java-lite conformance-csharp conformance-php conformance-php-c $(other_language_protoc_outputs)
MAINTAINERCLEANFILES = \
Makefile.in
javac_middleman: ConformanceJava.java protoc_middleman $(other_language_protoc_outputs)
jar=`ls ../java/util/target/*jar-with-dependencies.jar` && javac -classpath ../java/target/classes:$$jar ConformanceJava.java com/google/protobuf/conformance/Conformance.java com/google/protobuf_test_messages/proto3/TestMessagesProto3.java com/google/protobuf_test_messages/proto2/TestMessagesProto2.java
@touch javac_middleman
conformance-java: javac_middleman
@echo "Writing shortcut script conformance-java..."
@echo '#! /bin/sh' > conformance-java
@jar=`ls ../java/util/target/*jar-with-dependencies.jar` && echo java -classpath .:../java/target/classes:$$jar ConformanceJava '$$@' >> conformance-java
@chmod +x conformance-java
javac_middleman_lite: ConformanceJavaLite.java protoc_middleman $(other_language_protoc_outputs)
javac -classpath ../java/lite/target/classes:lite ConformanceJavaLite.java lite/com/google/protobuf/conformance/Conformance.java
@touch javac_middleman_lite
conformance-java-lite: javac_middleman_lite
@echo "Writing shortcut script conformance-java-lite..."
@echo '#! /bin/sh' > conformance-java-lite
@echo java -classpath .:../java/lite/target/classes:lite ConformanceJavaLite '$$@' >> conformance-java-lite
@chmod +x conformance-java-lite
# Currently the conformance code is alongside the rest of the C#
# source, as it's easier to maintain there. We assume we've already
# built that, so we just need a script to run it.
conformance-csharp: $(other_language_protoc_outputs)
@echo "Writing shortcut script conformance-csharp..."
@echo '#! /bin/sh' > conformance-csharp
@echo 'dotnet ../csharp/src/Google.Protobuf.Conformance/bin/Release/netcoreapp3.1/Google.Protobuf.Conformance.dll "$$@"' >> conformance-csharp
@chmod +x conformance-csharp
conformance-php:
@echo "Writing shortcut script conformance-php..."
@echo '#! /bin/sh' > conformance-php
@echo 'php -d auto_prepend_file=autoload.php ./conformance_php.php' >> conformance-php
@chmod +x conformance-php
conformance-php-c:
@echo "Writing shortcut script conformance-php-c..."
@echo '#! /bin/sh' > conformance-php-c
@echo 'php -dextension=../php/ext/google/protobuf/modules/protobuf.so ./conformance_php.php' >> conformance-php-c
@chmod +x conformance-php-c
# Targets for actually running tests.
test_cpp: protoc_middleman conformance-test-runner conformance-cpp
./conformance-test-runner --enforce_recommended --failure_list failure_list_cpp.txt --text_format_failure_list text_format_failure_list_cpp.txt ./conformance-cpp
test_java: protoc_middleman conformance-test-runner conformance-java
./conformance-test-runner --enforce_recommended --failure_list failure_list_java.txt --text_format_failure_list text_format_failure_list_java.txt ./conformance-java
test_java_lite: protoc_middleman conformance-test-runner conformance-java-lite
./conformance-test-runner --enforce_recommended ./conformance-java-lite
test_csharp: protoc_middleman conformance-test-runner conformance-csharp
./conformance-test-runner --enforce_recommended --failure_list failure_list_csharp.txt --text_format_failure_list text_format_failure_list_csharp.txt ./conformance-csharp
test_ruby: protoc_middleman conformance-test-runner $(other_language_protoc_outputs)
RUBYLIB=../ruby/lib:. ./conformance-test-runner --enforce_recommended --failure_list failure_list_ruby.txt --text_format_failure_list text_format_failure_list_ruby.txt ./conformance_ruby.rb
test_jruby: protoc_middleman conformance-test-runner $(other_language_protoc_outputs)
RUBYLIB=../ruby/lib:. ./conformance-test-runner --enforce_recommended --failure_list failure_list_jruby.txt --text_format_failure_list text_format_failure_list_jruby.txt ./conformance_ruby.rb
test_php: protoc_middleman conformance-test-runner conformance-php $(other_language_protoc_outputs)
./conformance-test-runner --enforce_recommended --failure_list failure_list_php.txt --text_format_failure_list text_format_failure_list_php.txt ./conformance-php
test_php_c: protoc_middleman conformance-test-runner conformance-php-c $(other_language_protoc_outputs)
./conformance-test-runner --enforce_recommended --failure_list failure_list_php_c.txt --text_format_failure_list text_format_failure_list_php.txt ./conformance-php-c
# These depend on library paths being properly set up. The easiest way to
# run them is to just use "tox" from the python dir.
test_python: protoc_middleman conformance-test-runner
./conformance-test-runner --enforce_recommended --failure_list failure_list_python.txt --text_format_failure_list text_format_failure_list_python.txt ./conformance_python.py
test_python_cpp: protoc_middleman conformance-test-runner
./conformance-test-runner --enforce_recommended --failure_list failure_list_python_cpp.txt --text_format_failure_list text_format_failure_list_python_cpp.txt ./conformance_python.py
if OBJC_CONFORMANCE_TEST
test_objc: protoc_middleman conformance-test-runner conformance-objc
./conformance-test-runner --enforce_recommended --failure_list failure_list_objc.txt ./conformance-objc
endif

View File

@ -7,7 +7,7 @@ define("GOOGLE_GPBMETADATA_NAMESPACE", "GPBMetadata\\Google\\Protobuf\\");
function protobuf_autoloader_impl($class, $prefix) {
$length = strlen($prefix);
if ((substr($class, 0, $length) === $prefix)) {
$path = '../php/src/' . implode('/', array_map('ucwords', explode('\\', $class))) . '.php';
$path = 'php/src/' . implode('/', array_map('ucwords', explode('\\', $class))) . '.php';
include_once $path;
}
}

View File

@ -44,8 +44,7 @@
#include <google/protobuf/util/type_resolver_util.h>
#include <google/protobuf/stubs/status.h>
#include <google/protobuf/stubs/statusor.h>
#include "conformance.pb.h"
#include "conformance.pb.h"
#include "conformance/conformance.pb.h"
#include <google/protobuf/test_messages_proto2.pb.h>
#include <google/protobuf/test_messages_proto3.pb.h>
#include <google/protobuf/test_messages_proto3.pb.h>

View File

@ -1,189 +0,0 @@
#!/usr/bin/env node
// Protocol Buffers - Google's data interchange format
// Copyright 2008 Google Inc. All rights reserved.
// https://developers.google.com/protocol-buffers/
//
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
// * Redistributions of source code must retain the above copyright
// notice, this list of conditions and the following disclaimer.
// * Redistributions in binary form must reproduce the above
// copyright notice, this list of conditions and the following disclaimer
// in the documentation and/or other materials provided with the
// distribution.
// * Neither the name of Google Inc. nor the names of its
// contributors may be used to endorse or promote products derived from
// this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
var conformance = require('conformance_pb');
var test_messages_proto3 = require('google/protobuf/test_messages_proto3_pb');
var test_messages_proto2 = require('google/protobuf/test_messages_proto2_pb');
var fs = require('fs');
var testCount = 0;
function doTest(request) {
var testMessage;
var response = new conformance.ConformanceResponse();
try {
if (request.getRequestedOutputFormat() == conformance.WireFormat.JSON) {
response.setSkipped("JSON not supported.");
return response;
}
if (request.getRequestedOutputFormat() ==
conformance.WireFormat.TEXT_FORMAT) {
response.setSkipped('Text format is not supported as output format.');
return response;
}
switch (request.getPayloadCase()) {
case conformance.ConformanceRequest.PayloadCase.PROTOBUF_PAYLOAD: {
if (request.getMessageType() == "protobuf_test_messages.proto3.TestAllTypesProto3") {
try {
testMessage = test_messages_proto3.TestAllTypesProto3.deserializeBinary(
request.getProtobufPayload());
} catch (err) {
response.setParseError(err.toString());
return response;
}
} else if (request.getMessageType() == "protobuf_test_messages.proto2.TestAllTypesProto2"){
try {
testMessage = test_messages_proto2.TestAllTypesProto2.deserializeBinary(
request.getProtobufPayload());
} catch (err) {
response.setParseError(err.toString());
return response;
}
} else {
throw "Protobuf request doesn\'t have specific payload type";
}
} break;
case conformance.ConformanceRequest.PayloadCase.JSON_PAYLOAD:
response.setSkipped("JSON not supported.");
return response;
case conformance.ConformanceRequest.PayloadCase.TEXT_PAYLOAD:
response.setSkipped("Text format not supported.");
return response;
case conformance.ConformanceRequest.PayloadCase.PAYLOAD_NOT_SET:
response.setRuntimeError("Request didn't have payload");
return response;
}
switch (request.getRequestedOutputFormat()) {
case conformance.WireFormat.UNSPECIFIED:
response.setRuntimeError("Unspecified output format");
return response;
case conformance.WireFormat.PROTOBUF:
response.setProtobufPayload(testMessage.serializeBinary());
case conformance.WireFormat.JSON:
response.setSkipped("JSON not supported.");
return response;
default:
throw "Request didn't have requested output format";
}
} catch (err) {
response.setRuntimeError(err.toString());
}
return response;
}
function onEof(totalRead) {
if (totalRead == 0) {
return undefined;
} else {
throw "conformance_nodejs: premature EOF on stdin.";
}
}
// Utility function to read a buffer of N bytes.
function readBuffer(bytes) {
var buf = new Buffer(bytes);
var totalRead = 0;
while (totalRead < bytes) {
var read = 0;
try {
read = fs.readSync(process.stdin.fd, buf, totalRead, bytes - totalRead);
} catch (e) {
if (e.code == 'EOF') {
return onEof(totalRead)
} else if (e.code == 'EAGAIN') {
} else {
throw "conformance_nodejs: Error reading from stdin." + e;
}
}
totalRead += read;
}
return buf;
}
function writeBuffer(buffer) {
var totalWritten = 0;
while (totalWritten < buffer.length) {
totalWritten += fs.writeSync(
process.stdout.fd, buffer, totalWritten, buffer.length - totalWritten);
}
}
// Returns true if the test ran successfully, false on legitimate EOF.
// If EOF is encountered in an unexpected place, raises IOError.
function doTestIo() {
var lengthBuf = readBuffer(4);
if (!lengthBuf) {
return false;
}
var length = lengthBuf.readInt32LE(0);
var serializedRequest = readBuffer(length);
if (!serializedRequest) {
throw "conformance_nodejs: Failed to read request.";
}
serializedRequest = new Uint8Array(serializedRequest);
var request =
conformance.ConformanceRequest.deserializeBinary(serializedRequest);
var response = doTest(request);
var serializedResponse = response.serializeBinary();
lengthBuf = new Buffer(4);
lengthBuf.writeInt32LE(serializedResponse.length, 0);
writeBuffer(lengthBuf);
writeBuffer(new Buffer(serializedResponse));
testCount += 1
return true;
}
while (true) {
if (!doTestIo()) {
console.error('conformance_nodejs: received EOF from test runner ' +
"after " + testCount + " tests, exiting")
break;
}
}

View File

@ -14,7 +14,7 @@ require_once("Protobuf_test_messages/Proto3/TestAllTypesProto3/NestedMessage.php
require_once("Protobuf_test_messages/Proto3/TestAllTypesProto3/NestedEnum.php");
require_once("GPBMetadata/Conformance.php");
require_once("GPBMetadata/Google/Protobuf/TestMessagesProto3.php");
require_once("GPBMetadata/TestMessagesProto3.php");
use \Conformance\TestCategory;
use \Conformance\WireFormat;
@ -113,7 +113,7 @@ function doTestIO()
while(true){
if (!doTestIO()) {
fprintf(STDERR,
"conformance_php: received EOF from test runner " +
"conformance_php: received EOF from test runner " .
"after %d tests, exiting\n", $test_count);
exit;
}

View File

@ -44,13 +44,13 @@ conformance_test_runner=$(rlocation com_google_protobuf/conformance/conformance_
conformance_testee=$(rlocation $TESTEE)
args=(--enforce_recommended)
failure_list=$(rlocation $FAILURE_LIST)
if [ "$failure_list" != "1" ] ; then
failure_list=$(rlocation $FAILURE_LIST) || unset
if [ -n "$failure_list" ] ; then
args+=(--failure_list $failure_list)
fi
text_format_failure_list=$(rlocation $TEXT_FORMAT_FAILURE_LIST)
if [ "$text_format_failure_list" != "1" ]; then
text_format_failure_list=$(rlocation $TEXT_FORMAT_FAILURE_LIST) || unset
if [ -n "$text_format_failure_list" ]; then
args+=(--text_format_failure_list $text_format_failure_list)
fi

View File

@ -7,7 +7,8 @@ def conformance_test(
name,
testee,
failure_list = None,
text_format_failure_list = None):
text_format_failure_list = None,
**kwargs):
"""Conformance test runner.
Args:
@ -16,6 +17,7 @@ def conformance_test(
failure_list: a text file with known failures, one per line.
text_format_failure_list: a text file with known failures (one per line)
for the text format conformance suite.
**kwargs: common arguments to pass to sh_test.
"""
args = ["--testee %s" % _strip_bazel(testee)]
failure_lists = []
@ -36,6 +38,8 @@ def conformance_test(
deps = [
"@bazel_tools//tools/bash/runfiles",
],
tags = ["conformance"],
**kwargs,
)
def _strip_bazel(testee):

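For reference, a minimal sketch of how a language package could invoke this macro and forward ordinary `sh_test` attributes through the new `**kwargs`; the testee and failure-list targets below are illustrative placeholders, not targets added by this PR:

```python
load("//conformance:defs.bzl", "conformance_test")

# Illustrative only: ":conformance_mylang" and the failure-list files stand in
# for a real per-language testee binary and its known-failure lists.
conformance_test(
    name = "conformance_test",
    testee = ":conformance_mylang",
    failure_list = "//conformance:failure_list_mylang.txt",
    text_format_failure_list = "//conformance:text_format_failure_list_mylang.txt",
    size = "medium",  # an ordinary sh_test attribute, forwarded via **kwargs
)
```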
View File

@ -1,162 +0,0 @@
Recommended.Proto2.ProtobufInput.ValidDataRepeated.ENUM.UnpackedInput.DefaultOutput.ProtobufOutput
Recommended.Proto2.ProtobufInput.ValidDataRepeated.ENUM.UnpackedInput.PackedOutput.ProtobufOutput
Recommended.Proto2.ProtobufInput.ValidDataRepeated.ENUM.UnpackedInput.UnpackedOutput.ProtobufOutput
Recommended.Proto2.ProtobufInput.ValidDataRepeated.FIXED64.PackedInput.DefaultOutput.ProtobufOutput
Recommended.Proto2.ProtobufInput.ValidDataRepeated.FIXED64.PackedInput.PackedOutput.ProtobufOutput
Recommended.Proto2.ProtobufInput.ValidDataRepeated.FIXED64.PackedInput.UnpackedOutput.ProtobufOutput
Recommended.Proto2.ProtobufInput.ValidDataRepeated.FIXED64.UnpackedInput.DefaultOutput.ProtobufOutput
Recommended.Proto2.ProtobufInput.ValidDataRepeated.FIXED64.UnpackedInput.PackedOutput.ProtobufOutput
Recommended.Proto2.ProtobufInput.ValidDataRepeated.FIXED64.UnpackedInput.UnpackedOutput.ProtobufOutput
Recommended.Proto2.ProtobufInput.ValidDataRepeated.INT32.PackedInput.DefaultOutput.ProtobufOutput
Recommended.Proto2.ProtobufInput.ValidDataRepeated.INT32.PackedInput.PackedOutput.ProtobufOutput
Recommended.Proto2.ProtobufInput.ValidDataRepeated.INT32.PackedInput.UnpackedOutput.ProtobufOutput
Recommended.Proto2.ProtobufInput.ValidDataRepeated.INT32.UnpackedInput.DefaultOutput.ProtobufOutput
Recommended.Proto2.ProtobufInput.ValidDataRepeated.INT32.UnpackedInput.PackedOutput.ProtobufOutput
Recommended.Proto2.ProtobufInput.ValidDataRepeated.INT32.UnpackedInput.UnpackedOutput.ProtobufOutput
Recommended.Proto2.ProtobufInput.ValidDataRepeated.INT64.PackedInput.DefaultOutput.ProtobufOutput
Recommended.Proto2.ProtobufInput.ValidDataRepeated.INT64.PackedInput.PackedOutput.ProtobufOutput
Recommended.Proto2.ProtobufInput.ValidDataRepeated.INT64.PackedInput.UnpackedOutput.ProtobufOutput
Recommended.Proto2.ProtobufInput.ValidDataRepeated.INT64.UnpackedInput.DefaultOutput.ProtobufOutput
Recommended.Proto2.ProtobufInput.ValidDataRepeated.INT64.UnpackedInput.PackedOutput.ProtobufOutput
Recommended.Proto2.ProtobufInput.ValidDataRepeated.INT64.UnpackedInput.UnpackedOutput.ProtobufOutput
Recommended.Proto2.ProtobufInput.ValidDataRepeated.SFIXED64.PackedInput.DefaultOutput.ProtobufOutput
Recommended.Proto2.ProtobufInput.ValidDataRepeated.SFIXED64.PackedInput.PackedOutput.ProtobufOutput
Recommended.Proto2.ProtobufInput.ValidDataRepeated.SFIXED64.PackedInput.UnpackedOutput.ProtobufOutput
Recommended.Proto2.ProtobufInput.ValidDataRepeated.SFIXED64.UnpackedInput.DefaultOutput.ProtobufOutput
Recommended.Proto2.ProtobufInput.ValidDataRepeated.SFIXED64.UnpackedInput.PackedOutput.ProtobufOutput
Recommended.Proto2.ProtobufInput.ValidDataRepeated.SFIXED64.UnpackedInput.UnpackedOutput.ProtobufOutput
Recommended.Proto2.ProtobufInput.ValidDataRepeated.SINT64.PackedInput.DefaultOutput.ProtobufOutput
Recommended.Proto2.ProtobufInput.ValidDataRepeated.SINT64.PackedInput.PackedOutput.ProtobufOutput
Recommended.Proto2.ProtobufInput.ValidDataRepeated.SINT64.PackedInput.UnpackedOutput.ProtobufOutput
Recommended.Proto2.ProtobufInput.ValidDataRepeated.SINT64.UnpackedInput.DefaultOutput.ProtobufOutput
Recommended.Proto2.ProtobufInput.ValidDataRepeated.SINT64.UnpackedInput.PackedOutput.ProtobufOutput
Recommended.Proto2.ProtobufInput.ValidDataRepeated.SINT64.UnpackedInput.UnpackedOutput.ProtobufOutput
Recommended.Proto2.ProtobufInput.ValidDataRepeated.UINT32.PackedInput.DefaultOutput.ProtobufOutput
Recommended.Proto2.ProtobufInput.ValidDataRepeated.UINT32.PackedInput.PackedOutput.ProtobufOutput
Recommended.Proto2.ProtobufInput.ValidDataRepeated.UINT32.PackedInput.UnpackedOutput.ProtobufOutput
Recommended.Proto2.ProtobufInput.ValidDataRepeated.UINT32.UnpackedInput.DefaultOutput.ProtobufOutput
Recommended.Proto2.ProtobufInput.ValidDataRepeated.UINT32.UnpackedInput.PackedOutput.ProtobufOutput
Recommended.Proto2.ProtobufInput.ValidDataRepeated.UINT32.UnpackedInput.UnpackedOutput.ProtobufOutput
Recommended.Proto2.ProtobufInput.ValidDataRepeated.UINT64.PackedInput.DefaultOutput.ProtobufOutput
Recommended.Proto2.ProtobufInput.ValidDataRepeated.UINT64.PackedInput.PackedOutput.ProtobufOutput
Recommended.Proto2.ProtobufInput.ValidDataRepeated.UINT64.PackedInput.UnpackedOutput.ProtobufOutput
Recommended.Proto2.ProtobufInput.ValidDataRepeated.UINT64.UnpackedInput.DefaultOutput.ProtobufOutput
Recommended.Proto2.ProtobufInput.ValidDataRepeated.UINT64.UnpackedInput.PackedOutput.ProtobufOutput
Recommended.Proto2.ProtobufInput.ValidDataRepeated.UINT64.UnpackedInput.UnpackedOutput.ProtobufOutput
Recommended.Proto2.ProtobufInput.ValidDataScalarBinary.ENUM[4].ProtobufOutput
Recommended.Proto2.ProtobufInput.ValidDataScalarBinary.ENUM[5].ProtobufOutput
Recommended.Proto2.ProtobufInput.ValidDataScalarBinary.FIXED64[2].ProtobufOutput
Recommended.Proto2.ProtobufInput.ValidDataScalarBinary.INT32[7].ProtobufOutput
Recommended.Proto2.ProtobufInput.ValidDataScalarBinary.INT64[2].ProtobufOutput
Recommended.Proto2.ProtobufInput.ValidDataScalarBinary.SFIXED64[2].ProtobufOutput
Recommended.Proto2.ProtobufInput.ValidDataScalarBinary.SINT64[2].ProtobufOutput
Recommended.Proto2.ProtobufInput.ValidDataScalarBinary.UINT32[8].ProtobufOutput
Recommended.Proto2.ProtobufInput.ValidDataScalarBinary.UINT64[2].ProtobufOutput
Recommended.Proto3.ProtobufInput.ValidDataRepeated.ENUM.UnpackedInput.DefaultOutput.ProtobufOutput
Recommended.Proto3.ProtobufInput.ValidDataRepeated.ENUM.UnpackedInput.PackedOutput.ProtobufOutput
Recommended.Proto3.ProtobufInput.ValidDataRepeated.ENUM.UnpackedInput.UnpackedOutput.ProtobufOutput
Recommended.Proto3.ProtobufInput.ValidDataRepeated.FIXED64.PackedInput.DefaultOutput.ProtobufOutput
Recommended.Proto3.ProtobufInput.ValidDataRepeated.FIXED64.PackedInput.PackedOutput.ProtobufOutput
Recommended.Proto3.ProtobufInput.ValidDataRepeated.FIXED64.PackedInput.UnpackedOutput.ProtobufOutput
Recommended.Proto3.ProtobufInput.ValidDataRepeated.FIXED64.UnpackedInput.DefaultOutput.ProtobufOutput
Recommended.Proto3.ProtobufInput.ValidDataRepeated.FIXED64.UnpackedInput.PackedOutput.ProtobufOutput
Recommended.Proto3.ProtobufInput.ValidDataRepeated.FIXED64.UnpackedInput.UnpackedOutput.ProtobufOutput
Recommended.Proto3.ProtobufInput.ValidDataRepeated.INT32.PackedInput.DefaultOutput.ProtobufOutput
Recommended.Proto3.ProtobufInput.ValidDataRepeated.INT32.PackedInput.PackedOutput.ProtobufOutput
Recommended.Proto3.ProtobufInput.ValidDataRepeated.INT32.PackedInput.UnpackedOutput.ProtobufOutput
Recommended.Proto3.ProtobufInput.ValidDataRepeated.INT32.UnpackedInput.DefaultOutput.ProtobufOutput
Recommended.Proto3.ProtobufInput.ValidDataRepeated.INT32.UnpackedInput.PackedOutput.ProtobufOutput
Recommended.Proto3.ProtobufInput.ValidDataRepeated.INT32.UnpackedInput.UnpackedOutput.ProtobufOutput
Recommended.Proto3.ProtobufInput.ValidDataRepeated.INT64.PackedInput.DefaultOutput.ProtobufOutput
Recommended.Proto3.ProtobufInput.ValidDataRepeated.INT64.PackedInput.PackedOutput.ProtobufOutput
Recommended.Proto3.ProtobufInput.ValidDataRepeated.INT64.PackedInput.UnpackedOutput.ProtobufOutput
Recommended.Proto3.ProtobufInput.ValidDataRepeated.INT64.UnpackedInput.DefaultOutput.ProtobufOutput
Recommended.Proto3.ProtobufInput.ValidDataRepeated.INT64.UnpackedInput.PackedOutput.ProtobufOutput
Recommended.Proto3.ProtobufInput.ValidDataRepeated.INT64.UnpackedInput.UnpackedOutput.ProtobufOutput
Recommended.Proto3.ProtobufInput.ValidDataRepeated.SFIXED64.PackedInput.DefaultOutput.ProtobufOutput
Recommended.Proto3.ProtobufInput.ValidDataRepeated.SFIXED64.PackedInput.PackedOutput.ProtobufOutput
Recommended.Proto3.ProtobufInput.ValidDataRepeated.SFIXED64.PackedInput.UnpackedOutput.ProtobufOutput
Recommended.Proto3.ProtobufInput.ValidDataRepeated.SFIXED64.UnpackedInput.DefaultOutput.ProtobufOutput
Recommended.Proto3.ProtobufInput.ValidDataRepeated.SFIXED64.UnpackedInput.PackedOutput.ProtobufOutput
Recommended.Proto3.ProtobufInput.ValidDataRepeated.SFIXED64.UnpackedInput.UnpackedOutput.ProtobufOutput
Recommended.Proto3.ProtobufInput.ValidDataRepeated.SINT64.PackedInput.DefaultOutput.ProtobufOutput
Recommended.Proto3.ProtobufInput.ValidDataRepeated.SINT64.PackedInput.PackedOutput.ProtobufOutput
Recommended.Proto3.ProtobufInput.ValidDataRepeated.SINT64.PackedInput.UnpackedOutput.ProtobufOutput
Recommended.Proto3.ProtobufInput.ValidDataRepeated.SINT64.UnpackedInput.DefaultOutput.ProtobufOutput
Recommended.Proto3.ProtobufInput.ValidDataRepeated.SINT64.UnpackedInput.PackedOutput.ProtobufOutput
Recommended.Proto3.ProtobufInput.ValidDataRepeated.SINT64.UnpackedInput.UnpackedOutput.ProtobufOutput
Recommended.Proto3.ProtobufInput.ValidDataRepeated.UINT32.PackedInput.DefaultOutput.ProtobufOutput
Recommended.Proto3.ProtobufInput.ValidDataRepeated.UINT32.PackedInput.PackedOutput.ProtobufOutput
Recommended.Proto3.ProtobufInput.ValidDataRepeated.UINT32.PackedInput.UnpackedOutput.ProtobufOutput
Recommended.Proto3.ProtobufInput.ValidDataRepeated.UINT32.UnpackedInput.DefaultOutput.ProtobufOutput
Recommended.Proto3.ProtobufInput.ValidDataRepeated.UINT32.UnpackedInput.PackedOutput.ProtobufOutput
Recommended.Proto3.ProtobufInput.ValidDataRepeated.UINT32.UnpackedInput.UnpackedOutput.ProtobufOutput
Recommended.Proto3.ProtobufInput.ValidDataRepeated.UINT64.PackedInput.DefaultOutput.ProtobufOutput
Recommended.Proto3.ProtobufInput.ValidDataRepeated.UINT64.PackedInput.PackedOutput.ProtobufOutput
Recommended.Proto3.ProtobufInput.ValidDataRepeated.UINT64.PackedInput.UnpackedOutput.ProtobufOutput
Recommended.Proto3.ProtobufInput.ValidDataRepeated.UINT64.UnpackedInput.DefaultOutput.ProtobufOutput
Recommended.Proto3.ProtobufInput.ValidDataRepeated.UINT64.UnpackedInput.PackedOutput.ProtobufOutput
Recommended.Proto3.ProtobufInput.ValidDataRepeated.UINT64.UnpackedInput.UnpackedOutput.ProtobufOutput
Recommended.Proto3.ProtobufInput.ValidDataScalarBinary.ENUM[4].ProtobufOutput
Recommended.Proto3.ProtobufInput.ValidDataScalarBinary.ENUM[5].ProtobufOutput
Recommended.Proto3.ProtobufInput.ValidDataScalarBinary.FIXED64[2].ProtobufOutput
Recommended.Proto3.ProtobufInput.ValidDataScalarBinary.INT32[7].ProtobufOutput
Recommended.Proto3.ProtobufInput.ValidDataScalarBinary.INT64[2].ProtobufOutput
Recommended.Proto3.ProtobufInput.ValidDataScalarBinary.SFIXED64[2].ProtobufOutput
Recommended.Proto3.ProtobufInput.ValidDataScalarBinary.SINT64[2].ProtobufOutput
Recommended.Proto3.ProtobufInput.ValidDataScalarBinary.UINT32[8].ProtobufOutput
Recommended.Proto3.ProtobufInput.ValidDataScalarBinary.UINT64[2].ProtobufOutput
Required.Proto2.ProtobufInput.RepeatedScalarSelectsLast.ENUM.ProtobufOutput
Required.Proto2.ProtobufInput.RepeatedScalarSelectsLast.FIXED64.ProtobufOutput
Required.Proto2.ProtobufInput.RepeatedScalarSelectsLast.UINT64.ProtobufOutput
Required.Proto2.ProtobufInput.ValidDataRepeated.ENUM.UnpackedInput.ProtobufOutput
Required.Proto2.ProtobufInput.ValidDataRepeated.FIXED64.PackedInput.ProtobufOutput
Required.Proto2.ProtobufInput.ValidDataRepeated.FIXED64.UnpackedInput.ProtobufOutput
Required.Proto2.ProtobufInput.ValidDataRepeated.INT32.PackedInput.ProtobufOutput
Required.Proto2.ProtobufInput.ValidDataRepeated.INT32.UnpackedInput.ProtobufOutput
Required.Proto2.ProtobufInput.ValidDataRepeated.INT64.PackedInput.ProtobufOutput
Required.Proto2.ProtobufInput.ValidDataRepeated.INT64.UnpackedInput.ProtobufOutput
Required.Proto2.ProtobufInput.ValidDataRepeated.SFIXED64.PackedInput.ProtobufOutput
Required.Proto2.ProtobufInput.ValidDataRepeated.SFIXED64.UnpackedInput.ProtobufOutput
Required.Proto2.ProtobufInput.ValidDataRepeated.SINT64.PackedInput.ProtobufOutput
Required.Proto2.ProtobufInput.ValidDataRepeated.SINT64.UnpackedInput.ProtobufOutput
Required.Proto2.ProtobufInput.ValidDataRepeated.UINT32.PackedInput.ProtobufOutput
Required.Proto2.ProtobufInput.ValidDataRepeated.UINT32.UnpackedInput.ProtobufOutput
Required.Proto2.ProtobufInput.ValidDataRepeated.UINT64.PackedInput.ProtobufOutput
Required.Proto2.ProtobufInput.ValidDataRepeated.UINT64.UnpackedInput.ProtobufOutput
Required.Proto2.ProtobufInput.ValidDataScalar.ENUM[4].ProtobufOutput
Required.Proto2.ProtobufInput.ValidDataScalar.ENUM[5].ProtobufOutput
Required.Proto2.ProtobufInput.ValidDataScalar.FIXED64[2].ProtobufOutput
Required.Proto2.ProtobufInput.ValidDataScalar.INT32[7].ProtobufOutput
Required.Proto2.ProtobufInput.ValidDataScalar.INT64[2].ProtobufOutput
Required.Proto2.ProtobufInput.ValidDataScalar.SFIXED64[2].ProtobufOutput
Required.Proto2.ProtobufInput.ValidDataScalar.SINT64[2].ProtobufOutput
Required.Proto2.ProtobufInput.ValidDataScalar.UINT32[8].ProtobufOutput
Required.Proto2.ProtobufInput.ValidDataScalar.UINT64[2].ProtobufOutput
Required.Proto3.ProtobufInput.RepeatedScalarSelectsLast.ENUM.ProtobufOutput
Required.Proto3.ProtobufInput.RepeatedScalarSelectsLast.FIXED64.ProtobufOutput
Required.Proto3.ProtobufInput.RepeatedScalarSelectsLast.UINT64.ProtobufOutput
Required.Proto3.ProtobufInput.ValidDataRepeated.ENUM.UnpackedInput.ProtobufOutput
Required.Proto3.ProtobufInput.ValidDataRepeated.FIXED64.PackedInput.ProtobufOutput
Required.Proto3.ProtobufInput.ValidDataRepeated.FIXED64.UnpackedInput.ProtobufOutput
Required.Proto3.ProtobufInput.ValidDataRepeated.INT32.PackedInput.ProtobufOutput
Required.Proto3.ProtobufInput.ValidDataRepeated.INT32.UnpackedInput.ProtobufOutput
Required.Proto3.ProtobufInput.ValidDataRepeated.INT64.PackedInput.ProtobufOutput
Required.Proto3.ProtobufInput.ValidDataRepeated.INT64.UnpackedInput.ProtobufOutput
Required.Proto3.ProtobufInput.ValidDataRepeated.SFIXED64.PackedInput.ProtobufOutput
Required.Proto3.ProtobufInput.ValidDataRepeated.SFIXED64.UnpackedInput.ProtobufOutput
Required.Proto3.ProtobufInput.ValidDataRepeated.SINT64.PackedInput.ProtobufOutput
Required.Proto3.ProtobufInput.ValidDataRepeated.SINT64.UnpackedInput.ProtobufOutput
Required.Proto3.ProtobufInput.ValidDataRepeated.UINT32.PackedInput.ProtobufOutput
Required.Proto3.ProtobufInput.ValidDataRepeated.UINT32.UnpackedInput.ProtobufOutput
Required.Proto3.ProtobufInput.ValidDataRepeated.UINT64.PackedInput.ProtobufOutput
Required.Proto3.ProtobufInput.ValidDataRepeated.UINT64.UnpackedInput.ProtobufOutput
Required.Proto3.ProtobufInput.ValidDataScalar.ENUM[4].ProtobufOutput
Required.Proto3.ProtobufInput.ValidDataScalar.ENUM[5].ProtobufOutput
Required.Proto3.ProtobufInput.ValidDataScalar.FIXED64[2].ProtobufOutput
Required.Proto3.ProtobufInput.ValidDataScalar.INT32[7].ProtobufOutput
Required.Proto3.ProtobufInput.ValidDataScalar.INT64[2].ProtobufOutput
Required.Proto3.ProtobufInput.ValidDataScalar.SFIXED64[2].ProtobufOutput
Required.Proto3.ProtobufInput.ValidDataScalar.SINT64[2].ProtobufOutput
Required.Proto3.ProtobufInput.ValidDataScalar.UINT32[8].ProtobufOutput
Required.Proto3.ProtobufInput.ValidDataScalar.UINT64[2].ProtobufOutput

View File

@ -3,11 +3,49 @@
# See also code generation logic under /src/google/protobuf/compiler/csharp.
load("@rules_pkg//:mappings.bzl", "pkg_files", "strip_prefix")
load("//:protobuf.bzl", "internal_csharp_proto_library")
load("//build_defs:internal_shell.bzl", "inline_sh_test")
load("//conformance:defs.bzl", "conformance_test")
################################################################################
# Tests
################################################################################
conformance_test(
name = "conformance_test",
failure_list = "//conformance:failure_list_csharp.txt",
testee = "//conformance:conformance_csharp",
text_format_failure_list = "//conformance:text_format_failure_list_csharp.txt",
)
################################################################################
# CSharp Runtime
################################################################################
filegroup(
name = "srcs",
srcs = glob([
"keys/*",
"protos/*",
"src/**/*.cs*", # .cs and .csproj
], exclude = [
# Exclude generated files.
"src/*/obj/**/*"
]) + [
"src/Directory.Build.props",
"src/Google.Protobuf.Benchmarks/wrapper_benchmark_messages.proto",
"src/Google.Protobuf.Test/testprotos.pb",
"src/Google.Protobuf.sln",
],
visibility = [
"//csharp:__subpackages__",
"//conformance:__subpackages__"
],
)
filegroup(
name = "wkt_cs_srcs",
data = [
"src/Google.Protobuf.Conformance/Conformance.cs",
srcs = [
"src/Google.Protobuf/Reflection/Descriptor.cs",
"src/Google.Protobuf/WellKnownTypes/Any.cs",
"src/Google.Protobuf/WellKnownTypes/Api.cs",
@ -23,13 +61,31 @@ filegroup(
visibility = ["//src/google/protobuf/compiler/csharp:__pkg__"],
)
inline_sh_test(
name = "tests",
srcs = [
":srcs",
"src/Google.Protobuf.sln",
"//csharp/src/Google.Protobuf.Conformance:srcs",
"//benchmarks/datasets:proto3_datasets"
],
cmd = """
pushd `dirname $(location src/Google.Protobuf.sln)`/..
dotnet restore src/Google.Protobuf.sln
dotnet build -c Release src/Google.Protobuf.sln
dotnet test -c Release -f netcoreapp3.1 src/Google.Protobuf.Test/Google.Protobuf.Test.csproj
popd
""",
)
################################################################################
# Distribution files
################################################################################
pkg_files(
name = "dist_files",
srcs = glob([
"keys/*",
"protos/*",
"src/**/*.cs*", # .cs and .csproj
]) + [
srcs = [
":srcs",
".editorconfig",
".gitignore",
"BUILD.bazel",
@ -45,9 +101,7 @@ pkg_files(
"buildall.sh",
"generate_protos.sh",
"install_dotnet_sdk.ps1",
"src/Google.Protobuf.Benchmarks/wrapper_benchmark_messages.proto",
"src/Google.Protobuf.Test/testprotos.pb",
"src/Google.Protobuf.sln",
"//csharp/src/Google.Protobuf.Conformance:dist_files",
],
strip_prefix = strip_prefix.from_root(""),
visibility = ["//pkg:__pkg__"],

View File

@ -0,0 +1,16 @@
load("//build_defs:internal_shell.bzl", "inline_sh_test")
# Run csharp compatibility test between 3.0.0 and the current version.
inline_sh_test(
name = "tests",
srcs = ["test.sh"],
tools = ["//:protoc"],
deps = glob([
"src/**/*.cs*", # .cs and .csproj
"protos/**/*.proto",
]) + [
"//csharp:srcs",
"//:well_known_protos",
],
cmd = "$(location test.sh) 3.0.0 $(rootpath //:protoc)"
)

View File

@ -27,6 +27,8 @@ function run_test() {
set -ex
PROTOC=$(realpath ${2:-../../../bazel-bin/protoc})
# Change to the script's directory.
cd $(dirname $0)
@ -45,7 +47,7 @@ OLD_VERSION_PROTOC=https://repo1.maven.org/maven2/com/google/protobuf/protoc/$OL
echo "Running compatibility tests with $OLD_VERSION"
# Check protoc
[ -f ../../../src/protoc ] || {
[ -f $PROTOC ] || {
echo "[ERROR]: Please build protoc first."
exit 1
}
@ -68,12 +70,12 @@ run_test "./old_protoc" "./old_protoc"
# Test A.2:
# proto set 1: use new version
# proto set 2 which may import protos in set 1: use old version
run_test "../../../src/protoc" "./old_protoc"
run_test "$PROTOC" "./old_protoc"
# Test A.3:
# proto set 1: use old version
# proto set 2 which may import protos in set 1: use new version
run_test "./old_protoc" "../../../src/protoc"
run_test "./old_protoc" "$PROTOC"
rm old_protoc
rm keys -r

View File

@ -11,17 +11,19 @@ pushd $(dirname $0)/..
# Protocol buffer compiler to use. If the PROTOC variable is set,
# use that. Otherwise, probe for expected locations under both
# Windows and Unix.
PROTOC_LOCATIONS=(
"bazel-bin/protoc"
"solution/Debug/protoc.exe"
"cmake/build/Debug/protoc.exe"
"cmake/build/Release/protoc.exe"
)
if [ -z "$PROTOC" ]; then
# TODO(jonskeet): Use an array and a for loop instead?
if [ -x solution/Debug/protoc.exe ]; then
PROTOC=solution/Debug/protoc.exe
elif [ -x cmake/build/Debug/protoc.exe ]; then
PROTOC=cmake/build/Debug/protoc.exe
elif [ -x cmake/build/Release/protoc.exe ]; then
PROTOC=cmake/build/Release/protoc.exe
elif [ -x src/protoc ]; then
PROTOC=src/protoc
else
for protoc in "${PROTOC_LOCATIONS[@]}"; do
if [ -x "$protoc" ]; then
PROTOC="$protoc"
fi
done
if [ -z "$PROTOC" ]; then
echo "Unable to find protocol buffer compiler."
exit 1
fi
@ -75,9 +77,6 @@ $PROTOC -Isrc -Icsharp/protos \
$PROTOC -Iexamples -Isrc --csharp_out=csharp/src/AddressBook \
examples/addressbook.proto
$PROTOC -Iconformance -Isrc --csharp_out=csharp/src/Google.Protobuf.Conformance \
conformance/conformance.proto
# Benchmark protos
$PROTOC -Ibenchmarks \
benchmarks/datasets/google_message1/proto3/*.proto \

View File

@ -0,0 +1,97 @@
load("//build_defs:internal_shell.bzl", "inline_sh_binary")
load("@rules_pkg//:mappings.bzl", "pkg_files", "strip_prefix")
inline_sh_binary(
name = "build_conformance_test",
srcs = ["Google.Protobuf.Conformance.csproj"],
cmd = "dotnet build -c Release $(SRCS)",
visibility = ["//visibility:private"],
)
CONFORMANCE_DEPS = [
"Google.Protobuf.dll",
"Google.Protobuf.Conformance.deps.json",
"Google.Protobuf.Conformance.pdb",
"Google.Protobuf.Conformance.runtimeconfig.json",
"Google.Protobuf.Test.dll",
"Google.Protobuf.Test.TestProtos.dll",
"Microsoft.TestPlatform.CommunicationUtilities.dll",
"Microsoft.TestPlatform.CoreUtilities.dll",
"Microsoft.TestPlatform.CrossPlatEngine.dll",
"Microsoft.TestPlatform.PlatformAbstractions.dll",
"Microsoft.TestPlatform.Utilities.dll",
"Microsoft.VisualStudio.CodeCoverage.Shim.dll",
"Microsoft.VisualStudio.TestPlatform.Common.dll",
"Microsoft.VisualStudio.TestPlatform.ObjectModel.dll",
"NUnit3.TestAdapter.dll",
"Newtonsoft.Json.dll",
"NuGet.Frameworks.dll",
"nunit.engine.api.dll",
"nunit.engine.core.dll",
"nunit.engine.dll",
"nunit.framework.dll",
"testcentric.engine.metadata.dll",
"testhost.dll",
]
filegroup(
name = "srcs",
srcs = [
"Conformance.cs",
"Program.cs",
"Google.Protobuf.Conformance.csproj",
],
visibility = [
"//csharp:__subpackages__",
],
)
genrule(
name = "package_conformance_test",
srcs = [
"Program.cs",
"Google.Protobuf.Conformance.csproj",
"//conformance:conformance_csharp_proto",
"//csharp:srcs",
],
visibility = ["//visibility:private"],
tools = [":build_conformance_test"],
outs = CONFORMANCE_DEPS + ["Google.Protobuf.Conformance.dll"],
cmd = """
SRCDIR=$$(dirname $(location :Program.cs))
cp $(location //conformance:conformance_csharp_proto) $$SRCDIR
DOTNET_CLI_HOME=/tmp ./$(location :build_conformance_test)
cp -r $$SRCDIR/bin/Release/netcoreapp3.1/* $(RULEDIR)
""",
)
filegroup(
name = "conformance_dll",
srcs = ["Google.Protobuf.Conformance.dll"],
data = [":package_conformance_test"],
visibility = ["//conformance:__subpackages__"],
)
filegroup(
name = "conformance_runfiles",
srcs = CONFORMANCE_DEPS,
data = [":package_conformance_test"],
visibility = ["//conformance:__subpackages__"],
)
################################################################################
# Distribution files
################################################################################
pkg_files(
name = "dist_files",
srcs = [
"BUILD.bazel",
"Google.Protobuf.Conformance.csproj",
"Conformance.cs",
"Program.cs",
],
strip_prefix = strip_prefix.from_root(""),
visibility = ["//csharp:__pkg__"],
)

View File

@ -25,9 +25,9 @@ essentially a complete copy of the same build definitions.
was designed in large part to support exactly this type of rich,
multi-language build.
Currently, C++ Protobuf can be built with Bazel, Autotools, and CMake. Each of
these build systems has different semantics and structure, but share in common
the list of files needed to build the runtime and compiler.
Currently, C++ Protobuf can be built with Bazel and CMake. Each of these build
systems has different semantics and structure, but they share the list of files
needed to build the runtime and compiler.
## Design
@ -74,7 +74,7 @@ rules, a special rule, `cc_dist_library`, combines several fine-grained
libraries into a single, monolithic library.
For the Protobuf project, these "distribution libraries" are intended to match
the granularity of the Autotools- and CMake-based builds. Since the Bazel-built
the granularity of the CMake-based builds. Since the Bazel-built
distribution library covers the rules with the source files needed by other
builds, the `cc_dist_library` rule invokes the `cc_file_list_aspect` on its
input libraries. The result is that a `cc_dist_library` rule not only produces
@ -163,8 +163,7 @@ detects this as two conflicting actions generating the same outputs. (For
### File list generation
Lists of input files are generated by Bazel in a format that can be imported to
other build systems. Currently, Automake- and CMake-style files can be
generated.
other build systems. Currently only CMake-style files can be generated.
The lists of files are derived from Bazel build targets. The sources can be:
* `cc_dist_library` rules (as described above)
@ -263,11 +262,6 @@ include(source_lists.cmake)
add_library(distlib ${distlib_srcs} ${buff_srcs})
```
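In practice the list file is just an artifact of an ordinary Bazel build. A minimal sketch of the round trip, assuming a hypothetical `//pkg:gen_src_file_lists` target that emits the `source_lists.cmake` fragment used above:
```bash
# Sketch: build the (hypothetical) file-list target with Bazel, copy the
# generated CMake fragment next to CMakeLists.txt, then configure with CMake.
bazel build //pkg:gen_src_file_lists
cp bazel-bin/pkg/source_lists.cmake .
cmake -S . -B build
cmake --build build
```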
In addition to `gen_cmake_file_lists`, there is also a `gen_automake_file_lists`
rule. These rules actually share most of the same implementation, but define
different file headers and different Starlark "fragment generator" functions
which format the generated list variables.
### Protobuf usage
The main C++ runtimes (lite and full) and the Protobuf compiler use their

View File

@ -131,11 +131,11 @@ These options are demonstrated below:
```
# One option:
$ ./src/protoc test.proto --cpp_out=. --experimental_allow_proto3_optional
$ protoc test.proto --cpp_out=. --experimental_allow_proto3_optional
# Another option:
$ cp test.proto test_proto3_optional.proto
$ ./src/protoc test_proto3_optional.proto --cpp_out=.
$ protoc test_proto3_optional.proto --cpp_out=.
$
```
@ -152,7 +152,7 @@ If you now try to invoke your own code generator with the test proto, you will
run into a different error:
```
$ ./src/protoc test_proto3_optional.proto --my_codegen_out=.
$ protoc test_proto3_optional.proto --my_codegen_out=.
test_proto3_optional.proto: is a proto3 file that contains optional fields, but
code generator --my_codegen_out hasn't been updated to support optional fields in
proto3. Please ask the owner of this code generator to support proto3 optional.
@ -201,7 +201,7 @@ Once you have added this, you should now be able to successfully use your code
generator to generate a file containing proto3 optional fields:
```
$ ./src/protoc test_proto3_optional.proto --my_codegen_out=.
$ protoc test_proto3_optional.proto --my_codegen_out=.
```
### Updating Your Code Generator

View File

@ -44,7 +44,7 @@ build only the language you are interested in.
### C++
You can follow instructions in [../src/README.md](../src/README.md) to install
protoc and protobuf C++ runtime from source.
protoc from source.
Then run "make cpp" in this examples directory to build the C++ example. It
will create two executables: add_person_cpp and list_people_cpp. These programs

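Concretely, the flow described above boils down to the following sketch, run from the examples directory; `addressbook.data` is just an example data file name, not a file shipped with the repo:
```bash
# Sketch of the C++ example workflow described above.
make cpp                             # builds add_person_cpp and list_people_cpp
./add_person_cpp addressbook.data    # add an entry to an address book file
./list_people_cpp addressbook.data   # list the entries stored in that file
```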
View File

@ -10,9 +10,11 @@ load("@bazel_tools//tools/build_defs/repo:http.bzl", "http_archive")
#
# http_archive(
# name = "com_google_protobuf",
# strip_prefix = "protobuf-main",
# urls = ["https://github.com/protocolbuffers/protobuf/archive/main.zip"],
# sha256 = "c29d8b4b79389463c546f98b15aa4391d4ed7ec459340c47bffe15db63eb9126",
# strip_prefix = "protobuf-3.21.3",
# urls = ["https://github.com/protocolbuffers/protobuf/archive/v3.21.3.tar.gz"],
# )
local_repository(
name = "com_google_protobuf",
path = "..",

View File

@ -7,7 +7,7 @@
# generate.
#
# HINT: Flags passed to generate_descriptor_proto.sh will be passed directly
# to make when building protoc. This is particularly useful for passing
# to bazel when building protoc. This is particularly useful for passing
# -j4 to run 4 jobs simultaneously.
if test ! -e src/google/protobuf/stubs/common.h; then
@ -62,12 +62,12 @@ do
PROTOC=$BOOTSTRAP_PROTOC
BOOTSTRAP_PROTOC=""
else
make -j$(nproc) $@ protoc
bazel build -j$(nproc) $@ //:protoc
if test $? -ne 0; then
echo "Failed to build protoc."
exit 1
fi
PROTOC="./protoc"
PROTOC="../bazel-bin/protoc"
fi
$PROTOC --cpp_out=dllexport_decl=PROTOBUF_EXPORT:$TMP ${RUNTIME_PROTO_FILES[@]} && \

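Per the HINT comment above, any extra flags are forwarded to bazel when protoc is rebuilt; a sketch of a typical invocation from the repository root (the flag value is illustrative):
```bash
# Sketch: regenerate the bootstrapped descriptor sources from the repo root,
# forwarding a parallelism flag to the underlying `bazel build` call.
./generate_descriptor_proto.sh --jobs=8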
View File

@ -6,8 +6,19 @@ pkg_files(
name = "dist_files",
srcs = [
"BUILD.bazel",
"JavaVersionTest.java",
"testing.bzl",
],
strip_prefix = strip_prefix.from_root(""),
visibility = ["//java:__pkg__"],
)
java_test(
name = "java_version",
test_class = "JavaVersionTest",
srcs = ["JavaVersionTest.java"],
deps = [
"@maven//:com_google_truth_truth",
"@maven//:junit_junit",
],
)

View File

@ -0,0 +1,22 @@
// Test that Kokoro is using the expected version of Java.
import static com.google.common.truth.Truth.assertWithMessage;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.junit.runners.JUnit4;
@RunWith(JUnit4.class)
public class JavaVersionTest {
@Test
public void testJavaVersion() throws Exception {
String exp = System.getenv("KOKORO_JAVA_VERSION");
if(exp == null || exp.isEmpty()) {
System.err.println("No kokoro java version found, skipping check");
return;
}
String version = System.getProperty("java.version");
assertWithMessage("Expected Python " + exp + " but found Python " + version)
.that(version.startsWith(exp))
.isTrue();
}
}

View File

@ -33,7 +33,7 @@
<!-- These are relative to the submodules -->
<protobuf.basedir>${project.basedir}/../..</protobuf.basedir>
<protobuf.source.dir>${protobuf.basedir}/src</protobuf.source.dir>
<protoc>${protobuf.source.dir}/protoc</protoc>
<protoc>${protobuf.basedir}/protoc</protoc>
<test.proto.dir>src/test/proto</test.proto.dir>
<generated.sources.dir>${project.build.directory}/generated-sources</generated.sources.dir>
<generated.testsources.dir>${project.build.directory}/generated-test-sources</generated.testsources.dir>

View File

@ -1,55 +0,0 @@
#!/bin/bash
#
# Script to compare a distribution archive for expected files based on git.
#
# Usage:
# check_missing_dist_files.sh path/to/dist_archive.tar.gz
set -eux
set -o pipefail
# By default, look for a git repo based on this script's path.
: ${SOURCE_DIR:=$(cd $(dirname $0)/../.. ; pwd)}
# Use a temporary directory for intermediate files.
# Note that pipelines below use subshells to avoid multiple trap executions.
_workdir=$(mktemp -d)
function cleanup_workdir() { rm -r ${_workdir}; }
trap cleanup_workdir EXIT
# List all the files in the archive.
(
tar -atf $1 | \
cut -d/ -f2- | \
sort
) > ${_workdir}/archive.lst
# List all files in the git repo that should be in the archive.
(
git -C ${SOURCE_DIR} ls-files | \
grep "^\(java\|python\|objectivec\|csharp\|ruby\|php\|cmake\|examples\|src/google/protobuf/.*\.proto\)" |\
grep -v ".gitignore" | \
grep -v "java/lite/proguard.pgcfg" | \
grep -v "python/compatibility_tests" | \
grep -v "python/docs" | \
grep -v "python/.repo-metadata.json" | \
grep -v "python/protobuf_distutils" | \
grep -v "csharp/compatibility_tests" | \
sort
) > ${_workdir}/expected.lst
# Check for missing files.
MISSING_FILES=( $(cd ${_workdir} && comm -13 archive.lst expected.lst) )
if (( ${#MISSING_FILES[@]} == 0 )); then
exit 0
fi
(
set +x
echo -e "\n\nMissing files from archive:"
for (( i=0 ; i < ${#MISSING_FILES[@]} ; i++ )); do
echo " ${MISSING_FILES[i]}"
done
echo -e "\nAdd them to the 'pkg_files' rule in corresponding BUILD.bazel.\n"
) >&2
exit 1

View File

@ -1,16 +0,0 @@
# Shared logic to choose a Python version with pyenv.
#
# This file should be `source`d.
# Requested version of Python can be overridden by env variable.
: ${PYTHON_VERSION:=3.9.5}
if pyenv --version >/dev/null ; then
eval "$(pyenv init -)"
if ! pyenv global ${PYTHON_VERSION}; then
echo "Python ${PYTHON_VERSION} is not available. Versions available:" >&2
pyenv versions >&2
exit 1
fi
fi
echo "Using $(python --version || python3 --version)"

View File

@ -16,7 +16,7 @@ sudo apt-get update
sudo apt-get -y install unzip
wget https://github.com/protocolbuffers/protobuf/releases/download/v21.1/protoc-21.1-linux-x86_64.zip
unzip protoc-21.1-linux-x86_64.zip bin/protoc
mv bin/protoc ../src/protoc
mv bin/protoc ../protoc
python3 -m venv venv
source venv/bin/activate
python setup.py install

View File

@ -1,143 +0,0 @@
# This Dockerfile specifies the recipe for creating an image for the tests
# to run in.
#
# We install as many test dependencies here as we can, because these setup
# steps can be cached. They do *not* run every time we run the build.
# The Docker image is only rebuilt when the Dockerfile (ie. this file)
# changes.
# Base Dockerfile for gRPC dev images
FROM 32bit/debian:latest
# Apt source for php
RUN echo "deb http://ppa.launchpad.net/ondrej/php/ubuntu trusty main" | tee /etc/apt/sources.list.d/various-php.list && \
apt-key adv --keyserver keyserver.ubuntu.com --recv-keys F4FCBB07
# Install dependencies. We start with the basic ones require to build protoc
# and the C++ build
RUN apt-get clean && apt-get update && apt-get install -y --force-yes \
autoconf \
autotools-dev \
build-essential \
bzip2 \
ccache \
curl \
gcc \
git \
libc6 \
libc6-dbg \
libc6-dev \
libgtest-dev \
libtool \
make \
parallel \
time \
wget \
unzip \
# -- For python --
python-setuptools \
python-pip \
python-dev \
# -- For C++ benchmarks --
cmake \
# -- For PHP --
php5.5 \
php5.5-dev \
php5.5-xml \
php5.6 \
php5.6-dev \
php5.6-xml \
php7.0 \
php7.0-dev \
php7.0-xml \
phpunit \
valgrind \
libxml2-dev \
&& apt-get clean
##################
# PHP dependencies.
RUN wget http://am1.php.net/get/php-5.5.38.tar.bz2/from/this/mirror
RUN mv mirror php-5.5.38.tar.bz2
RUN tar -xvf php-5.5.38.tar.bz2
RUN cd php-5.5.38 && ./configure --enable-maintainer-zts --prefix=/usr/local/php-5.5-zts && \
make && make install && make clean && cd ..
RUN cd php-5.5.38 && make clean && ./configure --enable-bcmath --prefix=/usr/local/php-5.5 && \
make && make install && make clean && cd ..
RUN wget http://am1.php.net/get/php-5.6.30.tar.bz2/from/this/mirror
RUN mv mirror php-5.6.30.tar.bz2
RUN tar -xvf php-5.6.30.tar.bz2
RUN cd php-5.6.30 && ./configure --enable-maintainer-zts --prefix=/usr/local/php-5.6-zts && \
make && make install && cd ..
RUN cd php-5.6.30 && make clean && ./configure --enable-bcmath --prefix=/usr/local/php-5.6 && \
make && make install && cd ..
RUN wget http://am1.php.net/get/php-7.0.18.tar.bz2/from/this/mirror
RUN mv mirror php-7.0.18.tar.bz2
RUN tar -xvf php-7.0.18.tar.bz2
RUN cd php-7.0.18 && ./configure --enable-maintainer-zts --prefix=/usr/local/php-7.0-zts && \
make && make install && cd ..
RUN cd php-7.0.18 && make clean && ./configure --enable-bcmath --prefix=/usr/local/php-7.0 && \
make && make install && cd ..
RUN wget http://am1.php.net/get/php-7.1.4.tar.bz2/from/this/mirror
RUN mv mirror php-7.1.4.tar.bz2
RUN tar -xvf php-7.1.4.tar.bz2
RUN cd php-7.1.4 && ./configure --enable-maintainer-zts --prefix=/usr/local/php-7.1-zts && \
make && make install && cd ..
RUN cd php-7.1.4 && make clean && ./configure --enable-bcmath --prefix=/usr/local/php-7.1 && \
make && make install && cd ..
RUN php -r "copy('https://getcomposer.org/installer', 'composer-setup.php');"
RUN php composer-setup.php
RUN mv composer.phar /usr/bin/composer
RUN php -r "unlink('composer-setup.php');"
RUN composer config -g -- disable-tls true
RUN composer config -g -- secure-http false
RUN cd /tmp && \
git clone https://github.com/google/protobuf.git && \
cd protobuf/php && \
git reset --hard 49b44bff2b6257a119f9c6a342d6151c736586b8 && \
ln -sfn /usr/local/php-5.5/bin/php /usr/bin/php && \
ln -sfn /usr/local/php-5.5/bin/php-config /usr/bin/php-config && \
ln -sfn /usr/local/php-5.5/bin/phpize /usr/bin/phpize && \
composer install && \
mv vendor /usr/local/vendor-5.5 && \
ln -sfn /usr/local/php-5.6/bin/php /usr/bin/php && \
ln -sfn /usr/local/php-5.6/bin/php-config /usr/bin/php-config && \
ln -sfn /usr/local/php-5.6/bin/phpize /usr/bin/phpize && \
composer install && \
mv vendor /usr/local/vendor-5.6 && \
ln -sfn /usr/local/php-7.0/bin/php /usr/bin/php && \
ln -sfn /usr/local/php-7.0/bin/php-config /usr/bin/php-config && \
ln -sfn /usr/local/php-7.0/bin/phpize /usr/bin/phpize && \
composer install && \
mv vendor /usr/local/vendor-7.0 && \
ln -sfn /usr/local/php-7.1/bin/php /usr/bin/php && \
ln -sfn /usr/local/php-7.1/bin/php-config /usr/bin/php-config && \
ln -sfn /usr/local/php-7.1/bin/phpize /usr/bin/phpize && \
composer install && \
mv vendor /usr/local/vendor-7.1
##################
# Python dependencies
# These packages exist in apt-get, but their versions are too old, so we have
# to get updates from pip.
RUN pip install pip --upgrade
RUN pip install virtualenv tox yattag
##################
# Prepare ccache
RUN ln -s /usr/bin/ccache /usr/local/bin/gcc
RUN ln -s /usr/bin/ccache /usr/local/bin/g++
RUN ln -s /usr/bin/ccache /usr/local/bin/cc
RUN ln -s /usr/bin/ccache /usr/local/bin/c++
RUN ln -s /usr/bin/ccache /usr/local/bin/clang
RUN ln -s /usr/bin/ccache /usr/local/bin/clang++
# Define the default command.
CMD ["bash"]

View File

@ -4,15 +4,20 @@
# running the "pull request 32" project:
#
# This script selects a specific Dockerfile (for building a Docker image) and
# a script to run inside that image. Then we delegate to the general
# build_and_run_docker.sh script.
# a script to run inside that image.
set -ex
# Change to repo root
cd $(dirname $0)/../../..
GIT_REPO_ROOT=$(pwd)
export DOCKERHUB_ORGANIZATION=protobuftesting
export DOCKERFILE_DIR=kokoro/linux/dockerfile/test/php_32bit
export DOCKER_RUN_SCRIPT=kokoro/linux/pull_request_in_docker.sh
export OUTPUT_DIR=testoutput
export TEST_SET="php_all_32"
./kokoro/linux/build_and_run_docker.sh
CONTAINER_IMAGE=gcr.io/protobuf-build/php/32bit@sha256:824cbdff02ee543eb69ee4b02c8c58cc7887f70f49e41725a35765d92a898b4f
git submodule update --init --recursive
docker run \
"$@" \
-v $GIT_REPO_ROOT:/workspace \
$CONTAINER_IMAGE \
bash -l "/workspace/kokoro/linux/32-bit/test_php.sh"

View File

@ -0,0 +1,51 @@
#!/bin/bash
set -eux
# Change to repo root
cd $(dirname $0)/../../..
use_php() {
VERSION=$1
export PATH=/usr/local/php-${VERSION}/bin:$PATH
}
build_php() {
use_php $1
pushd php
rm -rf vendor
php -v
php -m
composer update
composer test
popd
}
test_php_c() {
pushd php
rm -rf vendor
php -v
php -m
composer update
composer test_c
popd
}
build_php_c() {
use_php $1
test_php_c
}
cmake .
cmake --build . --target protoc -- -j20
export PROTOC=$(pwd)/protoc
build_php 7.0
build_php 7.1
build_php 7.4
build_php_c 7.0
build_php_c 7.1
build_php_c 7.4
build_php_c 7.1-zts
build_php_c 7.2-zts
build_php_c 7.5-zts

View File

@ -6,6 +6,3 @@ set -ex
cmake -DCMAKE_POSITION_INDEPENDENT_CODE=ON -Dprotobuf_WITH_ZLIB=0 .
make -j8
# The Java build setup expects the protoc binary to be in the src/ directory.
ln -s $PWD/protoc ./src/protoc

View File

@ -15,10 +15,6 @@ git submodule update --init --recursive
cmake -DCMAKE_POSITION_INDEPENDENT_CODE=ON -Dprotobuf_WITH_ZLIB=0 .
make -j8
# Copy lib files to the expected location.
mkdir -p src/.libs
ln -f *.a src/.libs/
# create a simple shell wrapper that runs crosscompiled protoc under qemu
echo '#!/bin/bash' >protoc_qemu_wrapper.sh
echo 'exec qemu-aarch64 "../protoc" "$@"' >>protoc_qemu_wrapper.sh

44
kokoro/linux/bazel.sh Executable file
View File

@ -0,0 +1,44 @@
#!/bin/bash
set -ex
if [[ -z "${CONTAINER_IMAGE}" ]]; then
CONTAINER_IMAGE=gcr.io/protobuf-build/bazel/linux@sha256:2bfd061284eff8234f2fcca16d71d43c69ccf3a22206628b54c204a6a9aac277
fi
cd $(dirname $0)/../..
GIT_REPO_ROOT=`pwd`
rm -rf $GIT_REPO_ROOT/logs
ENVS=()
# Check for specific versions pinned to the docker image. In these cases we
# want to forward the environment variable to tests, so that they can verify
# that the correct version is being picked up by Bazel.
ENVS+=("--test_env=KOKORO_JAVA_VERSION")
ENVS+=("--test_env=KOKORO_PYTHON_VERSION")
ENVS+=("--test_env=KOKORO_RUBY_VERSION")
if [ -n "$BAZEL_ENV" ]; then
for env in $BAZEL_ENV; do
ENVS+="--action_env=${env}"
done
fi
tmpfile=$(mktemp -u)
docker run \
--cidfile $tmpfile \
-v $GIT_REPO_ROOT:/workspace \
$CONTAINER_IMAGE \
test \
--keep_going \
--test_output=streamed \
${ENVS[@]} \
$PLATFORM_CONFIG \
$BAZEL_EXTRA_FLAGS \
$BAZEL_TARGETS
# Save logs for Kokoro
docker cp \
`cat $tmpfile`:/workspace/logs $KOKORO_ARTIFACTS_DIR
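The Kokoro configs below drive this wrapper entirely through environment variables; a sketch of an equivalent local invocation (the targets and flags here are illustrative, and the script's default container image is used):
```bash
# Sketch: exercise kokoro/linux/bazel.sh locally with the same knobs the
# Kokoro configs set below.
export BAZEL_TARGETS="//src/..."
export BAZEL_EXTRA_FLAGS="--test_output=errors"
export KOKORO_ARTIFACTS_DIR=/tmp/kokoro-artifacts
mkdir -p "$KOKORO_ARTIFACTS_DIR"
kokoro/linux/bazel.sh
```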

View File

@ -1,44 +0,0 @@
#!/bin/bash
#
# Build file to set up and run tests
set -eu
# Install Bazel 4.0.0.
use_bazel.sh 4.0.0
bazel version
# Change to repo root
cd $(dirname $0)/../../..
# Get kokoro scripts from repo root by default.
: ${SCRIPT_ROOT:=$(pwd)}
source ${SCRIPT_ROOT}/kokoro/common/pyenv.sh
# Disabled for now, re-enable if appropriate.
# //:build_files_updated_unittest \
bazel_args=(
test
--keep_going
--copt=-Werror
--host_copt=-Werror
--test_output=errors
--
//...
-//objectivec/... # only works on macOS
@com_google_protobuf_examples//...
)
${SCRIPT_ROOT}/kokoro/common/bazel_wrapper.sh "${bazel_args[@]}"
# Verify that we can build successfully from generated tar files.
(
pyenv versions
pyenv shell 2.7.9 # python2 required for old googletest autotools support
git submodule update --init --recursive
./autogen.sh && ./configure && make -j$(nproc) dist
)
DIST=`ls *.tar.gz`
tar -xf $DIST
cd ${DIST//.tar.gz}
${SCRIPT_ROOT}/kokoro/common/bazel_wrapper.sh build //:protobuf //:protobuf_java

View File

@ -1,9 +0,0 @@
# Common config shared by presubmit and continuous.
bazel_setting: {
project_id: "protobuf-build"
bes_backend_address: "buildeventservice.googleapis.com"
foundry_backend_address: "remotebuildexecution.googleapis.com"
upsalite_frontend_address: "https://source.cloud.google.com"
local_execution: true
}

View File

@ -1,5 +1,16 @@
# Config file for running tests in Kokoro
# Location of the build script in repository
build_file: "protobuf/kokoro/linux/bazel/build.sh"
build_file: "protobuf/kokoro/linux/bazel.sh"
timeout_mins: 15
env_vars {
key: "BAZEL_TARGETS"
value: "//src/..."
}
action {
define_artifacts {
regex: "**/sponge_log.*"
}
}

View File

@ -1,5 +1,16 @@
# Config file for running tests in Kokoro
# Config file for running C++ Bazel tests in Kokoro
# Location of the build script in repository
build_file: "protobuf/kokoro/linux/bazel/build.sh"
build_file: "protobuf/kokoro/linux/bazel.sh"
timeout_mins: 15
env_vars {
key: "BAZEL_TARGETS"
value: "//src/..."
}
action {
define_artifacts {
regex: "**/sponge_log.*"
}
}

View File

@ -1,18 +0,0 @@
#!/bin/bash
#
# This is the top-level script we give to Kokoro as the entry point for
# running the "pull request" project:
#
# This script selects a specific Dockerfile (for building a Docker image) and
# a script to run inside that image. Then we delegate to the general
# build_and_run_docker.sh script.
# Change to repo root
cd $(dirname $0)/../../..
export DOCKERHUB_ORGANIZATION=protobuftesting
export DOCKERFILE_DIR=kokoro/linux/dockerfile/test/java_stretch
export DOCKER_RUN_SCRIPT=kokoro/linux/pull_request_in_docker.sh
export OUTPUT_DIR=testoutput
export TEST_SET="benchmark"
./kokoro/linux/build_and_run_docker.sh

View File

@ -1,7 +1,7 @@
# Config file for running tests in Kokoro
# Location of the build script in repository
build_file: "protobuf/kokoro/linux/benchmark/build.sh"
build_file: "protobuf/kokoro/linux/benchmark/run.sh"
timeout_mins: 240
action {

View File

@ -1,7 +1,11 @@
#!/bin/bash
#
# Install Bazel 4.0.0.
use_bazel.sh 4.0.0
# Change to repo root
cd $(dirname $0)/../../..
SCRIPT_ROOT=$(pwd)
set -ex
@ -18,82 +22,37 @@ datasets=$(for file in $(find . -type f -name "dataset.*.pb" -not -path "./tmp/*
echo $datasets
popd
# build Python protobuf
./autogen.sh
./configure CXXFLAGS="-fPIC -O2"
make -j8
pushd python
python3 -m venv env
source env/bin/activate
python3 setup.py build --cpp_implementation
pip3 install --install-option="--cpp_implementation" .
popd
# build and run Python benchmark
# We do this before building protobuf C++ since C++ build
# will rewrite some libraries used by protobuf python.
pushd benchmarks
make python-pure-python-benchmark
make python-cpp-reflection-benchmark
make -j8 python-cpp-generated-code-benchmark
echo "[" > tmp/python_result.json
echo "benchmarking pure python..."
./python-pure-python-benchmark --json --behavior_prefix="pure-python-benchmark" $datasets >> tmp/python_result.json
echo "," >> "tmp/python_result.json"
${SCRIPT_ROOT}/kokoro/common/bazel_wrapper.sh run //benchmarks/python:python_benchmark -- \
--json --behavior_prefix="pure-python-benchmark" $datasets > /tmp/python1.json
echo "benchmarking python cpp reflection..."
env LD_LIBRARY_PATH="${repo_root}/src/.libs" ./python-cpp-reflection-benchmark --json --behavior_prefix="cpp-reflection-benchmark" $datasets >> tmp/python_result.json
echo "," >> "tmp/python_result.json"
${SCRIPT_ROOT}/kokoro/common/bazel_wrapper.sh run //benchmarks/python:python_benchmark --define=use_fast_cpp_protos=true -- \
--json --behavior_prefix="cpp-reflection-benchmark" $datasets > /tmp/python2.json
echo "benchmarking python cpp generated code..."
env LD_LIBRARY_PATH="${repo_root}/src/.libs" ./python-cpp-generated-code-benchmark --json --behavior_prefix="cpp-generated-code-benchmark" $datasets >> tmp/python_result.json
echo "]" >> "tmp/python_result.json"
popd
${SCRIPT_ROOT}/kokoro/common/bazel_wrapper.sh run //benchmarks/python:python_benchmark --define=use_fast_cpp_protos=true -- \
--json --cpp_generated --behavior_prefix="cpp-generated-code-benchmark" $datasets >> /tmp/python3.json
# build CPP protobuf
./configure
make clean && make -j8
pushd java
mvn package -B -Dmaven.test.skip=true
popd
pushd benchmarks
jq -s . /tmp/python1.json /tmp/python2.json /tmp/python3.json > python_result.json
# build and run C++ benchmark
# "make clean" deletes the contents of the tmp/ directory, so we move it elsewhere and then restore it once build is done.
# TODO(jtattermusch): find a less clumsy way of protecting python_result.json contents
mv tmp/python_result.json . && make clean && make -j8 cpp-benchmark && mv python_result.json tmp
echo "benchmarking cpp..."
env ./cpp-benchmark --benchmark_min_time=5.0 --benchmark_out_format=json --benchmark_out="tmp/cpp_result.json" $datasets
# TODO(jtattermusch): add benchmarks for https://github.com/protocolbuffers/protobuf-go.
# The original benchmarks for https://github.com/golang/protobuf were removed
# because:
# * they were broken and haven't been producing results for a long time
# * the https://github.com/golang/protobuf implementation has been superseded by
# https://github.com/protocolbuffers/protobuf-go
${SCRIPT_ROOT}/kokoro/common/bazel_wrapper.sh run //benchmarks/cpp:cpp_benchmark -- \
--benchmark_min_time=5.0 --benchmark_out_format=json --benchmark_out="${repo_root}/cpp_result.json" $datasets
# build and run java benchmark (java 11 is required)
make java-benchmark
echo "benchmarking java..."
./java-benchmark -Cresults.file.options.file="tmp/java_result.json" $datasets
# TODO(jtattermusch): re-enable JS benchmarks once https://github.com/protocolbuffers/protobuf/issues/8747 is fixed.
# build and run js benchmark
# make js-benchmark
# echo "benchmarking js..."
# ./js-benchmark $datasets --json_output=$(pwd)/tmp/node_result.json
# TODO(jtattermusch): add php-c-benchmark. Currently its build is broken.
${SCRIPT_ROOT}/kokoro/common/bazel_wrapper.sh run //benchmarks/java:java_benchmark -- \
-Cresults.file.options.file="${repo_root}/java_result.json" $datasets
# persist the raw results in the build job log (for better debuggability)
cat tmp/cpp_result.json
cat tmp/java_result.json
cat tmp/python_result.json
cat cpp_result.json
cat java_result.json
cat python_result.json
# print the postprocessed results to the build job log
# TODO(jtattermusch): re-enable uploading results to bigquery (it is currently broken)
make python_add_init
env LD_LIBRARY_PATH="${repo_root}/src/.libs" python3 -m util.result_parser \
-cpp="../tmp/cpp_result.json" -java="../tmp/java_result.json" -python="../tmp/python_result.json"
popd
bazel run //benchmarks/util:result_parser -- \
-cpp="${repo_root}/cpp_result.json" \
-java="${repo_root}/java_result.json" \
-python="${repo_root}/python_result.json"

View File

@ -1,64 +0,0 @@
#!/bin/bash
#
# Builds docker image and runs a command under it.
# This is a generic script that is configured with the following variables:
#
# DOCKERHUB_ORGANIZATION - The organization on docker hub storing the
# Dockerfile.
# DOCKERFILE_DIR - Directory in which Dockerfile file is located.
# DOCKER_RUN_SCRIPT - Script to run under docker (relative to protobuf repo root)
# OUTPUT_DIR - Directory that will be copied from inside docker after finishing.
# $@ - Extra args to pass to docker run
set -ex
cd $(dirname $0)/../..
git_root=$(pwd)
cd -
# Use image name based on Dockerfile sha1
if [ -z "$DOCKERHUB_ORGANIZATION" ]
then
DOCKERHUB_ORGANIZATION=grpctesting/protobuf
DOCKER_IMAGE_NAME=${DOCKERHUB_ORGANIZATION}_$(sha1sum $DOCKERFILE_DIR/Dockerfile | cut -f1 -d\ )
else
# TODO(teboring): Remove this when all tests have been migrated to separate
# docker images.
DOCKERFILE_PREFIX=$(basename $DOCKERFILE_DIR)
DOCKER_IMAGE_NAME=${DOCKERHUB_ORGANIZATION}/${DOCKERFILE_PREFIX}_$(sha1sum $DOCKERFILE_DIR/Dockerfile | cut -f1 -d\ )
fi
# Pull dockerimage from Dockerhub. This sometimes fails intermittently, so we
# keep trying until we succeed.
until docker pull $DOCKER_IMAGE_NAME; do sleep 10; done
# Ensure existence of ccache directory
CCACHE_DIR=/tmp/protobuf-ccache
mkdir -p $CCACHE_DIR
# Choose random name for docker container
CONTAINER_NAME="build_and_run_docker_$(uuidgen)"
echo $git_root
# Run command inside docker
docker run \
"$@" \
-e CCACHE_DIR=$CCACHE_DIR \
-e KOKORO_BUILD_NUMBER=$KOKORO_BUILD_NUMBER \
-e KOKORO_BUILD_ID=$KOKORO_BUILD_ID \
-e EXTERNAL_GIT_ROOT="/var/local/kokoro/protobuf" \
-e TEST_SET="$TEST_SET" \
-v "$git_root:/var/local/kokoro/protobuf:ro" \
-v $CCACHE_DIR:$CCACHE_DIR \
-w /var/local/git/protobuf \
--name=$CONTAINER_NAME \
$DOCKER_IMAGE_NAME \
bash -l "/var/local/kokoro/protobuf/$DOCKER_RUN_SCRIPT" || FAILED="true"
# remove the container, possibly killing it first
docker rm -f $CONTAINER_NAME || true
[ -z "$FAILED" ] || {
exit 1
}

View File

@ -1,11 +0,0 @@
#!/bin/bash
#
# This is the top-level script we give to Kokoro as the entry point for
# running the "continuous" and "presubmit" jobs.
set -ex
# Change to repo root
cd $(dirname $0)/../../..
kokoro/linux/aarch64/test_cpp_aarch64.sh

View File

@ -1,5 +1,21 @@
# Config file for running tests in Kokoro
# Location of the build script in repository
build_file: "protobuf/kokoro/linux/cpp_aarch64/build.sh"
build_file: "protobuf/kokoro/linux/bazel.sh"
timeout_mins: 120
env_vars {
key: "CONTAINER_IMAGE"
value: "gcr.io/protobuf-build/emulation/linux:aarch64-4e847d7a01c1792471b6dd985ab0bf2677332e6f"
}
env_vars {
key: "BAZEL_TARGETS"
value: "//src/..."
}
action {
define_artifacts {
regex: "**/sponge_log.*"
}
}

View File

@ -1,5 +1,21 @@
# Config file for running tests in Kokoro
# Location of the build script in repository
build_file: "protobuf/kokoro/linux/cpp_aarch64/build.sh"
build_file: "protobuf/kokoro/linux/bazel.sh"
timeout_mins: 120
env_vars {
key: "CONTAINER_IMAGE"
value: "gcr.io/protobuf-build/emulation/linux:aarch64-4e847d7a01c1792471b6dd985ab0bf2677332e6f"
}
env_vars {
key: "BAZEL_TARGETS"
value: "//src/..."
}
action {
define_artifacts {
regex: "**/sponge_log.*"
}
}

View File

@ -1,13 +0,0 @@
#!/bin/bash
#
# Build file to set up and run tests
# Change to repo root
cd $(dirname $0)/../../..
export DOCKERHUB_ORGANIZATION=protobuftesting
export DOCKERFILE_DIR=kokoro/linux/dockerfile/test/cpp_tcmalloc
export DOCKER_RUN_SCRIPT=kokoro/linux/pull_request_in_docker.sh
export OUTPUT_DIR=testoutput
export TEST_SET="cpp_tcmalloc"
./kokoro/linux/build_and_run_docker.sh

View File

@ -1,5 +1,21 @@
# Config file for running tests in Kokoro
# Location of the build script in repository
build_file: "protobuf/kokoro/linux/cpp_tcmalloc/build.sh"
build_file: "protobuf/kokoro/linux/bazel.sh"
timeout_mins: 1440
env_vars {
key: "CONTAINER_IMAGE"
value: "gcr.io/protobuf-build/tcmalloc/linux:64e8944e4f18d7d6c9649112a8a93be57e693cd8"
}
env_vars {
key: "BAZEL_TARGETS"
value: "//src/..."
}
action {
define_artifacts {
regex: "**/sponge_log.*"
}
}

View File

@ -1,5 +1,21 @@
# Config file for running tests in Kokoro
# Location of the build script in repository
build_file: "protobuf/kokoro/linux/cpp_tcmalloc/build.sh"
build_file: "protobuf/kokoro/linux/bazel.sh"
timeout_mins: 1440
env_vars {
key: "CONTAINER_IMAGE"
value: "gcr.io/protobuf-build/tcmalloc/linux:64e8944e4f18d7d6c9649112a8a93be57e693cd8"
}
env_vars {
key: "BAZEL_TARGETS"
value: "//src/..."
}
action {
define_artifacts {
regex: "**/sponge_log.*"
}
}

View File

@ -1,18 +0,0 @@
#!/bin/bash
#
# This is the top-level script we give to Kokoro as the entry point for
# running the "pull request" project:
#
# This script selects a specific Dockerfile (for building a Docker image) and
# a script to run inside that image. Then we delegate to the general
# build_and_run_docker.sh script.
# Change to repo root
cd $(dirname $0)/../../..
export DOCKERHUB_ORGANIZATION=protobuftesting
export DOCKERFILE_DIR=kokoro/linux/dockerfile/test/csharp
export DOCKER_RUN_SCRIPT=kokoro/linux/pull_request_in_docker.sh
export OUTPUT_DIR=testoutput
export TEST_SET="csharp"
./kokoro/linux/build_and_run_docker.sh

View File

@ -1,5 +1,27 @@
# Config file for running tests in Kokoro
# Location of the build script in repository
build_file: "protobuf/kokoro/linux/csharp/build.sh"
build_file: "protobuf/kokoro/linux/bazel.sh"
timeout_mins: 1440
env_vars {
key: "CONTAINER_IMAGE"
value: "gcr.io/protobuf-build/csharp/linux:3.1.415-6.0.100-6bbe70439ba5b0404bb12662cebc0296909389fa"
}
env_vars {
key: "BAZEL_TARGETS"
value: "//csharp/..."
}
env_vars {
key: "BAZEL_EXTRA_FLAGS"
value: "--action_env=DOTNET_CLI_TELEMETRY_OPTOUT=1 "
"--test_env=DOTNET_CLI_HOME=/home/bazel"
}
action {
define_artifacts {
regex: "**/sponge_log.*"
}
}

Some files were not shown because too many files have changed in this diff.