diff --git a/packager/third_party/protobuf/.gitignore b/packager/third_party/protobuf/.gitignore new file mode 100644 index 0000000000..27eeb9c224 --- /dev/null +++ b/packager/third_party/protobuf/.gitignore @@ -0,0 +1,117 @@ +# autogen.sh-generated files +Makefile.in +src/Makefile.in +config.guess +config.h.in +config.sub +configure +depcomp +install-sh +ltmain.sh +missing + +aclocal.m4 +m4/libtool.m4 +m4/ltoptions.m4 +m4/ltsugar.m4 +m4/ltversion.m4 +m4/lt~obsolete.m4 +autom4te.cache + +# downloaded files +gmock + +# in-tree configure-generated files +Makefile +src/Makefile +/config.h +config.log +config.status + +libtool +protobuf-lite.pc +protobuf.pc +.deps +stamp-h1 + +# in-tree build products +*.o +*.lo +*.la +src/.libs +*.so + +.dirstamp + +any_test.pb.* +map*unittest.pb.* +unittest*.pb.* +cpp_test*.pb.* +src/google/protobuf/util/**/*.pb.cc +src/google/protobuf/util/**/*.pb.h + +*.pyc +*.egg-info +*_pb2.py +python/*.egg +python/.eggs/ +python/.tox +python/build/ +python/google/protobuf/compiler/ +python/google/protobuf/util/ + +src/protoc +src/unittest_proto_middleman + +# Generated test scaffolding +src/protobuf*-test +src/test_plugin +src/testzip.* +src/zcg*zip +ar-lib + +test-driver +compile + +src/**/*.log +src/**/*.trs + +# JavaBuild output. +java/target +javanano/target + +# Windows native output. +cmake/build +build_msvc + +# NuGet packages: we want the repository configuration, but not the +# packages themselves. +/csharp/src/packages/*/ + +# Directories created by opening the Objective C Xcode projects. +objectivec/ProtocolBuffers_OSX.xcodeproj/project.xcworkspace/xcuserdata/ +objectivec/ProtocolBuffers_OSX.xcodeproj/project.xcworkspace/xcshareddata/ProtocolBuffers_OSX.xccheckout +objectivec/ProtocolBuffers_OSX.xcodeproj/xcuserdata/ +objectivec/ProtocolBuffers_iOS.xcodeproj/project.xcworkspace/xcuserdata/ +objectivec/ProtocolBuffers_iOS.xcodeproj/project.xcworkspace/xcshareddata/ProtocolBuffers_iOS.xccheckout +objectivec/ProtocolBuffers_iOS.xcodeproj/xcuserdata/ +# OS X's Finder creates these for state about opened windows/etc. +**/.DS_Store + +# Comformance test output +conformance/.libs/ +conformance/com/ +conformance/conformance-cpp +conformance/conformance-csharp +conformance/conformance-java +conformance/conformance-objc +conformance/conformance-test-runner +conformance/conformance.pb.cc +conformance/conformance.pb.h +conformance/Conformance.pbobjc.h +conformance/Conformance.pbobjc.m +conformance/conformance.rb +conformance/google/ +conformance/javac_middleman +conformance/lite/ +conformance/protoc_middleman diff --git a/packager/third_party/protobuf/.travis.yml b/packager/third_party/protobuf/.travis.yml new file mode 100644 index 0000000000..bcf3851bc2 --- /dev/null +++ b/packager/third_party/protobuf/.travis.yml @@ -0,0 +1,84 @@ +sudo: required +# Note: travis currently does not support listing more than one language so +# this cheats and claims to only be cpp. If they add multiple language +# support, this should probably get updated to install steps and/or +# rvm/gemfile/jdk/etc. entries rather than manually doing the work. +language: cpp +os: + - linux + - osx +# The Objective C build needs Xcode 7.0 or later. 
+osx_image: xcode7.2 +script: + - ./tests.sh $CONFIG +env: + - CONFIG=cpp + - CONFIG=cpp_distcheck + - CONFIG=csharp + - CONFIG=golang + - CONFIG=java_jdk6 + - CONFIG=java_jdk7 + - CONFIG=java_oracle7 + - CONFIG=javanano_jdk6 + - CONFIG=javanano_jdk7 + - CONFIG=javanano_oracle7 + - CONFIG=javascript + - CONFIG=python + - CONFIG=python_cpp + - CONFIG=ruby19 + - CONFIG=ruby20 + - CONFIG=ruby21 + - CONFIG=ruby22 + - CONFIG=jruby +matrix: + exclude: + # It's nontrivial to programmatically install a new JDK from the command + # line on OS X, so we rely on testing on Linux for Java code. + - os: osx + env: CONFIG=java_jdk6 + - os: osx + env: CONFIG=java_jdk7 + - os: osx + env: CONFIG=java_oracle7 + - os: osx + env: CONFIG=javanano_jdk6 + - os: osx + env: CONFIG=javanano_jdk7 + - os: osx + env: CONFIG=javanano_oracle7 + # Requires installing mono, currently travis.sh is doing that with apt-get + # which doesn't work on OS X. + - os: osx + env: CONFIG=csharp + # Requires installing golang, currently travis.sh is doing that with apt-get + # which doesn't work on OS X. + - os: osx + env: CONFIG=golang + # Add into the matrix OS X tests of Objective C (needs Xcode, so it won't + # work on other platforms). These are split so it doesn't take as long to run. + include: + - os: osx + env: CONFIG=objectivec_ios + - os: osx + env: CONFIG=objectivec_osx + allow_failures: + # These currently do not work on OS X but are being worked on by @haberman. + - os: osx + env: CONFIG=ruby22 + - os: osx + env: CONFIG=jruby + # https://github.com/google/protobuf/issues/1253 - Started failing when + # we moved to an OS X image that is 10.11. + - os: osx + env: CONFIG=python_cpp + # xctool 0.2.8 seems to have a bug where it randomly kills tests saying + # they failed. + # https://github.com/facebook/xctool/issues/619 + # https://github.com/google/protobuf/issues/1232 + # travis updated their images to include 0.2.8: + # https://blog.travis-ci.com/2016-03-23-xcode-image-updates + # Mark the iOS test as flakey so these failures don't turn things red. + - os: osx + env: CONFIG=objectivec_ios +notifications: + email: false diff --git a/packager/third_party/protobuf/BUILD b/packager/third_party/protobuf/BUILD new file mode 100644 index 0000000000..8b1046b90d --- /dev/null +++ b/packager/third_party/protobuf/BUILD @@ -0,0 +1,709 @@ +# Bazel (http://bazel.io/) BUILD file for Protobuf. + +licenses(["notice"]) + +################################################################################ +# Protobuf Runtime Library +################################################################################ + +COPTS = [ + "-DHAVE_PTHREAD", + "-Wall", + "-Wwrite-strings", + "-Woverloaded-virtual", + "-Wno-sign-compare", + "-Wno-error=unused-function", +] + +config_setting( + name = "android", + values = { + "crosstool_top": "//external:android/crosstool", + }, +) + +# Android builds do not need to link in a separate pthread library. 
+LINK_OPTS = select({ + ":android": [], + "//conditions:default": ["-lpthread"], +}) + +load( + "protobuf", + "cc_proto_library", + "py_proto_library", + "internal_gen_well_known_protos_java", + "internal_protobuf_py_tests", +) + +config_setting( + name = "ios_armv7", + values = { + "ios_cpu": "armv7", + }, +) + +config_setting( + name = "ios_armv7s", + values = { + "ios_cpu": "armv7s", + }, +) + +config_setting( + name = "ios_arm64", + values = { + "ios_cpu": "arm64", + }, +) + +IOS_ARM_COPTS = COPTS + [ + "-DOS_IOS", + "-miphoneos-version-min=7.0", + "-arch armv7", + "-arch armv7s", + "-arch arm64", + "-D__thread=", + "-isysroot /Applications/Xcode.app/Contents/Developer/Platforms/iPhoneOS.platform/Developer/SDKs/iPhoneOS9.2.sdk/", +] + +cc_library( + name = "protobuf_lite", + srcs = [ + # AUTOGEN(protobuf_lite_srcs) + "src/google/protobuf/arena.cc", + "src/google/protobuf/arenastring.cc", + "src/google/protobuf/extension_set.cc", + "src/google/protobuf/generated_message_util.cc", + "src/google/protobuf/io/coded_stream.cc", + "src/google/protobuf/io/zero_copy_stream.cc", + "src/google/protobuf/io/zero_copy_stream_impl_lite.cc", + "src/google/protobuf/message_lite.cc", + "src/google/protobuf/repeated_field.cc", + "src/google/protobuf/stubs/atomicops_internals_x86_gcc.cc", + "src/google/protobuf/stubs/atomicops_internals_x86_msvc.cc", + "src/google/protobuf/stubs/bytestream.cc", + "src/google/protobuf/stubs/common.cc", + "src/google/protobuf/stubs/int128.cc", + "src/google/protobuf/stubs/once.cc", + "src/google/protobuf/stubs/status.cc", + "src/google/protobuf/stubs/statusor.cc", + "src/google/protobuf/stubs/stringpiece.cc", + "src/google/protobuf/stubs/stringprintf.cc", + "src/google/protobuf/stubs/structurally_valid.cc", + "src/google/protobuf/stubs/strutil.cc", + "src/google/protobuf/stubs/time.cc", + "src/google/protobuf/wire_format_lite.cc", + ], + hdrs = glob(["src/google/protobuf/**/*.h"]), + copts = select({ + ":ios_armv7": IOS_ARM_COPTS, + ":ios_armv7s": IOS_ARM_COPTS, + ":ios_arm64": IOS_ARM_COPTS, + "//conditions:default": COPTS, + }), + includes = ["src/"], + linkopts = LINK_OPTS, + visibility = ["//visibility:public"], +) + +cc_library( + name = "protobuf", + srcs = [ + # AUTOGEN(protobuf_srcs) + "src/google/protobuf/any.cc", + "src/google/protobuf/any.pb.cc", + "src/google/protobuf/api.pb.cc", + "src/google/protobuf/compiler/importer.cc", + "src/google/protobuf/compiler/parser.cc", + "src/google/protobuf/descriptor.cc", + "src/google/protobuf/descriptor.pb.cc", + "src/google/protobuf/descriptor_database.cc", + "src/google/protobuf/duration.pb.cc", + "src/google/protobuf/dynamic_message.cc", + "src/google/protobuf/empty.pb.cc", + "src/google/protobuf/extension_set_heavy.cc", + "src/google/protobuf/field_mask.pb.cc", + "src/google/protobuf/generated_message_reflection.cc", + "src/google/protobuf/io/gzip_stream.cc", + "src/google/protobuf/io/printer.cc", + "src/google/protobuf/io/strtod.cc", + "src/google/protobuf/io/tokenizer.cc", + "src/google/protobuf/io/zero_copy_stream_impl.cc", + "src/google/protobuf/map_field.cc", + "src/google/protobuf/message.cc", + "src/google/protobuf/reflection_ops.cc", + "src/google/protobuf/service.cc", + "src/google/protobuf/source_context.pb.cc", + "src/google/protobuf/struct.pb.cc", + "src/google/protobuf/stubs/mathlimits.cc", + "src/google/protobuf/stubs/substitute.cc", + "src/google/protobuf/text_format.cc", + "src/google/protobuf/timestamp.pb.cc", + "src/google/protobuf/type.pb.cc", + "src/google/protobuf/unknown_field_set.cc", + 
"src/google/protobuf/util/field_comparator.cc", + "src/google/protobuf/util/field_mask_util.cc", + "src/google/protobuf/util/internal/datapiece.cc", + "src/google/protobuf/util/internal/default_value_objectwriter.cc", + "src/google/protobuf/util/internal/error_listener.cc", + "src/google/protobuf/util/internal/field_mask_utility.cc", + "src/google/protobuf/util/internal/json_escaping.cc", + "src/google/protobuf/util/internal/json_objectwriter.cc", + "src/google/protobuf/util/internal/json_stream_parser.cc", + "src/google/protobuf/util/internal/object_writer.cc", + "src/google/protobuf/util/internal/proto_writer.cc", + "src/google/protobuf/util/internal/protostream_objectsource.cc", + "src/google/protobuf/util/internal/protostream_objectwriter.cc", + "src/google/protobuf/util/internal/type_info.cc", + "src/google/protobuf/util/internal/type_info_test_helper.cc", + "src/google/protobuf/util/internal/utility.cc", + "src/google/protobuf/util/json_util.cc", + "src/google/protobuf/util/message_differencer.cc", + "src/google/protobuf/util/time_util.cc", + "src/google/protobuf/util/type_resolver_util.cc", + "src/google/protobuf/wire_format.cc", + "src/google/protobuf/wrappers.pb.cc", + ], + hdrs = glob(["src/**/*.h"]), + copts = select({ + ":ios_armv7": IOS_ARM_COPTS, + ":ios_armv7s": IOS_ARM_COPTS, + ":ios_arm64": IOS_ARM_COPTS, + "//conditions:default": COPTS, + }), + includes = ["src/"], + linkopts = LINK_OPTS, + visibility = ["//visibility:public"], + deps = [":protobuf_lite"], +) + +objc_library( + name = "protobuf_objc", + hdrs = ["objectivec/GPBProtocolBuffers.h"], + includes = ["objectivec"], + non_arc_srcs = ["objectivec/GPBProtocolBuffers.m"], + visibility = ["//visibility:public"], +) + +RELATIVE_WELL_KNOWN_PROTOS = [ + # AUTOGEN(well_known_protos) + "google/protobuf/any.proto", + "google/protobuf/api.proto", + "google/protobuf/compiler/plugin.proto", + "google/protobuf/descriptor.proto", + "google/protobuf/duration.proto", + "google/protobuf/empty.proto", + "google/protobuf/field_mask.proto", + "google/protobuf/source_context.proto", + "google/protobuf/struct.proto", + "google/protobuf/timestamp.proto", + "google/protobuf/type.proto", + "google/protobuf/wrappers.proto", +] + +WELL_KNOWN_PROTOS = ["src/" + s for s in RELATIVE_WELL_KNOWN_PROTOS] + +filegroup( + name = "well_known_protos", + srcs = WELL_KNOWN_PROTOS, + visibility = ["//visibility:public"], +) + +cc_proto_library( + name = "cc_wkt_protos", + srcs = WELL_KNOWN_PROTOS, + include = "src", + default_runtime = ":protobuf", + internal_bootstrap_hack = 1, + protoc = ":protoc", + visibility = ["//visibility:public"], +) + +################################################################################ +# Protocol Buffers Compiler +################################################################################ + +cc_library( + name = "protoc_lib", + srcs = [ + # AUTOGEN(protoc_lib_srcs) + "src/google/protobuf/compiler/code_generator.cc", + "src/google/protobuf/compiler/command_line_interface.cc", + "src/google/protobuf/compiler/cpp/cpp_enum.cc", + "src/google/protobuf/compiler/cpp/cpp_enum_field.cc", + "src/google/protobuf/compiler/cpp/cpp_extension.cc", + "src/google/protobuf/compiler/cpp/cpp_field.cc", + "src/google/protobuf/compiler/cpp/cpp_file.cc", + "src/google/protobuf/compiler/cpp/cpp_generator.cc", + "src/google/protobuf/compiler/cpp/cpp_helpers.cc", + "src/google/protobuf/compiler/cpp/cpp_map_field.cc", + "src/google/protobuf/compiler/cpp/cpp_message.cc", + "src/google/protobuf/compiler/cpp/cpp_message_field.cc", + 
"src/google/protobuf/compiler/cpp/cpp_primitive_field.cc", + "src/google/protobuf/compiler/cpp/cpp_service.cc", + "src/google/protobuf/compiler/cpp/cpp_string_field.cc", + "src/google/protobuf/compiler/csharp/csharp_doc_comment.cc", + "src/google/protobuf/compiler/csharp/csharp_enum.cc", + "src/google/protobuf/compiler/csharp/csharp_enum_field.cc", + "src/google/protobuf/compiler/csharp/csharp_field_base.cc", + "src/google/protobuf/compiler/csharp/csharp_generator.cc", + "src/google/protobuf/compiler/csharp/csharp_helpers.cc", + "src/google/protobuf/compiler/csharp/csharp_map_field.cc", + "src/google/protobuf/compiler/csharp/csharp_message.cc", + "src/google/protobuf/compiler/csharp/csharp_message_field.cc", + "src/google/protobuf/compiler/csharp/csharp_primitive_field.cc", + "src/google/protobuf/compiler/csharp/csharp_reflection_class.cc", + "src/google/protobuf/compiler/csharp/csharp_repeated_enum_field.cc", + "src/google/protobuf/compiler/csharp/csharp_repeated_message_field.cc", + "src/google/protobuf/compiler/csharp/csharp_repeated_primitive_field.cc", + "src/google/protobuf/compiler/csharp/csharp_source_generator_base.cc", + "src/google/protobuf/compiler/csharp/csharp_wrapper_field.cc", + "src/google/protobuf/compiler/java/java_context.cc", + "src/google/protobuf/compiler/java/java_doc_comment.cc", + "src/google/protobuf/compiler/java/java_enum.cc", + "src/google/protobuf/compiler/java/java_enum_field.cc", + "src/google/protobuf/compiler/java/java_enum_field_lite.cc", + "src/google/protobuf/compiler/java/java_enum_lite.cc", + "src/google/protobuf/compiler/java/java_extension.cc", + "src/google/protobuf/compiler/java/java_extension_lite.cc", + "src/google/protobuf/compiler/java/java_field.cc", + "src/google/protobuf/compiler/java/java_file.cc", + "src/google/protobuf/compiler/java/java_generator.cc", + "src/google/protobuf/compiler/java/java_generator_factory.cc", + "src/google/protobuf/compiler/java/java_helpers.cc", + "src/google/protobuf/compiler/java/java_lazy_message_field.cc", + "src/google/protobuf/compiler/java/java_lazy_message_field_lite.cc", + "src/google/protobuf/compiler/java/java_map_field.cc", + "src/google/protobuf/compiler/java/java_map_field_lite.cc", + "src/google/protobuf/compiler/java/java_message.cc", + "src/google/protobuf/compiler/java/java_message_builder.cc", + "src/google/protobuf/compiler/java/java_message_builder_lite.cc", + "src/google/protobuf/compiler/java/java_message_field.cc", + "src/google/protobuf/compiler/java/java_message_field_lite.cc", + "src/google/protobuf/compiler/java/java_message_lite.cc", + "src/google/protobuf/compiler/java/java_name_resolver.cc", + "src/google/protobuf/compiler/java/java_primitive_field.cc", + "src/google/protobuf/compiler/java/java_primitive_field_lite.cc", + "src/google/protobuf/compiler/java/java_service.cc", + "src/google/protobuf/compiler/java/java_shared_code_generator.cc", + "src/google/protobuf/compiler/java/java_string_field.cc", + "src/google/protobuf/compiler/java/java_string_field_lite.cc", + "src/google/protobuf/compiler/javanano/javanano_enum.cc", + "src/google/protobuf/compiler/javanano/javanano_enum_field.cc", + "src/google/protobuf/compiler/javanano/javanano_extension.cc", + "src/google/protobuf/compiler/javanano/javanano_field.cc", + "src/google/protobuf/compiler/javanano/javanano_file.cc", + "src/google/protobuf/compiler/javanano/javanano_generator.cc", + "src/google/protobuf/compiler/javanano/javanano_helpers.cc", + "src/google/protobuf/compiler/javanano/javanano_map_field.cc", + 
"src/google/protobuf/compiler/javanano/javanano_message.cc", + "src/google/protobuf/compiler/javanano/javanano_message_field.cc", + "src/google/protobuf/compiler/javanano/javanano_primitive_field.cc", + "src/google/protobuf/compiler/js/js_generator.cc", + "src/google/protobuf/compiler/objectivec/objectivec_enum.cc", + "src/google/protobuf/compiler/objectivec/objectivec_enum_field.cc", + "src/google/protobuf/compiler/objectivec/objectivec_extension.cc", + "src/google/protobuf/compiler/objectivec/objectivec_field.cc", + "src/google/protobuf/compiler/objectivec/objectivec_file.cc", + "src/google/protobuf/compiler/objectivec/objectivec_generator.cc", + "src/google/protobuf/compiler/objectivec/objectivec_helpers.cc", + "src/google/protobuf/compiler/objectivec/objectivec_map_field.cc", + "src/google/protobuf/compiler/objectivec/objectivec_message.cc", + "src/google/protobuf/compiler/objectivec/objectivec_message_field.cc", + "src/google/protobuf/compiler/objectivec/objectivec_oneof.cc", + "src/google/protobuf/compiler/objectivec/objectivec_primitive_field.cc", + "src/google/protobuf/compiler/plugin.cc", + "src/google/protobuf/compiler/plugin.pb.cc", + "src/google/protobuf/compiler/python/python_generator.cc", + "src/google/protobuf/compiler/ruby/ruby_generator.cc", + "src/google/protobuf/compiler/subprocess.cc", + "src/google/protobuf/compiler/zip_writer.cc", + ], + copts = COPTS, + includes = ["src/"], + linkopts = LINK_OPTS, + visibility = ["//visibility:public"], + deps = [":protobuf"], +) + +cc_binary( + name = "protoc", + srcs = ["src/google/protobuf/compiler/main.cc"], + linkopts = LINK_OPTS, + visibility = ["//visibility:public"], + deps = [":protoc_lib"], +) + +################################################################################ +# Tests +################################################################################ + +RELATIVE_LITE_TEST_PROTOS = [ + # AUTOGEN(lite_test_protos) + "google/protobuf/map_lite_unittest.proto", + "google/protobuf/unittest_import_lite.proto", + "google/protobuf/unittest_import_public_lite.proto", + "google/protobuf/unittest_lite.proto", + "google/protobuf/unittest_no_arena_lite.proto", +] + +LITE_TEST_PROTOS = ["src/" + s for s in RELATIVE_LITE_TEST_PROTOS] + +RELATIVE_TEST_PROTOS = [ + # AUTOGEN(test_protos) + "google/protobuf/any_test.proto", + "google/protobuf/compiler/cpp/cpp_test_bad_identifiers.proto", + "google/protobuf/compiler/cpp/cpp_test_large_enum_value.proto", + "google/protobuf/map_proto2_unittest.proto", + "google/protobuf/map_unittest.proto", + "google/protobuf/unittest.proto", + "google/protobuf/unittest_arena.proto", + "google/protobuf/unittest_custom_options.proto", + "google/protobuf/unittest_drop_unknown_fields.proto", + "google/protobuf/unittest_embed_optimize_for.proto", + "google/protobuf/unittest_empty.proto", + "google/protobuf/unittest_enormous_descriptor.proto", + "google/protobuf/unittest_import.proto", + "google/protobuf/unittest_import_public.proto", + "google/protobuf/unittest_lite_imports_nonlite.proto", + "google/protobuf/unittest_mset.proto", + "google/protobuf/unittest_mset_wire_format.proto", + "google/protobuf/unittest_no_arena.proto", + "google/protobuf/unittest_no_arena_import.proto", + "google/protobuf/unittest_no_field_presence.proto", + "google/protobuf/unittest_no_generic_services.proto", + "google/protobuf/unittest_optimize_for.proto", + "google/protobuf/unittest_preserve_unknown_enum.proto", + "google/protobuf/unittest_preserve_unknown_enum2.proto", + "google/protobuf/unittest_proto3_arena.proto", + 
"google/protobuf/unittest_proto3_arena_lite.proto", + "google/protobuf/unittest_proto3_lite.proto", + "google/protobuf/unittest_well_known_types.proto", + "google/protobuf/util/internal/testdata/anys.proto", + "google/protobuf/util/internal/testdata/books.proto", + "google/protobuf/util/internal/testdata/default_value.proto", + "google/protobuf/util/internal/testdata/default_value_test.proto", + "google/protobuf/util/internal/testdata/field_mask.proto", + "google/protobuf/util/internal/testdata/maps.proto", + "google/protobuf/util/internal/testdata/oneofs.proto", + "google/protobuf/util/internal/testdata/struct.proto", + "google/protobuf/util/internal/testdata/timestamp_duration.proto", + "google/protobuf/util/json_format_proto3.proto", + "google/protobuf/util/message_differencer_unittest.proto", +] + +TEST_PROTOS = ["src/" + s for s in RELATIVE_TEST_PROTOS] + +cc_proto_library( + name = "cc_test_protos", + srcs = LITE_TEST_PROTOS + TEST_PROTOS, + include = "src", + default_runtime = ":protobuf", + protoc = ":protoc", + deps = [":cc_wkt_protos"], +) + +COMMON_TEST_SRCS = [ + # AUTOGEN(common_test_srcs) + "src/google/protobuf/arena_test_util.cc", + "src/google/protobuf/map_test_util.cc", + "src/google/protobuf/test_util.cc", + "src/google/protobuf/testing/file.cc", + "src/google/protobuf/testing/googletest.cc", +] + +cc_binary( + name = "test_plugin", + srcs = [ + # AUTOGEN(test_plugin_srcs) + "src/google/protobuf/compiler/mock_code_generator.cc", + "src/google/protobuf/compiler/test_plugin.cc", + "src/google/protobuf/testing/file.cc", + ], + deps = [ + ":protobuf", + ":protoc_lib", + "//external:gtest", + ], +) + +cc_test( + name = "protobuf_test", + srcs = COMMON_TEST_SRCS + [ + # AUTOGEN(test_srcs) + "src/google/protobuf/any_test.cc", + "src/google/protobuf/arena_unittest.cc", + "src/google/protobuf/arenastring_unittest.cc", + "src/google/protobuf/compiler/command_line_interface_unittest.cc", + "src/google/protobuf/compiler/cpp/cpp_bootstrap_unittest.cc", + "src/google/protobuf/compiler/cpp/cpp_plugin_unittest.cc", + "src/google/protobuf/compiler/cpp/cpp_unittest.cc", + "src/google/protobuf/compiler/cpp/metadata_test.cc", + "src/google/protobuf/compiler/csharp/csharp_generator_unittest.cc", + "src/google/protobuf/compiler/importer_unittest.cc", + "src/google/protobuf/compiler/java/java_doc_comment_unittest.cc", + "src/google/protobuf/compiler/java/java_plugin_unittest.cc", + "src/google/protobuf/compiler/mock_code_generator.cc", + "src/google/protobuf/compiler/objectivec/objectivec_helpers_unittest.cc", + "src/google/protobuf/compiler/parser_unittest.cc", + "src/google/protobuf/compiler/python/python_plugin_unittest.cc", + "src/google/protobuf/compiler/ruby/ruby_generator_unittest.cc", + "src/google/protobuf/descriptor_database_unittest.cc", + "src/google/protobuf/descriptor_unittest.cc", + "src/google/protobuf/drop_unknown_fields_test.cc", + "src/google/protobuf/dynamic_message_unittest.cc", + "src/google/protobuf/extension_set_unittest.cc", + "src/google/protobuf/generated_message_reflection_unittest.cc", + "src/google/protobuf/io/coded_stream_unittest.cc", + "src/google/protobuf/io/printer_unittest.cc", + "src/google/protobuf/io/tokenizer_unittest.cc", + "src/google/protobuf/io/zero_copy_stream_unittest.cc", + "src/google/protobuf/map_field_test.cc", + "src/google/protobuf/map_test.cc", + "src/google/protobuf/message_unittest.cc", + "src/google/protobuf/no_field_presence_test.cc", + "src/google/protobuf/preserve_unknown_enum_test.cc", + 
"src/google/protobuf/proto3_arena_lite_unittest.cc", + "src/google/protobuf/proto3_arena_unittest.cc", + "src/google/protobuf/proto3_lite_unittest.cc", + "src/google/protobuf/reflection_ops_unittest.cc", + "src/google/protobuf/repeated_field_reflection_unittest.cc", + "src/google/protobuf/repeated_field_unittest.cc", + "src/google/protobuf/stubs/bytestream_unittest.cc", + "src/google/protobuf/stubs/common_unittest.cc", + "src/google/protobuf/stubs/int128_unittest.cc", + "src/google/protobuf/stubs/once_unittest.cc", + "src/google/protobuf/stubs/status_test.cc", + "src/google/protobuf/stubs/statusor_test.cc", + "src/google/protobuf/stubs/stringpiece_unittest.cc", + "src/google/protobuf/stubs/stringprintf_unittest.cc", + "src/google/protobuf/stubs/structurally_valid_unittest.cc", + "src/google/protobuf/stubs/strutil_unittest.cc", + "src/google/protobuf/stubs/template_util_unittest.cc", + "src/google/protobuf/stubs/time_test.cc", + "src/google/protobuf/stubs/type_traits_unittest.cc", + "src/google/protobuf/text_format_unittest.cc", + "src/google/protobuf/unknown_field_set_unittest.cc", + "src/google/protobuf/util/field_comparator_test.cc", + "src/google/protobuf/util/field_mask_util_test.cc", + "src/google/protobuf/util/internal/default_value_objectwriter_test.cc", + "src/google/protobuf/util/internal/json_objectwriter_test.cc", + "src/google/protobuf/util/internal/json_stream_parser_test.cc", + "src/google/protobuf/util/internal/protostream_objectsource_test.cc", + "src/google/protobuf/util/internal/protostream_objectwriter_test.cc", + "src/google/protobuf/util/internal/type_info_test_helper.cc", + "src/google/protobuf/util/json_util_test.cc", + "src/google/protobuf/util/message_differencer_unittest.cc", + "src/google/protobuf/util/time_util_test.cc", + "src/google/protobuf/util/type_resolver_util_test.cc", + "src/google/protobuf/well_known_types_unittest.cc", + "src/google/protobuf/wire_format_unittest.cc", + ], + copts = COPTS, + data = [ + ":test_plugin", + ] + glob([ + "src/google/protobuf/**/*", + ]), + includes = [ + "src/", + ], + linkopts = LINK_OPTS, + deps = [ + ":cc_test_protos", + ":protobuf", + ":protoc_lib", + "//external:gtest_main", + ], +) + +################################################################################ +# Java support +################################################################################ +internal_gen_well_known_protos_java( + srcs = WELL_KNOWN_PROTOS, +) + +java_library( + name = "protobuf_java", + srcs = glob([ + "java/core/src/main/java/com/google/protobuf/*.java", + ]) + [ + ":gen_well_known_protos_java", + ], + visibility = ["//visibility:public"], +) + +java_library( + name = "protobuf_java_util", + srcs = glob([ + "java/util/src/main/java/com/google/protobuf/util/*.java", + ]), + deps = [ + "protobuf_java", + "//external:gson", + "//external:guava", + ], + visibility = ["//visibility:public"], +) + +################################################################################ +# Python support +################################################################################ + +py_library( + name = "python_srcs", + srcs = glob( + [ + "python/google/protobuf/*.py", + "python/google/protobuf/**/*.py", + ], + exclude = [ + "python/google/protobuf/internal/*_test.py", + "python/google/protobuf/internal/test_util.py", + ], + ), + srcs_version = "PY2AND3", + imports = ["python"], +) + +cc_binary( + name = "internal/_api_implementation.so", + srcs = ["python/google/protobuf/internal/api_implementation.cc"], + copts = COPTS + [ + 
"-DPYTHON_PROTO2_CPP_IMPL_V2", + ], + linkshared = 1, + linkstatic = 1, + deps = select({ + "//conditions:default": [], + ":use_fast_cpp_protos": ["//external:python_headers"], + }), +) + +cc_binary( + name = "pyext/_message.so", + srcs = glob([ + "python/google/protobuf/pyext/*.cc", + "python/google/protobuf/pyext/*.h", + ]), + copts = COPTS + [ + "-DGOOGLE_PROTOBUF_HAS_ONEOF=1", + ] + select({ + "//conditions:default": [], + ":allow_oversize_protos": ["-DPROTOBUF_PYTHON_ALLOW_OVERSIZE_PROTOS=1"], + }), + includes = [ + "python/", + "src/", + ], + linkshared = 1, + linkstatic = 1, + deps = [ + ":protobuf", + ] + select({ + "//conditions:default": [], + ":use_fast_cpp_protos": ["//external:python_headers"], + }), +) + +config_setting( + name = "use_fast_cpp_protos", + values = { + "define": "use_fast_cpp_protos=true", + }, +) + +config_setting( + name = "allow_oversize_protos", + values = { + "define": "allow_oversize_protos=true", + }, +) + +py_proto_library( + name = "protobuf_python", + srcs = WELL_KNOWN_PROTOS, + include = "src", + data = select({ + "//conditions:default": [], + ":use_fast_cpp_protos": [ + ":internal/_api_implementation.so", + ":pyext/_message.so", + ], + }), + default_runtime = "", + protoc = ":protoc", + py_libs = [ + ":python_srcs", + "//external:six" + ], + srcs_version = "PY2AND3", + visibility = ["//visibility:public"], +) + +py_proto_library( + name = "python_common_test_protos", + srcs = LITE_TEST_PROTOS + TEST_PROTOS, + include = "src", + default_runtime = "", + protoc = ":protoc", + srcs_version = "PY2AND3", + deps = [":protobuf_python"], +) + +py_proto_library( + name = "python_specific_test_protos", + srcs = glob([ + "python/google/protobuf/internal/*.proto", + "python/google/protobuf/internal/import_test_package/*.proto", + ]), + include = "python", + default_runtime = ":protobuf_python", + protoc = ":protoc", + srcs_version = "PY2AND3", + deps = [":python_common_test_protos"], +) + +py_library( + name = "python_tests", + srcs = glob( + [ + "python/google/protobuf/internal/*_test.py", + "python/google/protobuf/internal/test_util.py", + ], + ), + imports = ["python"], + srcs_version = "PY2AND3", + deps = [ + ":protobuf_python", + ":python_common_test_protos", + ":python_specific_test_protos", + ], +) + +internal_protobuf_py_tests( + name = "python_tests_batch", + data = glob([ + "src/google/protobuf/**/*", + ]), + modules = [ + "descriptor_database_test", + "descriptor_pool_test", + "descriptor_test", + "generator_test", + "json_format_test", + "message_factory_test", + "message_test", + "proto_builder_test", + "reflection_test", + "service_reflection_test", + "symbol_database_test", + "text_encoding_test", + "text_format_test", + "unknown_fields_test", + "wire_format_test", + ], + deps = [":python_tests"], +) diff --git a/packager/third_party/protobuf/BUILD.gn b/packager/third_party/protobuf/BUILD.gn index 72c94913d6..84c916a651 100644 --- a/packager/third_party/protobuf/BUILD.gn +++ b/packager/third_party/protobuf/BUILD.gn @@ -2,38 +2,45 @@ # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. -if (is_win) { - config_h_dir = "vsprojects" -} else { - config_h_dir = "." -} - config("protobuf_config") { - include_dirs = [ - "src", - config_h_dir, - ] + include_dirs = [ "src" ] defines = [ - "PROTOBUF_USE_DLLS", "GOOGLE_PROTOBUF_NO_RTTI", "GOOGLE_PROTOBUF_NO_STATIC_INITIALIZER", ] - - if (is_win) { - # TODO(jschuh): http://crbug.com/167187 size_t -> int. 
- cflags = [ "/wd4267" ] + if (!is_win) { + defines += [ "HAVE_PTHREAD" ] } } -# This condif should be applied to targets using generated code from the proto +config("protobuf_warnings") { + cflags = [] + if (is_clang) { + # protobuf-3 contains a few functions that are unused. + cflags += [ "-Wno-unused-function" ] + } +} + +if (is_component_build) { + config("protobuf_use_dlls") { + defines = [ "PROTOBUF_USE_DLLS" ] + } +} + +# This config should be applied to targets using generated code from the proto # compiler. It sets up the include directories properly. config("using_proto") { include_dirs = [ + "src", "$root_gen_dir/protoc_out", ] } protobuf_lite_sources = [ + "src/google/protobuf/arena.cc", + "src/google/protobuf/arena.h", + "src/google/protobuf/arenastring.cc", + "src/google/protobuf/arenastring.h", "src/google/protobuf/extension_set.cc", "src/google/protobuf/extension_set.h", "src/google/protobuf/generated_message_util.cc", @@ -45,146 +52,298 @@ protobuf_lite_sources = [ "src/google/protobuf/io/zero_copy_stream.h", "src/google/protobuf/io/zero_copy_stream_impl_lite.cc", "src/google/protobuf/io/zero_copy_stream_impl_lite.h", + "src/google/protobuf/map.h", + "src/google/protobuf/map_entry_lite.h", + "src/google/protobuf/map_field_lite.h", + "src/google/protobuf/map_type_handler.h", "src/google/protobuf/message_lite.cc", "src/google/protobuf/message_lite.h", "src/google/protobuf/repeated_field.cc", "src/google/protobuf/repeated_field.h", "src/google/protobuf/stubs/atomicops.h", + "src/google/protobuf/stubs/atomicops_internals_arm64_gcc.h", "src/google/protobuf/stubs/atomicops_internals_arm_gcc.h", + "src/google/protobuf/stubs/atomicops_internals_arm_qnx.h", "src/google/protobuf/stubs/atomicops_internals_atomicword_compat.h", + "src/google/protobuf/stubs/atomicops_internals_generic_gcc.h", "src/google/protobuf/stubs/atomicops_internals_macosx.h", "src/google/protobuf/stubs/atomicops_internals_mips_gcc.h", + "src/google/protobuf/stubs/atomicops_internals_pnacl.h", + "src/google/protobuf/stubs/atomicops_internals_power.h", + "src/google/protobuf/stubs/atomicops_internals_ppc_gcc.h", + "src/google/protobuf/stubs/atomicops_internals_solaris.h", + "src/google/protobuf/stubs/atomicops_internals_tsan.h", "src/google/protobuf/stubs/atomicops_internals_x86_gcc.cc", "src/google/protobuf/stubs/atomicops_internals_x86_gcc.h", "src/google/protobuf/stubs/atomicops_internals_x86_msvc.cc", "src/google/protobuf/stubs/atomicops_internals_x86_msvc.h", + "src/google/protobuf/stubs/atomic_sequence_num.h", + "src/google/protobuf/stubs/bytestream.cc", + "src/google/protobuf/stubs/bytestream.h", + "src/google/protobuf/stubs/callback.h", + "src/google/protobuf/stubs/casts.h", "src/google/protobuf/stubs/common.cc", "src/google/protobuf/stubs/common.h", + "src/google/protobuf/stubs/fastmem.h", "src/google/protobuf/stubs/hash.h", - "src/google/protobuf/stubs/map-util.h", + "src/google/protobuf/stubs/int128.cc", + "src/google/protobuf/stubs/int128.h", + "src/google/protobuf/stubs/logging.h", + "src/google/protobuf/stubs/macros.h", + "src/google/protobuf/stubs/map_util.h", + "src/google/protobuf/stubs/mutex.h", "src/google/protobuf/stubs/once.cc", "src/google/protobuf/stubs/once.h", "src/google/protobuf/stubs/platform_macros.h", - "src/google/protobuf/unknown_field_set.cc", - "src/google/protobuf/unknown_field_set.h", + "src/google/protobuf/stubs/port.h", + "src/google/protobuf/stubs/scoped_ptr.h", + "src/google/protobuf/stubs/shared_ptr.h", + "src/google/protobuf/stubs/status.cc", + 
"src/google/protobuf/stubs/status.h", + "src/google/protobuf/stubs/status_macros.h", + "src/google/protobuf/stubs/statusor.cc", + "src/google/protobuf/stubs/statusor.h", + "src/google/protobuf/stubs/stl_util.h", + "src/google/protobuf/stubs/stringpiece.cc", + "src/google/protobuf/stubs/stringpiece.h", + "src/google/protobuf/stubs/stringprintf.cc", + "src/google/protobuf/stubs/stringprintf.h", + "src/google/protobuf/stubs/structurally_valid.cc", + "src/google/protobuf/stubs/strutil.cc", + "src/google/protobuf/stubs/strutil.h", + "src/google/protobuf/stubs/template_util.h", + "src/google/protobuf/stubs/type_traits.h", + "src/google/protobuf/stubs/time.cc", + "src/google/protobuf/stubs/time.h", "src/google/protobuf/wire_format_lite.cc", "src/google/protobuf/wire_format_lite.h", "src/google/protobuf/wire_format_lite_inl.h", - "$config_h_dir/config.h", ] protobuf_lite_cflags = [] if (is_win) { protobuf_lite_cflags = [ "/wd4018", # signed/unsigned mismatch in comparison + "/wd4065", # switch statement contains 'default' but no 'case' labels + "/wd4146", # unary minus operator applied to unsigned type "/wd4244", # implicit conversion, possible loss of data - "/wd4355", # 'this' used in base member initializer list "/wd4267", # size_t to int truncation - "/wd4291", # no matching operator delete for a placement new + "/wd4291", # no matching operator delete for a placement new. + "/wd4305", # double to float truncation + "/wd4355", # 'this' used in base member initializer list + "/wd4506", # no definition for inline function (protobuf issue #240) + "/wd4715", # not all control paths return a value (fixed in trunk) ] } -source_set("protobuf_lite") { +component("protobuf_lite") { sources = protobuf_lite_sources configs -= [ "//build/config/compiler:chromium_code" ] - configs += [ "//build/config/compiler:no_chromium_code" ] + configs += [ + "//build/config/compiler:no_chromium_code", + + # Must be after no_chromium_code for warning flags to be ordered + # correctly. + ":protobuf_warnings", + ] + if (is_win) { configs -= [ "//build/config/win:lean_and_mean" ] } - direct_dependent_configs = [ ":protobuf_config" ] + + public_configs = [ + ":protobuf_config", + + # TODO(jschuh): crbug.com/167187 fix size_t to int truncations. + "//build/config/compiler:no_size_t_to_int_warning", + ] + + deps = [ + "//build/config/sanitizers:deps", + ] cflags = protobuf_lite_cflags # Required for component builds. See http://crbug.com/172800. - defines = [ "LIBPROTOBUF_EXPORTS" ] + if (is_component_build) { + public_configs += [ ":protobuf_use_dlls" ] + defines = [ "LIBPROTOBUF_EXPORTS" ] + } } # This is the full, heavy protobuf lib that's needed for c++ .protos that don't # specify the LITE_RUNTIME option. The protocol compiler itself (protoc) falls # into that category. Do not use in Chrome code. - -source_set("protobuf_full") { - visibility = ":*" # Prevent people from depending on this outside our file. +static_library("protobuf_full") { + # Prevent people from depending on this outside our file. 
+ visibility = [ ":*" ] sources = protobuf_lite_sources sources += [ - "src/google/protobuf/descriptor.h", - "src/google/protobuf/descriptor.pb.h", - "src/google/protobuf/descriptor_database.h", - "src/google/protobuf/dynamic_message.h", - "src/google/protobuf/generated_enum_reflection.h", - "src/google/protobuf/generated_message_reflection.h", - "src/google/protobuf/message.h", - "src/google/protobuf/reflection_ops.h", - "src/google/protobuf/service.h", - "src/google/protobuf/text_format.h", - "src/google/protobuf/wire_format.h", - "src/google/protobuf/io/gzip_stream.h", - "src/google/protobuf/io/printer.h", - "src/google/protobuf/io/tokenizer.h", - "src/google/protobuf/io/zero_copy_stream_impl.h", - "src/google/protobuf/compiler/code_generator.h", - "src/google/protobuf/compiler/command_line_interface.h", + "src/google/protobuf/any.cc", + "src/google/protobuf/any.h", + "src/google/protobuf/any.pb.cc", + "src/google/protobuf/any.pb.h", + "src/google/protobuf/api.pb.cc", + "src/google/protobuf/api.pb.h", + "src/google/protobuf/compiler/importer.cc", "src/google/protobuf/compiler/importer.h", - "src/google/protobuf/compiler/java/java_doc_comment.cc", - "src/google/protobuf/compiler/java/java_doc_comment.h", + "src/google/protobuf/compiler/parser.cc", "src/google/protobuf/compiler/parser.h", + "src/google/protobuf/descriptor.cc", + "src/google/protobuf/descriptor.h", + "src/google/protobuf/descriptor.pb.cc", + "src/google/protobuf/descriptor.pb.h", + "src/google/protobuf/descriptor_database.cc", + "src/google/protobuf/descriptor_database.h", + "src/google/protobuf/duration.pb.cc", + "src/google/protobuf/duration.pb.h", + "src/google/protobuf/dynamic_message.cc", + "src/google/protobuf/dynamic_message.h", + "src/google/protobuf/empty.pb.cc", + "src/google/protobuf/empty.pb.h", + "src/google/protobuf/extension_set_heavy.cc", + "src/google/protobuf/field_mask.pb.cc", + "src/google/protobuf/field_mask.pb.h", + "src/google/protobuf/generated_enum_reflection.h", + "src/google/protobuf/generated_enum_util.h", + "src/google/protobuf/generated_message_reflection.cc", + "src/google/protobuf/generated_message_reflection.h", - "src/google/protobuf/stubs/strutil.cc", - "src/google/protobuf/stubs/strutil.h", + # gzip_stream.cc pulls in zlib, but it's not actually used by protoc, just + # by test code, so instead of compiling zlib for the host, let's just + # exclude this. 
+ # "src/google/protobuf/io/gzip_stream.cc", + # "src/google/protobuf/io/gzip_stream.h", + + "src/google/protobuf/io/printer.cc", + "src/google/protobuf/io/printer.h", + "src/google/protobuf/io/strtod.cc", + "src/google/protobuf/io/strtod.h", + "src/google/protobuf/io/tokenizer.cc", + "src/google/protobuf/io/tokenizer.h", + "src/google/protobuf/io/zero_copy_stream_impl.cc", + "src/google/protobuf/io/zero_copy_stream_impl.h", + "src/google/protobuf/map_entry.h", + "src/google/protobuf/map_field.cc", + "src/google/protobuf/map_field.h", + "src/google/protobuf/map_field_inl.h", + "src/google/protobuf/message.cc", + "src/google/protobuf/message.h", + "src/google/protobuf/metadata.h", + "src/google/protobuf/reflection.h", + "src/google/protobuf/reflection_internal.h", + "src/google/protobuf/reflection_ops.cc", + "src/google/protobuf/reflection_ops.h", + "src/google/protobuf/service.cc", + "src/google/protobuf/service.h", + "src/google/protobuf/source_context.pb.cc", + "src/google/protobuf/source_context.pb.h", + "src/google/protobuf/struct.pb.cc", + "src/google/protobuf/struct.pb.h", + "src/google/protobuf/stubs/mathlimits.cc", + "src/google/protobuf/stubs/mathlimits.h", + "src/google/protobuf/stubs/mathutil.h", + "src/google/protobuf/stubs/singleton.h", "src/google/protobuf/stubs/substitute.cc", "src/google/protobuf/stubs/substitute.h", - "src/google/protobuf/stubs/stl_util.h", - "src/google/protobuf/stubs/stringprintf.cc", - "src/google/protobuf/stubs/stringprintf.h", - "src/google/protobuf/stubs/structurally_valid.cc", - "src/google/protobuf/stubs/template_util.h", - "src/google/protobuf/stubs/type_traits.h", - - "src/google/protobuf/descriptor.cc", - "src/google/protobuf/descriptor.pb.cc", - "src/google/protobuf/descriptor_database.cc", - "src/google/protobuf/dynamic_message.cc", - "src/google/protobuf/extension_set_heavy.cc", - "src/google/protobuf/generated_message_reflection.cc", - "src/google/protobuf/message.cc", - "src/google/protobuf/reflection_ops.cc", - "src/google/protobuf/service.cc", "src/google/protobuf/text_format.cc", + "src/google/protobuf/text_format.h", + "src/google/protobuf/timestamp.pb.cc", + "src/google/protobuf/timestamp.pb.h", + "src/google/protobuf/type.pb.cc", + "src/google/protobuf/type.pb.h", + "src/google/protobuf/unknown_field_set.cc", + "src/google/protobuf/unknown_field_set.h", + "src/google/protobuf/util/field_comparator.cc", + "src/google/protobuf/util/field_comparator.h", + "src/google/protobuf/util/field_mask_util.cc", + "src/google/protobuf/util/field_mask_util.h", + "src/google/protobuf/util/internal/constants.h", + "src/google/protobuf/util/internal/datapiece.cc", + "src/google/protobuf/util/internal/datapiece.h", + "src/google/protobuf/util/internal/default_value_objectwriter.cc", + "src/google/protobuf/util/internal/default_value_objectwriter.h", + "src/google/protobuf/util/internal/error_listener.cc", + "src/google/protobuf/util/internal/error_listener.h", + "src/google/protobuf/util/internal/field_mask_utility.cc", + "src/google/protobuf/util/internal/field_mask_utility.h", + "src/google/protobuf/util/internal/json_escaping.cc", + "src/google/protobuf/util/internal/json_escaping.h", + "src/google/protobuf/util/internal/json_objectwriter.cc", + "src/google/protobuf/util/internal/json_objectwriter.h", + "src/google/protobuf/util/internal/json_stream_parser.cc", + "src/google/protobuf/util/internal/json_stream_parser.h", + "src/google/protobuf/util/internal/location_tracker.h", + "src/google/protobuf/util/internal/object_location_tracker.h", + 
"src/google/protobuf/util/internal/object_source.h", + "src/google/protobuf/util/internal/object_writer.cc", + "src/google/protobuf/util/internal/object_writer.h", + "src/google/protobuf/util/internal/proto_writer.cc", + "src/google/protobuf/util/internal/proto_writer.h", + "src/google/protobuf/util/internal/protostream_objectsource.cc", + "src/google/protobuf/util/internal/protostream_objectsource.h", + "src/google/protobuf/util/internal/protostream_objectwriter.cc", + "src/google/protobuf/util/internal/protostream_objectwriter.h", + "src/google/protobuf/util/internal/structured_objectwriter.h", + "src/google/protobuf/util/internal/type_info.cc", + "src/google/protobuf/util/internal/type_info.h", + "src/google/protobuf/util/internal/type_info_test_helper.cc", + "src/google/protobuf/util/internal/type_info_test_helper.h", + "src/google/protobuf/util/internal/utility.cc", + "src/google/protobuf/util/internal/utility.h", + "src/google/protobuf/util/json_util.cc", + "src/google/protobuf/util/json_util.h", + "src/google/protobuf/util/message_differencer.cc", + "src/google/protobuf/util/message_differencer.h", + "src/google/protobuf/util/time_util.cc", + "src/google/protobuf/util/time_util.h", + "src/google/protobuf/util/type_resolver.h", + "src/google/protobuf/util/type_resolver_util.cc", + "src/google/protobuf/util/type_resolver_util.h", "src/google/protobuf/wire_format.cc", - # This file pulls in zlib, but it's not actually used by protoc, so - # instead of compiling zlib for the host, let's just exclude this. - # "src/src/google/protobuf/io/gzip_stream.cc", - "src/google/protobuf/io/printer.cc", - "src/google/protobuf/io/tokenizer.cc", - "src/google/protobuf/io/zero_copy_stream_impl.cc", - "src/google/protobuf/compiler/importer.cc", - "src/google/protobuf/compiler/parser.cc", + "src/google/protobuf/wire_format.h", + "src/google/protobuf/wrappers.pb.cc", + "src/google/protobuf/wrappers.pb.h", + ] + + deps = [ + "//build/config/sanitizers:deps", ] configs -= [ "//build/config/compiler:chromium_code" ] - configs += [ "//build/config/compiler:no_chromium_code" ] + configs += [ + "//build/config/compiler:no_chromium_code", + + # Must be after no_chromium_code for warning flags to be ordered + # correctly. + ":protobuf_warnings", + ] if (is_win) { configs -= [ "//build/config/win:lean_and_mean" ] } - direct_dependent_configs = [ ":protobuf_config" ] + public_configs = [ + ":protobuf_config", + + # TODO(jschuh): crbug.com/167187 fix size_t to int truncations. + "//build/config/compiler:no_size_t_to_int_warning", + ] cflags = protobuf_lite_cflags } # Only compile the compiler for the host architecture. if (current_toolchain == host_toolchain) { - executable("protoc") { + # protoc compiler is separated into protoc library and executable targets to + # support protoc plugins that need to link libprotoc, but not the main() + # itself. 
See src/google/protobuf/compiler/plugin.h + static_library("protoc_lib") { sources = [ "src/google/protobuf/compiler/code_generator.cc", + "src/google/protobuf/compiler/code_generator.h", "src/google/protobuf/compiler/command_line_interface.cc", - "src/google/protobuf/compiler/plugin.cc", - "src/google/protobuf/compiler/plugin.pb.cc", - "src/google/protobuf/compiler/subprocess.cc", - "src/google/protobuf/compiler/subprocess.h", - "src/google/protobuf/compiler/zip_writer.cc", - "src/google/protobuf/compiler/zip_writer.h", + "src/google/protobuf/compiler/command_line_interface.h", "src/google/protobuf/compiler/cpp/cpp_enum.cc", "src/google/protobuf/compiler/cpp/cpp_enum.h", "src/google/protobuf/compiler/cpp/cpp_enum_field.cc", @@ -196,56 +355,298 @@ if (current_toolchain == host_toolchain) { "src/google/protobuf/compiler/cpp/cpp_file.cc", "src/google/protobuf/compiler/cpp/cpp_file.h", "src/google/protobuf/compiler/cpp/cpp_generator.cc", + "src/google/protobuf/compiler/cpp/cpp_generator.h", "src/google/protobuf/compiler/cpp/cpp_helpers.cc", "src/google/protobuf/compiler/cpp/cpp_helpers.h", + "src/google/protobuf/compiler/cpp/cpp_map_field.cc", + "src/google/protobuf/compiler/cpp/cpp_map_field.h", "src/google/protobuf/compiler/cpp/cpp_message.cc", "src/google/protobuf/compiler/cpp/cpp_message.h", "src/google/protobuf/compiler/cpp/cpp_message_field.cc", "src/google/protobuf/compiler/cpp/cpp_message_field.h", + "src/google/protobuf/compiler/cpp/cpp_options.h", "src/google/protobuf/compiler/cpp/cpp_primitive_field.cc", "src/google/protobuf/compiler/cpp/cpp_primitive_field.h", "src/google/protobuf/compiler/cpp/cpp_service.cc", "src/google/protobuf/compiler/cpp/cpp_service.h", "src/google/protobuf/compiler/cpp/cpp_string_field.cc", "src/google/protobuf/compiler/cpp/cpp_string_field.h", + "src/google/protobuf/compiler/csharp/csharp_doc_comment.cc", + "src/google/protobuf/compiler/csharp/csharp_doc_comment.h", + "src/google/protobuf/compiler/csharp/csharp_enum.cc", + "src/google/protobuf/compiler/csharp/csharp_enum.h", + "src/google/protobuf/compiler/csharp/csharp_enum_field.cc", + "src/google/protobuf/compiler/csharp/csharp_enum_field.h", + "src/google/protobuf/compiler/csharp/csharp_field_base.cc", + "src/google/protobuf/compiler/csharp/csharp_field_base.h", + "src/google/protobuf/compiler/csharp/csharp_generator.cc", + "src/google/protobuf/compiler/csharp/csharp_generator.h", + "src/google/protobuf/compiler/csharp/csharp_helpers.cc", + "src/google/protobuf/compiler/csharp/csharp_helpers.h", + "src/google/protobuf/compiler/csharp/csharp_map_field.cc", + "src/google/protobuf/compiler/csharp/csharp_map_field.h", + "src/google/protobuf/compiler/csharp/csharp_message.cc", + "src/google/protobuf/compiler/csharp/csharp_message.h", + "src/google/protobuf/compiler/csharp/csharp_message_field.cc", + "src/google/protobuf/compiler/csharp/csharp_message_field.h", + "src/google/protobuf/compiler/csharp/csharp_options.h", + "src/google/protobuf/compiler/csharp/csharp_primitive_field.cc", + "src/google/protobuf/compiler/csharp/csharp_primitive_field.h", + "src/google/protobuf/compiler/csharp/csharp_reflection_class.cc", + "src/google/protobuf/compiler/csharp/csharp_reflection_class.h", + "src/google/protobuf/compiler/csharp/csharp_repeated_enum_field.cc", + "src/google/protobuf/compiler/csharp/csharp_repeated_enum_field.h", + "src/google/protobuf/compiler/csharp/csharp_repeated_message_field.cc", + "src/google/protobuf/compiler/csharp/csharp_repeated_message_field.h", + 
"src/google/protobuf/compiler/csharp/csharp_repeated_primitive_field.cc", + "src/google/protobuf/compiler/csharp/csharp_repeated_primitive_field.h", + "src/google/protobuf/compiler/csharp/csharp_source_generator_base.cc", + "src/google/protobuf/compiler/csharp/csharp_source_generator_base.h", + "src/google/protobuf/compiler/csharp/csharp_wrapper_field.cc", + "src/google/protobuf/compiler/csharp/csharp_wrapper_field.h", + "src/google/protobuf/compiler/java/java_context.cc", + "src/google/protobuf/compiler/java/java_context.h", + "src/google/protobuf/compiler/java/java_doc_comment.cc", + "src/google/protobuf/compiler/java/java_doc_comment.h", "src/google/protobuf/compiler/java/java_enum.cc", "src/google/protobuf/compiler/java/java_enum.h", "src/google/protobuf/compiler/java/java_enum_field.cc", "src/google/protobuf/compiler/java/java_enum_field.h", + "src/google/protobuf/compiler/java/java_enum_field_lite.cc", + "src/google/protobuf/compiler/java/java_enum_field_lite.h", + "src/google/protobuf/compiler/java/java_enum_lite.cc", + "src/google/protobuf/compiler/java/java_enum_lite.h", "src/google/protobuf/compiler/java/java_extension.cc", "src/google/protobuf/compiler/java/java_extension.h", + "src/google/protobuf/compiler/java/java_extension_lite.cc", + "src/google/protobuf/compiler/java/java_extension_lite.h", "src/google/protobuf/compiler/java/java_field.cc", "src/google/protobuf/compiler/java/java_field.h", "src/google/protobuf/compiler/java/java_file.cc", "src/google/protobuf/compiler/java/java_file.h", "src/google/protobuf/compiler/java/java_generator.cc", + "src/google/protobuf/compiler/java/java_generator.h", + "src/google/protobuf/compiler/java/java_generator_factory.cc", + "src/google/protobuf/compiler/java/java_generator_factory.h", "src/google/protobuf/compiler/java/java_helpers.cc", "src/google/protobuf/compiler/java/java_helpers.h", + "src/google/protobuf/compiler/java/java_lazy_message_field.cc", + "src/google/protobuf/compiler/java/java_lazy_message_field.h", + "src/google/protobuf/compiler/java/java_lazy_message_field_lite.cc", + "src/google/protobuf/compiler/java/java_lazy_message_field_lite.h", + "src/google/protobuf/compiler/java/java_map_field.cc", + "src/google/protobuf/compiler/java/java_map_field.h", + "src/google/protobuf/compiler/java/java_map_field_lite.cc", + "src/google/protobuf/compiler/java/java_map_field_lite.h", "src/google/protobuf/compiler/java/java_message.cc", "src/google/protobuf/compiler/java/java_message.h", + "src/google/protobuf/compiler/java/java_message_builder.cc", + "src/google/protobuf/compiler/java/java_message_builder.h", + "src/google/protobuf/compiler/java/java_message_builder_lite.cc", + "src/google/protobuf/compiler/java/java_message_builder_lite.h", "src/google/protobuf/compiler/java/java_message_field.cc", "src/google/protobuf/compiler/java/java_message_field.h", + "src/google/protobuf/compiler/java/java_message_field_lite.cc", + "src/google/protobuf/compiler/java/java_message_field_lite.h", + "src/google/protobuf/compiler/java/java_message_lite.cc", + "src/google/protobuf/compiler/java/java_message_lite.h", + "src/google/protobuf/compiler/java/java_name_resolver.cc", + "src/google/protobuf/compiler/java/java_name_resolver.h", "src/google/protobuf/compiler/java/java_primitive_field.cc", "src/google/protobuf/compiler/java/java_primitive_field.h", + "src/google/protobuf/compiler/java/java_primitive_field_lite.cc", + "src/google/protobuf/compiler/java/java_primitive_field_lite.h", "src/google/protobuf/compiler/java/java_service.cc", 
"src/google/protobuf/compiler/java/java_service.h", + "src/google/protobuf/compiler/java/java_shared_code_generator.cc", + "src/google/protobuf/compiler/java/java_shared_code_generator.h", "src/google/protobuf/compiler/java/java_string_field.cc", "src/google/protobuf/compiler/java/java_string_field.h", + "src/google/protobuf/compiler/java/java_string_field_lite.cc", + "src/google/protobuf/compiler/java/java_string_field_lite.h", + "src/google/protobuf/compiler/javanano/javanano_enum.cc", + "src/google/protobuf/compiler/javanano/javanano_enum.h", + "src/google/protobuf/compiler/javanano/javanano_enum_field.cc", + "src/google/protobuf/compiler/javanano/javanano_enum_field.h", + "src/google/protobuf/compiler/javanano/javanano_extension.cc", + "src/google/protobuf/compiler/javanano/javanano_extension.h", + "src/google/protobuf/compiler/javanano/javanano_field.cc", + "src/google/protobuf/compiler/javanano/javanano_field.h", + "src/google/protobuf/compiler/javanano/javanano_file.cc", + "src/google/protobuf/compiler/javanano/javanano_file.h", + "src/google/protobuf/compiler/javanano/javanano_generator.cc", + "src/google/protobuf/compiler/javanano/javanano_generator.h", + "src/google/protobuf/compiler/javanano/javanano_helpers.cc", + "src/google/protobuf/compiler/javanano/javanano_helpers.h", + "src/google/protobuf/compiler/javanano/javanano_map_field.cc", + "src/google/protobuf/compiler/javanano/javanano_map_field.h", + "src/google/protobuf/compiler/javanano/javanano_message.cc", + "src/google/protobuf/compiler/javanano/javanano_message.h", + "src/google/protobuf/compiler/javanano/javanano_message_field.cc", + "src/google/protobuf/compiler/javanano/javanano_message_field.h", + "src/google/protobuf/compiler/javanano/javanano_primitive_field.cc", + "src/google/protobuf/compiler/javanano/javanano_primitive_field.h", + "src/google/protobuf/compiler/js/js_generator.cc", + "src/google/protobuf/compiler/js/js_generator.h", + "src/google/protobuf/compiler/objectivec/objectivec_enum.cc", + "src/google/protobuf/compiler/objectivec/objectivec_enum.h", + "src/google/protobuf/compiler/objectivec/objectivec_enum_field.cc", + "src/google/protobuf/compiler/objectivec/objectivec_enum_field.h", + "src/google/protobuf/compiler/objectivec/objectivec_extension.cc", + "src/google/protobuf/compiler/objectivec/objectivec_extension.h", + "src/google/protobuf/compiler/objectivec/objectivec_field.cc", + "src/google/protobuf/compiler/objectivec/objectivec_field.h", + "src/google/protobuf/compiler/objectivec/objectivec_file.cc", + "src/google/protobuf/compiler/objectivec/objectivec_file.h", + "src/google/protobuf/compiler/objectivec/objectivec_generator.cc", + "src/google/protobuf/compiler/objectivec/objectivec_generator.h", + "src/google/protobuf/compiler/objectivec/objectivec_helpers.cc", + "src/google/protobuf/compiler/objectivec/objectivec_helpers.h", + "src/google/protobuf/compiler/objectivec/objectivec_map_field.cc", + "src/google/protobuf/compiler/objectivec/objectivec_map_field.h", + "src/google/protobuf/compiler/objectivec/objectivec_message.cc", + "src/google/protobuf/compiler/objectivec/objectivec_message.h", + "src/google/protobuf/compiler/objectivec/objectivec_message_field.cc", + "src/google/protobuf/compiler/objectivec/objectivec_message_field.h", + "src/google/protobuf/compiler/objectivec/objectivec_oneof.cc", + "src/google/protobuf/compiler/objectivec/objectivec_oneof.h", + "src/google/protobuf/compiler/objectivec/objectivec_primitive_field.cc", + 
"src/google/protobuf/compiler/objectivec/objectivec_primitive_field.h", + "src/google/protobuf/compiler/plugin.cc", + "src/google/protobuf/compiler/plugin.h", + "src/google/protobuf/compiler/plugin.pb.cc", + "src/google/protobuf/compiler/plugin.pb.h", "src/google/protobuf/compiler/python/python_generator.cc", - "src/google/protobuf/compiler/main.cc", + "src/google/protobuf/compiler/python/python_generator.h", + "src/google/protobuf/compiler/ruby/ruby_generator.cc", + "src/google/protobuf/compiler/ruby/ruby_generator.h", + "src/google/protobuf/compiler/subprocess.cc", + "src/google/protobuf/compiler/subprocess.h", + "src/google/protobuf/compiler/zip_writer.cc", + "src/google/protobuf/compiler/zip_writer.h", ] configs -= [ "//build/config/compiler:chromium_code" ] - configs += [ "//build/config/compiler:no_chromium_code" ] + configs += [ + "//build/config/compiler:no_chromium_code", + + # Must be after no_chromium_code for warning flags to be ordered + # correctly. + ":protobuf_warnings", + ] if (is_win) { # This is defined internally, don't warn on duplicate. configs -= [ "//build/config/win:lean_and_mean" ] } + public_configs = [ ":protobuf_config" ] + cflags = protobuf_lite_cflags - deps = [ + public_deps = [ ":protobuf_full", ] } + + executable("protoc") { + sources = [ + "src/google/protobuf/compiler/main.cc", + ] + + configs -= [ "//build/config/compiler:chromium_code" ] + configs += [ "//build/config/compiler:no_chromium_code" ] + + cflags = protobuf_lite_cflags + + deps = [ + ":protoc_lib", + + # Default manifest on Windows (a no-op elsewhere). + "//build/win:default_exe_manifest", + ] + } +} + +google_python_dir = "$root_out_dir/pyproto/google" + +copy("copy_google") { + sources = [ + "__init__.py", + ] + outputs = [ + "$google_python_dir/{{source_file_part}}", + ] +} + +copy("copy_six") { + sources = [ + "third_party/six/six.py", + ] + outputs = [ + "$google_python_dir/third_party/six/{{source_file_part}}", + ] +} + +copy("copy_google_protobuf") { + sources = [ + "python/google/protobuf/__init__.py", + "python/google/protobuf/descriptor.py", + "python/google/protobuf/descriptor_database.py", + "python/google/protobuf/descriptor_pool.py", + "python/google/protobuf/json_format.py", + "python/google/protobuf/message.py", + "python/google/protobuf/message_factory.py", + "python/google/protobuf/proto_builder.py", + "python/google/protobuf/reflection.py", + "python/google/protobuf/service.py", + "python/google/protobuf/service_reflection.py", + "python/google/protobuf/symbol_database.py", + "python/google/protobuf/text_encoding.py", + "python/google/protobuf/text_format.py", + + # TODO(ncarter): protoc's python generator treats descriptor.proto + # specially, but only when the input path is exactly + # "google/protobuf/descriptor.proto". I'm not sure how to execute a rule + # from a different directory. For now, use a manually-generated copy of + # descriptor_pb2.py. 
+ "python/google/protobuf/descriptor_pb2.py", + ] + outputs = [ + "$google_python_dir/protobuf/{{source_file_part}}", + ] +} + +copy("copy_google_protobuf_internal") { + sources = [ + "python/google/protobuf/internal/__init__.py", + "python/google/protobuf/internal/_parameterized.py", + "python/google/protobuf/internal/api_implementation.py", + "python/google/protobuf/internal/containers.py", + "python/google/protobuf/internal/decoder.py", + "python/google/protobuf/internal/encoder.py", + "python/google/protobuf/internal/enum_type_wrapper.py", + "python/google/protobuf/internal/message_listener.py", + "python/google/protobuf/internal/python_message.py", + "python/google/protobuf/internal/type_checkers.py", + "python/google/protobuf/internal/well_known_types.py", + "python/google/protobuf/internal/wire_format.py", + ] + outputs = [ + "$google_python_dir/protobuf/internal/{{source_file_part}}", + ] +} + +group("py_proto") { + public_deps = [ + ":copy_google", + ":copy_google_protobuf", + ":copy_google_protobuf_internal", + ":copy_six", + ] + + # Targets that depend on this should depend on the copied data files. + data = get_target_outputs(":copy_google") + data += get_target_outputs(":copy_six") + data += get_target_outputs(":copy_google_protobuf") + data += get_target_outputs(":copy_google_protobuf_internal") } diff --git a/packager/third_party/protobuf/CHANGES.txt b/packager/third_party/protobuf/CHANGES.txt index a21e956c35..3459cccfa9 100644 --- a/packager/third_party/protobuf/CHANGES.txt +++ b/packager/third_party/protobuf/CHANGES.txt @@ -1,4 +1,655 @@ -2012-09-19 version 2.5.0: +2016-05-16 version 3.0.0-beta-3 (C++/Java/Python/Ruby/Nano/Objective-C/C#/JavaScript) + General + * Supported Proto3 lite-runtime in C++/Java for mobile platforms. + * Any type now supports APIs to specify prefixes other than + type.googleapis.com + * Removed javanano_use_deprecated_package option; Nano will always has its own + ".nano" package. + + C++ (Beta) + * Improved hash maps. + - Improved hash maps comments. In particular, please note that equal hash + maps will not necessarily have the same iteration order and + serialization. + - Added a new hash maps implementation that will become the default in a + later release. + * Arenas + - Several inlined methods in Arena were moved to out-of-line to improve + build performance and code size. + - Added SpaceAllocatedAndUsed() to report both space used and allocated + - Added convenient class UnsafeArenaAllocatedRepeatedPtrFieldBackInserter + * Any + - Allow custom type URL prefixes in Any packing. + - TextFormat now expand the Any type rather than printing bytes. + * Performance optimizations and various bug fixes. + + Java (Beta) + * Introduced an ExperimentalApi annotation. Annotated APIs are experimental + and are subject to change in a backward incompatible way in future releases. + * Introduced zero-copy serialization as an ExperimentalApi + - Introduction of the `ByteOutput` interface. This is similar to + `OutputStream` but provides semantics for lazy writing (i.e. no + immediate copy required) of fields that are considered to be immutable. + - `ByteString` now supports writing to a `ByteOutput`, which will directly + expose the internals of the `ByteString` (i.e. `byte[]` or `ByteBuffer`) + to the `ByteOutput` without copying. + - `CodedOutputStream` now supports writing to a `ByteOutput`. `ByteString` + instances that are too large to fit in the internal buffer will be + (lazily) written to the `ByteOutput` directly. 
+ - This allows applications using large `ByteString` fields to avoid + duplication of these fields entirely. Such an application can supply a + `ByteOutput` that chains together the chunks received from + `CodedOutputStream` before forwarding them onto the IO system. + * Other related changes to `CodedOutputStream` + - Additional use of `sun.misc.Unsafe` where possible to perform fast + access to `byte[]` and `ByteBuffer` values and avoiding unnecessary + range checking. + - `ByteBuffer`-backed `CodedOutputStream` now writes directly to the + `ByteBuffer` rather than to an intermediate array. + * Improved lite-runtime. + - Lite protos now implement deep equals/hashCode/toString + - Significantly improved the performance of Builder#mergeFrom() and + Builder#mergeDelimitedFrom() + * Various bug fixes and small feature enhancement. + - Fixed stack overflow when in hashCode() for infinite recursive oneofs. + - Fixed the lazy field parsing in lite to merge rather than overwrite. + - TextFormat now supports reporting line/column numbers on errors. + - Updated to add appropriate @Override for better compiler errors. + + Python (Beta) + * Added JSON format for Any, Struct, Value and ListValue + * [ ] is now accepted for both repeated scalar fields and repeated message + fields in text format parser. + * Numerical field name is now supported in text format. + * Added DiscardUnknownFields API for python protobuf message. + + Objective-C (Beta) + * Proto comments now come over as HeaderDoc comments in the generated sources + so Xcode can pick them up and display them. + * The library headers have been updated to use HeaderDoc comments so Xcode can + pick them up and display them. + * The per message and per field overhead in both generated code and runtime + object sizes was reduced. + * Generated code now include deprecated annotations when the proto file + included them. + + C# (Beta) + In general: some changes are breaking, which require regenerating messages. + Most user-written code will not be impacted *except* for the renaming of enum + values. + + * Allow custom type URL prefixes in `Any` packing, and ignore them when + unpacking + * `protoc` is now in a separate NuGet package (Google.Protobuf.Tools) + * New option: `internal_access` to generate internal classes + * Enum values are now PascalCased, and if there's a prefix which matches the + name of the enum, that is removed (so an enum `COLOR` with a value + `COLOR_BLUE` would generate a value of just `Blue`). An option + (`legacy_enum_values`) is temporarily available to disable this, but the + option will be removed for GA. 
+ * `json_name` option is now honored + * If group tags are encountered when parsing, they are validated more + thoroughly (although we don't support actual groups) + * NuGet dependencies are better specified + * Breaking: `Preconditions` is renamed to `ProtoPreconditions` + * Breaking: `GeneratedCodeInfo` is renamed to `GeneratedClrTypeInfo` + * `JsonFormatter` now allows writing to a `TextWriter` + * New interface, `ICustomDiagnosticMessage` to allow more compact + representations from `ToString` + * `CodedInputStream` and `CodedOutputStream` now implement `IDisposable`, + which simply disposes of the streams they were constructed with + * Map fields no longer support null values (in line with other languages) + * Improvements in JSON formatting and parsing + + Javascript (Alpha) + * Better support for "bytes" fields: bytes fields can be read as either a + base64 string or UInt8Array (in environments where TypedArray is supported). + * New support for CommonJS imports. This should make it easier to use the + JavaScript support in Node.js and tools like WebPack. See js/README.md for + more information. + * Some significant internal refactoring to simplify and modularize the code. + + Ruby (Alpha) + * JSON serialization now properly uses camelCased names, with a runtime option + that will preserve original names from .proto files instead. + * Well-known types are now included in the distribution. + * Release now includes binary gems for Windows, Mac, and Linux instead of just + source gems. + * Bugfix for serializing oneofs. + + C++/Java Lite (Alpha) + A new "lite" generator parameter was introduced in the protoc for C++ and + Java for Proto3 syntax messages. Example usage: + + ./protoc --cpp_out=lite:$OUTPUT_PATH foo.proto + + The protoc will treat the current input and all the transitive dependencies + as LITE. The same generator parameter must be used to generate the + dependencies. + + In Proto3 syntax files, "optimized_for=LITE_RUNTIME" is no longer supported. + + +2015-12-30 version 3.0.0-beta-2 (C++/Java/Python/Ruby/Nano/Objective-C/C#/JavaScript) + General + * Introduced a new language implementation: JavaScript. + * Added a new field option "json_name". By default proto field names are + converted to "lowerCamelCase" in proto3 JSON format. This option can be + used to override this behavior and specify a different JSON name for the + field. + * Added conformance tests to ensure implementations are following proto3 JSON + specification. + + C++ (Beta) + * Various bug fixes and improvements to the JSON support utility: + - Duplicate map keys in JSON are now rejected (i.e., translation will + fail). + - Fixed wire-format for google.protobuf.Value/ListValue. + - Fixed precision loss when converting google.protobuf.Timestamp. + - Fixed a bug when parsing invalid UTF-8 code points. + - Fixed a memory leak. + - Reduced call stack usage. + + Java (Beta) + * Cleaned up some unused methods on CodedOutputStream. + * Presized lists for packed fields during parsing in the lite runtime to + reduce allocations and improve performance. + * Improved the performance of unknown fields in the lite runtime. + * Introduced UnsafeByteStrings to support zero-copy ByteString creation. + * Various bug fixes and improvements to the JSON support utility: + - Fixed a thread-safety bug. + - Added a new option "preservingProtoFieldNames" to JsonFormat. + - Added a new option "includingDefaultValueFields" to JsonFormat. + - Updated the JSON utility to comply with proto3 JSON specification.
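For illustration only (not part of the upstream changelog): a minimal sketch of the two JsonFormat printer options listed above, assuming the separate protobuf-java-util artifact and a placeholder generated message type Foo (Foo is hypothetical, not a type from this repository):

    import com.google.protobuf.InvalidProtocolBufferException;
    import com.google.protobuf.util.JsonFormat;

    public final class JsonFormatOptionsSketch {
      // Prints a message as proto3 JSON, keeping the original .proto field names
      // (instead of lowerCamelCase) and also emitting fields that still hold
      // their default values. Foo is a hypothetical generated message class.
      static String toJson(Foo message) throws InvalidProtocolBufferException {
        return JsonFormat.printer()
            .preservingProtoFieldNames()
            .includingDefaultValueFields()
            .print(message);
      }
    }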
+ + Python (Beta) + * Added proto3 JSON format utility. It includes support for all field types + and a few well-known types except for Any and Struct. + * Added runtime support for Any, Timestamp, Duration and FieldMask. + * [ ] is now accepted for repeated scalar fields in text format parser. + * Map fields now have proper O(1) performance for lookup/insert/delete + when using the Python/C++ implementation. They were previously using O(n) + search-based algorithms because the C++ reflection interface didn't + support true map operations. + + Objective-C (Beta) + * Various bug-fixes and code tweaks to pass more strict compiler warnings. + * Now has conformance test coverage and is passing all tests. + + C# (Beta) + * Various bug-fixes. + * Code generation: Files generated in directories based on namespace. + * Code generation: Include comments from .proto files in XML doc + comments (naively) + * Code generation: Change organization/naming of "reflection class" (access + to file descriptor) + * Code generation and library: Add Parser property to MessageDescriptor, + and introduce a non-generic parser type. + * Library: Added TypeRegistry to support JSON parsing/formatting of Any. + * Library: Added Any.Pack/Unpack support. + * Library: Implemented JSON parsing. + + Javascript (Alpha) + * Added proto3 support for JavaScript. The runtime is written in pure + JavaScript and works in browsers and in Node.js. To generate JavaScript + code for your proto, invoke protoc with "--js_out". See js/README.md + for more build instructions. + +2015-08-26 version 3.0.0-beta-1 (C++/Java/Python/Ruby/Nano/Objective-C/C#) + About Beta + * This is the first beta release of protobuf v3.0.0. Not all languages + have reached beta stage. Languages not marked as beta are still in + alpha (i.e., be prepared for API breaking changes). + + General + * Proto3 JSON is supported in several languages (fully supported in C++ + and Java, partially supported in Ruby/C#). The JSON spec is defined in + the proto3 language guide: + + https://developers.google.com/protocol-buffers/docs/proto3#json + + We will publish a more detailed spec to define the exact behavior of + proto3-conformant JSON serializers and parsers. Until then, do not rely + on specific behaviors of the implementation if it's not documented in + the above spec. More specifically, the behavior is not yet finalized for + the following: + - Parsing invalid JSON input (e.g., input with trailing commas). + - Non-camelCase names in JSON input. + - The same field appears multiple times in JSON input. + - JSON arrays contain "null" values. + - The message has unknown fields. + + * Proto3 now enforces strict UTF-8 checking. Parsing will fail if a string + field contains non UTF-8 data. + + C++ (Beta) + * Introduced new utility functions/classes in the google/protobuf/util + directory: + - MessageDifferencer: compare two proto messages and report their + differences. + - JsonUtil: support converting protobuf binary format to/from JSON. + - TimeUtil: utility functions to work with well-known types Timestamp + and Duration. + - FieldMaskUtil: utility functions to work with FieldMask. + + * Performance optimization of arena construction and destruction. + * Bug fixes for arena and maps support. + * Changed to use cmake for Windows Visual Studio builds. + * Added Bazel support. + + Java (Beta) + * Introduced a new util package that will be distributed as a separate + artifact in maven. It contains: + - JsonFormat: convert proto messages to/from JSON.
+ - TimeUtil: utility functions to work with Timestamp and Duration. + - FieldMaskUtil: utility functions to work with FieldMask. + + * The static PARSER in each generated message is deprecated, and it will + be removed in a future release. A static parser() getter is generated + for each message type instead. + * Performance optimizations for String fields serialization. + * Performance optimizations for Lite runtime on Android: + - Reduced allocations + - Reduced method overhead after ProGuarding + - Reduced code size after ProGuarding + + Python (Alpha) + * Removed legacy Python 2.5 support. + * Moved to a single Python 2.x/3.x-compatible codebase, instead of using 2to3. + * Fixed build/tests on Python 2.6, 2.7, 3.3, and 3.4. + - Pure-Python works on all four. + - Python/C++ implementation works on all but 3.4, due to changes in the + Python/C++ API in 3.4. + * Some preliminary work has been done to allow for multiple DescriptorPools + with Python/C++. + + Ruby (Alpha) + * Many bugfixes: + - fixed parsing/serialization of bytes, sint, sfixed types + - other parser bugfixes + - fixed memory leak affecting Ruby 2.2 + + JavaNano (Alpha) + * JavaNano generated code now will be put in a nano package by default to + avoid conflicts with Java generated code. + + Objective-C (Alpha) + * Added non-null markup to ObjC library. Requires SDK 8.4+ to build. + * Many bugfixes: + - Removed the class/enum filter. + - Renamed some internal types to avoid conflicts with the well-known types + protos. + - Added missing support for parsing repeated primitive fields in packed or + unpacked forms. + - Added *Count for repeated and map<> fields to avoid auto-create when + checking for them being set. + + C# (Alpha) + * Namespace changed to Google.Protobuf (and NuGet package will be named + correspondingly). + * Target platforms now .NET 4.5 and selected portable subsets only. + * Removed lite runtime. + * Reimplementation to use mutable message types. + * Null references used to represent "no value" for message type fields. + * Proto3 semantics supported; proto2 files are prohibited for C# codegen. + Most proto3 features supported: + - JSON formatting (a.k.a. serialization to JSON), including well-known + types (except for Any). + - Wrapper types mapped to nullable value types (or string/ByteString + allowing nullability). JSON parsing is not supported yet. + - maps + - oneof + - enum unknown value preservation + +2015-05-25 version 3.0.0-alpha-3 (Objective-C/C#): + General + * Introduced two new language implementations (Objective-C, C#) to proto3. + * Explicit "optional" keyword are disallowed in proto3 syntax, as fields are + optional by default. + * Group fields are no longer supported in proto3 syntax. + * Changed repeated primitive fields to use packed serialization by default in + proto3 (implemented for C++, Java, Python in this release). The user can + still disable packed serialization by setting packed to false for now. + * Added well-known type protos (any.proto, empty.proto, timestamp.proto, + duration.proto, etc.). Users can import and use these protos just like + regular proto files. Additional runtime support will be added for them in + future releases (in the form of utility helper functions, or having them + replaced by language specific types in generated code). + * Added a "reserved" keyword in both proto2 and proto3 syntax. User can use + this keyword to declare reserved field numbers and names to prevent them + from being reused by other fields in the same message. 
+ + To reserve field numbers, add a reserved declaration in your message: + + message TestMessage { + reserved 2, 15, 9 to 11, 3; + } + + This reserves field numbers 2, 3, 9, 10, 11 and 15. If a user uses any of + these as field numbers, the protocol buffer compiler will report an error. + + Field names can also be reserved: + + message TestMessage { + reserved "foo", "bar"; + } + + * Various bug fixes since 3.0.0-alpha-2 + + Objective-C + Objective-C includes a code generator and a native objective-c runtime + library. By adding "--objc_out" to protoc, the code generator will generate + a header(*.pbobjc.h) and an implementation file(*.pbobjc.m) for each proto + file. + + In this first release, the generated interface provides: enums, messages, + field support(single, repeated, map, oneof), proto2 and proto3 syntax + support, parsing and serialization. It's compatible with ARC and non-ARC + usage. Besides, user can also access it via the swift bridging header. + + See objectivec/README.md for details. + + C# + * C# protobufs are based on project + https://github.com/jskeet/protobuf-csharp-port. The original project was + frozen and all the new development will happen here. + * Codegen plugin for C# was completely rewritten to C++ and is now an + integral part of protoc. + * Some refactorings and cleanup has been applied to the C# runtime library. + * Only proto2 is supported in C# at the moment, proto3 support is in + progress and will likely bring significant breaking changes to the API. + + See csharp/README.md for details. + + C++ + * Added runtime support for Any type. To use Any in your proto file, first + import the definition of Any: + + // foo.proto + import "google/protobuf/any.proto"; + message Foo { + google.protobuf.Any any_field = 1; + } + message Bar { + int32 value = 1; + } + + Then in C++ you can access the Any field using PackFrom()/UnpackTo() + methods: + + Foo foo; + Bar bar = ...; + foo.mutable_any_field()->PackFrom(bar); + ... + if (foo.any_field().Is<Bar>()) { + foo.any_field().UnpackTo(&bar); + ... + } + * In text format, entries of a map field will be sorted by key. + + Java + * Continued optimizations on the lite runtime to improve performance for + Android. + + Python + * Added map support. + - maps now have a dict-like interface (msg.map_field[key] = value) + - existing code that modifies maps via the repeated field interface + will need to be updated. + + Ruby + * Improvements to RepeatedField's emulation of the Ruby Array API. + * Various speedups and internal cleanups. + +2015-02-26 version 3.0.0-alpha-2 (Python/Ruby/JavaNano): + General + * Introduced three new language implementations (Ruby, JavaNano, and + Python) to proto3. + * Various bug fixes since 3.0.0-alpha-1 + + Python: + Python has received several updates, most notably support for proto3 + semantics in any .proto file that declares syntax="proto3". + Messages declared in proto3 files no longer represent field presence + for scalar fields (number, enums, booleans, or strings). You can + no longer call HasField() for such fields, and they are serialized + based on whether they have a non-zero/empty/false value. + + One other notable change is in the C++-accelerated implementation. + Descriptor objects (which describe the protobuf schema and allow + reflection over it) are no longer duplicated between the Python + and C++ layers. The Python descriptors are now simple wrappers + around the C++ descriptors.
This change should significantly + reduce the memory usage of programs that use a lot of message + types. + + Ruby: + We have added proto3 support for Ruby via a native C extension. + + The Ruby extension itself is included in the ruby/ directory, and details on + building and installing the extension are in ruby/README.md. The extension + will also be published as a Ruby gem. Code generator support is included as + part of `protoc` with the `--ruby_out` flag. + + The Ruby extension implements a user-friendly DSL to define message types + (also generated by the code generator from `.proto` files). Once a message + type is defined, the user may create instances of the message that behave in + ways idiomatic to Ruby. For example: + + - Message fields are present as ordinary Ruby properties (getter method + `foo` and setter method `foo=`). + - Repeated field elements are stored in a container that acts like a native + Ruby array, and map elements are stored in a container that acts like a + native Ruby hashmap. + - The usual well-known methods, such as `#to_s`, `#dup`, and the like, are + present. + + Unlike several existing third-party Ruby extensions for protobuf, this + extension is built on a "strongly-typed" philosophy: message fields and + array/map containers will throw exceptions eagerly when values of the + incorrect type are inserted. + + See ruby/README.md for details. + + JavaNano: + JavaNano is a special code generator and runtime library designed especially + for resource-restricted systems, like Android. It is very resource-friendly + in both the amount of code and the runtime overhead. Here is an an overview + of JavaNano features compared with the official Java protobuf: + + - No descriptors or message builders. + - All messages are mutable; fields are public Java fields. + - For optional fields only, encapsulation behind setter/getter/hazzer/ + clearer functions is opt-in, which provide proper 'has' state support. + - For proto2, if not opted in, has state (field presence) is not available. + Serialization outputs all fields not equal to their defaults. + The behavior is consistent with proto3 semantics. + - Required fields (proto2 only) are always serialized. + - Enum constants are integers; protection against invalid values only + when parsing from the wire. + - Enum constants can be generated into container interfaces bearing + the enum's name (so the referencing code is in Java style). + - CodedInputByteBufferNano can only take byte[] (not InputStream). + - Similarly CodedOutputByteBufferNano can only write to byte[]. + - Repeated fields are in arrays, not ArrayList or Vector. Null array + elements are allowed and silently ignored. + - Full support for serializing/deserializing repeated packed fields. + - Support extensions (in proto2). + - Unset messages/groups are null, not an immutable empty default + instance. + - toByteArray(...) and mergeFrom(...) are now static functions of + MessageNano. + - The 'bytes' type translates to the Java type byte[]. + + See javanano/README.txt for details. + +2014-12-01 version 3.0.0-alpha-1 (C++/Java): + + General + * Introduced Protocol Buffers language version 3 (aka proto3). + + When protobuf was initially opensourced it implemented Protocol Buffers + language version 2 (aka proto2), which is why the version number + started from v2.0.0. From v3.0.0, a new language version (proto3) is + introduced while the old version (proto2) will continue to be supported. 
+ + The main intent of introducing proto3 is to clean up protobuf before + pushing the language as the foundation of Google's new API platform. + In proto3, the language is simplified, both for ease of use and to + make it available in a wider range of programming languages. At the + same time a few features are added to better support common idioms + found in APIs. + + The following are the main new features in language version 3: + + 1. Removal of field presence logic for primitive value fields, removal + of required fields, and removal of default values. This makes proto3 + significantly easier to implement with open struct representations, + as in languages like Android Java, Objective C, or Go. + 2. Removal of unknown fields. + 3. Removal of extensions, which are instead replaced by a new standard + type called Any. + 4. Fix semantics for unknown enum values. + 5. Addition of maps. + 6. Addition of a small set of standard types for representation of time, + dynamic data, etc. + 7. A well-defined encoding in JSON as an alternative to binary proto + encoding. + + This release (v3.0.0-alpha-1) includes partial proto3 support for C++ and + Java. Items 6 (well-known types) and 7 (JSON format) in the above feature + list are not implemented. + + A new notion "syntax" is introduced to specify whether a .proto file + uses proto2 or proto3: + + // foo.proto + syntax = "proto3"; + message Bar {...} + + If omitted, the protocol compiler will generate a warning and "proto2" will + be used as the default. This warning will be turned into an error in a + future release. + + We recommend that new Protocol Buffers users use proto3. However, we do not + generally recommend that existing users migrate from proto2 to proto3 due + to API incompatibility, and we will continue to support proto2 for a long + time. + + * Added support for map fields (implemented in C++/Java for both proto2 and + proto3). + + Map fields can be declared using the following syntax: + + message Foo { + map<KeyType, ValueType> values = 1; + } + + Data of a map field will be stored in memory as an unordered map and it + can be accessed through generated accessors. + + C++ + * Added arena allocation support (for both proto2 and proto3). + + Profiling shows memory allocation and deallocation constitutes a significant + fraction of CPU-time spent in protobuf code and arena allocation is a + technique introduced to reduce this cost. With arena allocation, new + objects will be allocated from a large piece of preallocated memory and + deallocation of these objects is almost free. Early adoption shows 20% to + 50% improvement in some Google binaries. + + To enable arena support, add the following option to your .proto file: + + option cc_enable_arenas = true; + + Protocol compiler will generate additional code to make the generated + message classes work with arenas. This does not change the existing API + of protobuf messages and does not affect wire format. Your existing code + should continue to work after adding this option. In the future we will + make this option enabled by default. + + To actually take advantage of arena allocation, you need to use the arena + APIs when creating messages. A quick example of using the arena API: + + { + google::protobuf::Arena arena; + // Allocate a protobuf message in the arena. + MyMessage* message = Arena::CreateMessage<MyMessage>(&arena); + // All submessages will be allocated in the same arena. + if (!message->ParseFromString(data)) { + // Deal with malformed input data. + } + // Must not delete the message here.
It will be deleted automatically + // when the arena is destroyed. + } + + Currently arena does not work with map fields. Enabling arena in a .proto + file containing map fields will result in compile errors in the generated + code. This will be addressed in a future release. + +2014-10-20 version 2.6.1: + + C++ + * Added atomicops support for Solaris. + * Released memory allocated by InitializeDefaultRepeatedFields() and + GetEmptyString(). Some memory sanitizers reported them as memory leaks. + + Java + * Updated DynamicMessage.setField() to handle repeated enum values + correctly. + * Fixed a bug that caused NullPointerException to be thrown when + converting manually constructed FileDescriptorProto to + FileDescriptor. + + Python + * Fixed WhichOneof() to work with de-serialized protobuf messages. + * Fixed a missing file problem of Python C++ implementation. + +2014-08-15 version 2.6.0: + + General + * Added oneofs(unions) feature. Fields in the same oneof will share + memory and at most one field can be set at the same time. Use the + oneof keyword to define a oneof like: + message SampleMessage { + oneof test_oneof { + string name = 4; + YourMessage sub_message = 9; + } + } + * Files, services, enums, messages, methods and enum values can be marked + as deprecated now. + * Added Support for list values, including lists of messages, when + parsing text-formatted protos in C++ and Java. + For example: foo: [1, 2, 3] + + C++ + * Enhanced customization on TestFormat printing. + * Added SwapFields() in reflection API to swap a subset of fields. + Added SetAllocatedMessage() in reflection API. + * Repeated primitive extensions are now packable. The + [packed=true] option only affects serializers. Therefore, it is + possible to switch a repeated extension field to packed format + without breaking backwards-compatibility. + * Various speed optimizations. + + Java + * writeTo() method in ByteString can now write a substring to an + output stream. Added endWith() method for ByteString. + * ByteString and ByteBuffer are now supported in CodedInputStream + and CodedOutputStream. + * java_generate_equals_and_hash can now be used with the LITE_RUNTIME. + + Python + * A new C++-backed extension module (aka "cpp api v2") that replaces the + old ("cpp api v1") one. Much faster than the pure Python code. This one + resolves many bugs and is recommended for general use over the + pure Python when possible. + * Descriptors now have enum_types_by_name and extension_types_by_name dict + attributes. + * Support for Python 3. + +2013-02-27 version 2.5.0: General * New notion "import public" that allows a proto file to forward the content @@ -17,6 +668,9 @@ be assigned the same numeric value. Default value is "true". Setting it to false causes the compiler to reject enum definitions where multiple symbols have the same numeric value. + Note: We plan to flip the default value to "false" in a future release. + Projects using enum aliases should set the option to "true" in their .proto + files. C++ * New generated method set_allocated_foo(Type* foo) for message and string @@ -32,7 +686,7 @@ comments for corresponding classes and data members. * Added Parser to parse directly into messages without a Builder. For example, - Foo foo = Foo.getParser().ParseFrom(input); + Foo foo = Foo.PARSER.ParseFrom(input); Using Parser is ~25% faster than using Builder to parse messages. * Added getters/setters to access the underlying ByteString of a string field directly. 
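For illustration only (not part of the upstream changelog): a minimal sketch of the Builder-based and Parser-based parsing paths compared in the hunk above, assuming a placeholder generated message type Foo (hypothetical, not a type from this repository); the ~25% figure quoted above refers to the Parser path:

    import com.google.protobuf.InvalidProtocolBufferException;

    public final class ParserSketch {
      // Older pattern: route the bytes through a Builder, then build the message.
      static Foo parseWithBuilder(byte[] data) throws InvalidProtocolBufferException {
        return Foo.newBuilder().mergeFrom(data).build();
      }

      // Newer pattern: parse directly into a message via the generated static PARSER.
      static Foo parseWithParser(byte[] data) throws InvalidProtocolBufferException {
        return Foo.PARSER.parseFrom(data);
      }
    }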
@@ -45,7 +699,7 @@ Python * Added support for dynamic message creation. DescriptorDatabase, - DescriptorPool, and MessageFactory work like their C++ couterparts to + DescriptorPool, and MessageFactory work like their C++ counterparts to simplify Descriptor construction from *DescriptorProtos, and MessageFactory provides a message instance from a Descriptor. * Added pickle support for protobuf messages. @@ -59,7 +713,7 @@ 2011-05-01 version 2.4.1: C++ - * Fixed the frendship problem for old compilers to make the library now gcc 3 + * Fixed the friendship problem for old compilers to make the library now gcc 3 compatible again. * Fixed vcprojects/extract_includes.bat to extract compiler/plugin.h. @@ -326,7 +980,7 @@ * Fixed tendency for TextFormat's parsing to overflow the stack when parsing large string values. The underlying problem is with Java's regex implementation (which unfortunately uses recursive backtracking - rather than building an NFA). Worked around by making use of possesive + rather than building an NFA). Worked around by making use of possessive quantifiers. * Generated service classes now also generate pure interfaces. For a service Foo, Foo.Interface is a pure interface containing all of the service's @@ -340,7 +994,7 @@ RPC implementations will have to implement the new interfaces in order to support blocking mode. * New I/O methods parseDelimitedFrom(), mergeDelimitedFrom(), and - writeDelimitedTo() read and write "delemited" messages from/to a stream, + writeDelimitedTo() read and write "delimited" messages from/to a stream, meaning that the message size precedes the data. This way, you can write multiple messages to a stream without having to worry about delimiting them yourself. diff --git a/packager/third_party/protobuf/CONTRIBUTORS.txt b/packager/third_party/protobuf/CONTRIBUTORS.txt index 7c1ac80d66..b8d97fc23d 100644 --- a/packager/third_party/protobuf/CONTRIBUTORS.txt +++ b/packager/third_party/protobuf/CONTRIBUTORS.txt @@ -17,6 +17,12 @@ Proto2 Python primary authors: Will Robinson Petar Petrov +Java Nano primary authors: + Brian Duff + Tom Chao + Max Cai + Ulas Kirazci + Large code contributions: Jason Hsueh Joseph Schorr @@ -50,7 +56,7 @@ Patch contributors: text format. Brian Atkinson * Added @Override annotation to generated Java code where appropriate. - Vincent Choinière + Vincent Choinière * Tru64 support. Monty Taylor * Solaris 10 + Sun Studio fixes. @@ -86,3 +92,11 @@ Patch contributors: * Fix bug with permanent callbacks that delete themselves when run. Michael Kucharski * Added CodedInputStream.getTotalBytesRead(). + Kacper Kowalik + * Fixed m4/acx_pthread.m4 problem for some Linux distributions. + William Orr + * Fixed detection of sched_yield on Solaris. + * Added atomicops for Solaris + Andrew Paprocki + * Fixed minor IBM xlC compiler build issues + * Added atomicops for AIX (POWER) diff --git a/packager/third_party/protobuf/DEPS b/packager/third_party/protobuf/DEPS new file mode 100644 index 0000000000..6e156dcd36 --- /dev/null +++ b/packager/third_party/protobuf/DEPS @@ -0,0 +1,3 @@ +skip_child_includes = [ + 'objectivec', +] diff --git a/packager/third_party/protobuf/LICENSE b/packager/third_party/protobuf/LICENSE new file mode 100644 index 0000000000..f028c82324 --- /dev/null +++ b/packager/third_party/protobuf/LICENSE @@ -0,0 +1,42 @@ +This license applies to all parts of Protocol Buffers except the following: + + - Atomicops support for generic gcc, located in + src/google/protobuf/stubs/atomicops_internals_generic_gcc.h. 
+ This file is copyrighted by Red Hat Inc. + + - Atomicops support for AIX/POWER, located in + src/google/protobuf/stubs/atomicops_internals_power.h. + This file is copyrighted by Bloomberg Finance LP. + +Copyright 2014, Google Inc. All rights reserved. + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are +met: + + * Redistributions of source code must retain the above copyright +notice, this list of conditions and the following disclaimer. + * Redistributions in binary form must reproduce the above +copyright notice, this list of conditions and the following disclaimer +in the documentation and/or other materials provided with the +distribution. + * Neither the name of Google Inc. nor the names of its +contributors may be used to endorse or promote products derived from +this software without specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +Code generated by the Protocol Buffer compiler is owned by the owner +of the input file used when generating it. This code is not +standalone and requires a support library to be linked with it. This +support library is itself covered by the above license. diff --git a/packager/third_party/protobuf/Makefile.am b/packager/third_party/protobuf/Makefile.am new file mode 100644 index 0000000000..3e9888166f --- /dev/null +++ b/packager/third_party/protobuf/Makefile.am @@ -0,0 +1,769 @@ +## Process this file with automake to produce Makefile.in + +ACLOCAL_AMFLAGS = -I m4 + +AUTOMAKE_OPTIONS = foreign + +# Build . before src so that our all-local and clean-local hooks kicks in at +# the right time. +SUBDIRS = . src + +# Always include gmock in distributions. +DIST_SUBDIRS = $(subdirs) src conformance benchmarks + +# Build gmock before we build protobuf tests. We don't add gmock to SUBDIRS +# because then "make check" would also build and run all of gmock's own tests, +# which takes a lot of time and is generally not useful to us. Also, we don't +# want "make install" to recurse into gmock since we don't want to overwrite +# the installed version of gmock if there is one. +check-local: + @echo "Making lib/libgmock.a lib/libgmock_main.a in gmock" + @cd gmock && $(MAKE) $(AM_MAKEFLAGS) lib/libgmock.la lib/libgmock_main.la + @cd gmock/gtest && $(MAKE) $(AM_MAKEFLAGS) lib/libgtest.la lib/libgtest_main.la + +# We would like to clean gmock when "make clean" is invoked. But we have to +# be careful because clean-local is also invoked during "make distclean", but +# "make distclean" already recurses into gmock because it's listed among the +# DIST_SUBDIRS. distclean will delete gmock/Makefile, so if we then try to +# cd to the directory again and "make clean" it will fail. So, check that the +# Makefile exists before recursing. 
+clean-local: + @if test -e gmock/Makefile; then \ + echo "Making clean in gmock"; \ + cd gmock && $(MAKE) $(AM_MAKEFLAGS) clean; \ + fi; \ + if test -e conformance/Makefile; then \ + echo "Making clean in conformance"; \ + cd conformance && $(MAKE) $(AM_MAKEFLAGS) clean; \ + fi; \ + if test -e benchmarks/Makefile; then \ + echo "Making clean in benchmarks"; \ + cd benchmarks && $(MAKE) $(AM_MAKEFLAGS) clean; \ + fi; \ + if test -e objectivec/DevTools; then \ + echo "Cleaning any ObjC pyc files"; \ + rm -f objectivec/DevTools/*.pyc; \ + fi + +pkgconfigdir = $(libdir)/pkgconfig +pkgconfig_DATA = protobuf.pc protobuf-lite.pc + +csharp_EXTRA_DIST= \ + csharp/.gitignore \ + csharp/CHANGES.txt \ + csharp/README.md \ + csharp/build_packages.bat \ + csharp/buildall.sh \ + csharp/generate_protos.sh \ + csharp/keys/Google.Protobuf.public.snk \ + csharp/keys/README.md \ + csharp/protos/unittest_issues.proto \ + csharp/src/AddressBook/AddPerson.cs \ + csharp/src/AddressBook/AddressBook.csproj \ + csharp/src/AddressBook/Addressbook.cs \ + csharp/src/AddressBook/ListPeople.cs \ + csharp/src/AddressBook/Program.cs \ + csharp/src/AddressBook/Properties/AssemblyInfo.cs \ + csharp/src/AddressBook/SampleUsage.cs \ + csharp/src/AddressBook/app.config \ + csharp/src/Google.Protobuf.Conformance/App.config \ + csharp/src/Google.Protobuf.Conformance/Conformance.cs \ + csharp/src/Google.Protobuf.Conformance/Google.Protobuf.Conformance.csproj \ + csharp/src/Google.Protobuf.Conformance/Program.cs \ + csharp/src/Google.Protobuf.Conformance/Properties/AssemblyInfo.cs \ + csharp/src/Google.Protobuf.JsonDump/Google.Protobuf.JsonDump.csproj \ + csharp/src/Google.Protobuf.JsonDump/Program.cs \ + csharp/src/Google.Protobuf.JsonDump/Properties/AssemblyInfo.cs \ + csharp/src/Google.Protobuf.JsonDump/app.config \ + csharp/src/Google.Protobuf.Test/ByteStringTest.cs \ + csharp/src/Google.Protobuf.Test/CodedInputStreamExtensions.cs \ + csharp/src/Google.Protobuf.Test/CodedInputStreamTest.cs \ + csharp/src/Google.Protobuf.Test/CodedOutputStreamTest.cs \ + csharp/src/Google.Protobuf.Test/Collections/MapFieldTest.cs \ + csharp/src/Google.Protobuf.Test/Collections/RepeatedFieldTest.cs \ + csharp/src/Google.Protobuf.Test/Compatibility/PropertyInfoExtensionsTest.cs \ + csharp/src/Google.Protobuf.Test/Compatibility/TypeExtensionsTest.cs \ + csharp/src/Google.Protobuf.Test/DeprecatedMemberTest.cs \ + csharp/src/Google.Protobuf.Test/EqualityTester.cs \ + csharp/src/Google.Protobuf.Test/FieldCodecTest.cs \ + csharp/src/Google.Protobuf.Test/GeneratedMessageTest.cs \ + csharp/src/Google.Protobuf.Test/Google.Protobuf.Test.csproj \ + csharp/src/Google.Protobuf.Test/IssuesTest.cs \ + csharp/src/Google.Protobuf.Test/JsonFormatterTest.cs \ + csharp/src/Google.Protobuf.Test/JsonParserTest.cs \ + csharp/src/Google.Protobuf.Test/JsonTokenizerTest.cs \ + csharp/src/Google.Protobuf.Test/Properties/AppManifest.xml \ + csharp/src/Google.Protobuf.Test/Properties/AssemblyInfo.cs \ + csharp/src/Google.Protobuf.Test/Reflection/DescriptorsTest.cs \ + csharp/src/Google.Protobuf.Test/Reflection/FieldAccessTest.cs \ + csharp/src/Google.Protobuf.Test/Reflection/TypeRegistryTest.cs \ + csharp/src/Google.Protobuf.Test/SampleEnum.cs \ + csharp/src/Google.Protobuf.Test/SampleMessages.cs \ + csharp/src/Google.Protobuf.Test/TestCornerCases.cs \ + csharp/src/Google.Protobuf.Test/TestProtos/MapUnittestProto3.cs \ + csharp/src/Google.Protobuf.Test/TestProtos/UnittestImportProto3.cs \ + csharp/src/Google.Protobuf.Test/TestProtos/UnittestImportPublicProto3.cs \ + 
csharp/src/Google.Protobuf.Test/TestProtos/UnittestIssues.cs \ + csharp/src/Google.Protobuf.Test/TestProtos/UnittestProto3.cs \ + csharp/src/Google.Protobuf.Test/TestProtos/UnittestWellKnownTypes.cs \ + csharp/src/Google.Protobuf.Test/WellKnownTypes/AnyTest.cs \ + csharp/src/Google.Protobuf.Test/WellKnownTypes/DurationTest.cs \ + csharp/src/Google.Protobuf.Test/WellKnownTypes/TimestampTest.cs \ + csharp/src/Google.Protobuf.Test/WellKnownTypes/WrappersTest.cs \ + csharp/src/Google.Protobuf.Test/packages.config \ + csharp/src/Google.Protobuf.sln \ + csharp/src/Google.Protobuf/ByteArray.cs \ + csharp/src/Google.Protobuf/ByteString.cs \ + csharp/src/Google.Protobuf/CodedInputStream.cs \ + csharp/src/Google.Protobuf/CodedOutputStream.ComputeSize.cs \ + csharp/src/Google.Protobuf/CodedOutputStream.cs \ + csharp/src/Google.Protobuf/Collections/MapField.cs \ + csharp/src/Google.Protobuf/Collections/ReadOnlyDictionary.cs \ + csharp/src/Google.Protobuf/Collections/RepeatedField.cs \ + csharp/src/Google.Protobuf/Compatibility/PropertyInfoExtensions.cs \ + csharp/src/Google.Protobuf/Compatibility/TypeExtensions.cs \ + csharp/src/Google.Protobuf/FieldCodec.cs \ + csharp/src/Google.Protobuf/FrameworkPortability.cs \ + csharp/src/Google.Protobuf/Google.Protobuf.csproj \ + csharp/src/Google.Protobuf/Google.Protobuf.nuspec \ + csharp/src/Google.Protobuf/IDeepCloneable.cs \ + csharp/src/Google.Protobuf/IMessage.cs \ + csharp/src/Google.Protobuf/InvalidJsonException.cs \ + csharp/src/Google.Protobuf/InvalidProtocolBufferException.cs \ + csharp/src/Google.Protobuf/JsonFormatter.cs \ + csharp/src/Google.Protobuf/JsonParser.cs \ + csharp/src/Google.Protobuf/JsonToken.cs \ + csharp/src/Google.Protobuf/JsonTokenizer.cs \ + csharp/src/Google.Protobuf/LimitedInputStream.cs \ + csharp/src/Google.Protobuf/MessageExtensions.cs \ + csharp/src/Google.Protobuf/MessageParser.cs \ + csharp/src/Google.Protobuf/ProtoPreconditions.cs \ + csharp/src/Google.Protobuf/Properties/AssemblyInfo.cs \ + csharp/src/Google.Protobuf/Reflection/Descriptor.cs \ + csharp/src/Google.Protobuf/Reflection/DescriptorBase.cs \ + csharp/src/Google.Protobuf/Reflection/DescriptorPool.cs \ + csharp/src/Google.Protobuf/Reflection/DescriptorUtil.cs \ + csharp/src/Google.Protobuf/Reflection/DescriptorValidationException.cs \ + csharp/src/Google.Protobuf/Reflection/EnumDescriptor.cs \ + csharp/src/Google.Protobuf/Reflection/EnumValueDescriptor.cs \ + csharp/src/Google.Protobuf/Reflection/FieldAccessorBase.cs \ + csharp/src/Google.Protobuf/Reflection/FieldDescriptor.cs \ + csharp/src/Google.Protobuf/Reflection/FieldType.cs \ + csharp/src/Google.Protobuf/Reflection/FileDescriptor.cs \ + csharp/src/Google.Protobuf/Reflection/GeneratedClrTypeInfo.cs \ + csharp/src/Google.Protobuf/Reflection/IDescriptor.cs \ + csharp/src/Google.Protobuf/Reflection/IFieldAccessor.cs \ + csharp/src/Google.Protobuf/Reflection/MapFieldAccessor.cs \ + csharp/src/Google.Protobuf/Reflection/MessageDescriptor.cs \ + csharp/src/Google.Protobuf/Reflection/MethodDescriptor.cs \ + csharp/src/Google.Protobuf/Reflection/OneofAccessor.cs \ + csharp/src/Google.Protobuf/Reflection/OneofDescriptor.cs \ + csharp/src/Google.Protobuf/Reflection/OriginalNameAttribute.cs \ + csharp/src/Google.Protobuf/Reflection/PackageDescriptor.cs \ + csharp/src/Google.Protobuf/Reflection/PartialClasses.cs \ + csharp/src/Google.Protobuf/Reflection/ReflectionUtil.cs \ + csharp/src/Google.Protobuf/Reflection/RepeatedFieldAccessor.cs \ + csharp/src/Google.Protobuf/Reflection/ServiceDescriptor.cs \ + 
csharp/src/Google.Protobuf/Reflection/SingleFieldAccessor.cs \ + csharp/src/Google.Protobuf/Reflection/TypeRegistry.cs \ + csharp/src/Google.Protobuf/WellKnownTypes/Any.cs \ + csharp/src/Google.Protobuf/WellKnownTypes/AnyPartial.cs \ + csharp/src/Google.Protobuf/WellKnownTypes/Api.cs \ + csharp/src/Google.Protobuf/WellKnownTypes/Duration.cs \ + csharp/src/Google.Protobuf/WellKnownTypes/DurationPartial.cs \ + csharp/src/Google.Protobuf/WellKnownTypes/Empty.cs \ + csharp/src/Google.Protobuf/WellKnownTypes/FieldMask.cs \ + csharp/src/Google.Protobuf/WellKnownTypes/SourceContext.cs \ + csharp/src/Google.Protobuf/WellKnownTypes/Struct.cs \ + csharp/src/Google.Protobuf/WellKnownTypes/TimeExtensions.cs \ + csharp/src/Google.Protobuf/WellKnownTypes/Timestamp.cs \ + csharp/src/Google.Protobuf/WellKnownTypes/TimestampPartial.cs \ + csharp/src/Google.Protobuf/WellKnownTypes/Type.cs \ + csharp/src/Google.Protobuf/WellKnownTypes/ValuePartial.cs \ + csharp/src/Google.Protobuf/WellKnownTypes/Wrappers.cs \ + csharp/src/Google.Protobuf/WellKnownTypes/WrappersPartial.cs \ + csharp/src/Google.Protobuf/WireFormat.cs \ + csharp/src/Google.Protobuf/packages.config \ + csharp/src/packages/repositories.config + +java_EXTRA_DIST= \ + java/README.md \ + java/core/generate-sources-build.xml \ + java/core/generate-test-sources-build.xml \ + java/core/pom.xml \ + java/core/src/main/java/com/google/protobuf/AbstractMessage.java \ + java/core/src/main/java/com/google/protobuf/AbstractMessageLite.java \ + java/core/src/main/java/com/google/protobuf/AbstractParser.java \ + java/core/src/main/java/com/google/protobuf/AbstractProtobufList.java \ + java/core/src/main/java/com/google/protobuf/BlockingRpcChannel.java \ + java/core/src/main/java/com/google/protobuf/BlockingService.java \ + java/core/src/main/java/com/google/protobuf/BooleanArrayList.java \ + java/core/src/main/java/com/google/protobuf/ByteBufferWriter.java \ + java/core/src/main/java/com/google/protobuf/ByteOutput.java \ + java/core/src/main/java/com/google/protobuf/ByteString.java \ + java/core/src/main/java/com/google/protobuf/CodedInputStream.java \ + java/core/src/main/java/com/google/protobuf/CodedOutputStream.java \ + java/core/src/main/java/com/google/protobuf/Descriptors.java \ + java/core/src/main/java/com/google/protobuf/DoubleArrayList.java \ + java/core/src/main/java/com/google/protobuf/DynamicMessage.java \ + java/core/src/main/java/com/google/protobuf/Extension.java \ + java/core/src/main/java/com/google/protobuf/ExtensionLite.java \ + java/core/src/main/java/com/google/protobuf/ExtensionRegistry.java \ + java/core/src/main/java/com/google/protobuf/ExtensionRegistryLite.java \ + java/core/src/main/java/com/google/protobuf/FieldSet.java \ + java/core/src/main/java/com/google/protobuf/FloatArrayList.java \ + java/core/src/main/java/com/google/protobuf/GeneratedMessage.java \ + java/core/src/main/java/com/google/protobuf/GeneratedMessageLite.java \ + java/core/src/main/java/com/google/protobuf/IntArrayList.java \ + java/core/src/main/java/com/google/protobuf/Internal.java \ + java/core/src/main/java/com/google/protobuf/InvalidProtocolBufferException.java \ + java/core/src/main/java/com/google/protobuf/LazyField.java \ + java/core/src/main/java/com/google/protobuf/LazyFieldLite.java \ + java/core/src/main/java/com/google/protobuf/LazyStringArrayList.java \ + java/core/src/main/java/com/google/protobuf/LazyStringList.java \ + java/core/src/main/java/com/google/protobuf/LongArrayList.java \ + java/core/src/main/java/com/google/protobuf/MapEntry.java \ + 
java/core/src/main/java/com/google/protobuf/MapEntryLite.java \ + java/core/src/main/java/com/google/protobuf/MapField.java \ + java/core/src/main/java/com/google/protobuf/MapFieldLite.java \ + java/core/src/main/java/com/google/protobuf/Message.java \ + java/core/src/main/java/com/google/protobuf/MessageLite.java \ + java/core/src/main/java/com/google/protobuf/MessageLiteOrBuilder.java \ + java/core/src/main/java/com/google/protobuf/MessageLiteToString.java \ + java/core/src/main/java/com/google/protobuf/MessageOrBuilder.java \ + java/core/src/main/java/com/google/protobuf/MessageReflection.java \ + java/core/src/main/java/com/google/protobuf/MutabilityOracle.java \ + java/core/src/main/java/com/google/protobuf/NioByteString.java \ + java/core/src/main/java/com/google/protobuf/Parser.java \ + java/core/src/main/java/com/google/protobuf/ProtobufArrayList.java \ + java/core/src/main/java/com/google/protobuf/ProtocolMessageEnum.java \ + java/core/src/main/java/com/google/protobuf/ProtocolStringList.java \ + java/core/src/main/java/com/google/protobuf/RepeatedFieldBuilder.java \ + java/core/src/main/java/com/google/protobuf/RopeByteString.java \ + java/core/src/main/java/com/google/protobuf/RpcCallback.java \ + java/core/src/main/java/com/google/protobuf/RpcChannel.java \ + java/core/src/main/java/com/google/protobuf/RpcController.java \ + java/core/src/main/java/com/google/protobuf/RpcUtil.java \ + java/core/src/main/java/com/google/protobuf/Service.java \ + java/core/src/main/java/com/google/protobuf/ServiceException.java \ + java/core/src/main/java/com/google/protobuf/SingleFieldBuilder.java \ + java/core/src/main/java/com/google/protobuf/SmallSortedMap.java \ + java/core/src/main/java/com/google/protobuf/TextFormat.java \ + java/core/src/main/java/com/google/protobuf/TextFormatEscaper.java \ + java/core/src/main/java/com/google/protobuf/TextFormatParseInfoTree.java \ + java/core/src/main/java/com/google/protobuf/TextFormatParseLocation.java \ + java/core/src/main/java/com/google/protobuf/UninitializedMessageException.java \ + java/core/src/main/java/com/google/protobuf/UnknownFieldSet.java \ + java/core/src/main/java/com/google/protobuf/UnknownFieldSetLite.java \ + java/core/src/main/java/com/google/protobuf/UnmodifiableLazyStringList.java \ + java/core/src/main/java/com/google/protobuf/UnsafeByteOperations.java \ + java/core/src/main/java/com/google/protobuf/Utf8.java \ + java/core/src/main/java/com/google/protobuf/WireFormat.java \ + java/core/src/test/java/com/google/protobuf/AbstractMessageTest.java \ + java/core/src/test/java/com/google/protobuf/AnyTest.java \ + java/core/src/test/java/com/google/protobuf/BooleanArrayListTest.java \ + java/core/src/test/java/com/google/protobuf/BoundedByteStringTest.java \ + java/core/src/test/java/com/google/protobuf/ByteBufferWriterTest.java \ + java/core/src/test/java/com/google/protobuf/ByteStringTest.java \ + java/core/src/test/java/com/google/protobuf/CheckUtf8Test.java \ + java/core/src/test/java/com/google/protobuf/CodedInputStreamTest.java \ + java/core/src/test/java/com/google/protobuf/CodedOutputStreamTest.java \ + java/core/src/test/java/com/google/protobuf/DeprecatedFieldTest.java \ + java/core/src/test/java/com/google/protobuf/DescriptorsTest.java \ + java/core/src/test/java/com/google/protobuf/DoubleArrayListTest.java \ + java/core/src/test/java/com/google/protobuf/DynamicMessageTest.java \ + java/core/src/test/java/com/google/protobuf/EnumTest.java \ + java/core/src/test/java/com/google/protobuf/FieldPresenceTest.java \ + 
java/core/src/test/java/com/google/protobuf/FloatArrayListTest.java \ + java/core/src/test/java/com/google/protobuf/ForceFieldBuildersPreRun.java \ + java/core/src/test/java/com/google/protobuf/GeneratedMessageTest.java \ + java/core/src/test/java/com/google/protobuf/IntArrayListTest.java \ + java/core/src/test/java/com/google/protobuf/IsValidUtf8Test.java \ + java/core/src/test/java/com/google/protobuf/IsValidUtf8TestUtil.java \ + java/core/src/test/java/com/google/protobuf/LazyFieldLiteTest.java \ + java/core/src/test/java/com/google/protobuf/LazyFieldTest.java \ + java/core/src/test/java/com/google/protobuf/LazyMessageLiteTest.java \ + java/core/src/test/java/com/google/protobuf/LazyStringArrayListTest.java \ + java/core/src/test/java/com/google/protobuf/LazyStringEndToEndTest.java \ + java/core/src/test/java/com/google/protobuf/LiteEqualsAndHashTest.java \ + java/core/src/test/java/com/google/protobuf/LiteTest.java \ + java/core/src/test/java/com/google/protobuf/LiteralByteStringTest.java \ + java/core/src/test/java/com/google/protobuf/LongArrayListTest.java \ + java/core/src/test/java/com/google/protobuf/MapForProto2LiteTest.java \ + java/core/src/test/java/com/google/protobuf/MapForProto2Test.java \ + java/core/src/test/java/com/google/protobuf/MapTest.java \ + java/core/src/test/java/com/google/protobuf/MessageTest.java \ + java/core/src/test/java/com/google/protobuf/NestedBuildersTest.java \ + java/core/src/test/java/com/google/protobuf/NioByteStringTest.java \ + java/core/src/test/java/com/google/protobuf/ParserTest.java \ + java/core/src/test/java/com/google/protobuf/ProtobufArrayListTest.java \ + java/core/src/test/java/com/google/protobuf/RepeatedFieldBuilderTest.java \ + java/core/src/test/java/com/google/protobuf/RopeByteStringSubstringTest.java \ + java/core/src/test/java/com/google/protobuf/RopeByteStringTest.java \ + java/core/src/test/java/com/google/protobuf/ServiceTest.java \ + java/core/src/test/java/com/google/protobuf/SingleFieldBuilderTest.java \ + java/core/src/test/java/com/google/protobuf/SmallSortedMapTest.java \ + java/core/src/test/java/com/google/protobuf/TestBadIdentifiers.java \ + java/core/src/test/java/com/google/protobuf/TestUtil.java \ + java/core/src/test/java/com/google/protobuf/TextFormatParseInfoTreeTest.java \ + java/core/src/test/java/com/google/protobuf/TextFormatParseLocationTest.java \ + java/core/src/test/java/com/google/protobuf/TextFormatTest.java \ + java/core/src/test/java/com/google/protobuf/UnknownEnumValueTest.java \ + java/core/src/test/java/com/google/protobuf/UnknownFieldSetLiteTest.java \ + java/core/src/test/java/com/google/protobuf/UnknownFieldSetTest.java \ + java/core/src/test/java/com/google/protobuf/UnmodifiableLazyStringListTest.java \ + java/core/src/test/java/com/google/protobuf/WellKnownTypesTest.java \ + java/core/src/test/java/com/google/protobuf/WireFormatTest.java \ + java/core/src/test/proto/com/google/protobuf/any_test.proto \ + java/core/src/test/proto/com/google/protobuf/field_presence_test.proto \ + java/core/src/test/proto/com/google/protobuf/lazy_fields_lite.proto \ + java/core/src/test/proto/com/google/protobuf/lite_equals_and_hash.proto \ + java/core/src/test/proto/com/google/protobuf/map_for_proto2_lite_test.proto \ + java/core/src/test/proto/com/google/protobuf/map_for_proto2_test.proto \ + java/core/src/test/proto/com/google/protobuf/map_initialization_order_test.proto \ + java/core/src/test/proto/com/google/protobuf/map_test.proto \ + java/core/src/test/proto/com/google/protobuf/multiple_files_test.proto \ 
+ java/core/src/test/proto/com/google/protobuf/nested_builders_test.proto \ + java/core/src/test/proto/com/google/protobuf/nested_extension.proto \ + java/core/src/test/proto/com/google/protobuf/nested_extension_lite.proto \ + java/core/src/test/proto/com/google/protobuf/non_nested_extension.proto \ + java/core/src/test/proto/com/google/protobuf/non_nested_extension_lite.proto \ + java/core/src/test/proto/com/google/protobuf/outer_class_name_test.proto \ + java/core/src/test/proto/com/google/protobuf/outer_class_name_test2.proto \ + java/core/src/test/proto/com/google/protobuf/outer_class_name_test3.proto \ + java/core/src/test/proto/com/google/protobuf/test_bad_identifiers.proto \ + java/core/src/test/proto/com/google/protobuf/test_check_utf8.proto \ + java/core/src/test/proto/com/google/protobuf/test_check_utf8_size.proto \ + java/core/src/test/proto/com/google/protobuf/test_custom_options.proto \ + java/core/src/test/proto/com/google/protobuf/test_extra_interfaces.proto \ + java/lite/pom.xml \ + java/pom.xml \ + java/util/pom.xml \ + java/util/src/main/java/com/google/protobuf/util/FieldMaskTree.java \ + java/util/src/main/java/com/google/protobuf/util/FieldMaskUtil.java \ + java/util/src/main/java/com/google/protobuf/util/JsonFormat.java \ + java/util/src/main/java/com/google/protobuf/util/TimeUtil.java \ + java/util/src/test/java/com/google/protobuf/util/FieldMaskTreeTest.java \ + java/util/src/test/java/com/google/protobuf/util/FieldMaskUtilTest.java \ + java/util/src/test/java/com/google/protobuf/util/JsonFormatTest.java \ + java/util/src/test/java/com/google/protobuf/util/TimeUtilTest.java \ + java/util/src/test/proto/com/google/protobuf/util/json_test.proto + +javanano_EXTRA_DIST= \ + javanano/src/main/java/com/google/protobuf/nano/CodedOutputByteBufferNano.java \ + javanano/src/main/java/com/google/protobuf/nano/FieldData.java \ + javanano/src/main/java/com/google/protobuf/nano/FieldArray.java \ + javanano/src/main/java/com/google/protobuf/nano/WireFormatNano.java \ + javanano/src/main/java/com/google/protobuf/nano/Extension.java \ + javanano/src/main/java/com/google/protobuf/nano/CodedInputByteBufferNano.java \ + javanano/src/main/java/com/google/protobuf/nano/UnknownFieldData.java \ + javanano/src/main/java/com/google/protobuf/nano/MessageNano.java \ + javanano/src/main/java/com/google/protobuf/nano/InternalNano.java \ + javanano/src/main/java/com/google/protobuf/nano/InvalidProtocolBufferNanoException.java \ + javanano/src/main/java/com/google/protobuf/nano/MapFactories.java \ + javanano/src/main/java/com/google/protobuf/nano/ExtendableMessageNano.java \ + javanano/src/main/java/com/google/protobuf/nano/MessageNanoPrinter.java \ + javanano/src/test/java/com/google/protobuf/nano/unittest_accessors_nano.proto \ + javanano/src/test/java/com/google/protobuf/nano/unittest_enum_class_nano.proto \ + javanano/src/test/java/com/google/protobuf/nano/unittest_reference_types_nano.proto \ + javanano/src/test/java/com/google/protobuf/nano/unittest_extension_repeated_nano.proto \ + javanano/src/test/java/com/google/protobuf/nano/unittest_has_nano.proto \ + javanano/src/test/java/com/google/protobuf/nano/unittest_nano.proto \ + javanano/src/test/java/com/google/protobuf/nano/unittest_multiple_nameclash_nano.proto \ + javanano/src/test/java/com/google/protobuf/nano/unittest_single_nano.proto \ + javanano/src/test/java/com/google/protobuf/nano/NanoTest.java \ + javanano/src/test/java/com/google/protobuf/nano/unittest_simple_nano.proto \ + 
javanano/src/test/java/com/google/protobuf/nano/unittest_import_nano.proto \ + javanano/src/test/java/com/google/protobuf/nano/unittest_repeated_merge_nano.proto \ + javanano/src/test/java/com/google/protobuf/nano/unittest_extension_nano.proto \ + javanano/src/test/java/com/google/protobuf/nano/unittest_repeated_packables_nano.proto \ + javanano/src/test/java/com/google/protobuf/nano/unittest_extension_singular_nano.proto \ + javanano/src/test/java/com/google/protobuf/nano/unittest_recursive_nano.proto \ + javanano/src/test/java/com/google/protobuf/nano/unittest_extension_packed_nano.proto \ + javanano/src/test/java/com/google/protobuf/nano/unittest_enum_validity_nano.proto \ + javanano/src/test/java/com/google/protobuf/nano/unittest_stringutf8_nano.proto \ + javanano/src/test/java/com/google/protobuf/nano/unittest_multiple_nano.proto \ + javanano/src/test/java/com/google/protobuf/nano/unittest_enum_class_multiple_nano.proto \ + javanano/src/test/java/com/google/protobuf/nano/map_test.proto \ + javanano/README.md \ + javanano/pom.xml + +objectivec_EXTRA_DIST= \ + objectivec/DevTools/check_version_stamps.sh \ + objectivec/DevTools/compile_testing_protos.sh \ + objectivec/DevTools/full_mac_build.sh \ + objectivec/DevTools/pddm.py \ + objectivec/DevTools/pddm_tests.py \ + objectivec/generate_well_known_types.sh \ + objectivec/google/protobuf/Any.pbobjc.h \ + objectivec/google/protobuf/Any.pbobjc.m \ + objectivec/google/protobuf/Api.pbobjc.h \ + objectivec/google/protobuf/Api.pbobjc.m \ + objectivec/google/protobuf/Duration.pbobjc.h \ + objectivec/google/protobuf/Duration.pbobjc.m \ + objectivec/google/protobuf/Empty.pbobjc.h \ + objectivec/google/protobuf/Empty.pbobjc.m \ + objectivec/google/protobuf/FieldMask.pbobjc.h \ + objectivec/google/protobuf/FieldMask.pbobjc.m \ + objectivec/google/protobuf/SourceContext.pbobjc.h \ + objectivec/google/protobuf/SourceContext.pbobjc.m \ + objectivec/google/protobuf/Struct.pbobjc.h \ + objectivec/google/protobuf/Struct.pbobjc.m \ + objectivec/google/protobuf/Timestamp.pbobjc.h \ + objectivec/google/protobuf/Timestamp.pbobjc.m \ + objectivec/google/protobuf/Type.pbobjc.h \ + objectivec/google/protobuf/Type.pbobjc.m \ + objectivec/google/protobuf/Wrappers.pbobjc.h \ + objectivec/google/protobuf/Wrappers.pbobjc.m \ + objectivec/GPBArray.h \ + objectivec/GPBArray.m \ + objectivec/GPBArray_PackagePrivate.h \ + objectivec/GPBBootstrap.h \ + objectivec/GPBCodedInputStream.h \ + objectivec/GPBCodedInputStream.m \ + objectivec/GPBCodedInputStream_PackagePrivate.h \ + objectivec/GPBCodedOutputStream.h \ + objectivec/GPBCodedOutputStream.m \ + objectivec/GPBCodedOutputStream_PackagePrivate.h \ + objectivec/GPBDescriptor.h \ + objectivec/GPBDescriptor.m \ + objectivec/GPBDescriptor_PackagePrivate.h \ + objectivec/GPBDictionary.h \ + objectivec/GPBDictionary.m \ + objectivec/GPBDictionary_PackagePrivate.h \ + objectivec/GPBExtensionInternals.h \ + objectivec/GPBExtensionInternals.m \ + objectivec/GPBExtensionRegistry.h \ + objectivec/GPBExtensionRegistry.m \ + objectivec/GPBMessage.h \ + objectivec/GPBMessage.m \ + objectivec/GPBMessage_PackagePrivate.h \ + objectivec/GPBProtocolBuffers.h \ + objectivec/GPBProtocolBuffers.m \ + objectivec/GPBProtocolBuffers_RuntimeSupport.h \ + objectivec/GPBRootObject.h \ + objectivec/GPBRootObject.m \ + objectivec/GPBRootObject_PackagePrivate.h \ + objectivec/GPBRuntimeTypes.h \ + objectivec/GPBUnknownField.h \ + objectivec/GPBUnknownField.m \ + objectivec/GPBUnknownField_PackagePrivate.h \ + objectivec/GPBUnknownFieldSet.h \ + 
objectivec/GPBUnknownFieldSet.m \ + objectivec/GPBUnknownFieldSet_PackagePrivate.h \ + objectivec/GPBUtilities.h \ + objectivec/GPBUtilities.m \ + objectivec/GPBUtilities_PackagePrivate.h \ + objectivec/GPBWellKnownTypes.h \ + objectivec/GPBWellKnownTypes.m \ + objectivec/GPBWireFormat.h \ + objectivec/GPBWireFormat.m \ + objectivec/ProtocolBuffers_iOS.xcodeproj/project.pbxproj \ + objectivec/ProtocolBuffers_iOS.xcodeproj/project.xcworkspace/contents.xcworkspacedata \ + objectivec/ProtocolBuffers_iOS.xcodeproj/project.xcworkspace/xcshareddata/WorkspaceSettings.xcsettings \ + objectivec/ProtocolBuffers_iOS.xcodeproj/xcshareddata/xcbaselines/8BBEA4A5147C727100C4ADB7.xcbaseline/FFE465CA-0E74-40E8-9F09-500B66B7DCB2.plist \ + objectivec/ProtocolBuffers_iOS.xcodeproj/xcshareddata/xcbaselines/8BBEA4A5147C727100C4ADB7.xcbaseline/Info.plist \ + objectivec/ProtocolBuffers_iOS.xcodeproj/xcshareddata/xcschemes/PerformanceTests.xcscheme \ + objectivec/ProtocolBuffers_iOS.xcodeproj/xcshareddata/xcschemes/ProtocolBuffers.xcscheme \ + objectivec/ProtocolBuffers_OSX.xcodeproj/project.pbxproj \ + objectivec/ProtocolBuffers_OSX.xcodeproj/project.xcworkspace/contents.xcworkspacedata \ + objectivec/ProtocolBuffers_OSX.xcodeproj/project.xcworkspace/xcshareddata/WorkspaceSettings.xcsettings \ + objectivec/ProtocolBuffers_OSX.xcodeproj/xcshareddata/xcschemes/PerformanceTests.xcscheme \ + objectivec/ProtocolBuffers_OSX.xcodeproj/xcshareddata/xcschemes/ProtocolBuffers.xcscheme \ + objectivec/README.md \ + objectivec/Tests/golden_message \ + objectivec/Tests/golden_packed_fields_message \ + objectivec/Tests/GPBARCUnittestProtos.m \ + objectivec/Tests/GPBArrayTests.m \ + objectivec/Tests/GPBCodedInputStreamTests.m \ + objectivec/Tests/GPBCodedOuputStreamTests.m \ + objectivec/Tests/GPBConcurrencyTests.m \ + objectivec/Tests/GPBDescriptorTests.m \ + objectivec/Tests/GPBDictionaryTests+Bool.m \ + objectivec/Tests/GPBDictionaryTests+Int32.m \ + objectivec/Tests/GPBDictionaryTests+Int64.m \ + objectivec/Tests/GPBDictionaryTests+String.m \ + objectivec/Tests/GPBDictionaryTests+UInt32.m \ + objectivec/Tests/GPBDictionaryTests+UInt64.m \ + objectivec/Tests/GPBDictionaryTests.pddm \ + objectivec/Tests/GPBMessageTests+Merge.m \ + objectivec/Tests/GPBMessageTests+Runtime.m \ + objectivec/Tests/GPBMessageTests+Serialization.m \ + objectivec/Tests/GPBMessageTests.m \ + objectivec/Tests/GPBObjectiveCPlusPlusTest.mm \ + objectivec/Tests/GPBPerfTests.m \ + objectivec/Tests/GPBSwiftTests.swift \ + objectivec/Tests/GPBTestUtilities.h \ + objectivec/Tests/GPBTestUtilities.m \ + objectivec/Tests/GPBUnittestProtos.m \ + objectivec/Tests/GPBUnknownFieldSetTest.m \ + objectivec/Tests/GPBUtilitiesTests.m \ + objectivec/Tests/GPBWellKnownTypesTest.m \ + objectivec/Tests/GPBWireFormatTests.m \ + objectivec/Tests/iOSTestHarness/AppDelegate.m \ + objectivec/Tests/iOSTestHarness/en.lproj/InfoPlist.strings \ + objectivec/Tests/iOSTestHarness/Images.xcassets/AppIcon.appiconset/Contents.json \ + objectivec/Tests/iOSTestHarness/Images.xcassets/AppIcon.appiconset/iPad6.png \ + objectivec/Tests/iOSTestHarness/Images.xcassets/AppIcon.appiconset/iPad6_2x.png \ + objectivec/Tests/iOSTestHarness/Images.xcassets/AppIcon.appiconset/iPad7.png \ + objectivec/Tests/iOSTestHarness/Images.xcassets/AppIcon.appiconset/iPad7_2x.png \ + objectivec/Tests/iOSTestHarness/Images.xcassets/AppIcon.appiconset/iPhone6.png \ + objectivec/Tests/iOSTestHarness/Images.xcassets/AppIcon.appiconset/iPhone6_2x.png \ + 
objectivec/Tests/iOSTestHarness/Images.xcassets/AppIcon.appiconset/iPhone7_2x.png \ + objectivec/Tests/iOSTestHarness/Images.xcassets/AppIcon.appiconset/iPhone7_3x.png \ + objectivec/Tests/iOSTestHarness/Images.xcassets/LaunchImage.launchimage/Contents.json \ + objectivec/Tests/iOSTestHarness/Info.plist \ + objectivec/Tests/iOSTestHarness/LaunchScreen.xib \ + objectivec/Tests/text_format_map_unittest_data.txt \ + objectivec/Tests/text_format_unittest_data.txt \ + objectivec/Tests/unittest_cycle.proto \ + objectivec/Tests/unittest_objc.proto \ + objectivec/Tests/unittest_objc_startup.proto \ + objectivec/Tests/unittest_runtime_proto2.proto \ + objectivec/Tests/unittest_runtime_proto3.proto \ + objectivec/Tests/UnitTests-Bridging-Header.h \ + objectivec/Tests/UnitTests-Info.plist \ + Protobuf.podspec + +python_EXTRA_DIST= \ + python/MANIFEST.in \ + python/google/__init__.py \ + python/google/protobuf/__init__.py \ + python/google/protobuf/descriptor.py \ + python/google/protobuf/descriptor_database.py \ + python/google/protobuf/descriptor_pool.py \ + python/google/protobuf/internal/__init__.py \ + python/google/protobuf/internal/_parameterized.py \ + python/google/protobuf/internal/any_test.proto \ + python/google/protobuf/internal/any_test.proto \ + python/google/protobuf/internal/api_implementation.cc \ + python/google/protobuf/internal/api_implementation.py \ + python/google/protobuf/internal/containers.py \ + python/google/protobuf/internal/decoder.py \ + python/google/protobuf/internal/descriptor_database_test.py \ + python/google/protobuf/internal/descriptor_pool_test.py \ + python/google/protobuf/internal/descriptor_pool_test1.proto \ + python/google/protobuf/internal/descriptor_pool_test2.proto \ + python/google/protobuf/internal/descriptor_test.py \ + python/google/protobuf/internal/encoder.py \ + python/google/protobuf/internal/enum_type_wrapper.py \ + python/google/protobuf/internal/factory_test1.proto \ + python/google/protobuf/internal/factory_test2.proto \ + python/google/protobuf/internal/generator_test.py \ + python/google/protobuf/internal/import_test_package/__init__.py \ + python/google/protobuf/internal/import_test_package/inner.proto \ + python/google/protobuf/internal/import_test_package/outer.proto \ + python/google/protobuf/internal/json_format_test.py \ + python/google/protobuf/internal/message_factory_test.py \ + python/google/protobuf/internal/message_listener.py \ + python/google/protobuf/internal/message_set_extensions.proto \ + python/google/protobuf/internal/message_test.py \ + python/google/protobuf/internal/missing_enum_values.proto \ + python/google/protobuf/internal/more_extensions.proto \ + python/google/protobuf/internal/more_extensions_dynamic.proto \ + python/google/protobuf/internal/more_messages.proto \ + python/google/protobuf/internal/packed_field_test.proto \ + python/google/protobuf/internal/proto_builder_test.py \ + python/google/protobuf/internal/python_message.py \ + python/google/protobuf/internal/reflection_test.py \ + python/google/protobuf/internal/service_reflection_test.py \ + python/google/protobuf/internal/symbol_database_test.py \ + python/google/protobuf/internal/test_bad_identifiers.proto \ + python/google/protobuf/internal/test_util.py \ + python/google/protobuf/internal/text_encoding_test.py \ + python/google/protobuf/internal/text_format_test.py \ + python/google/protobuf/internal/type_checkers.py \ + python/google/protobuf/internal/unknown_fields_test.py \ + python/google/protobuf/internal/well_known_types.py \ + 
python/google/protobuf/internal/well_known_types.py \ + python/google/protobuf/internal/well_known_types_test.py \ + python/google/protobuf/internal/well_known_types_test.py \ + python/google/protobuf/internal/wire_format.py \ + python/google/protobuf/internal/wire_format_test.py \ + python/google/protobuf/json_format.py \ + python/google/protobuf/message.py \ + python/google/protobuf/message_factory.py \ + python/google/protobuf/proto_builder.py \ + python/google/protobuf/pyext/README \ + python/google/protobuf/pyext/__init__.py \ + python/google/protobuf/pyext/cpp_message.py \ + python/google/protobuf/pyext/descriptor.cc \ + python/google/protobuf/pyext/descriptor.h \ + python/google/protobuf/pyext/descriptor_containers.cc \ + python/google/protobuf/pyext/descriptor_containers.h \ + python/google/protobuf/pyext/descriptor_database.cc \ + python/google/protobuf/pyext/descriptor_database.h \ + python/google/protobuf/pyext/descriptor_pool.cc \ + python/google/protobuf/pyext/descriptor_pool.h \ + python/google/protobuf/pyext/extension_dict.cc \ + python/google/protobuf/pyext/extension_dict.h \ + python/google/protobuf/pyext/map_container.cc \ + python/google/protobuf/pyext/map_container.h \ + python/google/protobuf/pyext/message.cc \ + python/google/protobuf/pyext/message.h \ + python/google/protobuf/pyext/proto2_api_test.proto \ + python/google/protobuf/pyext/python.proto \ + python/google/protobuf/pyext/python_protobuf.h \ + python/google/protobuf/pyext/repeated_composite_container.cc \ + python/google/protobuf/pyext/repeated_composite_container.h \ + python/google/protobuf/pyext/repeated_scalar_container.cc \ + python/google/protobuf/pyext/repeated_scalar_container.h \ + python/google/protobuf/pyext/scoped_pyobject_ptr.h \ + python/google/protobuf/reflection.py \ + python/google/protobuf/service.py \ + python/google/protobuf/service_reflection.py \ + python/google/protobuf/symbol_database.py \ + python/google/protobuf/text_encoding.py \ + python/google/protobuf/text_format.py \ + python/mox.py \ + python/setup.py \ + python/stubout.py \ + python/tox.ini \ + python/README.md + +ruby_EXTRA_DIST= \ + ruby/Gemfile \ + ruby/Gemfile.lock \ + ruby/.gitignore \ + ruby/README.md \ + ruby/Rakefile \ + ruby/ext/google/protobuf_c/defs.c \ + ruby/ext/google/protobuf_c/encode_decode.c \ + ruby/ext/google/protobuf_c/extconf.rb \ + ruby/ext/google/protobuf_c/map.c \ + ruby/ext/google/protobuf_c/message.c \ + ruby/ext/google/protobuf_c/protobuf.c \ + ruby/ext/google/protobuf_c/protobuf.h \ + ruby/ext/google/protobuf_c/repeated_field.c \ + ruby/ext/google/protobuf_c/storage.c \ + ruby/ext/google/protobuf_c/upb.c \ + ruby/ext/google/protobuf_c/upb.h \ + ruby/google-protobuf.gemspec \ + ruby/lib/google/protobuf/message_exts.rb \ + ruby/lib/google/protobuf/repeated_field.rb \ + ruby/lib/google/protobuf.rb \ + ruby/pom.xml \ + ruby/src/main/java/com/google/protobuf/jruby/RubyBuilder.java \ + ruby/src/main/java/com/google/protobuf/jruby/RubyDescriptor.java \ + ruby/src/main/java/com/google/protobuf/jruby/RubyDescriptorPool.java \ + ruby/src/main/java/com/google/protobuf/jruby/RubyEnumBuilderContext.java \ + ruby/src/main/java/com/google/protobuf/jruby/RubyEnumDescriptor.java \ + ruby/src/main/java/com/google/protobuf/jruby/RubyEnum.java \ + ruby/src/main/java/com/google/protobuf/jruby/RubyFieldDescriptor.java \ + ruby/src/main/java/com/google/protobuf/jruby/RubyMap.java \ + ruby/src/main/java/com/google/protobuf/jruby/RubyMessageBuilderContext.java \ + 
ruby/src/main/java/com/google/protobuf/jruby/RubyMessage.java \ + ruby/src/main/java/com/google/protobuf/jruby/RubyOneofBuilderContext.java \ + ruby/src/main/java/com/google/protobuf/jruby/RubyOneofDescriptor.java \ + ruby/src/main/java/com/google/protobuf/jruby/RubyProtobuf.java \ + ruby/src/main/java/com/google/protobuf/jruby/RubyRepeatedField.java \ + ruby/src/main/java/com/google/protobuf/jruby/SentinelOuterClass.java \ + ruby/src/main/java/com/google/protobuf/jruby/Utils.java \ + ruby/src/main/java/google/ProtobufJavaService.java \ + ruby/src/main/sentinel.proto \ + ruby/tests/basic.rb \ + ruby/tests/repeated_field_test.rb \ + ruby/tests/stress.rb \ + ruby/tests/generated_code.proto \ + ruby/tests/generated_code_test.rb \ + ruby/travis-test.sh + +js_EXTRA_DIST= \ + js/README.md \ + js/binary/arith.js \ + js/binary/arith_test.js \ + js/binary/constants.js \ + js/binary/decoder.js \ + js/binary/decoder_test.js \ + js/binary/proto_test.js \ + js/binary/reader.js \ + js/binary/reader_test.js \ + js/binary/utils.js \ + js/binary/utils_test.js \ + js/binary/writer.js \ + js/binary/writer_test.js \ + js/data.proto \ + js/debug.js \ + js/debug_test.js \ + js/gulpfile.js \ + js/jasmine.json \ + js/message.js \ + js/message_test.js \ + js/node_loader.js \ + js/package.json \ + js/proto3_test.js \ + js/proto3_test.proto \ + js/test.proto \ + js/test2.proto \ + js/test3.proto \ + js/test4.proto \ + js/test5.proto \ + js/test_bootstrap.js \ + js/testbinary.proto \ + js/testempty.proto + +all_EXTRA_DIST=$(csharp_EXTRA_DIST) $(java_EXTRA_DIST) $(javanano_EXTRA_DIST) $(objectivec_EXTRA_DIST) $(python_EXTRA_DIST) $(ruby_EXTRA_DIST) $(js_EXTRA_DIST) + +EXTRA_DIST = $(@DIST_LANG@_EXTRA_DIST) \ + autogen.sh \ + generate_descriptor_proto.sh \ + README.md \ + LICENSE \ + CONTRIBUTORS.txt \ + CHANGES.txt \ + update_file_lists.sh \ + BUILD \ + gmock.BUILD \ + WORKSPACE \ + cmake/CMakeLists.txt \ + cmake/README.md \ + cmake/extract_includes.bat.in \ + cmake/install.cmake \ + cmake/libprotobuf.cmake \ + cmake/libprotobuf-lite.cmake \ + cmake/libprotoc.cmake \ + cmake/protobuf-config-version.cmake.in \ + cmake/protobuf-config.cmake.in \ + cmake/protobuf-module.cmake.in \ + cmake/protoc.cmake \ + cmake/tests.cmake \ + editors/README.txt \ + editors/proto.vim \ + editors/protobuf-mode.el \ + examples/README.txt \ + examples/Makefile \ + examples/addressbook.proto \ + examples/add_person.cc \ + examples/add_person.go \ + examples/add_person_test.go \ + examples/list_people.cc \ + examples/list_people.go \ + examples/AddPerson.java \ + examples/ListPeople.java \ + examples/add_person.py \ + examples/list_people.py \ + examples/list_people_test.go \ + protobuf.bzl \ + six.BUILD \ + util/python/BUILD + +# Deletes all the files generated by autogen.sh. 
+MAINTAINERCLEANFILES = \ + aclocal.m4 \ + ar-lib \ + config.guess \ + config.sub \ + configure \ + depcomp \ + install-sh \ + ltmain.sh \ + Makefile.in \ + missing \ + mkinstalldirs \ + config.h.in \ + stamp.h.in \ + m4/ltsugar.m4 \ + m4/libtool.m4 \ + m4/ltversion.m4 \ + m4/lt~obsolete.m4 \ + m4/ltoptions.m4 diff --git a/packager/third_party/protobuf/OWNERS b/packager/third_party/protobuf/OWNERS new file mode 100644 index 0000000000..e7125d323a --- /dev/null +++ b/packager/third_party/protobuf/OWNERS @@ -0,0 +1,2 @@ +pkasting@chromium.org +xyzzyz@chromium.org diff --git a/packager/third_party/protobuf/Protobuf.podspec b/packager/third_party/protobuf/Protobuf.podspec new file mode 100644 index 0000000000..0bbd06df01 --- /dev/null +++ b/packager/third_party/protobuf/Protobuf.podspec @@ -0,0 +1,40 @@ +# This file describes to Cocoapods how to integrate the Objective-C runtime into a dependent +# project. +# Despite this file being specific to Objective-C, it needs to be on the root of the repository. +# Otherwise, Cocoapods gives trouble like not picking up the license file correctly, or not letting +# dependent projects use the :git notation to refer to the library. +Pod::Spec.new do |s| + s.name = 'Protobuf' + s.version = '3.0.0-beta-2' + s.summary = 'Protocol Buffers v.3 runtime library for Objective-C.' + s.homepage = 'https://github.com/google/protobuf' + s.license = 'New BSD' + s.authors = { 'The Protocol Buffers contributors' => 'protobuf@googlegroups.com' } + + s.source = { :git => 'https://github.com/google/protobuf.git', + :tag => "v#{s.version}" } + + s.source_files = 'objectivec/*.{h,m}', + 'objectivec/google/protobuf/Any.pbobjc.{h,m}', + 'objectivec/google/protobuf/Api.pbobjc.{h,m}', + 'objectivec/google/protobuf/Duration.pbobjc.h', + 'objectivec/google/protobuf/Empty.pbobjc.{h,m}', + 'objectivec/google/protobuf/FieldMask.pbobjc.{h,m}', + 'objectivec/google/protobuf/SourceContext.pbobjc.{h,m}', + 'objectivec/google/protobuf/Struct.pbobjc.{h,m}', + 'objectivec/google/protobuf/Timestamp.pbobjc.h', + 'objectivec/google/protobuf/Type.pbobjc.{h,m}', + 'objectivec/google/protobuf/Wrappers.pbobjc.{h,m}' + # Timestamp.pbobjc.m and Duration.pbobjc.m are #imported by GPBWellKnownTypes.m. So we can't + # compile them (duplicate symbols), but we need them available for the importing: + s.preserve_paths = 'objectivec/google/protobuf/Duration.pbobjc.m', + 'objectivec/google/protobuf/Timestamp.pbobjc.m' + # The following would cause duplicate symbol definitions. GPBProtocolBuffers is expected to be + # left out, as it's an umbrella implementation file. 
+ s.exclude_files = 'objectivec/GPBProtocolBuffers.m' + s.header_mappings_dir = 'objectivec' + + s.ios.deployment_target = '7.1' + s.osx.deployment_target = '10.9' + s.requires_arc = false +end diff --git a/packager/third_party/protobuf/README.chromium b/packager/third_party/protobuf/README.chromium index de7839265a..53aac51ecc 100644 --- a/packager/third_party/protobuf/README.chromium +++ b/packager/third_party/protobuf/README.chromium @@ -1,45 +1,90 @@ Name: Protocol Buffers Short Name: protobuf -URL: http://protobuf.googlecode.com/svn/trunk +URL: https://github.com/google/protobuf License: BSD -License File: COPYING.txt -Version: unknown -Revision: r476 +License File: LICENSE +Version: 3.0.0-beta-3 +Revision: 3470b6895aa659b7559ed678e029a5338e535f14 Security Critical: yes -Local files (not taken from upstream): -README.chromium -config.h -descriptor2_pb.py -protobuf_lite_java_descriptor_proto.py -protobuf_lite_java_parse_pom.py +Steps used to create the current version: +1. Pull the release from https://github.com/google/protobuf/releases +2. Add build files (BUILD.gn, proto_library.gni, protobuf.gyp, + protobuf_lite.gypi, protobuf_nacl.gyp). -A protobuf.gyp file has been added for building with Chromium. + Be sure to update the list of source files, as additional .cc files and + headers might have been added -- you need to find the transitive closure of + include files required by targets. -This code has been patched to support unknown field retention in protobuf-lite. -See r62331 for the patch. + Other things to care about are defines required by protobuf on various + platforms, warnings generated by compilers, and new dependencies introduced. +3. Get open-source library six.py from https://pypi.python.org/pypi/six/ and add + it to protobuf/third_party/six/six.py. +4. Apply patches in patches/ (see the description below): -This code has been patched to ensure that files in the target protobuf_lite -do not include headers from protobuf_full. See r173228 for the patch. + $ for patch in patches/*; do patch -s -p1 < $patch; done -This code has been patched to make the target protobuf_lite a component so that -targets that depend on it can be componentized. See http://crbug.com/172800 for -details, and r179806 for the patch. + For future releases, it will be worth looking into which patches still need + to be applied. +5. Generate descriptor_pb2.py using something like the following steps. Make + sure you've regenerated your buildfiles and will build protoc from the + newly-modified sources above. -Revision 504 was cherry-picked from upstream. -Revision 512 was cherry-picked from upstream. -Revision 516 was cherry-picked from upstream. -Revision 517 was cherry-picked from upstream. -Revision 522 was cherry-picked from upstream. -Revision 523 was cherry-picked from upstream. -Revision 524 was cherry-picked from upstream. -The `&file->options() != NULL &&` was removed from descriptor.cc + $ cd $SRC_DIR + $ ninja -C out/Debug protoc + $ cd third_party/protobuf/src + $ ../../../out/Debug/protoc --python_out=../python google/protobuf/descriptor.proto -Notes about Java: -We have not forked the Java version of protobuf-lite, so the Java version does -not support unknown field retention. +6. Add an __init__.py to protobuf/ that adds third_party/six/ to Python path. +7. Update README.chromium. -The list of Java files included in the lite profile for Java is parsed from the -maven java/pom.xml by the script protobuf_lite_java_parse_pom.py. 
See
-'javac_includes' variable in protobuf_lite_javalib GYP target.
+Description of the patches:
+- 0001-ignore-option-retain-unknown-fields.patch
+
+  Previous versions of protobuf in Chromium carried a local patch that retained
+  unknown fields in protobuf_lite mode. It was enabled by setting option
+  retain_unknown_fields = true in the .proto file. Now that it is enabled by
+  default, this option is no longer recognized by protobuf, and so I had to
+  patch it so that I don't have to fix all .proto files in Chromium in a single
+  CL.
+
+  I plan to remove those occurrences, and then this patch will no longer be
+  necessary.
+
+- 0003-remove-static-initializers.patch
+
+  This patch removes all static initializers from Chromium. The change in the
+  Status class is not completely compatible with upstream, but it's compatible
+  enough to work in Chromium, which doesn't use this functionality yet. The
+  work on upstreaming the removal of static initializers is in progress:
+  https://github.com/google/protobuf/issues/1404
+
+- 0004-fix-integer-types-and-shared-library-exports.patch
+
+  This patch makes protobuf's int64 map to int64_t (as opposed to long long in
+  upstream), and similarly for other integer types. It also allows exporting
+  protobuf symbols in Linux .so libraries, so that protobuf can be built as a
+  component (see http://crrev.com/179806).
+
+- 0005-fix-include-js-generator.patch
+
+  During a merge with the internal branch, the <> in one of the #includes were
+  accidentally replaced with "", which results in a failure in checkdeps.
+
+  Fixed in https://github.com/google/protobuf/pull/1547.
+
+- 0007-uninline_googleonce.patch
+- 0008-uninline_get_empty_string.patch
+- 0009-uninline-arenastring.patch
+- 0010-uninline-generated-code.patch
+
+  These patches uninline some functions, resulting in a significant reduction
+  (somewhere between 500 KB and 1 MB) in binary size.
+
+- 0011-libprotobuf_export.patch
+
+  During a merge with the internal branch, the dll export attribute was
+  accidentally removed in C++11 mode.
+
+  Fixed in https://github.com/google/protobuf/pull/1549.
diff --git a/packager/third_party/protobuf/README.md b/packager/third_party/protobuf/README.md
new file mode 100644
index 0000000000..ba9c589d2b
--- /dev/null
+++ b/packager/third_party/protobuf/README.md
@@ -0,0 +1,77 @@
+Protocol Buffers - Google's data interchange format
+===================================================
+
+[![Build Status](https://travis-ci.org/google/protobuf.svg?branch=master)](https://travis-ci.org/google/protobuf) [![Build status](https://ci.appveyor.com/api/projects/status/73ctee6ua4w2ruin?svg=true)](https://ci.appveyor.com/project/protobuf/protobuf)
+
+Copyright 2008 Google Inc.
+
+https://developers.google.com/protocol-buffers/
+
+Overview
+--------
+
+Protocol Buffers (a.k.a. protobuf) are Google's language-neutral,
+platform-neutral, extensible mechanism for serializing structured data. You
+can find [protobuf's documentation on the Google Developers site](https://developers.google.com/protocol-buffers/).
+
+This README file contains protobuf installation instructions. To install
+protobuf, you need to install the protocol compiler (used to compile .proto
+files) and the protobuf runtime for your chosen programming language.
+
+Protocol Compiler Installation
+------------------------------
+
+The protocol compiler is written in C++. If you are using C++, please follow
+the [C++ Installation Instructions](src/README.md) to install protoc along
+with the C++ runtime.
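+
+For example, once protoc and the C++ runtime are installed, a quick way to
+check the setup is to compile the example schema that ships with this
+repository (the command below assumes it is run from the repository root):
+
+```
+protoc --proto_path=examples --cpp_out=. examples/addressbook.proto
+```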
+ +For non-C++ users, the simplest way to install the protocol compiler is to +download a pre-built binary from our release page: + + [https://github.com/google/protobuf/releases](https://github.com/google/protobuf/releases) + +In the downloads section of each release, you can find pre-built binaries in +zip packages: protoc-$VERSION-$PLATFORM.zip. It contains the protoc binary +as well as a set of standard .proto files distributed along with protobuf. + +If you are looking for an old version that is not available in the release +page, check out the maven repo here: + + [http://repo1.maven.org/maven2/com/google/protobuf/protoc/](http://repo1.maven.org/maven2/com/google/protobuf/protoc/) + +These pre-built binaries are only provided for released versions. If you want +to use the github master version at HEAD, or you need to modify protobuf code, +or you are using C++, it's recommended to build your own protoc binary from +source. + +If you would like to build protoc binary from source, see the [C++ Installation +Instructions](src/README.md). + +Protobuf Runtime Installation +----------------------------- + +Protobuf supports several different programming languages. For each programming +language, you can find instructions in the corresponding source directory about +how to install protobuf runtime for that specific language: + +| Language | Source | +|--------------------------------------|-------------------------------------------------------| +| C++ (include C++ runtime and protoc) | [src](src) | +| Java | [java](java) | +| Python | [python](python) | +| Objective-C | [objectivec](objectivec) | +| C# | [csharp](csharp) | +| JavaNano | [javanano](javanano) | +| JavaScript | [js](js) | +| Ruby | [ruby](ruby) | +| Go | [golang/protobuf](https://github.com/golang/protobuf) | +| PHP | TBD | + + +Usage +----- + +The complete documentation for Protocol Buffers is available via the +web at: + + https://developers.google.com/protocol-buffers/ diff --git a/packager/third_party/protobuf/WORKSPACE b/packager/third_party/protobuf/WORKSPACE new file mode 100644 index 0000000000..065dc81984 --- /dev/null +++ b/packager/third_party/protobuf/WORKSPACE @@ -0,0 +1,53 @@ +new_http_archive( + name = "gmock_archive", + url = "https://googlemock.googlecode.com/files/gmock-1.7.0.zip", + sha256 = "26fcbb5925b74ad5fc8c26b0495dfc96353f4d553492eb97e85a8a6d2f43095b", + build_file = "gmock.BUILD", +) + +new_http_archive( + name = "six_archive", + url = "https://pypi.python.org/packages/source/s/six/six-1.10.0.tar.gz#md5=34eed507548117b2ab523ab14b2f8b55", + sha256 = "105f8d68616f8248e24bf0e9372ef04d3cc10104f1980f54d57b2ce73a5ad56a", + build_file = "six.BUILD", +) + +bind( + name = "python_headers", + actual = "//util/python:python_headers", +) + +bind( + name = "gtest", + actual = "@gmock_archive//:gtest", +) + +bind( + name = "gtest_main", + actual = "@gmock_archive//:gtest_main", +) + +bind( + name = "six", + actual = "@six_archive//:six", +) + +maven_jar( + name = "guava_maven", + artifact = "com.google.guava:guava:18.0", +) + +bind( + name = "guava", + actual = "@guava_maven//jar", +) + +maven_jar( + name = "gson_maven", + artifact = "com.google.code.gson:gson:2.3", +) + +bind( + name = "gson", + actual = "@gson_maven//jar", +) diff --git a/packager/third_party/protobuf/__init__.py b/packager/third_party/protobuf/__init__.py old mode 100755 new mode 100644 index 139597f9cb..11e1cf2d20 --- a/packager/third_party/protobuf/__init__.py +++ b/packager/third_party/protobuf/__init__.py @@ -1,2 +1,6 @@ +import sys +import 
os +THIS_DIR = os.path.dirname(os.path.abspath(__file__)) +sys.path.insert(0, os.path.join(THIS_DIR, "third_party", "six")) diff --git a/packager/third_party/protobuf/appveyor.bat b/packager/third_party/protobuf/appveyor.bat new file mode 100644 index 0000000000..9a46b92892 --- /dev/null +++ b/packager/third_party/protobuf/appveyor.bat @@ -0,0 +1,29 @@ +setlocal + +IF %language%==cpp GOTO build_cpp +IF %language%==csharp GOTO build_csharp + +echo Unsupported language %language%. Exiting. +goto :error + +:build_cpp +echo Building C++ +mkdir build_msvc +cd build_msvc +cmake -G "%generator%" -Dprotobuf_BUILD_SHARED_LIBS=%BUILD_DLL% ../cmake +msbuild protobuf.sln /p:Platform=%vcplatform% /logger:"C:\Program Files\AppVeyor\BuildAgent\Appveyor.MSBuildLogger.dll" || goto error +cd %configuration% +tests.exe || goto error +goto :EOF + +:build_csharp +echo Building C# +cd csharp\src +nuget restore +msbuild Google.Protobuf.sln /p:Platform="Any CPU" /logger:"C:\Program Files\AppVeyor\BuildAgent\Appveyor.MSBuildLogger.dll" || goto error +nunit-console Google.Protobuf.Test\bin\%configuration%\Google.Protobuf.Test.dll || goto error +goto :EOF + +:error +echo Failed! +EXIT /b %ERRORLEVEL% diff --git a/packager/third_party/protobuf/appveyor.yml b/packager/third_party/protobuf/appveyor.yml new file mode 100644 index 0000000000..c84ecae2f3 --- /dev/null +++ b/packager/third_party/protobuf/appveyor.yml @@ -0,0 +1,32 @@ +# Only test one combination: "Visual Studio 12 + Win64 + Debug + DLL". We can +# test more combinations but AppVeyor just takes too long to finish (each +# combination takes ~15mins). +platform: + - Win64 + +configuration: + - Debug + +environment: + matrix: + - language: cpp + BUILD_DLL: ON + + - language: csharp + +install: + - ps: Start-FileDownload https://googlemock.googlecode.com/files/gmock-1.7.0.zip + - 7z x gmock-1.7.0.zip + - rename gmock-1.7.0 gmock + +before_build: + - if %platform%==Win32 set generator=Visual Studio 12 + - if %platform%==Win64 set generator=Visual Studio 12 Win64 + - if %platform%==Win32 set vcplatform=Win32 + - if %platform%==Win64 set vcplatform=x64 + +build_script: + - CALL appveyor.bat + +skip_commits: + message: /.*\[skip appveyor\].*/ diff --git a/packager/third_party/protobuf/autogen.sh b/packager/third_party/protobuf/autogen.sh new file mode 100755 index 0000000000..5b4c29f8c0 --- /dev/null +++ b/packager/third_party/protobuf/autogen.sh @@ -0,0 +1,46 @@ +#!/bin/sh + +# Run this script to generate the configure script and other files that will +# be included in the distribution. These files are not checked in because they +# are automatically generated. + +set -e + +if [ ! -z "$@" ]; then + for argument in "$@"; do + case $argument in + # make curl silent + "-s") + curlopts="-s" + ;; + esac + done +fi + + +# Check that we're being run from the right directory. +if test ! -f src/google/protobuf/stubs/common.h; then + cat >&2 << __EOF__ +Could not find source code. Make sure you are running this script from the +root of the distribution tree. +__EOF__ + exit 1 +fi + +# Check that gmock is present. Usually it is already there since the +# directory is set up as an SVN external. +if test ! -e gmock; then + echo "Google Mock not present. Fetching gmock-1.7.0 from the web..." + curl $curlopts -O https://googlemock.googlecode.com/files/gmock-1.7.0.zip + unzip -q gmock-1.7.0.zip + rm gmock-1.7.0.zip + mv gmock-1.7.0 gmock +fi + +set -ex + +# TODO(kenton): Remove the ",no-obsolete" part and fix the resulting warnings. 
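+# (autoreconf regenerates configure and the Makefile.in files from
+# configure.ac and the Makefile.am files; -f forces regeneration even if the
+# outputs look up to date, and -i copies in any missing auxiliary files such
+# as install-sh and missing.)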
+autoreconf -f -i -Wall,no-obsolete + +rm -rf autom4te.cache config.h.in~ +exit 0 diff --git a/packager/third_party/protobuf/benchmarks/Makefile.am b/packager/third_party/protobuf/benchmarks/Makefile.am new file mode 100644 index 0000000000..f730afe576 --- /dev/null +++ b/packager/third_party/protobuf/benchmarks/Makefile.am @@ -0,0 +1,69 @@ + +benchmarks_protoc_inputs = \ + benchmarks.proto \ + benchmark_messages_proto3.proto + +benchmarks_protoc_inputs_proto2 = \ + benchmark_messages_proto2.proto + +benchmarks_protoc_outputs = \ + benchmarks.pb.cc \ + benchmarks.pb.h \ + benchmark_messages_proto3.pb.cc \ + benchmark_messages_proto3.pb.h + +benchmarks_protoc_outputs_proto2 = \ + benchmark_messages_proto2.pb.cc \ + benchmark_messages_proto2.pb.h + +bin_PROGRAMS = generate-datasets + +generate_datasets_LDADD = $(top_srcdir)/src/libprotobuf.la +generate_datasets_SOURCES = generate_datasets.cc +generate_datasets_CPPFLAGS = -I$(top_srcdir)/src -I$(srcdir) +nodist_generate_datasets_SOURCES = \ + $(benchmarks_protoc_outputs) \ + $(benchmarks_protoc_outputs_proto2) + +# Explicit deps because BUILT_SOURCES are only done before a "make all/check" +# so a direct "make test_cpp" could fail if parallel enough. +# See: https://www.gnu.org/software/automake/manual/html_node/Built-Sources-Example.html#Recording-Dependencies-manually +generate_datasets-generate_datasets.$(OBJEXT): benchmarks.pb.h + +$(benchmarks_protoc_outputs): protoc_middleman +$(benchmarks_protoc_outputs_proto2): protoc_middleman2 + +CLEANFILES = \ + $(benchmarks_protoc_outputs) \ + $(benchmarks_protoc_outputs_proto2) \ + protoc_middleman \ + protoc_middleman2 \ + dataset.* + +MAINTAINERCLEANFILES = \ + Makefile.in + +if USE_EXTERNAL_PROTOC + +protoc_middleman: $(benchmarks_protoc_inputs) + $(PROTOC) -I$(srcdir) -I$(top_srcdir) --cpp_out=. $(benchmarks_protoc_inputs) + touch protoc_middleman + +protoc_middleman2: $(benchmarks_protoc_inputs_proto2) + $(PROTOC) -I$(srcdir) -I$(top_srcdir) --cpp_out=. $(benchmarks_protoc_inputs_proto2) + touch protoc_middleman2 + +else + +# We have to cd to $(srcdir) before executing protoc because $(protoc_inputs) is +# relative to srcdir, which may not be the same as the current directory when +# building out-of-tree. +protoc_middleman: $(top_srcdir)/src/protoc$(EXEEXT) $(benchmarks_protoc_inputs) $(well_known_type_protoc_inputs) + oldpwd=`pwd` && ( cd $(srcdir) && $$oldpwd/../src/protoc$(EXEEXT) -I. -I$(top_srcdir)/src --cpp_out=$$oldpwd $(benchmarks_protoc_inputs) ) + touch protoc_middleman + +protoc_middleman2: $(top_srcdir)/src/protoc$(EXEEXT) $(benchmarks_protoc_inputs_proto2) $(well_known_type_protoc_inputs) + oldpwd=`pwd` && ( cd $(srcdir) && $$oldpwd/../src/protoc$(EXEEXT) -I. -I$(top_srcdir)/src --cpp_out=$$oldpwd $(benchmarks_protoc_inputs_proto2) ) + touch protoc_middleman + +endif diff --git a/packager/third_party/protobuf/benchmarks/ProtoBench.java b/packager/third_party/protobuf/benchmarks/ProtoBench.java new file mode 100644 index 0000000000..86d62feb67 --- /dev/null +++ b/packager/third_party/protobuf/benchmarks/ProtoBench.java @@ -0,0 +1,203 @@ +// Protocol Buffers - Google's data interchange format +// Copyright 2009 Google Inc. All rights reserved. 
+// https://developers.google.com/protocol-buffers/
+//
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+// * Redistributions of source code must retain the above copyright
+// notice, this list of conditions and the following disclaimer.
+// * Redistributions in binary form must reproduce the above
+// copyright notice, this list of conditions and the following disclaimer
+// in the documentation and/or other materials provided with the
+// distribution.
+// * Neither the name of Google Inc. nor the names of its
+// contributors may be used to endorse or promote products derived from
+// this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+package com.google.protocolbuffers;
+
+import java.io.ByteArrayInputStream;
+import java.io.ByteArrayOutputStream;
+import java.io.File;
+import java.io.FileOutputStream;
+import java.io.FileNotFoundException;
+import java.io.IOException;
+import java.io.RandomAccessFile;
+import java.lang.reflect.Method;
+
+import com.google.protobuf.ByteString;
+import com.google.protobuf.CodedInputStream;
+import com.google.protobuf.CodedOutputStream;
+import com.google.protobuf.Message;
+
+public class ProtoBench {
+
+  private static final long MIN_SAMPLE_TIME_MS = 2 * 1000;
+  private static final long TARGET_TIME_MS = 30 * 1000;
+
+  private ProtoBench() {
+    // Prevent instantiation
+  }
+
+  public static void main(String[] args) {
+    if (args.length < 2 || (args.length % 2) != 0) {
+      System.err.println("Usage: ProtoBench <descriptor type name> <input data>");
+      System.err.println("The descriptor type name is the fully-qualified message name,");
+      System.err.println("e.g. com.google.protocolbuffers.benchmark.Message1");
+      System.err.println("(You can specify multiple pairs of descriptor type name and input data.)");
+      System.exit(1);
+    }
+    boolean success = true;
+    for (int i = 0; i < args.length; i += 2) {
+      success &= runTest(args[i], args[i + 1]);
+    }
+    System.exit(success ? 0 : 1);
+  }
+
+  /**
+   * Runs a single test. Error messages are displayed to stderr, and the return value
+   * indicates general success/failure.
+   */
+  public static boolean runTest(String type, String file) {
+    System.out.println("Benchmarking " + type + " with file " + file);
+    final Message defaultMessage;
+    try {
+      Class clazz = Class.forName(type);
+      Method method = clazz.getDeclaredMethod("getDefaultInstance");
+      defaultMessage = (Message) method.invoke(null);
+    } catch (Exception e) {
+      // We want to do the same thing with all exceptions. Not generally nice,
+      // but this is slightly different.
+ System.err.println("Unable to get default message for " + type); + return false; + } + + try { + final byte[] inputData = readAllBytes(file); + final ByteArrayInputStream inputStream = new ByteArrayInputStream(inputData); + final ByteString inputString = ByteString.copyFrom(inputData); + final Message sampleMessage = defaultMessage.newBuilderForType().mergeFrom(inputString).build(); + FileOutputStream devNullTemp = null; + CodedOutputStream reuseDevNullTemp = null; + try { + devNullTemp = new FileOutputStream("/dev/null"); + reuseDevNullTemp = CodedOutputStream.newInstance(devNullTemp); + } catch (FileNotFoundException e) { + // ignore: this is probably Windows, where /dev/null does not exist + } + final FileOutputStream devNull = devNullTemp; + final CodedOutputStream reuseDevNull = reuseDevNullTemp; + benchmark("Serialize to byte string", inputData.length, new Action() { + public void execute() { sampleMessage.toByteString(); } + }); + benchmark("Serialize to byte array", inputData.length, new Action() { + public void execute() { sampleMessage.toByteArray(); } + }); + benchmark("Serialize to memory stream", inputData.length, new Action() { + public void execute() throws IOException { + sampleMessage.writeTo(new ByteArrayOutputStream()); + } + }); + if (devNull != null) { + benchmark("Serialize to /dev/null with FileOutputStream", inputData.length, new Action() { + public void execute() throws IOException { + sampleMessage.writeTo(devNull); + } + }); + benchmark("Serialize to /dev/null reusing FileOutputStream", inputData.length, new Action() { + public void execute() throws IOException { + sampleMessage.writeTo(reuseDevNull); + reuseDevNull.flush(); // force the write to the OutputStream + } + }); + } + benchmark("Deserialize from byte string", inputData.length, new Action() { + public void execute() throws IOException { + defaultMessage.newBuilderForType().mergeFrom(inputString).build(); + } + }); + benchmark("Deserialize from byte array", inputData.length, new Action() { + public void execute() throws IOException { + defaultMessage.newBuilderForType() + .mergeFrom(CodedInputStream.newInstance(inputData)).build(); + } + }); + benchmark("Deserialize from memory stream", inputData.length, new Action() { + public void execute() throws IOException { + defaultMessage.newBuilderForType() + .mergeFrom(CodedInputStream.newInstance(inputStream)).build(); + inputStream.reset(); + } + }); + System.out.println(); + return true; + } catch (Exception e) { + System.err.println("Error: " + e.getMessage()); + System.err.println("Detailed exception information:"); + e.printStackTrace(System.err); + return false; + } + } + + private static void benchmark(String name, long dataSize, Action action) throws IOException { + // Make sure it's JITted "reasonably" hard before running the first progress test + for (int i=0; i < 100; i++) { + action.execute(); + } + + // Run it progressively more times until we've got a reasonable sample + int iterations = 1; + long elapsed = timeAction(action, iterations); + while (elapsed < MIN_SAMPLE_TIME_MS) { + iterations *= 2; + elapsed = timeAction(action, iterations); + } + + // Upscale the sample to the target time. Do this in floating point arithmetic + // to avoid overflow issues. 
+ iterations = (int) ((TARGET_TIME_MS / (double) elapsed) * iterations); + elapsed = timeAction(action, iterations); + System.out.println(name + ": " + iterations + " iterations in " + + (elapsed/1000f) + "s; " + + (iterations * dataSize) / (elapsed * 1024 * 1024 / 1000f) + + "MB/s"); + } + + private static long timeAction(Action action, int iterations) throws IOException { + System.gc(); + long start = System.currentTimeMillis(); + for (int i = 0; i < iterations; i++) { + action.execute(); + } + long end = System.currentTimeMillis(); + return end - start; + } + + private static byte[] readAllBytes(String filename) throws IOException { + RandomAccessFile file = new RandomAccessFile(new File(filename), "r"); + byte[] content = new byte[(int) file.length()]; + file.readFully(content); + return content; + } + + /** + * Interface used to capture a single action to benchmark. + */ + interface Action { + void execute() throws IOException; + } +} diff --git a/packager/third_party/protobuf/benchmarks/README.md b/packager/third_party/protobuf/benchmarks/README.md new file mode 100644 index 0000000000..c902780582 --- /dev/null +++ b/packager/third_party/protobuf/benchmarks/README.md @@ -0,0 +1,28 @@ + +# Protocol Buffers Benchmarks + +This directory contains benchmarking schemas and data sets that you +can use to test a variety of performance scenarios against your +protobuf language runtime. + +The schema for the datasets is described in `benchmarks.proto`. + +Generate the data sets like so: + +``` +$ make +$ ./generate-datasets +Wrote dataset: dataset.google_message1_proto3.pb +Wrote dataset: dataset.google_message1_proto2.pb +Wrote dataset: dataset.google_message2.pb +$ +``` + +Each data set will be written to its own file. Benchmarks will +likely want to run several benchmarks against each data set (parse, +serialize, possibly JSON, possibly using different APIs, etc). + +We would like to add more data sets. In general we will favor data sets +that make the overall suite diverse without being too large or having +too many similar tests. Ideally everyone can run through the entire +suite without the test run getting too long. diff --git a/packager/third_party/protobuf/benchmarks/benchmark_messages_proto2.proto b/packager/third_party/protobuf/benchmarks/benchmark_messages_proto2.proto new file mode 100644 index 0000000000..01f67a1af3 --- /dev/null +++ b/packager/third_party/protobuf/benchmarks/benchmark_messages_proto2.proto @@ -0,0 +1,141 @@ +// Benchmark messages for proto2. + +syntax = "proto2"; + +package benchmarks.proto2; +option java_package = "com.google.protobuf.benchmarks"; + +// This is the default, but we specify it here explicitly. 
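+// (With optimize_for = SPEED, protoc generates fully unrolled parsing and
+// serialization code instead of the smaller reflection-based code it emits
+// under CODE_SIZE; the generated-code path is what these benchmarks measure.)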
+option optimize_for = SPEED; + +message GoogleMessage1 { + required string field1 = 1; + optional string field9 = 9; + optional string field18 = 18; + optional bool field80 = 80 [default=false]; + optional bool field81 = 81 [default=true]; + required int32 field2 = 2; + required int32 field3 = 3; + optional int32 field280 = 280; + optional int32 field6 = 6 [default=0]; + optional int64 field22 = 22; + optional string field4 = 4; + repeated fixed64 field5 = 5; + optional bool field59 = 59 [default=false]; + optional string field7 = 7; + optional int32 field16 = 16; + optional int32 field130 = 130 [default=0]; + optional bool field12 = 12 [default=true]; + optional bool field17 = 17 [default=true]; + optional bool field13 = 13 [default=true]; + optional bool field14 = 14 [default=true]; + optional int32 field104 = 104 [default=0]; + optional int32 field100 = 100 [default=0]; + optional int32 field101 = 101 [default=0]; + optional string field102 = 102; + optional string field103 = 103; + optional int32 field29 = 29 [default=0]; + optional bool field30 = 30 [default=false]; + optional int32 field60 = 60 [default=-1]; + optional int32 field271 = 271 [default=-1]; + optional int32 field272 = 272 [default=-1]; + optional int32 field150 = 150; + optional int32 field23 = 23 [default=0]; + optional bool field24 = 24 [default=false]; + optional int32 field25 = 25 [default=0]; + optional GoogleMessage1SubMessage field15 = 15; + optional bool field78 = 78; + optional int32 field67 = 67 [default=0]; + optional int32 field68 = 68; + optional int32 field128 = 128 [default=0]; + optional string field129 = 129 [default="xxxxxxxxxxxxxxxxxxxxx"]; + optional int32 field131 = 131 [default=0]; +} + +message GoogleMessage1SubMessage { + optional int32 field1 = 1 [default=0]; + optional int32 field2 = 2 [default=0]; + optional int32 field3 = 3 [default=0]; + optional string field15 = 15; + optional bool field12 = 12 [default=true]; + optional int64 field13 = 13; + optional int64 field14 = 14; + optional int32 field16 = 16; + optional int32 field19 = 19 [default=2]; + optional bool field20 = 20 [default=true]; + optional bool field28 = 28 [default=true]; + optional fixed64 field21 = 21; + optional int32 field22 = 22; + optional bool field23 = 23 [ default=false ]; + optional bool field206 = 206 [default=false]; + optional fixed32 field203 = 203; + optional int32 field204 = 204; + optional string field205 = 205; + optional uint64 field207 = 207; + optional uint64 field300 = 300; +} + +message GoogleMessage2 { + optional string field1 = 1; + optional int64 field3 = 3; + optional int64 field4 = 4; + optional int64 field30 = 30; + optional bool field75 = 75 [default=false]; + optional string field6 = 6; + optional bytes field2 = 2; + optional int32 field21 = 21 [default=0]; + optional int32 field71 = 71; + optional float field25 = 25; + optional int32 field109 = 109 [default=0]; + optional int32 field210 = 210 [default=0]; + optional int32 field211 = 211 [default=0]; + optional int32 field212 = 212 [default=0]; + optional int32 field213 = 213 [default=0]; + optional int32 field216 = 216 [default=0]; + optional int32 field217 = 217 [default=0]; + optional int32 field218 = 218 [default=0]; + optional int32 field220 = 220 [default=0]; + optional int32 field221 = 221 [default=0]; + optional float field222 = 222 [default=0.0]; + optional int32 field63 = 63; + + repeated group Group1 = 10 { + required float field11 = 11; + optional float field26 = 26; + optional string field12 = 12; + optional string field13 = 13; + 
repeated string field14 = 14; + required uint64 field15 = 15; + optional int32 field5 = 5; + optional string field27 = 27; + optional int32 field28 = 28; + optional string field29 = 29; + optional string field16 = 16; + repeated string field22 = 22; + repeated int32 field73 = 73; + optional int32 field20 = 20 [default=0]; + optional string field24 = 24; + optional GoogleMessage2GroupedMessage field31 = 31; + } + repeated string field128 = 128; + optional int64 field131 = 131; + repeated string field127 = 127; + optional int32 field129 = 129; + repeated int64 field130 = 130; + optional bool field205 = 205 [default=false]; + optional bool field206 = 206 [default=false]; +} + +message GoogleMessage2GroupedMessage { + optional float field1 = 1; + optional float field2 = 2; + optional float field3 = 3 [default=0.0]; + optional bool field4 = 4; + optional bool field5 = 5; + optional bool field6 = 6 [default=true]; + optional bool field7 = 7 [default=false]; + optional float field8 = 8; + optional bool field9 = 9; + optional float field10 = 10; + optional int64 field11 = 11; +} diff --git a/packager/third_party/protobuf/benchmarks/benchmark_messages_proto3.proto b/packager/third_party/protobuf/benchmarks/benchmark_messages_proto3.proto new file mode 100644 index 0000000000..32f586986b --- /dev/null +++ b/packager/third_party/protobuf/benchmarks/benchmark_messages_proto3.proto @@ -0,0 +1,76 @@ +// Benchmark messages for proto3. + +syntax = "proto3"; + +package benchmarks.proto3; +option java_package = "com.google.protobuf.benchmarks"; + +// This is the default, but we specify it here explicitly. +option optimize_for = SPEED; + +message GoogleMessage1 { + string field1 = 1; + string field9 = 9; + string field18 = 18; + bool field80 = 80; + bool field81 = 81; + int32 field2 = 2; + int32 field3 = 3; + int32 field280 = 280; + int32 field6 = 6; + int64 field22 = 22; + string field4 = 4; + repeated fixed64 field5 = 5; + bool field59 = 59; + string field7 = 7; + int32 field16 = 16; + int32 field130 = 130; + bool field12 = 12; + bool field17 = 17; + bool field13 = 13; + bool field14 = 14; + int32 field104 = 104; + int32 field100 = 100; + int32 field101 = 101; + string field102 = 102; + string field103 = 103; + int32 field29 = 29; + bool field30 = 30; + int32 field60 = 60; + int32 field271 = 271; + int32 field272 = 272; + int32 field150 = 150; + int32 field23 = 23; + bool field24 = 24; + int32 field25 = 25; + GoogleMessage1SubMessage field15 = 15; + bool field78 = 78; + int32 field67 = 67; + int32 field68 = 68; + int32 field128 = 128; + string field129 = 129; + int32 field131 = 131; +} + +message GoogleMessage1SubMessage { + int32 field1 = 1; + int32 field2 = 2; + int32 field3 = 3; + string field15 = 15; + bool field12 = 12; + int64 field13 = 13; + int64 field14 = 14; + int32 field16 = 16; + int32 field19 = 19; + bool field20 = 20; + bool field28 = 28; + fixed64 field21 = 21; + int32 field22 = 22; + bool field23 = 23; + bool field206 = 206; + fixed32 field203 = 203; + int32 field204 = 204; + string field205 = 205; + uint64 field207 = 207; + uint64 field300 = 300; +} diff --git a/packager/third_party/protobuf/benchmarks/benchmarks.proto b/packager/third_party/protobuf/benchmarks/benchmarks.proto new file mode 100644 index 0000000000..51c0b54877 --- /dev/null +++ b/packager/third_party/protobuf/benchmarks/benchmarks.proto @@ -0,0 +1,63 @@ +// Protocol Buffers - Google's data interchange format +// Copyright 2008 Google Inc. All rights reserved. 
+// https://developers.google.com/protocol-buffers/ +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +syntax = "proto3"; +package benchmarks; +option java_package = "com.google.protobuf.benchmarks"; + +message BenchmarkDataset { + // Name of the benchmark dataset. This should be unique across all datasets. + // Should only contain word characters: [a-zA-Z0-9_] + string name = 1; + + // Fully-qualified name of the protobuf message for this dataset. + // It will be one of the messages defined benchmark_messages_proto2.proto + // or benchmark_messages_proto3.proto. + // + // Implementations that do not support reflection can implement this with + // an explicit "if/else" chain that lists every known message defined + // in those files. + string message_name = 2; + + // The payload(s) for this dataset. They should be parsed or serialized + // in sequence, in a loop, ie. + // + // while (!benchmarkDone) { // Benchmark runner decides when to exit. + // for (i = 0; i < benchmark.payload.length; i++) { + // parse(benchmark.payload[i]) + // } + // } + // + // This is intended to let datasets include a variety of data to provide + // potentially more realistic results than just parsing the same message + // over and over. A single message parsed repeatedly could yield unusually + // good branch prediction performance. + repeated bytes payload = 3; +} diff --git a/packager/third_party/protobuf/benchmarks/generate_datasets.cc b/packager/third_party/protobuf/benchmarks/generate_datasets.cc new file mode 100644 index 0000000000..61e7adf1ba --- /dev/null +++ b/packager/third_party/protobuf/benchmarks/generate_datasets.cc @@ -0,0 +1,117 @@ +// Protocol Buffers - Google's data interchange format +// Copyright 2008 Google Inc. All rights reserved. 
+// https://developers.google.com/protocol-buffers/
+//
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+// * Redistributions of source code must retain the above copyright
+// notice, this list of conditions and the following disclaimer.
+// * Redistributions in binary form must reproduce the above
+// copyright notice, this list of conditions and the following disclaimer
+// in the documentation and/or other materials provided with the
+// distribution.
+// * Neither the name of Google Inc. nor the names of its
+// contributors may be used to endorse or promote products derived from
+// this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+#include <fstream>
+#include <iostream>
+#include "benchmarks.pb.h"
+
+using benchmarks::BenchmarkDataset;
+using google::protobuf::Descriptor;
+using google::protobuf::DescriptorPool;
+using google::protobuf::Message;
+using google::protobuf::MessageFactory;
+
+std::set<std::string> names;
+
+const char *file_prefix = "dataset.";
+const char *file_suffix = ".pb";
+
+void WriteFileWithPayloads(const std::string& name,
+                           const std::string& message_name,
+                           const std::vector<std::string>& payload) {
+  if (!names.insert(name).second) {
+    std::cerr << "Duplicate test name: " << name << "\n";
+    abort();
+  }
+
+  // First verify that this message name exists in our set of benchmark messages
+  // and that these payloads are valid for the given message.
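+  // (FindMessageTypeByName looks the type up in the generated descriptor pool,
+  // so any message compiled and linked into this binary can be referenced by
+  // its fully-qualified name, e.g. "benchmarks.proto2.GoogleMessage1".)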
+  const Descriptor* d =
+      DescriptorPool::generated_pool()->FindMessageTypeByName(message_name);
+
+  if (!d) {
+    std::cerr << "For dataset " << name << ", no such message: "
+              << message_name << "\n";
+    abort();
+  }
+
+  Message* m = MessageFactory::generated_factory()->GetPrototype(d)->New();
+
+  for (size_t i = 0; i < payload.size(); i++) {
+    if (!m->ParseFromString(payload[i])) {
+      std::cerr << "For dataset " << name << ", payload[" << i << "] fails "
+                << "to parse\n";
+      abort();
+    }
+  }
+
+  BenchmarkDataset dataset;
+  dataset.set_name(name);
+  dataset.set_message_name(message_name);
+  for (size_t i = 0; i < payload.size(); i++) {
+    dataset.add_payload()->assign(payload[i]);
+  }
+
+  std::ofstream writer;
+  std::string fname = file_prefix + name + file_suffix;
+  writer.open(fname.c_str());
+  dataset.SerializeToOstream(&writer);
+  writer.close();
+
+  std::cerr << "Wrote dataset: " << fname << "\n";
+}
+
+void WriteFile(const std::string& name, const std::string& message_name,
+               const std::string& payload) {
+  std::vector<std::string> payloads;
+  payloads.push_back(payload);
+  WriteFileWithPayloads(name, message_name, payloads);
+}
+
+std::string ReadFile(const std::string& name) {
+  std::ifstream file(name.c_str());
+  GOOGLE_CHECK(file.is_open()) << "Couldn't find file '" << name <<
+                                  "', please make sure you are running "
+                                  "this command from the benchmarks/ "
+                                  "directory.\n";
+  return std::string((std::istreambuf_iterator<char>(file)),
+                     std::istreambuf_iterator<char>());
+}
+
+int main() {
+  WriteFile("google_message1_proto3", "benchmarks.proto3.GoogleMessage1",
+            ReadFile("google_message1.dat"));
+  WriteFile("google_message1_proto2", "benchmarks.proto2.GoogleMessage1",
+            ReadFile("google_message1.dat"));
+
+  // Not in proto3 because it has a group, which is not supported.
+ WriteFile("google_message2", "benchmarks.proto2.GoogleMessage2", + ReadFile("google_message2.dat")); +} diff --git a/packager/third_party/protobuf/benchmarks/google_message1.dat b/packager/third_party/protobuf/benchmarks/google_message1.dat new file mode 100644 index 0000000000..bc0f064cc2 Binary files /dev/null and b/packager/third_party/protobuf/benchmarks/google_message1.dat differ diff --git a/packager/third_party/protobuf/benchmarks/google_message2.dat b/packager/third_party/protobuf/benchmarks/google_message2.dat new file mode 100644 index 0000000000..06c09441b9 Binary files /dev/null and b/packager/third_party/protobuf/benchmarks/google_message2.dat differ diff --git a/packager/third_party/protobuf/benchmarks/google_size.proto b/packager/third_party/protobuf/benchmarks/google_size.proto new file mode 100644 index 0000000000..d2d319f31c --- /dev/null +++ b/packager/third_party/protobuf/benchmarks/google_size.proto @@ -0,0 +1,138 @@ +syntax = "proto2"; + +package benchmarks; + +option java_outer_classname = "GoogleSize"; +option optimize_for = CODE_SIZE; + +message SizeMessage1 { + required string field1 = 1; + optional string field9 = 9; + optional string field18 = 18; + optional bool field80 = 80 [default=false]; + optional bool field81 = 81 [default=true]; + required int32 field2 = 2; + required int32 field3 = 3; + optional int32 field280 = 280; + optional int32 field6 = 6 [default=0]; + optional int64 field22 = 22; + optional string field4 = 4; + repeated fixed64 field5 = 5; + optional bool field59 = 59 [default=false]; + optional string field7 = 7; + optional int32 field16 = 16; + optional int32 field130 = 130 [default=0]; + optional bool field12 = 12 [default=true]; + optional bool field17 = 17 [default=true]; + optional bool field13 = 13 [default=true]; + optional bool field14 = 14 [default=true]; + optional int32 field104 = 104 [default=0]; + optional int32 field100 = 100 [default=0]; + optional int32 field101 = 101 [default=0]; + optional string field102 = 102; + optional string field103 = 103; + optional int32 field29 = 29 [default=0]; + optional bool field30 = 30 [default=false]; + optional int32 field60 = 60 [default=-1]; + optional int32 field271 = 271 [default=-1]; + optional int32 field272 = 272 [default=-1]; + optional int32 field150 = 150; + optional int32 field23 = 23 [default=0]; + optional bool field24 = 24 [default=false]; + optional int32 field25 = 25 [default=0]; + optional SizeMessage1SubMessage field15 = 15; + optional bool field78 = 78; + optional int32 field67 = 67 [default=0]; + optional int32 field68 = 68; + optional int32 field128 = 128 [default=0]; + optional string field129 = 129 [default="xxxxxxxxxxxxxxxxxxxxx"]; + optional int32 field131 = 131 [default=0]; +} + +message SizeMessage1SubMessage { + optional int32 field1 = 1 [default=0]; + optional int32 field2 = 2 [default=0]; + optional int32 field3 = 3 [default=0]; + optional string field15 = 15; + optional bool field12 = 12 [default=true]; + optional int64 field13 = 13; + optional int64 field14 = 14; + optional int32 field16 = 16; + optional int32 field19 = 19 [default=2]; + optional bool field20 = 20 [default=true]; + optional bool field28 = 28 [default=true]; + optional fixed64 field21 = 21; + optional int32 field22 = 22; + optional bool field23 = 23 [ default=false ]; + optional bool field206 = 206 [default=false]; + optional fixed32 field203 = 203; + optional int32 field204 = 204; + optional string field205 = 205; + optional uint64 field207 = 207; + optional uint64 field300 = 300; +} + +message 
SizeMessage2 { + optional string field1 = 1; + optional int64 field3 = 3; + optional int64 field4 = 4; + optional int64 field30 = 30; + optional bool field75 = 75 [default=false]; + optional string field6 = 6; + optional bytes field2 = 2; + optional int32 field21 = 21 [default=0]; + optional int32 field71 = 71; + optional float field25 = 25; + optional int32 field109 = 109 [default=0]; + optional int32 field210 = 210 [default=0]; + optional int32 field211 = 211 [default=0]; + optional int32 field212 = 212 [default=0]; + optional int32 field213 = 213 [default=0]; + optional int32 field216 = 216 [default=0]; + optional int32 field217 = 217 [default=0]; + optional int32 field218 = 218 [default=0]; + optional int32 field220 = 220 [default=0]; + optional int32 field221 = 221 [default=0]; + optional float field222 = 222 [default=0.0]; + optional int32 field63 = 63; + + repeated group Group1 = 10 { + required float field11 = 11; + optional float field26 = 26; + optional string field12 = 12; + optional string field13 = 13; + repeated string field14 = 14; + required uint64 field15 = 15; + optional int32 field5 = 5; + optional string field27 = 27; + optional int32 field28 = 28; + optional string field29 = 29; + optional string field16 = 16; + repeated string field22 = 22; + repeated int32 field73 = 73; + optional int32 field20 = 20 [default=0]; + optional string field24 = 24; + optional SizeMessage2GroupedMessage field31 = 31; + } + repeated string field128 = 128; + optional int64 field131 = 131; + repeated string field127 = 127; + optional int32 field129 = 129; + repeated int64 field130 = 130; + optional bool field205 = 205 [default=false]; + optional bool field206 = 206 [default=false]; +} + +message SizeMessage2GroupedMessage { + optional float field1 = 1; + optional float field2 = 2; + optional float field3 = 3 [default=0.0]; + optional bool field4 = 4; + optional bool field5 = 5; + optional bool field6 = 6 [default=true]; + optional bool field7 = 7 [default=false]; + optional float field8 = 8; + optional bool field9 = 9; + optional float field10 = 10; + optional int64 field11 = 11; +} diff --git a/packager/third_party/protobuf/benchmarks/readme.txt b/packager/third_party/protobuf/benchmarks/readme.txt new file mode 100644 index 0000000000..2c836d0a1c --- /dev/null +++ b/packager/third_party/protobuf/benchmarks/readme.txt @@ -0,0 +1,50 @@ +Contents +-------- + +This folder contains three kinds of file: + +- Code, such as ProtoBench.java, to build the benchmarking framework. +- Protocol buffer definitions (.proto files) +- Sample data files + +If we end up with a lot of different benchmarks it may be worth +separating these out info different directories, but while there are +so few they might as well all be together. + +Running a benchmark (Java) +-------------------------- + +1) Build protoc and the Java protocol buffer library. The examples + below assume a jar file (protobuf.jar) has been built and copied + into this directory. + +2) Build ProtoBench: + $ javac -d tmp -cp protobuf.jar ProtoBench.java + +3) Generate code for the relevant benchmark protocol buffer, e.g. + $ protoc --java_out=tmp google_size.proto google_speed.proto + +4) Build the generated code, e.g. + $ cd tmp + $ javac -d . -cp ../protobuf.jar benchmarks/*.java + +5) Run the test. Arguments are given in pairs - the first argument + is the descriptor type; the second is the filename. 
For example: + $ java -cp .;../protobuf.jar com.google.protocolbuffers.ProtoBench + benchmarks.GoogleSize$SizeMessage1 ../google_message1.dat + benchmarks.GoogleSpeed$SpeedMessage1 ../google_message1.dat + benchmarks.GoogleSize$SizeMessage2 ../google_message2.dat + benchmarks.GoogleSpeed$SpeedMessage2 ../google_message2.dat + +6) Wait! Each test runs for around 30 seconds, and there are 6 tests + per class/data combination. The above command would therefore take + about 12 minutes to run. + + +Benchmarks available +-------------------- + +From Google: +google_size.proto and google_speed.proto, messages +google_message1.dat and google_message2.dat. The proto files are +equivalent, but optimized differently. diff --git a/packager/third_party/protobuf/cmake/CMakeLists.txt b/packager/third_party/protobuf/cmake/CMakeLists.txt new file mode 100644 index 0000000000..f32a0e4e65 --- /dev/null +++ b/packager/third_party/protobuf/cmake/CMakeLists.txt @@ -0,0 +1,153 @@ +# Minimum CMake required +cmake_minimum_required(VERSION 2.8) + +# Project +project(protobuf C CXX) + +# CMake policies +cmake_policy(SET CMP0022 NEW) + +# Options +option(protobuf_VERBOSE "Enable for verbose output" OFF) +option(protobuf_BUILD_TESTS "Build tests" ON) +if (BUILD_SHARED_LIBS) + set(protobuf_BUILD_SHARED_LIBS_DEFAULT ON) +else (BUILD_SHARED_LIBS) + set(protobuf_BUILD_SHARED_LIBS_DEFAULT OFF) +endif (BUILD_SHARED_LIBS) +option(protobuf_BUILD_SHARED_LIBS "Build Shared Libraries" ${protobuf_BUILD_SHARED_LIBS_DEFAULT}) +option(protobuf_MSVC_STATIC_RUNTIME "Link static runtime libraries" ON) +if (MSVC) + set(protobuf_WITH_ZLIB_DEFAULT OFF) +else (MSVC) + set(protobuf_WITH_ZLIB_DEFAULT ON) +endif (MSVC) +option(protobuf_WITH_ZLIB "Build with zlib support" ${protobuf_WITH_ZLIB_DEFAULT}) +set(protobuf_DEBUG_POSTFIX "d" + CACHE STRING "Default debug postfix") + +# Path to main configure script +set(protobuf_CONFIGURE_SCRIPT "../configure.ac") + +# Parse configure script +set(protobuf_AC_INIT_REGEX + "^AC_INIT\\(\\[([^]]+)\\],\\[([^]]+)\\],\\[([^]]+)\\],\\[([^]]+)\\]\\)$") +file(STRINGS "${protobuf_CONFIGURE_SCRIPT}" protobuf_AC_INIT_LINE + LIMIT_COUNT 1 REGEX "^AC_INIT") +# Description +string(REGEX REPLACE "${protobuf_AC_INIT_REGEX}" "\\1" + protobuf_DESCRIPTION "${protobuf_AC_INIT_LINE}") +# Version +string(REGEX REPLACE "${protobuf_AC_INIT_REGEX}" "\\2" + protobuf_VERSION_STRING "${protobuf_AC_INIT_LINE}") +# Contact +string(REGEX REPLACE "${protobuf_AC_INIT_REGEX}" "\\3" + protobuf_CONTACT "${protobuf_AC_INIT_LINE}") +# Parse version tweaks +set(protobuf_VERSION_REGEX "^([0-9]+)\\.([0-9]+)\\.([0-9]+).*$") +string(REGEX REPLACE "${protobuf_VERSION_REGEX}" "\\1" + protobuf_VERSION_MAJOR "${protobuf_VERSION_STRING}") +string(REGEX REPLACE "${protobuf_VERSION_REGEX}" "\\2" + protobuf_VERSION_MINOR "${protobuf_VERSION_STRING}") +string(REGEX REPLACE "${protobuf_VERSION_REGEX}" "\\3" + protobuf_VERSION_PATCH "${protobuf_VERSION_STRING}") +# Package version +set(protobuf_VERSION + "${protobuf_VERSION_MAJOR}.${protobuf_VERSION_MINOR}.${protobuf_VERSION_PATCH}") + +if(protobuf_VERBOSE) + message(STATUS "Configuration script parsing status [") + message(STATUS " Description : ${protobuf_DESCRIPTION}") + message(STATUS " Version : ${protobuf_VERSION} (${protobuf_VERSION_STRING})") + message(STATUS " Contact : ${protobuf_CONTACT}") + message(STATUS "]") +endif() + +add_definitions(-DGOOGLE_PROTOBUF_CMAKE_BUILD) + +find_package(Threads REQUIRED) +if (CMAKE_USE_PTHREADS_INIT) + add_definitions(-DHAVE_PTHREAD) +endif 
(CMAKE_USE_PTHREADS_INIT) + +if (protobuf_WITH_ZLIB) + find_package(ZLIB) + if (ZLIB_FOUND) + set(HAVE_ZLIB 1) + # FindZLIB module define ZLIB_INCLUDE_DIRS variable + # Set ZLIB_INCLUDE_DIRECTORIES for compatible + set(ZLIB_INCLUDE_DIRECTORIES ${ZLIB_INCLUDE_DIRECTORIES} ${ZLIB_INCLUDE_DIRS}) + # Using imported target if exists + if (TARGET ZLIB::ZLIB) + set(ZLIB_LIBRARIES ZLIB::ZLIB) + endif (TARGET ZLIB::ZLIB) + else (ZLIB_FOUND) + set(HAVE_ZLIB 0) + # Explicitly set these to empty (override NOT_FOUND) so cmake doesn't + # complain when we use them later. + set(ZLIB_INCLUDE_DIRECTORIES) + set(ZLIB_LIBRARIES) + endif (ZLIB_FOUND) +endif (protobuf_WITH_ZLIB) + +if (HAVE_ZLIB) + add_definitions(-DHAVE_ZLIB) +endif (HAVE_ZLIB) + +if (protobuf_BUILD_SHARED_LIBS) + set(protobuf_SHARED_OR_STATIC "SHARED") +else (protobuf_BUILD_SHARED_LIBS) + set(protobuf_SHARED_OR_STATIC "STATIC") + # In case we are building static libraries, link also the runtime library statically + # so that MSVCR*.DLL is not required at runtime. + # https://msdn.microsoft.com/en-us/library/2kzt1wy3.aspx + # This is achieved by replacing msvc option /MD with /MT and /MDd with /MTd + # http://www.cmake.org/Wiki/CMake_FAQ#How_can_I_build_my_MSVC_application_with_a_static_runtime.3F + if (MSVC AND protobuf_MSVC_STATIC_RUNTIME) + foreach(flag_var + CMAKE_CXX_FLAGS CMAKE_CXX_FLAGS_DEBUG CMAKE_CXX_FLAGS_RELEASE + CMAKE_CXX_FLAGS_MINSIZEREL CMAKE_CXX_FLAGS_RELWITHDEBINFO) + if(${flag_var} MATCHES "/MD") + string(REGEX REPLACE "/MD" "/MT" ${flag_var} "${${flag_var}}") + endif(${flag_var} MATCHES "/MD") + endforeach(flag_var) + endif (MSVC AND protobuf_MSVC_STATIC_RUNTIME) +endif (protobuf_BUILD_SHARED_LIBS) + +if (MSVC) + # Build with multiple processes + add_definitions(/MP) + add_definitions(/wd4244 /wd4267 /wd4018 /wd4355 /wd4800 /wd4251 /wd4996 /wd4146 /wd4305) + # Allow big object + add_definitions(/bigobj) + string(REPLACE "/" "\\" PROTOBUF_SOURCE_WIN32_PATH ${protobuf_SOURCE_DIR}) + string(REPLACE "/" "\\" PROTOBUF_BINARY_WIN32_PATH ${protobuf_BINARY_DIR}) + configure_file(extract_includes.bat.in extract_includes.bat) +endif (MSVC) + +get_filename_component(protobuf_source_dir ${protobuf_SOURCE_DIR} PATH) + +include_directories( + ${ZLIB_INCLUDE_DIRECTORIES} + ${protobuf_BINARY_DIR} + ${protobuf_source_dir}/src) + +if (MSVC) + # Add the "lib" prefix for generated .lib outputs. + set(LIB_PREFIX lib) +else (MSVC) + # When building with "make", "lib" prefix will be added automatically by + # the build tool. + set(LIB_PREFIX) +endif (MSVC) + +include(libprotobuf-lite.cmake) +include(libprotobuf.cmake) +include(libprotoc.cmake) +include(protoc.cmake) + +if (protobuf_BUILD_TESTS) + include(tests.cmake) +endif (protobuf_BUILD_TESTS) + +include(install.cmake) diff --git a/packager/third_party/protobuf/cmake/README.md b/packager/third_party/protobuf/cmake/README.md new file mode 100644 index 0000000000..1e7410d828 --- /dev/null +++ b/packager/third_party/protobuf/cmake/README.md @@ -0,0 +1,336 @@ +This directory contains *CMake* files that can be used to build protobuf +with *MSVC* on *Windows*. You can build the project from *Command Prompt* +and using an *Visual Studio* IDE. + +You need to have [CMake](http://www.cmake.org), [Visual Studio](https://www.visualstudio.com) +and optionally [Git](http://git-scm.com) installed on your computer before proceeding. + +Most of the instructions will be given to the *Сommand Prompt*, but the same +actions can be performed using appropriate GUI tools. 
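As an aside on the cmake/CMakeLists.txt shown earlier: it does not hard-code the package version, but extracts it by running regular expressions over the AC_INIT line of configure.ac. A minimal standalone sketch of that extraction is below; the sample AC_INIT line and the script name are illustrative only, not copied from this tree, and the script can be run with `cmake -P`.

    # version_probe.cmake -- run with:  cmake -P version_probe.cmake
    # Reproduces how cmake/CMakeLists.txt splits an AC_INIT(...) line into
    # description, version, and contact fields. The sample line is illustrative.
    set(_ac_init "AC_INIT([Protocol Buffers],[3.0.0-beta-2],[protobuf@googlegroups.com],[protobuf])")
    set(_regex "^AC_INIT\\(\\[([^]]+)\\],\\[([^]]+)\\],\\[([^]]+)\\],\\[([^]]+)\\]\\)$")
    string(REGEX REPLACE "${_regex}" "\\1" _description "${_ac_init}")
    string(REGEX REPLACE "${_regex}" "\\2" _version "${_ac_init}")
    string(REGEX REPLACE "${_regex}" "\\3" _contact "${_ac_init}")
    message(STATUS "Description: ${_description}")  # Protocol Buffers
    message(STATUS "Version:     ${_version}")      # 3.0.0-beta-2
    message(STATUS "Contact:     ${_contact}")      # protobuf@googlegroups.com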
+ +Environment Setup +================= + +Open the appropriate *Command Prompt* from the *Start* menu. + +For example *VS2013 x64 Native Tools Command Prompt*: + + C:\Program Files (x86)\Microsoft Visual Studio 12.0\VC\bin\amd64> + +Change to your working directory: + + C:\Program Files (x86)\Microsoft Visual Studio 12.0\VC\bin\amd64>cd C:\Path\to + C:\Path\to> + +Where *C:\Path\to* is path to your real working directory. + +Create a folder where protobuf headers/libraries/binaries will be installed after built: + + C:\Path\to>mkdir install + +If *cmake* command is not available from *Command Prompt*, add it to system *PATH* variable: + + C:\Path\to>set PATH=%PATH%;C:\Program Files (x86)\CMake\bin + +If *git* command is not available from *Command Prompt*, add it to system *PATH* variable: + + C:\Path\to>set PATH=%PATH%;C:\Program Files\Git\cmd + +Good. Now you are ready to continue. + +Getting Sources +=============== + +You can get the latest stable source packages from the +[releases](https://github.com/google/protobuf/releases) page. +Or you can type: + + C:\Path\to> git clone -b [release_tag] https://github.com/google/protobuf.git + +Where *[release_tag]* is a git tag like *v3.0.0-beta-1* or a branch name like *master* +if you want to get the latest code. + +Go to the project folder: + + C:\Path\to>cd protobuf + C:\Path\to\protobuf> + +Protobuf unit-tests require gmock to build. If you download protobuf source code +from the *releases* page, the *gmock* directory should already be there. If you checkout +the code via `git clone`, this *gmock* directory won't exist and you will have to +download it manually or skip building protobuf unit-tests. + +You can download gmock as follows: + + C:\Path\to\protobuf>git clone -b release-1.7.0 https://github.com/google/googlemock.git gmock + +Then go to *gmock* folder and download gtest: + + C:\Path\to\protobuf>cd gmock + C:\Path\to\protobuf\gmock>git clone -b release-1.7.0 https://github.com/google/googletest.git gtest + +If you absolutely don't want to build and run protobuf unit-tests, skip +this steps and use protobuf at your own risk. + +Now go to *cmake* folder in protobuf sources: + + C:\Path\to\protobuf\gmock>cd ..\cmake + C:\Path\to\protobuf\cmake> + +Good. Now you are ready to *CMake* configuration. + +CMake Configuration +=================== + +*CMake* supports a lot of different +[generators](http://www.cmake.org/cmake/help/latest/manual/cmake-generators.7.html) +for various native build systems. +We are only interested in +[Makefile](http://www.cmake.org/cmake/help/latest/manual/cmake-generators.7.html#makefile-generators) +and +[Visual Studio](http://www.cmake.org/cmake/help/latest/manual/cmake-generators.7.html#visual-studio-generators) +generators. + +We will use shadow building to separate the temporary files from the protobuf source code. + +Create a temporary *build* folder and change your working directory to it: + + C:\Path\to\protobuf\cmake>mkdir build & cd build + C:\Path\to\protobuf\cmake\build> + +The *Makefile* generator can build the project in only one configuration, so you need to build +a separate folder for each configuration. + +To start using a *Release* configuration: + + C:\Path\to\protobuf\cmake\build>mkdir release & cd release + C:\Path\to\protobuf\cmake\build\release>cmake -G "NMake Makefiles" ^ + -DCMAKE_BUILD_TYPE=Release ^ + -DCMAKE_INSTALL_PREFIX=../../../../install ^ + ../.. + +It will generate *nmake* *Makefile* in current directory. 
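If you regenerate the build tree often, the same options can be collected into a CMake initial-cache file and passed with `-C`. This is only a sketch under the options defined by this CMakeLists.txt; the file name is arbitrary and the file is not part of the protobuf sources.

    # release-options.cmake -- hypothetical initial-cache file; use it as
    #   cmake -G "NMake Makefiles" -C release-options.cmake ../..
    # Each entry pre-seeds the cache, mirroring the -D flags shown above.
    set(CMAKE_BUILD_TYPE Release CACHE STRING "Build type")
    set(CMAKE_INSTALL_PREFIX ../../../../install CACHE PATH "Install prefix")
    set(protobuf_BUILD_TESTS OFF CACHE BOOL "Set to OFF when gmock has not been downloaded")
    set(protobuf_MSVC_STATIC_RUNTIME ON CACHE BOOL "Link the MSVC runtime statically")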
+ +To use *Debug* configuration: + + C:\Path\to\protobuf\cmake\build>mkdir debug & cd debug + C:\Path\to\protobuf\cmake\build\debug>cmake -G "NMake Makefiles" ^ + -DCMAKE_BUILD_TYPE=Debug ^ + -DCMAKE_INSTALL_PREFIX=../../../../install ^ + ../.. + +It will generate *nmake* *Makefile* in current directory. + +To create *Visual Studio* solution file: + + C:\Path\to\protobuf\cmake\build>mkdir solution & cd solution + C:\Path\to\protobuf\cmake\build\solution>cmake -G "Visual Studio 12 2013 Win64" ^ + -DCMAKE_INSTALL_PREFIX=../../../../install ^ + ../.. + +It will generate *Visual Studio* solution file *protobuf.sln* in current directory. + +If the *gmock* directory does not exist, and you do not want to build protobuf unit tests, +you need to add *cmake* command argument `-Dprotobuf_BUILD_TESTS=OFF` to disable testing. + +Compiling +========= + +To compile protobuf: + + C:\Path\to\protobuf\cmake\build\release>nmake + +or + + C:\Path\to\protobuf\cmake\build\debug>nmake + +And wait for the compilation to finish. + +If you prefer to use the IDE: + + * Open the generated protobuf.sln file in Microsoft Visual Studio. + * Choose "Debug" or "Release" configuration as desired. + * From the Build menu, choose "Build Solution". + +And wait for the compilation to finish. + +Testing +======= + +To run unit-tests, first you must compile protobuf as described above. +Then run: + + C:\Path\to\protobuf\cmake\build\release>nmake check + +or + + C:\Path\to\protobuf\cmake\build\debug>nmake check + +You can also build project *check* from Visual Studio solution. +Yes, it may sound strange, but it works. + +You should see output similar to: + + Running main() from gmock_main.cc + [==========] Running 1546 tests from 165 test cases. + + ... + + [==========] 1546 tests from 165 test cases ran. (2529 ms total) + [ PASSED ] 1546 tests. + +To run specific tests: + + C:\Path\to\protobuf>cmake\build\release\tests.exe --gtest_filter=AnyTest* + Running main() from gmock_main.cc + Note: Google Test filter = AnyTest* + [==========] Running 3 tests from 1 test case. + [----------] Global test environment set-up. + [----------] 3 tests from AnyTest + [ RUN ] AnyTest.TestPackAndUnpack + [ OK ] AnyTest.TestPackAndUnpack (0 ms) + [ RUN ] AnyTest.TestPackAndUnpackAny + [ OK ] AnyTest.TestPackAndUnpackAny (0 ms) + [ RUN ] AnyTest.TestIs + [ OK ] AnyTest.TestIs (0 ms) + [----------] 3 tests from AnyTest (1 ms total) + + [----------] Global test environment tear-down + [==========] 3 tests from 1 test case ran. (2 ms total) + [ PASSED ] 3 tests. + +Note that the tests must be run from the source folder. + +If all tests are passed, safely continue. + +Installing +========== + +To install protobuf to the specified *install* folder: + + C:\Path\to\protobuf\cmake\build\release>nmake install + +or + + C:\Path\to\protobuf\cmake\build\debug>nmake install + +You can also build project *INSTALL* from Visual Studio solution. +It sounds not so strange and it works. + +This will create the following folders under the *install* location: + * bin - that contains protobuf *protoc.exe* compiler; + * include - that contains C++ headers and protobuf *.proto files; + * lib - that contains linking libraries and *CMake* configuration files for *protobuf* package. + +Now you can if needed: + * Copy the contents of the include directory to wherever you want to put headers. + * Copy protoc.exe wherever you put build tools (probably somewhere in your PATH). 
+ * Copy linking libraries libprotobuf[d].lib, libprotobuf-lite[d].lib, and libprotoc[d].lib wherever you put libraries. + +To avoid conflicts between the MSVC debug and release runtime libraries, when +compiling a debug build of your application, you may need to link against a +debug build of libprotobufd.lib with "d" postfix. Similarly, release builds should link against +release libprotobuf.lib library. + +DLLs vs. static linking +======================= + +Static linking is now the default for the Protocol Buffer libraries. Due to +issues with Win32's use of a separate heap for each DLL, as well as binary +compatibility issues between different versions of MSVC's STL library, it is +recommended that you use static linkage only. However, it is possible to +build libprotobuf and libprotoc as DLLs if you really want. To do this, +do the following: + + * Add an additional flag `-Dprotobuf_BUILD_SHARED_LIBS=ON` when invoking cmake + * Follow the same steps as described in the above section. + * When compiling your project, make sure to `#define PROTOBUF_USE_DLLS`. + +When distributing your software to end users, we strongly recommend that you +do NOT install libprotobuf.dll or libprotoc.dll to any shared location. +Instead, keep these libraries next to your binaries, in your application's +own install directory. C++ makes it very difficult to maintain binary +compatibility between releases, so it is likely that future versions of these +libraries will *not* be usable as drop-in replacements. + +If your project is itself a DLL intended for use by third-party software, we +recommend that you do NOT expose protocol buffer objects in your library's +public interface, and that you statically link protocol buffers into your +library. + +ZLib support +============ + +If you want to include GzipInputStream and GzipOutputStream +(google/protobuf/io/gzip_stream.h) in libprotobuf, you will need to do a few +additional steps. + +Obtain a copy of the zlib library. The pre-compiled DLL at zlib.net works. +You need prepare it: + + * Make sure zlib's two headers are in your `C:\Path\to\install\include` path + * Make sure zlib's linking libraries (*.lib file) is in your + `C:\Path\to\install\lib` library path. + +You can also compile it from source by yourself. + +Getting sources: + + C:\Path\to>git clone -b v1.2.8 https://github.com/madler/zlib.git + C:\Path\to>cd zlib + +Compiling and Installing: + + C:\Path\to\zlib>mkdir build & cd build + C:\Path\to\zlib\build>mkdir release & cd release + C:\Path\to\zlib\build\release>cmake -G "NMake Makefiles" -DCMAKE_BUILD_TYPE=Release ^ + -DCMAKE_INSTALL_PREFIX=../../../install ../.. + C:\Path\to\zlib\build\release>nmake & nmake install + +You can make *debug* version or use *Visual Studio* generator also as before for the +protobuf project. + +Now add *bin* folder from *install* to system *PATH*: + + C:\Path\to>set PATH=%PATH%;C:\Path\to\install\bin + +You need reconfigure protobuf with flag `-Dprotobuf_WITH_ZLIB=ON` when invoking cmake. + +Note that if you have compiled ZLIB yourself, as stated above, +further disable the option `-Dprotobuf_MSVC_STATIC_RUNTIME=OFF`. + +If it reports NOTFOUND for zlib_include or zlib_lib, you might haven't put +the headers or the .lib file in the right directory. + +Build and testing protobuf as usual. + +Notes on Compiler Warnings +========================== + +The following warnings have been disabled while building the protobuf libraries +and compiler. 
You may have to disable some of them in your own project as +well, or live with them. + +* C4018 - 'expression' : signed/unsigned mismatch +* C4146 - unary minus operator applied to unsigned type, result still unsigned +* C4244 - Conversion from 'type1' to 'type2', possible loss of data. +* C4251 - 'identifier' : class 'type' needs to have dll-interface to be used by + clients of class 'type2' +* C4267 - Conversion from 'size_t' to 'type', possible loss of data. +* C4305 - 'identifier' : truncation from 'type1' to 'type2' +* C4355 - 'this' : used in base member initializer list +* C4800 - 'type' : forcing value to bool 'true' or 'false' (performance warning) +* C4996 - 'function': was declared deprecated + +C4251 is of particular note, if you are compiling the Protocol Buffer library +as a DLL (see previous section). The protocol buffer library uses templates in +its public interfaces. MSVC does not provide any reasonable way to export +template classes from a DLL. However, in practice, it appears that exporting +templates is not necessary anyway. Since the complete definition of any +template is available in the header files, anyone importing the DLL will just +end up compiling instances of the templates into their own binary. The +Protocol Buffer implementation does not rely on static template members being +unique, so there should be no problem with this, but MSVC prints warning +nevertheless. So, we disable it. Unfortunately, this warning will also be +produced when compiling code which merely uses protocol buffers, meaning you +may have to disable it in your code too. diff --git a/packager/third_party/protobuf/cmake/extract_includes.bat.in b/packager/third_party/protobuf/cmake/extract_includes.bat.in new file mode 100644 index 0000000000..b593e0c9cc --- /dev/null +++ b/packager/third_party/protobuf/cmake/extract_includes.bat.in @@ -0,0 +1,124 @@ +mkdir include +mkdir include\google +mkdir include\google\protobuf +mkdir include\google\protobuf\compiler +mkdir include\google\protobuf\compiler\cpp +mkdir include\google\protobuf\compiler\csharp +mkdir include\google\protobuf\compiler\java +mkdir include\google\protobuf\compiler\javanano +mkdir include\google\protobuf\compiler\js +mkdir include\google\protobuf\compiler\objectivec +mkdir include\google\protobuf\compiler\python +mkdir include\google\protobuf\compiler\ruby +mkdir include\google\protobuf\io +mkdir include\google\protobuf\stubs +mkdir include\google\protobuf\util +copy ${PROTOBUF_SOURCE_WIN32_PATH}\..\src\google\protobuf\any.h include\google\protobuf\any.h +copy ${PROTOBUF_SOURCE_WIN32_PATH}\..\src\google\protobuf\any.pb.h include\google\protobuf\any.pb.h +copy ${PROTOBUF_SOURCE_WIN32_PATH}\..\src\google\protobuf\api.pb.h include\google\protobuf\api.pb.h +copy ${PROTOBUF_SOURCE_WIN32_PATH}\..\src\google\protobuf\arena.h include\google\protobuf\arena.h +copy ${PROTOBUF_SOURCE_WIN32_PATH}\..\src\google\protobuf\arenastring.h include\google\protobuf\arenastring.h +copy ${PROTOBUF_SOURCE_WIN32_PATH}\..\src\google\protobuf\compiler\code_generator.h include\google\protobuf\compiler\code_generator.h +copy ${PROTOBUF_SOURCE_WIN32_PATH}\..\src\google\protobuf\compiler\command_line_interface.h include\google\protobuf\compiler\command_line_interface.h +copy ${PROTOBUF_SOURCE_WIN32_PATH}\..\src\google\protobuf\compiler\cpp\cpp_generator.h include\google\protobuf\compiler\cpp\cpp_generator.h +copy ${PROTOBUF_SOURCE_WIN32_PATH}\..\src\google\protobuf\compiler\csharp\csharp_generator.h 
include\google\protobuf\compiler\csharp\csharp_generator.h +copy ${PROTOBUF_SOURCE_WIN32_PATH}\..\src\google\protobuf\compiler\csharp\csharp_names.h include\google\protobuf\compiler\csharp\csharp_names.h +copy ${PROTOBUF_SOURCE_WIN32_PATH}\..\src\google\protobuf\compiler\importer.h include\google\protobuf\compiler\importer.h +copy ${PROTOBUF_SOURCE_WIN32_PATH}\..\src\google\protobuf\compiler\java\java_generator.h include\google\protobuf\compiler\java\java_generator.h +copy ${PROTOBUF_SOURCE_WIN32_PATH}\..\src\google\protobuf\compiler\java\java_names.h include\google\protobuf\compiler\java\java_names.h +copy ${PROTOBUF_SOURCE_WIN32_PATH}\..\src\google\protobuf\compiler\javanano\javanano_generator.h include\google\protobuf\compiler\javanano\javanano_generator.h +copy ${PROTOBUF_SOURCE_WIN32_PATH}\..\src\google\protobuf\compiler\js\js_generator.h include\google\protobuf\compiler\js\js_generator.h +copy ${PROTOBUF_SOURCE_WIN32_PATH}\..\src\google\protobuf\compiler\objectivec\objectivec_generator.h include\google\protobuf\compiler\objectivec\objectivec_generator.h +copy ${PROTOBUF_SOURCE_WIN32_PATH}\..\src\google\protobuf\compiler\objectivec\objectivec_helpers.h include\google\protobuf\compiler\objectivec\objectivec_helpers.h +copy ${PROTOBUF_SOURCE_WIN32_PATH}\..\src\google\protobuf\compiler\parser.h include\google\protobuf\compiler\parser.h +copy ${PROTOBUF_SOURCE_WIN32_PATH}\..\src\google\protobuf\compiler\plugin.h include\google\protobuf\compiler\plugin.h +copy ${PROTOBUF_SOURCE_WIN32_PATH}\..\src\google\protobuf\compiler\plugin.pb.h include\google\protobuf\compiler\plugin.pb.h +copy ${PROTOBUF_SOURCE_WIN32_PATH}\..\src\google\protobuf\compiler\python\python_generator.h include\google\protobuf\compiler\python\python_generator.h +copy ${PROTOBUF_SOURCE_WIN32_PATH}\..\src\google\protobuf\compiler\ruby\ruby_generator.h include\google\protobuf\compiler\ruby\ruby_generator.h +copy ${PROTOBUF_SOURCE_WIN32_PATH}\..\src\google\protobuf\descriptor.h include\google\protobuf\descriptor.h +copy ${PROTOBUF_SOURCE_WIN32_PATH}\..\src\google\protobuf\descriptor.pb.h include\google\protobuf\descriptor.pb.h +copy ${PROTOBUF_SOURCE_WIN32_PATH}\..\src\google\protobuf\descriptor_database.h include\google\protobuf\descriptor_database.h +copy ${PROTOBUF_SOURCE_WIN32_PATH}\..\src\google\protobuf\duration.pb.h include\google\protobuf\duration.pb.h +copy ${PROTOBUF_SOURCE_WIN32_PATH}\..\src\google\protobuf\dynamic_message.h include\google\protobuf\dynamic_message.h +copy ${PROTOBUF_SOURCE_WIN32_PATH}\..\src\google\protobuf\empty.pb.h include\google\protobuf\empty.pb.h +copy ${PROTOBUF_SOURCE_WIN32_PATH}\..\src\google\protobuf\extension_set.h include\google\protobuf\extension_set.h +copy ${PROTOBUF_SOURCE_WIN32_PATH}\..\src\google\protobuf\field_mask.pb.h include\google\protobuf\field_mask.pb.h +copy ${PROTOBUF_SOURCE_WIN32_PATH}\..\src\google\protobuf\generated_enum_reflection.h include\google\protobuf\generated_enum_reflection.h +copy ${PROTOBUF_SOURCE_WIN32_PATH}\..\src\google\protobuf\generated_enum_util.h include\google\protobuf\generated_enum_util.h +copy ${PROTOBUF_SOURCE_WIN32_PATH}\..\src\google\protobuf\generated_message_reflection.h include\google\protobuf\generated_message_reflection.h +copy ${PROTOBUF_SOURCE_WIN32_PATH}\..\src\google\protobuf\generated_message_util.h include\google\protobuf\generated_message_util.h +copy ${PROTOBUF_SOURCE_WIN32_PATH}\..\src\google\protobuf\io\coded_stream.h include\google\protobuf\io\coded_stream.h +copy 
${PROTOBUF_SOURCE_WIN32_PATH}\..\src\google\protobuf\io\gzip_stream.h include\google\protobuf\io\gzip_stream.h +copy ${PROTOBUF_SOURCE_WIN32_PATH}\..\src\google\protobuf\io\printer.h include\google\protobuf\io\printer.h +copy ${PROTOBUF_SOURCE_WIN32_PATH}\..\src\google\protobuf\io\strtod.h include\google\protobuf\io\strtod.h +copy ${PROTOBUF_SOURCE_WIN32_PATH}\..\src\google\protobuf\io\tokenizer.h include\google\protobuf\io\tokenizer.h +copy ${PROTOBUF_SOURCE_WIN32_PATH}\..\src\google\protobuf\io\zero_copy_stream.h include\google\protobuf\io\zero_copy_stream.h +copy ${PROTOBUF_SOURCE_WIN32_PATH}\..\src\google\protobuf\io\zero_copy_stream_impl.h include\google\protobuf\io\zero_copy_stream_impl.h +copy ${PROTOBUF_SOURCE_WIN32_PATH}\..\src\google\protobuf\io\zero_copy_stream_impl_lite.h include\google\protobuf\io\zero_copy_stream_impl_lite.h +copy ${PROTOBUF_SOURCE_WIN32_PATH}\..\src\google\protobuf\map.h include\google\protobuf\map.h +copy ${PROTOBUF_SOURCE_WIN32_PATH}\..\src\google\protobuf\map_entry.h include\google\protobuf\map_entry.h +copy ${PROTOBUF_SOURCE_WIN32_PATH}\..\src\google\protobuf\map_entry_lite.h include\google\protobuf\map_entry_lite.h +copy ${PROTOBUF_SOURCE_WIN32_PATH}\..\src\google\protobuf\map_field.h include\google\protobuf\map_field.h +copy ${PROTOBUF_SOURCE_WIN32_PATH}\..\src\google\protobuf\map_field_inl.h include\google\protobuf\map_field_inl.h +copy ${PROTOBUF_SOURCE_WIN32_PATH}\..\src\google\protobuf\map_field_lite.h include\google\protobuf\map_field_lite.h +copy ${PROTOBUF_SOURCE_WIN32_PATH}\..\src\google\protobuf\map_type_handler.h include\google\protobuf\map_type_handler.h +copy ${PROTOBUF_SOURCE_WIN32_PATH}\..\src\google\protobuf\message.h include\google\protobuf\message.h +copy ${PROTOBUF_SOURCE_WIN32_PATH}\..\src\google\protobuf\message_lite.h include\google\protobuf\message_lite.h +copy ${PROTOBUF_SOURCE_WIN32_PATH}\..\src\google\protobuf\metadata.h include\google\protobuf\metadata.h +copy ${PROTOBUF_SOURCE_WIN32_PATH}\..\src\google\protobuf\reflection.h include\google\protobuf\reflection.h +copy ${PROTOBUF_SOURCE_WIN32_PATH}\..\src\google\protobuf\reflection_ops.h include\google\protobuf\reflection_ops.h +copy ${PROTOBUF_SOURCE_WIN32_PATH}\..\src\google\protobuf\repeated_field.h include\google\protobuf\repeated_field.h +copy ${PROTOBUF_SOURCE_WIN32_PATH}\..\src\google\protobuf\repeated_field_reflection.h include\google\protobuf\repeated_field_reflection.h +copy ${PROTOBUF_SOURCE_WIN32_PATH}\..\src\google\protobuf\service.h include\google\protobuf\service.h +copy ${PROTOBUF_SOURCE_WIN32_PATH}\..\src\google\protobuf\source_context.pb.h include\google\protobuf\source_context.pb.h +copy ${PROTOBUF_SOURCE_WIN32_PATH}\..\src\google\protobuf\struct.pb.h include\google\protobuf\struct.pb.h +copy ${PROTOBUF_SOURCE_WIN32_PATH}\..\src\google\protobuf\stubs\atomic_sequence_num.h include\google\protobuf\stubs\atomic_sequence_num.h +copy ${PROTOBUF_SOURCE_WIN32_PATH}\..\src\google\protobuf\stubs\atomicops.h include\google\protobuf\stubs\atomicops.h +copy ${PROTOBUF_SOURCE_WIN32_PATH}\..\src\google\protobuf\stubs\atomicops_internals_arm64_gcc.h include\google\protobuf\stubs\atomicops_internals_arm64_gcc.h +copy ${PROTOBUF_SOURCE_WIN32_PATH}\..\src\google\protobuf\stubs\atomicops_internals_arm_gcc.h include\google\protobuf\stubs\atomicops_internals_arm_gcc.h +copy ${PROTOBUF_SOURCE_WIN32_PATH}\..\src\google\protobuf\stubs\atomicops_internals_arm_qnx.h include\google\protobuf\stubs\atomicops_internals_arm_qnx.h +copy 
${PROTOBUF_SOURCE_WIN32_PATH}\..\src\google\protobuf\stubs\atomicops_internals_atomicword_compat.h include\google\protobuf\stubs\atomicops_internals_atomicword_compat.h +copy ${PROTOBUF_SOURCE_WIN32_PATH}\..\src\google\protobuf\stubs\atomicops_internals_generic_gcc.h include\google\protobuf\stubs\atomicops_internals_generic_gcc.h +copy ${PROTOBUF_SOURCE_WIN32_PATH}\..\src\google\protobuf\stubs\atomicops_internals_macosx.h include\google\protobuf\stubs\atomicops_internals_macosx.h +copy ${PROTOBUF_SOURCE_WIN32_PATH}\..\src\google\protobuf\stubs\atomicops_internals_mips_gcc.h include\google\protobuf\stubs\atomicops_internals_mips_gcc.h +copy ${PROTOBUF_SOURCE_WIN32_PATH}\..\src\google\protobuf\stubs\atomicops_internals_pnacl.h include\google\protobuf\stubs\atomicops_internals_pnacl.h +copy ${PROTOBUF_SOURCE_WIN32_PATH}\..\src\google\protobuf\stubs\atomicops_internals_power.h include\google\protobuf\stubs\atomicops_internals_power.h +copy ${PROTOBUF_SOURCE_WIN32_PATH}\..\src\google\protobuf\stubs\atomicops_internals_solaris.h include\google\protobuf\stubs\atomicops_internals_solaris.h +copy ${PROTOBUF_SOURCE_WIN32_PATH}\..\src\google\protobuf\stubs\atomicops_internals_tsan.h include\google\protobuf\stubs\atomicops_internals_tsan.h +copy ${PROTOBUF_SOURCE_WIN32_PATH}\..\src\google\protobuf\stubs\atomicops_internals_x86_gcc.h include\google\protobuf\stubs\atomicops_internals_x86_gcc.h +copy ${PROTOBUF_SOURCE_WIN32_PATH}\..\src\google\protobuf\stubs\atomicops_internals_x86_msvc.h include\google\protobuf\stubs\atomicops_internals_x86_msvc.h +copy ${PROTOBUF_SOURCE_WIN32_PATH}\..\src\google\protobuf\stubs\bytestream.h include\google\protobuf\stubs\bytestream.h +copy ${PROTOBUF_SOURCE_WIN32_PATH}\..\src\google\protobuf\stubs\callback.h include\google\protobuf\stubs\callback.h +copy ${PROTOBUF_SOURCE_WIN32_PATH}\..\src\google\protobuf\stubs\casts.h include\google\protobuf\stubs\casts.h +copy ${PROTOBUF_SOURCE_WIN32_PATH}\..\src\google\protobuf\stubs\common.h include\google\protobuf\stubs\common.h +copy ${PROTOBUF_SOURCE_WIN32_PATH}\..\src\google\protobuf\stubs\fastmem.h include\google\protobuf\stubs\fastmem.h +copy ${PROTOBUF_SOURCE_WIN32_PATH}\..\src\google\protobuf\stubs\hash.h include\google\protobuf\stubs\hash.h +copy ${PROTOBUF_SOURCE_WIN32_PATH}\..\src\google\protobuf\stubs\logging.h include\google\protobuf\stubs\logging.h +copy ${PROTOBUF_SOURCE_WIN32_PATH}\..\src\google\protobuf\stubs\macros.h include\google\protobuf\stubs\macros.h +copy ${PROTOBUF_SOURCE_WIN32_PATH}\..\src\google\protobuf\stubs\mutex.h include\google\protobuf\stubs\mutex.h +copy ${PROTOBUF_SOURCE_WIN32_PATH}\..\src\google\protobuf\stubs\once.h include\google\protobuf\stubs\once.h +copy ${PROTOBUF_SOURCE_WIN32_PATH}\..\src\google\protobuf\stubs\platform_macros.h include\google\protobuf\stubs\platform_macros.h +copy ${PROTOBUF_SOURCE_WIN32_PATH}\..\src\google\protobuf\stubs\port.h include\google\protobuf\stubs\port.h +copy ${PROTOBUF_SOURCE_WIN32_PATH}\..\src\google\protobuf\stubs\scoped_ptr.h include\google\protobuf\stubs\scoped_ptr.h +copy ${PROTOBUF_SOURCE_WIN32_PATH}\..\src\google\protobuf\stubs\shared_ptr.h include\google\protobuf\stubs\shared_ptr.h +copy ${PROTOBUF_SOURCE_WIN32_PATH}\..\src\google\protobuf\stubs\singleton.h include\google\protobuf\stubs\singleton.h +copy ${PROTOBUF_SOURCE_WIN32_PATH}\..\src\google\protobuf\stubs\status.h include\google\protobuf\stubs\status.h +copy ${PROTOBUF_SOURCE_WIN32_PATH}\..\src\google\protobuf\stubs\stl_util.h include\google\protobuf\stubs\stl_util.h +copy 
${PROTOBUF_SOURCE_WIN32_PATH}\..\src\google\protobuf\stubs\stringpiece.h include\google\protobuf\stubs\stringpiece.h +copy ${PROTOBUF_SOURCE_WIN32_PATH}\..\src\google\protobuf\stubs\template_util.h include\google\protobuf\stubs\template_util.h +copy ${PROTOBUF_SOURCE_WIN32_PATH}\..\src\google\protobuf\stubs\type_traits.h include\google\protobuf\stubs\type_traits.h +copy ${PROTOBUF_SOURCE_WIN32_PATH}\..\src\google\protobuf\text_format.h include\google\protobuf\text_format.h +copy ${PROTOBUF_SOURCE_WIN32_PATH}\..\src\google\protobuf\timestamp.pb.h include\google\protobuf\timestamp.pb.h +copy ${PROTOBUF_SOURCE_WIN32_PATH}\..\src\google\protobuf\type.pb.h include\google\protobuf\type.pb.h +copy ${PROTOBUF_SOURCE_WIN32_PATH}\..\src\google\protobuf\unknown_field_set.h include\google\protobuf\unknown_field_set.h +copy ${PROTOBUF_SOURCE_WIN32_PATH}\..\src\google\protobuf\util\field_comparator.h include\google\protobuf\util\field_comparator.h +copy ${PROTOBUF_SOURCE_WIN32_PATH}\..\src\google\protobuf\util\field_mask_util.h include\google\protobuf\util\field_mask_util.h +copy ${PROTOBUF_SOURCE_WIN32_PATH}\..\src\google\protobuf\util\json_util.h include\google\protobuf\util\json_util.h +copy ${PROTOBUF_SOURCE_WIN32_PATH}\..\src\google\protobuf\util\message_differencer.h include\google\protobuf\util\message_differencer.h +copy ${PROTOBUF_SOURCE_WIN32_PATH}\..\src\google\protobuf\util\time_util.h include\google\protobuf\util\time_util.h +copy ${PROTOBUF_SOURCE_WIN32_PATH}\..\src\google\protobuf\util\type_resolver.h include\google\protobuf\util\type_resolver.h +copy ${PROTOBUF_SOURCE_WIN32_PATH}\..\src\google\protobuf\util\type_resolver_util.h include\google\protobuf\util\type_resolver_util.h +copy ${PROTOBUF_SOURCE_WIN32_PATH}\..\src\google\protobuf\wire_format.h include\google\protobuf\wire_format.h +copy ${PROTOBUF_SOURCE_WIN32_PATH}\..\src\google\protobuf\wire_format_lite.h include\google\protobuf\wire_format_lite.h +copy ${PROTOBUF_SOURCE_WIN32_PATH}\..\src\google\protobuf\wire_format_lite_inl.h include\google\protobuf\wire_format_lite_inl.h +copy ${PROTOBUF_SOURCE_WIN32_PATH}\..\src\google\protobuf\wrappers.pb.h include\google\protobuf\wrappers.pb.h diff --git a/packager/third_party/protobuf/cmake/install.cmake b/packager/third_party/protobuf/cmake/install.cmake new file mode 100644 index 0000000000..dbb4265d45 --- /dev/null +++ b/packager/third_party/protobuf/cmake/install.cmake @@ -0,0 +1,103 @@ +include(GNUInstallDirs) + +foreach(_library + libprotobuf-lite + libprotobuf + libprotoc) + set_property(TARGET ${_library} + PROPERTY INTERFACE_INCLUDE_DIRECTORIES + $) + install(TARGETS ${_library} EXPORT protobuf-targets + RUNTIME DESTINATION ${CMAKE_INSTALL_BINDIR} COMPONENT ${_library} + LIBRARY DESTINATION ${CMAKE_INSTALL_LIBDIR} COMPONENT ${_library} + ARCHIVE DESTINATION ${CMAKE_INSTALL_LIBDIR} COMPONENT ${_library}) +endforeach() + +install(TARGETS protoc EXPORT protobuf-targets + RUNTIME DESTINATION ${CMAKE_INSTALL_BINDIR} COMPONENT protoc) + +if(TRUE) + file(STRINGS extract_includes.bat.in _extract_strings + REGEX "^copy") + foreach(_extract_string ${_extract_strings}) + string(REPLACE "copy \${PROTOBUF_SOURCE_WIN32_PATH}\\" "" + _extract_string ${_extract_string}) + string(REPLACE "\\" "/" _extract_string ${_extract_string}) + string(REGEX MATCH "^[^ ]+" + _extract_from ${_extract_string}) + string(REGEX REPLACE "^${_extract_from} ([^$]+)" "\\1" + _extract_to ${_extract_string}) + get_filename_component(_extract_from "${protobuf_SOURCE_DIR}/${_extract_from}" ABSOLUTE) + 
get_filename_component(_extract_name ${_extract_to} NAME) + get_filename_component(_extract_to ${_extract_to} PATH) + string(REPLACE "include/" "${CMAKE_INSTALL_INCLUDEDIR}/" + _extract_to "${_extract_to}") + if(EXISTS "${_extract_from}") + install(FILES "${_extract_from}" + DESTINATION "${_extract_to}" + COMPONENT protobuf-headers + RENAME "${_extract_name}") + else() + message(AUTHOR_WARNING "The file \"${_extract_from}\" is listed in " + "\"${protobuf_SOURCE_DIR}/cmake/extract_includes.bat.in\" " + "but there not exists. The file will not be installed.") + endif() + endforeach() +endif() + +# Internal function for parsing auto tools scripts +function(_protobuf_auto_list FILE_NAME VARIABLE) + file(STRINGS ${FILE_NAME} _strings) + set(_list) + foreach(_string ${_strings}) + set(_found) + string(REGEX MATCH "^[ \t]*${VARIABLE}[ \t]*=[ \t]*" _found "${_string}") + if(_found) + string(LENGTH "${_found}" _length) + string(SUBSTRING "${_string}" ${_length} -1 _draft_list) + foreach(_item ${_draft_list}) + string(STRIP "${_item}" _item) + list(APPEND _list "${_item}") + endforeach() + endif() + endforeach() + set(${VARIABLE} ${_list} PARENT_SCOPE) +endfunction() + +# Install well-known type proto files +_protobuf_auto_list("../src/Makefile.am" nobase_dist_proto_DATA) +foreach(_file ${nobase_dist_proto_DATA}) + get_filename_component(_file_from "../src/${_file}" ABSOLUTE) + get_filename_component(_file_name ${_file} NAME) + get_filename_component(_file_path ${_file} PATH) + if(EXISTS "${_file_from}") + install(FILES "${_file_from}" + DESTINATION "${CMAKE_INSTALL_INCLUDEDIR}/${_file_path}" + COMPONENT protobuf-protos + RENAME "${_file_name}") + else() + message(AUTHOR_WARNING "The file \"${_file_from}\" is listed in " + "\"${protobuf_SOURCE_DIR}/../src/Makefile.am\" as nobase_dist_proto_DATA " + "but there not exists. 
The file will not be installed.") + endif() +endforeach() + +# Export configuration + +install(EXPORT protobuf-targets + DESTINATION "lib/cmake/protobuf" + COMPONENT protobuf-export) + +configure_file(protobuf-config.cmake.in + protobuf-config.cmake @ONLY) +configure_file(protobuf-config-version.cmake.in + protobuf-config-version.cmake @ONLY) +configure_file(protobuf-module.cmake.in + protobuf-module.cmake @ONLY) + +install(FILES + "${protobuf_BINARY_DIR}/protobuf-config.cmake" + "${protobuf_BINARY_DIR}/protobuf-config-version.cmake" + "${protobuf_BINARY_DIR}/protobuf-module.cmake" + DESTINATION "lib/cmake/protobuf" + COMPONENT protobuf-export) diff --git a/packager/third_party/protobuf/cmake/libprotobuf-lite.cmake b/packager/third_party/protobuf/cmake/libprotobuf-lite.cmake new file mode 100644 index 0000000000..036b051707 --- /dev/null +++ b/packager/third_party/protobuf/cmake/libprotobuf-lite.cmake @@ -0,0 +1,38 @@ +set(libprotobuf_lite_files + ${protobuf_source_dir}/src/google/protobuf/arena.cc + ${protobuf_source_dir}/src/google/protobuf/arenastring.cc + ${protobuf_source_dir}/src/google/protobuf/extension_set.cc + ${protobuf_source_dir}/src/google/protobuf/generated_message_util.cc + ${protobuf_source_dir}/src/google/protobuf/io/coded_stream.cc + ${protobuf_source_dir}/src/google/protobuf/io/zero_copy_stream.cc + ${protobuf_source_dir}/src/google/protobuf/io/zero_copy_stream_impl_lite.cc + ${protobuf_source_dir}/src/google/protobuf/message_lite.cc + ${protobuf_source_dir}/src/google/protobuf/repeated_field.cc + ${protobuf_source_dir}/src/google/protobuf/stubs/atomicops_internals_x86_gcc.cc + ${protobuf_source_dir}/src/google/protobuf/stubs/atomicops_internals_x86_msvc.cc + ${protobuf_source_dir}/src/google/protobuf/stubs/bytestream.cc + ${protobuf_source_dir}/src/google/protobuf/stubs/common.cc + ${protobuf_source_dir}/src/google/protobuf/stubs/int128.cc + ${protobuf_source_dir}/src/google/protobuf/stubs/once.cc + ${protobuf_source_dir}/src/google/protobuf/stubs/status.cc + ${protobuf_source_dir}/src/google/protobuf/stubs/statusor.cc + ${protobuf_source_dir}/src/google/protobuf/stubs/stringpiece.cc + ${protobuf_source_dir}/src/google/protobuf/stubs/stringprintf.cc + ${protobuf_source_dir}/src/google/protobuf/stubs/structurally_valid.cc + ${protobuf_source_dir}/src/google/protobuf/stubs/strutil.cc + ${protobuf_source_dir}/src/google/protobuf/stubs/time.cc + ${protobuf_source_dir}/src/google/protobuf/wire_format_lite.cc +) + +add_library(libprotobuf-lite ${protobuf_SHARED_OR_STATIC} + ${libprotobuf_lite_files}) +target_link_libraries(libprotobuf-lite ${CMAKE_THREAD_LIBS_INIT}) +target_include_directories(libprotobuf-lite PUBLIC ${protobuf_source_dir}/src) +if(MSVC AND protobuf_BUILD_SHARED_LIBS) + target_compile_definitions(libprotobuf-lite + PUBLIC PROTOBUF_USE_DLLS + PRIVATE LIBPROTOBUF_EXPORTS) +endif() +set_target_properties(libprotobuf-lite PROPERTIES + OUTPUT_NAME ${LIB_PREFIX}protobuf-lite + DEBUG_POSTFIX "${protobuf_DEBUG_POSTFIX}") diff --git a/packager/third_party/protobuf/cmake/libprotobuf.cmake b/packager/third_party/protobuf/cmake/libprotobuf.cmake new file mode 100644 index 0000000000..8930c1ca1e --- /dev/null +++ b/packager/third_party/protobuf/cmake/libprotobuf.cmake @@ -0,0 +1,68 @@ +set(libprotobuf_files + ${protobuf_source_dir}/src/google/protobuf/any.cc + ${protobuf_source_dir}/src/google/protobuf/any.pb.cc + ${protobuf_source_dir}/src/google/protobuf/api.pb.cc + ${protobuf_source_dir}/src/google/protobuf/compiler/importer.cc + 
${protobuf_source_dir}/src/google/protobuf/compiler/parser.cc + ${protobuf_source_dir}/src/google/protobuf/descriptor.cc + ${protobuf_source_dir}/src/google/protobuf/descriptor.pb.cc + ${protobuf_source_dir}/src/google/protobuf/descriptor_database.cc + ${protobuf_source_dir}/src/google/protobuf/duration.pb.cc + ${protobuf_source_dir}/src/google/protobuf/dynamic_message.cc + ${protobuf_source_dir}/src/google/protobuf/empty.pb.cc + ${protobuf_source_dir}/src/google/protobuf/extension_set_heavy.cc + ${protobuf_source_dir}/src/google/protobuf/field_mask.pb.cc + ${protobuf_source_dir}/src/google/protobuf/generated_message_reflection.cc + ${protobuf_source_dir}/src/google/protobuf/io/gzip_stream.cc + ${protobuf_source_dir}/src/google/protobuf/io/printer.cc + ${protobuf_source_dir}/src/google/protobuf/io/strtod.cc + ${protobuf_source_dir}/src/google/protobuf/io/tokenizer.cc + ${protobuf_source_dir}/src/google/protobuf/io/zero_copy_stream_impl.cc + ${protobuf_source_dir}/src/google/protobuf/map_field.cc + ${protobuf_source_dir}/src/google/protobuf/message.cc + ${protobuf_source_dir}/src/google/protobuf/reflection_ops.cc + ${protobuf_source_dir}/src/google/protobuf/service.cc + ${protobuf_source_dir}/src/google/protobuf/source_context.pb.cc + ${protobuf_source_dir}/src/google/protobuf/struct.pb.cc + ${protobuf_source_dir}/src/google/protobuf/stubs/mathlimits.cc + ${protobuf_source_dir}/src/google/protobuf/stubs/substitute.cc + ${protobuf_source_dir}/src/google/protobuf/text_format.cc + ${protobuf_source_dir}/src/google/protobuf/timestamp.pb.cc + ${protobuf_source_dir}/src/google/protobuf/type.pb.cc + ${protobuf_source_dir}/src/google/protobuf/unknown_field_set.cc + ${protobuf_source_dir}/src/google/protobuf/util/field_comparator.cc + ${protobuf_source_dir}/src/google/protobuf/util/field_mask_util.cc + ${protobuf_source_dir}/src/google/protobuf/util/internal/datapiece.cc + ${protobuf_source_dir}/src/google/protobuf/util/internal/default_value_objectwriter.cc + ${protobuf_source_dir}/src/google/protobuf/util/internal/error_listener.cc + ${protobuf_source_dir}/src/google/protobuf/util/internal/field_mask_utility.cc + ${protobuf_source_dir}/src/google/protobuf/util/internal/json_escaping.cc + ${protobuf_source_dir}/src/google/protobuf/util/internal/json_objectwriter.cc + ${protobuf_source_dir}/src/google/protobuf/util/internal/json_stream_parser.cc + ${protobuf_source_dir}/src/google/protobuf/util/internal/object_writer.cc + ${protobuf_source_dir}/src/google/protobuf/util/internal/proto_writer.cc + ${protobuf_source_dir}/src/google/protobuf/util/internal/protostream_objectsource.cc + ${protobuf_source_dir}/src/google/protobuf/util/internal/protostream_objectwriter.cc + ${protobuf_source_dir}/src/google/protobuf/util/internal/type_info.cc + ${protobuf_source_dir}/src/google/protobuf/util/internal/type_info_test_helper.cc + ${protobuf_source_dir}/src/google/protobuf/util/internal/utility.cc + ${protobuf_source_dir}/src/google/protobuf/util/json_util.cc + ${protobuf_source_dir}/src/google/protobuf/util/message_differencer.cc + ${protobuf_source_dir}/src/google/protobuf/util/time_util.cc + ${protobuf_source_dir}/src/google/protobuf/util/type_resolver_util.cc + ${protobuf_source_dir}/src/google/protobuf/wire_format.cc + ${protobuf_source_dir}/src/google/protobuf/wrappers.pb.cc +) + +add_library(libprotobuf ${protobuf_SHARED_OR_STATIC} + ${libprotobuf_lite_files} ${libprotobuf_files}) +target_link_libraries(libprotobuf ${CMAKE_THREAD_LIBS_INIT} ${ZLIB_LIBRARIES}) +target_include_directories(libprotobuf 
PUBLIC ${protobuf_source_dir}/src) +if(MSVC AND protobuf_BUILD_SHARED_LIBS) + target_compile_definitions(libprotobuf + PUBLIC PROTOBUF_USE_DLLS + PRIVATE LIBPROTOBUF_EXPORTS) +endif() +set_target_properties(libprotobuf PROPERTIES + OUTPUT_NAME ${LIB_PREFIX}protobuf + DEBUG_POSTFIX "${protobuf_DEBUG_POSTFIX}") diff --git a/packager/third_party/protobuf/cmake/libprotoc.cmake b/packager/third_party/protobuf/cmake/libprotoc.cmake new file mode 100644 index 0000000000..8df8986a11 --- /dev/null +++ b/packager/third_party/protobuf/cmake/libprotoc.cmake @@ -0,0 +1,106 @@ +set(libprotoc_files + ${protobuf_source_dir}/src/google/protobuf/compiler/code_generator.cc + ${protobuf_source_dir}/src/google/protobuf/compiler/command_line_interface.cc + ${protobuf_source_dir}/src/google/protobuf/compiler/cpp/cpp_enum.cc + ${protobuf_source_dir}/src/google/protobuf/compiler/cpp/cpp_enum_field.cc + ${protobuf_source_dir}/src/google/protobuf/compiler/cpp/cpp_extension.cc + ${protobuf_source_dir}/src/google/protobuf/compiler/cpp/cpp_field.cc + ${protobuf_source_dir}/src/google/protobuf/compiler/cpp/cpp_file.cc + ${protobuf_source_dir}/src/google/protobuf/compiler/cpp/cpp_generator.cc + ${protobuf_source_dir}/src/google/protobuf/compiler/cpp/cpp_helpers.cc + ${protobuf_source_dir}/src/google/protobuf/compiler/cpp/cpp_map_field.cc + ${protobuf_source_dir}/src/google/protobuf/compiler/cpp/cpp_message.cc + ${protobuf_source_dir}/src/google/protobuf/compiler/cpp/cpp_message_field.cc + ${protobuf_source_dir}/src/google/protobuf/compiler/cpp/cpp_primitive_field.cc + ${protobuf_source_dir}/src/google/protobuf/compiler/cpp/cpp_service.cc + ${protobuf_source_dir}/src/google/protobuf/compiler/cpp/cpp_string_field.cc + ${protobuf_source_dir}/src/google/protobuf/compiler/csharp/csharp_doc_comment.cc + ${protobuf_source_dir}/src/google/protobuf/compiler/csharp/csharp_enum.cc + ${protobuf_source_dir}/src/google/protobuf/compiler/csharp/csharp_enum_field.cc + ${protobuf_source_dir}/src/google/protobuf/compiler/csharp/csharp_field_base.cc + ${protobuf_source_dir}/src/google/protobuf/compiler/csharp/csharp_generator.cc + ${protobuf_source_dir}/src/google/protobuf/compiler/csharp/csharp_helpers.cc + ${protobuf_source_dir}/src/google/protobuf/compiler/csharp/csharp_map_field.cc + ${protobuf_source_dir}/src/google/protobuf/compiler/csharp/csharp_message.cc + ${protobuf_source_dir}/src/google/protobuf/compiler/csharp/csharp_message_field.cc + ${protobuf_source_dir}/src/google/protobuf/compiler/csharp/csharp_primitive_field.cc + ${protobuf_source_dir}/src/google/protobuf/compiler/csharp/csharp_reflection_class.cc + ${protobuf_source_dir}/src/google/protobuf/compiler/csharp/csharp_repeated_enum_field.cc + ${protobuf_source_dir}/src/google/protobuf/compiler/csharp/csharp_repeated_message_field.cc + ${protobuf_source_dir}/src/google/protobuf/compiler/csharp/csharp_repeated_primitive_field.cc + ${protobuf_source_dir}/src/google/protobuf/compiler/csharp/csharp_source_generator_base.cc + ${protobuf_source_dir}/src/google/protobuf/compiler/csharp/csharp_wrapper_field.cc + ${protobuf_source_dir}/src/google/protobuf/compiler/java/java_context.cc + ${protobuf_source_dir}/src/google/protobuf/compiler/java/java_doc_comment.cc + ${protobuf_source_dir}/src/google/protobuf/compiler/java/java_enum.cc + ${protobuf_source_dir}/src/google/protobuf/compiler/java/java_enum_field.cc + ${protobuf_source_dir}/src/google/protobuf/compiler/java/java_enum_field_lite.cc + ${protobuf_source_dir}/src/google/protobuf/compiler/java/java_enum_lite.cc + 
${protobuf_source_dir}/src/google/protobuf/compiler/java/java_extension.cc + ${protobuf_source_dir}/src/google/protobuf/compiler/java/java_extension_lite.cc + ${protobuf_source_dir}/src/google/protobuf/compiler/java/java_field.cc + ${protobuf_source_dir}/src/google/protobuf/compiler/java/java_file.cc + ${protobuf_source_dir}/src/google/protobuf/compiler/java/java_generator.cc + ${protobuf_source_dir}/src/google/protobuf/compiler/java/java_generator_factory.cc + ${protobuf_source_dir}/src/google/protobuf/compiler/java/java_helpers.cc + ${protobuf_source_dir}/src/google/protobuf/compiler/java/java_lazy_message_field.cc + ${protobuf_source_dir}/src/google/protobuf/compiler/java/java_lazy_message_field_lite.cc + ${protobuf_source_dir}/src/google/protobuf/compiler/java/java_map_field.cc + ${protobuf_source_dir}/src/google/protobuf/compiler/java/java_map_field_lite.cc + ${protobuf_source_dir}/src/google/protobuf/compiler/java/java_message.cc + ${protobuf_source_dir}/src/google/protobuf/compiler/java/java_message_builder.cc + ${protobuf_source_dir}/src/google/protobuf/compiler/java/java_message_builder_lite.cc + ${protobuf_source_dir}/src/google/protobuf/compiler/java/java_message_field.cc + ${protobuf_source_dir}/src/google/protobuf/compiler/java/java_message_field_lite.cc + ${protobuf_source_dir}/src/google/protobuf/compiler/java/java_message_lite.cc + ${protobuf_source_dir}/src/google/protobuf/compiler/java/java_name_resolver.cc + ${protobuf_source_dir}/src/google/protobuf/compiler/java/java_primitive_field.cc + ${protobuf_source_dir}/src/google/protobuf/compiler/java/java_primitive_field_lite.cc + ${protobuf_source_dir}/src/google/protobuf/compiler/java/java_service.cc + ${protobuf_source_dir}/src/google/protobuf/compiler/java/java_shared_code_generator.cc + ${protobuf_source_dir}/src/google/protobuf/compiler/java/java_string_field.cc + ${protobuf_source_dir}/src/google/protobuf/compiler/java/java_string_field_lite.cc + ${protobuf_source_dir}/src/google/protobuf/compiler/javanano/javanano_enum.cc + ${protobuf_source_dir}/src/google/protobuf/compiler/javanano/javanano_enum_field.cc + ${protobuf_source_dir}/src/google/protobuf/compiler/javanano/javanano_extension.cc + ${protobuf_source_dir}/src/google/protobuf/compiler/javanano/javanano_field.cc + ${protobuf_source_dir}/src/google/protobuf/compiler/javanano/javanano_file.cc + ${protobuf_source_dir}/src/google/protobuf/compiler/javanano/javanano_generator.cc + ${protobuf_source_dir}/src/google/protobuf/compiler/javanano/javanano_helpers.cc + ${protobuf_source_dir}/src/google/protobuf/compiler/javanano/javanano_map_field.cc + ${protobuf_source_dir}/src/google/protobuf/compiler/javanano/javanano_message.cc + ${protobuf_source_dir}/src/google/protobuf/compiler/javanano/javanano_message_field.cc + ${protobuf_source_dir}/src/google/protobuf/compiler/javanano/javanano_primitive_field.cc + ${protobuf_source_dir}/src/google/protobuf/compiler/js/js_generator.cc + ${protobuf_source_dir}/src/google/protobuf/compiler/objectivec/objectivec_enum.cc + ${protobuf_source_dir}/src/google/protobuf/compiler/objectivec/objectivec_enum_field.cc + ${protobuf_source_dir}/src/google/protobuf/compiler/objectivec/objectivec_extension.cc + ${protobuf_source_dir}/src/google/protobuf/compiler/objectivec/objectivec_field.cc + ${protobuf_source_dir}/src/google/protobuf/compiler/objectivec/objectivec_file.cc + ${protobuf_source_dir}/src/google/protobuf/compiler/objectivec/objectivec_generator.cc + 
${protobuf_source_dir}/src/google/protobuf/compiler/objectivec/objectivec_helpers.cc + ${protobuf_source_dir}/src/google/protobuf/compiler/objectivec/objectivec_map_field.cc + ${protobuf_source_dir}/src/google/protobuf/compiler/objectivec/objectivec_message.cc + ${protobuf_source_dir}/src/google/protobuf/compiler/objectivec/objectivec_message_field.cc + ${protobuf_source_dir}/src/google/protobuf/compiler/objectivec/objectivec_oneof.cc + ${protobuf_source_dir}/src/google/protobuf/compiler/objectivec/objectivec_primitive_field.cc + ${protobuf_source_dir}/src/google/protobuf/compiler/plugin.cc + ${protobuf_source_dir}/src/google/protobuf/compiler/plugin.pb.cc + ${protobuf_source_dir}/src/google/protobuf/compiler/python/python_generator.cc + ${protobuf_source_dir}/src/google/protobuf/compiler/ruby/ruby_generator.cc + ${protobuf_source_dir}/src/google/protobuf/compiler/subprocess.cc + ${protobuf_source_dir}/src/google/protobuf/compiler/zip_writer.cc +) + +add_library(libprotoc ${protobuf_SHARED_OR_STATIC} + ${libprotoc_files}) +target_link_libraries(libprotoc libprotobuf) +if(MSVC AND protobuf_BUILD_SHARED_LIBS) + target_compile_definitions(libprotoc + PUBLIC PROTOBUF_USE_DLLS + PRIVATE LIBPROTOC_EXPORTS) +endif() +set_target_properties(libprotoc PROPERTIES + COMPILE_DEFINITIONS LIBPROTOC_EXPORTS + OUTPUT_NAME ${LIB_PREFIX}protoc + DEBUG_POSTFIX "${protobuf_DEBUG_POSTFIX}") diff --git a/packager/third_party/protobuf/cmake/protobuf-config-version.cmake.in b/packager/third_party/protobuf/cmake/protobuf-config-version.cmake.in new file mode 100644 index 0000000000..1f171c6649 --- /dev/null +++ b/packager/third_party/protobuf/cmake/protobuf-config-version.cmake.in @@ -0,0 +1 @@ +set(PACKAGE_VERSION @protobuf_VERSION@) diff --git a/packager/third_party/protobuf/cmake/protobuf-config.cmake.in b/packager/third_party/protobuf/cmake/protobuf-config.cmake.in new file mode 100644 index 0000000000..bb0997b88b --- /dev/null +++ b/packager/third_party/protobuf/cmake/protobuf-config.cmake.in @@ -0,0 +1,27 @@ +# Version info variables +set(PROTOBUF_VERSION "@protobuf_VERSION@") +set(PROTOBUF_VERSION_STRING "@protobuf_VERSION_STRING@") + +# Current dir +get_filename_component(_PROTOBUF_PACKAGE_PREFIX + "${CMAKE_CURRENT_LIST_FILE}" PATH) + +# Imported targets +include("${_PROTOBUF_PACKAGE_PREFIX}/protobuf-targets.cmake") + +# Compute the installation prefix relative to this file. +get_filename_component(_PROTOBUF_IMPORT_PREFIX + "${_PROTOBUF_PACKAGE_PREFIX}" PATH) +get_filename_component(_PROTOBUF_IMPORT_PREFIX + "${_PROTOBUF_IMPORT_PREFIX}" PATH) +get_filename_component(_PROTOBUF_IMPORT_PREFIX + "${_PROTOBUF_IMPORT_PREFIX}" PATH) + +# CMake FindProtobuf module compatible file +if(NOT DEFINED PROTOBUF_MODULE_COMPATIBLE OR "${PROTOBUF_MODULE_COMPATIBLE}") + include("${_PROTOBUF_PACKAGE_PREFIX}/protobuf-module.cmake") +endif() + +# Cleanup temporary variables. 
+set(_PROTOBUF_PACKAGE_PREFIX) +set(_PROTOBUF_IMPORT_PREFIX) diff --git a/packager/third_party/protobuf/cmake/protobuf-module.cmake.in b/packager/third_party/protobuf/cmake/protobuf-module.cmake.in new file mode 100644 index 0000000000..d81dc459b6 --- /dev/null +++ b/packager/third_party/protobuf/cmake/protobuf-module.cmake.in @@ -0,0 +1,139 @@ +if(PROTOBUF_SRC_ROOT_FOLDER) + message(AUTHOR_WARNING "Variable PROTOBUF_SRC_ROOT_FOLDER defined, but not" + " used in CONFIG mode") +endif() + +function(PROTOBUF_GENERATE_CPP SRCS HDRS) + if(NOT ARGN) + message(SEND_ERROR "Error: PROTOBUF_GENERATE_CPP() called without any proto files") + return() + endif() + + if(PROTOBUF_GENERATE_CPP_APPEND_PATH) + # Create an include path for each file specified + foreach(FIL ${ARGN}) + get_filename_component(ABS_FIL ${FIL} ABSOLUTE) + get_filename_component(ABS_PATH ${ABS_FIL} PATH) + list(FIND _protobuf_include_path ${ABS_PATH} _contains_already) + if(${_contains_already} EQUAL -1) + list(APPEND _protobuf_include_path -I ${ABS_PATH}) + endif() + endforeach() + else() + set(_protobuf_include_path -I ${CMAKE_CURRENT_SOURCE_DIR}) + endif() + + # Add well-known type protos include path + list(APPEND _protobuf_include_path + -I "${_PROTOBUF_IMPORT_PREFIX}/@CMAKE_INSTALL_INCLUDEDIR@") + + if(DEFINED PROTOBUF_IMPORT_DIRS) + foreach(DIR ${PROTOBUF_IMPORT_DIRS}) + get_filename_component(ABS_PATH ${DIR} ABSOLUTE) + list(FIND _protobuf_include_path ${ABS_PATH} _contains_already) + if(${_contains_already} EQUAL -1) + list(APPEND _protobuf_include_path -I ${ABS_PATH}) + endif() + endforeach() + endif() + + set(${SRCS}) + set(${HDRS}) + foreach(FIL ${ARGN}) + get_filename_component(ABS_FIL ${FIL} ABSOLUTE) + get_filename_component(FIL_WE ${FIL} NAME_WE) + + list(APPEND ${SRCS} "${CMAKE_CURRENT_BINARY_DIR}/${FIL_WE}.pb.cc") + list(APPEND ${HDRS} "${CMAKE_CURRENT_BINARY_DIR}/${FIL_WE}.pb.h") + + add_custom_command( + OUTPUT "${CMAKE_CURRENT_BINARY_DIR}/${FIL_WE}.pb.cc" + "${CMAKE_CURRENT_BINARY_DIR}/${FIL_WE}.pb.h" + COMMAND ${PROTOBUF_PROTOC_EXECUTABLE} + ARGS --cpp_out ${CMAKE_CURRENT_BINARY_DIR} ${_protobuf_include_path} ${ABS_FIL} + DEPENDS ${ABS_FIL} ${PROTOBUF_PROTOC_EXECUTABLE} + COMMENT "Running C++ protocol buffer compiler on ${FIL}" + VERBATIM) + endforeach() + + set_source_files_properties(${${SRCS}} ${${HDRS}} PROPERTIES GENERATED TRUE) + set(${SRCS} ${${SRCS}} PARENT_SCOPE) + set(${HDRS} ${${HDRS}} PARENT_SCOPE) +endfunction() + +# Internal function: search for normal library as well as a debug one +# if the debug one is specified also include debug/optimized keywords +# in *_LIBRARIES variable +function(_protobuf_find_libraries name filename) + get_target_property(${name}_LIBRARY lib${filename} + IMPORTED_LOCATION_RELEASE) + set(${name}_LIBRARY "${${name}_LIBRARY}" PARENT_SCOPE) + get_target_property(${name}_LIBRARY_DEBUG lib${filename} + IMPORTED_LOCATION_DEBUG) + set(${name}_LIBRARY_DEBUG "${${name}_LIBRARY_DEBUG}" PARENT_SCOPE) + + if(NOT ${name}_LIBRARY_DEBUG) + # There is no debug library + set(${name}_LIBRARY_DEBUG ${${name}_LIBRARY} PARENT_SCOPE) + set(${name}_LIBRARIES ${${name}_LIBRARY} PARENT_SCOPE) + else() + # There IS a debug library + set(${name}_LIBRARIES + optimized ${${name}_LIBRARY} + debug ${${name}_LIBRARY_DEBUG} + PARENT_SCOPE + ) + endif() +endfunction() + +# Internal function: find threads library +function(_protobuf_find_threads) + set(CMAKE_THREAD_PREFER_PTHREAD TRUE) + find_package(Threads) + if(Threads_FOUND) + list(APPEND PROTOBUF_LIBRARIES ${CMAKE_THREAD_LIBS_INIT}) + 
set(PROTOBUF_LIBRARIES "${PROTOBUF_LIBRARIES}" PARENT_SCOPE) + endif() +endfunction() + +# +# Main. +# + +# By default have PROTOBUF_GENERATE_CPP macro pass -I to protoc +# for each directory where a proto file is referenced. +if(NOT DEFINED PROTOBUF_GENERATE_CPP_APPEND_PATH) + set(PROTOBUF_GENERATE_CPP_APPEND_PATH TRUE) +endif() + +# The Protobuf library +_protobuf_find_libraries(PROTOBUF protobuf) + +# The Protobuf Lite library +_protobuf_find_libraries(PROTOBUF_LITE protobuf-lite) + +# The Protobuf Protoc Library +_protobuf_find_libraries(PROTOBUF_PROTOC protoc) + +if(UNIX) + _protobuf_find_threads() +endif() + +# Set the include directory +set(PROTOBUF_INCLUDE_DIR "${_PROTOBUF_IMPORT_PREFIX}/@CMAKE_INSTALL_INCLUDEDIR@") + +# Set the protoc Executable +get_target_property(PROTOBUF_PROTOC_EXECUTABLE protoc + IMPORTED_LOCATION_RELEASE) +if(NOT PROTOBUF_PROTOC_EXECUTABLE) + get_target_property(PROTOBUF_PROTOC_EXECUTABLE protoc + IMPORTED_LOCATION_DEBUG) +endif() + +include(FindPackageHandleStandardArgs) +FIND_PACKAGE_HANDLE_STANDARD_ARGS(PROTOBUF DEFAULT_MSG + PROTOBUF_LIBRARY PROTOBUF_INCLUDE_DIR) + +if(PROTOBUF_FOUND) + set(PROTOBUF_INCLUDE_DIRS ${PROTOBUF_INCLUDE_DIR}) +endif() diff --git a/packager/third_party/protobuf/cmake/protoc.cmake b/packager/third_party/protobuf/cmake/protoc.cmake new file mode 100644 index 0000000000..4f07c389c9 --- /dev/null +++ b/packager/third_party/protobuf/cmake/protoc.cmake @@ -0,0 +1,6 @@ +set(protoc_files + ${protobuf_source_dir}/src/google/protobuf/compiler/main.cc +) + +add_executable(protoc ${protoc_files}) +target_link_libraries(protoc libprotobuf libprotoc) diff --git a/packager/third_party/protobuf/cmake/tests.cmake b/packager/third_party/protobuf/cmake/tests.cmake new file mode 100644 index 0000000000..76fdf8efdf --- /dev/null +++ b/packager/third_party/protobuf/cmake/tests.cmake @@ -0,0 +1,214 @@ +if (NOT EXISTS "${PROJECT_SOURCE_DIR}/../gmock/CMakeLists.txt") + message(FATAL_ERROR "Cannot find gmock directory.") +endif() + +option(protobuf_ABSOLUTE_TEST_PLUGIN_PATH + "Using absolute test_plugin path in tests" ON) + +include_directories( + ${protobuf_source_dir}/gmock + ${protobuf_source_dir}/gmock/gtest + ${protobuf_source_dir}/gmock/gtest/include + ${protobuf_source_dir}/gmock/include +) + +add_library(gmock STATIC + ${protobuf_source_dir}/gmock/src/gmock-all.cc + ${protobuf_source_dir}/gmock/gtest/src/gtest-all.cc +) +add_library(gmock_main STATIC ${protobuf_source_dir}/gmock/src/gmock_main.cc) +target_link_libraries(gmock_main gmock) + +set(lite_test_protos + google/protobuf/map_lite_unittest.proto + google/protobuf/unittest_import_lite.proto + google/protobuf/unittest_import_public_lite.proto + google/protobuf/unittest_lite.proto + google/protobuf/unittest_no_arena_lite.proto +) + +set(tests_protos + google/protobuf/any_test.proto + google/protobuf/compiler/cpp/cpp_test_bad_identifiers.proto + google/protobuf/compiler/cpp/cpp_test_large_enum_value.proto + google/protobuf/map_proto2_unittest.proto + google/protobuf/map_unittest.proto + google/protobuf/unittest.proto + google/protobuf/unittest_arena.proto + google/protobuf/unittest_custom_options.proto + google/protobuf/unittest_drop_unknown_fields.proto + google/protobuf/unittest_embed_optimize_for.proto + google/protobuf/unittest_empty.proto + google/protobuf/unittest_import.proto + google/protobuf/unittest_import_public.proto + google/protobuf/unittest_lite_imports_nonlite.proto + google/protobuf/unittest_mset.proto + google/protobuf/unittest_mset_wire_format.proto + 
google/protobuf/unittest_no_arena.proto + google/protobuf/unittest_no_arena_import.proto + google/protobuf/unittest_no_field_presence.proto + google/protobuf/unittest_no_generic_services.proto + google/protobuf/unittest_optimize_for.proto + google/protobuf/unittest_preserve_unknown_enum.proto + google/protobuf/unittest_preserve_unknown_enum2.proto + google/protobuf/unittest_proto3_arena.proto + google/protobuf/unittest_proto3_arena_lite.proto + google/protobuf/unittest_proto3_lite.proto + google/protobuf/unittest_well_known_types.proto + google/protobuf/util/internal/testdata/anys.proto + google/protobuf/util/internal/testdata/books.proto + google/protobuf/util/internal/testdata/default_value.proto + google/protobuf/util/internal/testdata/default_value_test.proto + google/protobuf/util/internal/testdata/field_mask.proto + google/protobuf/util/internal/testdata/maps.proto + google/protobuf/util/internal/testdata/oneofs.proto + google/protobuf/util/internal/testdata/struct.proto + google/protobuf/util/internal/testdata/timestamp_duration.proto + google/protobuf/util/json_format_proto3.proto + google/protobuf/util/message_differencer_unittest.proto +) + +macro(compile_proto_file filename) + get_filename_component(dirname ${filename} PATH) + get_filename_component(basename ${filename} NAME_WE) + add_custom_command( + OUTPUT ${protobuf_source_dir}/src/${dirname}/${basename}.pb.cc + DEPENDS protoc ${protobuf_source_dir}/src/${dirname}/${basename}.proto + COMMAND protoc ${protobuf_source_dir}/src/${dirname}/${basename}.proto + --proto_path=${protobuf_source_dir}/src + --cpp_out=${protobuf_source_dir}/src + ) +endmacro(compile_proto_file) + +set(lite_test_proto_files) +foreach(proto_file ${lite_test_protos}) + compile_proto_file(${proto_file}) + string(REPLACE .proto .pb.cc pb_file ${proto_file}) + set(lite_test_proto_files ${lite_test_proto_files} + ${protobuf_source_dir}/src/${pb_file}) +endforeach(proto_file) + +set(tests_proto_files) +foreach(proto_file ${tests_protos}) + compile_proto_file(${proto_file}) + string(REPLACE .proto .pb.cc pb_file ${proto_file}) + set(tests_proto_files ${tests_proto_files} + ${protobuf_source_dir}/src/${pb_file}) +endforeach(proto_file) + +set(common_test_files + ${protobuf_source_dir}/src/google/protobuf/arena_test_util.cc + ${protobuf_source_dir}/src/google/protobuf/map_test_util.cc + ${protobuf_source_dir}/src/google/protobuf/test_util.cc + ${protobuf_source_dir}/src/google/protobuf/testing/file.cc + ${protobuf_source_dir}/src/google/protobuf/testing/googletest.cc +) + +set(common_lite_test_files + ${protobuf_source_dir}/src/google/protobuf/arena_test_util.cc + ${protobuf_source_dir}/src/google/protobuf/map_lite_test_util.cc + ${protobuf_source_dir}/src/google/protobuf/test_util_lite.cc +) + +set(tests_files + ${protobuf_source_dir}/src/google/protobuf/any_test.cc + ${protobuf_source_dir}/src/google/protobuf/arena_unittest.cc + ${protobuf_source_dir}/src/google/protobuf/arenastring_unittest.cc + ${protobuf_source_dir}/src/google/protobuf/compiler/command_line_interface_unittest.cc + ${protobuf_source_dir}/src/google/protobuf/compiler/cpp/cpp_bootstrap_unittest.cc + ${protobuf_source_dir}/src/google/protobuf/compiler/cpp/cpp_plugin_unittest.cc + ${protobuf_source_dir}/src/google/protobuf/compiler/cpp/cpp_unittest.cc + ${protobuf_source_dir}/src/google/protobuf/compiler/cpp/metadata_test.cc + ${protobuf_source_dir}/src/google/protobuf/compiler/csharp/csharp_generator_unittest.cc + ${protobuf_source_dir}/src/google/protobuf/compiler/importer_unittest.cc + 
${protobuf_source_dir}/src/google/protobuf/compiler/java/java_doc_comment_unittest.cc + ${protobuf_source_dir}/src/google/protobuf/compiler/java/java_plugin_unittest.cc + ${protobuf_source_dir}/src/google/protobuf/compiler/mock_code_generator.cc + ${protobuf_source_dir}/src/google/protobuf/compiler/objectivec/objectivec_helpers_unittest.cc + ${protobuf_source_dir}/src/google/protobuf/compiler/parser_unittest.cc + ${protobuf_source_dir}/src/google/protobuf/compiler/python/python_plugin_unittest.cc + ${protobuf_source_dir}/src/google/protobuf/compiler/ruby/ruby_generator_unittest.cc + ${protobuf_source_dir}/src/google/protobuf/descriptor_database_unittest.cc + ${protobuf_source_dir}/src/google/protobuf/descriptor_unittest.cc + ${protobuf_source_dir}/src/google/protobuf/drop_unknown_fields_test.cc + ${protobuf_source_dir}/src/google/protobuf/dynamic_message_unittest.cc + ${protobuf_source_dir}/src/google/protobuf/extension_set_unittest.cc + ${protobuf_source_dir}/src/google/protobuf/generated_message_reflection_unittest.cc + ${protobuf_source_dir}/src/google/protobuf/io/coded_stream_unittest.cc + ${protobuf_source_dir}/src/google/protobuf/io/printer_unittest.cc + ${protobuf_source_dir}/src/google/protobuf/io/tokenizer_unittest.cc + ${protobuf_source_dir}/src/google/protobuf/io/zero_copy_stream_unittest.cc + ${protobuf_source_dir}/src/google/protobuf/map_field_test.cc + ${protobuf_source_dir}/src/google/protobuf/map_test.cc + ${protobuf_source_dir}/src/google/protobuf/message_unittest.cc + ${protobuf_source_dir}/src/google/protobuf/no_field_presence_test.cc + ${protobuf_source_dir}/src/google/protobuf/preserve_unknown_enum_test.cc + ${protobuf_source_dir}/src/google/protobuf/proto3_arena_lite_unittest.cc + ${protobuf_source_dir}/src/google/protobuf/proto3_arena_unittest.cc + ${protobuf_source_dir}/src/google/protobuf/proto3_lite_unittest.cc + ${protobuf_source_dir}/src/google/protobuf/reflection_ops_unittest.cc + ${protobuf_source_dir}/src/google/protobuf/repeated_field_reflection_unittest.cc + ${protobuf_source_dir}/src/google/protobuf/repeated_field_unittest.cc + ${protobuf_source_dir}/src/google/protobuf/stubs/bytestream_unittest.cc + ${protobuf_source_dir}/src/google/protobuf/stubs/common_unittest.cc + ${protobuf_source_dir}/src/google/protobuf/stubs/int128_unittest.cc + ${protobuf_source_dir}/src/google/protobuf/stubs/once_unittest.cc + ${protobuf_source_dir}/src/google/protobuf/stubs/status_test.cc + ${protobuf_source_dir}/src/google/protobuf/stubs/statusor_test.cc + ${protobuf_source_dir}/src/google/protobuf/stubs/stringpiece_unittest.cc + ${protobuf_source_dir}/src/google/protobuf/stubs/stringprintf_unittest.cc + ${protobuf_source_dir}/src/google/protobuf/stubs/structurally_valid_unittest.cc + ${protobuf_source_dir}/src/google/protobuf/stubs/strutil_unittest.cc + ${protobuf_source_dir}/src/google/protobuf/stubs/template_util_unittest.cc + ${protobuf_source_dir}/src/google/protobuf/stubs/time_test.cc + ${protobuf_source_dir}/src/google/protobuf/stubs/type_traits_unittest.cc + ${protobuf_source_dir}/src/google/protobuf/text_format_unittest.cc + ${protobuf_source_dir}/src/google/protobuf/unknown_field_set_unittest.cc + ${protobuf_source_dir}/src/google/protobuf/util/field_comparator_test.cc + ${protobuf_source_dir}/src/google/protobuf/util/field_mask_util_test.cc + ${protobuf_source_dir}/src/google/protobuf/util/internal/default_value_objectwriter_test.cc + ${protobuf_source_dir}/src/google/protobuf/util/internal/json_objectwriter_test.cc + 
${protobuf_source_dir}/src/google/protobuf/util/internal/json_stream_parser_test.cc + ${protobuf_source_dir}/src/google/protobuf/util/internal/protostream_objectsource_test.cc + ${protobuf_source_dir}/src/google/protobuf/util/internal/protostream_objectwriter_test.cc + ${protobuf_source_dir}/src/google/protobuf/util/internal/type_info_test_helper.cc + ${protobuf_source_dir}/src/google/protobuf/util/json_util_test.cc + ${protobuf_source_dir}/src/google/protobuf/util/message_differencer_unittest.cc + ${protobuf_source_dir}/src/google/protobuf/util/time_util_test.cc + ${protobuf_source_dir}/src/google/protobuf/util/type_resolver_util_test.cc + ${protobuf_source_dir}/src/google/protobuf/well_known_types_unittest.cc + ${protobuf_source_dir}/src/google/protobuf/wire_format_unittest.cc +) + +if(protobuf_ABSOLUTE_TEST_PLUGIN_PATH) + add_compile_options(-DGOOGLE_PROTOBUF_TEST_PLUGIN_PATH="$") +endif() + +add_executable(tests ${tests_files} ${common_test_files} ${tests_proto_files} ${lite_test_proto_files}) +target_link_libraries(tests libprotoc libprotobuf gmock_main) + +set(test_plugin_files + ${protobuf_source_dir}/src/google/protobuf/compiler/mock_code_generator.cc + ${protobuf_source_dir}/src/google/protobuf/testing/file.cc + ${protobuf_source_dir}/src/google/protobuf/testing/file.h + ${protobuf_source_dir}/src/google/protobuf/compiler/test_plugin.cc +) + +add_executable(test_plugin ${test_plugin_files}) +target_link_libraries(test_plugin libprotoc libprotobuf gmock) + +set(lite_test_files + ${protobuf_source_dir}/src/google/protobuf/lite_unittest.cc +) +add_executable(lite-test ${lite_test_files} ${common_lite_test_files} ${lite_test_proto_files}) +target_link_libraries(lite-test libprotobuf-lite) + +set(lite_arena_test_files + ${protobuf_source_dir}/src/google/protobuf/lite_arena_unittest.cc +) +add_executable(lite-arena-test ${lite_arena_test_files} ${common_lite_test_files} ${lite_test_proto_files}) +target_link_libraries(lite-arena-test libprotobuf-lite gmock_main) + +add_custom_target(check + COMMAND tests + WORKING_DIRECTORY ${protobuf_source_dir}) diff --git a/packager/third_party/protobuf/configure.ac b/packager/third_party/protobuf/configure.ac new file mode 100644 index 0000000000..bdc72ee696 --- /dev/null +++ b/packager/third_party/protobuf/configure.ac @@ -0,0 +1,184 @@ +## Process this file with autoconf to produce configure. +## In general, the safest way to proceed is to run ./autogen.sh + +AC_PREREQ(2.59) + +# Note: If you change the version, you must also update it in: +# * java/pom.xml +# * python/setup.py +# * src/google/protobuf/stubs/common.h +# * src/Makefile.am (Update -version-info for LDFLAGS if needed) +# +# In the SVN trunk, the version should always be the next anticipated release +# version with the "-pre" suffix. (We used to use "-SNAPSHOT" but this pushed +# the size of one file name in the dist tarfile over the 99-char limit.) +AC_INIT([Protocol Buffers],[3.0.0-beta-3],[protobuf@googlegroups.com],[protobuf]) + +AM_MAINTAINER_MODE([enable]) + +AC_CONFIG_SRCDIR(src/google/protobuf/message.cc) +# The config file is generated but not used by the source code, since we only +# need very few of them, e.g. HAVE_PTHREAD and HAVE_ZLIB. 
Those macros are +# passed down in CXXFLAGS manually in src/Makefile.am +AC_CONFIG_HEADERS([config.h]) +AC_CONFIG_MACRO_DIR([m4]) + +AC_ARG_VAR(DIST_LANG, [language to include in the distribution package (i.e., make dist)]) +case "$DIST_LANG" in + "") DIST_LANG=all ;; + all | cpp | csharp | java | python | javanano | objectivec | ruby | js) ;; + *) AC_MSG_FAILURE([unknown language: $DIST_LANG]) ;; +esac +AC_SUBST(DIST_LANG) + +# autoconf's default CXXFLAGS are usually "-g -O2". These aren't necessarily +# the best choice for libprotobuf. +AS_IF([test "x${ac_cv_env_CFLAGS_set}" = "x"], + [CFLAGS=""]) +AS_IF([test "x${ac_cv_env_CXXFLAGS_set}" = "x"], + [CXXFLAGS=""]) + +AC_CANONICAL_TARGET + +AM_INIT_AUTOMAKE([1.9 tar-ustar subdir-objects]) + +AC_ARG_WITH([zlib], + [AS_HELP_STRING([--with-zlib], + [include classes for streaming compressed data in and out @<:@default=check@:>@])], + [],[with_zlib=check]) + +AC_ARG_WITH([protoc], + [AS_HELP_STRING([--with-protoc=COMMAND], + [use the given protoc command instead of building a new one when building tests (useful for cross-compiling)])], + [],[with_protoc=no]) + +# Checks for programs. +AC_PROG_CC +AC_PROG_CXX +AC_LANG([C++]) +ACX_USE_SYSTEM_EXTENSIONS +m4_ifdef([AM_PROG_AR], [AM_PROG_AR]) +AM_CONDITIONAL(GCC, test "$GCC" = yes) # let the Makefile know if we're gcc +AC_PROG_OBJC + +# test_util.cc takes forever to compile with GCC and optimization turned on. +AC_MSG_CHECKING([C++ compiler flags...]) +AS_IF([test "x${ac_cv_env_CXXFLAGS_set}" = "x"],[ + AS_IF([test "$GCC" = "yes"],[ + PROTOBUF_OPT_FLAG="-O2" + CXXFLAGS="${CXXFLAGS} -g" + ]) + + # Protocol Buffers contains several checks that are intended to be used only + # for debugging and which might hurt performance. Most users are probably + # end users who don't want these checks, so add -DNDEBUG by default. + CXXFLAGS="$CXXFLAGS -DNDEBUG" + + AC_MSG_RESULT([use default: $PROTOBUF_OPT_FLAG $CXXFLAGS]) +],[ + AC_MSG_RESULT([use user-supplied: $CXXFLAGS]) +]) + +AC_SUBST(PROTOBUF_OPT_FLAG) + +ACX_CHECK_SUNCC + +# Have to do libtool after SUNCC, other wise it "helpfully" adds Crun Cstd +# to the link +AC_PROG_LIBTOOL + +# Checks for header files. +AC_HEADER_STDC +AC_CHECK_HEADERS([fcntl.h inttypes.h limits.h stdlib.h unistd.h]) + +# Checks for library functions. +AC_FUNC_MEMCMP +AC_FUNC_STRTOD +AC_CHECK_FUNCS([ftruncate memset mkdir strchr strerror strtol]) + +# Check for zlib. +HAVE_ZLIB=0 +AS_IF([test "$with_zlib" != no], [ + AC_MSG_CHECKING([zlib version]) + + # First check the zlib header version. + AC_COMPILE_IFELSE( + [AC_LANG_PROGRAM([[ + #include + #if !defined(ZLIB_VERNUM) || (ZLIB_VERNUM < 0x1204) + # error zlib version too old + #endif + ]], [])], [ + AC_MSG_RESULT([ok (1.2.0.4 or later)]) + + # Also need to add -lz to the linker flags and make sure this succeeds. + AC_SEARCH_LIBS([zlibVersion], [z], [ + AC_DEFINE([HAVE_ZLIB], [1], [Enable classes using zlib compression.]) + HAVE_ZLIB=1 + ], [ + AS_IF([test "$with_zlib" != check], [ + AC_MSG_FAILURE([--with-zlib was given, but no working zlib library was found]) + ]) + ]) + ], [ + AS_IF([test "$with_zlib" = check], [ + AC_MSG_RESULT([headers missing or too old (requires 1.2.0.4)]) + ], [ + AC_MSG_FAILURE([--with-zlib was given, but zlib headers were not present or were too old (requires 1.2.0.4)]) + ]) + ]) +]) +AM_CONDITIONAL([HAVE_ZLIB], [test $HAVE_ZLIB = 1]) + +AS_IF([test "$with_protoc" != "no"], [ + PROTOC=$with_protoc + AS_IF([test "$with_protoc" = "yes"], [ + # No argument given. Use system protoc. 
+ PROTOC=protoc + ]) + AS_IF([echo "$PROTOC" | grep -q '^@<:@^/@:>@.*/'], [ + # Does not start with a slash, but contains a slash. So, it's a relative + # path (as opposed to an absolute path or an executable in $PATH). + # Since it will actually be executed from the src directory, prefix with + # the current directory. We also insert $ac_top_build_prefix in case this + # is a nested package and --with-protoc was actually given on the outer + # package's configure script. + PROTOC=`pwd`/${ac_top_build_prefix}$PROTOC + ]) + AC_SUBST([PROTOC]) +]) +AM_CONDITIONAL([USE_EXTERNAL_PROTOC], [test "$with_protoc" != "no"]) + +ACX_PTHREAD +AM_CONDITIONAL([HAVE_PTHREAD], [test "x$acx_pthread_ok" = "xyes"]) + +# We still keep this for improving pbconfig.h for unsupported platforms. +AC_CXX_STL_HASH + +case "$target_os" in + mingw* | cygwin* | win*) + ;; + *) + # Need to link against rt on Solaris + AC_SEARCH_LIBS([sched_yield], [rt], [], [AC_MSG_FAILURE([sched_yield was not found on your system])]) + ;; +esac + +# Enable ObjC support for conformance directory on OS X. +OBJC_CONFORMANCE_TEST=0 +case "$target_os" in + darwin*) + OBJC_CONFORMANCE_TEST=1 + ;; +esac +AM_CONDITIONAL([OBJC_CONFORMANCE_TEST], [test $OBJC_CONFORMANCE_TEST = 1]) + +# HACK: Make gmock's configure script pick up our copy of CFLAGS and CXXFLAGS, +# since the flags added by ACX_CHECK_SUNCC must be used when compiling gmock +# too. +export CFLAGS +export CXXFLAGS +AC_CONFIG_SUBDIRS([gmock]) + +AC_CONFIG_FILES([Makefile src/Makefile benchmarks/Makefile conformance/Makefile protobuf.pc protobuf-lite.pc]) +AC_OUTPUT diff --git a/packager/third_party/protobuf/conformance/ConformanceJava.java b/packager/third_party/protobuf/conformance/ConformanceJava.java new file mode 100644 index 0000000000..43787ffcb2 --- /dev/null +++ b/packager/third_party/protobuf/conformance/ConformanceJava.java @@ -0,0 +1,142 @@ + +import com.google.protobuf.conformance.Conformance; +import com.google.protobuf.util.JsonFormat; +import com.google.protobuf.util.JsonFormat.TypeRegistry; +import com.google.protobuf.InvalidProtocolBufferException; + +class ConformanceJava { + private int testCount = 0; + private TypeRegistry typeRegistry; + + private boolean readFromStdin(byte[] buf, int len) throws Exception { + int ofs = 0; + while (len > 0) { + int read = System.in.read(buf, ofs, len); + if (read == -1) { + return false; // EOF + } + ofs += read; + len -= read; + } + + return true; + } + + private void writeToStdout(byte[] buf) throws Exception { + System.out.write(buf); + } + + // Returns -1 on EOF (the actual values will always be positive). 
+ private int readLittleEndianIntFromStdin() throws Exception { + byte[] buf = new byte[4]; + if (!readFromStdin(buf, 4)) { + return -1; + } + return (buf[0] & 0xff) + | ((buf[1] & 0xff) << 8) + | ((buf[2] & 0xff) << 16) + | ((buf[3] & 0xff) << 24); + } + + private void writeLittleEndianIntToStdout(int val) throws Exception { + byte[] buf = new byte[4]; + buf[0] = (byte)val; + buf[1] = (byte)(val >> 8); + buf[2] = (byte)(val >> 16); + buf[3] = (byte)(val >> 24); + writeToStdout(buf); + } + + private Conformance.ConformanceResponse doTest(Conformance.ConformanceRequest request) { + Conformance.TestAllTypes testMessage; + + switch (request.getPayloadCase()) { + case PROTOBUF_PAYLOAD: { + try { + testMessage = Conformance.TestAllTypes.parseFrom(request.getProtobufPayload()); + } catch (InvalidProtocolBufferException e) { + return Conformance.ConformanceResponse.newBuilder().setParseError(e.getMessage()).build(); + } + break; + } + case JSON_PAYLOAD: { + try { + Conformance.TestAllTypes.Builder builder = Conformance.TestAllTypes.newBuilder(); + JsonFormat.parser().usingTypeRegistry(typeRegistry) + .merge(request.getJsonPayload(), builder); + testMessage = builder.build(); + } catch (InvalidProtocolBufferException e) { + return Conformance.ConformanceResponse.newBuilder().setParseError(e.getMessage()).build(); + } + break; + } + case PAYLOAD_NOT_SET: { + throw new RuntimeException("Request didn't have payload."); + } + + default: { + throw new RuntimeException("Unexpected payload case."); + } + } + + switch (request.getRequestedOutputFormat()) { + case UNSPECIFIED: + throw new RuntimeException("Unspecified output format."); + + case PROTOBUF: + return Conformance.ConformanceResponse.newBuilder().setProtobufPayload(testMessage.toByteString()).build(); + + case JSON: + try { + return Conformance.ConformanceResponse.newBuilder().setJsonPayload( + JsonFormat.printer().usingTypeRegistry(typeRegistry).print(testMessage)).build(); + } catch (InvalidProtocolBufferException | IllegalArgumentException e) { + return Conformance.ConformanceResponse.newBuilder().setSerializeError( + e.getMessage()).build(); + } + + default: { + throw new RuntimeException("Unexpected request output."); + } + } + } + + private boolean doTestIo() throws Exception { + int bytes = readLittleEndianIntFromStdin(); + + if (bytes == -1) { + return false; // EOF + } + + byte[] serializedInput = new byte[bytes]; + + if (!readFromStdin(serializedInput, bytes)) { + throw new RuntimeException("Unexpected EOF from test program."); + } + + Conformance.ConformanceRequest request = + Conformance.ConformanceRequest.parseFrom(serializedInput); + Conformance.ConformanceResponse response = doTest(request); + byte[] serializedOutput = response.toByteArray(); + + writeLittleEndianIntToStdout(serializedOutput.length); + writeToStdout(serializedOutput); + + return true; + } + + public void run() throws Exception { + typeRegistry = TypeRegistry.newBuilder().add( + Conformance.TestAllTypes.getDescriptor()).build(); + while (doTestIo()) { + this.testCount++; + } + + System.err.println("ConformanceJava: received EOF from test runner after " + + this.testCount + " tests"); + } + + public static void main(String[] args) throws Exception { + new ConformanceJava().run(); + } +} diff --git a/packager/third_party/protobuf/conformance/ConformanceJavaLite.java b/packager/third_party/protobuf/conformance/ConformanceJavaLite.java new file mode 100644 index 0000000000..121dc7d1b6 --- /dev/null +++ 
b/packager/third_party/protobuf/conformance/ConformanceJavaLite.java @@ -0,0 +1,125 @@ + +import com.google.protobuf.conformance.Conformance; +import com.google.protobuf.InvalidProtocolBufferException; + +class ConformanceJavaLite { + private int testCount = 0; + + private boolean readFromStdin(byte[] buf, int len) throws Exception { + int ofs = 0; + while (len > 0) { + int read = System.in.read(buf, ofs, len); + if (read == -1) { + return false; // EOF + } + ofs += read; + len -= read; + } + + return true; + } + + private void writeToStdout(byte[] buf) throws Exception { + System.out.write(buf); + } + + // Returns -1 on EOF (the actual values will always be positive). + private int readLittleEndianIntFromStdin() throws Exception { + byte[] buf = new byte[4]; + if (!readFromStdin(buf, 4)) { + return -1; + } + return (buf[0] & 0xff) + | ((buf[1] & 0xff) << 8) + | ((buf[2] & 0xff) << 16) + | ((buf[3] & 0xff) << 24); + } + + private void writeLittleEndianIntToStdout(int val) throws Exception { + byte[] buf = new byte[4]; + buf[0] = (byte)val; + buf[1] = (byte)(val >> 8); + buf[2] = (byte)(val >> 16); + buf[3] = (byte)(val >> 24); + writeToStdout(buf); + } + + private Conformance.ConformanceResponse doTest(Conformance.ConformanceRequest request) { + Conformance.TestAllTypes testMessage; + + switch (request.getPayloadCase()) { + case PROTOBUF_PAYLOAD: { + try { + testMessage = Conformance.TestAllTypes.parseFrom(request.getProtobufPayload()); + } catch (InvalidProtocolBufferException e) { + return Conformance.ConformanceResponse.newBuilder().setParseError(e.getMessage()).build(); + } + break; + } + case JSON_PAYLOAD: { + return Conformance.ConformanceResponse.newBuilder().setSkipped( + "Lite runtime does not suport Json Formant.").build(); + } + case PAYLOAD_NOT_SET: { + throw new RuntimeException("Request didn't have payload."); + } + + default: { + throw new RuntimeException("Unexpected payload case."); + } + } + + switch (request.getRequestedOutputFormat()) { + case UNSPECIFIED: + throw new RuntimeException("Unspecified output format."); + + case PROTOBUF: + return Conformance.ConformanceResponse.newBuilder().setProtobufPayload(testMessage.toByteString()).build(); + + case JSON: + return Conformance.ConformanceResponse.newBuilder().setSkipped( + "Lite runtime does not suport Json Formant.").build(); + + default: { + throw new RuntimeException("Unexpected request output."); + } + } + } + + private boolean doTestIo() throws Exception { + int bytes = readLittleEndianIntFromStdin(); + + if (bytes == -1) { + return false; // EOF + } + + byte[] serializedInput = new byte[bytes]; + + if (!readFromStdin(serializedInput, bytes)) { + throw new RuntimeException("Unexpected EOF from test program."); + } + + Conformance.ConformanceRequest request = + Conformance.ConformanceRequest.parseFrom(serializedInput); + Conformance.ConformanceResponse response = doTest(request); + byte[] serializedOutput = response.toByteArray(); + + writeLittleEndianIntToStdout(serializedOutput.length); + writeToStdout(serializedOutput); + + return true; + } + + public void run() throws Exception { + while (doTestIo()) { + this.testCount++; + } + + System.err.println("ConformanceJavaLite: received EOF from test runner after " + + this.testCount + " tests"); + } + + public static void main(String[] args) throws Exception { + new ConformanceJavaLite().run(); + } +} diff --git a/packager/third_party/protobuf/conformance/Makefile.am b/packager/third_party/protobuf/conformance/Makefile.am new file mode 100644 index 
0000000000..31a9e40868 --- /dev/null +++ b/packager/third_party/protobuf/conformance/Makefile.am @@ -0,0 +1,281 @@ +## Process this file with automake to produce Makefile.in + +conformance_protoc_inputs = \ + conformance.proto + +well_known_type_protoc_inputs = \ + $(top_srcdir)/src/google/protobuf/any.proto \ + $(top_srcdir)/src/google/protobuf/duration.proto \ + $(top_srcdir)/src/google/protobuf/field_mask.proto \ + $(top_srcdir)/src/google/protobuf/struct.proto \ + $(top_srcdir)/src/google/protobuf/timestamp.proto \ + $(top_srcdir)/src/google/protobuf/wrappers.proto + + +protoc_outputs = \ + conformance.pb.cc \ + conformance.pb.h + +other_language_protoc_outputs = \ + conformance_pb2.py \ + Conformance.pbobjc.h \ + Conformance.pbobjc.m \ + conformance.rb \ + com/google/protobuf/Any.java \ + com/google/protobuf/AnyOrBuilder.java \ + com/google/protobuf/AnyProto.java \ + com/google/protobuf/BoolValue.java \ + com/google/protobuf/BoolValueOrBuilder.java \ + com/google/protobuf/BytesValue.java \ + com/google/protobuf/BytesValueOrBuilder.java \ + com/google/protobuf/conformance/Conformance.java \ + com/google/protobuf/DoubleValue.java \ + com/google/protobuf/DoubleValueOrBuilder.java \ + com/google/protobuf/Duration.java \ + com/google/protobuf/DurationOrBuilder.java \ + com/google/protobuf/DurationProto.java \ + com/google/protobuf/FieldMask.java \ + com/google/protobuf/FieldMaskOrBuilder.java \ + com/google/protobuf/FieldMaskProto.java \ + com/google/protobuf/FloatValue.java \ + com/google/protobuf/FloatValueOrBuilder.java \ + com/google/protobuf/Int32Value.java \ + com/google/protobuf/Int32ValueOrBuilder.java \ + com/google/protobuf/Int64Value.java \ + com/google/protobuf/Int64ValueOrBuilder.java \ + com/google/protobuf/ListValue.java \ + com/google/protobuf/ListValueOrBuilder.java \ + com/google/protobuf/NullValue.java \ + com/google/protobuf/StringValue.java \ + com/google/protobuf/StringValueOrBuilder.java \ + com/google/protobuf/Struct.java \ + com/google/protobuf/StructOrBuilder.java \ + com/google/protobuf/StructProto.java \ + com/google/protobuf/Timestamp.java \ + com/google/protobuf/TimestampOrBuilder.java \ + com/google/protobuf/TimestampProto.java \ + com/google/protobuf/UInt32Value.java \ + com/google/protobuf/UInt32ValueOrBuilder.java \ + com/google/protobuf/UInt64Value.java \ + com/google/protobuf/UInt64ValueOrBuilder.java \ + com/google/protobuf/Value.java \ + com/google/protobuf/ValueOrBuilder.java \ + com/google/protobuf/WrappersProto.java \ + google/protobuf/any.pb.cc \ + google/protobuf/any.pb.h \ + google/protobuf/any.rb \ + google/protobuf/any_pb2.py \ + google/protobuf/duration.pb.cc \ + google/protobuf/duration.pb.h \ + google/protobuf/duration.rb \ + google/protobuf/duration_pb2.py \ + google/protobuf/field_mask.pb.cc \ + google/protobuf/field_mask.pb.h \ + google/protobuf/field_mask.rb \ + google/protobuf/field_mask_pb2.py \ + google/protobuf/struct.pb.cc \ + google/protobuf/struct.pb.h \ + google/protobuf/struct.rb \ + google/protobuf/struct_pb2.py \ + google/protobuf/timestamp.pb.cc \ + google/protobuf/timestamp.pb.h \ + google/protobuf/timestamp.rb \ + google/protobuf/timestamp_pb2.py \ + google/protobuf/wrappers.pb.cc \ + google/protobuf/wrappers.pb.h \ + google/protobuf/wrappers.rb \ + google/protobuf/wrappers_pb2.py \ + lite/com/google/protobuf/Any.java \ + lite/com/google/protobuf/AnyOrBuilder.java \ + lite/com/google/protobuf/AnyProto.java \ + lite/com/google/protobuf/BoolValue.java \ + lite/com/google/protobuf/BoolValueOrBuilder.java \ + 
lite/com/google/protobuf/BytesValue.java \ + lite/com/google/protobuf/BytesValueOrBuilder.java \ + lite/com/google/protobuf/conformance/Conformance.java \ + lite/com/google/protobuf/DoubleValue.java \ + lite/com/google/protobuf/DoubleValueOrBuilder.java \ + lite/com/google/protobuf/Duration.java \ + lite/com/google/protobuf/DurationOrBuilder.java \ + lite/com/google/protobuf/DurationProto.java \ + lite/com/google/protobuf/FieldMask.java \ + lite/com/google/protobuf/FieldMaskOrBuilder.java \ + lite/com/google/protobuf/FieldMaskProto.java \ + lite/com/google/protobuf/FloatValue.java \ + lite/com/google/protobuf/FloatValueOrBuilder.java \ + lite/com/google/protobuf/Int32Value.java \ + lite/com/google/protobuf/Int32ValueOrBuilder.java \ + lite/com/google/protobuf/Int64Value.java \ + lite/com/google/protobuf/Int64ValueOrBuilder.java \ + lite/com/google/protobuf/ListValue.java \ + lite/com/google/protobuf/ListValueOrBuilder.java \ + lite/com/google/protobuf/NullValue.java \ + lite/com/google/protobuf/StringValue.java \ + lite/com/google/protobuf/StringValueOrBuilder.java \ + lite/com/google/protobuf/Struct.java \ + lite/com/google/protobuf/StructOrBuilder.java \ + lite/com/google/protobuf/StructProto.java \ + lite/com/google/protobuf/Timestamp.java \ + lite/com/google/protobuf/TimestampOrBuilder.java \ + lite/com/google/protobuf/TimestampProto.java \ + lite/com/google/protobuf/UInt32Value.java \ + lite/com/google/protobuf/UInt32ValueOrBuilder.java \ + lite/com/google/protobuf/UInt64Value.java \ + lite/com/google/protobuf/UInt64ValueOrBuilder.java \ + lite/com/google/protobuf/Value.java \ + lite/com/google/protobuf/ValueOrBuilder.java \ + lite/com/google/protobuf/WrappersProto.java + +bin_PROGRAMS = conformance-test-runner conformance-cpp + +# All source files excepet C++/Objective-C ones should be explicitly listed +# here because the autoconf tools don't include files of other languages +# automatically. +EXTRA_DIST = \ + ConformanceJava.java \ + ConformanceJavaLite.java \ + README.md \ + conformance.proto \ + conformance_python.py \ + conformance_ruby.rb \ + failure_list_cpp.txt \ + failure_list_csharp.txt \ + failure_list_java.txt \ + failure_list_objc.txt \ + failure_list_python.txt \ + failure_list_python_cpp.txt \ + failure_list_python-post26.txt \ + failure_list_ruby.txt + +conformance_test_runner_LDADD = $(top_srcdir)/src/libprotobuf.la +conformance_test_runner_SOURCES = conformance_test.h conformance_test.cc \ + conformance_test_runner.cc \ + third_party/jsoncpp/json.h \ + third_party/jsoncpp/jsoncpp.cpp +nodist_conformance_test_runner_SOURCES = conformance.pb.cc +conformance_test_runner_CPPFLAGS = -I$(top_srcdir)/src -I$(srcdir) +conformance_test_runner_CXXFLAGS = -std=c++11 +# Explicit deps beacuse BUILT_SOURCES are only done before a "make all/check" +# so a direct "make test_cpp" could fail if parallel enough. +conformance_test_runner-conformance_test.$(OBJEXT): conformance.pb.h +conformance_test_runner-conformance_test_runner.$(OBJEXT): conformance.pb.h + +conformance_cpp_LDADD = $(top_srcdir)/src/libprotobuf.la +conformance_cpp_SOURCES = conformance_cpp.cc +nodist_conformance_cpp_SOURCES = conformance.pb.cc +conformance_cpp_CPPFLAGS = -I$(top_srcdir)/src +# Explicit dep beacuse BUILT_SOURCES are only done before a "make all/check" +# so a direct "make test_cpp" could fail if parallel enough. 
+conformance_cpp-conformance_cpp.$(OBJEXT): conformance.pb.h + +if OBJC_CONFORMANCE_TEST + +bin_PROGRAMS += conformance-objc + +conformance_objc_SOURCES = conformance_objc.m ../objectivec/GPBProtocolBuffers.m +nodist_conformance_objc_SOURCES = Conformance.pbobjc.m +# On travis, the build fails without the isysroot because whatever system +# headers are being found don't include generics support for +# NSArray/NSDictionary, the only guess is their image at one time had an odd +# setup for Xcode and old frameworks are being found. +conformance_objc_CPPFLAGS = -I$(top_srcdir)/objectivec -isysroot `xcrun --sdk macosx --show-sdk-path` +conformance_objc_LDFLAGS = -framework Foundation +# Explicit dep beacuse BUILT_SOURCES are only done before a "make all/check" +# so a direct "make test_objc" could fail if parallel enough. +conformance_objc-conformance_objc.$(OBJEXT): Conformance.pbobjc.h + +endif + +if USE_EXTERNAL_PROTOC + +# Some implementations include pre-generated versions of well-known types. +protoc_middleman: $(conformance_protoc_inputs) $(well_known_type_protoc_inputs) + $(PROTOC) -I$(srcdir) -I$(top_srcdir) --cpp_out=. --java_out=. --ruby_out=. --objc_out=. --python_out=. $(conformance_protoc_inputs) + $(PROTOC) -I$(srcdir) -I$(top_srcdir) --cpp_out=. --java_out=. --ruby_out=. --python_out=. $(well_known_type_protoc_inputs) + $(PROTOC) -I$(srcdir) -I$(top_srcdir) --java_out=lite:lite $(conformance_protoc_inputs) $(well_known_type_protoc_inputs) + touch protoc_middleman + +else + +# We have to cd to $(srcdir) before executing protoc because $(protoc_inputs) is +# relative to srcdir, which may not be the same as the current directory when +# building out-of-tree. +protoc_middleman: $(top_srcdir)/src/protoc$(EXEEXT) $(conformance_protoc_inputs) $(well_known_type_protoc_inputs) + oldpwd=`pwd` && ( cd $(srcdir) && $$oldpwd/../src/protoc$(EXEEXT) -I. -I$(top_srcdir)/src --cpp_out=$$oldpwd --java_out=$$oldpwd --ruby_out=$$oldpwd --objc_out=$$oldpwd --python_out=$$oldpwd $(conformance_protoc_inputs) ) + oldpwd=`pwd` && ( cd $(srcdir) && $$oldpwd/../src/protoc$(EXEEXT) -I. -I$(top_srcdir)/src --cpp_out=$$oldpwd --java_out=$$oldpwd --ruby_out=$$oldpwd --python_out=$$oldpwd $(well_known_type_protoc_inputs) ) + @mkdir -p lite + oldpwd=`pwd` && ( cd $(srcdir) && $$oldpwd/../src/protoc$(EXEEXT) -I. -I$(top_srcdir)/src --java_out=lite:$$oldpwd/lite $(conformance_protoc_inputs) $(well_known_type_protoc_inputs) ) + touch protoc_middleman + +endif + +$(protoc_outputs): protoc_middleman + +$(other_language_protoc_outputs): protoc_middleman + +BUILT_SOURCES = $(protoc_outputs) $(other_language_protoc_outputs) + +CLEANFILES = $(protoc_outputs) protoc_middleman javac_middleman conformance-java javac_middleman_lite conformance-java-lite conformance-csharp $(other_language_protoc_outputs) + +MAINTAINERCLEANFILES = \ + Makefile.in + +javac_middleman: ConformanceJava.java protoc_middleman $(other_language_protoc_outputs) + jar=`ls ../java/util/target/*jar-with-dependencies.jar` && javac -classpath ../java/target/classes:$$jar ConformanceJava.java com/google/protobuf/conformance/Conformance.java + @touch javac_middleman + +conformance-java: javac_middleman + @echo "Writing shortcut script conformance-java..." + @echo '#! 
/bin/sh' > conformance-java + @jar=`ls ../java/util/target/*jar-with-dependencies.jar` && echo java -classpath .:../java/target/classes:$$jar ConformanceJava '$$@' >> conformance-java + @chmod +x conformance-java + +javac_middleman_lite: ConformanceJavaLite.java protoc_middleman $(other_language_protoc_outputs) + javac -classpath ../java/lite/target/classes:lite ConformanceJavaLite.java lite/com/google/protobuf/conformance/Conformance.java + @touch javac_middleman_lite + +conformance-java-lite: javac_middleman_lite + @echo "Writing shortcut script conformance-java-lite..." + @echo '#! /bin/sh' > conformance-java-lite + @echo java -classpath .:../java/lite/target/classes:lite ConformanceJavaLite '$$@' >> conformance-java-lite + @chmod +x conformance-java-lite + +# Currently the conformance code is alongside the rest of the C# +# source, as it's easier to maintain there. We assume we've already +# built that, so we just need a script to run it. +conformance-csharp: $(other_language_protoc_outputs) + @echo "Writing shortcut script conformance-csharp..." + @echo '#! /bin/sh' > conformance-csharp + @echo 'mono ../csharp/src/Google.Protobuf.Conformance/bin/Release/Google.Protobuf.Conformance.exe "$$@"' >> conformance-csharp + @chmod +x conformance-csharp + +# Targets for actually running tests. +test_cpp: protoc_middleman conformance-test-runner conformance-cpp + ./conformance-test-runner --failure_list failure_list_cpp.txt ./conformance-cpp + +test_java: protoc_middleman conformance-test-runner conformance-java + ./conformance-test-runner --failure_list failure_list_java.txt ./conformance-java + +test_java_lite: protoc_middleman conformance-test-runner conformance-java-lite + ./conformance-test-runner ./conformance-java-lite + +test_csharp: protoc_middleman conformance-test-runner conformance-csharp + ./conformance-test-runner --failure_list failure_list_csharp.txt ./conformance-csharp + +test_ruby: protoc_middleman conformance-test-runner $(other_language_protoc_outputs) + RUBYLIB=../ruby/lib:. ./conformance-test-runner --failure_list failure_list_ruby.txt ./conformance_ruby.rb + +# These depend on library paths being properly set up. The easiest way to +# run them is to just use "tox" from the python dir. +test_python: protoc_middleman conformance-test-runner + ./conformance-test-runner --failure_list failure_list_python.txt $(CONFORMANCE_PYTHON_EXTRA_FAILURES) ./conformance_python.py + +test_python_cpp: protoc_middleman conformance-test-runner + ./conformance-test-runner --failure_list failure_list_python_cpp.txt $(CONFORMANCE_PYTHON_EXTRA_FAILURES) ./conformance_python.py + +if OBJC_CONFORMANCE_TEST + +test_objc: protoc_middleman conformance-test-runner conformance-objc + ./conformance-test-runner --failure_list failure_list_objc.txt ./conformance-objc + +endif diff --git a/packager/third_party/protobuf/conformance/README.md b/packager/third_party/protobuf/conformance/README.md new file mode 100644 index 0000000000..9388055fba --- /dev/null +++ b/packager/third_party/protobuf/conformance/README.md @@ -0,0 +1,45 @@ +Protocol Buffers - Google's data interchange format +=================================================== + +[![Build Status](https://travis-ci.org/google/protobuf.svg?branch=master)](https://travis-ci.org/google/protobuf) + +Copyright 2008 Google Inc. + +This directory contains conformance tests for testing completeness and +correctness of Protocol Buffers implementations. These tests are designed +to be easy to run against any Protocol Buffers implementation. 
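Both Java testees above (ConformanceJava and ConformanceJavaLite) talk to the test runner through the same framing: every request and response on stdin/stdout is preceded by a 4-byte little-endian length. The helper names below are illustrative only and are not part of the upstream sources; this is a minimal C++ sketch of that framing, assuming POSIX read()/write() on file descriptors 0 and 1:

    #include <unistd.h>   // read(), write()
    #include <cstdint>
    #include <string>

    // Read exactly n bytes from stdin; returns false on EOF or error.
    bool ReadFull(void* buf, size_t n) {
      size_t ofs = 0;
      while (ofs < n) {
        ssize_t r = read(0, static_cast<char*>(buf) + ofs, n - ofs);
        if (r <= 0) return false;
        ofs += static_cast<size_t>(r);
      }
      return true;
    }

    // The length prefix is 4 bytes, little-endian, mirroring
    // readLittleEndianIntFromStdin() in the Java testees above.
    bool ReadLengthPrefixed(std::string* payload) {
      unsigned char b[4];
      if (!ReadFull(b, 4)) return false;  // clean EOF between messages
      uint32_t len = uint32_t(b[0]) | (uint32_t(b[1]) << 8) |
                     (uint32_t(b[2]) << 16) | (uint32_t(b[3]) << 24);
      payload->resize(len);
      return len == 0 || ReadFull(&(*payload)[0], len);
    }

    void WriteLengthPrefixed(const std::string& payload) {
      uint32_t len = static_cast<uint32_t>(payload.size());
      unsigned char b[4] = {static_cast<unsigned char>(len),
                            static_cast<unsigned char>(len >> 8),
                            static_cast<unsigned char>(len >> 16),
                            static_cast<unsigned char>(len >> 24)};
      write(1, b, 4);                            // length first...
      write(1, payload.data(), payload.size());  // ...then the serialized message
      // (write() error handling elided; compare CheckedWrite() in conformance_cpp.cc.)
    }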
+ +This directory contains the tester process `conformance-test`, which +contains all of the tests themselves. Then separate programs written +in whatever language you want to test communicate with the tester +program over a pipe. + +Before running any of these tests, make sure you run `make` in the base +directory to build `protoc`, since all the tests depend on it. + + $ make + +Then to run the tests against the C++ implementation, run: + + $ cd conformance && make test_cpp + +More tests and languages will be added soon! + +Testing other Protocol Buffer implementations +--------------------------------------------- + +To run these tests against a new Protocol Buffers implementation, write a +program in your language that uses the protobuf implementation you want +to test. This program should implement the testing protocol defined in +[conformance.proto](https://github.com/google/protobuf/blob/master/conformance/conformance.proto). +This is designed to be as easy as possible: the C++ version is only +150 lines and is a good example for what this program should look like +(see [conformance_cpp.cc](https://github.com/google/protobuf/blob/master/conformance/conformance_cpp.cc)). +The program only needs to be able to read from stdin and write to stdout. + +Portability +----------- + +Note that the test runner currently does not work on Windows. Patches +to fix this are welcome! (But please get in touch first to settle on +a general implementation strategy). diff --git a/packager/third_party/protobuf/conformance/conformance.proto b/packager/third_party/protobuf/conformance/conformance.proto new file mode 100644 index 0000000000..fc96074ac8 --- /dev/null +++ b/packager/third_party/protobuf/conformance/conformance.proto @@ -0,0 +1,273 @@ +// Protocol Buffers - Google's data interchange format +// Copyright 2008 Google Inc. All rights reserved. +// https://developers.google.com/protocol-buffers/ +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
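With the framing helpers sketched above, a testee for the protocol defined below reduces to a loop: read one length-prefixed ConformanceRequest, handle its payload, and write back one length-prefixed ConformanceResponse. The HandleRequest() function here is a hypothetical placeholder for the per-test logic (upstream's real implementation is DoTest() in conformance_cpp.cc, further down in this diff):

    #include <cstdio>
    #include <string>
    #include "conformance.pb.h"  // generated from the .proto below

    // Framing helpers from the sketch above.
    bool ReadLengthPrefixed(std::string* payload);
    void WriteLengthPrefixed(const std::string& payload);

    // Hypothetical handler: parse the request payload, then re-serialize the
    // test message in request.requested_output_format() into *response.
    void HandleRequest(const conformance::ConformanceRequest& request,
                       conformance::ConformanceResponse* response);

    int main() {
      int test_count = 0;
      std::string in, out;
      while (ReadLengthPrefixed(&in)) {  // false means the runner closed stdin
        conformance::ConformanceRequest request;
        conformance::ConformanceResponse response;
        if (!request.ParseFromString(in)) return 1;  // malformed frame from runner
        HandleRequest(request, &response);
        response.SerializeToString(&out);
        WriteLengthPrefixed(out);
        ++test_count;
      }
      std::fprintf(stderr, "received EOF from test runner after %d tests\n",
                   test_count);
      return 0;
    }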
+ +syntax = "proto3"; +package conformance; +option java_package = "com.google.protobuf.conformance"; + +import "google/protobuf/any.proto"; +import "google/protobuf/duration.proto"; +import "google/protobuf/field_mask.proto"; +import "google/protobuf/struct.proto"; +import "google/protobuf/timestamp.proto"; +import "google/protobuf/wrappers.proto"; + +// This defines the conformance testing protocol. This protocol exists between +// the conformance test suite itself and the code being tested. For each test, +// the suite will send a ConformanceRequest message and expect a +// ConformanceResponse message. +// +// You can either run the tests in two different ways: +// +// 1. in-process (using the interface in conformance_test.h). +// +// 2. as a sub-process communicating over a pipe. Information about how to +// do this is in conformance_test_runner.cc. +// +// Pros/cons of the two approaches: +// +// - running as a sub-process is much simpler for languages other than C/C++. +// +// - running as a sub-process may be more tricky in unusual environments like +// iOS apps, where fork/stdin/stdout are not available. + +enum WireFormat { + UNSPECIFIED = 0; + PROTOBUF = 1; + JSON = 2; +} + +// Represents a single test case's input. The testee should: +// +// 1. parse this proto (which should always succeed) +// 2. parse the protobuf or JSON payload in "payload" (which may fail) +// 3. if the parse succeeded, serialize the message in the requested format. +message ConformanceRequest { + // The payload (whether protobuf of JSON) is always for a TestAllTypes proto + // (see below). + oneof payload { + bytes protobuf_payload = 1; + string json_payload = 2; + } + + // Which format should the testee serialize its message to? + WireFormat requested_output_format = 3; +} + +// Represents a single test case's output. +message ConformanceResponse { + oneof result { + // This string should be set to indicate parsing failed. The string can + // provide more information about the parse error if it is available. + // + // Setting this string does not necessarily mean the testee failed the + // test. Some of the test cases are intentionally invalid input. + string parse_error = 1; + + // If the input was successfully parsed but errors occurred when + // serializing it to the requested output format, set the error message in + // this field. + string serialize_error = 6; + + // This should be set if some other error occurred. This will always + // indicate that the test failed. The string can provide more information + // about the failure. + string runtime_error = 2; + + // If the input was successfully parsed and the requested output was + // protobuf, serialize it to protobuf and set it in this field. + bytes protobuf_payload = 3; + + // If the input was successfully parsed and the requested output was JSON, + // serialize to JSON and set it in this field. + string json_payload = 4; + + // For when the testee skipped the test, likely because a certain feature + // wasn't supported, like JSON input/output. + string skipped = 5; + } +} + +// This proto includes every type of field in both singular and repeated +// forms. +message TestAllTypes { + message NestedMessage { + int32 a = 1; + TestAllTypes corecursive = 2; + } + + enum NestedEnum { + FOO = 0; + BAR = 1; + BAZ = 2; + NEG = -1; // Intentionally negative. 
+ } + + // Singular + int32 optional_int32 = 1; + int64 optional_int64 = 2; + uint32 optional_uint32 = 3; + uint64 optional_uint64 = 4; + sint32 optional_sint32 = 5; + sint64 optional_sint64 = 6; + fixed32 optional_fixed32 = 7; + fixed64 optional_fixed64 = 8; + sfixed32 optional_sfixed32 = 9; + sfixed64 optional_sfixed64 = 10; + float optional_float = 11; + double optional_double = 12; + bool optional_bool = 13; + string optional_string = 14; + bytes optional_bytes = 15; + + NestedMessage optional_nested_message = 18; + ForeignMessage optional_foreign_message = 19; + + NestedEnum optional_nested_enum = 21; + ForeignEnum optional_foreign_enum = 22; + + string optional_string_piece = 24 [ctype=STRING_PIECE]; + string optional_cord = 25 [ctype=CORD]; + + TestAllTypes recursive_message = 27; + + // Repeated + repeated int32 repeated_int32 = 31; + repeated int64 repeated_int64 = 32; + repeated uint32 repeated_uint32 = 33; + repeated uint64 repeated_uint64 = 34; + repeated sint32 repeated_sint32 = 35; + repeated sint64 repeated_sint64 = 36; + repeated fixed32 repeated_fixed32 = 37; + repeated fixed64 repeated_fixed64 = 38; + repeated sfixed32 repeated_sfixed32 = 39; + repeated sfixed64 repeated_sfixed64 = 40; + repeated float repeated_float = 41; + repeated double repeated_double = 42; + repeated bool repeated_bool = 43; + repeated string repeated_string = 44; + repeated bytes repeated_bytes = 45; + + repeated NestedMessage repeated_nested_message = 48; + repeated ForeignMessage repeated_foreign_message = 49; + + repeated NestedEnum repeated_nested_enum = 51; + repeated ForeignEnum repeated_foreign_enum = 52; + + repeated string repeated_string_piece = 54 [ctype=STRING_PIECE]; + repeated string repeated_cord = 55 [ctype=CORD]; + + // Map + map < int32, int32> map_int32_int32 = 56; + map < int64, int64> map_int64_int64 = 57; + map < uint32, uint32> map_uint32_uint32 = 58; + map < uint64, uint64> map_uint64_uint64 = 59; + map < sint32, sint32> map_sint32_sint32 = 60; + map < sint64, sint64> map_sint64_sint64 = 61; + map < fixed32, fixed32> map_fixed32_fixed32 = 62; + map < fixed64, fixed64> map_fixed64_fixed64 = 63; + map map_sfixed32_sfixed32 = 64; + map map_sfixed64_sfixed64 = 65; + map < int32, float> map_int32_float = 66; + map < int32, double> map_int32_double = 67; + map < bool, bool> map_bool_bool = 68; + map < string, string> map_string_string = 69; + map < string, bytes> map_string_bytes = 70; + map < string, NestedMessage> map_string_nested_message = 71; + map < string, ForeignMessage> map_string_foreign_message = 72; + map < string, NestedEnum> map_string_nested_enum = 73; + map < string, ForeignEnum> map_string_foreign_enum = 74; + + oneof oneof_field { + uint32 oneof_uint32 = 111; + NestedMessage oneof_nested_message = 112; + string oneof_string = 113; + bytes oneof_bytes = 114; + } + + // Well-known types + google.protobuf.BoolValue optional_bool_wrapper = 201; + google.protobuf.Int32Value optional_int32_wrapper = 202; + google.protobuf.Int64Value optional_int64_wrapper = 203; + google.protobuf.UInt32Value optional_uint32_wrapper = 204; + google.protobuf.UInt64Value optional_uint64_wrapper = 205; + google.protobuf.FloatValue optional_float_wrapper = 206; + google.protobuf.DoubleValue optional_double_wrapper = 207; + google.protobuf.StringValue optional_string_wrapper = 208; + google.protobuf.BytesValue optional_bytes_wrapper = 209; + + repeated google.protobuf.BoolValue repeated_bool_wrapper = 211; + repeated google.protobuf.Int32Value repeated_int32_wrapper = 212; + repeated 
google.protobuf.Int64Value repeated_int64_wrapper = 213; + repeated google.protobuf.UInt32Value repeated_uint32_wrapper = 214; + repeated google.protobuf.UInt64Value repeated_uint64_wrapper = 215; + repeated google.protobuf.FloatValue repeated_float_wrapper = 216; + repeated google.protobuf.DoubleValue repeated_double_wrapper = 217; + repeated google.protobuf.StringValue repeated_string_wrapper = 218; + repeated google.protobuf.BytesValue repeated_bytes_wrapper = 219; + + google.protobuf.Duration optional_duration = 301; + google.protobuf.Timestamp optional_timestamp = 302; + google.protobuf.FieldMask optional_field_mask = 303; + google.protobuf.Struct optional_struct = 304; + google.protobuf.Any optional_any = 305; + google.protobuf.Value optional_value = 306; + + repeated google.protobuf.Duration repeated_duration = 311; + repeated google.protobuf.Timestamp repeated_timestamp = 312; + repeated google.protobuf.FieldMask repeated_fieldmask = 313; + repeated google.protobuf.Struct repeated_struct = 324; + repeated google.protobuf.Any repeated_any = 315; + repeated google.protobuf.Value repeated_value = 316; + + // Test field-name-to-JSON-name convention. + int32 fieldname1 = 401; + int32 field_name2 = 402; + int32 _field_name3 = 403; + int32 field__name4_ = 404; + int32 field0name5 = 405; + int32 field_0_name6 = 406; + int32 fieldName7 = 407; + int32 FieldName8 = 408; + int32 field_Name9 = 409; + int32 Field_Name10 = 410; + int32 FIELD_NAME11 = 411; + int32 FIELD_name12 = 412; +} + +message ForeignMessage { + int32 c = 1; +} + +enum ForeignEnum { + FOREIGN_FOO = 0; + FOREIGN_BAR = 1; + FOREIGN_BAZ = 2; +} diff --git a/packager/third_party/protobuf/conformance/conformance_cpp.cc b/packager/third_party/protobuf/conformance/conformance_cpp.cc new file mode 100644 index 0000000000..1a26549325 --- /dev/null +++ b/packager/third_party/protobuf/conformance/conformance_cpp.cc @@ -0,0 +1,207 @@ +// Protocol Buffers - Google's data interchange format +// Copyright 2008 Google Inc. All rights reserved. +// https://developers.google.com/protocol-buffers/ +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +#include +#include +#include + +#include "conformance.pb.h" +#include +#include + +using conformance::ConformanceRequest; +using conformance::ConformanceResponse; +using conformance::TestAllTypes; +using google::protobuf::Descriptor; +using google::protobuf::DescriptorPool; +using google::protobuf::internal::scoped_ptr; +using google::protobuf::util::BinaryToJsonString; +using google::protobuf::util::JsonToBinaryString; +using google::protobuf::util::NewTypeResolverForDescriptorPool; +using google::protobuf::util::Status; +using google::protobuf::util::TypeResolver; +using std::string; + +static const char kTypeUrlPrefix[] = "type.googleapis.com"; + +static string GetTypeUrl(const Descriptor* message) { + return string(kTypeUrlPrefix) + "/" + message->full_name(); +} + +int test_count = 0; +bool verbose = false; +TypeResolver* type_resolver; +string* type_url; + + +bool CheckedRead(int fd, void *buf, size_t len) { + size_t ofs = 0; + while (len > 0) { + ssize_t bytes_read = read(fd, (char*)buf + ofs, len); + + if (bytes_read == 0) return false; + + if (bytes_read < 0) { + GOOGLE_LOG(FATAL) << "Error reading from test runner: " << strerror(errno); + } + + len -= bytes_read; + ofs += bytes_read; + } + + return true; +} + +void CheckedWrite(int fd, const void *buf, size_t len) { + if (write(fd, buf, len) != len) { + GOOGLE_LOG(FATAL) << "Error writing to test runner: " << strerror(errno); + } +} + +void DoTest(const ConformanceRequest& request, ConformanceResponse* response) { + TestAllTypes test_message; + + switch (request.payload_case()) { + case ConformanceRequest::kProtobufPayload: + if (!test_message.ParseFromString(request.protobuf_payload())) { + // Getting parse details would involve something like: + // http://stackoverflow.com/questions/22121922/how-can-i-get-more-details-about-errors-generated-during-protobuf-parsing-c + response->set_parse_error("Parse error (no more details available)."); + return; + } + break; + + case ConformanceRequest::kJsonPayload: { + string proto_binary; + Status status = JsonToBinaryString(type_resolver, *type_url, + request.json_payload(), &proto_binary); + if (!status.ok()) { + response->set_parse_error(string("Parse error: ") + + status.error_message().as_string()); + return; + } + + if (!test_message.ParseFromString(proto_binary)) { + response->set_runtime_error( + "Parsing JSON generates invalid proto output."); + return; + } + break; + } + + case ConformanceRequest::PAYLOAD_NOT_SET: + GOOGLE_LOG(FATAL) << "Request didn't have payload."; + break; + } + + switch (request.requested_output_format()) { + case conformance::UNSPECIFIED: + GOOGLE_LOG(FATAL) << "Unspecified output format"; + break; + + case conformance::PROTOBUF: + GOOGLE_CHECK( + test_message.SerializeToString(response->mutable_protobuf_payload())); + break; + + case conformance::JSON: { + string proto_binary; + GOOGLE_CHECK(test_message.SerializeToString(&proto_binary)); + Status status = BinaryToJsonString(type_resolver, *type_url, proto_binary, + 
response->mutable_json_payload()); + if (!status.ok()) { + response->set_serialize_error( + string("Failed to serialize JSON output: ") + + status.error_message().as_string()); + return; + } + break; + } + + default: + GOOGLE_LOG(FATAL) << "Unknown output format: " + << request.requested_output_format(); + } +} + +bool DoTestIo() { + string serialized_input; + string serialized_output; + ConformanceRequest request; + ConformanceResponse response; + uint32_t bytes; + + if (!CheckedRead(STDIN_FILENO, &bytes, sizeof(uint32_t))) { + // EOF. + return false; + } + + serialized_input.resize(bytes); + + if (!CheckedRead(STDIN_FILENO, (char*)serialized_input.c_str(), bytes)) { + GOOGLE_LOG(ERROR) << "Unexpected EOF on stdin. " << strerror(errno); + } + + if (!request.ParseFromString(serialized_input)) { + GOOGLE_LOG(FATAL) << "Parse of ConformanceRequest proto failed."; + return false; + } + + DoTest(request, &response); + + response.SerializeToString(&serialized_output); + + bytes = serialized_output.size(); + CheckedWrite(STDOUT_FILENO, &bytes, sizeof(uint32_t)); + CheckedWrite(STDOUT_FILENO, serialized_output.c_str(), bytes); + + if (verbose) { + fprintf(stderr, "conformance-cpp: request=%s, response=%s\n", + request.ShortDebugString().c_str(), + response.ShortDebugString().c_str()); + } + + test_count++; + + return true; +} + +int main() { + type_resolver = NewTypeResolverForDescriptorPool( + kTypeUrlPrefix, DescriptorPool::generated_pool()); + type_url = new string(GetTypeUrl(TestAllTypes::descriptor())); + while (1) { + if (!DoTestIo()) { + fprintf(stderr, "conformance-cpp: received EOF from test runner " + "after %d tests, exiting\n", test_count); + return 0; + } + } +} diff --git a/packager/third_party/protobuf/conformance/conformance_objc.m b/packager/third_party/protobuf/conformance/conformance_objc.m new file mode 100644 index 0000000000..1124bfeba1 --- /dev/null +++ b/packager/third_party/protobuf/conformance/conformance_objc.m @@ -0,0 +1,179 @@ +// Protocol Buffers - Google's data interchange format +// Copyright 2015 Google Inc. All rights reserved. +// https://developers.google.com/protocol-buffers/ +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +#import + +#import "Conformance.pbobjc.h" + +static void Die(NSString *format, ...) __dead2; + +static BOOL verbose = NO; +static int32_t testCount = 0; + +static void Die(NSString *format, ...) { + va_list args; + va_start(args, format); + NSString *msg = [[NSString alloc] initWithFormat:format arguments:args]; + NSLog(@"%@", msg); + va_end(args); + [msg release]; + exit(66); +} + +static NSData *CheckedReadDataOfLength(NSFileHandle *handle, NSUInteger numBytes) { + NSData *data = [handle readDataOfLength:numBytes]; + NSUInteger dataLen = data.length; + if (dataLen == 0) { + return nil; // EOF. + } + if (dataLen != numBytes) { + Die(@"Failed to read the request length (%d), only got: %@", + numBytes, data); + } + return data; +} + +static ConformanceResponse *DoTest(ConformanceRequest *request) { + ConformanceResponse *response = [ConformanceResponse message]; + TestAllTypes *testMessage = nil; + + switch (request.payloadOneOfCase) { + case ConformanceRequest_Payload_OneOfCase_GPBUnsetOneOfCase: + Die(@"Request didn't have a payload: %@", request); + break; + + case ConformanceRequest_Payload_OneOfCase_ProtobufPayload: { + NSError *error = nil; + testMessage = [TestAllTypes parseFromData:request.protobufPayload + error:&error]; + if (!testMessage) { + response.parseError = + [NSString stringWithFormat:@"Parse error: %@", error]; + } + break; + } + + case ConformanceRequest_Payload_OneOfCase_JsonPayload: + response.skipped = @"ObjC doesn't support parsing JSON"; + break; + } + + if (testMessage) { + switch (request.requestedOutputFormat) { + case WireFormat_GPBUnrecognizedEnumeratorValue: + case WireFormat_Unspecified: + Die(@"Unrecognized/unspecified output format: %@", request); + break; + + case WireFormat_Protobuf: + response.protobufPayload = testMessage.data; + if (!response.protobufPayload) { + response.serializeError = + [NSString stringWithFormat:@"Failed to make data from: %@", testMessage]; + } + break; + + case WireFormat_Json: + response.skipped = @"ObjC doesn't support generating JSON"; + break; + } + } + + return response; +} + +static uint32_t UInt32FromLittleEndianData(NSData *data) { + if (data.length != sizeof(uint32_t)) { + Die(@"Data not the right size for uint32_t: %@", data); + } + uint32_t value; + memcpy(&value, data.bytes, sizeof(uint32_t)); + return CFSwapInt32LittleToHost(value); +} + +static NSData *UInt32ToLittleEndianData(uint32_t num) { + uint32_t value = CFSwapInt32HostToLittle(num); + return [NSData dataWithBytes:&value length:sizeof(uint32_t)]; +} + +static BOOL DoTestIo(NSFileHandle *input, NSFileHandle *output) { + // See conformance_test_runner.cc for the wire format. + NSData *data = CheckedReadDataOfLength(input, sizeof(uint32_t)); + if (!data) { + // EOF. 
+ return NO; + } + uint32_t numBytes = UInt32FromLittleEndianData(data); + data = CheckedReadDataOfLength(input, numBytes); + if (!data) { + Die(@"Failed to read request"); + } + + NSError *error = nil; + ConformanceRequest *request = [ConformanceRequest parseFromData:data + error:&error]; + if (!request) { + Die(@"Failed to parse the message data: %@", error); + } + + ConformanceResponse *response = DoTest(request); + if (!response) { + Die(@"Failed to make a reply from %@", request); + } + + data = response.data; + [output writeData:UInt32ToLittleEndianData((int32_t)data.length)]; + [output writeData:data]; + + if (verbose) { + NSLog(@"Request: %@", request); + NSLog(@"Response: %@", response); + } + + ++testCount; + return YES; +} + +int main(int argc, const char *argv[]) { + @autoreleasepool { + NSFileHandle *input = [[NSFileHandle fileHandleWithStandardInput] retain]; + NSFileHandle *output = [[NSFileHandle fileHandleWithStandardOutput] retain]; + + BOOL notDone = YES; + while (notDone) { + @autoreleasepool { + notDone = DoTestIo(input, output); + } + } + + NSLog(@"Received EOF from test runner after %d tests, exiting.", testCount); + } + return 0; +} diff --git a/packager/third_party/protobuf/conformance/conformance_python.py b/packager/third_party/protobuf/conformance/conformance_python.py new file mode 100755 index 0000000000..a490c8e8e5 --- /dev/null +++ b/packager/third_party/protobuf/conformance/conformance_python.py @@ -0,0 +1,130 @@ +#!/usr/bin/env python +# +# Protocol Buffers - Google's data interchange format +# Copyright 2008 Google Inc. All rights reserved. +# https://developers.google.com/protocol-buffers/ +# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions are +# met: +# +# * Redistributions of source code must retain the above copyright +# notice, this list of conditions and the following disclaimer. +# * Redistributions in binary form must reproduce the above +# copyright notice, this list of conditions and the following disclaimer +# in the documentation and/or other materials provided with the +# distribution. +# * Neither the name of Google Inc. nor the names of its +# contributors may be used to endorse or promote products derived from +# this software without specific prior written permission. +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +"""A conformance test implementation for the Python protobuf library. + +See conformance.proto for more information. 
+""" + +import struct +import sys +import os +from google.protobuf import message +from google.protobuf import json_format +import conformance_pb2 + +sys.stdout = os.fdopen(sys.stdout.fileno(), 'wb', 0) +sys.stdin = os.fdopen(sys.stdin.fileno(), 'rb', 0) + +test_count = 0 +verbose = False + +class ProtocolError(Exception): + pass + +def do_test(request): + test_message = conformance_pb2.TestAllTypes() + response = conformance_pb2.ConformanceResponse() + test_message = conformance_pb2.TestAllTypes() + + try: + if request.WhichOneof('payload') == 'protobuf_payload': + try: + test_message.ParseFromString(request.protobuf_payload) + except message.DecodeError as e: + response.parse_error = str(e) + return response + + elif request.WhichOneof('payload') == 'json_payload': + try: + json_format.Parse(request.json_payload, test_message) + except json_format.ParseError as e: + response.parse_error = str(e) + return response + + else: + raise ProtocolError("Request didn't have payload.") + + if request.requested_output_format == conformance_pb2.UNSPECIFIED: + raise ProtocolError("Unspecified output format") + + elif request.requested_output_format == conformance_pb2.PROTOBUF: + response.protobuf_payload = test_message.SerializeToString() + + elif request.requested_output_format == conformance_pb2.JSON: + response.json_payload = json_format.MessageToJson(test_message) + + except Exception as e: + response.runtime_error = str(e) + + return response + +def do_test_io(): + length_bytes = sys.stdin.read(4) + if len(length_bytes) == 0: + return False # EOF + elif len(length_bytes) != 4: + raise IOError("I/O error") + + # "I" is "unsigned int", so this depends on running on a platform with + # 32-bit "unsigned int" type. The Python struct module unfortunately + # has no format specifier for uint32_t. + length = struct.unpack(" err + response.parse_error = err.message.encode('utf-8') + return response + end + + when :json_payload + begin + test_message = Conformance::TestAllTypes.decode_json(request.json_payload) + rescue Google::Protobuf::ParseError => err + response.parse_error = err.message.encode('utf-8') + return response + end + + when nil + fail "Request didn't have payload" + end + + case request.requested_output_format + when :UNSPECIFIED + fail 'Unspecified output format' + + when :PROTOBUF + response.protobuf_payload = test_message.to_proto + + when :JSON + response.json_payload = test_message.to_json + + when nil + fail "Request didn't have requested output format" + end + rescue StandardError => err + response.runtime_error = err.message.encode('utf-8') + end + + response +end + +# Returns true if the test ran successfully, false on legitimate EOF. +# If EOF is encountered in an unexpected place, raises IOError. +def do_test_io + length_bytes = STDIN.read(4) + return false if length_bytes.nil? + + length = length_bytes.unpack('V').first + serialized_request = STDIN.read(length) + if serialized_request.nil? 
|| serialized_request.length != length + fail IOError + end + + request = Conformance::ConformanceRequest.decode(serialized_request) + + response = do_test(request) + + serialized_response = Conformance::ConformanceResponse.encode(response) + STDOUT.write([serialized_response.length].pack('V')) + STDOUT.write(serialized_response) + STDOUT.flush + + if $verbose + STDERR.puts("conformance_ruby: request=#{request.to_json}, " \ + "response=#{response.to_json}\n") + end + + $test_count += 1 + + true +end + +loop do + unless do_test_io + STDERR.puts('conformance_ruby: received EOF from test runner ' \ + "after #{$test_count} tests, exiting") + break + end +end diff --git a/packager/third_party/protobuf/conformance/conformance_test.cc b/packager/third_party/protobuf/conformance/conformance_test.cc new file mode 100644 index 0000000000..fc0605bfc4 --- /dev/null +++ b/packager/third_party/protobuf/conformance/conformance_test.cc @@ -0,0 +1,2006 @@ +// Protocol Buffers - Google's data interchange format +// Copyright 2008 Google Inc. All rights reserved. +// https://developers.google.com/protocol-buffers/ +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
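Each of the testee programs above (C++, Objective-C, Python, Ruby) speaks the same framing over stdin/stdout: a 4-byte little-endian length, then that many bytes of serialized ConformanceRequest, with the ConformanceResponse written back the same way. The sketch below is illustrative only; ReadFramedMessage/WriteFramedMessage are hypothetical names, the payload handling is stubbed out, and, like the C++ testee above, it assumes a little-endian host.

```cpp
#include <unistd.h>

#include <cstdint>
#include <cstdio>
#include <string>

// Reads exactly |len| bytes from |fd|; returns false on EOF or error.
static bool ReadFull(int fd, void* buf, size_t len) {
  size_t ofs = 0;
  while (ofs < len) {
    ssize_t n = read(fd, static_cast<char*>(buf) + ofs, len - ofs);
    if (n <= 0) return false;
    ofs += static_cast<size_t>(n);
  }
  return true;
}

// Reads one length-prefixed message from stdin into |payload|.
static bool ReadFramedMessage(std::string* payload) {
  uint32_t len = 0;
  if (!ReadFull(STDIN_FILENO, &len, sizeof(len))) return false;  // EOF.
  payload->resize(len);
  if (len == 0) return true;
  return ReadFull(STDIN_FILENO, &(*payload)[0], len);
}

// Writes one length-prefixed message to stdout.
static void WriteFramedMessage(const std::string& payload) {
  uint32_t len = static_cast<uint32_t>(payload.size());
  if (write(STDOUT_FILENO, &len, sizeof(len)) != (ssize_t)sizeof(len) ||
      write(STDOUT_FILENO, payload.data(), len) != (ssize_t)len) {
    std::perror("write");
  }
}

int main() {
  std::string request_bytes;
  while (ReadFramedMessage(&request_bytes)) {
    // A real testee would parse a ConformanceRequest from request_bytes,
    // run the test, and serialize a ConformanceResponse here.
    std::string response_bytes = request_bytes;  // placeholder echo
    WriteFramedMessage(response_bytes);
  }
  return 0;
}
```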
+ +#include +#include + +#include "conformance.pb.h" +#include "conformance_test.h" +#include +#include +#include +#include +#include +#include +#include +#include + +#include "third_party/jsoncpp/json.h" + +using conformance::ConformanceRequest; +using conformance::ConformanceResponse; +using conformance::TestAllTypes; +using conformance::WireFormat; +using google::protobuf::Descriptor; +using google::protobuf::FieldDescriptor; +using google::protobuf::internal::WireFormatLite; +using google::protobuf::TextFormat; +using google::protobuf::util::DefaultFieldComparator; +using google::protobuf::util::JsonToBinaryString; +using google::protobuf::util::MessageDifferencer; +using google::protobuf::util::NewTypeResolverForDescriptorPool; +using google::protobuf::util::Status; +using std::string; + +namespace { + +static const char kTypeUrlPrefix[] = "type.googleapis.com"; + +static string GetTypeUrl(const Descriptor* message) { + return string(kTypeUrlPrefix) + "/" + message->full_name(); +} + +/* Routines for building arbitrary protos *************************************/ + +// We would use CodedOutputStream except that we want more freedom to build +// arbitrary protos (even invalid ones). + +const string empty; + +string cat(const string& a, const string& b, + const string& c = empty, + const string& d = empty, + const string& e = empty, + const string& f = empty, + const string& g = empty, + const string& h = empty, + const string& i = empty, + const string& j = empty, + const string& k = empty, + const string& l = empty) { + string ret; + ret.reserve(a.size() + b.size() + c.size() + d.size() + e.size() + f.size() + + g.size() + h.size() + i.size() + j.size() + k.size() + l.size()); + ret.append(a); + ret.append(b); + ret.append(c); + ret.append(d); + ret.append(e); + ret.append(f); + ret.append(g); + ret.append(h); + ret.append(i); + ret.append(j); + ret.append(k); + ret.append(l); + return ret; +} + +// The maximum number of bytes that it takes to encode a 64-bit varint. +#define VARINT_MAX_LEN 10 + +size_t vencode64(uint64_t val, char *buf) { + if (val == 0) { buf[0] = 0; return 1; } + size_t i = 0; + while (val) { + uint8_t byte = val & 0x7fU; + val >>= 7; + if (val) byte |= 0x80U; + buf[i++] = byte; + } + return i; +} + +string varint(uint64_t x) { + char buf[VARINT_MAX_LEN]; + size_t len = vencode64(x, buf); + return string(buf, len); +} + +// TODO: proper byte-swapping for big-endian machines. 
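One way the TODO above could be addressed is to emit the little-endian bytes explicitly rather than copying host memory; Fixed32LE/Fixed64LE below are illustrative names, not helpers in this file, and the flt()/dbl() helpers that follow would additionally need to copy the floating-point bits into an integer first.

```cpp
#include <cstdint>
#include <string>

// The wire format stores fixed32/fixed64 little-endian, so shifting the value
// out byte by byte works on hosts of either endianness.
static std::string Fixed32LE(uint32_t v) {
  std::string out(4, '\0');
  for (int i = 0; i < 4; ++i) out[i] = static_cast<char>((v >> (8 * i)) & 0xFF);
  return out;
}

static std::string Fixed64LE(uint64_t v) {
  std::string out(8, '\0');
  for (int i = 0; i < 8; ++i) out[i] = static_cast<char>((v >> (8 * i)) & 0xFF);
  return out;
}
```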
+string fixed32(void *data) { return string(static_cast<char*>(data), 4); }
+string fixed64(void *data) { return string(static_cast<char*>(data), 8); }
+
+string delim(const string& buf) { return cat(varint(buf.size()), buf); }
+string uint32(uint32_t u32) { return fixed32(&u32); }
+string uint64(uint64_t u64) { return fixed64(&u64); }
+string flt(float f) { return fixed32(&f); }
+string dbl(double d) { return fixed64(&d); }
+string zz32(int32_t x) { return varint(WireFormatLite::ZigZagEncode32(x)); }
+string zz64(int64_t x) { return varint(WireFormatLite::ZigZagEncode64(x)); }
+
+string tag(uint32_t fieldnum, char wire_type) {
+  return varint((fieldnum << 3) | wire_type);
+}
+
+string submsg(uint32_t fn, const string& buf) {
+  return cat( tag(fn, WireFormatLite::WIRETYPE_LENGTH_DELIMITED), delim(buf) );
+}
+
+#define UNKNOWN_FIELD 666
+
+uint32_t GetFieldNumberForType(FieldDescriptor::Type type, bool repeated) {
+  const Descriptor* d = TestAllTypes().GetDescriptor();
+  for (int i = 0; i < d->field_count(); i++) {
+    const FieldDescriptor* f = d->field(i);
+    if (f->type() == type && f->is_repeated() == repeated) {
+      return f->number();
+    }
+  }
+  GOOGLE_LOG(FATAL) << "Couldn't find field with type " << (int)type;
+  return 0;
+}
+
+string UpperCase(string str) {
+  for (int i = 0; i < str.size(); i++) {
+    str[i] = toupper(str[i]);
+  }
+  return str;
+}
+
+}  // anonymous namespace
+
+namespace google {
+namespace protobuf {
+
+void ConformanceTestSuite::ReportSuccess(const string& test_name) {
+  if (expected_to_fail_.erase(test_name) != 0) {
+    StringAppendF(&output_,
+                  "ERROR: test %s is in the failure list, but test succeeded. "
+                  "Remove it from the failure list.\n",
+                  test_name.c_str());
+    unexpected_succeeding_tests_.insert(test_name);
+  }
+  successes_++;
+}
+
+void ConformanceTestSuite::ReportFailure(const string& test_name,
+                                         const ConformanceRequest& request,
+                                         const ConformanceResponse& response,
+                                         const char* fmt, ...)
{ + if (expected_to_fail_.erase(test_name) == 1) { + expected_failures_++; + if (!verbose_) + return; + } else { + StringAppendF(&output_, "ERROR, test=%s: ", test_name.c_str()); + unexpected_failing_tests_.insert(test_name); + } + va_list args; + va_start(args, fmt); + StringAppendV(&output_, fmt, args); + va_end(args); + StringAppendF(&output_, " request=%s, response=%s\n", + request.ShortDebugString().c_str(), + response.ShortDebugString().c_str()); +} + +void ConformanceTestSuite::ReportSkip(const string& test_name, + const ConformanceRequest& request, + const ConformanceResponse& response) { + if (verbose_) { + StringAppendF(&output_, "SKIPPED, test=%s request=%s, response=%s\n", + test_name.c_str(), request.ShortDebugString().c_str(), + response.ShortDebugString().c_str()); + } + skipped_.insert(test_name); +} + +void ConformanceTestSuite::RunTest(const string& test_name, + const ConformanceRequest& request, + ConformanceResponse* response) { + if (test_names_.insert(test_name).second == false) { + GOOGLE_LOG(FATAL) << "Duplicated test name: " << test_name; + } + + string serialized_request; + string serialized_response; + request.SerializeToString(&serialized_request); + + runner_->RunTest(test_name, serialized_request, &serialized_response); + + if (!response->ParseFromString(serialized_response)) { + response->Clear(); + response->set_runtime_error("response proto could not be parsed."); + } + + if (verbose_) { + StringAppendF(&output_, "conformance test: name=%s, request=%s, response=%s\n", + test_name.c_str(), + request.ShortDebugString().c_str(), + response->ShortDebugString().c_str()); + } +} + +void ConformanceTestSuite::RunValidInputTest( + const string& test_name, const string& input, WireFormat input_format, + const string& equivalent_text_format, WireFormat requested_output) { + TestAllTypes reference_message; + GOOGLE_CHECK( + TextFormat::ParseFromString(equivalent_text_format, &reference_message)) + << "Failed to parse data for test case: " << test_name + << ", data: " << equivalent_text_format; + + ConformanceRequest request; + ConformanceResponse response; + + switch (input_format) { + case conformance::PROTOBUF: + request.set_protobuf_payload(input); + break; + + case conformance::JSON: + request.set_json_payload(input); + break; + + default: + GOOGLE_LOG(FATAL) << "Unspecified input format"; + } + + request.set_requested_output_format(requested_output); + + RunTest(test_name, request, &response); + + TestAllTypes test_message; + + switch (response.result_case()) { + case ConformanceResponse::kParseError: + case ConformanceResponse::kRuntimeError: + case ConformanceResponse::kSerializeError: + ReportFailure(test_name, request, response, + "Failed to parse JSON input or produce JSON output."); + return; + + case ConformanceResponse::kSkipped: + ReportSkip(test_name, request, response); + return; + + case ConformanceResponse::kJsonPayload: { + if (requested_output != conformance::JSON) { + ReportFailure( + test_name, request, response, + "Test was asked for protobuf output but provided JSON instead."); + return; + } + string binary_protobuf; + Status status = + JsonToBinaryString(type_resolver_.get(), type_url_, + response.json_payload(), &binary_protobuf); + if (!status.ok()) { + ReportFailure(test_name, request, response, + "JSON output we received from test was unparseable."); + return; + } + + if (!test_message.ParseFromString(binary_protobuf)) { + ReportFailure(test_name, request, response, + "INTERNAL ERROR: internal JSON->protobuf transcode " + "yielded 
unparseable proto."); + return; + } + + break; + } + + case ConformanceResponse::kProtobufPayload: { + if (requested_output != conformance::PROTOBUF) { + ReportFailure( + test_name, request, response, + "Test was asked for JSON output but provided protobuf instead."); + return; + } + + if (!test_message.ParseFromString(response.protobuf_payload())) { + ReportFailure(test_name, request, response, + "Protobuf output we received from test was unparseable."); + return; + } + + break; + } + + default: + GOOGLE_LOG(FATAL) << test_name << ": unknown payload type: " + << response.result_case(); + } + + MessageDifferencer differencer; + DefaultFieldComparator field_comparator; + field_comparator.set_treat_nan_as_equal(true); + differencer.set_field_comparator(&field_comparator); + string differences; + differencer.ReportDifferencesToString(&differences); + + if (differencer.Compare(reference_message, test_message)) { + ReportSuccess(test_name); + } else { + ReportFailure(test_name, request, response, + "Output was not equivalent to reference message: %s.", + differences.c_str()); + } +} + +// Expect that this precise protobuf will cause a parse error. +void ConformanceTestSuite::ExpectParseFailureForProto( + const string& proto, const string& test_name) { + ConformanceRequest request; + ConformanceResponse response; + request.set_protobuf_payload(proto); + string effective_test_name = "ProtobufInput." + test_name; + + // We don't expect output, but if the program erroneously accepts the protobuf + // we let it send its response as this. We must not leave it unspecified. + request.set_requested_output_format(conformance::PROTOBUF); + + RunTest(effective_test_name, request, &response); + if (response.result_case() == ConformanceResponse::kParseError) { + ReportSuccess(effective_test_name); + } else if (response.result_case() == ConformanceResponse::kSkipped) { + ReportSkip(effective_test_name, request, response); + } else { + ReportFailure(effective_test_name, request, response, + "Should have failed to parse, but didn't."); + } +} + +// Expect that this protobuf will cause a parse error, even if it is followed +// by valid protobuf data. We can try running this twice: once with this +// data verbatim and once with this data followed by some valid data. +// +// TODO(haberman): implement the second of these. +void ConformanceTestSuite::ExpectHardParseFailureForProto( + const string& proto, const string& test_name) { + return ExpectParseFailureForProto(proto, test_name); +} + +void ConformanceTestSuite::RunValidJsonTest( + const string& test_name, const string& input_json, + const string& equivalent_text_format) { + RunValidInputTest("JsonInput." + test_name + ".ProtobufOutput", input_json, + conformance::JSON, equivalent_text_format, + conformance::PROTOBUF); + RunValidInputTest("JsonInput." + test_name + ".JsonOutput", input_json, + conformance::JSON, equivalent_text_format, + conformance::JSON); +} + +void ConformanceTestSuite::RunValidJsonTestWithProtobufInput( + const string& test_name, const TestAllTypes& input, + const string& equivalent_text_format) { + RunValidInputTest("ProtobufInput." + test_name + ".JsonOutput", + input.SerializeAsString(), conformance::PROTOBUF, + equivalent_text_format, conformance::JSON); +} + +// According to proto3 JSON specification, JSON serializers follow more strict +// rules than parsers (e.g., a serializer must serialize int32 values as JSON +// numbers while the parser is allowed to accept them as JSON strings). 
This +// method allows strict checking on a proto3 JSON serializer by inspecting +// the JSON output directly. +void ConformanceTestSuite::RunValidJsonTestWithValidator( + const string& test_name, const string& input_json, + const Validator& validator) { + ConformanceRequest request; + ConformanceResponse response; + request.set_json_payload(input_json); + request.set_requested_output_format(conformance::JSON); + + string effective_test_name = "JsonInput." + test_name + ".Validator"; + + RunTest(effective_test_name, request, &response); + + if (response.result_case() == ConformanceResponse::kSkipped) { + ReportSkip(effective_test_name, request, response); + return; + } + + if (response.result_case() != ConformanceResponse::kJsonPayload) { + ReportFailure(effective_test_name, request, response, + "Expected JSON payload but got type %d.", + response.result_case()); + return; + } + Json::Reader reader; + Json::Value value; + if (!reader.parse(response.json_payload(), value)) { + ReportFailure(effective_test_name, request, response, + "JSON payload cannot be parsed as valid JSON: %s", + reader.getFormattedErrorMessages().c_str()); + return; + } + if (!validator(value)) { + ReportFailure(effective_test_name, request, response, + "JSON payload validation failed."); + return; + } + ReportSuccess(effective_test_name); +} + +void ConformanceTestSuite::ExpectParseFailureForJson( + const string& test_name, const string& input_json) { + ConformanceRequest request; + ConformanceResponse response; + request.set_json_payload(input_json); + string effective_test_name = "JsonInput." + test_name; + + // We don't expect output, but if the program erroneously accepts the protobuf + // we let it send its response as this. We must not leave it unspecified. + request.set_requested_output_format(conformance::JSON); + + RunTest(effective_test_name, request, &response); + if (response.result_case() == ConformanceResponse::kParseError) { + ReportSuccess(effective_test_name); + } else if (response.result_case() == ConformanceResponse::kSkipped) { + ReportSkip(effective_test_name, request, response); + } else { + ReportFailure(effective_test_name, request, response, + "Should have failed to parse, but didn't."); + } +} + +void ConformanceTestSuite::ExpectSerializeFailureForJson( + const string& test_name, const string& text_format) { + TestAllTypes payload_message; + GOOGLE_CHECK( + TextFormat::ParseFromString(text_format, &payload_message)) + << "Failed to parse: " << text_format; + + ConformanceRequest request; + ConformanceResponse response; + request.set_protobuf_payload(payload_message.SerializeAsString()); + string effective_test_name = test_name + ".JsonOutput"; + request.set_requested_output_format(conformance::JSON); + + RunTest(effective_test_name, request, &response); + if (response.result_case() == ConformanceResponse::kSerializeError) { + ReportSuccess(effective_test_name); + } else if (response.result_case() == ConformanceResponse::kSkipped) { + ReportSkip(effective_test_name, request, response); + } else { + ReportFailure(effective_test_name, request, response, + "Should have failed to serialize, but didn't."); + } +} + +void ConformanceTestSuite::TestPrematureEOFForType(FieldDescriptor::Type type) { + // Incomplete values for each wire type. 
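An "incomplete" varint is one whose last byte still has the continuation bit set, so the single byte 0x80 used in the table that follows promises another byte that never arrives. The standalone decoder below (illustrative only, not part of the suite) shows why that byte is truncated while the two-byte encoding of 300 is complete.

```cpp
#include <cstddef>
#include <cstdint>
#include <cstdio>
#include <string>

// Returns true and sets |value| if |data| holds a complete varint.
static bool DecodeVarint(const std::string& data, uint64_t* value) {
  *value = 0;
  for (size_t i = 0; i < data.size() && i < 10; ++i) {
    uint8_t byte = static_cast<uint8_t>(data[i]);
    *value |= static_cast<uint64_t>(byte & 0x7F) << (7 * i);
    if ((byte & 0x80) == 0) return true;  // terminating byte found
  }
  return false;  // ran out of bytes with the continuation bit still set
}

int main() {
  uint64_t v;
  std::printf("0x80 complete? %d\n",
              DecodeVarint(std::string("\x80", 1), &v));        // 0: truncated
  std::printf("0xAC 0x02 complete? %d\n",
              DecodeVarint(std::string("\xAC\x02", 2), &v));    // 1: v == 300
  return 0;
}
```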
+ static const string incompletes[6] = { + string("\x80"), // VARINT + string("abcdefg"), // 64BIT + string("\x80"), // DELIMITED (partial length) + string(), // START_GROUP (no value required) + string(), // END_GROUP (no value required) + string("abc") // 32BIT + }; + + uint32_t fieldnum = GetFieldNumberForType(type, false); + uint32_t rep_fieldnum = GetFieldNumberForType(type, true); + WireFormatLite::WireType wire_type = WireFormatLite::WireTypeForFieldType( + static_cast(type)); + const string& incomplete = incompletes[wire_type]; + const string type_name = + UpperCase(string(".") + FieldDescriptor::TypeName(type)); + + ExpectParseFailureForProto( + tag(fieldnum, wire_type), + "PrematureEofBeforeKnownNonRepeatedValue" + type_name); + + ExpectParseFailureForProto( + tag(rep_fieldnum, wire_type), + "PrematureEofBeforeKnownRepeatedValue" + type_name); + + ExpectParseFailureForProto( + tag(UNKNOWN_FIELD, wire_type), + "PrematureEofBeforeUnknownValue" + type_name); + + ExpectParseFailureForProto( + cat( tag(fieldnum, wire_type), incomplete ), + "PrematureEofInsideKnownNonRepeatedValue" + type_name); + + ExpectParseFailureForProto( + cat( tag(rep_fieldnum, wire_type), incomplete ), + "PrematureEofInsideKnownRepeatedValue" + type_name); + + ExpectParseFailureForProto( + cat( tag(UNKNOWN_FIELD, wire_type), incomplete ), + "PrematureEofInsideUnknownValue" + type_name); + + if (wire_type == WireFormatLite::WIRETYPE_LENGTH_DELIMITED) { + ExpectParseFailureForProto( + cat( tag(fieldnum, wire_type), varint(1) ), + "PrematureEofInDelimitedDataForKnownNonRepeatedValue" + type_name); + + ExpectParseFailureForProto( + cat( tag(rep_fieldnum, wire_type), varint(1) ), + "PrematureEofInDelimitedDataForKnownRepeatedValue" + type_name); + + // EOF in the middle of delimited data for unknown value. + ExpectParseFailureForProto( + cat( tag(UNKNOWN_FIELD, wire_type), varint(1) ), + "PrematureEofInDelimitedDataForUnknownValue" + type_name); + + if (type == FieldDescriptor::TYPE_MESSAGE) { + // Submessage ends in the middle of a value. + string incomplete_submsg = + cat( tag(WireFormatLite::TYPE_INT32, WireFormatLite::WIRETYPE_VARINT), + incompletes[WireFormatLite::WIRETYPE_VARINT] ); + ExpectHardParseFailureForProto( + cat( tag(fieldnum, WireFormatLite::WIRETYPE_LENGTH_DELIMITED), + varint(incomplete_submsg.size()), + incomplete_submsg ), + "PrematureEofInSubmessageValue" + type_name); + } + } else if (type != FieldDescriptor::TYPE_GROUP) { + // Non-delimited, non-group: eligible for packing. + + // Packed region ends in the middle of a value. + ExpectHardParseFailureForProto( + cat( tag(rep_fieldnum, WireFormatLite::WIRETYPE_LENGTH_DELIMITED), + varint(incomplete.size()), + incomplete ), + "PrematureEofInPackedFieldValue" + type_name); + + // EOF in the middle of packed region. 
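For reference, a packed repeated field is a single LENGTH_DELIMITED record whose payload is the concatenation of the element encodings; the case that follows declares a one-byte payload and then ends the input, so the declared region runs past EOF. The byte-level sketch below is illustrative only; field 31 is repeated_int32 from conformance.proto above.

```cpp
#include <string>

int main() {
  // Packed repeated_int32 (field 31) holding {1, 300}:
  //   tag     = (31 << 3) | 2  -> varint bytes 0xFA 0x01
  //   length  = 3                 (one byte for 1, two bytes for 300)
  //   payload = 0x01 0xAC 0x02
  std::string well_formed("\xFA\x01\x03\x01\xAC\x02", 6);

  // The "PrematureEofInPackedField" shape: a declared 1-byte payload
  // followed by nothing at all.
  std::string truncated("\xFA\x01\x01", 3);

  (void)well_formed;
  (void)truncated;
  return 0;
}
```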
+ ExpectParseFailureForProto( + cat( tag(rep_fieldnum, WireFormatLite::WIRETYPE_LENGTH_DELIMITED), + varint(1) ), + "PrematureEofInPackedField" + type_name); + } +} + +void ConformanceTestSuite::SetFailureList(const vector& failure_list) { + expected_to_fail_.clear(); + std::copy(failure_list.begin(), failure_list.end(), + std::inserter(expected_to_fail_, expected_to_fail_.end())); +} + +bool ConformanceTestSuite::CheckSetEmpty(const set& set_to_check, + const char* msg) { + if (set_to_check.empty()) { + return true; + } else { + StringAppendF(&output_, "\n"); + StringAppendF(&output_, "%s:\n", msg); + for (set::const_iterator iter = set_to_check.begin(); + iter != set_to_check.end(); ++iter) { + StringAppendF(&output_, " %s\n", iter->c_str()); + } + StringAppendF(&output_, "\n"); + return false; + } +} + +bool ConformanceTestSuite::RunSuite(ConformanceTestRunner* runner, + std::string* output) { + runner_ = runner; + successes_ = 0; + expected_failures_ = 0; + skipped_.clear(); + test_names_.clear(); + unexpected_failing_tests_.clear(); + unexpected_succeeding_tests_.clear(); + type_resolver_.reset(NewTypeResolverForDescriptorPool( + kTypeUrlPrefix, DescriptorPool::generated_pool())); + type_url_ = GetTypeUrl(TestAllTypes::descriptor()); + + output_ = "\nCONFORMANCE TEST BEGIN ====================================\n\n"; + + for (int i = 1; i <= FieldDescriptor::MAX_TYPE; i++) { + if (i == FieldDescriptor::TYPE_GROUP) continue; + TestPrematureEOFForType(static_cast(i)); + } + + RunValidJsonTest("HelloWorld", "{\"optionalString\":\"Hello, World!\"}", + "optional_string: 'Hello, World!'"); + + // Test field name conventions. + RunValidJsonTest( + "FieldNameInSnakeCase", + R"({ + "fieldname1": 1, + "fieldName2": 2, + "FieldName3": 3 + })", + R"( + fieldname1: 1 + field_name2: 2 + _field_name3: 3 + )"); + RunValidJsonTest( + "FieldNameWithNumbers", + R"({ + "field0name5": 5, + "field0Name6": 6 + })", + R"( + field0name5: 5 + field_0_name6: 6 + )"); + RunValidJsonTest( + "FieldNameWithMixedCases", + R"({ + "fieldName7": 7, + "fieldName8": 8, + "fieldName9": 9, + "fieldName10": 10, + "fIELDNAME11": 11, + "fIELDName12": 12 + })", + R"( + fieldName7: 7 + FieldName8: 8 + field_Name9: 9 + Field_Name10: 10 + FIELD_NAME11: 11 + FIELD_name12: 12 + )"); + // Using the original proto field name in JSON is also allowed. + RunValidJsonTest( + "OriginalProtoFieldName", + R"({ + "fieldname1": 1, + "field_name2": 2, + "_field_name3": 3, + "field0name5": 5, + "field_0_name6": 6, + "fieldName7": 7, + "FieldName8": 8, + "field_Name9": 9, + "Field_Name10": 10, + "FIELD_NAME11": 11, + "FIELD_name12": 12 + })", + R"( + fieldname1: 1 + field_name2: 2 + _field_name3: 3 + field0name5: 5 + field_0_name6: 6 + fieldName7: 7 + FieldName8: 8 + field_Name9: 9 + Field_Name10: 10 + FIELD_NAME11: 11 + FIELD_name12: 12 + )"); + // Field names can be escaped. + RunValidJsonTest( + "FieldNameEscaped", + R"({"fieldn\u0061me1": 1})", + "fieldname1: 1"); + // Field names must be quoted (or it's not valid JSON). + ExpectParseFailureForJson( + "FieldNameNotQuoted", + "{fieldname1: 1}"); + // Trailing comma is not allowed (not valid JSON). + ExpectParseFailureForJson( + "TrailingCommaInAnObject", + R"({"fieldname1":1,})"); + // JSON doesn't support comments. + ExpectParseFailureForJson( + "JsonWithComments", + R"({ + // This is a comment. + "fieldname1": 1 + })"); + // Duplicated field names are not allowed. 
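Both the original proto field name and its lowerCamelCase JSON name identify the same field, which is why the duplicate-with-different-casing cases that follow must be rejected. The sketch below is consistent with the name-mapping cases above (field_name2 -> fieldName2, _field_name3 -> FieldName3) but is an illustration, not the library's implementation.

```cpp
#include <cctype>
#include <iostream>
#include <string>

// Drop underscores and capitalize the letter that follows each one.
static std::string ToJsonName(const std::string& field_name) {
  std::string out;
  bool capitalize_next = false;
  for (char c : field_name) {
    if (c == '_') {
      capitalize_next = true;
    } else if (capitalize_next) {
      out.push_back(
          static_cast<char>(std::toupper(static_cast<unsigned char>(c))));
      capitalize_next = false;
    } else {
      out.push_back(c);
    }
  }
  return out;
}

int main() {
  std::cout << ToJsonName("field_name2") << "\n";    // fieldName2
  std::cout << ToJsonName("_field_name3") << "\n";   // FieldName3
  std::cout << ToJsonName("field_0_name6") << "\n";  // field0Name6
  return 0;
}
```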
+ ExpectParseFailureForJson( + "FieldNameDuplicate", + R"({ + "optionalNestedMessage": {a: 1}, + "optionalNestedMessage": {} + })"); + ExpectParseFailureForJson( + "FieldNameDuplicateDifferentCasing1", + R"({ + "optional_nested_message": {a: 1}, + "optionalNestedMessage": {} + })"); + ExpectParseFailureForJson( + "FieldNameDuplicateDifferentCasing2", + R"({ + "optionalNestedMessage": {a: 1}, + "optional_nested_message": {} + })"); + // Serializers should use lowerCamelCase by default. + RunValidJsonTestWithValidator( + "FieldNameInLowerCamelCase", + R"({ + "fieldname1": 1, + "fieldName2": 2, + "FieldName3": 3 + })", + [](const Json::Value& value) { + return value.isMember("fieldname1") && + value.isMember("fieldName2") && + value.isMember("FieldName3"); + }); + RunValidJsonTestWithValidator( + "FieldNameWithNumbers", + R"({ + "field0name5": 5, + "field0Name6": 6 + })", + [](const Json::Value& value) { + return value.isMember("field0name5") && + value.isMember("field0Name6"); + }); + RunValidJsonTestWithValidator( + "FieldNameWithMixedCases", + R"({ + "fieldName7": 7, + "fieldName8": 8, + "fieldName9": 9, + "fieldName10": 10, + "fIELDNAME11": 11, + "fIELDName12": 12 + })", + [](const Json::Value& value) { + return value.isMember("fieldName7") && + value.isMember("fieldName8") && + value.isMember("fieldName9") && + value.isMember("fieldName10") && + value.isMember("fIELDNAME11") && + value.isMember("fIELDName12"); + }); + + // Integer fields. + RunValidJsonTest( + "Int32FieldMaxValue", + R"({"optionalInt32": 2147483647})", + "optional_int32: 2147483647"); + RunValidJsonTest( + "Int32FieldMinValue", + R"({"optionalInt32": -2147483648})", + "optional_int32: -2147483648"); + RunValidJsonTest( + "Uint32FieldMaxValue", + R"({"optionalUint32": 4294967295})", + "optional_uint32: 4294967295"); + RunValidJsonTest( + "Int64FieldMaxValue", + R"({"optionalInt64": "9223372036854775807"})", + "optional_int64: 9223372036854775807"); + RunValidJsonTest( + "Int64FieldMinValue", + R"({"optionalInt64": "-9223372036854775808"})", + "optional_int64: -9223372036854775808"); + RunValidJsonTest( + "Uint64FieldMaxValue", + R"({"optionalUint64": "18446744073709551615"})", + "optional_uint64: 18446744073709551615"); + RunValidJsonTest( + "Int64FieldMaxValueNotQuoted", + R"({"optionalInt64": 9223372036854775807})", + "optional_int64: 9223372036854775807"); + RunValidJsonTest( + "Int64FieldMinValueNotQuoted", + R"({"optionalInt64": -9223372036854775808})", + "optional_int64: -9223372036854775808"); + RunValidJsonTest( + "Uint64FieldMaxValueNotQuoted", + R"({"optionalUint64": 18446744073709551615})", + "optional_uint64: 18446744073709551615"); + // Values can be represented as JSON strings. + RunValidJsonTest( + "Int32FieldStringValue", + R"({"optionalInt32": "2147483647"})", + "optional_int32: 2147483647"); + RunValidJsonTest( + "Int32FieldStringValueEscaped", + R"({"optionalInt32": "2\u003147483647"})", + "optional_int32: 2147483647"); + + // Parsers reject out-of-bound integer values. 
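The out-of-range literals used in the next group of cases sit exactly one past the integer bounds; the small program below (for reference only) prints those bounds.

```cpp
#include <cstdint>
#include <cstdio>
#include <limits>

int main() {
  std::printf("int32:  [%lld, %lld]\n",   // [-2147483648, 2147483647]
              static_cast<long long>(std::numeric_limits<int32_t>::min()),
              static_cast<long long>(std::numeric_limits<int32_t>::max()));
  std::printf("uint32: [0, %llu]\n",      // 4294967295
              static_cast<unsigned long long>(
                  std::numeric_limits<uint32_t>::max()));
  std::printf("int64:  [%lld, %lld]\n",   // [-9223372036854775808, 9223372036854775807]
              static_cast<long long>(std::numeric_limits<int64_t>::min()),
              static_cast<long long>(std::numeric_limits<int64_t>::max()));
  std::printf("uint64: [0, %llu]\n",      // 18446744073709551615
              static_cast<unsigned long long>(
                  std::numeric_limits<uint64_t>::max()));
  return 0;
}
```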
+ ExpectParseFailureForJson( + "Int32FieldTooLarge", + R"({"optionalInt32": 2147483648})"); + ExpectParseFailureForJson( + "Int32FieldTooSmall", + R"({"optionalInt32": -2147483649})"); + ExpectParseFailureForJson( + "Uint32FieldTooLarge", + R"({"optionalUint32": 4294967296})"); + ExpectParseFailureForJson( + "Int64FieldTooLarge", + R"({"optionalInt64": "9223372036854775808"})"); + ExpectParseFailureForJson( + "Int64FieldTooSmall", + R"({"optionalInt64": "-9223372036854775809"})"); + ExpectParseFailureForJson( + "Uint64FieldTooLarge", + R"({"optionalUint64": "18446744073709551616"})"); + // Parser reject non-integer numeric values as well. + ExpectParseFailureForJson( + "Int32FieldNotInteger", + R"({"optionalInt32": 0.5})"); + ExpectParseFailureForJson( + "Uint32FieldNotInteger", + R"({"optionalUint32": 0.5})"); + ExpectParseFailureForJson( + "Int64FieldNotInteger", + R"({"optionalInt64": "0.5"})"); + ExpectParseFailureForJson( + "Uint64FieldNotInteger", + R"({"optionalUint64": "0.5"})"); + + // Integers but represented as float values are accepted. + RunValidJsonTest( + "Int32FieldFloatTrailingZero", + R"({"optionalInt32": 100000.000})", + "optional_int32: 100000"); + RunValidJsonTest( + "Int32FieldExponentialFormat", + R"({"optionalInt32": 1e5})", + "optional_int32: 100000"); + RunValidJsonTest( + "Int32FieldMaxFloatValue", + R"({"optionalInt32": 2.147483647e9})", + "optional_int32: 2147483647"); + RunValidJsonTest( + "Int32FieldMinFloatValue", + R"({"optionalInt32": -2.147483648e9})", + "optional_int32: -2147483648"); + RunValidJsonTest( + "Uint32FieldMaxFloatValue", + R"({"optionalUint32": 4.294967295e9})", + "optional_uint32: 4294967295"); + + // Parser reject non-numeric values. + ExpectParseFailureForJson( + "Int32FieldNotNumber", + R"({"optionalInt32": "3x3"})"); + ExpectParseFailureForJson( + "Uint32FieldNotNumber", + R"({"optionalUint32": "3x3"})"); + ExpectParseFailureForJson( + "Int64FieldNotNumber", + R"({"optionalInt64": "3x3"})"); + ExpectParseFailureForJson( + "Uint64FieldNotNumber", + R"({"optionalUint64": "3x3"})"); + // JSON does not allow "+" on numric values. + ExpectParseFailureForJson( + "Int32FieldPlusSign", + R"({"optionalInt32": +1})"); + // JSON doesn't allow leading 0s. + ExpectParseFailureForJson( + "Int32FieldLeadingZero", + R"({"optionalInt32": 01})"); + ExpectParseFailureForJson( + "Int32FieldNegativeWithLeadingZero", + R"({"optionalInt32": -01})"); + // String values must follow the same syntax rule. Specifically leading + // or traling spaces are not allowed. + ExpectParseFailureForJson( + "Int32FieldLeadingSpace", + R"({"optionalInt32": " 1"})"); + ExpectParseFailureForJson( + "Int32FieldTrailingSpace", + R"({"optionalInt32": "1 "})"); + + // 64-bit values are serialized as strings. + RunValidJsonTestWithValidator( + "Int64FieldBeString", + R"({"optionalInt64": 1})", + [](const Json::Value& value) { + return value["optionalInt64"].type() == Json::stringValue && + value["optionalInt64"].asString() == "1"; + }); + RunValidJsonTestWithValidator( + "Uint64FieldBeString", + R"({"optionalUint64": 1})", + [](const Json::Value& value) { + return value["optionalUint64"].type() == Json::stringValue && + value["optionalUint64"].asString() == "1"; + }); + + // Bool fields. + RunValidJsonTest( + "BoolFieldTrue", + R"({"optionalBool":true})", + "optional_bool: true"); + RunValidJsonTest( + "BoolFieldFalse", + R"({"optionalBool":false})", + "optional_bool: false"); + + // Other forms are not allowed. 
+ ExpectParseFailureForJson( + "BoolFieldIntegerZero", + R"({"optionalBool":0})"); + ExpectParseFailureForJson( + "BoolFieldIntegerOne", + R"({"optionalBool":1})"); + ExpectParseFailureForJson( + "BoolFieldCamelCaseTrue", + R"({"optionalBool":True})"); + ExpectParseFailureForJson( + "BoolFieldCamelCaseFalse", + R"({"optionalBool":False})"); + ExpectParseFailureForJson( + "BoolFieldAllCapitalTrue", + R"({"optionalBool":TRUE})"); + ExpectParseFailureForJson( + "BoolFieldAllCapitalFalse", + R"({"optionalBool":FALSE})"); + ExpectParseFailureForJson( + "BoolFieldDoubleQuotedTrue", + R"({"optionalBool":"true"})"); + ExpectParseFailureForJson( + "BoolFieldDoubleQuotedFalse", + R"({"optionalBool":"false"})"); + + // Float fields. + RunValidJsonTest( + "FloatFieldMinPositiveValue", + R"({"optionalFloat": 1.175494e-38})", + "optional_float: 1.175494e-38"); + RunValidJsonTest( + "FloatFieldMaxNegativeValue", + R"({"optionalFloat": -1.175494e-38})", + "optional_float: -1.175494e-38"); + RunValidJsonTest( + "FloatFieldMaxPositiveValue", + R"({"optionalFloat": 3.402823e+38})", + "optional_float: 3.402823e+38"); + RunValidJsonTest( + "FloatFieldMinNegativeValue", + R"({"optionalFloat": 3.402823e+38})", + "optional_float: 3.402823e+38"); + // Values can be quoted. + RunValidJsonTest( + "FloatFieldQuotedValue", + R"({"optionalFloat": "1"})", + "optional_float: 1"); + // Special values. + RunValidJsonTest( + "FloatFieldNan", + R"({"optionalFloat": "NaN"})", + "optional_float: nan"); + RunValidJsonTest( + "FloatFieldInfinity", + R"({"optionalFloat": "Infinity"})", + "optional_float: inf"); + RunValidJsonTest( + "FloatFieldNegativeInfinity", + R"({"optionalFloat": "-Infinity"})", + "optional_float: -inf"); + // Non-cannonical Nan will be correctly normalized. + { + TestAllTypes message; + // IEEE floating-point standard 32-bit quiet NaN: + // 0111 1111 1xxx xxxx xxxx xxxx xxxx xxxx + message.set_optional_float( + WireFormatLite::DecodeFloat(0x7FA12345)); + RunValidJsonTestWithProtobufInput( + "FloatFieldNormalizeQuietNan", message, + "optional_float: nan"); + // IEEE floating-point standard 64-bit signaling NaN: + // 1111 1111 1xxx xxxx xxxx xxxx xxxx xxxx + message.set_optional_float( + WireFormatLite::DecodeFloat(0xFFB54321)); + RunValidJsonTestWithProtobufInput( + "FloatFieldNormalizeSignalingNan", message, + "optional_float: nan"); + } + + // Special values must be quoted. + ExpectParseFailureForJson( + "FloatFieldNanNotQuoted", + R"({"optionalFloat": NaN})"); + ExpectParseFailureForJson( + "FloatFieldInfinityNotQuoted", + R"({"optionalFloat": Infinity})"); + ExpectParseFailureForJson( + "FloatFieldNegativeInfinityNotQuoted", + R"({"optionalFloat": -Infinity})"); + // Parsers should reject out-of-bound values. + ExpectParseFailureForJson( + "FloatFieldTooSmall", + R"({"optionalFloat": -3.502823e+38})"); + ExpectParseFailureForJson( + "FloatFieldTooLarge", + R"({"optionalFloat": 3.502823e+38})"); + + // Double fields. + RunValidJsonTest( + "DoubleFieldMinPositiveValue", + R"({"optionalDouble": 2.22507e-308})", + "optional_double: 2.22507e-308"); + RunValidJsonTest( + "DoubleFieldMaxNegativeValue", + R"({"optionalDouble": -2.22507e-308})", + "optional_double: -2.22507e-308"); + RunValidJsonTest( + "DoubleFieldMaxPositiveValue", + R"({"optionalDouble": 1.79769e+308})", + "optional_double: 1.79769e+308"); + RunValidJsonTest( + "DoubleFieldMinNegativeValue", + R"({"optionalDouble": -1.79769e+308})", + "optional_double: -1.79769e+308"); + // Values can be quoted. 
+ RunValidJsonTest( + "DoubleFieldQuotedValue", + R"({"optionalDouble": "1"})", + "optional_double: 1"); + // Speical values. + RunValidJsonTest( + "DoubleFieldNan", + R"({"optionalDouble": "NaN"})", + "optional_double: nan"); + RunValidJsonTest( + "DoubleFieldInfinity", + R"({"optionalDouble": "Infinity"})", + "optional_double: inf"); + RunValidJsonTest( + "DoubleFieldNegativeInfinity", + R"({"optionalDouble": "-Infinity"})", + "optional_double: -inf"); + // Non-cannonical Nan will be correctly normalized. + { + TestAllTypes message; + message.set_optional_double( + WireFormatLite::DecodeDouble(0x7FFA123456789ABCLL)); + RunValidJsonTestWithProtobufInput( + "DoubleFieldNormalizeQuietNan", message, + "optional_double: nan"); + message.set_optional_double( + WireFormatLite::DecodeDouble(0xFFFBCBA987654321LL)); + RunValidJsonTestWithProtobufInput( + "DoubleFieldNormalizeSignalingNan", message, + "optional_double: nan"); + } + + // Special values must be quoted. + ExpectParseFailureForJson( + "DoubleFieldNanNotQuoted", + R"({"optionalDouble": NaN})"); + ExpectParseFailureForJson( + "DoubleFieldInfinityNotQuoted", + R"({"optionalDouble": Infinity})"); + ExpectParseFailureForJson( + "DoubleFieldNegativeInfinityNotQuoted", + R"({"optionalDouble": -Infinity})"); + + // Parsers should reject out-of-bound values. + ExpectParseFailureForJson( + "DoubleFieldTooSmall", + R"({"optionalDouble": -1.89769e+308})"); + ExpectParseFailureForJson( + "DoubleFieldTooLarge", + R"({"optionalDouble": +1.89769e+308})"); + + // Enum fields. + RunValidJsonTest( + "EnumField", + R"({"optionalNestedEnum": "FOO"})", + "optional_nested_enum: FOO"); + // Enum values must be represented as strings. + ExpectParseFailureForJson( + "EnumFieldNotQuoted", + R"({"optionalNestedEnum": FOO})"); + // Numeric values are allowed. + RunValidJsonTest( + "EnumFieldNumericValueZero", + R"({"optionalNestedEnum": 0})", + "optional_nested_enum: FOO"); + RunValidJsonTest( + "EnumFieldNumericValueNonZero", + R"({"optionalNestedEnum": 1})", + "optional_nested_enum: BAR"); + // Unknown enum values are represented as numeric values. + RunValidJsonTestWithValidator( + "EnumFieldUnknownValue", + R"({"optionalNestedEnum": 123})", + [](const Json::Value& value) { + return value["optionalNestedEnum"].type() == Json::intValue && + value["optionalNestedEnum"].asInt() == 123; + }); + + // String fields. + RunValidJsonTest( + "StringField", + R"({"optionalString": "Hello world!"})", + "optional_string: \"Hello world!\""); + RunValidJsonTest( + "StringFieldUnicode", + // Google in Chinese. + R"({"optionalString": "è°·æ­Œ"})", + R"(optional_string: "è°·æ­Œ")"); + RunValidJsonTest( + "StringFieldEscape", + R"({"optionalString": "\"\\\/\b\f\n\r\t"})", + R"(optional_string: "\"\\/\b\f\n\r\t")"); + RunValidJsonTest( + "StringFieldUnicodeEscape", + R"({"optionalString": "\u8C37\u6B4C"})", + R"(optional_string: "è°·æ­Œ")"); + RunValidJsonTest( + "StringFieldUnicodeEscapeWithLowercaseHexLetters", + R"({"optionalString": "\u8c37\u6b4c"})", + R"(optional_string: "è°·æ­Œ")"); + RunValidJsonTest( + "StringFieldSurrogatePair", + // The character is an emoji: grinning face with smiling eyes. 😠+ R"({"optionalString": "\uD83D\uDE01"})", + R"(optional_string: "\xF0\x9F\x98\x81")"); + + // Unicode escapes must start with "\u" (lowercase u). 
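As a worked example for the surrogate-pair case above: "\uD83D\uDE01" combines into one code point outside the BMP, whose UTF-8 encoding is the 4-byte sequence F0 9F 98 81 expected by that test. The snippet below just redoes that arithmetic; it is illustrative, not part of the suite.

```cpp
#include <cstdint>
#include <cstdio>

int main() {
  uint32_t high = 0xD83D, low = 0xDE01;
  // Standard UTF-16 surrogate-pair combination.
  uint32_t cp = 0x10000 + ((high - 0xD800) << 10) + (low - 0xDC00);
  std::printf("code point: U+%X\n", cp);  // U+1F601

  // UTF-8 encoding of a code point in [0x10000, 0x10FFFF] takes four bytes.
  unsigned char utf8[4] = {
      static_cast<unsigned char>(0xF0 | (cp >> 18)),
      static_cast<unsigned char>(0x80 | ((cp >> 12) & 0x3F)),
      static_cast<unsigned char>(0x80 | ((cp >> 6) & 0x3F)),
      static_cast<unsigned char>(0x80 | (cp & 0x3F)),
  };
  std::printf("utf-8: %02X %02X %02X %02X\n",
              utf8[0], utf8[1], utf8[2], utf8[3]);  // F0 9F 98 81
  return 0;
}
```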
+ ExpectParseFailureForJson( + "StringFieldUppercaseEscapeLetter", + R"({"optionalString": "\U8C37\U6b4C"})"); + ExpectParseFailureForJson( + "StringFieldInvalidEscape", + R"({"optionalString": "\uXXXX\u6B4C"})"); + ExpectParseFailureForJson( + "StringFieldUnterminatedEscape", + R"({"optionalString": "\u8C3"})"); + ExpectParseFailureForJson( + "StringFieldUnpairedHighSurrogate", + R"({"optionalString": "\uD800"})"); + ExpectParseFailureForJson( + "StringFieldUnpairedLowSurrogate", + R"({"optionalString": "\uDC00"})"); + ExpectParseFailureForJson( + "StringFieldSurrogateInWrongOrder", + R"({"optionalString": "\uDE01\uD83D"})"); + ExpectParseFailureForJson( + "StringFieldNotAString", + R"({"optionalString": 12345})"); + + // Bytes fields. + RunValidJsonTest( + "BytesField", + R"({"optionalBytes": "AQI="})", + R"(optional_bytes: "\x01\x02")"); + ExpectParseFailureForJson( + "BytesFieldNoPadding", + R"({"optionalBytes": "AQI"})"); + ExpectParseFailureForJson( + "BytesFieldInvalidBase64Characters", + R"({"optionalBytes": "-_=="})"); + + // Message fields. + RunValidJsonTest( + "MessageField", + R"({"optionalNestedMessage": {"a": 1234}})", + "optional_nested_message: {a: 1234}"); + + // Oneof fields. + ExpectParseFailureForJson( + "OneofFieldDuplicate", + R"({"oneofUint32": 1, "oneofString": "test"})"); + + // Repeated fields. + RunValidJsonTest( + "PrimitiveRepeatedField", + R"({"repeatedInt32": [1, 2, 3, 4]})", + "repeated_int32: [1, 2, 3, 4]"); + RunValidJsonTest( + "EnumRepeatedField", + R"({"repeatedNestedEnum": ["FOO", "BAR", "BAZ"]})", + "repeated_nested_enum: [FOO, BAR, BAZ]"); + RunValidJsonTest( + "StringRepeatedField", + R"({"repeatedString": ["Hello", "world"]})", + R"(repeated_string: ["Hello", "world"])"); + RunValidJsonTest( + "BytesRepeatedField", + R"({"repeatedBytes": ["AAEC", "AQI="]})", + R"(repeated_bytes: ["\x00\x01\x02", "\x01\x02"])"); + RunValidJsonTest( + "MessageRepeatedField", + R"({"repeatedNestedMessage": [{"a": 1234}, {"a": 5678}]})", + "repeated_nested_message: {a: 1234}" + "repeated_nested_message: {a: 5678}"); + + // Repeated field elements are of incorrect type. + ExpectParseFailureForJson( + "RepeatedFieldWrongElementTypeExpectingIntegersGotBool", + R"({"repeatedInt32": [1, false, 3, 4]})"); + ExpectParseFailureForJson( + "RepeatedFieldWrongElementTypeExpectingIntegersGotString", + R"({"repeatedInt32": [1, 2, "name", 4]})"); + ExpectParseFailureForJson( + "RepeatedFieldWrongElementTypeExpectingIntegersGotMessage", + R"({"repeatedInt32": [1, 2, 3, {"a": 4}]})"); + ExpectParseFailureForJson( + "RepeatedFieldWrongElementTypeExpectingStringsGotInt", + R"({"repeatedString": ["1", 2, "3", "4"]})"); + ExpectParseFailureForJson( + "RepeatedFieldWrongElementTypeExpectingStringsGotBool", + R"({"repeatedString": ["1", "2", false, "4"]})"); + ExpectParseFailureForJson( + "RepeatedFieldWrongElementTypeExpectingStringsGotMessage", + R"({"repeatedString": ["1", 2, "3", {"a": 4}]})"); + ExpectParseFailureForJson( + "RepeatedFieldWrongElementTypeExpectingMessagesGotInt", + R"({"repeatedNestedMessage": [{"a": 1}, 2]})"); + ExpectParseFailureForJson( + "RepeatedFieldWrongElementTypeExpectingMessagesGotBool", + R"({"repeatedNestedMessage": [{"a": 1}, false]})"); + ExpectParseFailureForJson( + "RepeatedFieldWrongElementTypeExpectingMessagesGotString", + R"({"repeatedNestedMessage": [{"a": 1}, "2"]})"); + // Trailing comma in the repeated field is not allowed. + ExpectParseFailureForJson( + "RepeatedFieldTrailingComma", + R"({"repeatedInt32": [1, 2, 3, 4,]})"); + + // Map fields. 
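On the wire a map field is a repeated entry message with the key as field 1 and the value as field 2, which is why the expected text format in the map cases below spells each pair as {key: ... value: ...} even though JSON represents the whole map as a single object with string keys. The byte-level sketch below is an illustration; field 56 is map_int32_int32 from conformance.proto above.

```cpp
#include <string>

int main() {
  // One map_int32_int32 entry {key: 1, value: 2}:
  //   tag(56, LENGTH_DELIMITED) -> 0xC2 0x03
  //   entry length              -> 0x04
  //   key   (field 1, varint)   -> 0x08 0x01
  //   value (field 2, varint)   -> 0x10 0x02
  std::string entry("\xC2\x03\x04\x08\x01\x10\x02", 7);
  (void)entry;
  return 0;
}
```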
+ RunValidJsonTest( + "Int32MapField", + R"({"mapInt32Int32": {"1": 2, "3": 4}})", + "map_int32_int32: {key: 1 value: 2}" + "map_int32_int32: {key: 3 value: 4}"); + ExpectParseFailureForJson( + "Int32MapFieldKeyNotQuoted", + R"({"mapInt32Int32": {1: 2, 3: 4}})"); + RunValidJsonTest( + "Uint32MapField", + R"({"mapUint32Uint32": {"1": 2, "3": 4}})", + "map_uint32_uint32: {key: 1 value: 2}" + "map_uint32_uint32: {key: 3 value: 4}"); + ExpectParseFailureForJson( + "Uint32MapFieldKeyNotQuoted", + R"({"mapUint32Uint32": {1: 2, 3: 4}})"); + RunValidJsonTest( + "Int64MapField", + R"({"mapInt64Int64": {"1": 2, "3": 4}})", + "map_int64_int64: {key: 1 value: 2}" + "map_int64_int64: {key: 3 value: 4}"); + ExpectParseFailureForJson( + "Int64MapFieldKeyNotQuoted", + R"({"mapInt64Int64": {1: 2, 3: 4}})"); + RunValidJsonTest( + "Uint64MapField", + R"({"mapUint64Uint64": {"1": 2, "3": 4}})", + "map_uint64_uint64: {key: 1 value: 2}" + "map_uint64_uint64: {key: 3 value: 4}"); + ExpectParseFailureForJson( + "Uint64MapFieldKeyNotQuoted", + R"({"mapUint64Uint64": {1: 2, 3: 4}})"); + RunValidJsonTest( + "BoolMapField", + R"({"mapBoolBool": {"true": true, "false": false}})", + "map_bool_bool: {key: true value: true}" + "map_bool_bool: {key: false value: false}"); + ExpectParseFailureForJson( + "BoolMapFieldKeyNotQuoted", + R"({"mapBoolBool": {true: true, false: false}})"); + RunValidJsonTest( + "MessageMapField", + R"({ + "mapStringNestedMessage": { + "hello": {"a": 1234}, + "world": {"a": 5678} + } + })", + R"( + map_string_nested_message: { + key: "hello" + value: {a: 1234} + } + map_string_nested_message: { + key: "world" + value: {a: 5678} + } + )"); + // Since Map keys are represented as JSON strings, escaping should be allowed. + RunValidJsonTest( + "Int32MapEscapedKey", + R"({"mapInt32Int32": {"\u0031": 2}})", + "map_int32_int32: {key: 1 value: 2}"); + RunValidJsonTest( + "Int64MapEscapedKey", + R"({"mapInt64Int64": {"\u0031": 2}})", + "map_int64_int64: {key: 1 value: 2}"); + RunValidJsonTest( + "BoolMapEscapedKey", + R"({"mapBoolBool": {"tr\u0075e": true}})", + "map_bool_bool: {key: true value: true}"); + + // "null" is accepted for all fields types. + RunValidJsonTest( + "AllFieldAcceptNull", + R"({ + "optionalInt32": null, + "optionalInt64": null, + "optionalUint32": null, + "optionalUint64": null, + "optionalBool": null, + "optionalString": null, + "optionalBytes": null, + "optionalNestedEnum": null, + "optionalNestedMessage": null, + "repeatedInt32": null, + "repeatedInt64": null, + "repeatedUint32": null, + "repeatedUint64": null, + "repeatedBool": null, + "repeatedString": null, + "repeatedBytes": null, + "repeatedNestedEnum": null, + "repeatedNestedMessage": null, + "mapInt32Int32": null, + "mapBoolBool": null, + "mapStringNestedMessage": null + })", + ""); + + // Repeated field elements cannot be null. + ExpectParseFailureForJson( + "RepeatedFieldPrimitiveElementIsNull", + R"({"repeatedInt32": [1, null, 2]})"); + ExpectParseFailureForJson( + "RepeatedFieldMessageElementIsNull", + R"({"repeatedNestedMessage": [{"a":1}, null, {"a":2}]})"); + // Map field keys cannot be null. + ExpectParseFailureForJson( + "MapFieldKeyIsNull", + R"({"mapInt32Int32": {null: 1}})"); + // Map field values cannot be null. + ExpectParseFailureForJson( + "MapFieldValueIsNull", + R"({"mapInt32Int32": {"0": null}})"); + + // Wrapper types. 
+ RunValidJsonTest( + "OptionalBoolWrapper", + R"({"optionalBoolWrapper": false})", + "optional_bool_wrapper: {value: false}"); + RunValidJsonTest( + "OptionalInt32Wrapper", + R"({"optionalInt32Wrapper": 0})", + "optional_int32_wrapper: {value: 0}"); + RunValidJsonTest( + "OptionalUint32Wrapper", + R"({"optionalUint32Wrapper": 0})", + "optional_uint32_wrapper: {value: 0}"); + RunValidJsonTest( + "OptionalInt64Wrapper", + R"({"optionalInt64Wrapper": 0})", + "optional_int64_wrapper: {value: 0}"); + RunValidJsonTest( + "OptionalUint64Wrapper", + R"({"optionalUint64Wrapper": 0})", + "optional_uint64_wrapper: {value: 0}"); + RunValidJsonTest( + "OptionalFloatWrapper", + R"({"optionalFloatWrapper": 0})", + "optional_float_wrapper: {value: 0}"); + RunValidJsonTest( + "OptionalDoubleWrapper", + R"({"optionalDoubleWrapper": 0})", + "optional_double_wrapper: {value: 0}"); + RunValidJsonTest( + "OptionalStringWrapper", + R"({"optionalStringWrapper": ""})", + R"(optional_string_wrapper: {value: ""})"); + RunValidJsonTest( + "OptionalBytesWrapper", + R"({"optionalBytesWrapper": ""})", + R"(optional_bytes_wrapper: {value: ""})"); + RunValidJsonTest( + "OptionalWrapperTypesWithNonDefaultValue", + R"({ + "optionalBoolWrapper": true, + "optionalInt32Wrapper": 1, + "optionalUint32Wrapper": 1, + "optionalInt64Wrapper": "1", + "optionalUint64Wrapper": "1", + "optionalFloatWrapper": 1, + "optionalDoubleWrapper": 1, + "optionalStringWrapper": "1", + "optionalBytesWrapper": "AQI=" + })", + R"( + optional_bool_wrapper: {value: true} + optional_int32_wrapper: {value: 1} + optional_uint32_wrapper: {value: 1} + optional_int64_wrapper: {value: 1} + optional_uint64_wrapper: {value: 1} + optional_float_wrapper: {value: 1} + optional_double_wrapper: {value: 1} + optional_string_wrapper: {value: "1"} + optional_bytes_wrapper: {value: "\x01\x02"} + )"); + RunValidJsonTest( + "RepeatedBoolWrapper", + R"({"repeatedBoolWrapper": [true, false]})", + "repeated_bool_wrapper: {value: true}" + "repeated_bool_wrapper: {value: false}"); + RunValidJsonTest( + "RepeatedInt32Wrapper", + R"({"repeatedInt32Wrapper": [0, 1]})", + "repeated_int32_wrapper: {value: 0}" + "repeated_int32_wrapper: {value: 1}"); + RunValidJsonTest( + "RepeatedUint32Wrapper", + R"({"repeatedUint32Wrapper": [0, 1]})", + "repeated_uint32_wrapper: {value: 0}" + "repeated_uint32_wrapper: {value: 1}"); + RunValidJsonTest( + "RepeatedInt64Wrapper", + R"({"repeatedInt64Wrapper": [0, 1]})", + "repeated_int64_wrapper: {value: 0}" + "repeated_int64_wrapper: {value: 1}"); + RunValidJsonTest( + "RepeatedUint64Wrapper", + R"({"repeatedUint64Wrapper": [0, 1]})", + "repeated_uint64_wrapper: {value: 0}" + "repeated_uint64_wrapper: {value: 1}"); + RunValidJsonTest( + "RepeatedFloatWrapper", + R"({"repeatedFloatWrapper": [0, 1]})", + "repeated_float_wrapper: {value: 0}" + "repeated_float_wrapper: {value: 1}"); + RunValidJsonTest( + "RepeatedDoubleWrapper", + R"({"repeatedDoubleWrapper": [0, 1]})", + "repeated_double_wrapper: {value: 0}" + "repeated_double_wrapper: {value: 1}"); + RunValidJsonTest( + "RepeatedStringWrapper", + R"({"repeatedStringWrapper": ["", "AQI="]})", + R"( + repeated_string_wrapper: {value: ""} + repeated_string_wrapper: {value: "AQI="} + )"); + RunValidJsonTest( + "RepeatedBytesWrapper", + R"({"repeatedBytesWrapper": ["", "AQI="]})", + R"( + repeated_bytes_wrapper: {value: ""} + repeated_bytes_wrapper: {value: "\x01\x02"} + )"); + RunValidJsonTest( + "WrapperTypesWithNullValue", + R"({ + "optionalBoolWrapper": null, + "optionalInt32Wrapper": null, + 
"optionalUint32Wrapper": null, + "optionalInt64Wrapper": null, + "optionalUint64Wrapper": null, + "optionalFloatWrapper": null, + "optionalDoubleWrapper": null, + "optionalStringWrapper": null, + "optionalBytesWrapper": null, + "repeatedBoolWrapper": null, + "repeatedInt32Wrapper": null, + "repeatedUint32Wrapper": null, + "repeatedInt64Wrapper": null, + "repeatedUint64Wrapper": null, + "repeatedFloatWrapper": null, + "repeatedDoubleWrapper": null, + "repeatedStringWrapper": null, + "repeatedBytesWrapper": null + })", + ""); + + // Duration + RunValidJsonTest( + "DurationMinValue", + R"({"optionalDuration": "-315576000000.999999999s"})", + "optional_duration: {seconds: -315576000000 nanos: -999999999}"); + RunValidJsonTest( + "DurationMaxValue", + R"({"optionalDuration": "315576000000.999999999s"})", + "optional_duration: {seconds: 315576000000 nanos: 999999999}"); + RunValidJsonTest( + "DurationRepeatedValue", + R"({"repeatedDuration": ["1.5s", "-1.5s"]})", + "repeated_duration: {seconds: 1 nanos: 500000000}" + "repeated_duration: {seconds: -1 nanos: -500000000}"); + + ExpectParseFailureForJson( + "DurationMissingS", + R"({"optionalDuration": "1"})"); + ExpectParseFailureForJson( + "DurationJsonInputTooSmall", + R"({"optionalDuration": "-315576000001.000000000s"})"); + ExpectParseFailureForJson( + "DurationJsonInputTooLarge", + R"({"optionalDuration": "315576000001.000000000s"})"); + ExpectSerializeFailureForJson( + "DurationProtoInputTooSmall", + "optional_duration: {seconds: -315576000001 nanos: 0}"); + ExpectSerializeFailureForJson( + "DurationProtoInputTooLarge", + "optional_duration: {seconds: 315576000001 nanos: 0}"); + + RunValidJsonTestWithValidator( + "DurationHasZeroFractionalDigit", + R"({"optionalDuration": "1.000000000s"})", + [](const Json::Value& value) { + return value["optionalDuration"].asString() == "1s"; + }); + RunValidJsonTestWithValidator( + "DurationHas3FractionalDigits", + R"({"optionalDuration": "1.010000000s"})", + [](const Json::Value& value) { + return value["optionalDuration"].asString() == "1.010s"; + }); + RunValidJsonTestWithValidator( + "DurationHas6FractionalDigits", + R"({"optionalDuration": "1.000010000s"})", + [](const Json::Value& value) { + return value["optionalDuration"].asString() == "1.000010s"; + }); + RunValidJsonTestWithValidator( + "DurationHas9FractionalDigits", + R"({"optionalDuration": "1.000000010s"})", + [](const Json::Value& value) { + return value["optionalDuration"].asString() == "1.000000010s"; + }); + + // Timestamp + RunValidJsonTest( + "TimestampMinValue", + R"({"optionalTimestamp": "0001-01-01T00:00:00Z"})", + "optional_timestamp: {seconds: -62135596800}"); + RunValidJsonTest( + "TimestampMaxValue", + R"({"optionalTimestamp": "9999-12-31T23:59:59.999999999Z"})", + "optional_timestamp: {seconds: 253402300799 nanos: 999999999}"); + RunValidJsonTest( + "TimestampRepeatedValue", + R"({ + "repeatedTimestamp": [ + "0001-01-01T00:00:00Z", + "9999-12-31T23:59:59.999999999Z" + ] + })", + "repeated_timestamp: {seconds: -62135596800}" + "repeated_timestamp: {seconds: 253402300799 nanos: 999999999}"); + RunValidJsonTest( + "TimestampWithPositiveOffset", + R"({"optionalTimestamp": "1970-01-01T08:00:00+08:00"})", + "optional_timestamp: {seconds: 0}"); + RunValidJsonTest( + "TimestampWithNegativeOffset", + R"({"optionalTimestamp": "1969-12-31T16:00:00-08:00"})", + "optional_timestamp: {seconds: 0}"); + + ExpectParseFailureForJson( + "TimestampJsonInputTooSmall", + R"({"optionalTimestamp": "0000-01-01T00:00:00Z"})"); + ExpectParseFailureForJson( 
+ "TimestampJsonInputTooLarge", + R"({"optionalTimestamp": "10000-01-01T00:00:00Z"})"); + ExpectParseFailureForJson( + "TimestampJsonInputMissingZ", + R"({"optionalTimestamp": "0001-01-01T00:00:00"})"); + ExpectParseFailureForJson( + "TimestampJsonInputMissingT", + R"({"optionalTimestamp": "0001-01-01 00:00:00Z"})"); + ExpectParseFailureForJson( + "TimestampJsonInputLowercaseZ", + R"({"optionalTimestamp": "0001-01-01T00:00:00z"})"); + ExpectParseFailureForJson( + "TimestampJsonInputLowercaseT", + R"({"optionalTimestamp": "0001-01-01t00:00:00Z"})"); + ExpectSerializeFailureForJson( + "TimestampProtoInputTooSmall", + "optional_timestamp: {seconds: -62135596801}"); + ExpectSerializeFailureForJson( + "TimestampProtoInputTooLarge", + "optional_timestamp: {seconds: 253402300800}"); + RunValidJsonTestWithValidator( + "TimestampZeroNormalized", + R"({"optionalTimestamp": "1969-12-31T16:00:00-08:00"})", + [](const Json::Value& value) { + return value["optionalTimestamp"].asString() == + "1970-01-01T00:00:00Z"; + }); + RunValidJsonTestWithValidator( + "TimestampHasZeroFractionalDigit", + R"({"optionalTimestamp": "1970-01-01T00:00:00.000000000Z"})", + [](const Json::Value& value) { + return value["optionalTimestamp"].asString() == + "1970-01-01T00:00:00Z"; + }); + RunValidJsonTestWithValidator( + "TimestampHas3FractionalDigits", + R"({"optionalTimestamp": "1970-01-01T00:00:00.010000000Z"})", + [](const Json::Value& value) { + return value["optionalTimestamp"].asString() == + "1970-01-01T00:00:00.010Z"; + }); + RunValidJsonTestWithValidator( + "TimestampHas6FractionalDigits", + R"({"optionalTimestamp": "1970-01-01T00:00:00.000010000Z"})", + [](const Json::Value& value) { + return value["optionalTimestamp"].asString() == + "1970-01-01T00:00:00.000010Z"; + }); + RunValidJsonTestWithValidator( + "TimestampHas9FractionalDigits", + R"({"optionalTimestamp": "1970-01-01T00:00:00.000000010Z"})", + [](const Json::Value& value) { + return value["optionalTimestamp"].asString() == + "1970-01-01T00:00:00.000000010Z"; + }); + + // FieldMask + RunValidJsonTest( + "FieldMask", + R"({"optionalFieldMask": "foo,barBaz"})", + R"(optional_field_mask: {paths: "foo" paths: "bar_baz"})"); + ExpectParseFailureForJson( + "FieldMaskInvalidCharacter", + R"({"optionalFieldMask": "foo,bar_bar"})"); + ExpectSerializeFailureForJson( + "FieldMaskPathsDontRoundTrip", + R"(optional_field_mask: {paths: "fooBar"})"); + ExpectSerializeFailureForJson( + "FieldMaskNumbersDontRoundTrip", + R"(optional_field_mask: {paths: "foo_3_bar"})"); + ExpectSerializeFailureForJson( + "FieldMaskTooManyUnderscore", + R"(optional_field_mask: {paths: "foo__bar"})"); + + // Struct + RunValidJsonTest( + "Struct", + R"({ + "optionalStruct": { + "nullValue": null, + "intValue": 1234, + "boolValue": true, + "doubleValue": 1234.5678, + "stringValue": "Hello world!", + "listValue": [1234, "5678"], + "objectValue": { + "value": 0 + } + } + })", + R"( + optional_struct: { + fields: { + key: "nullValue" + value: {null_value: NULL_VALUE} + } + fields: { + key: "intValue" + value: {number_value: 1234} + } + fields: { + key: "boolValue" + value: {bool_value: true} + } + fields: { + key: "doubleValue" + value: {number_value: 1234.5678} + } + fields: { + key: "stringValue" + value: {string_value: "Hello world!"} + } + fields: { + key: "listValue" + value: { + list_value: { + values: { + number_value: 1234 + } + values: { + string_value: "5678" + } + } + } + } + fields: { + key: "objectValue" + value: { + struct_value: { + fields: { + key: "value" + value: { + 
number_value: 0 + } + } + } + } + } + } + )"); + // Value + RunValidJsonTest( + "ValueAcceptInteger", + R"({"optionalValue": 1})", + "optional_value: { number_value: 1}"); + RunValidJsonTest( + "ValueAcceptFloat", + R"({"optionalValue": 1.5})", + "optional_value: { number_value: 1.5}"); + RunValidJsonTest( + "ValueAcceptBool", + R"({"optionalValue": false})", + "optional_value: { bool_value: false}"); + RunValidJsonTest( + "ValueAcceptNull", + R"({"optionalValue": null})", + "optional_value: { null_value: NULL_VALUE}"); + RunValidJsonTest( + "ValueAcceptString", + R"({"optionalValue": "hello"})", + R"(optional_value: { string_value: "hello"})"); + RunValidJsonTest( + "ValueAcceptList", + R"({"optionalValue": [0, "hello"]})", + R"( + optional_value: { + list_value: { + values: { + number_value: 0 + } + values: { + string_value: "hello" + } + } + } + )"); + RunValidJsonTest( + "ValueAcceptObject", + R"({"optionalValue": {"value": 1}})", + R"( + optional_value: { + struct_value: { + fields: { + key: "value" + value: { + number_value: 1 + } + } + } + } + )"); + + // Any + RunValidJsonTest( + "Any", + R"({ + "optionalAny": { + "@type": "type.googleapis.com/conformance.TestAllTypes", + "optionalInt32": 12345 + } + })", + R"( + optional_any: { + [type.googleapis.com/conformance.TestAllTypes] { + optional_int32: 12345 + } + } + )"); + RunValidJsonTest( + "AnyNested", + R"({ + "optionalAny": { + "@type": "type.googleapis.com/google.protobuf.Any", + "value": { + "@type": "type.googleapis.com/conformance.TestAllTypes", + "optionalInt32": 12345 + } + } + })", + R"( + optional_any: { + [type.googleapis.com/google.protobuf.Any] { + [type.googleapis.com/conformance.TestAllTypes] { + optional_int32: 12345 + } + } + } + )"); + // The special "@type" tag is not required to appear first. + RunValidJsonTest( + "AnyUnorderedTypeTag", + R"({ + "optionalAny": { + "optionalInt32": 12345, + "@type": "type.googleapis.com/conformance.TestAllTypes" + } + })", + R"( + optional_any: { + [type.googleapis.com/conformance.TestAllTypes] { + optional_int32: 12345 + } + } + )"); + // Well-known types in Any. 
+ RunValidJsonTest( + "AnyWithInt32ValueWrapper", + R"({ + "optionalAny": { + "@type": "type.googleapis.com/google.protobuf.Int32Value", + "value": 12345 + } + })", + R"( + optional_any: { + [type.googleapis.com/google.protobuf.Int32Value] { + value: 12345 + } + } + )"); + RunValidJsonTest( + "AnyWithDuration", + R"({ + "optionalAny": { + "@type": "type.googleapis.com/google.protobuf.Duration", + "value": "1.5s" + } + })", + R"( + optional_any: { + [type.googleapis.com/google.protobuf.Duration] { + seconds: 1 + nanos: 500000000 + } + } + )"); + RunValidJsonTest( + "AnyWithTimestamp", + R"({ + "optionalAny": { + "@type": "type.googleapis.com/google.protobuf.Timestamp", + "value": "1970-01-01T00:00:00Z" + } + })", + R"( + optional_any: { + [type.googleapis.com/google.protobuf.Timestamp] { + seconds: 0 + nanos: 0 + } + } + )"); + RunValidJsonTest( + "AnyWithFieldMask", + R"({ + "optionalAny": { + "@type": "type.googleapis.com/google.protobuf.FieldMask", + "value": "foo,barBaz" + } + })", + R"( + optional_any: { + [type.googleapis.com/google.protobuf.FieldMask] { + paths: ["foo", "bar_baz"] + } + } + )"); + RunValidJsonTest( + "AnyWithStruct", + R"({ + "optionalAny": { + "@type": "type.googleapis.com/google.protobuf.Struct", + "value": { + "foo": 1 + } + } + })", + R"( + optional_any: { + [type.googleapis.com/google.protobuf.Struct] { + fields: { + key: "foo" + value: { + number_value: 1 + } + } + } + } + )"); + RunValidJsonTest( + "AnyWithValueForJsonObject", + R"({ + "optionalAny": { + "@type": "type.googleapis.com/google.protobuf.Value", + "value": { + "foo": 1 + } + } + })", + R"( + optional_any: { + [type.googleapis.com/google.protobuf.Value] { + struct_value: { + fields: { + key: "foo" + value: { + number_value: 1 + } + } + } + } + } + )"); + RunValidJsonTest( + "AnyWithValueForInteger", + R"({ + "optionalAny": { + "@type": "type.googleapis.com/google.protobuf.Value", + "value": 1 + } + })", + R"( + optional_any: { + [type.googleapis.com/google.protobuf.Value] { + number_value: 1 + } + } + )"); + + bool ok = true; + if (!CheckSetEmpty(expected_to_fail_, + "These tests were listed in the failure list, but they " + "don't exist. Remove them from the failure list")) { + ok = false; + } + if (!CheckSetEmpty(unexpected_failing_tests_, + "These tests failed. If they can't be fixed right now, " + "you can add them to the failure list so the overall " + "suite can succeed")) { + ok = false; + } + + // Sometimes the testee may be fixed before we update the failure list (e.g., + // the testee is from a different component). We warn about this case but + // don't consider it an overall test failure. + CheckSetEmpty(unexpected_succeeding_tests_, + "These tests succeeded, even though they were listed in " + "the failure list. Remove them from the failure list"); + + if (verbose_) { + CheckSetEmpty(skipped_, + "These tests were skipped (probably because support for some " + "features is not implemented)"); + } + + StringAppendF(&output_, + "CONFORMANCE SUITE %s: %d successes, %d skipped, " + "%d expected failures, %d unexpected failures.\n", + ok ? 
"PASSED" : "FAILED", successes_, skipped_.size(), + expected_failures_, unexpected_failing_tests_.size()); + StringAppendF(&output_, "\n"); + + output->assign(output_); + + return ok; +} + +} // namespace protobuf +} // namespace google diff --git a/packager/third_party/protobuf/conformance/conformance_test.h b/packager/third_party/protobuf/conformance/conformance_test.h new file mode 100644 index 0000000000..75fc97bc1a --- /dev/null +++ b/packager/third_party/protobuf/conformance/conformance_test.h @@ -0,0 +1,178 @@ +// Protocol Buffers - Google's data interchange format +// Copyright 2008 Google Inc. All rights reserved. +// https://developers.google.com/protocol-buffers/ +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + + +// This file defines a protocol for running the conformance test suite +// in-process. In other words, the suite itself will run in the same process as +// the code under test. +// +// For pros and cons of this approach, please see conformance.proto. + +#ifndef CONFORMANCE_CONFORMANCE_TEST_H +#define CONFORMANCE_CONFORMANCE_TEST_H + +#include +#include +#include +#include +#include + +#include "third_party/jsoncpp/json.h" + +namespace conformance { +class ConformanceRequest; +class ConformanceResponse; +class TestAllTypes; +} // namespace conformance + +namespace google { +namespace protobuf { + +class ConformanceTestRunner { + public: + virtual ~ConformanceTestRunner() {} + + // Call to run a single conformance test. + // + // "input" is a serialized conformance.ConformanceRequest. + // "output" should be set to a serialized conformance.ConformanceResponse. + // + // If there is any error in running the test itself, set "runtime_error" in + // the response. + virtual void RunTest(const std::string& test_name, + const std::string& input, + std::string* output) = 0; +}; + +// Class representing the test suite itself. To run it, implement your own +// class derived from ConformanceTestRunner and then write code like: +// +// class MyConformanceTestRunner : public ConformanceTestRunner { +// public: +// virtual void RunTest(...) 
{ +// // INSERT YOUR FRAMEWORK-SPECIFIC CODE HERE. +// } +// }; +// +// int main() { +// MyConformanceTestRunner runner; +// google::protobuf::ConformanceTestSuite suite; +// +// std::string output; +// suite.RunSuite(&runner, &output); +// } +// +class ConformanceTestSuite { + public: + ConformanceTestSuite() : verbose_(false) {} + + void SetVerbose(bool verbose) { verbose_ = verbose; } + + // Sets the list of tests that are expected to fail when RunSuite() is called. + // RunSuite() will fail unless the set of failing tests is exactly the same + // as this list. + void SetFailureList(const std::vector& failure_list); + + // Run all the conformance tests against the given test runner. + // Test output will be stored in "output". + // + // Returns true if the set of failing tests was exactly the same as the + // failure list. If SetFailureList() was not called, returns true if all + // tests passed. + bool RunSuite(ConformanceTestRunner* runner, std::string* output); + + private: + void ReportSuccess(const std::string& test_name); + void ReportFailure(const string& test_name, + const conformance::ConformanceRequest& request, + const conformance::ConformanceResponse& response, + const char* fmt, ...); + void ReportSkip(const string& test_name, + const conformance::ConformanceRequest& request, + const conformance::ConformanceResponse& response); + void RunTest(const std::string& test_name, + const conformance::ConformanceRequest& request, + conformance::ConformanceResponse* response); + void RunValidInputTest(const string& test_name, const string& input, + conformance::WireFormat input_format, + const string& equivalent_text_format, + conformance::WireFormat requested_output); + void RunValidJsonTest(const string& test_name, const string& input_json, + const string& equivalent_text_format); + void RunValidJsonTestWithProtobufInput(const string& test_name, + const conformance::TestAllTypes& input, + const string& equivalent_text_format); + + typedef std::function Validator; + void RunValidJsonTestWithValidator(const string& test_name, + const string& input_json, + const Validator& validator); + void ExpectParseFailureForJson(const string& test_name, + const string& input_json); + void ExpectSerializeFailureForJson(const string& test_name, + const string& text_format); + void ExpectParseFailureForProto(const std::string& proto, + const std::string& test_name); + void ExpectHardParseFailureForProto(const std::string& proto, + const std::string& test_name); + void TestPrematureEOFForType(google::protobuf::FieldDescriptor::Type type); + bool CheckSetEmpty(const set& set_to_check, const char* msg); + ConformanceTestRunner* runner_; + int successes_; + int expected_failures_; + bool verbose_; + std::string output_; + + // The set of test names that are expected to fail in this run, but haven't + // failed yet. + std::set expected_to_fail_; + + // The set of test names that have been run. Used to ensure that there are no + // duplicate names in the suite. + std::set test_names_; + + // The set of tests that failed, but weren't expected to. + std::set unexpected_failing_tests_; + + // The set of tests that succeeded, but weren't expected to. 
+ std::set unexpected_succeeding_tests_; + + // The set of tests that the testee opted out of; + std::set skipped_; + + google::protobuf::internal::scoped_ptr + type_resolver_; + std::string type_url_; +}; + +} // namespace protobuf +} // namespace google + +#endif // CONFORMANCE_CONFORMANCE_TEST_H diff --git a/packager/third_party/protobuf/conformance/conformance_test_runner.cc b/packager/third_party/protobuf/conformance/conformance_test_runner.cc new file mode 100644 index 0000000000..376a60b9aa --- /dev/null +++ b/packager/third_party/protobuf/conformance/conformance_test_runner.cc @@ -0,0 +1,312 @@ +// Protocol Buffers - Google's data interchange format +// Copyright 2008 Google Inc. All rights reserved. +// https://developers.google.com/protocol-buffers/ +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +// This file contains a program for running the test suite in a separate +// process. The other alternative is to run the suite in-process. See +// conformance.proto for pros/cons of these two options. +// +// This program will fork the process under test and communicate with it over +// its stdin/stdout: +// +// +--------+ pipe +----------+ +// | tester | <------> | testee | +// | | | | +// | C++ | | any lang | +// +--------+ +----------+ +// +// The tester contains all of the test cases and their expected output. +// The testee is a simple program written in the target language that reads +// each test case and attempts to produce acceptable output for it. +// +// Every test consists of a ConformanceRequest/ConformanceResponse +// request/reply pair. The protocol on the pipe is simply: +// +// 1. tester sends 4-byte length N (little endian) +// 2. tester sends N bytes representing a ConformanceRequest proto +// 3. testee sends 4-byte length M (little endian) +// 4. 
testee sends M bytes representing a ConformanceResponse proto + +#include +#include +#include +#include +#include +#include +#include + +#include + +#include "conformance.pb.h" +#include "conformance_test.h" + +using conformance::ConformanceRequest; +using conformance::ConformanceResponse; +using google::protobuf::internal::scoped_array; +using google::protobuf::StringAppendF; +using std::string; +using std::vector; + +#define STRINGIFY(x) #x +#define TOSTRING(x) STRINGIFY(x) +#define CHECK_SYSCALL(call) \ + if (call < 0) { \ + perror(#call " " __FILE__ ":" TOSTRING(__LINE__)); \ + exit(1); \ + } + +// Test runner that spawns the process being tested and communicates with it +// over a pipe. +class ForkPipeRunner : public google::protobuf::ConformanceTestRunner { + public: + ForkPipeRunner(const std::string &executable) + : child_pid_(-1), executable_(executable) {} + + virtual ~ForkPipeRunner() {} + + void RunTest(const std::string& test_name, + const std::string& request, + std::string* response) { + if (child_pid_ < 0) { + SpawnTestProgram(); + } + + current_test_name_ = test_name; + + uint32_t len = request.size(); + CheckedWrite(write_fd_, &len, sizeof(uint32_t)); + CheckedWrite(write_fd_, request.c_str(), request.size()); + + if (!TryRead(read_fd_, &len, sizeof(uint32_t))) { + // We failed to read from the child, assume a crash and try to reap. + GOOGLE_LOG(INFO) << "Trying to reap child, pid=" << child_pid_; + + int status; + waitpid(child_pid_, &status, WEXITED); + + string error_msg; + if (WIFEXITED(status)) { + StringAppendF(&error_msg, + "child exited, status=%d", WEXITSTATUS(status)); + } else if (WIFSIGNALED(status)) { + StringAppendF(&error_msg, + "child killed by signal %d", WTERMSIG(status)); + } + GOOGLE_LOG(INFO) << error_msg; + child_pid_ = -1; + + conformance::ConformanceResponse response_obj; + response_obj.set_runtime_error(error_msg); + response_obj.SerializeToString(response); + return; + } + + response->resize(len); + CheckedRead(read_fd_, (void*)response->c_str(), len); + } + + private: + // TODO(haberman): make this work on Windows, instead of using these + // UNIX-specific APIs. + // + // There is a platform-agnostic API in + // src/google/protobuf/compiler/subprocess.h + // + // However that API only supports sending a single message to the subprocess. + // We really want to be able to send messages and receive responses one at a + // time: + // + // 1. Spawning a new process for each test would take way too long for thousands + // of tests and subprocesses like java that can take 100ms or more to start + // up. + // + // 2. Sending all the tests in one big message and receiving all results in one + // big message would take away our visibility about which test(s) caused a + // crash or other fatal error. It would also give us only a single failure + // instead of all of them. + void SpawnTestProgram() { + int toproc_pipe_fd[2]; + int fromproc_pipe_fd[2]; + if (pipe(toproc_pipe_fd) < 0 || pipe(fromproc_pipe_fd) < 0) { + perror("pipe"); + exit(1); + } + + pid_t pid = fork(); + if (pid < 0) { + perror("fork"); + exit(1); + } + + if (pid) { + // Parent. + CHECK_SYSCALL(close(toproc_pipe_fd[0])); + CHECK_SYSCALL(close(fromproc_pipe_fd[1])); + write_fd_ = toproc_pipe_fd[1]; + read_fd_ = fromproc_pipe_fd[0]; + child_pid_ = pid; + } else { + // Child. 
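+      // In the forked child: point stdin/stdout at the two pipes so the
+      // testee reads requests from the tester and writes responses back,
+      // then exec the test program (the exec'd image inherits these fds).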
+ CHECK_SYSCALL(close(STDIN_FILENO)); + CHECK_SYSCALL(close(STDOUT_FILENO)); + CHECK_SYSCALL(dup2(toproc_pipe_fd[0], STDIN_FILENO)); + CHECK_SYSCALL(dup2(fromproc_pipe_fd[1], STDOUT_FILENO)); + + CHECK_SYSCALL(close(toproc_pipe_fd[0])); + CHECK_SYSCALL(close(fromproc_pipe_fd[1])); + CHECK_SYSCALL(close(toproc_pipe_fd[1])); + CHECK_SYSCALL(close(fromproc_pipe_fd[0])); + + scoped_array executable(new char[executable_.size() + 1]); + memcpy(executable.get(), executable_.c_str(), executable_.size()); + executable[executable_.size()] = '\0'; + + char *const argv[] = {executable.get(), NULL}; + CHECK_SYSCALL(execv(executable.get(), argv)); // Never returns. + } + } + + void CheckedWrite(int fd, const void *buf, size_t len) { + if (write(fd, buf, len) != len) { + GOOGLE_LOG(FATAL) << current_test_name_ + << ": error writing to test program: " + << strerror(errno); + } + } + + bool TryRead(int fd, void *buf, size_t len) { + size_t ofs = 0; + while (len > 0) { + ssize_t bytes_read = read(fd, (char*)buf + ofs, len); + + if (bytes_read == 0) { + GOOGLE_LOG(ERROR) << current_test_name_ + << ": unexpected EOF from test program"; + return false; + } else if (bytes_read < 0) { + GOOGLE_LOG(ERROR) << current_test_name_ + << ": error reading from test program: " + << strerror(errno); + return false; + } + + len -= bytes_read; + ofs += bytes_read; + } + + return true; + } + + void CheckedRead(int fd, void *buf, size_t len) { + if (!TryRead(fd, buf, len)) { + GOOGLE_LOG(FATAL) << current_test_name_ + << ": error reading from test program: " + << strerror(errno); + } + } + + int write_fd_; + int read_fd_; + pid_t child_pid_; + std::string executable_; + std::string current_test_name_; +}; + +void UsageError() { + fprintf(stderr, + "Usage: conformance-test-runner [options] \n"); + fprintf(stderr, "\n"); + fprintf(stderr, "Options:\n"); + fprintf(stderr, + " --failure_list Use to specify list of tests\n"); + fprintf(stderr, + " that are expected to fail. File\n"); + fprintf(stderr, + " should contain one test name per\n"); + fprintf(stderr, + " line. Use '#' for comments.\n"); + exit(1); +} + +void ParseFailureList(const char *filename, vector* failure_list) { + std::ifstream infile(filename); + + if (!infile.is_open()) { + fprintf(stderr, "Couldn't open failure list file: %s\n", filename); + exit(1); + } + + for (string line; getline(infile, line);) { + // Remove whitespace. + line.erase(std::remove_if(line.begin(), line.end(), ::isspace), + line.end()); + + // Remove comments. + line = line.substr(0, line.find("#")); + + if (!line.empty()) { + failure_list->push_back(line); + } + } +} + +int main(int argc, char *argv[]) { + char *program; + google::protobuf::ConformanceTestSuite suite; + + vector failure_list; + + for (int arg = 1; arg < argc; ++arg) { + if (strcmp(argv[arg], "--failure_list") == 0) { + if (++arg == argc) UsageError(); + ParseFailureList(argv[arg], &failure_list); + } else if (strcmp(argv[arg], "--verbose") == 0) { + suite.SetVerbose(true); + } else if (argv[arg][0] == '-') { + fprintf(stderr, "Unknown option: %s\n", argv[arg]); + UsageError(); + } else { + if (arg != argc - 1) { + fprintf(stderr, "Too many arguments.\n"); + UsageError(); + } + program = argv[arg]; + } + } + + suite.SetFailureList(failure_list); + ForkPipeRunner runner(program); + + std::string output; + bool ok = suite.RunSuite(&runner, &output); + + fwrite(output.c_str(), 1, output.size(), stderr); + + return ok ? 
EXIT_SUCCESS : EXIT_FAILURE; +} diff --git a/packager/third_party/protobuf/conformance/failure_list_cpp.txt b/packager/third_party/protobuf/conformance/failure_list_cpp.txt new file mode 100644 index 0000000000..2ddf831c03 --- /dev/null +++ b/packager/third_party/protobuf/conformance/failure_list_cpp.txt @@ -0,0 +1,106 @@ +# This is the list of conformance tests that are known to fail for the C++ +# implementation right now. These should be fixed. +# +# By listing them here we can keep tabs on which ones are failing and be sure +# that we don't introduce regressions in other tests. +# +# TODO(haberman): insert links to corresponding bugs tracking the issue. +# Should we use GitHub issues or the Google-internal bug tracker? + +FieldMaskNumbersDontRoundTrip.JsonOutput +FieldMaskPathsDontRoundTrip.JsonOutput +FieldMaskTooManyUnderscore.JsonOutput +JsonInput.AnyUnorderedTypeTag.JsonOutput +JsonInput.AnyUnorderedTypeTag.ProtobufOutput +JsonInput.AnyWithValueForInteger.JsonOutput +JsonInput.AnyWithValueForInteger.ProtobufOutput +JsonInput.AnyWithValueForJsonObject.JsonOutput +JsonInput.AnyWithValueForJsonObject.ProtobufOutput +JsonInput.BoolFieldDoubleQuotedFalse +JsonInput.BoolFieldDoubleQuotedTrue +JsonInput.BoolFieldIntegerOne +JsonInput.BoolFieldIntegerZero +JsonInput.BytesFieldInvalidBase64Characters +JsonInput.BytesFieldNoPadding +JsonInput.DoubleFieldTooSmall +JsonInput.DurationHasZeroFractionalDigit.Validator +JsonInput.DurationJsonInputTooLarge +JsonInput.DurationJsonInputTooSmall +JsonInput.DurationMissingS +JsonInput.EnumFieldUnknownValue.Validator +JsonInput.FieldMaskInvalidCharacter +JsonInput.FieldNameDuplicate +JsonInput.FieldNameDuplicateDifferentCasing1 +JsonInput.FieldNameDuplicateDifferentCasing2 +JsonInput.FieldNameInLowerCamelCase.Validator +JsonInput.FieldNameInSnakeCase.JsonOutput +JsonInput.FieldNameInSnakeCase.ProtobufOutput +JsonInput.FieldNameNotQuoted +JsonInput.FloatFieldTooLarge +JsonInput.FloatFieldTooSmall +JsonInput.Int32FieldLeadingSpace +JsonInput.Int32FieldLeadingZero +JsonInput.Int32FieldMinFloatValue.JsonOutput +JsonInput.Int32FieldMinFloatValue.ProtobufOutput +JsonInput.Int32FieldMinValue.JsonOutput +JsonInput.Int32FieldMinValue.ProtobufOutput +JsonInput.Int32FieldNegativeWithLeadingZero +JsonInput.Int32FieldNotInteger +JsonInput.Int32FieldNotNumber +JsonInput.Int32FieldTooLarge +JsonInput.Int32FieldTooSmall +JsonInput.Int32FieldTrailingSpace +JsonInput.Int64FieldNotInteger +JsonInput.Int64FieldNotNumber +JsonInput.Int64FieldTooLarge +JsonInput.Int64FieldTooSmall +JsonInput.MapFieldValueIsNull +JsonInput.OneofFieldDuplicate +JsonInput.RepeatedFieldMessageElementIsNull +JsonInput.RepeatedFieldPrimitiveElementIsNull +JsonInput.RepeatedFieldTrailingComma +JsonInput.RepeatedFieldWrongElementTypeExpectingIntegersGotBool +JsonInput.RepeatedFieldWrongElementTypeExpectingIntegersGotMessage +JsonInput.RepeatedFieldWrongElementTypeExpectingIntegersGotString +JsonInput.RepeatedFieldWrongElementTypeExpectingMessagesGotBool +JsonInput.RepeatedFieldWrongElementTypeExpectingMessagesGotInt +JsonInput.RepeatedFieldWrongElementTypeExpectingMessagesGotString +JsonInput.RepeatedFieldWrongElementTypeExpectingStringsGotBool +JsonInput.RepeatedFieldWrongElementTypeExpectingStringsGotInt +JsonInput.RepeatedFieldWrongElementTypeExpectingStringsGotMessage +JsonInput.StringFieldNotAString +JsonInput.StringFieldSurrogateInWrongOrder +JsonInput.StringFieldSurrogatePair.JsonOutput +JsonInput.StringFieldSurrogatePair.ProtobufOutput +JsonInput.StringFieldUnpairedHighSurrogate 
+JsonInput.StringFieldUnpairedLowSurrogate +JsonInput.StringFieldUppercaseEscapeLetter +JsonInput.TimestampJsonInputLowercaseT +JsonInput.TimestampJsonInputLowercaseZ +JsonInput.TimestampJsonInputMissingT +JsonInput.TimestampJsonInputMissingZ +JsonInput.TimestampJsonInputTooLarge +JsonInput.TimestampJsonInputTooSmall +JsonInput.TrailingCommaInAnObject +JsonInput.Uint32FieldNotInteger +JsonInput.Uint32FieldNotNumber +JsonInput.Uint32FieldTooLarge +JsonInput.Uint64FieldNotInteger +JsonInput.Uint64FieldNotNumber +JsonInput.Uint64FieldTooLarge +JsonInput.WrapperTypesWithNullValue.JsonOutput +JsonInput.WrapperTypesWithNullValue.ProtobufOutput +ProtobufInput.PrematureEofBeforeKnownRepeatedValue.MESSAGE +ProtobufInput.PrematureEofInDelimitedDataForKnownNonRepeatedValue.MESSAGE +ProtobufInput.PrematureEofInDelimitedDataForKnownRepeatedValue.MESSAGE +ProtobufInput.PrematureEofInPackedField.BOOL +ProtobufInput.PrematureEofInPackedField.ENUM +ProtobufInput.PrematureEofInPackedField.INT32 +ProtobufInput.PrematureEofInPackedField.INT64 +ProtobufInput.PrematureEofInPackedField.SINT32 +ProtobufInput.PrematureEofInPackedField.SINT64 +ProtobufInput.PrematureEofInPackedField.UINT32 +ProtobufInput.PrematureEofInPackedField.UINT64 +ProtobufInput.PrematureEofInsideKnownRepeatedValue.MESSAGE +TimestampProtoInputTooLarge.JsonOutput +TimestampProtoInputTooSmall.JsonOutput diff --git a/packager/third_party/protobuf/conformance/failure_list_csharp.txt b/packager/third_party/protobuf/conformance/failure_list_csharp.txt new file mode 100644 index 0000000000..a46cee472b --- /dev/null +++ b/packager/third_party/protobuf/conformance/failure_list_csharp.txt @@ -0,0 +1,16 @@ +JsonInput.AnyWithValueForInteger.JsonOutput +JsonInput.AnyWithValueForJsonObject.JsonOutput +JsonInput.FieldNameInLowerCamelCase.Validator +JsonInput.FieldNameInSnakeCase.JsonOutput +JsonInput.FieldNameInSnakeCase.ProtobufOutput +JsonInput.FieldNameWithMixedCases.JsonOutput +JsonInput.FieldNameWithMixedCases.ProtobufOutput +JsonInput.FieldNameWithMixedCases.Validator +JsonInput.Int32FieldMinFloatValue.JsonOutput +JsonInput.Int32FieldMinValue.JsonOutput +JsonInput.Int64FieldMaxValueNotQuoted.JsonOutput +JsonInput.Int64FieldMaxValueNotQuoted.ProtobufOutput +JsonInput.OriginalProtoFieldName.JsonOutput +JsonInput.StringFieldSurrogatePair.JsonOutput +JsonInput.Uint64FieldMaxValueNotQuoted.JsonOutput +JsonInput.Uint64FieldMaxValueNotQuoted.ProtobufOutput diff --git a/packager/third_party/protobuf/conformance/failure_list_java.txt b/packager/third_party/protobuf/conformance/failure_list_java.txt new file mode 100644 index 0000000000..552c0cc9c4 --- /dev/null +++ b/packager/third_party/protobuf/conformance/failure_list_java.txt @@ -0,0 +1,49 @@ +# This is the list of conformance tests that are known to fail for the Java +# implementation right now. These should be fixed. +# +# By listing them here we can keep tabs on which ones are failing and be sure +# that we don't introduce regressions in other tests. 
+ +FieldMaskNumbersDontRoundTrip.JsonOutput +FieldMaskPathsDontRoundTrip.JsonOutput +FieldMaskTooManyUnderscore.JsonOutput +JsonInput.AnyWithFieldMask.ProtobufOutput +JsonInput.AnyWithValueForInteger.JsonOutput +JsonInput.AnyWithValueForJsonObject.JsonOutput +JsonInput.BoolFieldAllCapitalFalse +JsonInput.BoolFieldAllCapitalTrue +JsonInput.BoolFieldCamelCaseFalse +JsonInput.BoolFieldCamelCaseTrue +JsonInput.BoolFieldDoubleQuotedFalse +JsonInput.BoolFieldDoubleQuotedTrue +JsonInput.BoolMapFieldKeyNotQuoted +JsonInput.DoubleFieldInfinityNotQuoted +JsonInput.DoubleFieldNanNotQuoted +JsonInput.DoubleFieldNegativeInfinityNotQuoted +JsonInput.EnumFieldNotQuoted +JsonInput.FieldMask.ProtobufOutput +JsonInput.FieldMaskInvalidCharacter +JsonInput.FieldNameDuplicate +JsonInput.FieldNameInSnakeCase.JsonOutput +JsonInput.FieldNameNotQuoted +JsonInput.FloatFieldInfinityNotQuoted +JsonInput.FloatFieldNanNotQuoted +JsonInput.FloatFieldNegativeInfinityNotQuoted +JsonInput.Int32FieldLeadingZero +JsonInput.Int32FieldMinFloatValue.JsonOutput +JsonInput.Int32FieldMinValue.JsonOutput +JsonInput.Int32FieldNegativeWithLeadingZero +JsonInput.Int32FieldPlusSign +JsonInput.Int32MapFieldKeyNotQuoted +JsonInput.Int64MapFieldKeyNotQuoted +JsonInput.JsonWithComments +JsonInput.OriginalProtoFieldName.JsonOutput +JsonInput.RepeatedFieldWrongElementTypeExpectingStringsGotBool +JsonInput.RepeatedFieldWrongElementTypeExpectingStringsGotInt +JsonInput.StringFieldNotAString +JsonInput.StringFieldSurrogateInWrongOrder +JsonInput.StringFieldUnpairedHighSurrogate +JsonInput.StringFieldUnpairedLowSurrogate +JsonInput.StringFieldUppercaseEscapeLetter +JsonInput.Uint32MapFieldKeyNotQuoted +JsonInput.Uint64MapFieldKeyNotQuoted diff --git a/packager/third_party/protobuf/conformance/failure_list_objc.txt b/packager/third_party/protobuf/conformance/failure_list_objc.txt new file mode 100644 index 0000000000..5dac3501f1 --- /dev/null +++ b/packager/third_party/protobuf/conformance/failure_list_objc.txt @@ -0,0 +1,4 @@ +# No tests currently failing. +# +# json input or output tests are skipped (in conformance_objc.m) as mobile +# platforms don't support json wire format to avoid code bloat. 
diff --git a/packager/third_party/protobuf/conformance/failure_list_python-post26.txt b/packager/third_party/protobuf/conformance/failure_list_python-post26.txt new file mode 100644 index 0000000000..19d99b044a --- /dev/null +++ b/packager/third_party/protobuf/conformance/failure_list_python-post26.txt @@ -0,0 +1,2 @@ +JsonInput.StringFieldSurrogateInWrongOrder +JsonInput.StringFieldUnpairedHighSurrogate diff --git a/packager/third_party/protobuf/conformance/failure_list_python.txt b/packager/third_party/protobuf/conformance/failure_list_python.txt new file mode 100644 index 0000000000..d2e52637bd --- /dev/null +++ b/packager/third_party/protobuf/conformance/failure_list_python.txt @@ -0,0 +1,85 @@ +DurationProtoInputTooLarge.JsonOutput +DurationProtoInputTooSmall.JsonOutput +FieldMaskNumbersDontRoundTrip.JsonOutput +FieldMaskPathsDontRoundTrip.JsonOutput +FieldMaskTooManyUnderscore.JsonOutput +JsonInput.Any.JsonOutput +JsonInput.Any.ProtobufOutput +JsonInput.AnyNested.JsonOutput +JsonInput.AnyNested.ProtobufOutput +JsonInput.AnyUnorderedTypeTag.JsonOutput +JsonInput.AnyUnorderedTypeTag.ProtobufOutput +JsonInput.AnyWithDuration.JsonOutput +JsonInput.AnyWithDuration.ProtobufOutput +JsonInput.AnyWithFieldMask.JsonOutput +JsonInput.AnyWithFieldMask.ProtobufOutput +JsonInput.AnyWithInt32ValueWrapper.JsonOutput +JsonInput.AnyWithInt32ValueWrapper.ProtobufOutput +JsonInput.AnyWithStruct.JsonOutput +JsonInput.AnyWithStruct.ProtobufOutput +JsonInput.AnyWithTimestamp.JsonOutput +JsonInput.AnyWithTimestamp.ProtobufOutput +JsonInput.AnyWithValueForInteger.JsonOutput +JsonInput.AnyWithValueForInteger.ProtobufOutput +JsonInput.AnyWithValueForJsonObject.JsonOutput +JsonInput.AnyWithValueForJsonObject.ProtobufOutput +JsonInput.BytesFieldInvalidBase64Characters +JsonInput.DoubleFieldInfinityNotQuoted +JsonInput.DoubleFieldNanNotQuoted +JsonInput.DoubleFieldNegativeInfinityNotQuoted +JsonInput.DoubleFieldTooSmall +JsonInput.DurationJsonInputTooLarge +JsonInput.DurationJsonInputTooSmall +JsonInput.DurationMissingS +JsonInput.EnumFieldNumericValueNonZero.JsonOutput +JsonInput.EnumFieldNumericValueNonZero.ProtobufOutput +JsonInput.EnumFieldNumericValueZero.JsonOutput +JsonInput.EnumFieldNumericValueZero.ProtobufOutput +JsonInput.EnumFieldUnknownValue.Validator +JsonInput.FieldMask.ProtobufOutput +JsonInput.FieldMaskInvalidCharacter +JsonInput.FieldNameInLowerCamelCase.Validator +JsonInput.FieldNameInSnakeCase.JsonOutput +JsonInput.FieldNameInSnakeCase.ProtobufOutput +JsonInput.FloatFieldInfinityNotQuoted +JsonInput.FloatFieldNanNotQuoted +JsonInput.FloatFieldNegativeInfinityNotQuoted +JsonInput.FloatFieldTooLarge +JsonInput.FloatFieldTooSmall +JsonInput.Int32FieldExponentialFormat.JsonOutput +JsonInput.Int32FieldExponentialFormat.ProtobufOutput +JsonInput.Int32FieldFloatTrailingZero.JsonOutput +JsonInput.Int32FieldFloatTrailingZero.ProtobufOutput +JsonInput.Int32FieldMaxFloatValue.JsonOutput +JsonInput.Int32FieldMaxFloatValue.ProtobufOutput +JsonInput.Int32FieldMinFloatValue.JsonOutput +JsonInput.Int32FieldMinFloatValue.ProtobufOutput +JsonInput.Int32FieldMinValue.JsonOutput +JsonInput.OriginalProtoFieldName.JsonOutput +JsonInput.OriginalProtoFieldName.ProtobufOutput +JsonInput.RepeatedFieldMessageElementIsNull +JsonInput.RepeatedFieldPrimitiveElementIsNull +JsonInput.RepeatedFieldWrongElementTypeExpectingIntegersGotBool +JsonInput.StringFieldSurrogatePair.JsonOutput +JsonInput.StringFieldUnpairedLowSurrogate +JsonInput.Struct.JsonOutput +JsonInput.Struct.ProtobufOutput +JsonInput.TimestampJsonInputLowercaseT 
+JsonInput.Uint32FieldMaxFloatValue.JsonOutput +JsonInput.Uint32FieldMaxFloatValue.ProtobufOutput +JsonInput.ValueAcceptBool.JsonOutput +JsonInput.ValueAcceptBool.ProtobufOutput +JsonInput.ValueAcceptFloat.JsonOutput +JsonInput.ValueAcceptFloat.ProtobufOutput +JsonInput.ValueAcceptInteger.JsonOutput +JsonInput.ValueAcceptInteger.ProtobufOutput +JsonInput.ValueAcceptList.JsonOutput +JsonInput.ValueAcceptList.ProtobufOutput +JsonInput.ValueAcceptNull.JsonOutput +JsonInput.ValueAcceptNull.ProtobufOutput +JsonInput.ValueAcceptObject.JsonOutput +JsonInput.ValueAcceptObject.ProtobufOutput +JsonInput.ValueAcceptString.JsonOutput +JsonInput.ValueAcceptString.ProtobufOutput +TimestampProtoInputTooLarge.JsonOutput +TimestampProtoInputTooSmall.JsonOutput diff --git a/packager/third_party/protobuf/conformance/failure_list_python_cpp.txt b/packager/third_party/protobuf/conformance/failure_list_python_cpp.txt new file mode 100644 index 0000000000..7b5e45f9ff --- /dev/null +++ b/packager/third_party/protobuf/conformance/failure_list_python_cpp.txt @@ -0,0 +1,110 @@ +# This is the list of conformance tests that are known to fail for the +# Python/C++ implementation right now. These should be fixed. +# +# By listing them here we can keep tabs on which ones are failing and be sure +# that we don't introduce regressions in other tests. +# +# TODO(haberman): insert links to corresponding bugs tracking the issue. +# Should we use GitHub issues or the Google-internal bug tracker? + +DurationProtoInputTooLarge.JsonOutput +DurationProtoInputTooSmall.JsonOutput +FieldMaskNumbersDontRoundTrip.JsonOutput +FieldMaskPathsDontRoundTrip.JsonOutput +FieldMaskTooManyUnderscore.JsonOutput +JsonInput.Any.JsonOutput +JsonInput.Any.ProtobufOutput +JsonInput.AnyNested.JsonOutput +JsonInput.AnyNested.ProtobufOutput +JsonInput.AnyUnorderedTypeTag.JsonOutput +JsonInput.AnyUnorderedTypeTag.ProtobufOutput +JsonInput.AnyWithDuration.JsonOutput +JsonInput.AnyWithDuration.ProtobufOutput +JsonInput.AnyWithFieldMask.JsonOutput +JsonInput.AnyWithFieldMask.ProtobufOutput +JsonInput.AnyWithInt32ValueWrapper.JsonOutput +JsonInput.AnyWithInt32ValueWrapper.ProtobufOutput +JsonInput.AnyWithStruct.JsonOutput +JsonInput.AnyWithStruct.ProtobufOutput +JsonInput.AnyWithTimestamp.JsonOutput +JsonInput.AnyWithTimestamp.ProtobufOutput +JsonInput.AnyWithValueForInteger.JsonOutput +JsonInput.AnyWithValueForInteger.ProtobufOutput +JsonInput.AnyWithValueForJsonObject.JsonOutput +JsonInput.AnyWithValueForJsonObject.ProtobufOutput +JsonInput.BytesFieldInvalidBase64Characters +JsonInput.DoubleFieldInfinityNotQuoted +JsonInput.DoubleFieldNanNotQuoted +JsonInput.DoubleFieldNegativeInfinityNotQuoted +JsonInput.DoubleFieldTooSmall +JsonInput.DurationJsonInputTooLarge +JsonInput.DurationJsonInputTooSmall +JsonInput.DurationMissingS +JsonInput.EnumFieldNumericValueNonZero.JsonOutput +JsonInput.EnumFieldNumericValueNonZero.ProtobufOutput +JsonInput.EnumFieldNumericValueZero.JsonOutput +JsonInput.EnumFieldNumericValueZero.ProtobufOutput +JsonInput.EnumFieldUnknownValue.Validator +JsonInput.FieldMask.ProtobufOutput +JsonInput.FieldMaskInvalidCharacter +JsonInput.FieldNameInLowerCamelCase.Validator +JsonInput.FieldNameInSnakeCase.JsonOutput +JsonInput.FieldNameInSnakeCase.ProtobufOutput +JsonInput.FloatFieldInfinityNotQuoted +JsonInput.FloatFieldNanNotQuoted +JsonInput.FloatFieldNegativeInfinityNotQuoted +JsonInput.FloatFieldTooLarge +JsonInput.FloatFieldTooSmall +JsonInput.Int32FieldExponentialFormat.JsonOutput +JsonInput.Int32FieldExponentialFormat.ProtobufOutput 
+JsonInput.Int32FieldFloatTrailingZero.JsonOutput +JsonInput.Int32FieldFloatTrailingZero.ProtobufOutput +JsonInput.Int32FieldMaxFloatValue.JsonOutput +JsonInput.Int32FieldMaxFloatValue.ProtobufOutput +JsonInput.Int32FieldMinFloatValue.JsonOutput +JsonInput.Int32FieldMinFloatValue.ProtobufOutput +JsonInput.Int32FieldMinValue.JsonOutput +JsonInput.OriginalProtoFieldName.JsonOutput +JsonInput.OriginalProtoFieldName.ProtobufOutput +JsonInput.RepeatedFieldMessageElementIsNull +JsonInput.RepeatedFieldPrimitiveElementIsNull +JsonInput.RepeatedFieldWrongElementTypeExpectingIntegersGotBool +JsonInput.StringFieldSurrogatePair.JsonOutput +JsonInput.StringFieldUnpairedLowSurrogate +JsonInput.Struct.JsonOutput +JsonInput.Struct.ProtobufOutput +JsonInput.TimestampJsonInputLowercaseT +JsonInput.Uint32FieldMaxFloatValue.JsonOutput +JsonInput.Uint32FieldMaxFloatValue.ProtobufOutput +JsonInput.ValueAcceptBool.JsonOutput +JsonInput.ValueAcceptBool.ProtobufOutput +JsonInput.ValueAcceptFloat.JsonOutput +JsonInput.ValueAcceptFloat.ProtobufOutput +JsonInput.ValueAcceptInteger.JsonOutput +JsonInput.ValueAcceptInteger.ProtobufOutput +JsonInput.ValueAcceptList.JsonOutput +JsonInput.ValueAcceptList.ProtobufOutput +JsonInput.ValueAcceptNull.JsonOutput +JsonInput.ValueAcceptNull.ProtobufOutput +JsonInput.ValueAcceptObject.JsonOutput +JsonInput.ValueAcceptObject.ProtobufOutput +JsonInput.ValueAcceptString.JsonOutput +JsonInput.ValueAcceptString.ProtobufOutput +ProtobufInput.PrematureEofInDelimitedDataForKnownNonRepeatedValue.MESSAGE +ProtobufInput.PrematureEofInDelimitedDataForKnownRepeatedValue.MESSAGE +ProtobufInput.PrematureEofInPackedField.BOOL +ProtobufInput.PrematureEofInPackedField.DOUBLE +ProtobufInput.PrematureEofInPackedField.ENUM +ProtobufInput.PrematureEofInPackedField.FIXED32 +ProtobufInput.PrematureEofInPackedField.FIXED64 +ProtobufInput.PrematureEofInPackedField.FLOAT +ProtobufInput.PrematureEofInPackedField.INT32 +ProtobufInput.PrematureEofInPackedField.INT64 +ProtobufInput.PrematureEofInPackedField.SFIXED32 +ProtobufInput.PrematureEofInPackedField.SFIXED64 +ProtobufInput.PrematureEofInPackedField.SINT32 +ProtobufInput.PrematureEofInPackedField.SINT64 +ProtobufInput.PrematureEofInPackedField.UINT32 +ProtobufInput.PrematureEofInPackedField.UINT64 +TimestampProtoInputTooLarge.JsonOutput +TimestampProtoInputTooSmall.JsonOutput diff --git a/packager/third_party/protobuf/conformance/failure_list_ruby.txt b/packager/third_party/protobuf/conformance/failure_list_ruby.txt new file mode 100644 index 0000000000..7c12da0611 --- /dev/null +++ b/packager/third_party/protobuf/conformance/failure_list_ruby.txt @@ -0,0 +1,199 @@ +DurationProtoInputTooLarge.JsonOutput +DurationProtoInputTooSmall.JsonOutput +FieldMaskNumbersDontRoundTrip.JsonOutput +FieldMaskPathsDontRoundTrip.JsonOutput +FieldMaskTooManyUnderscore.JsonOutput +JsonInput.Any.JsonOutput +JsonInput.Any.ProtobufOutput +JsonInput.AnyNested.JsonOutput +JsonInput.AnyNested.ProtobufOutput +JsonInput.AnyUnorderedTypeTag.JsonOutput +JsonInput.AnyUnorderedTypeTag.ProtobufOutput +JsonInput.AnyWithDuration.JsonOutput +JsonInput.AnyWithDuration.ProtobufOutput +JsonInput.AnyWithFieldMask.JsonOutput +JsonInput.AnyWithFieldMask.ProtobufOutput +JsonInput.AnyWithInt32ValueWrapper.JsonOutput +JsonInput.AnyWithInt32ValueWrapper.ProtobufOutput +JsonInput.AnyWithStruct.JsonOutput +JsonInput.AnyWithStruct.ProtobufOutput +JsonInput.AnyWithTimestamp.JsonOutput +JsonInput.AnyWithTimestamp.ProtobufOutput +JsonInput.AnyWithValueForInteger.JsonOutput 
+JsonInput.AnyWithValueForInteger.ProtobufOutput +JsonInput.AnyWithValueForJsonObject.JsonOutput +JsonInput.AnyWithValueForJsonObject.ProtobufOutput +JsonInput.BoolFieldIntegerOne +JsonInput.BoolFieldIntegerZero +JsonInput.DoubleFieldInfinity.JsonOutput +JsonInput.DoubleFieldInfinity.ProtobufOutput +JsonInput.DoubleFieldMaxNegativeValue.JsonOutput +JsonInput.DoubleFieldMaxNegativeValue.ProtobufOutput +JsonInput.DoubleFieldMaxPositiveValue.JsonOutput +JsonInput.DoubleFieldMaxPositiveValue.ProtobufOutput +JsonInput.DoubleFieldMinNegativeValue.JsonOutput +JsonInput.DoubleFieldMinNegativeValue.ProtobufOutput +JsonInput.DoubleFieldMinPositiveValue.JsonOutput +JsonInput.DoubleFieldMinPositiveValue.ProtobufOutput +JsonInput.DoubleFieldNan.JsonOutput +JsonInput.DoubleFieldNan.ProtobufOutput +JsonInput.DoubleFieldNegativeInfinity.JsonOutput +JsonInput.DoubleFieldNegativeInfinity.ProtobufOutput +JsonInput.DoubleFieldQuotedValue.JsonOutput +JsonInput.DoubleFieldQuotedValue.ProtobufOutput +JsonInput.DurationHas3FractionalDigits.Validator +JsonInput.DurationHas6FractionalDigits.Validator +JsonInput.DurationHas9FractionalDigits.Validator +JsonInput.DurationHasZeroFractionalDigit.Validator +JsonInput.DurationMaxValue.JsonOutput +JsonInput.DurationMaxValue.ProtobufOutput +JsonInput.DurationMinValue.JsonOutput +JsonInput.DurationMinValue.ProtobufOutput +JsonInput.DurationRepeatedValue.JsonOutput +JsonInput.DurationRepeatedValue.ProtobufOutput +JsonInput.EnumFieldNumericValueNonZero.JsonOutput +JsonInput.EnumFieldNumericValueNonZero.ProtobufOutput +JsonInput.EnumFieldNumericValueZero.JsonOutput +JsonInput.EnumFieldNumericValueZero.ProtobufOutput +JsonInput.EnumFieldUnknownValue.Validator +JsonInput.FieldMask.JsonOutput +JsonInput.FieldMask.ProtobufOutput +JsonInput.FieldNameInSnakeCase.JsonOutput +JsonInput.FieldNameWithMixedCases.JsonOutput +JsonInput.FieldNameWithMixedCases.ProtobufOutput +JsonInput.FieldNameWithMixedCases.Validator +JsonInput.FloatFieldInfinity.JsonOutput +JsonInput.FloatFieldInfinity.ProtobufOutput +JsonInput.FloatFieldNan.JsonOutput +JsonInput.FloatFieldNan.ProtobufOutput +JsonInput.FloatFieldNegativeInfinity.JsonOutput +JsonInput.FloatFieldNegativeInfinity.ProtobufOutput +JsonInput.FloatFieldQuotedValue.JsonOutput +JsonInput.FloatFieldQuotedValue.ProtobufOutput +JsonInput.FloatFieldTooLarge +JsonInput.FloatFieldTooSmall +JsonInput.Int32FieldExponentialFormat.JsonOutput +JsonInput.Int32FieldExponentialFormat.ProtobufOutput +JsonInput.Int32FieldFloatTrailingZero.JsonOutput +JsonInput.Int32FieldFloatTrailingZero.ProtobufOutput +JsonInput.Int32FieldMaxFloatValue.JsonOutput +JsonInput.Int32FieldMaxFloatValue.ProtobufOutput +JsonInput.Int32FieldMinFloatValue.JsonOutput +JsonInput.Int32FieldMinFloatValue.ProtobufOutput +JsonInput.Int32FieldStringValue.JsonOutput +JsonInput.Int32FieldStringValue.ProtobufOutput +JsonInput.Int32FieldStringValueEscaped.JsonOutput +JsonInput.Int32FieldStringValueEscaped.ProtobufOutput +JsonInput.Int32MapEscapedKey.JsonOutput +JsonInput.Int32MapEscapedKey.ProtobufOutput +JsonInput.Int32MapField.JsonOutput +JsonInput.Int32MapField.ProtobufOutput +JsonInput.Int64FieldBeString.Validator +JsonInput.Int64FieldMaxValue.JsonOutput +JsonInput.Int64FieldMaxValue.ProtobufOutput +JsonInput.Int64FieldMinValue.JsonOutput +JsonInput.Int64FieldMinValue.ProtobufOutput +JsonInput.Int64MapEscapedKey.JsonOutput +JsonInput.Int64MapEscapedKey.ProtobufOutput +JsonInput.Int64MapField.JsonOutput +JsonInput.Int64MapField.ProtobufOutput +JsonInput.MessageField.JsonOutput 
+JsonInput.MessageField.ProtobufOutput +JsonInput.MessageMapField.JsonOutput +JsonInput.MessageMapField.ProtobufOutput +JsonInput.MessageRepeatedField.JsonOutput +JsonInput.MessageRepeatedField.ProtobufOutput +JsonInput.OptionalBoolWrapper.JsonOutput +JsonInput.OptionalBoolWrapper.ProtobufOutput +JsonInput.OptionalBytesWrapper.JsonOutput +JsonInput.OptionalBytesWrapper.ProtobufOutput +JsonInput.OptionalDoubleWrapper.JsonOutput +JsonInput.OptionalDoubleWrapper.ProtobufOutput +JsonInput.OptionalFloatWrapper.JsonOutput +JsonInput.OptionalFloatWrapper.ProtobufOutput +JsonInput.OptionalInt32Wrapper.JsonOutput +JsonInput.OptionalInt32Wrapper.ProtobufOutput +JsonInput.OptionalInt64Wrapper.JsonOutput +JsonInput.OptionalInt64Wrapper.ProtobufOutput +JsonInput.OptionalStringWrapper.JsonOutput +JsonInput.OptionalStringWrapper.ProtobufOutput +JsonInput.OptionalUint32Wrapper.JsonOutput +JsonInput.OptionalUint32Wrapper.ProtobufOutput +JsonInput.OptionalUint64Wrapper.JsonOutput +JsonInput.OptionalUint64Wrapper.ProtobufOutput +JsonInput.OptionalWrapperTypesWithNonDefaultValue.JsonOutput +JsonInput.OptionalWrapperTypesWithNonDefaultValue.ProtobufOutput +JsonInput.OriginalProtoFieldName.JsonOutput +JsonInput.PrimitiveRepeatedField.JsonOutput +JsonInput.PrimitiveRepeatedField.ProtobufOutput +JsonInput.RepeatedBoolWrapper.JsonOutput +JsonInput.RepeatedBoolWrapper.ProtobufOutput +JsonInput.RepeatedBytesWrapper.JsonOutput +JsonInput.RepeatedBytesWrapper.ProtobufOutput +JsonInput.RepeatedDoubleWrapper.JsonOutput +JsonInput.RepeatedDoubleWrapper.ProtobufOutput +JsonInput.RepeatedFieldWrongElementTypeExpectingStringsGotInt +JsonInput.RepeatedFloatWrapper.JsonOutput +JsonInput.RepeatedFloatWrapper.ProtobufOutput +JsonInput.RepeatedInt32Wrapper.JsonOutput +JsonInput.RepeatedInt32Wrapper.ProtobufOutput +JsonInput.RepeatedInt64Wrapper.JsonOutput +JsonInput.RepeatedInt64Wrapper.ProtobufOutput +JsonInput.RepeatedStringWrapper.JsonOutput +JsonInput.RepeatedStringWrapper.ProtobufOutput +JsonInput.RepeatedUint32Wrapper.JsonOutput +JsonInput.RepeatedUint32Wrapper.ProtobufOutput +JsonInput.RepeatedUint64Wrapper.JsonOutput +JsonInput.RepeatedUint64Wrapper.ProtobufOutput +JsonInput.StringFieldNotAString +JsonInput.StringFieldSurrogateInWrongOrder +JsonInput.StringFieldSurrogatePair.JsonOutput +JsonInput.StringFieldSurrogatePair.ProtobufOutput +JsonInput.StringFieldUnpairedHighSurrogate +JsonInput.StringFieldUnpairedLowSurrogate +JsonInput.Struct.JsonOutput +JsonInput.Struct.ProtobufOutput +JsonInput.TimestampHas3FractionalDigits.Validator +JsonInput.TimestampHas6FractionalDigits.Validator +JsonInput.TimestampHas9FractionalDigits.Validator +JsonInput.TimestampHasZeroFractionalDigit.Validator +JsonInput.TimestampMaxValue.JsonOutput +JsonInput.TimestampMaxValue.ProtobufOutput +JsonInput.TimestampMinValue.JsonOutput +JsonInput.TimestampMinValue.ProtobufOutput +JsonInput.TimestampRepeatedValue.JsonOutput +JsonInput.TimestampRepeatedValue.ProtobufOutput +JsonInput.TimestampWithNegativeOffset.JsonOutput +JsonInput.TimestampWithNegativeOffset.ProtobufOutput +JsonInput.TimestampWithPositiveOffset.JsonOutput +JsonInput.TimestampWithPositiveOffset.ProtobufOutput +JsonInput.TimestampZeroNormalized.Validator +JsonInput.Uint32FieldMaxFloatValue.JsonOutput +JsonInput.Uint32FieldMaxFloatValue.ProtobufOutput +JsonInput.Uint32MapField.JsonOutput +JsonInput.Uint32MapField.ProtobufOutput +JsonInput.Uint64FieldBeString.Validator +JsonInput.Uint64FieldMaxValue.JsonOutput +JsonInput.Uint64FieldMaxValue.ProtobufOutput 
+JsonInput.Uint64MapField.JsonOutput +JsonInput.Uint64MapField.ProtobufOutput +JsonInput.ValueAcceptBool.JsonOutput +JsonInput.ValueAcceptBool.ProtobufOutput +JsonInput.ValueAcceptFloat.JsonOutput +JsonInput.ValueAcceptFloat.ProtobufOutput +JsonInput.ValueAcceptInteger.JsonOutput +JsonInput.ValueAcceptInteger.ProtobufOutput +JsonInput.ValueAcceptList.JsonOutput +JsonInput.ValueAcceptList.ProtobufOutput +JsonInput.ValueAcceptNull.JsonOutput +JsonInput.ValueAcceptNull.ProtobufOutput +JsonInput.ValueAcceptObject.JsonOutput +JsonInput.ValueAcceptObject.ProtobufOutput +JsonInput.ValueAcceptString.JsonOutput +JsonInput.ValueAcceptString.ProtobufOutput +ProtobufInput.DoubleFieldNormalizeQuietNan.JsonOutput +ProtobufInput.DoubleFieldNormalizeSignalingNan.JsonOutput +ProtobufInput.FloatFieldNormalizeQuietNan.JsonOutput +ProtobufInput.FloatFieldNormalizeSignalingNan.JsonOutput +TimestampProtoInputTooLarge.JsonOutput +TimestampProtoInputTooSmall.JsonOutput diff --git a/packager/third_party/protobuf/conformance/third_party/jsoncpp/json.h b/packager/third_party/protobuf/conformance/third_party/jsoncpp/json.h new file mode 100644 index 0000000000..42e7e7f4ad --- /dev/null +++ b/packager/third_party/protobuf/conformance/third_party/jsoncpp/json.h @@ -0,0 +1,2075 @@ +/// Json-cpp amalgated header (http://jsoncpp.sourceforge.net/). +/// It is intended to be used with #include "json/json.h" + +// ////////////////////////////////////////////////////////////////////// +// Beginning of content of file: LICENSE +// ////////////////////////////////////////////////////////////////////// + +/* +The JsonCpp library's source code, including accompanying documentation, +tests and demonstration applications, are licensed under the following +conditions... + +The author (Baptiste Lepilleur) explicitly disclaims copyright in all +jurisdictions which recognize such a disclaimer. In such jurisdictions, +this software is released into the Public Domain. + +In jurisdictions which do not recognize Public Domain property (e.g. Germany as of +2010), this software is Copyright (c) 2007-2010 by Baptiste Lepilleur, and is +released under the terms of the MIT License (see below). + +In jurisdictions which recognize Public Domain property, the user of this +software may choose to accept it either as 1) Public Domain, 2) under the +conditions of the MIT License (see below), or 3) under the terms of dual +Public Domain/MIT License conditions described here, as they choose. + +The MIT License is about as close to Public Domain as a license can get, and is +described in clear, concise terms at: + + http://en.wikipedia.org/wiki/MIT_License + +The full text of the MIT License follows: + +======================================================================== +Copyright (c) 2007-2010 Baptiste Lepilleur + +Permission is hereby granted, free of charge, to any person +obtaining a copy of this software and associated documentation +files (the "Software"), to deal in the Software without +restriction, including without limitation the rights to use, copy, +modify, merge, publish, distribute, sublicense, and/or sell copies +of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be +included in all copies or substantial portions of the Software. 
+ +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS +BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN +ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN +CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. +======================================================================== +(END LICENSE TEXT) + +The MIT license is compatible with both the GPL and commercial +software, affording one all of the rights of Public Domain with the +minor nuisance of being required to keep the above copyright notice +and license text in the source code. Note also that by accepting the +Public Domain "license" you can re-license your copy using whatever +license you like. + +*/ + +// ////////////////////////////////////////////////////////////////////// +// End of content of file: LICENSE +// ////////////////////////////////////////////////////////////////////// + + + + + +#ifndef JSON_AMALGATED_H_INCLUDED +# define JSON_AMALGATED_H_INCLUDED +/// If defined, indicates that the source file is amalgated +/// to prevent private header inclusion. +#define JSON_IS_AMALGAMATION + +// ////////////////////////////////////////////////////////////////////// +// Beginning of content of file: include/json/version.h +// ////////////////////////////////////////////////////////////////////// + +// DO NOT EDIT. This file (and "version") is generated by CMake. +// Run CMake configure step to update it. +#ifndef JSON_VERSION_H_INCLUDED +# define JSON_VERSION_H_INCLUDED + +# define JSONCPP_VERSION_STRING "1.6.5" +# define JSONCPP_VERSION_MAJOR 1 +# define JSONCPP_VERSION_MINOR 6 +# define JSONCPP_VERSION_PATCH 5 +# define JSONCPP_VERSION_QUALIFIER +# define JSONCPP_VERSION_HEXA ((JSONCPP_VERSION_MAJOR << 24) | (JSONCPP_VERSION_MINOR << 16) | (JSONCPP_VERSION_PATCH << 8)) + +#endif // JSON_VERSION_H_INCLUDED + +// ////////////////////////////////////////////////////////////////////// +// End of content of file: include/json/version.h +// ////////////////////////////////////////////////////////////////////// + + + + + + +// ////////////////////////////////////////////////////////////////////// +// Beginning of content of file: include/json/config.h +// ////////////////////////////////////////////////////////////////////// + +// Copyright 2007-2010 Baptiste Lepilleur +// Distributed under MIT license, or public domain if desired and +// recognized in your jurisdiction. +// See file LICENSE for detail or copy at http://jsoncpp.sourceforge.net/LICENSE + +#ifndef JSON_CONFIG_H_INCLUDED +#define JSON_CONFIG_H_INCLUDED + +/// If defined, indicates that json library is embedded in CppTL library. +//# define JSON_IN_CPPTL 1 + +/// If defined, indicates that json may leverage CppTL library +//# define JSON_USE_CPPTL 1 +/// If defined, indicates that cpptl vector based map should be used instead of +/// std::map +/// as Value container. +//# define JSON_USE_CPPTL_SMALLMAP 1 + +// If non-zero, the library uses exceptions to report bad input instead of C +// assertion macros. The default is to use exceptions. +#ifndef JSON_USE_EXCEPTION +#define JSON_USE_EXCEPTION 1 +#endif + +/// If defined, indicates that the source file is amalgated +/// to prevent private header inclusion. +/// Remarks: it is automatically defined in the generated amalgated header. 
+// #define JSON_IS_AMALGAMATION + +#ifdef JSON_IN_CPPTL +#include +#ifndef JSON_USE_CPPTL +#define JSON_USE_CPPTL 1 +#endif +#endif + +#ifdef JSON_IN_CPPTL +#define JSON_API CPPTL_API +#elif defined(JSON_DLL_BUILD) +#if defined(_MSC_VER) +#define JSON_API __declspec(dllexport) +#define JSONCPP_DISABLE_DLL_INTERFACE_WARNING +#endif // if defined(_MSC_VER) +#elif defined(JSON_DLL) +#if defined(_MSC_VER) +#define JSON_API __declspec(dllimport) +#define JSONCPP_DISABLE_DLL_INTERFACE_WARNING +#endif // if defined(_MSC_VER) +#endif // ifdef JSON_IN_CPPTL +#if !defined(JSON_API) +#define JSON_API +#endif + +// If JSON_NO_INT64 is defined, then Json only support C++ "int" type for +// integer +// Storages, and 64 bits integer support is disabled. +// #define JSON_NO_INT64 1 + +#if defined(_MSC_VER) // MSVC +# if _MSC_VER <= 1200 // MSVC 6 + // Microsoft Visual Studio 6 only support conversion from __int64 to double + // (no conversion from unsigned __int64). +# define JSON_USE_INT64_DOUBLE_CONVERSION 1 + // Disable warning 4786 for VS6 caused by STL (identifier was truncated to '255' + // characters in the debug information) + // All projects I've ever seen with VS6 were using this globally (not bothering + // with pragma push/pop). +# pragma warning(disable : 4786) +# endif // MSVC 6 + +# if _MSC_VER >= 1500 // MSVC 2008 + /// Indicates that the following function is deprecated. +# define JSONCPP_DEPRECATED(message) __declspec(deprecated(message)) +# endif + +#endif // defined(_MSC_VER) + + +#ifndef JSON_HAS_RVALUE_REFERENCES + +#if defined(_MSC_VER) && _MSC_VER >= 1600 // MSVC >= 2010 +#define JSON_HAS_RVALUE_REFERENCES 1 +#endif // MSVC >= 2010 + +#ifdef __clang__ +#if __has_feature(cxx_rvalue_references) +#define JSON_HAS_RVALUE_REFERENCES 1 +#endif // has_feature + +#elif defined __GNUC__ // not clang (gcc comes later since clang emulates gcc) +#if defined(__GXX_EXPERIMENTAL_CXX0X__) || (__cplusplus >= 201103L) +#define JSON_HAS_RVALUE_REFERENCES 1 +#endif // GXX_EXPERIMENTAL + +#endif // __clang__ || __GNUC__ + +#endif // not defined JSON_HAS_RVALUE_REFERENCES + +#ifndef JSON_HAS_RVALUE_REFERENCES +#define JSON_HAS_RVALUE_REFERENCES 0 +#endif + +#ifdef __clang__ +#elif defined __GNUC__ // not clang (gcc comes later since clang emulates gcc) +# if (__GNUC__ > 4 || (__GNUC__ == 4 && __GNUC_MINOR__ >= 5)) +# define JSONCPP_DEPRECATED(message) __attribute__ ((deprecated(message))) +# elif (__GNUC__ > 3 || (__GNUC__ == 3 && __GNUC_MINOR__ >= 1)) +# define JSONCPP_DEPRECATED(message) __attribute__((__deprecated__)) +# endif // GNUC version +#endif // __clang__ || __GNUC__ + +#if !defined(JSONCPP_DEPRECATED) +#define JSONCPP_DEPRECATED(message) +#endif // if !defined(JSONCPP_DEPRECATED) + +namespace Json { +typedef int Int; +typedef unsigned int UInt; +#if defined(JSON_NO_INT64) +typedef int LargestInt; +typedef unsigned int LargestUInt; +#undef JSON_HAS_INT64 +#else // if defined(JSON_NO_INT64) +// For Microsoft Visual use specific types as long long is not supported +#if defined(_MSC_VER) // Microsoft Visual Studio +typedef __int64 Int64; +typedef unsigned __int64 UInt64; +#else // if defined(_MSC_VER) // Other platforms, use long long +typedef long long int Int64; +typedef unsigned long long int UInt64; +#endif // if defined(_MSC_VER) +typedef Int64 LargestInt; +typedef UInt64 LargestUInt; +#define JSON_HAS_INT64 +#endif // if defined(JSON_NO_INT64) +} // end namespace Json + +#endif // JSON_CONFIG_H_INCLUDED + +// ////////////////////////////////////////////////////////////////////// +// End 
of content of file: include/json/config.h +// ////////////////////////////////////////////////////////////////////// + + + + + + +// ////////////////////////////////////////////////////////////////////// +// Beginning of content of file: include/json/forwards.h +// ////////////////////////////////////////////////////////////////////// + +// Copyright 2007-2010 Baptiste Lepilleur +// Distributed under MIT license, or public domain if desired and +// recognized in your jurisdiction. +// See file LICENSE for detail or copy at http://jsoncpp.sourceforge.net/LICENSE + +#ifndef JSON_FORWARDS_H_INCLUDED +#define JSON_FORWARDS_H_INCLUDED + +#if !defined(JSON_IS_AMALGAMATION) +#include "config.h" +#endif // if !defined(JSON_IS_AMALGAMATION) + +namespace Json { + +// writer.h +class FastWriter; +class StyledWriter; + +// reader.h +class Reader; + +// features.h +class Features; + +// value.h +typedef unsigned int ArrayIndex; +class StaticString; +class Path; +class PathArgument; +class Value; +class ValueIteratorBase; +class ValueIterator; +class ValueConstIterator; + +} // namespace Json + +#endif // JSON_FORWARDS_H_INCLUDED + +// ////////////////////////////////////////////////////////////////////// +// End of content of file: include/json/forwards.h +// ////////////////////////////////////////////////////////////////////// + + + + + + +// ////////////////////////////////////////////////////////////////////// +// Beginning of content of file: include/json/features.h +// ////////////////////////////////////////////////////////////////////// + +// Copyright 2007-2010 Baptiste Lepilleur +// Distributed under MIT license, or public domain if desired and +// recognized in your jurisdiction. +// See file LICENSE for detail or copy at http://jsoncpp.sourceforge.net/LICENSE + +#ifndef CPPTL_JSON_FEATURES_H_INCLUDED +#define CPPTL_JSON_FEATURES_H_INCLUDED + +#if !defined(JSON_IS_AMALGAMATION) +#include "forwards.h" +#endif // if !defined(JSON_IS_AMALGAMATION) + +namespace Json { + +/** \brief Configuration passed to reader and writer. + * This configuration object can be used to force the Reader or Writer + * to behave in a standard conforming way. + */ +class JSON_API Features { +public: + /** \brief A configuration that allows all features and assumes all strings + * are UTF-8. + * - C & C++ comments are allowed + * - Root object can be any JSON value + * - Assumes Value strings are encoded in UTF-8 + */ + static Features all(); + + /** \brief A configuration that is strictly compatible with the JSON + * specification. + * - Comments are forbidden. + * - Root object must be either an array or an object value. + * - Assumes Value strings are encoded in UTF-8 + */ + static Features strictMode(); + + /** \brief Initialize the configuration like JsonConfig::allFeatures; + */ + Features(); + + /// \c true if comments are allowed. Default: \c true. + bool allowComments_; + + /// \c true if root must be either an array or an object value. Default: \c + /// false. + bool strictRoot_; + + /// \c true if dropped null placeholders are allowed. Default: \c false. + bool allowDroppedNullPlaceholders_; + + /// \c true if numeric object key are allowed. Default: \c false. 
+ bool allowNumericKeys_;
+};
+
+} // namespace Json
+
+#endif // CPPTL_JSON_FEATURES_H_INCLUDED
+
+// //////////////////////////////////////////////////////////////////////
+// End of content of file: include/json/features.h
+// //////////////////////////////////////////////////////////////////////
+
+
+
+
+
+
+// //////////////////////////////////////////////////////////////////////
+// Beginning of content of file: include/json/value.h
+// //////////////////////////////////////////////////////////////////////
+
+// Copyright 2007-2010 Baptiste Lepilleur
+// Distributed under MIT license, or public domain if desired and
+// recognized in your jurisdiction.
+// See file LICENSE for detail or copy at http://jsoncpp.sourceforge.net/LICENSE
+
+#ifndef CPPTL_JSON_H_INCLUDED
+#define CPPTL_JSON_H_INCLUDED
+
+#if !defined(JSON_IS_AMALGAMATION)
+#include "forwards.h"
+#endif // if !defined(JSON_IS_AMALGAMATION)
+#include <string>
+#include <vector>
+#include <exception>
+
+#ifndef JSON_USE_CPPTL_SMALLMAP
+#include <map>
+#else
+#include <cpptl/smallmap.h>
+#endif
+#ifdef JSON_USE_CPPTL
+#include <cpptl/forwards.h>
+#endif
+
+// Disable warning C4251: <data member>: <type> needs to have dll-interface to
+// be used by...
+#if defined(JSONCPP_DISABLE_DLL_INTERFACE_WARNING)
+#pragma warning(push)
+#pragma warning(disable : 4251)
+#endif // if defined(JSONCPP_DISABLE_DLL_INTERFACE_WARNING)
+
+/** \brief JSON (JavaScript Object Notation).
+ */
+namespace Json {
+
+/** Base class for all exceptions we throw.
+ *
+ * We use nothing but these internally. Of course, STL can throw others.
+ */
+class JSON_API Exception : public std::exception {
+public:
+ Exception(std::string const& msg);
+ ~Exception() throw() override;
+ char const* what() const throw() override;
+protected:
+ std::string msg_;
+};
+
+/** Exceptions which the user cannot easily avoid.
+ *
+ * E.g. out-of-memory (when we use malloc), stack-overflow, malicious input
+ *
+ * \remark derived from Json::Exception
+ */
+class JSON_API RuntimeError : public Exception {
+public:
+ RuntimeError(std::string const& msg);
+};
+
+/** Exceptions thrown by JSON_ASSERT/JSON_FAIL macros.
+ *
+ * These are precondition-violations (user bugs) and internal errors (our bugs).
+ *
+ * \remark derived from Json::Exception
+ */
+class JSON_API LogicError : public Exception {
+public:
+ LogicError(std::string const& msg);
+};
+
+/// used internally
+void throwRuntimeError(std::string const& msg);
+/// used internally
+void throwLogicError(std::string const& msg);
+
+/** \brief Type of the value held by a Value object.
+ */
+enum ValueType {
+ nullValue = 0, ///< 'null' value
+ intValue, ///< signed integer value
+ uintValue, ///< unsigned integer value
+ realValue, ///< double value
+ stringValue, ///< UTF-8 string value
+ booleanValue, ///< bool value
+ arrayValue, ///< array value (ordered list)
+ objectValue ///< object value (collection of name/value pairs).
+};
+
+enum CommentPlacement {
+ commentBefore = 0, ///< a comment placed on the line before a value
+ commentAfterOnSameLine, ///< a comment just after a value on the same line
+ commentAfter, ///< a comment on the line after a value (only make sense for
+ /// root value)
+ numberOfCommentPlacement
+};
+
+//# ifdef JSON_USE_CPPTL
+// typedef CppTL::AnyEnumerator<const char *> EnumMemberNames;
+// typedef CppTL::AnyEnumerator<const Value &> EnumValues;
+//# endif
+
+/** \brief Lightweight wrapper to tag static string.
+ *
+ * Value constructor and objectValue member assignement takes advantage of the
+ * StaticString and avoid the cost of string duplication when storing the
+ * string or the member name.
+ *
+ * Example of usage:
+ * \code
+ * Json::Value aValue( StaticString("some text") );
+ * Json::Value object;
+ * static const StaticString code("code");
+ * object[code] = 1234;
+ * \endcode
+ */
+class JSON_API StaticString {
+public:
+ explicit StaticString(const char* czstring) : c_str_(czstring) {}
+
+ operator const char*() const { return c_str_; }
+
+ const char* c_str() const { return c_str_; }
+
+private:
+ const char* c_str_;
+};
+
+/** \brief Represents a JSON value.
+ *
+ * This class is a discriminated union wrapper that can represents a:
+ * - signed integer [range: Value::minInt - Value::maxInt]
+ * - unsigned integer (range: 0 - Value::maxUInt)
+ * - double
+ * - UTF-8 string
+ * - boolean
+ * - 'null'
+ * - an ordered list of Value
+ * - collection of name/value pairs (javascript object)
+ *
+ * The type of the held value is represented by a #ValueType and
+ * can be obtained using type().
+ *
+ * Values of an #objectValue or #arrayValue can be accessed using operator[]()
+ * methods.
+ * Non-const methods will automatically create the a #nullValue element
+ * if it does not exist.
+ * The sequence of an #arrayValue will be automatically resized and initialized
+ * with #nullValue. resize() can be used to enlarge or truncate an #arrayValue.
+ *
+ * The get() methods can be used to obtain default value in the case the
+ * required element does not exist.
+ *
+ * It is possible to iterate over the list of a #objectValue values using
+ * the getMemberNames() method.
+ *
+ * \note #Value string-length fit in size_t, but keys must be < 2^30.
+ * (The reason is an implementation detail.) A #CharReader will raise an
+ * exception if a bound is exceeded to avoid security holes in your app,
+ * but the Value API does *not* check bounds. That is the responsibility
+ * of the caller.
+ */
+class JSON_API Value {
+ friend class ValueIteratorBase;
+public:
+ typedef std::vector<std::string> Members;
+ typedef ValueIterator iterator;
+ typedef ValueConstIterator const_iterator;
+ typedef Json::UInt UInt;
+ typedef Json::Int Int;
+#if defined(JSON_HAS_INT64)
+ typedef Json::UInt64 UInt64;
+ typedef Json::Int64 Int64;
+#endif // defined(JSON_HAS_INT64)
+ typedef Json::LargestInt LargestInt;
+ typedef Json::LargestUInt LargestUInt;
+ typedef Json::ArrayIndex ArrayIndex;
+
+ static const Value& null; ///< We regret this reference to a global instance; prefer the simpler Value().
+ static const Value& nullRef; ///< just a kludge for binary-compatibility; same as null
+ /// Minimum signed integer value that can be stored in a Json::Value.
+ static const LargestInt minLargestInt;
+ /// Maximum signed integer value that can be stored in a Json::Value.
+ static const LargestInt maxLargestInt;
+ /// Maximum unsigned integer value that can be stored in a Json::Value.
+ static const LargestUInt maxLargestUInt;
+
+ /// Minimum signed int value that can be stored in a Json::Value.
+ static const Int minInt;
+ /// Maximum signed int value that can be stored in a Json::Value.
+ static const Int maxInt;
+ /// Maximum unsigned int value that can be stored in a Json::Value.
+ static const UInt maxUInt;
+
+#if defined(JSON_HAS_INT64)
+ /// Minimum signed 64 bits int value that can be stored in a Json::Value.
+ static const Int64 minInt64;
+ /// Maximum signed 64 bits int value that can be stored in a Json::Value.
+ static const Int64 maxInt64;
+ /// Maximum unsigned 64 bits int value that can be stored in a Json::Value.
+ static const UInt64 maxUInt64;
+#endif // defined(JSON_HAS_INT64)
+
+private:
+#ifndef JSONCPP_DOC_EXCLUDE_IMPLEMENTATION
+ class CZString {
+ public:
+ enum DuplicationPolicy {
+ noDuplication = 0,
+ duplicate,
+ duplicateOnCopy
+ };
+ CZString(ArrayIndex index);
+ CZString(char const* str, unsigned length, DuplicationPolicy allocate);
+ CZString(CZString const& other);
+#if JSON_HAS_RVALUE_REFERENCES
+ CZString(CZString&& other);
+#endif
+ ~CZString();
+ CZString& operator=(CZString other);
+ bool operator<(CZString const& other) const;
+ bool operator==(CZString const& other) const;
+ ArrayIndex index() const;
+ //const char* c_str() const; ///< \deprecated
+ char const* data() const;
+ unsigned length() const;
+ bool isStaticString() const;
+
+ private:
+ void swap(CZString& other);
+
+ struct StringStorage {
+ unsigned policy_: 2;
+ unsigned length_: 30; // 1GB max
+ };
+
+ char const* cstr_; // actually, a prefixed string, unless policy is noDup
+ union {
+ ArrayIndex index_;
+ StringStorage storage_;
+ };
+ };
+
+public:
+#ifndef JSON_USE_CPPTL_SMALLMAP
+ typedef std::map<CZString, Value> ObjectValues;
+#else
+ typedef CppTL::SmallMap<CZString, Value> ObjectValues;
+#endif // ifndef JSON_USE_CPPTL_SMALLMAP
+#endif // ifndef JSONCPP_DOC_EXCLUDE_IMPLEMENTATION
+
+public:
+ /** \brief Create a default Value of the given type.
+
+ This is a very useful constructor.
+ To create an empty array, pass arrayValue.
+ To create an empty object, pass objectValue.
+ Another Value can then be set to this one by assignment.
+This is useful since clear() and resize() will not alter types.
+
+ Examples:
+\code
+Json::Value null_value; // null
+Json::Value arr_value(Json::arrayValue); // []
+Json::Value obj_value(Json::objectValue); // {}
+\endcode
+ */
+ Value(ValueType type = nullValue);
+ Value(Int value);
+ Value(UInt value);
+#if defined(JSON_HAS_INT64)
+ Value(Int64 value);
+ Value(UInt64 value);
+#endif // if defined(JSON_HAS_INT64)
+ Value(double value);
+ Value(const char* value); ///< Copy til first 0. (NULL causes to seg-fault.)
+ Value(const char* begin, const char* end); ///< Copy all, incl zeroes.
+ /** \brief Constructs a value from a static string.
+
+ * Like other value string constructor but do not duplicate the string for
+ * internal storage. The given string must remain alive after the call to this
+ * constructor.
+ * \note This works only for null-terminated strings. (We cannot change the
+ * size of this class, so we have nowhere to store the length,
+ * which might be computed later for various operations.)
+ *
+ * Example of usage:
+ * \code
+ * static StaticString foo("some text");
+ * Json::Value aValue(foo);
+ * \endcode
+ */
+ Value(const StaticString& value);
+ Value(const std::string& value); ///< Copy data() til size(). Embedded zeroes too.
+#ifdef JSON_USE_CPPTL
+ Value(const CppTL::ConstString& value);
+#endif
+ Value(bool value);
+ /// Deep copy.
+ Value(const Value& other);
+#if JSON_HAS_RVALUE_REFERENCES
+ /// Move constructor
+ Value(Value&& other);
+#endif
+ ~Value();
+
+ /// Deep copy, then swap(other).
+ /// \note Over-write existing comments. To preserve comments, use #swapPayload().
+ Value& operator=(Value other);
+ /// Swap everything.
+ void swap(Value& other);
+ /// Swap values but leave comments and source offsets in place.
+ void swapPayload(Value& other);
+
+ ValueType type() const;
+
+ /// Compare payload only, not comments etc.
+ bool operator<(const Value& other) const; + bool operator<=(const Value& other) const; + bool operator>=(const Value& other) const; + bool operator>(const Value& other) const; + bool operator==(const Value& other) const; + bool operator!=(const Value& other) const; + int compare(const Value& other) const; + + const char* asCString() const; ///< Embedded zeroes could cause you trouble! + std::string asString() const; ///< Embedded zeroes are possible. + /** Get raw char* of string-value. + * \return false if !string. (Seg-fault if str or end are NULL.) + */ + bool getString( + char const** begin, char const** end) const; +#ifdef JSON_USE_CPPTL + CppTL::ConstString asConstString() const; +#endif + Int asInt() const; + UInt asUInt() const; +#if defined(JSON_HAS_INT64) + Int64 asInt64() const; + UInt64 asUInt64() const; +#endif // if defined(JSON_HAS_INT64) + LargestInt asLargestInt() const; + LargestUInt asLargestUInt() const; + float asFloat() const; + double asDouble() const; + bool asBool() const; + + bool isNull() const; + bool isBool() const; + bool isInt() const; + bool isInt64() const; + bool isUInt() const; + bool isUInt64() const; + bool isIntegral() const; + bool isDouble() const; + bool isNumeric() const; + bool isString() const; + bool isArray() const; + bool isObject() const; + + bool isConvertibleTo(ValueType other) const; + + /// Number of values in array or object + ArrayIndex size() const; + + /// \brief Return true if empty array, empty object, or null; + /// otherwise, false. + bool empty() const; + + /// Return isNull() + bool operator!() const; + + /// Remove all object members and array elements. + /// \pre type() is arrayValue, objectValue, or nullValue + /// \post type() is unchanged + void clear(); + + /// Resize the array to size elements. + /// New elements are initialized to null. + /// May only be called on nullValue or arrayValue. + /// \pre type() is arrayValue or nullValue + /// \post type() is arrayValue + void resize(ArrayIndex size); + + /// Access an array element (zero based index ). + /// If the array contains less than index element, then null value are + /// inserted + /// in the array so that its size is index+1. + /// (You may need to say 'value[0u]' to get your compiler to distinguish + /// this from the operator[] which takes a string.) + Value& operator[](ArrayIndex index); + + /// Access an array element (zero based index ). + /// If the array contains less than index element, then null value are + /// inserted + /// in the array so that its size is index+1. + /// (You may need to say 'value[0u]' to get your compiler to distinguish + /// this from the operator[] which takes a string.) + Value& operator[](int index); + + /// Access an array element (zero based index ) + /// (You may need to say 'value[0u]' to get your compiler to distinguish + /// this from the operator[] which takes a string.) + const Value& operator[](ArrayIndex index) const; + + /// Access an array element (zero based index ) + /// (You may need to say 'value[0u]' to get your compiler to distinguish + /// this from the operator[] which takes a string.) + const Value& operator[](int index) const; + + /// If the array contains at least index+1 elements, returns the element + /// value, + /// otherwise returns defaultValue. + Value get(ArrayIndex index, const Value& defaultValue) const; + /// Return true if index < size(). + bool isValidIndex(ArrayIndex index) const; + /// \brief Append value to array at the end. 
+ /// + /// Equivalent to jsonvalue[jsonvalue.size()] = value; + Value& append(const Value& value); + + /// Access an object value by name, create a null member if it does not exist. + /// \note Because of our implementation, keys are limited to 2^30 -1 chars. + /// Exceeding that will cause an exception. + Value& operator[](const char* key); + /// Access an object value by name, returns null if there is no member with + /// that name. + const Value& operator[](const char* key) const; + /// Access an object value by name, create a null member if it does not exist. + /// \param key may contain embedded nulls. + Value& operator[](const std::string& key); + /// Access an object value by name, returns null if there is no member with + /// that name. + /// \param key may contain embedded nulls. + const Value& operator[](const std::string& key) const; + /** \brief Access an object value by name, create a null member if it does not + exist. + + * If the object has no entry for that name, then the member name used to store + * the new entry is not duplicated. + * Example of use: + * \code + * Json::Value object; + * static const StaticString code("code"); + * object[code] = 1234; + * \endcode + */ + Value& operator[](const StaticString& key); +#ifdef JSON_USE_CPPTL + /// Access an object value by name, create a null member if it does not exist. + Value& operator[](const CppTL::ConstString& key); + /// Access an object value by name, returns null if there is no member with + /// that name. + const Value& operator[](const CppTL::ConstString& key) const; +#endif + /// Return the member named key if it exist, defaultValue otherwise. + /// \note deep copy + Value get(const char* key, const Value& defaultValue) const; + /// Return the member named key if it exist, defaultValue otherwise. + /// \note deep copy + /// \note key may contain embedded nulls. + Value get(const char* begin, const char* end, const Value& defaultValue) const; + /// Return the member named key if it exist, defaultValue otherwise. + /// \note deep copy + /// \param key may contain embedded nulls. + Value get(const std::string& key, const Value& defaultValue) const; +#ifdef JSON_USE_CPPTL + /// Return the member named key if it exist, defaultValue otherwise. + /// \note deep copy + Value get(const CppTL::ConstString& key, const Value& defaultValue) const; +#endif + /// Most general and efficient version of isMember()const, get()const, + /// and operator[]const + /// \note As stated elsewhere, behavior is undefined if (end-begin) >= 2^30 + Value const* find(char const* begin, char const* end) const; + /// Most general and efficient version of object-mutators. + /// \note As stated elsewhere, behavior is undefined if (end-begin) >= 2^30 + /// \return non-zero, but JSON_ASSERT if this is neither object nor nullValue. + Value const* demand(char const* begin, char const* end); + /// \brief Remove and return the named member. + /// + /// Do nothing if it did not exist. + /// \return the removed Value, or null. + /// \pre type() is objectValue or nullValue + /// \post type() is unchanged + /// \deprecated + Value removeMember(const char* key); + /// Same as removeMember(const char*) + /// \param key may contain embedded nulls. + /// \deprecated + Value removeMember(const std::string& key); + /// Same as removeMember(const char* begin, const char* end, Value* removed), + /// but 'key' is null-terminated. + bool removeMember(const char* key, Value* removed); + /** \brief Remove the named map member. + + Update 'removed' iff removed. 
+ \param key may contain embedded nulls. + \return true iff removed (no exceptions) + */ + bool removeMember(std::string const& key, Value* removed); + /// Same as removeMember(std::string const& key, Value* removed) + bool removeMember(const char* begin, const char* end, Value* removed); + /** \brief Remove the indexed array element. + + O(n) expensive operations. + Update 'removed' iff removed. + \return true iff removed (no exceptions) + */ + bool removeIndex(ArrayIndex i, Value* removed); + + /// Return true if the object has a member named key. + /// \note 'key' must be null-terminated. + bool isMember(const char* key) const; + /// Return true if the object has a member named key. + /// \param key may contain embedded nulls. + bool isMember(const std::string& key) const; + /// Same as isMember(std::string const& key)const + bool isMember(const char* begin, const char* end) const; +#ifdef JSON_USE_CPPTL + /// Return true if the object has a member named key. + bool isMember(const CppTL::ConstString& key) const; +#endif + + /// \brief Return a list of the member names. + /// + /// If null, return an empty list. + /// \pre type() is objectValue or nullValue + /// \post if type() was nullValue, it remains nullValue + Members getMemberNames() const; + + //# ifdef JSON_USE_CPPTL + // EnumMemberNames enumMemberNames() const; + // EnumValues enumValues() const; + //# endif + + /// \deprecated Always pass len. + JSONCPP_DEPRECATED("Use setComment(std::string const&) instead.") + void setComment(const char* comment, CommentPlacement placement); + /// Comments must be //... or /* ... */ + void setComment(const char* comment, size_t len, CommentPlacement placement); + /// Comments must be //... or /* ... */ + void setComment(const std::string& comment, CommentPlacement placement); + bool hasComment(CommentPlacement placement) const; + /// Include delimiters and embedded newlines. + std::string getComment(CommentPlacement placement) const; + + std::string toStyledString() const; + + const_iterator begin() const; + const_iterator end() const; + + iterator begin(); + iterator end(); + + // Accessors for the [start, limit) range of bytes within the JSON text from + // which this value was parsed, if any. + void setOffsetStart(size_t start); + void setOffsetLimit(size_t limit); + size_t getOffsetStart() const; + size_t getOffsetLimit() const; + +private: + void initBasic(ValueType type, bool allocated = false); + + Value& resolveReference(const char* key); + Value& resolveReference(const char* key, const char* end); + + struct CommentInfo { + CommentInfo(); + ~CommentInfo(); + + void setComment(const char* text, size_t len); + + char* comment_; + }; + + // struct MemberNamesTransform + //{ + // typedef const char *result_type; + // const char *operator()( const CZString &name ) const + // { + // return name.c_str(); + // } + //}; + + union ValueHolder { + LargestInt int_; + LargestUInt uint_; + double real_; + bool bool_; + char* string_; // actually ptr to unsigned, followed by str, unless !allocated_ + ObjectValues* map_; + } value_; + ValueType type_ : 8; + unsigned int allocated_ : 1; // Notes: if declared as bool, bitfield is useless. + // If not allocated_, string_ must be null-terminated. + CommentInfo* comments_; + + // [start, limit) byte offsets in the source JSON text from which this Value + // was extracted. + size_t start_; + size_t limit_; +}; + +/** \brief Experimental and untested: represents an element of the "path" to + * access a node. 
+ */
+class JSON_API PathArgument {
+public:
+ friend class Path;
+
+ PathArgument();
+ PathArgument(ArrayIndex index);
+ PathArgument(const char* key);
+ PathArgument(const std::string& key);
+
+private:
+ enum Kind {
+ kindNone = 0,
+ kindIndex,
+ kindKey
+ };
+ std::string key_;
+ ArrayIndex index_;
+ Kind kind_;
+};
+
+/** \brief Experimental and untested: represents a "path" to access a node.
+ *
+ * Syntax:
+ * - "." => root node
+ * - ".[n]" => elements at index 'n' of root node (an array value)
+ * - ".name" => member named 'name' of root node (an object value)
+ * - ".name1.name2.name3"
+ * - ".[0][1][2].name1[3]"
+ * - ".%" => member name is provided as parameter
+ * - ".[%]" => index is provied as parameter
+ */
+class JSON_API Path {
+public:
+ Path(const std::string& path,
+ const PathArgument& a1 = PathArgument(),
+ const PathArgument& a2 = PathArgument(),
+ const PathArgument& a3 = PathArgument(),
+ const PathArgument& a4 = PathArgument(),
+ const PathArgument& a5 = PathArgument());
+
+ const Value& resolve(const Value& root) const;
+ Value resolve(const Value& root, const Value& defaultValue) const;
+ /// Creates the "path" to access the specified node and returns a reference on
+ /// the node.
+ Value& make(Value& root) const;
+
+private:
+ typedef std::vector<const PathArgument*> InArgs;
+ typedef std::vector<PathArgument> Args;
+
+ void makePath(const std::string& path, const InArgs& in);
+ void addPathInArg(const std::string& path,
+ const InArgs& in,
+ InArgs::const_iterator& itInArg,
+ PathArgument::Kind kind);
+ void invalidPath(const std::string& path, int location);
+
+ Args args_;
+};
+
+/** \brief base class for Value iterators.
+ *
+ */
+class JSON_API ValueIteratorBase {
+public:
+ typedef std::bidirectional_iterator_tag iterator_category;
+ typedef unsigned int size_t;
+ typedef int difference_type;
+ typedef ValueIteratorBase SelfType;
+
+ bool operator==(const SelfType& other) const { return isEqual(other); }
+
+ bool operator!=(const SelfType& other) const { return !isEqual(other); }
+
+ difference_type operator-(const SelfType& other) const {
+ return other.computeDistance(*this);
+ }
+
+ /// Return either the index or the member name of the referenced value as a
+ /// Value.
+ Value key() const;
+
+ /// Return the index of the referenced Value, or -1 if it is not an arrayValue.
+ UInt index() const;
+
+ /// Return the member name of the referenced Value, or "" if it is not an
+ /// objectValue.
+ /// \note Avoid `c_str()` on result, as embedded zeroes are possible.
+ std::string name() const;
+
+ /// Return the member name of the referenced Value. "" if it is not an
+ /// objectValue.
+ /// \deprecated This cannot be used for UTF-8 strings, since there can be embedded nulls.
+ JSONCPP_DEPRECATED("Use `key = name();` instead.")
+ char const* memberName() const;
+ /// Return the member name of the referenced Value, or NULL if it is not an
+ /// objectValue.
+ /// \note Better version than memberName(). Allows embedded nulls.
+ char const* memberName(char const** end) const;
+
+protected:
+ Value& deref() const;
+
+ void increment();
+
+ void decrement();
+
+ difference_type computeDistance(const SelfType& other) const;
+
+ bool isEqual(const SelfType& other) const;
+
+ void copy(const SelfType& other);
+
+private:
+ Value::ObjectValues::iterator current_;
+ // Indicates that iterator is for a null value.
+ bool isNull_;
+
+public:
+ // For some reason, BORLAND needs these at the end, rather
+ // than earlier. No idea why.
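+ // [Editor's note, not part of the upstream jsoncpp header] A minimal,
+ // illustrative sketch of how the iterator types declared in this section are
+ // typically used to walk an objectValue; the variable names are hypothetical
+ // and <iostream> is assumed to be included:
+ //
+ //   Json::Value root(Json::objectValue);
+ //   root["a"] = 1;
+ //   root["b"] = "two";
+ //   for (Json::Value::const_iterator it = root.begin(); it != root.end(); ++it) {
+ //     // key() returns the member name (or array index) as a Json::Value.
+ //     std::cout << it.key().asString() << ": " << (*it).toStyledString();
+ //   }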
+ ValueIteratorBase();
+ explicit ValueIteratorBase(const Value::ObjectValues::iterator& current);
+};
+
+/** \brief const iterator for object and array value.
+ *
+ */
+class JSON_API ValueConstIterator : public ValueIteratorBase {
+ friend class Value;
+
+public:
+ typedef const Value value_type;
+ //typedef unsigned int size_t;
+ //typedef int difference_type;
+ typedef const Value& reference;
+ typedef const Value* pointer;
+ typedef ValueConstIterator SelfType;
+
+ ValueConstIterator();
+ ValueConstIterator(ValueIterator const& other);
+
+private:
+/*! \internal Use by Value to create an iterator.
+ */
+ explicit ValueConstIterator(const Value::ObjectValues::iterator& current);
+public:
+ SelfType& operator=(const ValueIteratorBase& other);
+
+ SelfType operator++(int) {
+ SelfType temp(*this);
+ ++*this;
+ return temp;
+ }
+
+ SelfType operator--(int) {
+ SelfType temp(*this);
+ --*this;
+ return temp;
+ }
+
+ SelfType& operator--() {
+ decrement();
+ return *this;
+ }
+
+ SelfType& operator++() {
+ increment();
+ return *this;
+ }
+
+ reference operator*() const { return deref(); }
+
+ pointer operator->() const { return &deref(); }
+};
+
+/** \brief Iterator for object and array value.
+ */
+class JSON_API ValueIterator : public ValueIteratorBase {
+ friend class Value;
+
+public:
+ typedef Value value_type;
+ typedef unsigned int size_t;
+ typedef int difference_type;
+ typedef Value& reference;
+ typedef Value* pointer;
+ typedef ValueIterator SelfType;
+
+ ValueIterator();
+ explicit ValueIterator(const ValueConstIterator& other);
+ ValueIterator(const ValueIterator& other);
+
+private:
+/*! \internal Use by Value to create an iterator.
+ */
+ explicit ValueIterator(const Value::ObjectValues::iterator& current);
+public:
+ SelfType& operator=(const SelfType& other);
+
+ SelfType operator++(int) {
+ SelfType temp(*this);
+ ++*this;
+ return temp;
+ }
+
+ SelfType operator--(int) {
+ SelfType temp(*this);
+ --*this;
+ return temp;
+ }
+
+ SelfType& operator--() {
+ decrement();
+ return *this;
+ }
+
+ SelfType& operator++() {
+ increment();
+ return *this;
+ }
+
+ reference operator*() const { return deref(); }
+
+ pointer operator->() const { return &deref(); }
+};
+
+} // namespace Json
+
+
+namespace std {
+/// Specialize std::swap() for Json::Value.
+template<>
+inline void swap(Json::Value& a, Json::Value& b) { a.swap(b); }
+}
+
+
+#if defined(JSONCPP_DISABLE_DLL_INTERFACE_WARNING)
+#pragma warning(pop)
+#endif // if defined(JSONCPP_DISABLE_DLL_INTERFACE_WARNING)
+
+#endif // CPPTL_JSON_H_INCLUDED
+
+// //////////////////////////////////////////////////////////////////////
+// End of content of file: include/json/value.h
+// //////////////////////////////////////////////////////////////////////
+
+
+
+
+
+
+// //////////////////////////////////////////////////////////////////////
+// Beginning of content of file: include/json/reader.h
+// //////////////////////////////////////////////////////////////////////
+
+// Copyright 2007-2010 Baptiste Lepilleur
+// Distributed under MIT license, or public domain if desired and
+// recognized in your jurisdiction.
+// See file LICENSE for detail or copy at http://jsoncpp.sourceforge.net/LICENSE
+
+#ifndef CPPTL_JSON_READER_H_INCLUDED
+#define CPPTL_JSON_READER_H_INCLUDED
+
+#if !defined(JSON_IS_AMALGAMATION)
+#include "features.h"
+#include "value.h"
+#endif // if !defined(JSON_IS_AMALGAMATION)
+#include <deque>
+#include <iosfwd>
+#include <stack>
+#include <string>
+#include <istream>
+
+// Disable warning C4251: <data member>: <type> needs to have dll-interface to
+// be used by...
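+// [Editor's note, not part of the upstream jsoncpp header] A minimal parsing
+// sketch for the (deprecated) Json::Reader declared below; `doc` and `root`
+// are hypothetical names, and <string>/<iostream> are assumed to be included:
+//
+//   Json::Reader reader;
+//   Json::Value root;
+//   std::string doc = "{\"answer\": 42}";
+//   if (!reader.parse(doc, root)) {
+//     std::cerr << reader.getFormattedErrorMessages();
+//   } else {
+//     int answer = root["answer"].asInt();  // 42
+//   }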
+#if defined(JSONCPP_DISABLE_DLL_INTERFACE_WARNING) +#pragma warning(push) +#pragma warning(disable : 4251) +#endif // if defined(JSONCPP_DISABLE_DLL_INTERFACE_WARNING) + +namespace Json { + +/** \brief Unserialize a JSON document into a + *Value. + * + * \deprecated Use CharReader and CharReaderBuilder. + */ +class JSON_API Reader { +public: + typedef char Char; + typedef const Char* Location; + + /** \brief An error tagged with where in the JSON text it was encountered. + * + * The offsets give the [start, limit) range of bytes within the text. Note + * that this is bytes, not codepoints. + * + */ + struct StructuredError { + size_t offset_start; + size_t offset_limit; + std::string message; + }; + + /** \brief Constructs a Reader allowing all features + * for parsing. + */ + Reader(); + + /** \brief Constructs a Reader allowing the specified feature set + * for parsing. + */ + Reader(const Features& features); + + /** \brief Read a Value from a JSON + * document. + * \param document UTF-8 encoded string containing the document to read. + * \param root [out] Contains the root value of the document if it was + * successfully parsed. + * \param collectComments \c true to collect comment and allow writing them + * back during + * serialization, \c false to discard comments. + * This parameter is ignored if + * Features::allowComments_ + * is \c false. + * \return \c true if the document was successfully parsed, \c false if an + * error occurred. + */ + bool + parse(const std::string& document, Value& root, bool collectComments = true); + + /** \brief Read a Value from a JSON + document. + * \param beginDoc Pointer on the beginning of the UTF-8 encoded string of the + document to read. + * \param endDoc Pointer on the end of the UTF-8 encoded string of the + document to read. + * Must be >= beginDoc. + * \param root [out] Contains the root value of the document if it was + * successfully parsed. + * \param collectComments \c true to collect comment and allow writing them + back during + * serialization, \c false to discard comments. + * This parameter is ignored if + Features::allowComments_ + * is \c false. + * \return \c true if the document was successfully parsed, \c false if an + error occurred. + */ + bool parse(const char* beginDoc, + const char* endDoc, + Value& root, + bool collectComments = true); + + /// \brief Parse from input stream. + /// \see Json::operator>>(std::istream&, Json::Value&). + bool parse(std::istream& is, Value& root, bool collectComments = true); + + /** \brief Returns a user friendly string that list errors in the parsed + * document. + * \return Formatted error message with the list of errors with their location + * in + * the parsed document. An empty string is returned if no error + * occurred + * during parsing. + * \deprecated Use getFormattedErrorMessages() instead (typo fix). + */ + JSONCPP_DEPRECATED("Use getFormattedErrorMessages() instead.") + std::string getFormatedErrorMessages() const; + + /** \brief Returns a user friendly string that list errors in the parsed + * document. + * \return Formatted error message with the list of errors with their location + * in + * the parsed document. An empty string is returned if no error + * occurred + * during parsing. + */ + std::string getFormattedErrorMessages() const; + + /** \brief Returns a vector of structured erros encounted while parsing. + * \return A (possibly empty) vector of StructuredError objects. 
Currently
+ * only one error can be returned, but the caller should tolerate
+ * multiple
+ * errors. This can occur if the parser recovers from a non-fatal
+ * parse error and then encounters additional errors.
+ */
+ std::vector<StructuredError> getStructuredErrors() const;
+
+ /** \brief Add a semantic error message.
+ * \param value JSON Value location associated with the error
+ * \param message The error message.
+ * \return \c true if the error was successfully added, \c false if the
+ * Value offset exceeds the document size.
+ */
+ bool pushError(const Value& value, const std::string& message);
+
+ /** \brief Add a semantic error message with extra context.
+ * \param value JSON Value location associated with the error
+ * \param message The error message.
+ * \param extra Additional JSON Value location to contextualize the error
+ * \return \c true if the error was successfully added, \c false if either
+ * Value offset exceeds the document size.
+ */
+ bool pushError(const Value& value, const std::string& message, const Value& extra);
+
+ /** \brief Return whether there are any errors.
+ * \return \c true if there are no errors to report \c false if
+ * errors have occurred.
+ */
+ bool good() const;
+
+private:
+ enum TokenType {
+ tokenEndOfStream = 0,
+ tokenObjectBegin,
+ tokenObjectEnd,
+ tokenArrayBegin,
+ tokenArrayEnd,
+ tokenString,
+ tokenNumber,
+ tokenTrue,
+ tokenFalse,
+ tokenNull,
+ tokenArraySeparator,
+ tokenMemberSeparator,
+ tokenComment,
+ tokenError
+ };
+
+ class Token {
+ public:
+ TokenType type_;
+ Location start_;
+ Location end_;
+ };
+
+ class ErrorInfo {
+ public:
+ Token token_;
+ std::string message_;
+ Location extra_;
+ };
+
+ typedef std::deque<ErrorInfo> Errors;
+
+ bool readToken(Token& token);
+ void skipSpaces();
+ bool match(Location pattern, int patternLength);
+ bool readComment();
+ bool readCStyleComment();
+ bool readCppStyleComment();
+ bool readString();
+ void readNumber();
+ bool readValue();
+ bool readObject(Token& token);
+ bool readArray(Token& token);
+ bool decodeNumber(Token& token);
+ bool decodeNumber(Token& token, Value& decoded);
+ bool decodeString(Token& token);
+ bool decodeString(Token& token, std::string& decoded);
+ bool decodeDouble(Token& token);
+ bool decodeDouble(Token& token, Value& decoded);
+ bool decodeUnicodeCodePoint(Token& token,
+ Location& current,
+ Location end,
+ unsigned int& unicode);
+ bool decodeUnicodeEscapeSequence(Token& token,
+ Location& current,
+ Location end,
+ unsigned int& unicode);
+ bool addError(const std::string& message, Token& token, Location extra = 0);
+ bool recoverFromError(TokenType skipUntilToken);
+ bool addErrorAndRecover(const std::string& message,
+ Token& token,
+ TokenType skipUntilToken);
+ void skipUntilSpace();
+ Value& currentValue();
+ Char getNextChar();
+ void
+ getLocationLineAndColumn(Location location, int& line, int& column) const;
+ std::string getLocationLineAndColumn(Location location) const;
+ void addComment(Location begin, Location end, CommentPlacement placement);
+ void skipCommentTokens(Token& token);
+
+ typedef std::stack<Value*> Nodes;
+ Nodes nodes_;
+ Errors errors_;
+ std::string document_;
+ Location begin_;
+ Location end_;
+ Location current_;
+ Location lastValueEnd_;
+ Value* lastValue_;
+ std::string commentsBefore_;
+ Features features_;
+ bool collectComments_;
+}; // Reader
+
+/** Interface for reading JSON from a char array.
+ */
+class JSON_API CharReader {
+public:
+ virtual ~CharReader() {}
+ /** \brief Read a Value from a JSON
+ document.
+ * The document must be a UTF-8 encoded string containing the document to read. + * + * \param beginDoc Pointer on the beginning of the UTF-8 encoded string of the + document to read. + * \param endDoc Pointer on the end of the UTF-8 encoded string of the + document to read. + * Must be >= beginDoc. + * \param root [out] Contains the root value of the document if it was + * successfully parsed. + * \param errs [out] Formatted error messages (if not NULL) + * a user friendly string that lists errors in the parsed + * document. + * \return \c true if the document was successfully parsed, \c false if an + error occurred. + */ + virtual bool parse( + char const* beginDoc, char const* endDoc, + Value* root, std::string* errs) = 0; + + class JSON_API Factory { + public: + virtual ~Factory() {} + /** \brief Allocate a CharReader via operator new(). + * \throw std::exception if something goes wrong (e.g. invalid settings) + */ + virtual CharReader* newCharReader() const = 0; + }; // Factory +}; // CharReader + +/** \brief Build a CharReader implementation. + +Usage: +\code + using namespace Json; + CharReaderBuilder builder; + builder["collectComments"] = false; + Value value; + std::string errs; + bool ok = parseFromStream(builder, std::cin, &value, &errs); +\endcode +*/ +class JSON_API CharReaderBuilder : public CharReader::Factory { +public: + // Note: We use a Json::Value so that we can add data-members to this class + // without a major version bump. + /** Configuration of this builder. + These are case-sensitive. + Available settings (case-sensitive): + - `"collectComments": false or true` + - true to collect comment and allow writing them + back during serialization, false to discard comments. + This parameter is ignored if allowComments is false. + - `"allowComments": false or true` + - true if comments are allowed. + - `"strictRoot": false or true` + - true if root must be either an array or an object value + - `"allowDroppedNullPlaceholders": false or true` + - true if dropped null placeholders are allowed. (See StreamWriterBuilder.) + - `"allowNumericKeys": false or true` + - true if numeric object keys are allowed. + - `"allowSingleQuotes": false or true` + - true if '' are allowed for strings (both keys and values) + - `"stackLimit": integer` + - Exceeding stackLimit (recursive depth of `readValue()`) will + cause an exception. + - This is a security issue (seg-faults caused by deeply nested JSON), + so the default is low. + - `"failIfExtra": false or true` + - If true, `parse()` returns false when extra non-whitespace trails + the JSON value in the input string. + - `"rejectDupKeys": false or true` + - If true, `parse()` returns false when a key is duplicated within an object. + - `"allowSpecialFloats": false or true` + - If true, special float values (NaNs and infinities) are allowed + and their values are lossfree restorable. + + You can examine 'settings_` yourself + to see the defaults. You can also write and read them just like any + JSON Value. + \sa setDefaults() + */ + Json::Value settings_; + + CharReaderBuilder(); + ~CharReaderBuilder() override; + + CharReader* newCharReader() const override; + + /** \return true if 'settings' are legal and consistent; + * otherwise, indicate bad settings via 'invalid'. + */ + bool validate(Json::Value* invalid) const; + + /** A simple way to update a specific setting. + */ + Value& operator[](std::string key); + + /** Called by ctor, but you can use this to reset settings_. 
+ * \pre 'settings' != NULL (but Json::null is fine)
+ * \remark Defaults:
+ * \snippet src/lib_json/json_reader.cpp CharReaderBuilderDefaults
+ */
+ static void setDefaults(Json::Value* settings);
+ /** Same as old Features::strictMode().
+ * \pre 'settings' != NULL (but Json::null is fine)
+ * \remark Defaults:
+ * \snippet src/lib_json/json_reader.cpp CharReaderBuilderStrictMode
+ */
+ static void strictMode(Json::Value* settings);
+};
+
+/** Consume entire stream and use its begin/end.
+ * Someday we might have a real StreamReader, but for now this
+ * is convenient.
+ */
+bool JSON_API parseFromStream(
+ CharReader::Factory const&,
+ std::istream&,
+ Value* root, std::string* errs);
+
+/** \brief Read from 'sin' into 'root'.
+
+ Always keep comments from the input JSON.
+
+ This can be used to read a file into a particular sub-object.
+ For example:
+ \code
+ Json::Value root;
+ cin >> root["dir"]["file"];
+ cout << root;
+ \endcode
+ Result:
+ \verbatim
+ {
+ "dir": {
+ "file": {
+ // The input stream JSON would be nested here.
+ }
+ }
+ }
+ \endverbatim
+ \throw std::exception on parse error.
+ \see Json::operator<<()
+*/
+JSON_API std::istream& operator>>(std::istream&, Value&);
+
+} // namespace Json
+
+#if defined(JSONCPP_DISABLE_DLL_INTERFACE_WARNING)
+#pragma warning(pop)
+#endif // if defined(JSONCPP_DISABLE_DLL_INTERFACE_WARNING)
+
+#endif // CPPTL_JSON_READER_H_INCLUDED
+
+// //////////////////////////////////////////////////////////////////////
+// End of content of file: include/json/reader.h
+// //////////////////////////////////////////////////////////////////////
+
+
+
+
+
+
+// //////////////////////////////////////////////////////////////////////
+// Beginning of content of file: include/json/writer.h
+// //////////////////////////////////////////////////////////////////////
+
+// Copyright 2007-2010 Baptiste Lepilleur
+// Distributed under MIT license, or public domain if desired and
+// recognized in your jurisdiction.
+// See file LICENSE for detail or copy at http://jsoncpp.sourceforge.net/LICENSE
+
+#ifndef JSON_WRITER_H_INCLUDED
+#define JSON_WRITER_H_INCLUDED
+
+#if !defined(JSON_IS_AMALGAMATION)
+#include "value.h"
+#endif // if !defined(JSON_IS_AMALGAMATION)
+#include <vector>
+#include <string>
+#include <ostream>
+
+// Disable warning C4251: <data member>: <type> needs to have dll-interface to
+// be used by...
+#if defined(JSONCPP_DISABLE_DLL_INTERFACE_WARNING)
+#pragma warning(push)
+#pragma warning(disable : 4251)
+#endif // if defined(JSONCPP_DISABLE_DLL_INTERFACE_WARNING)
+
+namespace Json {
+
+class Value;
+
+/**
+
+Usage:
+\code
+ using namespace Json;
+ void writeToStdout(StreamWriter::Factory const& factory, Value const& value) {
+ std::unique_ptr<StreamWriter> const writer(
+ factory.newStreamWriter());
+ writer->write(value, &std::cout);
+ std::cout << std::endl; // add lf and flush
+ }
+\endcode
+*/
+class JSON_API StreamWriter {
+protected:
+ std::ostream* sout_; // not owned; will not delete
+public:
+ StreamWriter();
+ virtual ~StreamWriter();
+ /** Write Value into document as configured in sub-class.
+ Do not take ownership of sout, but maintain a reference during function.
+ \pre sout != NULL
+ \return zero on success (For now, we always return zero, so check the stream instead.)
+ \throw std::exception possibly, depending on configuration
+ */
+ virtual int write(Value const& root, std::ostream* sout) = 0;
+
+ /** \brief A simple abstract factory.
+ */
+ class JSON_API Factory {
+ public:
+ virtual ~Factory();
+ /** \brief Allocate a CharReader via operator new().
+ * \throw std::exception if something goes wrong (e.g. invalid settings) + */ + virtual StreamWriter* newStreamWriter() const = 0; + }; // Factory +}; // StreamWriter + +/** \brief Write into stringstream, then return string, for convenience. + * A StreamWriter will be created from the factory, used, and then deleted. + */ +std::string JSON_API writeString(StreamWriter::Factory const& factory, Value const& root); + + +/** \brief Build a StreamWriter implementation. + +Usage: +\code + using namespace Json; + Value value = ...; + StreamWriterBuilder builder; + builder["commentStyle"] = "None"; + builder["indentation"] = " "; // or whatever you like + std::unique_ptr writer( + builder.newStreamWriter()); + writer->write(value, &std::cout); + std::cout << std::endl; // add lf and flush +\endcode +*/ +class JSON_API StreamWriterBuilder : public StreamWriter::Factory { +public: + // Note: We use a Json::Value so that we can add data-members to this class + // without a major version bump. + /** Configuration of this builder. + Available settings (case-sensitive): + - "commentStyle": "None" or "All" + - "indentation": "" + - "enableYAMLCompatibility": false or true + - slightly change the whitespace around colons + - "dropNullPlaceholders": false or true + - Drop the "null" string from the writer's output for nullValues. + Strictly speaking, this is not valid JSON. But when the output is being + fed to a browser's Javascript, it makes for smaller output and the + browser can handle the output just fine. + - "useSpecialFloats": false or true + - If true, outputs non-finite floating point values in the following way: + NaN values as "NaN", positive infinity as "Infinity", and negative infinity + as "-Infinity". + + You can examine 'settings_` yourself + to see the defaults. You can also write and read them just like any + JSON Value. + \sa setDefaults() + */ + Json::Value settings_; + + StreamWriterBuilder(); + ~StreamWriterBuilder() override; + + /** + * \throw std::exception if something goes wrong (e.g. invalid settings) + */ + StreamWriter* newStreamWriter() const override; + + /** \return true if 'settings' are legal and consistent; + * otherwise, indicate bad settings via 'invalid'. + */ + bool validate(Json::Value* invalid) const; + /** A simple way to update a specific setting. + */ + Value& operator[](std::string key); + + /** Called by ctor, but you can use this to reset settings_. + * \pre 'settings' != NULL (but Json::null is fine) + * \remark Defaults: + * \snippet src/lib_json/json_writer.cpp StreamWriterBuilderDefaults + */ + static void setDefaults(Json::Value* settings); +}; + +/** \brief Abstract class for writers. + * \deprecated Use StreamWriter. (And really, this is an implementation detail.) + */ +class JSON_API Writer { +public: + virtual ~Writer(); + + virtual std::string write(const Value& root) = 0; +}; + +/** \brief Outputs a Value in JSON format + *without formatting (not human friendly). + * + * The JSON document is written in a single line. It is not intended for 'human' + *consumption, + * but may be usefull to support feature such as RPC where bandwith is limited. + * \sa Reader, Value + * \deprecated Use StreamWriterBuilder. + */ +class JSON_API FastWriter : public Writer { + +public: + FastWriter(); + ~FastWriter() override {} + + void enableYAMLCompatibility(); + + /** \brief Drop the "null" string from the writer's output for nullValues. + * Strictly speaking, this is not valid JSON. 
But when the output is being + * fed to a browser's Javascript, it makes for smaller output and the + * browser can handle the output just fine. + */ + void dropNullPlaceholders(); + + void omitEndingLineFeed(); + +public: // overridden from Writer + std::string write(const Value& root) override; + +private: + void writeValue(const Value& value); + + std::string document_; + bool yamlCompatiblityEnabled_; + bool dropNullPlaceholders_; + bool omitEndingLineFeed_; +}; + +/** \brief Writes a Value in JSON format in a + *human friendly way. + * + * The rules for line break and indent are as follow: + * - Object value: + * - if empty then print {} without indent and line break + * - if not empty the print '{', line break & indent, print one value per + *line + * and then unindent and line break and print '}'. + * - Array value: + * - if empty then print [] without indent and line break + * - if the array contains no object value, empty array or some other value + *types, + * and all the values fit on one lines, then print the array on a single + *line. + * - otherwise, it the values do not fit on one line, or the array contains + * object or non empty array, then print one value per line. + * + * If the Value have comments then they are outputed according to their + *#CommentPlacement. + * + * \sa Reader, Value, Value::setComment() + * \deprecated Use StreamWriterBuilder. + */ +class JSON_API StyledWriter : public Writer { +public: + StyledWriter(); + ~StyledWriter() override {} + +public: // overridden from Writer + /** \brief Serialize a Value in JSON format. + * \param root Value to serialize. + * \return String containing the JSON document that represents the root value. + */ + std::string write(const Value& root) override; + +private: + void writeValue(const Value& value); + void writeArrayValue(const Value& value); + bool isMultineArray(const Value& value); + void pushValue(const std::string& value); + void writeIndent(); + void writeWithIndent(const std::string& value); + void indent(); + void unindent(); + void writeCommentBeforeValue(const Value& root); + void writeCommentAfterValueOnSameLine(const Value& root); + bool hasCommentForValue(const Value& value); + static std::string normalizeEOL(const std::string& text); + + typedef std::vector ChildValues; + + ChildValues childValues_; + std::string document_; + std::string indentString_; + int rightMargin_; + int indentSize_; + bool addChildValues_; +}; + +/** \brief Writes a Value in JSON format in a + human friendly way, + to a stream rather than to a string. + * + * The rules for line break and indent are as follow: + * - Object value: + * - if empty then print {} without indent and line break + * - if not empty the print '{', line break & indent, print one value per + line + * and then unindent and line break and print '}'. + * - Array value: + * - if empty then print [] without indent and line break + * - if the array contains no object value, empty array or some other value + types, + * and all the values fit on one lines, then print the array on a single + line. + * - otherwise, it the values do not fit on one line, or the array contains + * object or non empty array, then print one value per line. + * + * If the Value have comments then they are outputed according to their + #CommentPlacement. + * + * \param indentation Each level will be indented by this amount extra. + * \sa Reader, Value, Value::setComment() + * \deprecated Use StreamWriterBuilder. 
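+ *
+ * A minimal usage sketch (based only on the declarations that follow):
+ * \code
+ *   Json::StyledStreamWriter writer("  ");  // two-space indentation
+ *   writer.write(std::cout, root);          // 'root' is a Json::Value
+ * \endcode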
+ */ +class JSON_API StyledStreamWriter { +public: + StyledStreamWriter(std::string indentation = "\t"); + ~StyledStreamWriter() {} + +public: + /** \brief Serialize a Value in JSON format. + * \param out Stream to write to. (Can be ostringstream, e.g.) + * \param root Value to serialize. + * \note There is no point in deriving from Writer, since write() should not + * return a value. + */ + void write(std::ostream& out, const Value& root); + +private: + void writeValue(const Value& value); + void writeArrayValue(const Value& value); + bool isMultineArray(const Value& value); + void pushValue(const std::string& value); + void writeIndent(); + void writeWithIndent(const std::string& value); + void indent(); + void unindent(); + void writeCommentBeforeValue(const Value& root); + void writeCommentAfterValueOnSameLine(const Value& root); + bool hasCommentForValue(const Value& value); + static std::string normalizeEOL(const std::string& text); + + typedef std::vector ChildValues; + + ChildValues childValues_; + std::ostream* document_; + std::string indentString_; + int rightMargin_; + std::string indentation_; + bool addChildValues_ : 1; + bool indented_ : 1; +}; + +#if defined(JSON_HAS_INT64) +std::string JSON_API valueToString(Int value); +std::string JSON_API valueToString(UInt value); +#endif // if defined(JSON_HAS_INT64) +std::string JSON_API valueToString(LargestInt value); +std::string JSON_API valueToString(LargestUInt value); +std::string JSON_API valueToString(double value); +std::string JSON_API valueToString(bool value); +std::string JSON_API valueToQuotedString(const char* value); + +/// \brief Output using the StyledStreamWriter. +/// \see Json::operator>>() +JSON_API std::ostream& operator<<(std::ostream&, const Value& root); + +} // namespace Json + +#if defined(JSONCPP_DISABLE_DLL_INTERFACE_WARNING) +#pragma warning(pop) +#endif // if defined(JSONCPP_DISABLE_DLL_INTERFACE_WARNING) + +#endif // JSON_WRITER_H_INCLUDED + +// ////////////////////////////////////////////////////////////////////// +// End of content of file: include/json/writer.h +// ////////////////////////////////////////////////////////////////////// + + + + + + +// ////////////////////////////////////////////////////////////////////// +// Beginning of content of file: include/json/assertions.h +// ////////////////////////////////////////////////////////////////////// + +// Copyright 2007-2010 Baptiste Lepilleur +// Distributed under MIT license, or public domain if desired and +// recognized in your jurisdiction. +// See file LICENSE for detail or copy at http://jsoncpp.sourceforge.net/LICENSE + +#ifndef CPPTL_JSON_ASSERTIONS_H_INCLUDED +#define CPPTL_JSON_ASSERTIONS_H_INCLUDED + +#include +#include + +#if !defined(JSON_IS_AMALGAMATION) +#include "config.h" +#endif // if !defined(JSON_IS_AMALGAMATION) + +/** It should not be possible for a maliciously designed file to + * cause an abort() or seg-fault, so these macros are used only + * for pre-condition violations and internal logic errors. + */ +#if JSON_USE_EXCEPTION + +// @todo <= add detail about condition in exception +# define JSON_ASSERT(condition) \ + {if (!(condition)) {Json::throwLogicError( "assert json failed" );}} + +# define JSON_FAIL_MESSAGE(message) \ + { \ + std::ostringstream oss; oss << message; \ + Json::throwLogicError(oss.str()); \ + abort(); \ + } + +#else // JSON_USE_EXCEPTION + +# define JSON_ASSERT(condition) assert(condition) + +// The call to assert() will show the failure message in debug builds. 
In +// release builds we abort, for a core-dump or debugger. +# define JSON_FAIL_MESSAGE(message) \ + { \ + std::ostringstream oss; oss << message; \ + assert(false && oss.str().c_str()); \ + abort(); \ + } + + +#endif + +#define JSON_ASSERT_MESSAGE(condition, message) \ + if (!(condition)) { \ + JSON_FAIL_MESSAGE(message); \ + } + +#endif // CPPTL_JSON_ASSERTIONS_H_INCLUDED + +// ////////////////////////////////////////////////////////////////////// +// End of content of file: include/json/assertions.h +// ////////////////////////////////////////////////////////////////////// + + + + + +#endif //ifndef JSON_AMALGATED_H_INCLUDED diff --git a/packager/third_party/protobuf/conformance/third_party/jsoncpp/jsoncpp.cpp b/packager/third_party/protobuf/conformance/third_party/jsoncpp/jsoncpp.cpp new file mode 100644 index 0000000000..f803962ade --- /dev/null +++ b/packager/third_party/protobuf/conformance/third_party/jsoncpp/jsoncpp.cpp @@ -0,0 +1,5192 @@ +/// Json-cpp amalgated source (http://jsoncpp.sourceforge.net/). +/// It is intended to be used with #include "json/json.h" + +// ////////////////////////////////////////////////////////////////////// +// Beginning of content of file: LICENSE +// ////////////////////////////////////////////////////////////////////// + +/* +The JsonCpp library's source code, including accompanying documentation, +tests and demonstration applications, are licensed under the following +conditions... + +The author (Baptiste Lepilleur) explicitly disclaims copyright in all +jurisdictions which recognize such a disclaimer. In such jurisdictions, +this software is released into the Public Domain. + +In jurisdictions which do not recognize Public Domain property (e.g. Germany as of +2010), this software is Copyright (c) 2007-2010 by Baptiste Lepilleur, and is +released under the terms of the MIT License (see below). + +In jurisdictions which recognize Public Domain property, the user of this +software may choose to accept it either as 1) Public Domain, 2) under the +conditions of the MIT License (see below), or 3) under the terms of dual +Public Domain/MIT License conditions described here, as they choose. + +The MIT License is about as close to Public Domain as a license can get, and is +described in clear, concise terms at: + + http://en.wikipedia.org/wiki/MIT_License + +The full text of the MIT License follows: + +======================================================================== +Copyright (c) 2007-2010 Baptiste Lepilleur + +Permission is hereby granted, free of charge, to any person +obtaining a copy of this software and associated documentation +files (the "Software"), to deal in the Software without +restriction, including without limitation the rights to use, copy, +modify, merge, publish, distribute, sublicense, and/or sell copies +of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be +included in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS +BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN +ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN +CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. 
+======================================================================== +(END LICENSE TEXT) + +The MIT license is compatible with both the GPL and commercial +software, affording one all of the rights of Public Domain with the +minor nuisance of being required to keep the above copyright notice +and license text in the source code. Note also that by accepting the +Public Domain "license" you can re-license your copy using whatever +license you like. + +*/ + +// ////////////////////////////////////////////////////////////////////// +// End of content of file: LICENSE +// ////////////////////////////////////////////////////////////////////// + + + + + + +#include "third_party/jsoncpp/json.h" + +#ifndef JSON_IS_AMALGAMATION +#error "Compile with -I PATH_TO_JSON_DIRECTORY" +#endif + + +// ////////////////////////////////////////////////////////////////////// +// Beginning of content of file: src/lib_json/json_tool.h +// ////////////////////////////////////////////////////////////////////// + +// Copyright 2007-2010 Baptiste Lepilleur +// Distributed under MIT license, or public domain if desired and +// recognized in your jurisdiction. +// See file LICENSE for detail or copy at http://jsoncpp.sourceforge.net/LICENSE + +#ifndef LIB_JSONCPP_JSON_TOOL_H_INCLUDED +#define LIB_JSONCPP_JSON_TOOL_H_INCLUDED + +/* This header provides common string manipulation support, such as UTF-8, + * portable conversion from/to string... + * + * It is an internal header that must not be exposed. + */ + +namespace Json { + +/// Converts a unicode code-point to UTF-8. +static inline std::string codePointToUTF8(unsigned int cp) { + std::string result; + + // based on description from http://en.wikipedia.org/wiki/UTF-8 + + if (cp <= 0x7f) { + result.resize(1); + result[0] = static_cast(cp); + } else if (cp <= 0x7FF) { + result.resize(2); + result[1] = static_cast(0x80 | (0x3f & cp)); + result[0] = static_cast(0xC0 | (0x1f & (cp >> 6))); + } else if (cp <= 0xFFFF) { + result.resize(3); + result[2] = static_cast(0x80 | (0x3f & cp)); + result[1] = static_cast(0x80 | (0x3f & (cp >> 6))); + result[0] = static_cast(0xE0 | (0xf & (cp >> 12))); + } else if (cp <= 0x10FFFF) { + result.resize(4); + result[3] = static_cast(0x80 | (0x3f & cp)); + result[2] = static_cast(0x80 | (0x3f & (cp >> 6))); + result[1] = static_cast(0x80 | (0x3f & (cp >> 12))); + result[0] = static_cast(0xF0 | (0x7 & (cp >> 18))); + } + + return result; +} + +/// Returns true if ch is a control character (in range [1,31]). +static inline bool isControlCharacter(char ch) { return ch > 0 && ch <= 0x1F; } + +enum { + /// Constant that specify the size of the buffer that must be passed to + /// uintToString. + uintToStringBufferSize = 3 * sizeof(LargestUInt) + 1 +}; + +// Defines a char buffer for use with uintToString(). +typedef char UIntToStringBuffer[uintToStringBufferSize]; + +/** Converts an unsigned integer to string. + * @param value Unsigned interger to convert to string + * @param current Input/Output string buffer. + * Must have at least uintToStringBufferSize chars free. + */ +static inline void uintToString(LargestUInt value, char*& current) { + *--current = 0; + do { + *--current = static_cast(value % 10U + static_cast('0')); + value /= 10; + } while (value != 0); +} + +/** Change ',' to '.' everywhere in buffer. + * + * We had a sophisticated way, but it did not work in WinCE. 
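+ * For example, a locale that prints doubles as "1,234567" would otherwise
+ * yield invalid JSON; this pass rewrites the buffer to "1.234567".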
+ * @see https://github.com/open-source-parsers/jsoncpp/pull/9 + */ +static inline void fixNumericLocale(char* begin, char* end) { + while (begin < end) { + if (*begin == ',') { + *begin = '.'; + } + ++begin; + } +} + +} // namespace Json { + +#endif // LIB_JSONCPP_JSON_TOOL_H_INCLUDED + +// ////////////////////////////////////////////////////////////////////// +// End of content of file: src/lib_json/json_tool.h +// ////////////////////////////////////////////////////////////////////// + + + + + + +// ////////////////////////////////////////////////////////////////////// +// Beginning of content of file: src/lib_json/json_reader.cpp +// ////////////////////////////////////////////////////////////////////// + +// Copyright 2007-2011 Baptiste Lepilleur +// Distributed under MIT license, or public domain if desired and +// recognized in your jurisdiction. +// See file LICENSE for detail or copy at http://jsoncpp.sourceforge.net/LICENSE + +#if !defined(JSON_IS_AMALGAMATION) +#include +#include +#include +#include "json_tool.h" +#endif // if !defined(JSON_IS_AMALGAMATION) +#include +#include +#include +#include +#include +#include +#include +#include +#include + +#if defined(_MSC_VER) +#if !defined(WINCE) && defined(__STDC_SECURE_LIB__) && _MSC_VER >= 1500 // VC++ 9.0 and above +#define snprintf sprintf_s +#elif _MSC_VER >= 1900 // VC++ 14.0 and above +#define snprintf std::snprintf +#else +#define snprintf _snprintf +#endif +#elif defined(__ANDROID__) || defined(__QNXNTO__) +#define snprintf snprintf +#elif __cplusplus >= 201103L +#define snprintf std::snprintf +#endif + +#if defined(__QNXNTO__) +#define sscanf std::sscanf +#endif + +#if defined(_MSC_VER) && _MSC_VER >= 1400 // VC++ 8.0 +// Disable warning about strdup being deprecated. +#pragma warning(disable : 4996) +#endif + +static int const stackLimit_g = 1000; +static int stackDepth_g = 0; // see readValue() + +namespace Json { + +#if __cplusplus >= 201103L || (defined(_CPPLIB_VER) && _CPPLIB_VER >= 520) +typedef std::unique_ptr CharReaderPtr; +#else +typedef std::auto_ptr CharReaderPtr; +#endif + +// Implementation of class Features +// //////////////////////////////// + +Features::Features() + : allowComments_(true), strictRoot_(false), + allowDroppedNullPlaceholders_(false), allowNumericKeys_(false) {} + +Features Features::all() { return Features(); } + +Features Features::strictMode() { + Features features; + features.allowComments_ = false; + features.strictRoot_ = true; + features.allowDroppedNullPlaceholders_ = false; + features.allowNumericKeys_ = false; + return features; +} + +// Implementation of class Reader +// //////////////////////////////// + +static bool containsNewLine(Reader::Location begin, Reader::Location end) { + for (; begin < end; ++begin) + if (*begin == '\n' || *begin == '\r') + return true; + return false; +} + +// Class Reader +// ////////////////////////////////////////////////////////////////// + +Reader::Reader() + : errors_(), document_(), begin_(), end_(), current_(), lastValueEnd_(), + lastValue_(), commentsBefore_(), features_(Features::all()), + collectComments_() {} + +Reader::Reader(const Features& features) + : errors_(), document_(), begin_(), end_(), current_(), lastValueEnd_(), + lastValue_(), commentsBefore_(), features_(features), collectComments_() { +} + +bool +Reader::parse(const std::string& document, Value& root, bool collectComments) { + document_ = document; + const char* begin = document_.c_str(); + const char* end = begin + document_.length(); + return parse(begin, end, root, 
collectComments); +} + +bool Reader::parse(std::istream& sin, Value& root, bool collectComments) { + // std::istream_iterator begin(sin); + // std::istream_iterator end; + // Those would allow streamed input from a file, if parse() were a + // template function. + + // Since std::string is reference-counted, this at least does not + // create an extra copy. + std::string doc; + std::getline(sin, doc, (char)EOF); + return parse(doc, root, collectComments); +} + +bool Reader::parse(const char* beginDoc, + const char* endDoc, + Value& root, + bool collectComments) { + if (!features_.allowComments_) { + collectComments = false; + } + + begin_ = beginDoc; + end_ = endDoc; + collectComments_ = collectComments; + current_ = begin_; + lastValueEnd_ = 0; + lastValue_ = 0; + commentsBefore_ = ""; + errors_.clear(); + while (!nodes_.empty()) + nodes_.pop(); + nodes_.push(&root); + + stackDepth_g = 0; // Yes, this is bad coding, but options are limited. + bool successful = readValue(); + Token token; + skipCommentTokens(token); + if (collectComments_ && !commentsBefore_.empty()) + root.setComment(commentsBefore_, commentAfter); + if (features_.strictRoot_) { + if (!root.isArray() && !root.isObject()) { + // Set error location to start of doc, ideally should be first token found + // in doc + token.type_ = tokenError; + token.start_ = beginDoc; + token.end_ = endDoc; + addError( + "A valid JSON document must be either an array or an object value.", + token); + return false; + } + } + return successful; +} + +bool Reader::readValue() { + // This is a non-reentrant way to support a stackLimit. Terrible! + // But this deprecated class has a security problem: Bad input can + // cause a seg-fault. This seems like a fair, binary-compatible way + // to prevent the problem. + if (stackDepth_g >= stackLimit_g) throwRuntimeError("Exceeded stackLimit in readValue()."); + ++stackDepth_g; + + Token token; + skipCommentTokens(token); + bool successful = true; + + if (collectComments_ && !commentsBefore_.empty()) { + currentValue().setComment(commentsBefore_, commentBefore); + commentsBefore_ = ""; + } + + switch (token.type_) { + case tokenObjectBegin: + successful = readObject(token); + currentValue().setOffsetLimit(current_ - begin_); + break; + case tokenArrayBegin: + successful = readArray(token); + currentValue().setOffsetLimit(current_ - begin_); + break; + case tokenNumber: + successful = decodeNumber(token); + break; + case tokenString: + successful = decodeString(token); + break; + case tokenTrue: + { + Value v(true); + currentValue().swapPayload(v); + currentValue().setOffsetStart(token.start_ - begin_); + currentValue().setOffsetLimit(token.end_ - begin_); + } + break; + case tokenFalse: + { + Value v(false); + currentValue().swapPayload(v); + currentValue().setOffsetStart(token.start_ - begin_); + currentValue().setOffsetLimit(token.end_ - begin_); + } + break; + case tokenNull: + { + Value v; + currentValue().swapPayload(v); + currentValue().setOffsetStart(token.start_ - begin_); + currentValue().setOffsetLimit(token.end_ - begin_); + } + break; + case tokenArraySeparator: + case tokenObjectEnd: + case tokenArrayEnd: + if (features_.allowDroppedNullPlaceholders_) { + // "Un-read" the current token and mark the current value as a null + // token. + current_--; + Value v; + currentValue().swapPayload(v); + currentValue().setOffsetStart(current_ - begin_ - 1); + currentValue().setOffsetLimit(current_ - begin_); + break; + } // Else, fall through... 
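+  // Illustrative note: with allowDroppedNullPlaceholders_ set, input such as
+  // [1,,3] is read back as [1,null,3]; otherwise the stray separator falls
+  // through to the syntax error below.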
+ default: + currentValue().setOffsetStart(token.start_ - begin_); + currentValue().setOffsetLimit(token.end_ - begin_); + return addError("Syntax error: value, object or array expected.", token); + } + + if (collectComments_) { + lastValueEnd_ = current_; + lastValue_ = ¤tValue(); + } + + --stackDepth_g; + return successful; +} + +void Reader::skipCommentTokens(Token& token) { + if (features_.allowComments_) { + do { + readToken(token); + } while (token.type_ == tokenComment); + } else { + readToken(token); + } +} + +bool Reader::readToken(Token& token) { + skipSpaces(); + token.start_ = current_; + Char c = getNextChar(); + bool ok = true; + switch (c) { + case '{': + token.type_ = tokenObjectBegin; + break; + case '}': + token.type_ = tokenObjectEnd; + break; + case '[': + token.type_ = tokenArrayBegin; + break; + case ']': + token.type_ = tokenArrayEnd; + break; + case '"': + token.type_ = tokenString; + ok = readString(); + break; + case '/': + token.type_ = tokenComment; + ok = readComment(); + break; + case '0': + case '1': + case '2': + case '3': + case '4': + case '5': + case '6': + case '7': + case '8': + case '9': + case '-': + token.type_ = tokenNumber; + readNumber(); + break; + case 't': + token.type_ = tokenTrue; + ok = match("rue", 3); + break; + case 'f': + token.type_ = tokenFalse; + ok = match("alse", 4); + break; + case 'n': + token.type_ = tokenNull; + ok = match("ull", 3); + break; + case ',': + token.type_ = tokenArraySeparator; + break; + case ':': + token.type_ = tokenMemberSeparator; + break; + case 0: + token.type_ = tokenEndOfStream; + break; + default: + ok = false; + break; + } + if (!ok) + token.type_ = tokenError; + token.end_ = current_; + return true; +} + +void Reader::skipSpaces() { + while (current_ != end_) { + Char c = *current_; + if (c == ' ' || c == '\t' || c == '\r' || c == '\n') + ++current_; + else + break; + } +} + +bool Reader::match(Location pattern, int patternLength) { + if (end_ - current_ < patternLength) + return false; + int index = patternLength; + while (index--) + if (current_[index] != pattern[index]) + return false; + current_ += patternLength; + return true; +} + +bool Reader::readComment() { + Location commentBegin = current_ - 1; + Char c = getNextChar(); + bool successful = false; + if (c == '*') + successful = readCStyleComment(); + else if (c == '/') + successful = readCppStyleComment(); + if (!successful) + return false; + + if (collectComments_) { + CommentPlacement placement = commentBefore; + if (lastValueEnd_ && !containsNewLine(lastValueEnd_, commentBegin)) { + if (c != '*' || !containsNewLine(commentBegin, current_)) + placement = commentAfterOnSameLine; + } + + addComment(commentBegin, current_, placement); + } + return true; +} + +static std::string normalizeEOL(Reader::Location begin, Reader::Location end) { + std::string normalized; + normalized.reserve(end - begin); + Reader::Location current = begin; + while (current != end) { + char c = *current++; + if (c == '\r') { + if (current != end && *current == '\n') + // convert dos EOL + ++current; + // convert Mac EOL + normalized += '\n'; + } else { + normalized += c; + } + } + return normalized; +} + +void +Reader::addComment(Location begin, Location end, CommentPlacement placement) { + assert(collectComments_); + const std::string& normalized = normalizeEOL(begin, end); + if (placement == commentAfterOnSameLine) { + assert(lastValue_ != 0); + lastValue_->setComment(normalized, placement); + } else { + commentsBefore_ += normalized; + } +} + +bool 
Reader::readCStyleComment() { + while (current_ != end_) { + Char c = getNextChar(); + if (c == '*' && *current_ == '/') + break; + } + return getNextChar() == '/'; +} + +bool Reader::readCppStyleComment() { + while (current_ != end_) { + Char c = getNextChar(); + if (c == '\n') + break; + if (c == '\r') { + // Consume DOS EOL. It will be normalized in addComment. + if (current_ != end_ && *current_ == '\n') + getNextChar(); + // Break on Moc OS 9 EOL. + break; + } + } + return true; +} + +void Reader::readNumber() { + const char *p = current_; + char c = '0'; // stopgap for already consumed character + // integral part + while (c >= '0' && c <= '9') + c = (current_ = p) < end_ ? *p++ : 0; + // fractional part + if (c == '.') { + c = (current_ = p) < end_ ? *p++ : 0; + while (c >= '0' && c <= '9') + c = (current_ = p) < end_ ? *p++ : 0; + } + // exponential part + if (c == 'e' || c == 'E') { + c = (current_ = p) < end_ ? *p++ : 0; + if (c == '+' || c == '-') + c = (current_ = p) < end_ ? *p++ : 0; + while (c >= '0' && c <= '9') + c = (current_ = p) < end_ ? *p++ : 0; + } +} + +bool Reader::readString() { + Char c = 0; + while (current_ != end_) { + c = getNextChar(); + if (c == '\\') + getNextChar(); + else if (c == '"') + break; + } + return c == '"'; +} + +bool Reader::readObject(Token& tokenStart) { + Token tokenName; + std::string name; + Value init(objectValue); + currentValue().swapPayload(init); + currentValue().setOffsetStart(tokenStart.start_ - begin_); + while (readToken(tokenName)) { + bool initialTokenOk = true; + while (tokenName.type_ == tokenComment && initialTokenOk) + initialTokenOk = readToken(tokenName); + if (!initialTokenOk) + break; + if (tokenName.type_ == tokenObjectEnd && name.empty()) // empty object + return true; + name = ""; + if (tokenName.type_ == tokenString) { + if (!decodeString(tokenName, name)) + return recoverFromError(tokenObjectEnd); + } else if (tokenName.type_ == tokenNumber && features_.allowNumericKeys_) { + Value numberName; + if (!decodeNumber(tokenName, numberName)) + return recoverFromError(tokenObjectEnd); + name = numberName.asString(); + } else { + break; + } + + Token colon; + if (!readToken(colon) || colon.type_ != tokenMemberSeparator) { + return addErrorAndRecover( + "Missing ':' after object member name", colon, tokenObjectEnd); + } + Value& value = currentValue()[name]; + nodes_.push(&value); + bool ok = readValue(); + nodes_.pop(); + if (!ok) // error already set + return recoverFromError(tokenObjectEnd); + + Token comma; + if (!readToken(comma) || + (comma.type_ != tokenObjectEnd && comma.type_ != tokenArraySeparator && + comma.type_ != tokenComment)) { + return addErrorAndRecover( + "Missing ',' or '}' in object declaration", comma, tokenObjectEnd); + } + bool finalizeTokenOk = true; + while (comma.type_ == tokenComment && finalizeTokenOk) + finalizeTokenOk = readToken(comma); + if (comma.type_ == tokenObjectEnd) + return true; + } + return addErrorAndRecover( + "Missing '}' or object member name", tokenName, tokenObjectEnd); +} + +bool Reader::readArray(Token& tokenStart) { + Value init(arrayValue); + currentValue().swapPayload(init); + currentValue().setOffsetStart(tokenStart.start_ - begin_); + skipSpaces(); + if (*current_ == ']') // empty array + { + Token endArray; + readToken(endArray); + return true; + } + int index = 0; + for (;;) { + Value& value = currentValue()[index++]; + nodes_.push(&value); + bool ok = readValue(); + nodes_.pop(); + if (!ok) // error already set + return recoverFromError(tokenArrayEnd); + + Token 
token; + // Accept Comment after last item in the array. + ok = readToken(token); + while (token.type_ == tokenComment && ok) { + ok = readToken(token); + } + bool badTokenType = + (token.type_ != tokenArraySeparator && token.type_ != tokenArrayEnd); + if (!ok || badTokenType) { + return addErrorAndRecover( + "Missing ',' or ']' in array declaration", token, tokenArrayEnd); + } + if (token.type_ == tokenArrayEnd) + break; + } + return true; +} + +bool Reader::decodeNumber(Token& token) { + Value decoded; + if (!decodeNumber(token, decoded)) + return false; + currentValue().swapPayload(decoded); + currentValue().setOffsetStart(token.start_ - begin_); + currentValue().setOffsetLimit(token.end_ - begin_); + return true; +} + +bool Reader::decodeNumber(Token& token, Value& decoded) { + // Attempts to parse the number as an integer. If the number is + // larger than the maximum supported value of an integer then + // we decode the number as a double. + Location current = token.start_; + bool isNegative = *current == '-'; + if (isNegative) + ++current; + // TODO: Help the compiler do the div and mod at compile time or get rid of them. + Value::LargestUInt maxIntegerValue = + isNegative ? Value::LargestUInt(Value::maxLargestInt) + 1 + : Value::maxLargestUInt; + Value::LargestUInt threshold = maxIntegerValue / 10; + Value::LargestUInt value = 0; + while (current < token.end_) { + Char c = *current++; + if (c < '0' || c > '9') + return decodeDouble(token, decoded); + Value::UInt digit(c - '0'); + if (value >= threshold) { + // We've hit or exceeded the max value divided by 10 (rounded down). If + // a) we've only just touched the limit, b) this is the last digit, and + // c) it's small enough to fit in that rounding delta, we're okay. + // Otherwise treat this number as a double to avoid overflow. 
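+      // Illustrative, assuming a 64-bit LargestInt: maxLargestInt is
+      // 9223372036854775807, so threshold is 922337203685477580 and the last
+      // digit may be at most 7 (8 while parsing a negative number).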
+ if (value > threshold || current != token.end_ || + digit > maxIntegerValue % 10) { + return decodeDouble(token, decoded); + } + } + value = value * 10 + digit; + } + if (isNegative && value == maxIntegerValue) + decoded = Value::minLargestInt; + else if (isNegative) + decoded = -Value::LargestInt(value); + else if (value <= Value::LargestUInt(Value::maxInt)) + decoded = Value::LargestInt(value); + else + decoded = value; + return true; +} + +bool Reader::decodeDouble(Token& token) { + Value decoded; + if (!decodeDouble(token, decoded)) + return false; + currentValue().swapPayload(decoded); + currentValue().setOffsetStart(token.start_ - begin_); + currentValue().setOffsetLimit(token.end_ - begin_); + return true; +} + +bool Reader::decodeDouble(Token& token, Value& decoded) { + double value = 0; + std::string buffer(token.start_, token.end_); + std::istringstream is(buffer); + if (!(is >> value)) + return addError("'" + std::string(token.start_, token.end_) + + "' is not a number.", + token); + decoded = value; + return true; +} + +bool Reader::decodeString(Token& token) { + std::string decoded_string; + if (!decodeString(token, decoded_string)) + return false; + Value decoded(decoded_string); + currentValue().swapPayload(decoded); + currentValue().setOffsetStart(token.start_ - begin_); + currentValue().setOffsetLimit(token.end_ - begin_); + return true; +} + +bool Reader::decodeString(Token& token, std::string& decoded) { + decoded.reserve(token.end_ - token.start_ - 2); + Location current = token.start_ + 1; // skip '"' + Location end = token.end_ - 1; // do not include '"' + while (current != end) { + Char c = *current++; + if (c == '"') + break; + else if (c == '\\') { + if (current == end) + return addError("Empty escape sequence in string", token, current); + Char escape = *current++; + switch (escape) { + case '"': + decoded += '"'; + break; + case '/': + decoded += '/'; + break; + case '\\': + decoded += '\\'; + break; + case 'b': + decoded += '\b'; + break; + case 'f': + decoded += '\f'; + break; + case 'n': + decoded += '\n'; + break; + case 'r': + decoded += '\r'; + break; + case 't': + decoded += '\t'; + break; + case 'u': { + unsigned int unicode; + if (!decodeUnicodeCodePoint(token, current, end, unicode)) + return false; + decoded += codePointToUTF8(unicode); + } break; + default: + return addError("Bad escape sequence in string", token, current); + } + } else { + decoded += c; + } + } + return true; +} + +bool Reader::decodeUnicodeCodePoint(Token& token, + Location& current, + Location end, + unsigned int& unicode) { + + if (!decodeUnicodeEscapeSequence(token, current, end, unicode)) + return false; + if (unicode >= 0xD800 && unicode <= 0xDBFF) { + // surrogate pairs + if (end - current < 6) + return addError( + "additional six characters expected to parse unicode surrogate pair.", + token, + current); + unsigned int surrogatePair; + if (*(current++) == '\\' && *(current++) == 'u') { + if (decodeUnicodeEscapeSequence(token, current, end, surrogatePair)) { + unicode = 0x10000 + ((unicode & 0x3FF) << 10) + (surrogatePair & 0x3FF); + } else + return false; + } else + return addError("expecting another \\u token to begin the second half of " + "a unicode surrogate pair", + token, + current); + } + return true; +} + +bool Reader::decodeUnicodeEscapeSequence(Token& token, + Location& current, + Location end, + unsigned int& unicode) { + if (end - current < 4) + return addError( + "Bad unicode escape sequence in string: four digits expected.", + token, + current); + unicode = 0; 
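+  // Illustrative: the escape "\u00E9" decodes to code point 0x00E9, which
+  // codePointToUTF8() then emits as the UTF-8 bytes 0xC3 0xA9.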
+ for (int index = 0; index < 4; ++index) { + Char c = *current++; + unicode *= 16; + if (c >= '0' && c <= '9') + unicode += c - '0'; + else if (c >= 'a' && c <= 'f') + unicode += c - 'a' + 10; + else if (c >= 'A' && c <= 'F') + unicode += c - 'A' + 10; + else + return addError( + "Bad unicode escape sequence in string: hexadecimal digit expected.", + token, + current); + } + return true; +} + +bool +Reader::addError(const std::string& message, Token& token, Location extra) { + ErrorInfo info; + info.token_ = token; + info.message_ = message; + info.extra_ = extra; + errors_.push_back(info); + return false; +} + +bool Reader::recoverFromError(TokenType skipUntilToken) { + int errorCount = int(errors_.size()); + Token skip; + for (;;) { + if (!readToken(skip)) + errors_.resize(errorCount); // discard errors caused by recovery + if (skip.type_ == skipUntilToken || skip.type_ == tokenEndOfStream) + break; + } + errors_.resize(errorCount); + return false; +} + +bool Reader::addErrorAndRecover(const std::string& message, + Token& token, + TokenType skipUntilToken) { + addError(message, token); + return recoverFromError(skipUntilToken); +} + +Value& Reader::currentValue() { return *(nodes_.top()); } + +Reader::Char Reader::getNextChar() { + if (current_ == end_) + return 0; + return *current_++; +} + +void Reader::getLocationLineAndColumn(Location location, + int& line, + int& column) const { + Location current = begin_; + Location lastLineStart = current; + line = 0; + while (current < location && current != end_) { + Char c = *current++; + if (c == '\r') { + if (*current == '\n') + ++current; + lastLineStart = current; + ++line; + } else if (c == '\n') { + lastLineStart = current; + ++line; + } + } + // column & line start at 1 + column = int(location - lastLineStart) + 1; + ++line; +} + +std::string Reader::getLocationLineAndColumn(Location location) const { + int line, column; + getLocationLineAndColumn(location, line, column); + char buffer[18 + 16 + 16 + 1]; + snprintf(buffer, sizeof(buffer), "Line %d, Column %d", line, column); + return buffer; +} + +// Deprecated. 
Preserved for backward compatibility +std::string Reader::getFormatedErrorMessages() const { + return getFormattedErrorMessages(); +} + +std::string Reader::getFormattedErrorMessages() const { + std::string formattedMessage; + for (Errors::const_iterator itError = errors_.begin(); + itError != errors_.end(); + ++itError) { + const ErrorInfo& error = *itError; + formattedMessage += + "* " + getLocationLineAndColumn(error.token_.start_) + "\n"; + formattedMessage += " " + error.message_ + "\n"; + if (error.extra_) + formattedMessage += + "See " + getLocationLineAndColumn(error.extra_) + " for detail.\n"; + } + return formattedMessage; +} + +std::vector Reader::getStructuredErrors() const { + std::vector allErrors; + for (Errors::const_iterator itError = errors_.begin(); + itError != errors_.end(); + ++itError) { + const ErrorInfo& error = *itError; + Reader::StructuredError structured; + structured.offset_start = error.token_.start_ - begin_; + structured.offset_limit = error.token_.end_ - begin_; + structured.message = error.message_; + allErrors.push_back(structured); + } + return allErrors; +} + +bool Reader::pushError(const Value& value, const std::string& message) { + size_t length = end_ - begin_; + if(value.getOffsetStart() > length + || value.getOffsetLimit() > length) + return false; + Token token; + token.type_ = tokenError; + token.start_ = begin_ + value.getOffsetStart(); + token.end_ = end_ + value.getOffsetLimit(); + ErrorInfo info; + info.token_ = token; + info.message_ = message; + info.extra_ = 0; + errors_.push_back(info); + return true; +} + +bool Reader::pushError(const Value& value, const std::string& message, const Value& extra) { + size_t length = end_ - begin_; + if(value.getOffsetStart() > length + || value.getOffsetLimit() > length + || extra.getOffsetLimit() > length) + return false; + Token token; + token.type_ = tokenError; + token.start_ = begin_ + value.getOffsetStart(); + token.end_ = begin_ + value.getOffsetLimit(); + ErrorInfo info; + info.token_ = token; + info.message_ = message; + info.extra_ = begin_ + extra.getOffsetStart(); + errors_.push_back(info); + return true; +} + +bool Reader::good() const { + return !errors_.size(); +} + +// exact copy of Features +class OurFeatures { +public: + static OurFeatures all(); + bool allowComments_; + bool strictRoot_; + bool allowDroppedNullPlaceholders_; + bool allowNumericKeys_; + bool allowSingleQuotes_; + bool failIfExtra_; + bool rejectDupKeys_; + bool allowSpecialFloats_; + int stackLimit_; +}; // OurFeatures + +// exact copy of Implementation of class Features +// //////////////////////////////// + +OurFeatures OurFeatures::all() { return OurFeatures(); } + +// Implementation of class Reader +// //////////////////////////////// + +// exact copy of Reader, renamed to OurReader +class OurReader { +public: + typedef char Char; + typedef const Char* Location; + struct StructuredError { + size_t offset_start; + size_t offset_limit; + std::string message; + }; + + OurReader(OurFeatures const& features); + bool parse(const char* beginDoc, + const char* endDoc, + Value& root, + bool collectComments = true); + std::string getFormattedErrorMessages() const; + std::vector getStructuredErrors() const; + bool pushError(const Value& value, const std::string& message); + bool pushError(const Value& value, const std::string& message, const Value& extra); + bool good() const; + +private: + OurReader(OurReader const&); // no impl + void operator=(OurReader const&); // no impl + + enum TokenType { + tokenEndOfStream = 0, + 
tokenObjectBegin, + tokenObjectEnd, + tokenArrayBegin, + tokenArrayEnd, + tokenString, + tokenNumber, + tokenTrue, + tokenFalse, + tokenNull, + tokenNaN, + tokenPosInf, + tokenNegInf, + tokenArraySeparator, + tokenMemberSeparator, + tokenComment, + tokenError + }; + + class Token { + public: + TokenType type_; + Location start_; + Location end_; + }; + + class ErrorInfo { + public: + Token token_; + std::string message_; + Location extra_; + }; + + typedef std::deque Errors; + + bool readToken(Token& token); + void skipSpaces(); + bool match(Location pattern, int patternLength); + bool readComment(); + bool readCStyleComment(); + bool readCppStyleComment(); + bool readString(); + bool readStringSingleQuote(); + bool readNumber(bool checkInf); + bool readValue(); + bool readObject(Token& token); + bool readArray(Token& token); + bool decodeNumber(Token& token); + bool decodeNumber(Token& token, Value& decoded); + bool decodeString(Token& token); + bool decodeString(Token& token, std::string& decoded); + bool decodeDouble(Token& token); + bool decodeDouble(Token& token, Value& decoded); + bool decodeUnicodeCodePoint(Token& token, + Location& current, + Location end, + unsigned int& unicode); + bool decodeUnicodeEscapeSequence(Token& token, + Location& current, + Location end, + unsigned int& unicode); + bool addError(const std::string& message, Token& token, Location extra = 0); + bool recoverFromError(TokenType skipUntilToken); + bool addErrorAndRecover(const std::string& message, + Token& token, + TokenType skipUntilToken); + void skipUntilSpace(); + Value& currentValue(); + Char getNextChar(); + void + getLocationLineAndColumn(Location location, int& line, int& column) const; + std::string getLocationLineAndColumn(Location location) const; + void addComment(Location begin, Location end, CommentPlacement placement); + void skipCommentTokens(Token& token); + + typedef std::stack Nodes; + Nodes nodes_; + Errors errors_; + std::string document_; + Location begin_; + Location end_; + Location current_; + Location lastValueEnd_; + Value* lastValue_; + std::string commentsBefore_; + int stackDepth_; + + OurFeatures const features_; + bool collectComments_; +}; // OurReader + +// complete copy of Read impl, for OurReader + +OurReader::OurReader(OurFeatures const& features) + : errors_(), document_(), begin_(), end_(), current_(), lastValueEnd_(), + lastValue_(), commentsBefore_(), + stackDepth_(0), + features_(features), collectComments_() { +} + +bool OurReader::parse(const char* beginDoc, + const char* endDoc, + Value& root, + bool collectComments) { + if (!features_.allowComments_) { + collectComments = false; + } + + begin_ = beginDoc; + end_ = endDoc; + collectComments_ = collectComments; + current_ = begin_; + lastValueEnd_ = 0; + lastValue_ = 0; + commentsBefore_ = ""; + errors_.clear(); + while (!nodes_.empty()) + nodes_.pop(); + nodes_.push(&root); + + stackDepth_ = 0; + bool successful = readValue(); + Token token; + skipCommentTokens(token); + if (features_.failIfExtra_) { + if (token.type_ != tokenError && token.type_ != tokenEndOfStream) { + addError("Extra non-whitespace after JSON value.", token); + return false; + } + } + if (collectComments_ && !commentsBefore_.empty()) + root.setComment(commentsBefore_, commentAfter); + if (features_.strictRoot_) { + if (!root.isArray() && !root.isObject()) { + // Set error location to start of doc, ideally should be first token found + // in doc + token.type_ = tokenError; + token.start_ = beginDoc; + token.end_ = endDoc; + addError( + "A valid 
JSON document must be either an array or an object value.", + token); + return false; + } + } + return successful; +} + +bool OurReader::readValue() { + if (stackDepth_ >= features_.stackLimit_) throwRuntimeError("Exceeded stackLimit in readValue()."); + ++stackDepth_; + Token token; + skipCommentTokens(token); + bool successful = true; + + if (collectComments_ && !commentsBefore_.empty()) { + currentValue().setComment(commentsBefore_, commentBefore); + commentsBefore_ = ""; + } + + switch (token.type_) { + case tokenObjectBegin: + successful = readObject(token); + currentValue().setOffsetLimit(current_ - begin_); + break; + case tokenArrayBegin: + successful = readArray(token); + currentValue().setOffsetLimit(current_ - begin_); + break; + case tokenNumber: + successful = decodeNumber(token); + break; + case tokenString: + successful = decodeString(token); + break; + case tokenTrue: + { + Value v(true); + currentValue().swapPayload(v); + currentValue().setOffsetStart(token.start_ - begin_); + currentValue().setOffsetLimit(token.end_ - begin_); + } + break; + case tokenFalse: + { + Value v(false); + currentValue().swapPayload(v); + currentValue().setOffsetStart(token.start_ - begin_); + currentValue().setOffsetLimit(token.end_ - begin_); + } + break; + case tokenNull: + { + Value v; + currentValue().swapPayload(v); + currentValue().setOffsetStart(token.start_ - begin_); + currentValue().setOffsetLimit(token.end_ - begin_); + } + break; + case tokenNaN: + { + Value v(std::numeric_limits::quiet_NaN()); + currentValue().swapPayload(v); + currentValue().setOffsetStart(token.start_ - begin_); + currentValue().setOffsetLimit(token.end_ - begin_); + } + break; + case tokenPosInf: + { + Value v(std::numeric_limits::infinity()); + currentValue().swapPayload(v); + currentValue().setOffsetStart(token.start_ - begin_); + currentValue().setOffsetLimit(token.end_ - begin_); + } + break; + case tokenNegInf: + { + Value v(-std::numeric_limits::infinity()); + currentValue().swapPayload(v); + currentValue().setOffsetStart(token.start_ - begin_); + currentValue().setOffsetLimit(token.end_ - begin_); + } + break; + case tokenArraySeparator: + case tokenObjectEnd: + case tokenArrayEnd: + if (features_.allowDroppedNullPlaceholders_) { + // "Un-read" the current token and mark the current value as a null + // token. + current_--; + Value v; + currentValue().swapPayload(v); + currentValue().setOffsetStart(current_ - begin_ - 1); + currentValue().setOffsetLimit(current_ - begin_); + break; + } // else, fall through ... 
+ default: + currentValue().setOffsetStart(token.start_ - begin_); + currentValue().setOffsetLimit(token.end_ - begin_); + return addError("Syntax error: value, object or array expected.", token); + } + + if (collectComments_) { + lastValueEnd_ = current_; + lastValue_ = ¤tValue(); + } + + --stackDepth_; + return successful; +} + +void OurReader::skipCommentTokens(Token& token) { + if (features_.allowComments_) { + do { + readToken(token); + } while (token.type_ == tokenComment); + } else { + readToken(token); + } +} + +bool OurReader::readToken(Token& token) { + skipSpaces(); + token.start_ = current_; + Char c = getNextChar(); + bool ok = true; + switch (c) { + case '{': + token.type_ = tokenObjectBegin; + break; + case '}': + token.type_ = tokenObjectEnd; + break; + case '[': + token.type_ = tokenArrayBegin; + break; + case ']': + token.type_ = tokenArrayEnd; + break; + case '"': + token.type_ = tokenString; + ok = readString(); + break; + case '\'': + if (features_.allowSingleQuotes_) { + token.type_ = tokenString; + ok = readStringSingleQuote(); + break; + } // else continue + case '/': + token.type_ = tokenComment; + ok = readComment(); + break; + case '0': + case '1': + case '2': + case '3': + case '4': + case '5': + case '6': + case '7': + case '8': + case '9': + token.type_ = tokenNumber; + readNumber(false); + break; + case '-': + if (readNumber(true)) { + token.type_ = tokenNumber; + } else { + token.type_ = tokenNegInf; + ok = features_.allowSpecialFloats_ && match("nfinity", 7); + } + break; + case 't': + token.type_ = tokenTrue; + ok = match("rue", 3); + break; + case 'f': + token.type_ = tokenFalse; + ok = match("alse", 4); + break; + case 'n': + token.type_ = tokenNull; + ok = match("ull", 3); + break; + case 'N': + if (features_.allowSpecialFloats_) { + token.type_ = tokenNaN; + ok = match("aN", 2); + } else { + ok = false; + } + break; + case 'I': + if (features_.allowSpecialFloats_) { + token.type_ = tokenPosInf; + ok = match("nfinity", 7); + } else { + ok = false; + } + break; + case ',': + token.type_ = tokenArraySeparator; + break; + case ':': + token.type_ = tokenMemberSeparator; + break; + case 0: + token.type_ = tokenEndOfStream; + break; + default: + ok = false; + break; + } + if (!ok) + token.type_ = tokenError; + token.end_ = current_; + return true; +} + +void OurReader::skipSpaces() { + while (current_ != end_) { + Char c = *current_; + if (c == ' ' || c == '\t' || c == '\r' || c == '\n') + ++current_; + else + break; + } +} + +bool OurReader::match(Location pattern, int patternLength) { + if (end_ - current_ < patternLength) + return false; + int index = patternLength; + while (index--) + if (current_[index] != pattern[index]) + return false; + current_ += patternLength; + return true; +} + +bool OurReader::readComment() { + Location commentBegin = current_ - 1; + Char c = getNextChar(); + bool successful = false; + if (c == '*') + successful = readCStyleComment(); + else if (c == '/') + successful = readCppStyleComment(); + if (!successful) + return false; + + if (collectComments_) { + CommentPlacement placement = commentBefore; + if (lastValueEnd_ && !containsNewLine(lastValueEnd_, commentBegin)) { + if (c != '*' || !containsNewLine(commentBegin, current_)) + placement = commentAfterOnSameLine; + } + + addComment(commentBegin, current_, placement); + } + return true; +} + +void +OurReader::addComment(Location begin, Location end, CommentPlacement placement) { + assert(collectComments_); + const std::string& normalized = normalizeEOL(begin, end); + if 
(placement == commentAfterOnSameLine) { + assert(lastValue_ != 0); + lastValue_->setComment(normalized, placement); + } else { + commentsBefore_ += normalized; + } +} + +bool OurReader::readCStyleComment() { + while (current_ != end_) { + Char c = getNextChar(); + if (c == '*' && *current_ == '/') + break; + } + return getNextChar() == '/'; +} + +bool OurReader::readCppStyleComment() { + while (current_ != end_) { + Char c = getNextChar(); + if (c == '\n') + break; + if (c == '\r') { + // Consume DOS EOL. It will be normalized in addComment. + if (current_ != end_ && *current_ == '\n') + getNextChar(); + // Break on Moc OS 9 EOL. + break; + } + } + return true; +} + +bool OurReader::readNumber(bool checkInf) { + const char *p = current_; + if (checkInf && p != end_ && *p == 'I') { + current_ = ++p; + return false; + } + char c = '0'; // stopgap for already consumed character + // integral part + while (c >= '0' && c <= '9') + c = (current_ = p) < end_ ? *p++ : 0; + // fractional part + if (c == '.') { + c = (current_ = p) < end_ ? *p++ : 0; + while (c >= '0' && c <= '9') + c = (current_ = p) < end_ ? *p++ : 0; + } + // exponential part + if (c == 'e' || c == 'E') { + c = (current_ = p) < end_ ? *p++ : 0; + if (c == '+' || c == '-') + c = (current_ = p) < end_ ? *p++ : 0; + while (c >= '0' && c <= '9') + c = (current_ = p) < end_ ? *p++ : 0; + } + return true; +} +bool OurReader::readString() { + Char c = 0; + while (current_ != end_) { + c = getNextChar(); + if (c == '\\') + getNextChar(); + else if (c == '"') + break; + } + return c == '"'; +} + + +bool OurReader::readStringSingleQuote() { + Char c = 0; + while (current_ != end_) { + c = getNextChar(); + if (c == '\\') + getNextChar(); + else if (c == '\'') + break; + } + return c == '\''; +} + +bool OurReader::readObject(Token& tokenStart) { + Token tokenName; + std::string name; + Value init(objectValue); + currentValue().swapPayload(init); + currentValue().setOffsetStart(tokenStart.start_ - begin_); + while (readToken(tokenName)) { + bool initialTokenOk = true; + while (tokenName.type_ == tokenComment && initialTokenOk) + initialTokenOk = readToken(tokenName); + if (!initialTokenOk) + break; + if (tokenName.type_ == tokenObjectEnd && name.empty()) // empty object + return true; + name = ""; + if (tokenName.type_ == tokenString) { + if (!decodeString(tokenName, name)) + return recoverFromError(tokenObjectEnd); + } else if (tokenName.type_ == tokenNumber && features_.allowNumericKeys_) { + Value numberName; + if (!decodeNumber(tokenName, numberName)) + return recoverFromError(tokenObjectEnd); + name = numberName.asString(); + } else { + break; + } + + Token colon; + if (!readToken(colon) || colon.type_ != tokenMemberSeparator) { + return addErrorAndRecover( + "Missing ':' after object member name", colon, tokenObjectEnd); + } + if (name.length() >= (1U<<30)) throwRuntimeError("keylength >= 2^30"); + if (features_.rejectDupKeys_ && currentValue().isMember(name)) { + std::string msg = "Duplicate key: '" + name + "'"; + return addErrorAndRecover( + msg, tokenName, tokenObjectEnd); + } + Value& value = currentValue()[name]; + nodes_.push(&value); + bool ok = readValue(); + nodes_.pop(); + if (!ok) // error already set + return recoverFromError(tokenObjectEnd); + + Token comma; + if (!readToken(comma) || + (comma.type_ != tokenObjectEnd && comma.type_ != tokenArraySeparator && + comma.type_ != tokenComment)) { + return addErrorAndRecover( + "Missing ',' or '}' in object declaration", comma, tokenObjectEnd); + } + bool finalizeTokenOk = true; 
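+    // Skip any comments that follow the value so the next real token decides
+    // whether the object continues (',') or ends ('}').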
+ while (comma.type_ == tokenComment && finalizeTokenOk) + finalizeTokenOk = readToken(comma); + if (comma.type_ == tokenObjectEnd) + return true; + } + return addErrorAndRecover( + "Missing '}' or object member name", tokenName, tokenObjectEnd); +} + +bool OurReader::readArray(Token& tokenStart) { + Value init(arrayValue); + currentValue().swapPayload(init); + currentValue().setOffsetStart(tokenStart.start_ - begin_); + skipSpaces(); + if (*current_ == ']') // empty array + { + Token endArray; + readToken(endArray); + return true; + } + int index = 0; + for (;;) { + Value& value = currentValue()[index++]; + nodes_.push(&value); + bool ok = readValue(); + nodes_.pop(); + if (!ok) // error already set + return recoverFromError(tokenArrayEnd); + + Token token; + // Accept Comment after last item in the array. + ok = readToken(token); + while (token.type_ == tokenComment && ok) { + ok = readToken(token); + } + bool badTokenType = + (token.type_ != tokenArraySeparator && token.type_ != tokenArrayEnd); + if (!ok || badTokenType) { + return addErrorAndRecover( + "Missing ',' or ']' in array declaration", token, tokenArrayEnd); + } + if (token.type_ == tokenArrayEnd) + break; + } + return true; +} + +bool OurReader::decodeNumber(Token& token) { + Value decoded; + if (!decodeNumber(token, decoded)) + return false; + currentValue().swapPayload(decoded); + currentValue().setOffsetStart(token.start_ - begin_); + currentValue().setOffsetLimit(token.end_ - begin_); + return true; +} + +bool OurReader::decodeNumber(Token& token, Value& decoded) { + // Attempts to parse the number as an integer. If the number is + // larger than the maximum supported value of an integer then + // we decode the number as a double. + Location current = token.start_; + bool isNegative = *current == '-'; + if (isNegative) + ++current; + // TODO: Help the compiler do the div and mod at compile time or get rid of them. + Value::LargestUInt maxIntegerValue = + isNegative ? Value::LargestUInt(-Value::minLargestInt) + : Value::maxLargestUInt; + Value::LargestUInt threshold = maxIntegerValue / 10; + Value::LargestUInt value = 0; + while (current < token.end_) { + Char c = *current++; + if (c < '0' || c > '9') + return decodeDouble(token, decoded); + Value::UInt digit(c - '0'); + if (value >= threshold) { + // We've hit or exceeded the max value divided by 10 (rounded down). If + // a) we've only just touched the limit, b) this is the last digit, and + // c) it's small enough to fit in that rounding delta, we're okay. + // Otherwise treat this number as a double to avoid overflow. + if (value > threshold || current != token.end_ || + digit > maxIntegerValue % 10) { + return decodeDouble(token, decoded); + } + } + value = value * 10 + digit; + } + if (isNegative) + decoded = -Value::LargestInt(value); + else if (value <= Value::LargestUInt(Value::maxInt)) + decoded = Value::LargestInt(value); + else + decoded = value; + return true; +} + +bool OurReader::decodeDouble(Token& token) { + Value decoded; + if (!decodeDouble(token, decoded)) + return false; + currentValue().swapPayload(decoded); + currentValue().setOffsetStart(token.start_ - begin_); + currentValue().setOffsetLimit(token.end_ - begin_); + return true; +} + +bool OurReader::decodeDouble(Token& token, Value& decoded) { + double value = 0; + const int bufferSize = 32; + int count; + int length = int(token.end_ - token.start_); + + // Sanity check to avoid buffer overflow exploits. 
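+  // (A negative length would otherwise reach the memcpy into the fixed stack
+  // buffer below; tokens longer than bufferSize use a heap std::string.)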
+ if (length < 0) { + return addError("Unable to parse token length", token); + } + + // Avoid using a string constant for the format control string given to + // sscanf, as this can cause hard to debug crashes on OS X. See here for more + // info: + // + // http://developer.apple.com/library/mac/#DOCUMENTATION/DeveloperTools/gcc-4.0.1/gcc/Incompatibilities.html + char format[] = "%lf"; + + if (length <= bufferSize) { + Char buffer[bufferSize + 1]; + memcpy(buffer, token.start_, length); + buffer[length] = 0; + count = sscanf(buffer, format, &value); + } else { + std::string buffer(token.start_, token.end_); + count = sscanf(buffer.c_str(), format, &value); + } + + if (count != 1) + return addError("'" + std::string(token.start_, token.end_) + + "' is not a number.", + token); + decoded = value; + return true; +} + +bool OurReader::decodeString(Token& token) { + std::string decoded_string; + if (!decodeString(token, decoded_string)) + return false; + Value decoded(decoded_string); + currentValue().swapPayload(decoded); + currentValue().setOffsetStart(token.start_ - begin_); + currentValue().setOffsetLimit(token.end_ - begin_); + return true; +} + +bool OurReader::decodeString(Token& token, std::string& decoded) { + decoded.reserve(token.end_ - token.start_ - 2); + Location current = token.start_ + 1; // skip '"' + Location end = token.end_ - 1; // do not include '"' + while (current != end) { + Char c = *current++; + if (c == '"') + break; + else if (c == '\\') { + if (current == end) + return addError("Empty escape sequence in string", token, current); + Char escape = *current++; + switch (escape) { + case '"': + decoded += '"'; + break; + case '/': + decoded += '/'; + break; + case '\\': + decoded += '\\'; + break; + case 'b': + decoded += '\b'; + break; + case 'f': + decoded += '\f'; + break; + case 'n': + decoded += '\n'; + break; + case 'r': + decoded += '\r'; + break; + case 't': + decoded += '\t'; + break; + case 'u': { + unsigned int unicode; + if (!decodeUnicodeCodePoint(token, current, end, unicode)) + return false; + decoded += codePointToUTF8(unicode); + } break; + default: + return addError("Bad escape sequence in string", token, current); + } + } else { + decoded += c; + } + } + return true; +} + +bool OurReader::decodeUnicodeCodePoint(Token& token, + Location& current, + Location end, + unsigned int& unicode) { + + if (!decodeUnicodeEscapeSequence(token, current, end, unicode)) + return false; + if (unicode >= 0xD800 && unicode <= 0xDBFF) { + // surrogate pairs + if (end - current < 6) + return addError( + "additional six characters expected to parse unicode surrogate pair.", + token, + current); + unsigned int surrogatePair; + if (*(current++) == '\\' && *(current++) == 'u') { + if (decodeUnicodeEscapeSequence(token, current, end, surrogatePair)) { + unicode = 0x10000 + ((unicode & 0x3FF) << 10) + (surrogatePair & 0x3FF); + } else + return false; + } else + return addError("expecting another \\u token to begin the second half of " + "a unicode surrogate pair", + token, + current); + } + return true; +} + +bool OurReader::decodeUnicodeEscapeSequence(Token& token, + Location& current, + Location end, + unsigned int& unicode) { + if (end - current < 4) + return addError( + "Bad unicode escape sequence in string: four digits expected.", + token, + current); + unicode = 0; + for (int index = 0; index < 4; ++index) { + Char c = *current++; + unicode *= 16; + if (c >= '0' && c <= '9') + unicode += c - '0'; + else if (c >= 'a' && c <= 'f') + unicode += c - 'a' + 10; + else if (c >= 
'A' && c <= 'F') + unicode += c - 'A' + 10; + else + return addError( + "Bad unicode escape sequence in string: hexadecimal digit expected.", + token, + current); + } + return true; +} + +bool +OurReader::addError(const std::string& message, Token& token, Location extra) { + ErrorInfo info; + info.token_ = token; + info.message_ = message; + info.extra_ = extra; + errors_.push_back(info); + return false; +} + +bool OurReader::recoverFromError(TokenType skipUntilToken) { + int errorCount = int(errors_.size()); + Token skip; + for (;;) { + if (!readToken(skip)) + errors_.resize(errorCount); // discard errors caused by recovery + if (skip.type_ == skipUntilToken || skip.type_ == tokenEndOfStream) + break; + } + errors_.resize(errorCount); + return false; +} + +bool OurReader::addErrorAndRecover(const std::string& message, + Token& token, + TokenType skipUntilToken) { + addError(message, token); + return recoverFromError(skipUntilToken); +} + +Value& OurReader::currentValue() { return *(nodes_.top()); } + +OurReader::Char OurReader::getNextChar() { + if (current_ == end_) + return 0; + return *current_++; +} + +void OurReader::getLocationLineAndColumn(Location location, + int& line, + int& column) const { + Location current = begin_; + Location lastLineStart = current; + line = 0; + while (current < location && current != end_) { + Char c = *current++; + if (c == '\r') { + if (*current == '\n') + ++current; + lastLineStart = current; + ++line; + } else if (c == '\n') { + lastLineStart = current; + ++line; + } + } + // column & line start at 1 + column = int(location - lastLineStart) + 1; + ++line; +} + +std::string OurReader::getLocationLineAndColumn(Location location) const { + int line, column; + getLocationLineAndColumn(location, line, column); + char buffer[18 + 16 + 16 + 1]; + snprintf(buffer, sizeof(buffer), "Line %d, Column %d", line, column); + return buffer; +} + +std::string OurReader::getFormattedErrorMessages() const { + std::string formattedMessage; + for (Errors::const_iterator itError = errors_.begin(); + itError != errors_.end(); + ++itError) { + const ErrorInfo& error = *itError; + formattedMessage += + "* " + getLocationLineAndColumn(error.token_.start_) + "\n"; + formattedMessage += " " + error.message_ + "\n"; + if (error.extra_) + formattedMessage += + "See " + getLocationLineAndColumn(error.extra_) + " for detail.\n"; + } + return formattedMessage; +} + +std::vector OurReader::getStructuredErrors() const { + std::vector allErrors; + for (Errors::const_iterator itError = errors_.begin(); + itError != errors_.end(); + ++itError) { + const ErrorInfo& error = *itError; + OurReader::StructuredError structured; + structured.offset_start = error.token_.start_ - begin_; + structured.offset_limit = error.token_.end_ - begin_; + structured.message = error.message_; + allErrors.push_back(structured); + } + return allErrors; +} + +bool OurReader::pushError(const Value& value, const std::string& message) { + size_t length = end_ - begin_; + if(value.getOffsetStart() > length + || value.getOffsetLimit() > length) + return false; + Token token; + token.type_ = tokenError; + token.start_ = begin_ + value.getOffsetStart(); + token.end_ = end_ + value.getOffsetLimit(); + ErrorInfo info; + info.token_ = token; + info.message_ = message; + info.extra_ = 0; + errors_.push_back(info); + return true; +} + +bool OurReader::pushError(const Value& value, const std::string& message, const Value& extra) { + size_t length = end_ - begin_; + if(value.getOffsetStart() > length + || 
value.getOffsetLimit() > length + || extra.getOffsetLimit() > length) + return false; + Token token; + token.type_ = tokenError; + token.start_ = begin_ + value.getOffsetStart(); + token.end_ = begin_ + value.getOffsetLimit(); + ErrorInfo info; + info.token_ = token; + info.message_ = message; + info.extra_ = begin_ + extra.getOffsetStart(); + errors_.push_back(info); + return true; +} + +bool OurReader::good() const { + return !errors_.size(); +} + + +class OurCharReader : public CharReader { + bool const collectComments_; + OurReader reader_; +public: + OurCharReader( + bool collectComments, + OurFeatures const& features) + : collectComments_(collectComments) + , reader_(features) + {} + bool parse( + char const* beginDoc, char const* endDoc, + Value* root, std::string* errs) override { + bool ok = reader_.parse(beginDoc, endDoc, *root, collectComments_); + if (errs) { + *errs = reader_.getFormattedErrorMessages(); + } + return ok; + } +}; + +CharReaderBuilder::CharReaderBuilder() +{ + setDefaults(&settings_); +} +CharReaderBuilder::~CharReaderBuilder() +{} +CharReader* CharReaderBuilder::newCharReader() const +{ + bool collectComments = settings_["collectComments"].asBool(); + OurFeatures features = OurFeatures::all(); + features.allowComments_ = settings_["allowComments"].asBool(); + features.strictRoot_ = settings_["strictRoot"].asBool(); + features.allowDroppedNullPlaceholders_ = settings_["allowDroppedNullPlaceholders"].asBool(); + features.allowNumericKeys_ = settings_["allowNumericKeys"].asBool(); + features.allowSingleQuotes_ = settings_["allowSingleQuotes"].asBool(); + features.stackLimit_ = settings_["stackLimit"].asInt(); + features.failIfExtra_ = settings_["failIfExtra"].asBool(); + features.rejectDupKeys_ = settings_["rejectDupKeys"].asBool(); + features.allowSpecialFloats_ = settings_["allowSpecialFloats"].asBool(); + return new OurCharReader(collectComments, features); +} +static void getValidReaderKeys(std::set* valid_keys) +{ + valid_keys->clear(); + valid_keys->insert("collectComments"); + valid_keys->insert("allowComments"); + valid_keys->insert("strictRoot"); + valid_keys->insert("allowDroppedNullPlaceholders"); + valid_keys->insert("allowNumericKeys"); + valid_keys->insert("allowSingleQuotes"); + valid_keys->insert("stackLimit"); + valid_keys->insert("failIfExtra"); + valid_keys->insert("rejectDupKeys"); + valid_keys->insert("allowSpecialFloats"); +} +bool CharReaderBuilder::validate(Json::Value* invalid) const +{ + Json::Value my_invalid; + if (!invalid) invalid = &my_invalid; // so we do not need to test for NULL + Json::Value& inv = *invalid; + std::set valid_keys; + getValidReaderKeys(&valid_keys); + Value::Members keys = settings_.getMemberNames(); + size_t n = keys.size(); + for (size_t i = 0; i < n; ++i) { + std::string const& key = keys[i]; + if (valid_keys.find(key) == valid_keys.end()) { + inv[key] = settings_[key]; + } + } + return 0u == inv.size(); +} +Value& CharReaderBuilder::operator[](std::string key) +{ + return settings_[key]; +} +// static +void CharReaderBuilder::strictMode(Json::Value* settings) +{ +//! [CharReaderBuilderStrictMode] + (*settings)["allowComments"] = false; + (*settings)["strictRoot"] = true; + (*settings)["allowDroppedNullPlaceholders"] = false; + (*settings)["allowNumericKeys"] = false; + (*settings)["allowSingleQuotes"] = false; + (*settings)["stackLimit"] = 1000; + (*settings)["failIfExtra"] = true; + (*settings)["rejectDupKeys"] = true; + (*settings)["allowSpecialFloats"] = false; +//! 
[CharReaderBuilderStrictMode] +} +// static +void CharReaderBuilder::setDefaults(Json::Value* settings) +{ +//! [CharReaderBuilderDefaults] + (*settings)["collectComments"] = true; + (*settings)["allowComments"] = true; + (*settings)["strictRoot"] = false; + (*settings)["allowDroppedNullPlaceholders"] = false; + (*settings)["allowNumericKeys"] = false; + (*settings)["allowSingleQuotes"] = false; + (*settings)["stackLimit"] = 1000; + (*settings)["failIfExtra"] = false; + (*settings)["rejectDupKeys"] = false; + (*settings)["allowSpecialFloats"] = false; +//! [CharReaderBuilderDefaults] +} + +////////////////////////////////// +// global functions + +bool parseFromStream( + CharReader::Factory const& fact, std::istream& sin, + Value* root, std::string* errs) +{ + std::ostringstream ssin; + ssin << sin.rdbuf(); + std::string doc = ssin.str(); + char const* begin = doc.data(); + char const* end = begin + doc.size(); + // Note that we do not actually need a null-terminator. + CharReaderPtr const reader(fact.newCharReader()); + return reader->parse(begin, end, root, errs); +} + +std::istream& operator>>(std::istream& sin, Value& root) { + CharReaderBuilder b; + std::string errs; + bool ok = parseFromStream(b, sin, &root, &errs); + if (!ok) { + fprintf(stderr, + "Error from reader: %s", + errs.c_str()); + + throwRuntimeError(errs); + } + return sin; +} + +} // namespace Json + +// ////////////////////////////////////////////////////////////////////// +// End of content of file: src/lib_json/json_reader.cpp +// ////////////////////////////////////////////////////////////////////// + + + + + + +// ////////////////////////////////////////////////////////////////////// +// Beginning of content of file: src/lib_json/json_valueiterator.inl +// ////////////////////////////////////////////////////////////////////// + +// Copyright 2007-2010 Baptiste Lepilleur +// Distributed under MIT license, or public domain if desired and +// recognized in your jurisdiction. +// See file LICENSE for detail or copy at http://jsoncpp.sourceforge.net/LICENSE + +// included by json_value.cpp + +namespace Json { + +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// class ValueIteratorBase +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// + +ValueIteratorBase::ValueIteratorBase() + : current_(), isNull_(true) { +} + +ValueIteratorBase::ValueIteratorBase( + const Value::ObjectValues::iterator& current) + : current_(current), isNull_(false) {} + +Value& ValueIteratorBase::deref() const { + return current_->second; +} + +void ValueIteratorBase::increment() { + ++current_; +} + +void ValueIteratorBase::decrement() { + --current_; +} + +ValueIteratorBase::difference_type +ValueIteratorBase::computeDistance(const SelfType& other) const { +#ifdef JSON_USE_CPPTL_SMALLMAP + return other.current_ - current_; +#else + // Iterator for null value are initialized using the default + // constructor, which initialize current_ to the default + // std::map::iterator. As begin() and end() are two instance + // of the default std::map::iterator, they can not be compared. + // To allow this, we handle this comparison specifically. 
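+  // Consequently two default-constructed (null) iterators are defined to be
+  // at distance 0 from each other, without ever touching current_.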
+ if (isNull_ && other.isNull_) { + return 0; + } + + // Usage of std::distance is not portable (does not compile with Sun Studio 12 + // RogueWave STL, + // which is the one used by default). + // Using a portable hand-made version for non random iterator instead: + // return difference_type( std::distance( current_, other.current_ ) ); + difference_type myDistance = 0; + for (Value::ObjectValues::iterator it = current_; it != other.current_; + ++it) { + ++myDistance; + } + return myDistance; +#endif +} + +bool ValueIteratorBase::isEqual(const SelfType& other) const { + if (isNull_) { + return other.isNull_; + } + return current_ == other.current_; +} + +void ValueIteratorBase::copy(const SelfType& other) { + current_ = other.current_; + isNull_ = other.isNull_; +} + +Value ValueIteratorBase::key() const { + const Value::CZString czstring = (*current_).first; + if (czstring.data()) { + if (czstring.isStaticString()) + return Value(StaticString(czstring.data())); + return Value(czstring.data(), czstring.data() + czstring.length()); + } + return Value(czstring.index()); +} + +UInt ValueIteratorBase::index() const { + const Value::CZString czstring = (*current_).first; + if (!czstring.data()) + return czstring.index(); + return Value::UInt(-1); +} + +std::string ValueIteratorBase::name() const { + char const* keey; + char const* end; + keey = memberName(&end); + if (!keey) return std::string(); + return std::string(keey, end); +} + +char const* ValueIteratorBase::memberName() const { + const char* cname = (*current_).first.data(); + return cname ? cname : ""; +} + +char const* ValueIteratorBase::memberName(char const** end) const { + const char* cname = (*current_).first.data(); + if (!cname) { + *end = NULL; + return NULL; + } + *end = cname + (*current_).first.length(); + return cname; +} + +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// class ValueConstIterator +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// + +ValueConstIterator::ValueConstIterator() {} + +ValueConstIterator::ValueConstIterator( + const Value::ObjectValues::iterator& current) + : ValueIteratorBase(current) {} + +ValueConstIterator::ValueConstIterator(ValueIterator const& other) + : ValueIteratorBase(other) {} + +ValueConstIterator& ValueConstIterator:: +operator=(const ValueIteratorBase& other) { + copy(other); + return *this; +} + +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// class ValueIterator +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// + +ValueIterator::ValueIterator() {} + +ValueIterator::ValueIterator(const Value::ObjectValues::iterator& current) + : ValueIteratorBase(current) {} + +ValueIterator::ValueIterator(const ValueConstIterator& other) + : ValueIteratorBase(other) { + throwRuntimeError("ConstIterator to Iterator should never be allowed."); +} + +ValueIterator::ValueIterator(const ValueIterator& other) + : ValueIteratorBase(other) {} + 
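+
+// Illustrative usage sketch for these iterators (not part of the original
+// sources; assumes the usual public Json::Value API):
+//
+//   Json::Value obj;
+//   obj["name"] = "example";                         // becomes objectValue
+//   for (Json::Value::iterator it = obj.begin(); it != obj.end(); ++it) {
+//     const std::string key = it.key().asString();   // member name
+//     Json::Value& member = *it;                     // member value
+//   }
+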
+ValueIterator& ValueIterator::operator=(const SelfType& other) { + copy(other); + return *this; +} + +} // namespace Json + +// ////////////////////////////////////////////////////////////////////// +// End of content of file: src/lib_json/json_valueiterator.inl +// ////////////////////////////////////////////////////////////////////// + + + + + + +// ////////////////////////////////////////////////////////////////////// +// Beginning of content of file: src/lib_json/json_value.cpp +// ////////////////////////////////////////////////////////////////////// + +// Copyright 2011 Baptiste Lepilleur +// Distributed under MIT license, or public domain if desired and +// recognized in your jurisdiction. +// See file LICENSE for detail or copy at http://jsoncpp.sourceforge.net/LICENSE + +#if !defined(JSON_IS_AMALGAMATION) +#include +#include +#include +#endif // if !defined(JSON_IS_AMALGAMATION) +#include +#include +#include +#include +#include +#ifdef JSON_USE_CPPTL +#include +#endif +#include // size_t +#include // min() + +#define JSON_ASSERT_UNREACHABLE assert(false) + +namespace Json { + +// This is a walkaround to avoid the static initialization of Value::null. +// kNull must be word-aligned to avoid crashing on ARM. We use an alignment of +// 8 (instead of 4) as a bit of future-proofing. +#if defined(__ARMEL__) +#define ALIGNAS(byte_alignment) __attribute__((aligned(byte_alignment))) +#else +#define ALIGNAS(byte_alignment) +#endif +static const unsigned char ALIGNAS(8) kNull[sizeof(Value)] = { 0 }; +const unsigned char& kNullRef = kNull[0]; +const Value& Value::null = reinterpret_cast(kNullRef); +const Value& Value::nullRef = null; + +const Int Value::minInt = Int(~(UInt(-1) / 2)); +const Int Value::maxInt = Int(UInt(-1) / 2); +const UInt Value::maxUInt = UInt(-1); +#if defined(JSON_HAS_INT64) +const Int64 Value::minInt64 = Int64(~(UInt64(-1) / 2)); +const Int64 Value::maxInt64 = Int64(UInt64(-1) / 2); +const UInt64 Value::maxUInt64 = UInt64(-1); +// The constant is hard-coded because some compiler have trouble +// converting Value::maxUInt64 to a double correctly (AIX/xlC). +// Assumes that UInt64 is a 64 bits integer. +static const double maxUInt64AsDouble = 18446744073709551615.0; +#endif // defined(JSON_HAS_INT64) +const LargestInt Value::minLargestInt = LargestInt(~(LargestUInt(-1) / 2)); +const LargestInt Value::maxLargestInt = LargestInt(LargestUInt(-1) / 2); +const LargestUInt Value::maxLargestUInt = LargestUInt(-1); + +#if !defined(JSON_USE_INT64_DOUBLE_CONVERSION) +template +static inline bool InRange(double d, T min, U max) { + return d >= min && d <= max; +} +#else // if !defined(JSON_USE_INT64_DOUBLE_CONVERSION) +static inline double integerToDouble(Json::UInt64 value) { + return static_cast(Int64(value / 2)) * 2.0 + Int64(value & 1); +} + +template static inline double integerToDouble(T value) { + return static_cast(value); +} + +template +static inline bool InRange(double d, T min, U max) { + return d >= integerToDouble(min) && d <= integerToDouble(max); +} +#endif // if !defined(JSON_USE_INT64_DOUBLE_CONVERSION) + +/** Duplicates the specified string value. + * @param value Pointer to the string to duplicate. Must be zero-terminated if + * length is "unknown". + * @param length Length of the value. if equals to unknown, then it will be + * computed using strlen(value). + * @return Pointer on the duplicate instance of string. 
+ */ +static inline char* duplicateStringValue(const char* value, + size_t length) { + // Avoid an integer overflow in the call to malloc below by limiting length + // to a sane value. + if (length >= (size_t)Value::maxInt) + length = Value::maxInt - 1; + + char* newString = static_cast(malloc(length + 1)); + if (newString == NULL) { + throwRuntimeError( + "in Json::Value::duplicateStringValue(): " + "Failed to allocate string value buffer"); + } + memcpy(newString, value, length); + newString[length] = 0; + return newString; +} + +/* Record the length as a prefix. + */ +static inline char* duplicateAndPrefixStringValue( + const char* value, + unsigned int length) +{ + // Avoid an integer overflow in the call to malloc below by limiting length + // to a sane value. + JSON_ASSERT_MESSAGE(length <= (unsigned)Value::maxInt - sizeof(unsigned) - 1U, + "in Json::Value::duplicateAndPrefixStringValue(): " + "length too big for prefixing"); + unsigned actualLength = length + static_cast(sizeof(unsigned)) + 1U; + char* newString = static_cast(malloc(actualLength)); + if (newString == 0) { + throwRuntimeError( + "in Json::Value::duplicateAndPrefixStringValue(): " + "Failed to allocate string value buffer"); + } + *reinterpret_cast(newString) = length; + memcpy(newString + sizeof(unsigned), value, length); + newString[actualLength - 1U] = 0; // to avoid buffer over-run accidents by users later + return newString; +} +inline static void decodePrefixedString( + bool isPrefixed, char const* prefixed, + unsigned* length, char const** value) +{ + if (!isPrefixed) { + *length = static_cast(strlen(prefixed)); + *value = prefixed; + } else { + *length = *reinterpret_cast(prefixed); + *value = prefixed + sizeof(unsigned); + } +} +/** Free the string duplicated by duplicateStringValue()/duplicateAndPrefixStringValue(). + */ +static inline void releaseStringValue(char* value) { free(value); } + +} // namespace Json + +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// ValueInternals... 
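+//
+// Note on the string storage implemented above: a "prefixed" string, as
+// produced by duplicateAndPrefixStringValue() and read back by
+// decodePrefixedString(), is laid out as
+//
+//   [ unsigned length ][ length bytes of payload ][ '\0' terminator ]
+//
+// decodePrefixedString() returns the length taken from the leading unsigned
+// and a pointer just past it as the character data.
+//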
+// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +#if !defined(JSON_IS_AMALGAMATION) + +#include "json_valueiterator.inl" +#endif // if !defined(JSON_IS_AMALGAMATION) + +namespace Json { + +Exception::Exception(std::string const& msg) + : msg_(msg) +{} +Exception::~Exception() throw() +{} +char const* Exception::what() const throw() +{ + return msg_.c_str(); +} +RuntimeError::RuntimeError(std::string const& msg) + : Exception(msg) +{} +LogicError::LogicError(std::string const& msg) + : Exception(msg) +{} +void throwRuntimeError(std::string const& msg) +{ + throw RuntimeError(msg); +} +void throwLogicError(std::string const& msg) +{ + throw LogicError(msg); +} + +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// class Value::CommentInfo +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// + +Value::CommentInfo::CommentInfo() : comment_(0) {} + +Value::CommentInfo::~CommentInfo() { + if (comment_) + releaseStringValue(comment_); +} + +void Value::CommentInfo::setComment(const char* text, size_t len) { + if (comment_) { + releaseStringValue(comment_); + comment_ = 0; + } + JSON_ASSERT(text != 0); + JSON_ASSERT_MESSAGE( + text[0] == '\0' || text[0] == '/', + "in Json::Value::setComment(): Comments must start with /"); + // It seems that /**/ style comments are acceptable as well. + comment_ = duplicateStringValue(text, len); +} + +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// class Value::CZString +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// + +// Notes: policy_ indicates if the string was allocated when +// a string is stored. + +Value::CZString::CZString(ArrayIndex aindex) : cstr_(0), index_(aindex) {} + +Value::CZString::CZString(char const* str, unsigned ulength, DuplicationPolicy allocate) + : cstr_(str) { + // allocate != duplicate + storage_.policy_ = allocate & 0x3; + storage_.length_ = ulength & 0x3FFFFFFF; +} + +Value::CZString::CZString(const CZString& other) + : cstr_(other.storage_.policy_ != noDuplication && other.cstr_ != 0 + ? duplicateStringValue(other.cstr_, other.storage_.length_) + : other.cstr_) { + storage_.policy_ = (other.cstr_ + ? (static_cast(other.storage_.policy_) == noDuplication + ? 
noDuplication : duplicate) + : static_cast(other.storage_.policy_)); + storage_.length_ = other.storage_.length_; +} + +#if JSON_HAS_RVALUE_REFERENCES +Value::CZString::CZString(CZString&& other) + : cstr_(other.cstr_), index_(other.index_) { + other.cstr_ = nullptr; +} +#endif + +Value::CZString::~CZString() { + if (cstr_ && storage_.policy_ == duplicate) + releaseStringValue(const_cast(cstr_)); +} + +void Value::CZString::swap(CZString& other) { + std::swap(cstr_, other.cstr_); + std::swap(index_, other.index_); +} + +Value::CZString& Value::CZString::operator=(CZString other) { + swap(other); + return *this; +} + +bool Value::CZString::operator<(const CZString& other) const { + if (!cstr_) return index_ < other.index_; + //return strcmp(cstr_, other.cstr_) < 0; + // Assume both are strings. + unsigned this_len = this->storage_.length_; + unsigned other_len = other.storage_.length_; + unsigned min_len = std::min(this_len, other_len); + int comp = memcmp(this->cstr_, other.cstr_, min_len); + if (comp < 0) return true; + if (comp > 0) return false; + return (this_len < other_len); +} + +bool Value::CZString::operator==(const CZString& other) const { + if (!cstr_) return index_ == other.index_; + //return strcmp(cstr_, other.cstr_) == 0; + // Assume both are strings. + unsigned this_len = this->storage_.length_; + unsigned other_len = other.storage_.length_; + if (this_len != other_len) return false; + int comp = memcmp(this->cstr_, other.cstr_, this_len); + return comp == 0; +} + +ArrayIndex Value::CZString::index() const { return index_; } + +//const char* Value::CZString::c_str() const { return cstr_; } +const char* Value::CZString::data() const { return cstr_; } +unsigned Value::CZString::length() const { return storage_.length_; } +bool Value::CZString::isStaticString() const { return storage_.policy_ == noDuplication; } + +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// class Value::Value +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// +// ////////////////////////////////////////////////////////////////// + +/*! \internal Default constructor initialization must be equivalent to: + * memset( this, 0, sizeof(Value) ) + * This optimization is used in ValueInternalMap fast allocator. 
+ */ +Value::Value(ValueType vtype) { + initBasic(vtype); + switch (vtype) { + case nullValue: + break; + case intValue: + case uintValue: + value_.int_ = 0; + break; + case realValue: + value_.real_ = 0.0; + break; + case stringValue: + value_.string_ = 0; + break; + case arrayValue: + case objectValue: + value_.map_ = new ObjectValues(); + break; + case booleanValue: + value_.bool_ = false; + break; + default: + JSON_ASSERT_UNREACHABLE; + } +} + +Value::Value(Int value) { + initBasic(intValue); + value_.int_ = value; +} + +Value::Value(UInt value) { + initBasic(uintValue); + value_.uint_ = value; +} +#if defined(JSON_HAS_INT64) +Value::Value(Int64 value) { + initBasic(intValue); + value_.int_ = value; +} +Value::Value(UInt64 value) { + initBasic(uintValue); + value_.uint_ = value; +} +#endif // defined(JSON_HAS_INT64) + +Value::Value(double value) { + initBasic(realValue); + value_.real_ = value; +} + +Value::Value(const char* value) { + initBasic(stringValue, true); + value_.string_ = duplicateAndPrefixStringValue(value, static_cast(strlen(value))); +} + +Value::Value(const char* beginValue, const char* endValue) { + initBasic(stringValue, true); + value_.string_ = + duplicateAndPrefixStringValue(beginValue, static_cast(endValue - beginValue)); +} + +Value::Value(const std::string& value) { + initBasic(stringValue, true); + value_.string_ = + duplicateAndPrefixStringValue(value.data(), static_cast(value.length())); +} + +Value::Value(const StaticString& value) { + initBasic(stringValue); + value_.string_ = const_cast(value.c_str()); +} + +#ifdef JSON_USE_CPPTL +Value::Value(const CppTL::ConstString& value) { + initBasic(stringValue, true); + value_.string_ = duplicateAndPrefixStringValue(value, static_cast(value.length())); +} +#endif + +Value::Value(bool value) { + initBasic(booleanValue); + value_.bool_ = value; +} + +Value::Value(Value const& other) + : type_(other.type_), allocated_(false) + , + comments_(0), start_(other.start_), limit_(other.limit_) +{ + switch (type_) { + case nullValue: + case intValue: + case uintValue: + case realValue: + case booleanValue: + value_ = other.value_; + break; + case stringValue: + if (other.value_.string_ && other.allocated_) { + unsigned len; + char const* str; + decodePrefixedString(other.allocated_, other.value_.string_, + &len, &str); + value_.string_ = duplicateAndPrefixStringValue(str, len); + allocated_ = true; + } else { + value_.string_ = other.value_.string_; + allocated_ = false; + } + break; + case arrayValue: + case objectValue: + value_.map_ = new ObjectValues(*other.value_.map_); + break; + default: + JSON_ASSERT_UNREACHABLE; + } + if (other.comments_) { + comments_ = new CommentInfo[numberOfCommentPlacement]; + for (int comment = 0; comment < numberOfCommentPlacement; ++comment) { + const CommentInfo& otherComment = other.comments_[comment]; + if (otherComment.comment_) + comments_[comment].setComment( + otherComment.comment_, strlen(otherComment.comment_)); + } + } +} + +#if JSON_HAS_RVALUE_REFERENCES +// Move constructor +Value::Value(Value&& other) { + initBasic(nullValue); + swap(other); +} +#endif + +Value::~Value() { + switch (type_) { + case nullValue: + case intValue: + case uintValue: + case realValue: + case booleanValue: + break; + case stringValue: + if (allocated_) + releaseStringValue(value_.string_); + break; + case arrayValue: + case objectValue: + delete value_.map_; + break; + default: + JSON_ASSERT_UNREACHABLE; + } + + if (comments_) + delete[] comments_; +} + +Value& Value::operator=(Value other) { + 
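+  // Copy-and-swap: 'other' is a copy (or move) made at the call site, so
+  // swapping contents with it implements both copy- and move-assignment and
+  // lets the old contents of *this be destroyed with 'other'.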
swap(other); + return *this; +} + +void Value::swapPayload(Value& other) { + ValueType temp = type_; + type_ = other.type_; + other.type_ = temp; + std::swap(value_, other.value_); + int temp2 = allocated_; + allocated_ = other.allocated_; + other.allocated_ = temp2 & 0x1; +} + +void Value::swap(Value& other) { + swapPayload(other); + std::swap(comments_, other.comments_); + std::swap(start_, other.start_); + std::swap(limit_, other.limit_); +} + +ValueType Value::type() const { return type_; } + +int Value::compare(const Value& other) const { + if (*this < other) + return -1; + if (*this > other) + return 1; + return 0; +} + +bool Value::operator<(const Value& other) const { + int typeDelta = type_ - other.type_; + if (typeDelta) + return typeDelta < 0 ? true : false; + switch (type_) { + case nullValue: + return false; + case intValue: + return value_.int_ < other.value_.int_; + case uintValue: + return value_.uint_ < other.value_.uint_; + case realValue: + return value_.real_ < other.value_.real_; + case booleanValue: + return value_.bool_ < other.value_.bool_; + case stringValue: + { + if ((value_.string_ == 0) || (other.value_.string_ == 0)) { + if (other.value_.string_) return true; + else return false; + } + unsigned this_len; + unsigned other_len; + char const* this_str; + char const* other_str; + decodePrefixedString(this->allocated_, this->value_.string_, &this_len, &this_str); + decodePrefixedString(other.allocated_, other.value_.string_, &other_len, &other_str); + unsigned min_len = std::min(this_len, other_len); + int comp = memcmp(this_str, other_str, min_len); + if (comp < 0) return true; + if (comp > 0) return false; + return (this_len < other_len); + } + case arrayValue: + case objectValue: { + int delta = int(value_.map_->size() - other.value_.map_->size()); + if (delta) + return delta < 0; + return (*value_.map_) < (*other.value_.map_); + } + default: + JSON_ASSERT_UNREACHABLE; + } + return false; // unreachable +} + +bool Value::operator<=(const Value& other) const { return !(other < *this); } + +bool Value::operator>=(const Value& other) const { return !(*this < other); } + +bool Value::operator>(const Value& other) const { return other < *this; } + +bool Value::operator==(const Value& other) const { + // if ( type_ != other.type_ ) + // GCC 2.95.3 says: + // attempt to take address of bit-field structure member `Json::Value::type_' + // Beats me, but a temp solves the problem. 
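+  // (type_ is a bit-field, as the quoted diagnostic indicates, so it is
+  // copied into a plain int before the comparison.)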
+ int temp = other.type_; + if (type_ != temp) + return false; + switch (type_) { + case nullValue: + return true; + case intValue: + return value_.int_ == other.value_.int_; + case uintValue: + return value_.uint_ == other.value_.uint_; + case realValue: + return value_.real_ == other.value_.real_; + case booleanValue: + return value_.bool_ == other.value_.bool_; + case stringValue: + { + if ((value_.string_ == 0) || (other.value_.string_ == 0)) { + return (value_.string_ == other.value_.string_); + } + unsigned this_len; + unsigned other_len; + char const* this_str; + char const* other_str; + decodePrefixedString(this->allocated_, this->value_.string_, &this_len, &this_str); + decodePrefixedString(other.allocated_, other.value_.string_, &other_len, &other_str); + if (this_len != other_len) return false; + int comp = memcmp(this_str, other_str, this_len); + return comp == 0; + } + case arrayValue: + case objectValue: + return value_.map_->size() == other.value_.map_->size() && + (*value_.map_) == (*other.value_.map_); + default: + JSON_ASSERT_UNREACHABLE; + } + return false; // unreachable +} + +bool Value::operator!=(const Value& other) const { return !(*this == other); } + +const char* Value::asCString() const { + JSON_ASSERT_MESSAGE(type_ == stringValue, + "in Json::Value::asCString(): requires stringValue"); + if (value_.string_ == 0) return 0; + unsigned this_len; + char const* this_str; + decodePrefixedString(this->allocated_, this->value_.string_, &this_len, &this_str); + return this_str; +} + +bool Value::getString(char const** str, char const** cend) const { + if (type_ != stringValue) return false; + if (value_.string_ == 0) return false; + unsigned length; + decodePrefixedString(this->allocated_, this->value_.string_, &length, str); + *cend = *str + length; + return true; +} + +std::string Value::asString() const { + switch (type_) { + case nullValue: + return ""; + case stringValue: + { + if (value_.string_ == 0) return ""; + unsigned this_len; + char const* this_str; + decodePrefixedString(this->allocated_, this->value_.string_, &this_len, &this_str); + return std::string(this_str, this_len); + } + case booleanValue: + return value_.bool_ ? "true" : "false"; + case intValue: + return valueToString(value_.int_); + case uintValue: + return valueToString(value_.uint_); + case realValue: + return valueToString(value_.real_); + default: + JSON_FAIL_MESSAGE("Type is not convertible to string"); + } +} + +#ifdef JSON_USE_CPPTL +CppTL::ConstString Value::asConstString() const { + unsigned len; + char const* str; + decodePrefixedString(allocated_, value_.string_, + &len, &str); + return CppTL::ConstString(str, len); +} +#endif + +Value::Int Value::asInt() const { + switch (type_) { + case intValue: + JSON_ASSERT_MESSAGE(isInt(), "LargestInt out of Int range"); + return Int(value_.int_); + case uintValue: + JSON_ASSERT_MESSAGE(isInt(), "LargestUInt out of Int range"); + return Int(value_.uint_); + case realValue: + JSON_ASSERT_MESSAGE(InRange(value_.real_, minInt, maxInt), + "double out of Int range"); + return Int(value_.real_); + case nullValue: + return 0; + case booleanValue: + return value_.bool_ ? 
1 : 0; + default: + break; + } + JSON_FAIL_MESSAGE("Value is not convertible to Int."); +} + +Value::UInt Value::asUInt() const { + switch (type_) { + case intValue: + JSON_ASSERT_MESSAGE(isUInt(), "LargestInt out of UInt range"); + return UInt(value_.int_); + case uintValue: + JSON_ASSERT_MESSAGE(isUInt(), "LargestUInt out of UInt range"); + return UInt(value_.uint_); + case realValue: + JSON_ASSERT_MESSAGE(InRange(value_.real_, 0, maxUInt), + "double out of UInt range"); + return UInt(value_.real_); + case nullValue: + return 0; + case booleanValue: + return value_.bool_ ? 1 : 0; + default: + break; + } + JSON_FAIL_MESSAGE("Value is not convertible to UInt."); +} + +#if defined(JSON_HAS_INT64) + +Value::Int64 Value::asInt64() const { + switch (type_) { + case intValue: + return Int64(value_.int_); + case uintValue: + JSON_ASSERT_MESSAGE(isInt64(), "LargestUInt out of Int64 range"); + return Int64(value_.uint_); + case realValue: + JSON_ASSERT_MESSAGE(InRange(value_.real_, minInt64, maxInt64), + "double out of Int64 range"); + return Int64(value_.real_); + case nullValue: + return 0; + case booleanValue: + return value_.bool_ ? 1 : 0; + default: + break; + } + JSON_FAIL_MESSAGE("Value is not convertible to Int64."); +} + +Value::UInt64 Value::asUInt64() const { + switch (type_) { + case intValue: + JSON_ASSERT_MESSAGE(isUInt64(), "LargestInt out of UInt64 range"); + return UInt64(value_.int_); + case uintValue: + return UInt64(value_.uint_); + case realValue: + JSON_ASSERT_MESSAGE(InRange(value_.real_, 0, maxUInt64), + "double out of UInt64 range"); + return UInt64(value_.real_); + case nullValue: + return 0; + case booleanValue: + return value_.bool_ ? 1 : 0; + default: + break; + } + JSON_FAIL_MESSAGE("Value is not convertible to UInt64."); +} +#endif // if defined(JSON_HAS_INT64) + +LargestInt Value::asLargestInt() const { +#if defined(JSON_NO_INT64) + return asInt(); +#else + return asInt64(); +#endif +} + +LargestUInt Value::asLargestUInt() const { +#if defined(JSON_NO_INT64) + return asUInt(); +#else + return asUInt64(); +#endif +} + +double Value::asDouble() const { + switch (type_) { + case intValue: + return static_cast(value_.int_); + case uintValue: +#if !defined(JSON_USE_INT64_DOUBLE_CONVERSION) + return static_cast(value_.uint_); +#else // if !defined(JSON_USE_INT64_DOUBLE_CONVERSION) + return integerToDouble(value_.uint_); +#endif // if !defined(JSON_USE_INT64_DOUBLE_CONVERSION) + case realValue: + return value_.real_; + case nullValue: + return 0.0; + case booleanValue: + return value_.bool_ ? 1.0 : 0.0; + default: + break; + } + JSON_FAIL_MESSAGE("Value is not convertible to double."); +} + +float Value::asFloat() const { + switch (type_) { + case intValue: + return static_cast(value_.int_); + case uintValue: +#if !defined(JSON_USE_INT64_DOUBLE_CONVERSION) + return static_cast(value_.uint_); +#else // if !defined(JSON_USE_INT64_DOUBLE_CONVERSION) + return integerToDouble(value_.uint_); +#endif // if !defined(JSON_USE_INT64_DOUBLE_CONVERSION) + case realValue: + return static_cast(value_.real_); + case nullValue: + return 0.0; + case booleanValue: + return value_.bool_ ? 1.0f : 0.0f; + default: + break; + } + JSON_FAIL_MESSAGE("Value is not convertible to float."); +} + +bool Value::asBool() const { + switch (type_) { + case booleanValue: + return value_.bool_; + case nullValue: + return false; + case intValue: + return value_.int_ ? true : false; + case uintValue: + return value_.uint_ ? true : false; + case realValue: + // This is kind of strange. Not recommended. 
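+    // Any nonzero real converts to true; note that NaN compares unequal to
+    // 0.0 and therefore also yields true here.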
+ return (value_.real_ != 0.0) ? true : false; + default: + break; + } + JSON_FAIL_MESSAGE("Value is not convertible to bool."); +} + +bool Value::isConvertibleTo(ValueType other) const { + switch (other) { + case nullValue: + return (isNumeric() && asDouble() == 0.0) || + (type_ == booleanValue && value_.bool_ == false) || + (type_ == stringValue && asString() == "") || + (type_ == arrayValue && value_.map_->size() == 0) || + (type_ == objectValue && value_.map_->size() == 0) || + type_ == nullValue; + case intValue: + return isInt() || + (type_ == realValue && InRange(value_.real_, minInt, maxInt)) || + type_ == booleanValue || type_ == nullValue; + case uintValue: + return isUInt() || + (type_ == realValue && InRange(value_.real_, 0, maxUInt)) || + type_ == booleanValue || type_ == nullValue; + case realValue: + return isNumeric() || type_ == booleanValue || type_ == nullValue; + case booleanValue: + return isNumeric() || type_ == booleanValue || type_ == nullValue; + case stringValue: + return isNumeric() || type_ == booleanValue || type_ == stringValue || + type_ == nullValue; + case arrayValue: + return type_ == arrayValue || type_ == nullValue; + case objectValue: + return type_ == objectValue || type_ == nullValue; + } + JSON_ASSERT_UNREACHABLE; + return false; +} + +/// Number of values in array or object +ArrayIndex Value::size() const { + switch (type_) { + case nullValue: + case intValue: + case uintValue: + case realValue: + case booleanValue: + case stringValue: + return 0; + case arrayValue: // size of the array is highest index + 1 + if (!value_.map_->empty()) { + ObjectValues::const_iterator itLast = value_.map_->end(); + --itLast; + return (*itLast).first.index() + 1; + } + return 0; + case objectValue: + return ArrayIndex(value_.map_->size()); + } + JSON_ASSERT_UNREACHABLE; + return 0; // unreachable; +} + +bool Value::empty() const { + if (isNull() || isArray() || isObject()) + return size() == 0u; + else + return false; +} + +bool Value::operator!() const { return isNull(); } + +void Value::clear() { + JSON_ASSERT_MESSAGE(type_ == nullValue || type_ == arrayValue || + type_ == objectValue, + "in Json::Value::clear(): requires complex value"); + start_ = 0; + limit_ = 0; + switch (type_) { + case arrayValue: + case objectValue: + value_.map_->clear(); + break; + default: + break; + } +} + +void Value::resize(ArrayIndex newSize) { + JSON_ASSERT_MESSAGE(type_ == nullValue || type_ == arrayValue, + "in Json::Value::resize(): requires arrayValue"); + if (type_ == nullValue) + *this = Value(arrayValue); + ArrayIndex oldSize = size(); + if (newSize == 0) + clear(); + else if (newSize > oldSize) + (*this)[newSize - 1]; + else { + for (ArrayIndex index = newSize; index < oldSize; ++index) { + value_.map_->erase(index); + } + assert(size() == newSize); + } +} + +Value& Value::operator[](ArrayIndex index) { + JSON_ASSERT_MESSAGE( + type_ == nullValue || type_ == arrayValue, + "in Json::Value::operator[](ArrayIndex): requires arrayValue"); + if (type_ == nullValue) + *this = Value(arrayValue); + CZString key(index); + ObjectValues::iterator it = value_.map_->lower_bound(key); + if (it != value_.map_->end() && (*it).first == key) + return (*it).second; + + ObjectValues::value_type defaultValue(key, nullRef); + it = value_.map_->insert(it, defaultValue); + return (*it).second; +} + +Value& Value::operator[](int index) { + JSON_ASSERT_MESSAGE( + index >= 0, + "in Json::Value::operator[](int index): index cannot be negative"); + return (*this)[ArrayIndex(index)]; +} + +const Value& 
Value::operator[](ArrayIndex index) const { + JSON_ASSERT_MESSAGE( + type_ == nullValue || type_ == arrayValue, + "in Json::Value::operator[](ArrayIndex)const: requires arrayValue"); + if (type_ == nullValue) + return nullRef; + CZString key(index); + ObjectValues::const_iterator it = value_.map_->find(key); + if (it == value_.map_->end()) + return nullRef; + return (*it).second; +} + +const Value& Value::operator[](int index) const { + JSON_ASSERT_MESSAGE( + index >= 0, + "in Json::Value::operator[](int index) const: index cannot be negative"); + return (*this)[ArrayIndex(index)]; +} + +void Value::initBasic(ValueType vtype, bool allocated) { + type_ = vtype; + allocated_ = allocated; + comments_ = 0; + start_ = 0; + limit_ = 0; +} + +// Access an object value by name, create a null member if it does not exist. +// @pre Type of '*this' is object or null. +// @param key is null-terminated. +Value& Value::resolveReference(const char* key) { + JSON_ASSERT_MESSAGE( + type_ == nullValue || type_ == objectValue, + "in Json::Value::resolveReference(): requires objectValue"); + if (type_ == nullValue) + *this = Value(objectValue); + CZString actualKey( + key, static_cast(strlen(key)), CZString::noDuplication); // NOTE! + ObjectValues::iterator it = value_.map_->lower_bound(actualKey); + if (it != value_.map_->end() && (*it).first == actualKey) + return (*it).second; + + ObjectValues::value_type defaultValue(actualKey, nullRef); + it = value_.map_->insert(it, defaultValue); + Value& value = (*it).second; + return value; +} + +// @param key is not null-terminated. +Value& Value::resolveReference(char const* key, char const* cend) +{ + JSON_ASSERT_MESSAGE( + type_ == nullValue || type_ == objectValue, + "in Json::Value::resolveReference(key, end): requires objectValue"); + if (type_ == nullValue) + *this = Value(objectValue); + CZString actualKey( + key, static_cast(cend-key), CZString::duplicateOnCopy); + ObjectValues::iterator it = value_.map_->lower_bound(actualKey); + if (it != value_.map_->end() && (*it).first == actualKey) + return (*it).second; + + ObjectValues::value_type defaultValue(actualKey, nullRef); + it = value_.map_->insert(it, defaultValue); + Value& value = (*it).second; + return value; +} + +Value Value::get(ArrayIndex index, const Value& defaultValue) const { + const Value* value = &((*this)[index]); + return value == &nullRef ? 
defaultValue : *value; +} + +bool Value::isValidIndex(ArrayIndex index) const { return index < size(); } + +Value const* Value::find(char const* key, char const* cend) const +{ + JSON_ASSERT_MESSAGE( + type_ == nullValue || type_ == objectValue, + "in Json::Value::find(key, end, found): requires objectValue or nullValue"); + if (type_ == nullValue) return NULL; + CZString actualKey(key, static_cast(cend-key), CZString::noDuplication); + ObjectValues::const_iterator it = value_.map_->find(actualKey); + if (it == value_.map_->end()) return NULL; + return &(*it).second; +} +const Value& Value::operator[](const char* key) const +{ + Value const* found = find(key, key + strlen(key)); + if (!found) return nullRef; + return *found; +} +Value const& Value::operator[](std::string const& key) const +{ + Value const* found = find(key.data(), key.data() + key.length()); + if (!found) return nullRef; + return *found; +} + +Value& Value::operator[](const char* key) { + return resolveReference(key, key + strlen(key)); +} + +Value& Value::operator[](const std::string& key) { + return resolveReference(key.data(), key.data() + key.length()); +} + +Value& Value::operator[](const StaticString& key) { + return resolveReference(key.c_str()); +} + +#ifdef JSON_USE_CPPTL +Value& Value::operator[](const CppTL::ConstString& key) { + return resolveReference(key.c_str(), key.end_c_str()); +} +Value const& Value::operator[](CppTL::ConstString const& key) const +{ + Value const* found = find(key.c_str(), key.end_c_str()); + if (!found) return nullRef; + return *found; +} +#endif + +Value& Value::append(const Value& value) { return (*this)[size()] = value; } + +Value Value::get(char const* key, char const* cend, Value const& defaultValue) const +{ + Value const* found = find(key, cend); + return !found ? 
defaultValue : *found; +} +Value Value::get(char const* key, Value const& defaultValue) const +{ + return get(key, key + strlen(key), defaultValue); +} +Value Value::get(std::string const& key, Value const& defaultValue) const +{ + return get(key.data(), key.data() + key.length(), defaultValue); +} + + +bool Value::removeMember(const char* key, const char* cend, Value* removed) +{ + if (type_ != objectValue) { + return false; + } + CZString actualKey(key, static_cast(cend-key), CZString::noDuplication); + ObjectValues::iterator it = value_.map_->find(actualKey); + if (it == value_.map_->end()) + return false; + *removed = it->second; + value_.map_->erase(it); + return true; +} +bool Value::removeMember(const char* key, Value* removed) +{ + return removeMember(key, key + strlen(key), removed); +} +bool Value::removeMember(std::string const& key, Value* removed) +{ + return removeMember(key.data(), key.data() + key.length(), removed); +} +Value Value::removeMember(const char* key) +{ + JSON_ASSERT_MESSAGE(type_ == nullValue || type_ == objectValue, + "in Json::Value::removeMember(): requires objectValue"); + if (type_ == nullValue) + return nullRef; + + Value removed; // null + removeMember(key, key + strlen(key), &removed); + return removed; // still null if removeMember() did nothing +} +Value Value::removeMember(const std::string& key) +{ + return removeMember(key.c_str()); +} + +bool Value::removeIndex(ArrayIndex index, Value* removed) { + if (type_ != arrayValue) { + return false; + } + CZString key(index); + ObjectValues::iterator it = value_.map_->find(key); + if (it == value_.map_->end()) { + return false; + } + *removed = it->second; + ArrayIndex oldSize = size(); + // shift left all items left, into the place of the "removed" + for (ArrayIndex i = index; i < (oldSize - 1); ++i){ + CZString keey(i); + (*value_.map_)[keey] = (*this)[i + 1]; + } + // erase the last one ("leftover") + CZString keyLast(oldSize - 1); + ObjectValues::iterator itLast = value_.map_->find(keyLast); + value_.map_->erase(itLast); + return true; +} + +#ifdef JSON_USE_CPPTL +Value Value::get(const CppTL::ConstString& key, + const Value& defaultValue) const { + return get(key.c_str(), key.end_c_str(), defaultValue); +} +#endif + +bool Value::isMember(char const* key, char const* cend) const +{ + Value const* value = find(key, cend); + return NULL != value; +} +bool Value::isMember(char const* key) const +{ + return isMember(key, key + strlen(key)); +} +bool Value::isMember(std::string const& key) const +{ + return isMember(key.data(), key.data() + key.length()); +} + +#ifdef JSON_USE_CPPTL +bool Value::isMember(const CppTL::ConstString& key) const { + return isMember(key.c_str(), key.end_c_str()); +} +#endif + +Value::Members Value::getMemberNames() const { + JSON_ASSERT_MESSAGE( + type_ == nullValue || type_ == objectValue, + "in Json::Value::getMemberNames(), value must be objectValue"); + if (type_ == nullValue) + return Value::Members(); + Members members; + members.reserve(value_.map_->size()); + ObjectValues::const_iterator it = value_.map_->begin(); + ObjectValues::const_iterator itEnd = value_.map_->end(); + for (; it != itEnd; ++it) { + members.push_back(std::string((*it).first.data(), + (*it).first.length())); + } + return members; +} +// +//# ifdef JSON_USE_CPPTL +// EnumMemberNames +// Value::enumMemberNames() const +//{ +// if ( type_ == objectValue ) +// { +// return CppTL::Enum::any( CppTL::Enum::transform( +// CppTL::Enum::keys( *(value_.map_), CppTL::Type() ), +// MemberNamesTransform() ) ); +// 
} +// return EnumMemberNames(); +//} +// +// +// EnumValues +// Value::enumValues() const +//{ +// if ( type_ == objectValue || type_ == arrayValue ) +// return CppTL::Enum::anyValues( *(value_.map_), +// CppTL::Type() ); +// return EnumValues(); +//} +// +//# endif + +static bool IsIntegral(double d) { + double integral_part; + return modf(d, &integral_part) == 0.0; +} + +bool Value::isNull() const { return type_ == nullValue; } + +bool Value::isBool() const { return type_ == booleanValue; } + +bool Value::isInt() const { + switch (type_) { + case intValue: + return value_.int_ >= minInt && value_.int_ <= maxInt; + case uintValue: + return value_.uint_ <= UInt(maxInt); + case realValue: + return value_.real_ >= minInt && value_.real_ <= maxInt && + IsIntegral(value_.real_); + default: + break; + } + return false; +} + +bool Value::isUInt() const { + switch (type_) { + case intValue: + return value_.int_ >= 0 && LargestUInt(value_.int_) <= LargestUInt(maxUInt); + case uintValue: + return value_.uint_ <= maxUInt; + case realValue: + return value_.real_ >= 0 && value_.real_ <= maxUInt && + IsIntegral(value_.real_); + default: + break; + } + return false; +} + +bool Value::isInt64() const { +#if defined(JSON_HAS_INT64) + switch (type_) { + case intValue: + return true; + case uintValue: + return value_.uint_ <= UInt64(maxInt64); + case realValue: + // Note that maxInt64 (= 2^63 - 1) is not exactly representable as a + // double, so double(maxInt64) will be rounded up to 2^63. Therefore we + // require the value to be strictly less than the limit. + return value_.real_ >= double(minInt64) && + value_.real_ < double(maxInt64) && IsIntegral(value_.real_); + default: + break; + } +#endif // JSON_HAS_INT64 + return false; +} + +bool Value::isUInt64() const { +#if defined(JSON_HAS_INT64) + switch (type_) { + case intValue: + return value_.int_ >= 0; + case uintValue: + return true; + case realValue: + // Note that maxUInt64 (= 2^64 - 1) is not exactly representable as a + // double, so double(maxUInt64) will be rounded up to 2^64. Therefore we + // require the value to be strictly less than the limit. + return value_.real_ >= 0 && value_.real_ < maxUInt64AsDouble && + IsIntegral(value_.real_); + default: + break; + } +#endif // JSON_HAS_INT64 + return false; +} + +bool Value::isIntegral() const { +#if defined(JSON_HAS_INT64) + return isInt64() || isUInt64(); +#else + return isInt() || isUInt(); +#endif +} + +bool Value::isDouble() const { return type_ == realValue || isIntegral(); } + +bool Value::isNumeric() const { return isIntegral() || isDouble(); } + +bool Value::isString() const { return type_ == stringValue; } + +bool Value::isArray() const { return type_ == arrayValue; } + +bool Value::isObject() const { return type_ == objectValue; } + +void Value::setComment(const char* comment, size_t len, CommentPlacement placement) { + if (!comments_) + comments_ = new CommentInfo[numberOfCommentPlacement]; + if ((len > 0) && (comment[len-1] == '\n')) { + // Always discard trailing newline, to aid indentation. 
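+    // Only a single trailing '\n' is stripped; the (len > 0) test above keeps
+    // the empty-comment case well defined.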
+ len -= 1; + } + comments_[placement].setComment(comment, len); +} + +void Value::setComment(const char* comment, CommentPlacement placement) { + setComment(comment, strlen(comment), placement); +} + +void Value::setComment(const std::string& comment, CommentPlacement placement) { + setComment(comment.c_str(), comment.length(), placement); +} + +bool Value::hasComment(CommentPlacement placement) const { + return comments_ != 0 && comments_[placement].comment_ != 0; +} + +std::string Value::getComment(CommentPlacement placement) const { + if (hasComment(placement)) + return comments_[placement].comment_; + return ""; +} + +void Value::setOffsetStart(size_t start) { start_ = start; } + +void Value::setOffsetLimit(size_t limit) { limit_ = limit; } + +size_t Value::getOffsetStart() const { return start_; } + +size_t Value::getOffsetLimit() const { return limit_; } + +std::string Value::toStyledString() const { + StyledWriter writer; + return writer.write(*this); +} + +Value::const_iterator Value::begin() const { + switch (type_) { + case arrayValue: + case objectValue: + if (value_.map_) + return const_iterator(value_.map_->begin()); + break; + default: + break; + } + return const_iterator(); +} + +Value::const_iterator Value::end() const { + switch (type_) { + case arrayValue: + case objectValue: + if (value_.map_) + return const_iterator(value_.map_->end()); + break; + default: + break; + } + return const_iterator(); +} + +Value::iterator Value::begin() { + switch (type_) { + case arrayValue: + case objectValue: + if (value_.map_) + return iterator(value_.map_->begin()); + break; + default: + break; + } + return iterator(); +} + +Value::iterator Value::end() { + switch (type_) { + case arrayValue: + case objectValue: + if (value_.map_) + return iterator(value_.map_->end()); + break; + default: + break; + } + return iterator(); +} + +// class PathArgument +// ////////////////////////////////////////////////////////////////// + +PathArgument::PathArgument() : key_(), index_(), kind_(kindNone) {} + +PathArgument::PathArgument(ArrayIndex index) + : key_(), index_(index), kind_(kindIndex) {} + +PathArgument::PathArgument(const char* key) + : key_(key), index_(), kind_(kindKey) {} + +PathArgument::PathArgument(const std::string& key) + : key_(key.c_str()), index_(), kind_(kindKey) {} + +// class Path +// ////////////////////////////////////////////////////////////////// + +Path::Path(const std::string& path, + const PathArgument& a1, + const PathArgument& a2, + const PathArgument& a3, + const PathArgument& a4, + const PathArgument& a5) { + InArgs in; + in.push_back(&a1); + in.push_back(&a2); + in.push_back(&a3); + in.push_back(&a4); + in.push_back(&a5); + makePath(path, in); +} + +void Path::makePath(const std::string& path, const InArgs& in) { + const char* current = path.c_str(); + const char* end = current + path.length(); + InArgs::const_iterator itInArg = in.begin(); + while (current != end) { + if (*current == '[') { + ++current; + if (*current == '%') + addPathInArg(path, in, itInArg, PathArgument::kindIndex); + else { + ArrayIndex index = 0; + for (; current != end && *current >= '0' && *current <= '9'; ++current) + index = index * 10 + ArrayIndex(*current - '0'); + args_.push_back(index); + } + if (current == end || *current++ != ']') + invalidPath(path, int(current - path.c_str())); + } else if (*current == '%') { + addPathInArg(path, in, itInArg, PathArgument::kindKey); + ++current; + } else if (*current == '.') { + ++current; + } else { + const char* beginName = current; + while 
(current != end && !strchr("[.", *current)) + ++current; + args_.push_back(std::string(beginName, current)); + } + } +} + +void Path::addPathInArg(const std::string& /*path*/, + const InArgs& in, + InArgs::const_iterator& itInArg, + PathArgument::Kind kind) { + if (itInArg == in.end()) { + // Error: missing argument %d + } else if ((*itInArg)->kind_ != kind) { + // Error: bad argument type + } else { + args_.push_back(**itInArg); + } +} + +void Path::invalidPath(const std::string& /*path*/, int /*location*/) { + // Error: invalid path. +} + +const Value& Path::resolve(const Value& root) const { + const Value* node = &root; + for (Args::const_iterator it = args_.begin(); it != args_.end(); ++it) { + const PathArgument& arg = *it; + if (arg.kind_ == PathArgument::kindIndex) { + if (!node->isArray() || !node->isValidIndex(arg.index_)) { + // Error: unable to resolve path (array value expected at position... + } + node = &((*node)[arg.index_]); + } else if (arg.kind_ == PathArgument::kindKey) { + if (!node->isObject()) { + // Error: unable to resolve path (object value expected at position...) + } + node = &((*node)[arg.key_]); + if (node == &Value::nullRef) { + // Error: unable to resolve path (object has no member named '' at + // position...) + } + } + } + return *node; +} + +Value Path::resolve(const Value& root, const Value& defaultValue) const { + const Value* node = &root; + for (Args::const_iterator it = args_.begin(); it != args_.end(); ++it) { + const PathArgument& arg = *it; + if (arg.kind_ == PathArgument::kindIndex) { + if (!node->isArray() || !node->isValidIndex(arg.index_)) + return defaultValue; + node = &((*node)[arg.index_]); + } else if (arg.kind_ == PathArgument::kindKey) { + if (!node->isObject()) + return defaultValue; + node = &((*node)[arg.key_]); + if (node == &Value::nullRef) + return defaultValue; + } + } + return *node; +} + +Value& Path::make(Value& root) const { + Value* node = &root; + for (Args::const_iterator it = args_.begin(); it != args_.end(); ++it) { + const PathArgument& arg = *it; + if (arg.kind_ == PathArgument::kindIndex) { + if (!node->isArray()) { + // Error: node is not an array at position ... + } + node = &((*node)[arg.index_]); + } else if (arg.kind_ == PathArgument::kindKey) { + if (!node->isObject()) { + // Error: node is not an object at position... + } + node = &((*node)[arg.key_]); + } + } + return *node; +} + +} // namespace Json + +// ////////////////////////////////////////////////////////////////////// +// End of content of file: src/lib_json/json_value.cpp +// ////////////////////////////////////////////////////////////////////// + + + + + + +// ////////////////////////////////////////////////////////////////////// +// Beginning of content of file: src/lib_json/json_writer.cpp +// ////////////////////////////////////////////////////////////////////// + +// Copyright 2011 Baptiste Lepilleur +// Distributed under MIT license, or public domain if desired and +// recognized in your jurisdiction. 
+// See file LICENSE for detail or copy at http://jsoncpp.sourceforge.net/LICENSE
+
+#if !defined(JSON_IS_AMALGAMATION)
+#include <json/writer.h>
+#include "json_tool.h"
+#endif // if !defined(JSON_IS_AMALGAMATION)
+#include <iomanip>
+#include <memory>
+#include <sstream>
+#include <utility>
+#include <set>
+#include <cassert>
+#include <cstring>
+#include <cstdio>
+
+#if defined(_MSC_VER) && _MSC_VER >= 1200 && _MSC_VER < 1800 // Between VC++ 6.0 and VC++ 11.0
+#include <float.h>
+#define isfinite _finite
+#elif defined(__sun) && defined(__SVR4) //Solaris
+#if !defined(isfinite)
+#include <ieeefp.h>
+#define isfinite finite
+#endif
+#elif defined(_AIX)
+#if !defined(isfinite)
+#include <math.h>
+#define isfinite finite
+#endif
+#elif defined(__hpux)
+#if !defined(isfinite)
+#if defined(__ia64) && !defined(finite)
+#define isfinite(x) ((sizeof(x) == sizeof(float) ? \
+                     _Isfinitef(x) : _IsFinite(x)))
+#else
+#include <math.h>
+#define isfinite finite
+#endif
+#endif
+#else
+#include <cmath>
+#if !(defined(__QNXNTO__)) // QNX already defines isfinite
+#define isfinite std::isfinite
+#endif
+#endif
+
+#if defined(_MSC_VER)
+#if !defined(WINCE) && defined(__STDC_SECURE_LIB__) && _MSC_VER >= 1500 // VC++ 9.0 and above
+#define snprintf sprintf_s
+#elif _MSC_VER >= 1900 // VC++ 14.0 and above
+#define snprintf std::snprintf
+#else
+#define snprintf _snprintf
+#endif
+#elif defined(__ANDROID__) || defined(__QNXNTO__)
+#define snprintf snprintf
+#elif __cplusplus >= 201103L
+#define snprintf std::snprintf
+#endif
+
+#if defined(__BORLANDC__)
+#include <float.h>
+#define isfinite _finite
+#define snprintf _snprintf
+#endif
+
+#if defined(_MSC_VER) && _MSC_VER >= 1400 // VC++ 8.0
+// Disable warning about strdup being deprecated.
+#pragma warning(disable : 4996)
+#endif
+
+namespace Json {
+
+#if __cplusplus >= 201103L || (defined(_CPPLIB_VER) && _CPPLIB_VER >= 520)
+typedef std::unique_ptr<StreamWriter> StreamWriterPtr;
+#else
+typedef std::auto_ptr<StreamWriter> StreamWriterPtr;
+#endif
+
+static bool containsControlCharacter(const char* str) {
+  while (*str) {
+    if (isControlCharacter(*(str++)))
+      return true;
+  }
+  return false;
+}
+
+static bool containsControlCharacter0(const char* str, unsigned len) {
+  char const* end = str + len;
+  while (end != str) {
+    if (isControlCharacter(*str) || 0==*str)
+      return true;
+    ++str;
+  }
+  return false;
+}
+
+std::string valueToString(LargestInt value) {
+  UIntToStringBuffer buffer;
+  char* current = buffer + sizeof(buffer);
+  if (value == Value::minLargestInt) {
+    uintToString(LargestUInt(Value::maxLargestInt) + 1, current);
+    *--current = '-';
+  } else if (value < 0) {
+    uintToString(LargestUInt(-value), current);
+    *--current = '-';
+  } else {
+    uintToString(LargestUInt(value), current);
+  }
+  assert(current >= buffer);
+  return current;
+}
+
+std::string valueToString(LargestUInt value) {
+  UIntToStringBuffer buffer;
+  char* current = buffer + sizeof(buffer);
+  uintToString(value, current);
+  assert(current >= buffer);
+  return current;
+}
+
+#if defined(JSON_HAS_INT64)
+
+std::string valueToString(Int value) {
+  return valueToString(LargestInt(value));
+}
+
+std::string valueToString(UInt value) {
+  return valueToString(LargestUInt(value));
+}
+
+#endif // # if defined(JSON_HAS_INT64)
+
+std::string valueToString(double value, bool useSpecialFloats, unsigned int precision) {
+  // Allocate a buffer that is more than large enough to store the 16 digits of
+  // precision requested below.
+  char buffer[32];
+  int len = -1;
+
+  char formatString[6];
+  sprintf(formatString, "%%.%dg", precision);
+
+  // Print into the buffer.
+  // We need not request the alternative representation
+  // that always has a decimal point because JSON doesn't distinguish the
+  // concepts of reals and integers.
+  if (isfinite(value)) {
+    len = snprintf(buffer, sizeof(buffer), formatString, value);
+  } else {
+    // IEEE standard states that NaN values will not compare to themselves
+    if (value != value) {
+      len = snprintf(buffer, sizeof(buffer), useSpecialFloats ? "NaN" : "null");
+    } else if (value < 0) {
+      len = snprintf(buffer, sizeof(buffer), useSpecialFloats ? "-Infinity" : "-1e+9999");
+    } else {
+      len = snprintf(buffer, sizeof(buffer), useSpecialFloats ? "Infinity" : "1e+9999");
+    }
+    // For those, we do not need to call fixNumLoc, but it is fast.
+  }
+  assert(len >= 0);
+  fixNumericLocale(buffer, buffer + len);
+  return buffer;
+}
+
+std::string valueToString(double value) { return valueToString(value, false, 17); }
+
+std::string valueToString(bool value) { return value ? "true" : "false"; }
+
+std::string valueToQuotedString(const char* value) {
+  if (value == NULL)
+    return "";
+  // Not sure how to handle unicode...
+  if (strpbrk(value, "\"\\\b\f\n\r\t") == NULL &&
+      !containsControlCharacter(value))
+    return std::string("\"") + value + "\"";
+  // We have to walk value and escape any special characters.
+  // Appending to std::string is not efficient, but this should be rare.
+  // (Note: forward slashes are *not* rare, but I am not escaping them.)
+  std::string::size_type maxsize =
+      strlen(value) * 2 + 3; // allescaped+quotes+NULL
+  std::string result;
+  result.reserve(maxsize); // to avoid lots of mallocs
+  result += "\"";
+  for (const char* c = value; *c != 0; ++c) {
+    switch (*c) {
+    case '\"':
+      result += "\\\"";
+      break;
+    case '\\':
+      result += "\\\\";
+      break;
+    case '\b':
+      result += "\\b";
+      break;
+    case '\f':
+      result += "\\f";
+      break;
+    case '\n':
+      result += "\\n";
+      break;
+    case '\r':
+      result += "\\r";
+      break;
+    case '\t':
+      result += "\\t";
+      break;
+    // case '/':
+    // Even though \/ is considered a legal escape in JSON, a bare
+    // slash is also legal, so I see no reason to escape it.
+    // (I hope I am not misunderstanding something.
+    // blep notes: actually escaping \/ may be useful in javascript to avoid </
+    // sequence.
+    // Should add a flag to allow this compatibility mode and prevent this
+    // sequence from occurring.
+    default:
+      if (isControlCharacter(*c)) {
+        std::ostringstream oss;
+        oss << "\\u" << std::hex << std::uppercase << std::setfill('0')
+            << std::setw(4) << static_cast<int>(*c);
+        result += oss.str();
+      } else {
+        result += *c;
+      }
+      break;
+    }
+  }
+  result += "\"";
+  return result;
+}
+
+// https://github.com/upcaste/upcaste/blob/master/src/upcore/src/cstring/strnpbrk.cpp
+static char const* strnpbrk(char const* s, char const* accept, size_t n) {
+  assert((s || !n) && accept);
+
+  char const* const end = s + n;
+  for (char const* cur = s; cur < end; ++cur) {
+    int const c = *cur;
+    for (char const* a = accept; *a; ++a) {
+      if (*a == c) {
+        return cur;
+      }
+    }
+  }
+  return NULL;
+}
+static std::string valueToQuotedStringN(const char* value, unsigned length) {
+  if (value == NULL)
+    return "";
+  // Not sure how to handle unicode...
+  if (strnpbrk(value, "\"\\\b\f\n\r\t", length) == NULL &&
+      !containsControlCharacter0(value, length))
+    return std::string("\"") + value + "\"";
+  // We have to walk value and escape any special characters.
+  // Appending to std::string is not efficient, but this should be rare.
+  // (Note: forward slashes are *not* rare, but I am not escaping them.)
+  std::string::size_type maxsize =
+      length * 2 + 3; // allescaped+quotes+NULL
+  std::string result;
+  result.reserve(maxsize); // to avoid lots of mallocs
+  result += "\"";
+  char const* end = value + length;
+  for (const char* c = value; c != end; ++c) {
+    switch (*c) {
+    case '\"':
+      result += "\\\"";
+      break;
+    case '\\':
+      result += "\\\\";
+      break;
+    case '\b':
+      result += "\\b";
+      break;
+    case '\f':
+      result += "\\f";
+      break;
+    case '\n':
+      result += "\\n";
+      break;
+    case '\r':
+      result += "\\r";
+      break;
+    case '\t':
+      result += "\\t";
+      break;
+    // case '/':
+    // Even though \/ is considered a legal escape in JSON, a bare
+    // slash is also legal, so I see no reason to escape it.
+    // (I hope I am not misunderstanding something.)
+    // blep notes: actually escaping \/ may be useful in javascript to avoid </
+    // sequence.
+    // Should add a flag to allow this compatibility mode and prevent this
+    // sequence from occurring.
+    default:
+      if (isControlCharacter(*c)) {
+        std::ostringstream oss;
+        oss << "\\u" << std::hex << std::uppercase << std::setfill('0')
+            << std::setw(4) << static_cast<int>(*c);
+        result += oss.str();
+      } else {
+        result += *c;
+      }
+      break;
+    }
+  }
+  result += "\"";
+  return result;
+}
+
+// Class Writer
+// //////////////////////////////////////////////////////////////////
+Writer::~Writer() {}
+
+// Class FastWriter
+// //////////////////////////////////////////////////////////////////
+
+FastWriter::FastWriter()
+    : yamlCompatiblityEnabled_(false), dropNullPlaceholders_(false),
+      omitEndingLineFeed_(false) {}
+
+void FastWriter::enableYAMLCompatibility() { yamlCompatiblityEnabled_ = true; }
+
+void FastWriter::dropNullPlaceholders() { dropNullPlaceholders_ = true; }
+
+void FastWriter::omitEndingLineFeed() { omitEndingLineFeed_ = true; }
+
+std::string FastWriter::write(const Value& root) {
+  document_ = "";
+  writeValue(root);
+  if (!omitEndingLineFeed_)
+    document_ += "\n";
+  return document_;
+}
+
+void FastWriter::writeValue(const Value& value) {
+  switch (value.type()) {
+  case nullValue:
+    if (!dropNullPlaceholders_)
+      document_ += "null";
+    break;
+  case intValue:
+    document_ += valueToString(value.asLargestInt());
+    break;
+  case uintValue:
+    document_ += valueToString(value.asLargestUInt());
+    break;
+  case realValue:
+    document_ += valueToString(value.asDouble());
+    break;
+  case stringValue:
+  {
+    // Is NULL possible for value.string_?
+    char const* str;
+    char const* end;
+    bool ok = value.getString(&str, &end);
+    if (ok) document_ += valueToQuotedStringN(str, static_cast<unsigned>(end-str));
+    break;
+  }
+  case booleanValue:
+    document_ += valueToString(value.asBool());
+    break;
+  case arrayValue: {
+    document_ += '[';
+    int size = value.size();
+    for (int index = 0; index < size; ++index) {
+      if (index > 0)
+        document_ += ',';
+      writeValue(value[index]);
+    }
+    document_ += ']';
+  } break;
+  case objectValue: {
+    Value::Members members(value.getMemberNames());
+    document_ += '{';
+    for (Value::Members::iterator it = members.begin(); it != members.end();
+         ++it) {
+      const std::string& name = *it;
+      if (it != members.begin())
+        document_ += ',';
+      document_ += valueToQuotedStringN(name.data(), static_cast<unsigned>(name.length()));
+      document_ += yamlCompatiblityEnabled_ ?
": " : ":"; + writeValue(value[name]); + } + document_ += '}'; + } break; + } +} + +// Class StyledWriter +// ////////////////////////////////////////////////////////////////// + +StyledWriter::StyledWriter() + : rightMargin_(74), indentSize_(3), addChildValues_() {} + +std::string StyledWriter::write(const Value& root) { + document_ = ""; + addChildValues_ = false; + indentString_ = ""; + writeCommentBeforeValue(root); + writeValue(root); + writeCommentAfterValueOnSameLine(root); + document_ += "\n"; + return document_; +} + +void StyledWriter::writeValue(const Value& value) { + switch (value.type()) { + case nullValue: + pushValue("null"); + break; + case intValue: + pushValue(valueToString(value.asLargestInt())); + break; + case uintValue: + pushValue(valueToString(value.asLargestUInt())); + break; + case realValue: + pushValue(valueToString(value.asDouble())); + break; + case stringValue: + { + // Is NULL possible for value.string_? + char const* str; + char const* end; + bool ok = value.getString(&str, &end); + if (ok) pushValue(valueToQuotedStringN(str, static_cast(end-str))); + else pushValue(""); + break; + } + case booleanValue: + pushValue(valueToString(value.asBool())); + break; + case arrayValue: + writeArrayValue(value); + break; + case objectValue: { + Value::Members members(value.getMemberNames()); + if (members.empty()) + pushValue("{}"); + else { + writeWithIndent("{"); + indent(); + Value::Members::iterator it = members.begin(); + for (;;) { + const std::string& name = *it; + const Value& childValue = value[name]; + writeCommentBeforeValue(childValue); + writeWithIndent(valueToQuotedString(name.c_str())); + document_ += " : "; + writeValue(childValue); + if (++it == members.end()) { + writeCommentAfterValueOnSameLine(childValue); + break; + } + document_ += ','; + writeCommentAfterValueOnSameLine(childValue); + } + unindent(); + writeWithIndent("}"); + } + } break; + } +} + +void StyledWriter::writeArrayValue(const Value& value) { + unsigned size = value.size(); + if (size == 0) + pushValue("[]"); + else { + bool isArrayMultiLine = isMultineArray(value); + if (isArrayMultiLine) { + writeWithIndent("["); + indent(); + bool hasChildValue = !childValues_.empty(); + unsigned index = 0; + for (;;) { + const Value& childValue = value[index]; + writeCommentBeforeValue(childValue); + if (hasChildValue) + writeWithIndent(childValues_[index]); + else { + writeIndent(); + writeValue(childValue); + } + if (++index == size) { + writeCommentAfterValueOnSameLine(childValue); + break; + } + document_ += ','; + writeCommentAfterValueOnSameLine(childValue); + } + unindent(); + writeWithIndent("]"); + } else // output on a single line + { + assert(childValues_.size() == size); + document_ += "[ "; + for (unsigned index = 0; index < size; ++index) { + if (index > 0) + document_ += ", "; + document_ += childValues_[index]; + } + document_ += " ]"; + } + } +} + +bool StyledWriter::isMultineArray(const Value& value) { + int size = value.size(); + bool isMultiLine = size * 3 >= rightMargin_; + childValues_.clear(); + for (int index = 0; index < size && !isMultiLine; ++index) { + const Value& childValue = value[index]; + isMultiLine = ((childValue.isArray() || childValue.isObject()) && + childValue.size() > 0); + } + if (!isMultiLine) // check if line length > max line length + { + childValues_.reserve(size); + addChildValues_ = true; + int lineLength = 4 + (size - 1) * 2; // '[ ' + ', '*n + ' ]' + for (int index = 0; index < size; ++index) { + if (hasCommentForValue(value[index])) { + 
isMultiLine = true; + } + writeValue(value[index]); + lineLength += int(childValues_[index].length()); + } + addChildValues_ = false; + isMultiLine = isMultiLine || lineLength >= rightMargin_; + } + return isMultiLine; +} + +void StyledWriter::pushValue(const std::string& value) { + if (addChildValues_) + childValues_.push_back(value); + else + document_ += value; +} + +void StyledWriter::writeIndent() { + if (!document_.empty()) { + char last = document_[document_.length() - 1]; + if (last == ' ') // already indented + return; + if (last != '\n') // Comments may add new-line + document_ += '\n'; + } + document_ += indentString_; +} + +void StyledWriter::writeWithIndent(const std::string& value) { + writeIndent(); + document_ += value; +} + +void StyledWriter::indent() { indentString_ += std::string(indentSize_, ' '); } + +void StyledWriter::unindent() { + assert(int(indentString_.size()) >= indentSize_); + indentString_.resize(indentString_.size() - indentSize_); +} + +void StyledWriter::writeCommentBeforeValue(const Value& root) { + if (!root.hasComment(commentBefore)) + return; + + document_ += "\n"; + writeIndent(); + const std::string& comment = root.getComment(commentBefore); + std::string::const_iterator iter = comment.begin(); + while (iter != comment.end()) { + document_ += *iter; + if (*iter == '\n' && + (iter != comment.end() && *(iter + 1) == '/')) + writeIndent(); + ++iter; + } + + // Comments are stripped of trailing newlines, so add one here + document_ += "\n"; +} + +void StyledWriter::writeCommentAfterValueOnSameLine(const Value& root) { + if (root.hasComment(commentAfterOnSameLine)) + document_ += " " + root.getComment(commentAfterOnSameLine); + + if (root.hasComment(commentAfter)) { + document_ += "\n"; + document_ += root.getComment(commentAfter); + document_ += "\n"; + } +} + +bool StyledWriter::hasCommentForValue(const Value& value) { + return value.hasComment(commentBefore) || + value.hasComment(commentAfterOnSameLine) || + value.hasComment(commentAfter); +} + +// Class StyledStreamWriter +// ////////////////////////////////////////////////////////////////// + +StyledStreamWriter::StyledStreamWriter(std::string indentation) + : document_(NULL), rightMargin_(74), indentation_(indentation), + addChildValues_() {} + +void StyledStreamWriter::write(std::ostream& out, const Value& root) { + document_ = &out; + addChildValues_ = false; + indentString_ = ""; + indented_ = true; + writeCommentBeforeValue(root); + if (!indented_) writeIndent(); + indented_ = true; + writeValue(root); + writeCommentAfterValueOnSameLine(root); + *document_ << "\n"; + document_ = NULL; // Forget the stream, for safety. +} + +void StyledStreamWriter::writeValue(const Value& value) { + switch (value.type()) { + case nullValue: + pushValue("null"); + break; + case intValue: + pushValue(valueToString(value.asLargestInt())); + break; + case uintValue: + pushValue(valueToString(value.asLargestUInt())); + break; + case realValue: + pushValue(valueToString(value.asDouble())); + break; + case stringValue: + { + // Is NULL possible for value.string_? 
+ char const* str; + char const* end; + bool ok = value.getString(&str, &end); + if (ok) pushValue(valueToQuotedStringN(str, static_cast(end-str))); + else pushValue(""); + break; + } + case booleanValue: + pushValue(valueToString(value.asBool())); + break; + case arrayValue: + writeArrayValue(value); + break; + case objectValue: { + Value::Members members(value.getMemberNames()); + if (members.empty()) + pushValue("{}"); + else { + writeWithIndent("{"); + indent(); + Value::Members::iterator it = members.begin(); + for (;;) { + const std::string& name = *it; + const Value& childValue = value[name]; + writeCommentBeforeValue(childValue); + writeWithIndent(valueToQuotedString(name.c_str())); + *document_ << " : "; + writeValue(childValue); + if (++it == members.end()) { + writeCommentAfterValueOnSameLine(childValue); + break; + } + *document_ << ","; + writeCommentAfterValueOnSameLine(childValue); + } + unindent(); + writeWithIndent("}"); + } + } break; + } +} + +void StyledStreamWriter::writeArrayValue(const Value& value) { + unsigned size = value.size(); + if (size == 0) + pushValue("[]"); + else { + bool isArrayMultiLine = isMultineArray(value); + if (isArrayMultiLine) { + writeWithIndent("["); + indent(); + bool hasChildValue = !childValues_.empty(); + unsigned index = 0; + for (;;) { + const Value& childValue = value[index]; + writeCommentBeforeValue(childValue); + if (hasChildValue) + writeWithIndent(childValues_[index]); + else { + if (!indented_) writeIndent(); + indented_ = true; + writeValue(childValue); + indented_ = false; + } + if (++index == size) { + writeCommentAfterValueOnSameLine(childValue); + break; + } + *document_ << ","; + writeCommentAfterValueOnSameLine(childValue); + } + unindent(); + writeWithIndent("]"); + } else // output on a single line + { + assert(childValues_.size() == size); + *document_ << "[ "; + for (unsigned index = 0; index < size; ++index) { + if (index > 0) + *document_ << ", "; + *document_ << childValues_[index]; + } + *document_ << " ]"; + } + } +} + +bool StyledStreamWriter::isMultineArray(const Value& value) { + int size = value.size(); + bool isMultiLine = size * 3 >= rightMargin_; + childValues_.clear(); + for (int index = 0; index < size && !isMultiLine; ++index) { + const Value& childValue = value[index]; + isMultiLine = ((childValue.isArray() || childValue.isObject()) && + childValue.size() > 0); + } + if (!isMultiLine) // check if line length > max line length + { + childValues_.reserve(size); + addChildValues_ = true; + int lineLength = 4 + (size - 1) * 2; // '[ ' + ', '*n + ' ]' + for (int index = 0; index < size; ++index) { + if (hasCommentForValue(value[index])) { + isMultiLine = true; + } + writeValue(value[index]); + lineLength += int(childValues_[index].length()); + } + addChildValues_ = false; + isMultiLine = isMultiLine || lineLength >= rightMargin_; + } + return isMultiLine; +} + +void StyledStreamWriter::pushValue(const std::string& value) { + if (addChildValues_) + childValues_.push_back(value); + else + *document_ << value; +} + +void StyledStreamWriter::writeIndent() { + // blep intended this to look at the so-far-written string + // to determine whether we are already indented, but + // with a stream we cannot do that. So we rely on some saved state. + // The caller checks indented_. 
+ *document_ << '\n' << indentString_; +} + +void StyledStreamWriter::writeWithIndent(const std::string& value) { + if (!indented_) writeIndent(); + *document_ << value; + indented_ = false; +} + +void StyledStreamWriter::indent() { indentString_ += indentation_; } + +void StyledStreamWriter::unindent() { + assert(indentString_.size() >= indentation_.size()); + indentString_.resize(indentString_.size() - indentation_.size()); +} + +void StyledStreamWriter::writeCommentBeforeValue(const Value& root) { + if (!root.hasComment(commentBefore)) + return; + + if (!indented_) writeIndent(); + const std::string& comment = root.getComment(commentBefore); + std::string::const_iterator iter = comment.begin(); + while (iter != comment.end()) { + *document_ << *iter; + if (*iter == '\n' && + (iter != comment.end() && *(iter + 1) == '/')) + // writeIndent(); // would include newline + *document_ << indentString_; + ++iter; + } + indented_ = false; +} + +void StyledStreamWriter::writeCommentAfterValueOnSameLine(const Value& root) { + if (root.hasComment(commentAfterOnSameLine)) + *document_ << ' ' << root.getComment(commentAfterOnSameLine); + + if (root.hasComment(commentAfter)) { + writeIndent(); + *document_ << root.getComment(commentAfter); + } + indented_ = false; +} + +bool StyledStreamWriter::hasCommentForValue(const Value& value) { + return value.hasComment(commentBefore) || + value.hasComment(commentAfterOnSameLine) || + value.hasComment(commentAfter); +} + +////////////////////////// +// BuiltStyledStreamWriter + +/// Scoped enums are not available until C++11. +struct CommentStyle { + /// Decide whether to write comments. + enum Enum { + None, ///< Drop all comments. + Most, ///< Recover odd behavior of previous versions (not implemented yet). + All ///< Keep all comments. 
+ }; +}; + +struct BuiltStyledStreamWriter : public StreamWriter +{ + BuiltStyledStreamWriter( + std::string const& indentation, + CommentStyle::Enum cs, + std::string const& colonSymbol, + std::string const& nullSymbol, + std::string const& endingLineFeedSymbol, + bool useSpecialFloats, + unsigned int precision); + int write(Value const& root, std::ostream* sout) override; +private: + void writeValue(Value const& value); + void writeArrayValue(Value const& value); + bool isMultineArray(Value const& value); + void pushValue(std::string const& value); + void writeIndent(); + void writeWithIndent(std::string const& value); + void indent(); + void unindent(); + void writeCommentBeforeValue(Value const& root); + void writeCommentAfterValueOnSameLine(Value const& root); + static bool hasCommentForValue(const Value& value); + + typedef std::vector ChildValues; + + ChildValues childValues_; + std::string indentString_; + int rightMargin_; + std::string indentation_; + CommentStyle::Enum cs_; + std::string colonSymbol_; + std::string nullSymbol_; + std::string endingLineFeedSymbol_; + bool addChildValues_ : 1; + bool indented_ : 1; + bool useSpecialFloats_ : 1; + unsigned int precision_; +}; +BuiltStyledStreamWriter::BuiltStyledStreamWriter( + std::string const& indentation, + CommentStyle::Enum cs, + std::string const& colonSymbol, + std::string const& nullSymbol, + std::string const& endingLineFeedSymbol, + bool useSpecialFloats, + unsigned int precision) + : rightMargin_(74) + , indentation_(indentation) + , cs_(cs) + , colonSymbol_(colonSymbol) + , nullSymbol_(nullSymbol) + , endingLineFeedSymbol_(endingLineFeedSymbol) + , addChildValues_(false) + , indented_(false) + , useSpecialFloats_(useSpecialFloats) + , precision_(precision) +{ +} +int BuiltStyledStreamWriter::write(Value const& root, std::ostream* sout) +{ + sout_ = sout; + addChildValues_ = false; + indented_ = true; + indentString_ = ""; + writeCommentBeforeValue(root); + if (!indented_) writeIndent(); + indented_ = true; + writeValue(root); + writeCommentAfterValueOnSameLine(root); + *sout_ << endingLineFeedSymbol_; + sout_ = NULL; + return 0; +} +void BuiltStyledStreamWriter::writeValue(Value const& value) { + switch (value.type()) { + case nullValue: + pushValue(nullSymbol_); + break; + case intValue: + pushValue(valueToString(value.asLargestInt())); + break; + case uintValue: + pushValue(valueToString(value.asLargestUInt())); + break; + case realValue: + pushValue(valueToString(value.asDouble(), useSpecialFloats_, precision_)); + break; + case stringValue: + { + // Is NULL is possible for value.string_? 
+ char const* str; + char const* end; + bool ok = value.getString(&str, &end); + if (ok) pushValue(valueToQuotedStringN(str, static_cast(end-str))); + else pushValue(""); + break; + } + case booleanValue: + pushValue(valueToString(value.asBool())); + break; + case arrayValue: + writeArrayValue(value); + break; + case objectValue: { + Value::Members members(value.getMemberNames()); + if (members.empty()) + pushValue("{}"); + else { + writeWithIndent("{"); + indent(); + Value::Members::iterator it = members.begin(); + for (;;) { + std::string const& name = *it; + Value const& childValue = value[name]; + writeCommentBeforeValue(childValue); + writeWithIndent(valueToQuotedStringN(name.data(), static_cast(name.length()))); + *sout_ << colonSymbol_; + writeValue(childValue); + if (++it == members.end()) { + writeCommentAfterValueOnSameLine(childValue); + break; + } + *sout_ << ","; + writeCommentAfterValueOnSameLine(childValue); + } + unindent(); + writeWithIndent("}"); + } + } break; + } +} + +void BuiltStyledStreamWriter::writeArrayValue(Value const& value) { + unsigned size = value.size(); + if (size == 0) + pushValue("[]"); + else { + bool isMultiLine = (cs_ == CommentStyle::All) || isMultineArray(value); + if (isMultiLine) { + writeWithIndent("["); + indent(); + bool hasChildValue = !childValues_.empty(); + unsigned index = 0; + for (;;) { + Value const& childValue = value[index]; + writeCommentBeforeValue(childValue); + if (hasChildValue) + writeWithIndent(childValues_[index]); + else { + if (!indented_) writeIndent(); + indented_ = true; + writeValue(childValue); + indented_ = false; + } + if (++index == size) { + writeCommentAfterValueOnSameLine(childValue); + break; + } + *sout_ << ","; + writeCommentAfterValueOnSameLine(childValue); + } + unindent(); + writeWithIndent("]"); + } else // output on a single line + { + assert(childValues_.size() == size); + *sout_ << "["; + if (!indentation_.empty()) *sout_ << " "; + for (unsigned index = 0; index < size; ++index) { + if (index > 0) + *sout_ << ", "; + *sout_ << childValues_[index]; + } + if (!indentation_.empty()) *sout_ << " "; + *sout_ << "]"; + } + } +} + +bool BuiltStyledStreamWriter::isMultineArray(Value const& value) { + int size = value.size(); + bool isMultiLine = size * 3 >= rightMargin_; + childValues_.clear(); + for (int index = 0; index < size && !isMultiLine; ++index) { + Value const& childValue = value[index]; + isMultiLine = ((childValue.isArray() || childValue.isObject()) && + childValue.size() > 0); + } + if (!isMultiLine) // check if line length > max line length + { + childValues_.reserve(size); + addChildValues_ = true; + int lineLength = 4 + (size - 1) * 2; // '[ ' + ', '*n + ' ]' + for (int index = 0; index < size; ++index) { + if (hasCommentForValue(value[index])) { + isMultiLine = true; + } + writeValue(value[index]); + lineLength += int(childValues_[index].length()); + } + addChildValues_ = false; + isMultiLine = isMultiLine || lineLength >= rightMargin_; + } + return isMultiLine; +} + +void BuiltStyledStreamWriter::pushValue(std::string const& value) { + if (addChildValues_) + childValues_.push_back(value); + else + *sout_ << value; +} + +void BuiltStyledStreamWriter::writeIndent() { + // blep intended this to look at the so-far-written string + // to determine whether we are already indented, but + // with a stream we cannot do that. So we rely on some saved state. + // The caller checks indented_. + + if (!indentation_.empty()) { + // In this case, drop newlines too. 
+ *sout_ << '\n' << indentString_; + } +} + +void BuiltStyledStreamWriter::writeWithIndent(std::string const& value) { + if (!indented_) writeIndent(); + *sout_ << value; + indented_ = false; +} + +void BuiltStyledStreamWriter::indent() { indentString_ += indentation_; } + +void BuiltStyledStreamWriter::unindent() { + assert(indentString_.size() >= indentation_.size()); + indentString_.resize(indentString_.size() - indentation_.size()); +} + +void BuiltStyledStreamWriter::writeCommentBeforeValue(Value const& root) { + if (cs_ == CommentStyle::None) return; + if (!root.hasComment(commentBefore)) + return; + + if (!indented_) writeIndent(); + const std::string& comment = root.getComment(commentBefore); + std::string::const_iterator iter = comment.begin(); + while (iter != comment.end()) { + *sout_ << *iter; + if (*iter == '\n' && + (iter != comment.end() && *(iter + 1) == '/')) + // writeIndent(); // would write extra newline + *sout_ << indentString_; + ++iter; + } + indented_ = false; +} + +void BuiltStyledStreamWriter::writeCommentAfterValueOnSameLine(Value const& root) { + if (cs_ == CommentStyle::None) return; + if (root.hasComment(commentAfterOnSameLine)) + *sout_ << " " + root.getComment(commentAfterOnSameLine); + + if (root.hasComment(commentAfter)) { + writeIndent(); + *sout_ << root.getComment(commentAfter); + } +} + +// static +bool BuiltStyledStreamWriter::hasCommentForValue(const Value& value) { + return value.hasComment(commentBefore) || + value.hasComment(commentAfterOnSameLine) || + value.hasComment(commentAfter); +} + +/////////////// +// StreamWriter + +StreamWriter::StreamWriter() + : sout_(NULL) +{ +} +StreamWriter::~StreamWriter() +{ +} +StreamWriter::Factory::~Factory() +{} +StreamWriterBuilder::StreamWriterBuilder() +{ + setDefaults(&settings_); +} +StreamWriterBuilder::~StreamWriterBuilder() +{} +StreamWriter* StreamWriterBuilder::newStreamWriter() const +{ + std::string indentation = settings_["indentation"].asString(); + std::string cs_str = settings_["commentStyle"].asString(); + bool eyc = settings_["enableYAMLCompatibility"].asBool(); + bool dnp = settings_["dropNullPlaceholders"].asBool(); + bool usf = settings_["useSpecialFloats"].asBool(); + unsigned int pre = settings_["precision"].asUInt(); + CommentStyle::Enum cs = CommentStyle::All; + if (cs_str == "All") { + cs = CommentStyle::All; + } else if (cs_str == "None") { + cs = CommentStyle::None; + } else { + throwRuntimeError("commentStyle must be 'All' or 'None'"); + } + std::string colonSymbol = " : "; + if (eyc) { + colonSymbol = ": "; + } else if (indentation.empty()) { + colonSymbol = ":"; + } + std::string nullSymbol = "null"; + if (dnp) { + nullSymbol = ""; + } + if (pre > 17) pre = 17; + std::string endingLineFeedSymbol = ""; + return new BuiltStyledStreamWriter( + indentation, cs, + colonSymbol, nullSymbol, endingLineFeedSymbol, usf, pre); +} +static void getValidWriterKeys(std::set* valid_keys) +{ + valid_keys->clear(); + valid_keys->insert("indentation"); + valid_keys->insert("commentStyle"); + valid_keys->insert("enableYAMLCompatibility"); + valid_keys->insert("dropNullPlaceholders"); + valid_keys->insert("useSpecialFloats"); + valid_keys->insert("precision"); +} +bool StreamWriterBuilder::validate(Json::Value* invalid) const +{ + Json::Value my_invalid; + if (!invalid) invalid = &my_invalid; // so we do not need to test for NULL + Json::Value& inv = *invalid; + std::set valid_keys; + getValidWriterKeys(&valid_keys); + Value::Members keys = settings_.getMemberNames(); + size_t n = keys.size(); + 
for (size_t i = 0; i < n; ++i) { + std::string const& key = keys[i]; + if (valid_keys.find(key) == valid_keys.end()) { + inv[key] = settings_[key]; + } + } + return 0u == inv.size(); +} +Value& StreamWriterBuilder::operator[](std::string key) +{ + return settings_[key]; +} +// static +void StreamWriterBuilder::setDefaults(Json::Value* settings) +{ + //! [StreamWriterBuilderDefaults] + (*settings)["commentStyle"] = "All"; + (*settings)["indentation"] = "\t"; + (*settings)["enableYAMLCompatibility"] = false; + (*settings)["dropNullPlaceholders"] = false; + (*settings)["useSpecialFloats"] = false; + (*settings)["precision"] = 17; + //! [StreamWriterBuilderDefaults] +} + +std::string writeString(StreamWriter::Factory const& builder, Value const& root) { + std::ostringstream sout; + StreamWriterPtr const writer(builder.newStreamWriter()); + writer->write(root, &sout); + return sout.str(); +} + +std::ostream& operator<<(std::ostream& sout, Value const& root) { + StreamWriterBuilder builder; + StreamWriterPtr const writer(builder.newStreamWriter()); + writer->write(root, &sout); + return sout; +} + +} // namespace Json + +// ////////////////////////////////////////////////////////////////////// +// End of content of file: src/lib_json/json_writer.cpp +// ////////////////////////////////////////////////////////////////////// + + + + + diff --git a/packager/third_party/protobuf/csharp/.gitignore b/packager/third_party/protobuf/csharp/.gitignore new file mode 100644 index 0000000000..c88f741e37 --- /dev/null +++ b/packager/third_party/protobuf/csharp/.gitignore @@ -0,0 +1,36 @@ +# +# Untracked directories +# +src/AddressBook/bin +src/AddressBook/obj +src/Google.Protobuf/bin/ +src/Google.Protobuf/obj/ +src/Google.Protobuf.Conformance/bin/ +src/Google.Protobuf.Conformance/obj/ +src/Google.Protobuf.Test/bin/ +src/Google.Protobuf.Test/obj/ +src/Google.Protobuf.JsonDump/bin/ +src/Google.Protobuf.JsonDump/obj/ +mono/bin +mono/tmp +mono/protoc +build_output +build_temp +build/msbuild*.log +lib/Microsoft.Silverlight.Testing +lib/NUnit + +# +# Untracked files +# +*.user +*.suo +*.nupkg +_ReSharper.* +*.sln.cache +mono/TestResult.xml +mono/.libs +mono/*.exe +mono/*.dll +lib/protoc.exe +*.ncrunch* diff --git a/packager/third_party/protobuf/csharp/CHANGES.txt b/packager/third_party/protobuf/csharp/CHANGES.txt new file mode 100644 index 0000000000..a87cd4d5df --- /dev/null +++ b/packager/third_party/protobuf/csharp/CHANGES.txt @@ -0,0 +1,148 @@ +=============================================================================== +Welcome to the C# port of Google Protocol Buffers, written by Jon Skeet +(skeet@pobox.com) based on the work of many talented people. + +=============================================================================== +RELEASE NOTES - Code imported into Google's main protobuf repository +=============================================================================== + +Everything below note this represents history of protobuf-csharp-port project +before the code was merged into csharp/ subtree of GitHub google/protobuf +repository. +Frozen legacy version of the original project is available in +https://github.com/jskeet/protobuf-csharp-port. + +=============================================================================== +RELEASE NOTES - Version 2.4.1.555 +=============================================================================== + +Changes: +- Upgrade solution format to Visual Studio 2012. 
+- Add the ability to print a builder (not just a message) +- TextGenerator introduces a new overload of PrintTo +- Munge protoc's error format into a VS-C#-compatible output format. +- Work to make ProtoGen clone that acts as a protoc.exe plugin. +- Added the AllowPartiallyTrustedCallers attribute +- Optimized enum parsing. + +Fixes: +- Fix for bug in limited input stream's Position, Introduced Position on + output stream +- Fix for writing a character to a JSON output overflows allocated buffer +- Optimize FromBase64String to return Empty when presented with empty string. +- Use string.Concat instead of operator to avoid potential import problems +- Issue 81: quoting for NUnit parameters. +- Issue 56: NuGet package is noisy +- Issue 70: Portable library project has some invalid Nunit-based code. +- Issue 71: CodedInputStream.ReadBytes go to slow path unnecessarily +- Issue 84: warning CS0219: The variable `size' is assigned but never used + +=============================================================================== +RELEASE NOTES - Version 2.4.1.521 +=============================================================================== + +Changes: +- Add generated_code_attributes option, defaulted to false +- Added support for Portable library +- Added 'Unsafe' static type in ByteString to allow direct buffer access + +Fixes: +- Issue 50: The XML serializer will fail to deserialize a message with empty + child message +- Issue 45: Use of 'item' as a field name causes AmbiguousMatchException +- Issue 49: Generated nested static Types class should be partial +- Issue 38: Disable CLSCompliant warnings (3021) +- Issue 40: proto_path does not work for command-line file names +- Issue 54: should retire all bytes in buffer (bufferSize) +- Issue 43: Fix to correct identical 'umbrella_classname' options from trying + to write to the same filename. + +=============================================================================== +RELEASE NOTES - Version 2.4.1.473 +=============================================================================== + +Features: +- Added option service_generator_type to control service generation with + NONE, GENERIC, INTERFACE, or IRPCDISPATCH +- Added interfaces IRpcDispatch and IRpcServerStub to provide for blocking + services and implementations. +- Added ProtoGen.exe command-line argument "--protoc_dir=" to specify the + location of protoc.exe. +- Extracted interfaces for ICodedInputStream and ICodedOutputStream to allow + custom implementation of writers with both speed and size optimizations. +- Addition of the "Google.ProtoBuffers.Serialization" assembly to support + reading and writing messages to/from XML, JSON, IDictionary<,> and others. +- Several performance related fixes and tweeks +- Issue 3: Add option to mark generated code with attribute +- Issue 20: Support for decorating classes [Serializable] +- Issue 21: Decorate fields with [deprecated=true] as [System.Obsolete] +- Issue 22: Reusable Builder classes +- Issue 24: Support for using Json/Xml formats with ICodedInputStream +- Issue 25: Added support for NuGet packages +- Issue 31: Upgraded protoc.exe and descriptor to 2.4.1 + +Fixes: +- Issue 13: Message with Field same name as message causes uncompilable .cs +- Issue 16: Does not integrate well with other tooling +- Issue 19: Support for negative enum values +- Issue 26: AddRange in GeneratedBuilder iterates twice. +- Issue 27: Remove XML documentation output from test projects to clear + warnings/errors. 
+- Issue 28: Circular message dependencies result in null default values for + Message fields. +- Issue 29: Message classes generated have a public default constructor. You + can disable private ctor generation with the option generate_private_ctor. +- Issue 35: Fixed a bug in ProtoGen handling of arguments with trailing \ +- Big-endian support for float, and double on Silverlight +- Packed and Unpacked parsing allow for all repeated, as per version 2.3 +- Fix for leaving Builder a public ctor on internal classes for use with + generic "where T: new()" constraints. + +Other: +- Changed the code signing key to a privately held key +- Reformatted all code and line-endings to C# defaults +- Reworking of performance benchmarks to produce reliable results, option /v2 +- Issue 34: Silverlight assemblies are now unit tested + +=============================================================================== +RELEASE NOTES - Version 2.3.0.277 +=============================================================================== + +Features: +- Added cls_compliance option to generate attributes indicating + non-CLS-compliance. +- Added file_extension option to control the generated output file's extension. +- Added umbrella_namespace option to place the umbrella class into a nested + namespace to address issues with proto files having the same name as a + message it contains. +- Added output_directory option to set the output path for the source file(s). +- Added ignore_google_protobuf option to avoid generating code for includes + from the google.protobuf package. +- Added the LITE framework (Google.ProtoBuffersLite.dll) and the ability to + generate code with "option optimize_for = LITE_RUNTIME;". +- Added ability to invoke protoc.exe from within ProtoGen.exe. +- Upgraded to protoc.exe (2.3) compiler. + +Fixes: +- Issue 9: Class cannot be static and sealed error +- Issue 12: default value for enumerate fields must be filled out + +Other: +- Rewrite of build using MSbuild instead of NAnt +- Moved to NUnit Version 2.2.8.0 +- Changed to using secure .snk for releases + +=============================================================================== +RELEASE NOTES - Version 0.9.1 +=============================================================================== + +Fixes: +- issue 10: Incorrect encoding of packed fields when serialized + +=============================================================================== +RELEASE NOTES - Version 0.9.0 +=============================================================================== + +- Initial release + +=============================================================================== \ No newline at end of file diff --git a/packager/third_party/protobuf/csharp/Google.Protobuf.Tools.nuspec b/packager/third_party/protobuf/csharp/Google.Protobuf.Tools.nuspec new file mode 100644 index 0000000000..e4240dae87 --- /dev/null +++ b/packager/third_party/protobuf/csharp/Google.Protobuf.Tools.nuspec @@ -0,0 +1,37 @@ + + + + Google.Protobuf.Tools + Google Protocol Buffers tools + Tools for Protocol Buffers - Google's data interchange format. + See project site for more info. + 3.0.0-beta3 + Google Inc. + protobuf-packages + https://github.com/google/protobuf/blob/master/LICENSE + https://github.com/google/protobuf + false + Tools for Protocol Buffers + Copyright 2015, Google Inc. 
+ Protocol Buffers Binary Serialization Format Google proto proto3 + + + + + + + + + + + + + + + + + + + + + diff --git a/packager/third_party/protobuf/csharp/README.md b/packager/third_party/protobuf/csharp/README.md new file mode 100644 index 0000000000..8c3993e014 --- /dev/null +++ b/packager/third_party/protobuf/csharp/README.md @@ -0,0 +1,70 @@ +This directory contains the C# Protocol Buffers runtime library. + +Status: Beta - ready for external testing +========================================= + +Usage +===== + +The easiest way how to use C# protobufs is via the `Google.Protobuf` +NuGet package. Just add the NuGet package to your VS project. + +Besides C# runtime library, the NuGet package also contains +precompiled version of `protoc.exe` and a copy of well known `.proto` +files under the package's `tools` directory. + +To generate C# files from your `.proto` files, invoke `protoc` with the +`--csharp_out` option. + +Supported platforms +=================== + +The runtime library is built as a portable class library, supporting: + +- .NET 4.5 +- Windows 8 +- Windows Phone Silverlight 8 +- Windows Phone 8.1 +- .NET Core + +You should be able to use Protocol Buffers in Visual Studio 2012 and +all later versions. This includes all code generated by `protoc`, +which only uses features from C# 3 and earlier. + +Building +======== + +Open the `src/Google.Protobuf.sln` solution in Visual Studio 2015 or +later. You should be able to run the NUnit test from Test Explorer +(you might need to install NUnit Visual Studio add-in). + +Although *users* of this project are only expected to have Visual +Studio 2012 or later, *developers* of the library are required to +have Visual Studio 2015 or later, as the library uses C# 6 features +in its implementation. These features have no impact when using the +compiled code - they're only relevant when building the +`Google.Protobuf` assembly. + +History of C# protobufs +======================= + +This subtree was originally imported from https://github.com/jskeet/protobuf-csharp-port +and represents the latest development version of C# protobufs, that will now be developed +and maintained by Google. All the development will be done in open, under this repository +(https://github.com/google/protobuf). + +The previous project differs from this project in a number of ways: + +- The old code only supported proto2; the new code only supports +proto3 (so no unknown fields, no required/optional distinction, no +extensions) +- The old code was based on immutable message types and builders for +them +- The old code did not support maps or `oneof` +- The old code had its own JSON representation, whereas the new code +uses the standard protobuf JSON representation +- The old code had no notion of the "well-known types" which have +special support in the new code +- The old project supported some older platforms (such as older +versions of Silverlight) which are not currently supported in the +new project diff --git a/packager/third_party/protobuf/csharp/build_packages.bat b/packager/third_party/protobuf/csharp/build_packages.bat new file mode 100644 index 0000000000..1502f0634b --- /dev/null +++ b/packager/third_party/protobuf/csharp/build_packages.bat @@ -0,0 +1,13 @@ +@rem Builds Google.Protobuf NuGet packages + +@rem Adjust the location of nuget.exe +set NUGET=C:\nuget\nuget.exe + +@rem Build src/Google.Protobuf.sln solution in Release configuration first. 
+%NUGET% pack src\Google.Protobuf\Google.Protobuf.nuspec -Symbols || goto :error + +goto :EOF + +:error +echo Failed! +exit /b %errorlevel% diff --git a/packager/third_party/protobuf/csharp/buildall.sh b/packager/third_party/protobuf/csharp/buildall.sh new file mode 100755 index 0000000000..45af705f38 --- /dev/null +++ b/packager/third_party/protobuf/csharp/buildall.sh @@ -0,0 +1,17 @@ +#!/bin/bash +# Use mono to build solution and run all tests. + +# Adjust these to reflect the location of nunit-console in your system. +NUNIT_CONSOLE=nunit-console + +# The rest you can leave intact +CONFIG=Release +SRC=$(dirname $0)/src + +set -ex + +echo Building the solution. +xbuild /p:Configuration=$CONFIG $SRC/Google.Protobuf.sln + +echo Running tests. +$NUNIT_CONSOLE $SRC/Google.Protobuf.Test/bin/$CONFIG/Google.Protobuf.Test.dll diff --git a/packager/third_party/protobuf/csharp/generate_protos.sh b/packager/third_party/protobuf/csharp/generate_protos.sh new file mode 100755 index 0000000000..d979aa52be --- /dev/null +++ b/packager/third_party/protobuf/csharp/generate_protos.sh @@ -0,0 +1,62 @@ +#!/bin/bash +# Generates C# source files from .proto files. +# You first need to make sure protoc has been built (see instructions on +# building protoc in root of this repository) + +set -ex + +# cd to repository root +pushd $(dirname $0)/.. + +# Protocol buffer compiler to use. If the PROTOC variable is set, +# use that. Otherwise, probe for expected locations under both +# Windows and Unix. +if [ -z "$PROTOC" ]; then + # TODO(jonskeet): Use an array and a for loop instead? + if [ -x cmake/build/Debug/protoc.exe ]; then + PROTOC=cmake/build/Debug/protoc.exe + elif [ -x cmake/build/Release/protoc.exe ]; then + PROTOC=cmake/build/Release/protoc.exe + elif [ -x src/protoc ]; then + PROTOC=src/protoc + else + echo "Unable to find protocol buffer compiler." 
+ exit 1 + fi +fi + +# descriptor.proto and well-known types +$PROTOC -Isrc --csharp_out=csharp/src/Google.Protobuf \ + --csharp_opt=base_namespace=Google.Protobuf \ + src/google/protobuf/descriptor.proto \ + src/google/protobuf/any.proto \ + src/google/protobuf/api.proto \ + src/google/protobuf/duration.proto \ + src/google/protobuf/empty.proto \ + src/google/protobuf/field_mask.proto \ + src/google/protobuf/source_context.proto \ + src/google/protobuf/struct.proto \ + src/google/protobuf/timestamp.proto \ + src/google/protobuf/type.proto \ + src/google/protobuf/wrappers.proto + +# Test protos where the namespace matches the target location +$PROTOC -Isrc --csharp_out=csharp/src/Google.Protobuf.Test \ + --csharp_opt=base_namespace=Google.Protobuf \ + src/google/protobuf/map_unittest_proto3.proto \ + src/google/protobuf/unittest_proto3.proto \ + src/google/protobuf/unittest_import_proto3.proto \ + src/google/protobuf/unittest_import_public_proto3.proto \ + src/google/protobuf/unittest_well_known_types.proto + +# Different base namespace to the protos above +$PROTOC -Icsharp/protos --csharp_out=csharp/src/Google.Protobuf.Test \ + --csharp_opt=base_namespace=UnitTest.Issues \ + csharp/protos/unittest_issues.proto + +# AddressBook sample protos +$PROTOC -Iexamples --csharp_out=csharp/src/AddressBook \ + examples/addressbook.proto + +$PROTOC -Iconformance -Isrc --csharp_out=csharp/src/Google.Protobuf.Conformance \ + conformance/conformance.proto diff --git a/packager/third_party/protobuf/csharp/keys/Google.Protobuf.public.snk b/packager/third_party/protobuf/csharp/keys/Google.Protobuf.public.snk new file mode 100644 index 0000000000..59cd36985f Binary files /dev/null and b/packager/third_party/protobuf/csharp/keys/Google.Protobuf.public.snk differ diff --git a/packager/third_party/protobuf/csharp/keys/Google.Protobuf.snk b/packager/third_party/protobuf/csharp/keys/Google.Protobuf.snk new file mode 100644 index 0000000000..7515443ca9 Binary files /dev/null and b/packager/third_party/protobuf/csharp/keys/Google.Protobuf.snk differ diff --git a/packager/third_party/protobuf/csharp/keys/README.md b/packager/third_party/protobuf/csharp/keys/README.md new file mode 100644 index 0000000000..ede673573e --- /dev/null +++ b/packager/third_party/protobuf/csharp/keys/README.md @@ -0,0 +1,9 @@ +Contents +-------- + +- Google.Protobuf.public.snk: + Public key to verify strong name of Google.Protobuf assemblies. +- Google.Protobuf.snk: + Signing key to provide strong name of Google.Protobuf assemblies. + As per [Microsoft guidance](https://msdn.microsoft.com/en-us/library/wd40t7ad(v=vs.110).aspx) + signing key should be checked into the repository. diff --git a/packager/third_party/protobuf/csharp/protos/unittest_issues.proto b/packager/third_party/protobuf/csharp/protos/unittest_issues.proto new file mode 100644 index 0000000000..6c9f76344a --- /dev/null +++ b/packager/third_party/protobuf/csharp/protos/unittest_issues.proto @@ -0,0 +1,126 @@ +syntax = "proto3"; + +// These proto descriptors have at one time been reported as an issue or defect. +// They are kept here to replicate the issue, and continue to verify the fix. + +// Issue: Non-"Google.Protobuffers" namespace will ensure that protobuffer library types are qualified +option csharp_namespace = "UnitTest.Issues.TestProtos"; + +package unittest_issues; +option optimize_for = SPEED; + +// Issue 307: when generating doubly-nested types, any references +// should be of the form A.Types.B.Types.C. 
+message Issue307 { + message NestedOnce { + message NestedTwice { + } + } +} + +// Old issue 13: http://code.google.com/p/protobuf-csharp-port/issues/detail?id=13 +// New issue 309: https://github.com/google/protobuf/issues/309 + +// message A { +// optional int32 _A = 1; +// } + +// message B { +// optional int32 B_ = 1; +// } + +//message AB { +// optional int32 a_b = 1; +//} + +// Similar issue with numeric names +// Java code failed too, so probably best for this to be a restriction. +// See https://github.com/google/protobuf/issues/308 +// message NumberField { +// optional int32 _01 = 1; +// } + +// issue 19 - negative enum values + +enum NegativeEnum { + NEGATIVE_ENUM_ZERO = 0; + FiveBelow = -5; + MinusOne = -1; +} + +message NegativeEnumMessage { + NegativeEnum value = 1; + repeated NegativeEnum values = 2 [packed = false]; + repeated NegativeEnum packed_values = 3 [packed=true]; +} + +// Issue 21: http://code.google.com/p/protobuf-csharp-port/issues/detail?id=21 +// Decorate fields with [deprecated=true] as [System.Obsolete] + +message DeprecatedChild { +} + +enum DeprecatedEnum { + DEPRECATED_ZERO = 0; + one = 1; +} + +message DeprecatedFieldsMessage { + int32 PrimitiveValue = 1 [deprecated = true]; + repeated int32 PrimitiveArray = 2 [deprecated = true]; + + DeprecatedChild MessageValue = 3 [deprecated = true]; + repeated DeprecatedChild MessageArray = 4 [deprecated = true]; + + DeprecatedEnum EnumValue = 5 [deprecated = true]; + repeated DeprecatedEnum EnumArray = 6 [deprecated = true]; +} + +// Issue 45: http://code.google.com/p/protobuf-csharp-port/issues/detail?id=45 +message ItemField { + int32 item = 1; +} + +message ReservedNames { + // Force a nested type called Types + message SomeNestedType { + } + + int32 types = 1; + int32 descriptor = 2; +} + +message TestJsonFieldOrdering { + // These fields are deliberately not declared in numeric + // order, and the oneof fields aren't contiguous either. + // This allows for reasonably robust tests of JSON output + // ordering. + // TestFieldOrderings in unittest_proto3.proto is similar, + // but doesn't include oneofs. + // TODO: Consider adding oneofs to TestFieldOrderings, although + // that will require fixing other tests in multiple platforms. + // Alternatively, consider just adding this to + // unittest_proto3.proto if multiple platforms want it. + + int32 plain_int32 = 4; + + oneof o1 { + string o1_string = 2; + int32 o1_int32 = 5; + } + + string plain_string = 1; + + oneof o2 { + int32 o2_int32 = 6; + string o2_string = 3; + } + +} + +message TestJsonName { + // Message for testing the effects for of the json_name option + string name = 1; + string description = 2 [json_name = "desc"]; + string guid = 3 [json_name = "exid"]; +} diff --git a/packager/third_party/protobuf/csharp/src/AddressBook/AddPerson.cs b/packager/third_party/protobuf/csharp/src/AddressBook/AddPerson.cs new file mode 100644 index 0000000000..484f1ea25d --- /dev/null +++ b/packager/third_party/protobuf/csharp/src/AddressBook/AddPerson.cs @@ -0,0 +1,132 @@ +#region Copyright notice and license +// Protocol Buffers - Google's data interchange format +// Copyright 2008 Google Inc. All rights reserved. +// https://developers.google.com/protocol-buffers/ +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. 
+// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +#endregion + +using System; +using System.IO; + +namespace Google.Protobuf.Examples.AddressBook +{ + internal class AddPerson + { + /// + /// Builds a person based on user input + /// + private static Person PromptForAddress(TextReader input, TextWriter output) + { + Person person = new Person(); + + output.Write("Enter person ID: "); + person.Id = int.Parse(input.ReadLine()); + + output.Write("Enter name: "); + person.Name = input.ReadLine(); + + output.Write("Enter email address (blank for none): "); + string email = input.ReadLine(); + if (email.Length > 0) + { + person.Email = email; + } + + while (true) + { + output.Write("Enter a phone number (or leave blank to finish): "); + string number = input.ReadLine(); + if (number.Length == 0) + { + break; + } + + Person.Types.PhoneNumber phoneNumber = new Person.Types.PhoneNumber { Number = number }; + + output.Write("Is this a mobile, home, or work phone? "); + String type = input.ReadLine(); + switch (type) + { + case "mobile": + phoneNumber.Type = Person.Types.PhoneType.Mobile; + break; + case "home": + phoneNumber.Type = Person.Types.PhoneType.Home; + break; + case "work": + phoneNumber.Type = Person.Types.PhoneType.Work; + break; + default: + output.Write("Unknown phone type. Using default."); + break; + } + + person.Phones.Add(phoneNumber); + } + return person; + } + + /// + /// Entry point - loads an existing addressbook or creates a new one, + /// then writes it back to the file. + /// + public static int Main(string[] args) + { + if (args.Length != 1) + { + Console.Error.WriteLine("Usage: AddPerson ADDRESS_BOOK_FILE"); + return -1; + } + + AddressBook addressBook; + + if (File.Exists(args[0])) + { + using (Stream file = File.OpenRead(args[0])) + { + addressBook = AddressBook.Parser.ParseFrom(file); + } + } + else + { + Console.WriteLine("{0}: File not found. Creating a new file.", args[0]); + addressBook = new AddressBook(); + } + + // Add an address. + addressBook.People.Add(PromptForAddress(Console.In, Console.Out)); + + // Write the new address book back to disk. 
+ using (Stream output = File.OpenWrite(args[0])) + { + addressBook.WriteTo(output); + } + return 0; + } + } +} \ No newline at end of file diff --git a/packager/third_party/protobuf/csharp/src/AddressBook/AddressBook.csproj b/packager/third_party/protobuf/csharp/src/AddressBook/AddressBook.csproj new file mode 100644 index 0000000000..021647fc8c --- /dev/null +++ b/packager/third_party/protobuf/csharp/src/AddressBook/AddressBook.csproj @@ -0,0 +1,75 @@ + + + + Debug + AnyCPU + 9.0.30729 + 2.0 + {A31F5FB2-4FF3-432A-B35B-5CD203606311} + Exe + Properties + Google.Protobuf.Examples.AddressBook + AddressBook + v4.5 + 512 + Google.Protobuf.Examples.AddressBook.Program + + + + + true + full + false + bin\Debug + obj\Debug\ + DEBUG;TRACE + prompt + 4 + true + Off + false + + + pdbonly + true + bin\Release + obj\Release\ + TRACE + prompt + 4 + true + Off + false + + + + + + + + + + + + + + + + + + {6908BDCE-D925-43F3-94AC-A531E6DF2591} + Google.Protobuf + + + + + + + + \ No newline at end of file diff --git a/packager/third_party/protobuf/csharp/src/AddressBook/Addressbook.cs b/packager/third_party/protobuf/csharp/src/AddressBook/Addressbook.cs new file mode 100644 index 0000000000..362e1cb6a2 --- /dev/null +++ b/packager/third_party/protobuf/csharp/src/AddressBook/Addressbook.cs @@ -0,0 +1,473 @@ +// Generated by the protocol buffer compiler. DO NOT EDIT! +// source: addressbook.proto +#pragma warning disable 1591, 0612, 3021 +#region Designer generated code + +using pb = global::Google.Protobuf; +using pbc = global::Google.Protobuf.Collections; +using pbr = global::Google.Protobuf.Reflection; +using scg = global::System.Collections.Generic; +namespace Google.Protobuf.Examples.AddressBook { + + /// Holder for reflection information generated from addressbook.proto + [global::System.Diagnostics.DebuggerNonUserCodeAttribute()] + public static partial class AddressbookReflection { + + #region Descriptor + /// File descriptor for addressbook.proto + public static pbr::FileDescriptor Descriptor { + get { return descriptor; } + } + private static pbr::FileDescriptor descriptor; + + static AddressbookReflection() { + byte[] descriptorData = global::System.Convert.FromBase64String( + string.Concat( + "ChFhZGRyZXNzYm9vay5wcm90bxIIdHV0b3JpYWwi1QEKBlBlcnNvbhIMCgRu", + "YW1lGAEgASgJEgoKAmlkGAIgASgFEg0KBWVtYWlsGAMgASgJEiwKBnBob25l", + "cxgEIAMoCzIcLnR1dG9yaWFsLlBlcnNvbi5QaG9uZU51bWJlchpHCgtQaG9u", + "ZU51bWJlchIOCgZudW1iZXIYASABKAkSKAoEdHlwZRgCIAEoDjIaLnR1dG9y", + "aWFsLlBlcnNvbi5QaG9uZVR5cGUiKwoJUGhvbmVUeXBlEgoKBk1PQklMRRAA", + "EggKBEhPTUUQARIICgRXT1JLEAIiLwoLQWRkcmVzc0Jvb2sSIAoGcGVvcGxl", + "GAEgAygLMhAudHV0b3JpYWwuUGVyc29uQlAKFGNvbS5leGFtcGxlLnR1dG9y", + "aWFsQhFBZGRyZXNzQm9va1Byb3Rvc6oCJEdvb2dsZS5Qcm90b2J1Zi5FeGFt", + "cGxlcy5BZGRyZXNzQm9va2IGcHJvdG8z")); + descriptor = pbr::FileDescriptor.FromGeneratedCode(descriptorData, + new pbr::FileDescriptor[] { }, + new pbr::GeneratedClrTypeInfo(null, new pbr::GeneratedClrTypeInfo[] { + new pbr::GeneratedClrTypeInfo(typeof(global::Google.Protobuf.Examples.AddressBook.Person), global::Google.Protobuf.Examples.AddressBook.Person.Parser, new[]{ "Name", "Id", "Email", "Phones" }, null, new[]{ typeof(global::Google.Protobuf.Examples.AddressBook.Person.Types.PhoneType) }, new pbr::GeneratedClrTypeInfo[] { new pbr::GeneratedClrTypeInfo(typeof(global::Google.Protobuf.Examples.AddressBook.Person.Types.PhoneNumber), global::Google.Protobuf.Examples.AddressBook.Person.Types.PhoneNumber.Parser, new[]{ "Number", "Type" }, null, null, null)}), + new 
pbr::GeneratedClrTypeInfo(typeof(global::Google.Protobuf.Examples.AddressBook.AddressBook), global::Google.Protobuf.Examples.AddressBook.AddressBook.Parser, new[]{ "People" }, null, null, null) + })); + } + #endregion + + } + #region Messages + /// + /// [START messages] + /// + [global::System.Diagnostics.DebuggerNonUserCodeAttribute()] + public sealed partial class Person : pb::IMessage { + private static readonly pb::MessageParser _parser = new pb::MessageParser(() => new Person()); + public static pb::MessageParser Parser { get { return _parser; } } + + public static pbr::MessageDescriptor Descriptor { + get { return global::Google.Protobuf.Examples.AddressBook.AddressbookReflection.Descriptor.MessageTypes[0]; } + } + + pbr::MessageDescriptor pb::IMessage.Descriptor { + get { return Descriptor; } + } + + public Person() { + OnConstruction(); + } + + partial void OnConstruction(); + + public Person(Person other) : this() { + name_ = other.name_; + id_ = other.id_; + email_ = other.email_; + phones_ = other.phones_.Clone(); + } + + public Person Clone() { + return new Person(this); + } + + /// Field number for the "name" field. + public const int NameFieldNumber = 1; + private string name_ = ""; + public string Name { + get { return name_; } + set { + name_ = pb::ProtoPreconditions.CheckNotNull(value, "value"); + } + } + + /// Field number for the "id" field. + public const int IdFieldNumber = 2; + private int id_; + /// + /// Unique ID number for this person. + /// + public int Id { + get { return id_; } + set { + id_ = value; + } + } + + /// Field number for the "email" field. + public const int EmailFieldNumber = 3; + private string email_ = ""; + public string Email { + get { return email_; } + set { + email_ = pb::ProtoPreconditions.CheckNotNull(value, "value"); + } + } + + /// Field number for the "phones" field. 
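      // The raw tag passed to FieldCodec.ForMessage below is (field_number << 3) | wire_type:
      // field 4 with the length-delimited wire type 2 gives (4 << 3) | 2 = 34, just as
      // WriteTo further down emits WriteRawTag(10), (16) and (26) for the name, id and
      // email fields (numbers 1, 2 and 3).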
+ public const int PhonesFieldNumber = 4; + private static readonly pb::FieldCodec _repeated_phones_codec + = pb::FieldCodec.ForMessage(34, global::Google.Protobuf.Examples.AddressBook.Person.Types.PhoneNumber.Parser); + private readonly pbc::RepeatedField phones_ = new pbc::RepeatedField(); + public pbc::RepeatedField Phones { + get { return phones_; } + } + + public override bool Equals(object other) { + return Equals(other as Person); + } + + public bool Equals(Person other) { + if (ReferenceEquals(other, null)) { + return false; + } + if (ReferenceEquals(other, this)) { + return true; + } + if (Name != other.Name) return false; + if (Id != other.Id) return false; + if (Email != other.Email) return false; + if(!phones_.Equals(other.phones_)) return false; + return true; + } + + public override int GetHashCode() { + int hash = 1; + if (Name.Length != 0) hash ^= Name.GetHashCode(); + if (Id != 0) hash ^= Id.GetHashCode(); + if (Email.Length != 0) hash ^= Email.GetHashCode(); + hash ^= phones_.GetHashCode(); + return hash; + } + + public override string ToString() { + return pb::JsonFormatter.ToDiagnosticString(this); + } + + public void WriteTo(pb::CodedOutputStream output) { + if (Name.Length != 0) { + output.WriteRawTag(10); + output.WriteString(Name); + } + if (Id != 0) { + output.WriteRawTag(16); + output.WriteInt32(Id); + } + if (Email.Length != 0) { + output.WriteRawTag(26); + output.WriteString(Email); + } + phones_.WriteTo(output, _repeated_phones_codec); + } + + public int CalculateSize() { + int size = 0; + if (Name.Length != 0) { + size += 1 + pb::CodedOutputStream.ComputeStringSize(Name); + } + if (Id != 0) { + size += 1 + pb::CodedOutputStream.ComputeInt32Size(Id); + } + if (Email.Length != 0) { + size += 1 + pb::CodedOutputStream.ComputeStringSize(Email); + } + size += phones_.CalculateSize(_repeated_phones_codec); + return size; + } + + public void MergeFrom(Person other) { + if (other == null) { + return; + } + if (other.Name.Length != 0) { + Name = other.Name; + } + if (other.Id != 0) { + Id = other.Id; + } + if (other.Email.Length != 0) { + Email = other.Email; + } + phones_.Add(other.phones_); + } + + public void MergeFrom(pb::CodedInputStream input) { + uint tag; + while ((tag = input.ReadTag()) != 0) { + switch(tag) { + default: + input.SkipLastField(); + break; + case 10: { + Name = input.ReadString(); + break; + } + case 16: { + Id = input.ReadInt32(); + break; + } + case 26: { + Email = input.ReadString(); + break; + } + case 34: { + phones_.AddEntriesFrom(input, _repeated_phones_codec); + break; + } + } + } + } + + #region Nested types + /// Container for nested types declared in the Person message type. 
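    // The C# code generator places nested enums and messages inside this static
    // "Types" holder class, so PhoneType and PhoneNumber are addressed as
    // Person.Types.PhoneType and Person.Types.PhoneNumber without clashing with
    // the members generated on Person itself.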
+ [global::System.Diagnostics.DebuggerNonUserCodeAttribute()] + public static partial class Types { + public enum PhoneType { + [pbr::OriginalName("MOBILE")] Mobile = 0, + [pbr::OriginalName("HOME")] Home = 1, + [pbr::OriginalName("WORK")] Work = 2, + } + + [global::System.Diagnostics.DebuggerNonUserCodeAttribute()] + public sealed partial class PhoneNumber : pb::IMessage { + private static readonly pb::MessageParser _parser = new pb::MessageParser(() => new PhoneNumber()); + public static pb::MessageParser Parser { get { return _parser; } } + + public static pbr::MessageDescriptor Descriptor { + get { return global::Google.Protobuf.Examples.AddressBook.Person.Descriptor.NestedTypes[0]; } + } + + pbr::MessageDescriptor pb::IMessage.Descriptor { + get { return Descriptor; } + } + + public PhoneNumber() { + OnConstruction(); + } + + partial void OnConstruction(); + + public PhoneNumber(PhoneNumber other) : this() { + number_ = other.number_; + type_ = other.type_; + } + + public PhoneNumber Clone() { + return new PhoneNumber(this); + } + + /// Field number for the "number" field. + public const int NumberFieldNumber = 1; + private string number_ = ""; + public string Number { + get { return number_; } + set { + number_ = pb::ProtoPreconditions.CheckNotNull(value, "value"); + } + } + + /// Field number for the "type" field. + public const int TypeFieldNumber = 2; + private global::Google.Protobuf.Examples.AddressBook.Person.Types.PhoneType type_ = 0; + public global::Google.Protobuf.Examples.AddressBook.Person.Types.PhoneType Type { + get { return type_; } + set { + type_ = value; + } + } + + public override bool Equals(object other) { + return Equals(other as PhoneNumber); + } + + public bool Equals(PhoneNumber other) { + if (ReferenceEquals(other, null)) { + return false; + } + if (ReferenceEquals(other, this)) { + return true; + } + if (Number != other.Number) return false; + if (Type != other.Type) return false; + return true; + } + + public override int GetHashCode() { + int hash = 1; + if (Number.Length != 0) hash ^= Number.GetHashCode(); + if (Type != 0) hash ^= Type.GetHashCode(); + return hash; + } + + public override string ToString() { + return pb::JsonFormatter.ToDiagnosticString(this); + } + + public void WriteTo(pb::CodedOutputStream output) { + if (Number.Length != 0) { + output.WriteRawTag(10); + output.WriteString(Number); + } + if (Type != 0) { + output.WriteRawTag(16); + output.WriteEnum((int) Type); + } + } + + public int CalculateSize() { + int size = 0; + if (Number.Length != 0) { + size += 1 + pb::CodedOutputStream.ComputeStringSize(Number); + } + if (Type != 0) { + size += 1 + pb::CodedOutputStream.ComputeEnumSize((int) Type); + } + return size; + } + + public void MergeFrom(PhoneNumber other) { + if (other == null) { + return; + } + if (other.Number.Length != 0) { + Number = other.Number; + } + if (other.Type != 0) { + Type = other.Type; + } + } + + public void MergeFrom(pb::CodedInputStream input) { + uint tag; + while ((tag = input.ReadTag()) != 0) { + switch(tag) { + default: + input.SkipLastField(); + break; + case 10: { + Number = input.ReadString(); + break; + } + case 16: { + type_ = (global::Google.Protobuf.Examples.AddressBook.Person.Types.PhoneType) input.ReadEnum(); + break; + } + } + } + } + + } + + } + #endregion + + } + + /// + /// Our address book file is just one of these. 
+ /// + [global::System.Diagnostics.DebuggerNonUserCodeAttribute()] + public sealed partial class AddressBook : pb::IMessage { + private static readonly pb::MessageParser _parser = new pb::MessageParser(() => new AddressBook()); + public static pb::MessageParser Parser { get { return _parser; } } + + public static pbr::MessageDescriptor Descriptor { + get { return global::Google.Protobuf.Examples.AddressBook.AddressbookReflection.Descriptor.MessageTypes[1]; } + } + + pbr::MessageDescriptor pb::IMessage.Descriptor { + get { return Descriptor; } + } + + public AddressBook() { + OnConstruction(); + } + + partial void OnConstruction(); + + public AddressBook(AddressBook other) : this() { + people_ = other.people_.Clone(); + } + + public AddressBook Clone() { + return new AddressBook(this); + } + + /// Field number for the "people" field. + public const int PeopleFieldNumber = 1; + private static readonly pb::FieldCodec _repeated_people_codec + = pb::FieldCodec.ForMessage(10, global::Google.Protobuf.Examples.AddressBook.Person.Parser); + private readonly pbc::RepeatedField people_ = new pbc::RepeatedField(); + public pbc::RepeatedField People { + get { return people_; } + } + + public override bool Equals(object other) { + return Equals(other as AddressBook); + } + + public bool Equals(AddressBook other) { + if (ReferenceEquals(other, null)) { + return false; + } + if (ReferenceEquals(other, this)) { + return true; + } + if(!people_.Equals(other.people_)) return false; + return true; + } + + public override int GetHashCode() { + int hash = 1; + hash ^= people_.GetHashCode(); + return hash; + } + + public override string ToString() { + return pb::JsonFormatter.ToDiagnosticString(this); + } + + public void WriteTo(pb::CodedOutputStream output) { + people_.WriteTo(output, _repeated_people_codec); + } + + public int CalculateSize() { + int size = 0; + size += people_.CalculateSize(_repeated_people_codec); + return size; + } + + public void MergeFrom(AddressBook other) { + if (other == null) { + return; + } + people_.Add(other.people_); + } + + public void MergeFrom(pb::CodedInputStream input) { + uint tag; + while ((tag = input.ReadTag()) != 0) { + switch(tag) { + default: + input.SkipLastField(); + break; + case 10: { + people_.AddEntriesFrom(input, _repeated_people_codec); + break; + } + } + } + } + + } + + #endregion + +} + +#endregion Designer generated code diff --git a/packager/third_party/protobuf/csharp/src/AddressBook/ListPeople.cs b/packager/third_party/protobuf/csharp/src/AddressBook/ListPeople.cs new file mode 100644 index 0000000000..71572289d4 --- /dev/null +++ b/packager/third_party/protobuf/csharp/src/AddressBook/ListPeople.cs @@ -0,0 +1,99 @@ +#region Copyright notice and license +// Protocol Buffers - Google's data interchange format +// Copyright 2008 Google Inc. All rights reserved. +// https://developers.google.com/protocol-buffers/ +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. 
nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +#endregion + +using System; +using System.IO; + +namespace Google.Protobuf.Examples.AddressBook +{ + internal class ListPeople + { + /// + /// Iterates though all people in the AddressBook and prints info about them. + /// + private static void Print(AddressBook addressBook) + { + foreach (Person person in addressBook.People) + { + Console.WriteLine("Person ID: {0}", person.Id); + Console.WriteLine(" Name: {0}", person.Name); + if (person.Email != "") + { + Console.WriteLine(" E-mail address: {0}", person.Email); + } + + foreach (Person.Types.PhoneNumber phoneNumber in person.Phones) + { + switch (phoneNumber.Type) + { + case Person.Types.PhoneType.Mobile: + Console.Write(" Mobile phone #: "); + break; + case Person.Types.PhoneType.Home: + Console.Write(" Home phone #: "); + break; + case Person.Types.PhoneType.Work: + Console.Write(" Work phone #: "); + break; + } + Console.WriteLine(phoneNumber.Number); + } + } + } + + /// + /// Entry point - loads the addressbook and then displays it. + /// + public static int Main(string[] args) + { + if (args.Length != 1) + { + Console.Error.WriteLine("Usage: ListPeople ADDRESS_BOOK_FILE"); + return 1; + } + + if (!File.Exists(args[0])) + { + Console.WriteLine("{0} doesn't exist. Add a person to create the file first.", args[0]); + return 0; + } + + // Read the existing address book. + using (Stream stream = File.OpenRead(args[0])) + { + AddressBook addressBook = AddressBook.Parser.ParseFrom(stream); + Print(addressBook); + } + return 0; + } + } +} \ No newline at end of file diff --git a/packager/third_party/protobuf/csharp/src/AddressBook/Program.cs b/packager/third_party/protobuf/csharp/src/AddressBook/Program.cs new file mode 100644 index 0000000000..8164f44179 --- /dev/null +++ b/packager/third_party/protobuf/csharp/src/AddressBook/Program.cs @@ -0,0 +1,95 @@ +#region Copyright notice and license +// Protocol Buffers - Google's data interchange format +// Copyright 2008 Google Inc. All rights reserved. +// https://developers.google.com/protocol-buffers/ +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. 
nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +#endregion + +using System; + +namespace Google.Protobuf.Examples.AddressBook +{ + /// + /// Entry point. Repeatedly prompts user for an action to take, delegating actual behaviour + /// to individual actions. Each action has its own Main method, so that it can be used as an + /// invidual complete program. + /// + internal class Program + { + private static int Main(string[] args) + { + if (args.Length > 1) + { + Console.Error.WriteLine("Usage: AddressBook [file]"); + Console.Error.WriteLine("If the filename isn't specified, \"addressbook.data\" is used instead."); + return 1; + } + string addressBookFile = args.Length > 0 ? args[0] : "addressbook.data"; + + bool stopping = false; + while (!stopping) + { + Console.WriteLine("Options:"); + Console.WriteLine(" L: List contents"); + Console.WriteLine(" A: Add new person"); + Console.WriteLine(" Q: Quit"); + Console.Write("Action? "); + Console.Out.Flush(); + char choice = Console.ReadKey().KeyChar; + Console.WriteLine(); + try + { + switch (choice) + { + case 'A': + case 'a': + AddPerson.Main(new string[] {addressBookFile}); + break; + case 'L': + case 'l': + ListPeople.Main(new string[] {addressBookFile}); + break; + case 'Q': + case 'q': + stopping = true; + break; + default: + Console.WriteLine("Unknown option: {0}", choice); + break; + } + } + catch (Exception e) + { + Console.WriteLine("Exception executing action: {0}", e); + } + Console.WriteLine(); + } + return 0; + } + } +} \ No newline at end of file diff --git a/packager/third_party/protobuf/csharp/src/AddressBook/Properties/AssemblyInfo.cs b/packager/third_party/protobuf/csharp/src/AddressBook/Properties/AssemblyInfo.cs new file mode 100644 index 0000000000..1452f7a411 --- /dev/null +++ b/packager/third_party/protobuf/csharp/src/AddressBook/Properties/AssemblyInfo.cs @@ -0,0 +1,18 @@ +using System.Reflection; +using System.Runtime.InteropServices; + +// General Information about an assembly is controlled through the following +// set of attributes. Change these attribute values to modify the information +// associated with an assembly. 
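// AssemblyVersion below contributes to the assembly's binding identity, while
// AssemblyFileVersion is the informational Win32 file version; both are pinned
// to 3.0.0.0 for this example project.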
+ +[assembly: AssemblyTitle("AddressBook")] +[assembly: AssemblyDescription("")] +[assembly: AssemblyConfiguration("")] +[assembly: AssemblyCompany("")] +[assembly: AssemblyProduct("AddressBook")] +[assembly: AssemblyCopyright("Copyright © 2015")] +[assembly: AssemblyTrademark("")] +[assembly: AssemblyCulture("")] + +[assembly: AssemblyVersion("3.0.0.0")] +[assembly: AssemblyFileVersion("3.0.0.0")] diff --git a/packager/third_party/protobuf/csharp/src/AddressBook/SampleUsage.cs b/packager/third_party/protobuf/csharp/src/AddressBook/SampleUsage.cs new file mode 100644 index 0000000000..aad7d70027 --- /dev/null +++ b/packager/third_party/protobuf/csharp/src/AddressBook/SampleUsage.cs @@ -0,0 +1,73 @@ +#region Copyright notice and license +// Protocol Buffers - Google's data interchange format +// Copyright 2008 Google Inc. All rights reserved. +// https://developers.google.com/protocol-buffers/ +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
+#endregion + +using System; +using System.IO; + +namespace Google.Protobuf.Examples.AddressBook +{ + internal class SampleUsage + { + private static void Main() + { + byte[] bytes; + // Create a new person + Person person = new Person + { + Id = 1, + Name = "Foo", + Email = "foo@bar", + Phones = { new Person.Types.PhoneNumber { Number = "555-1212" } } + }; + using (MemoryStream stream = new MemoryStream()) + { + // Save the person to a stream + person.WriteTo(stream); + bytes = stream.ToArray(); + } + Person copy = Person.Parser.ParseFrom(bytes); + + AddressBook book = new AddressBook + { + People = { copy } + }; + bytes = book.ToByteArray(); + // And read the address book back again + AddressBook restored = AddressBook.Parser.ParseFrom(bytes); + // The message performs a deep-comparison on equality: + if (restored.People.Count != 1 || !person.Equals(restored.People[0])) + { + throw new ApplicationException("There is a bad person in here!"); + } + } + } +} \ No newline at end of file diff --git a/packager/third_party/protobuf/csharp/src/AddressBook/app.config b/packager/third_party/protobuf/csharp/src/AddressBook/app.config new file mode 100644 index 0000000000..a80813afe3 --- /dev/null +++ b/packager/third_party/protobuf/csharp/src/AddressBook/app.config @@ -0,0 +1,3 @@ + + + diff --git a/packager/third_party/protobuf/csharp/src/Google.Protobuf.Conformance/App.config b/packager/third_party/protobuf/csharp/src/Google.Protobuf.Conformance/App.config new file mode 100644 index 0000000000..8e15646352 --- /dev/null +++ b/packager/third_party/protobuf/csharp/src/Google.Protobuf.Conformance/App.config @@ -0,0 +1,6 @@ + + + + + + \ No newline at end of file diff --git a/packager/third_party/protobuf/csharp/src/Google.Protobuf.Conformance/Conformance.cs b/packager/third_party/protobuf/csharp/src/Google.Protobuf.Conformance/Conformance.cs new file mode 100644 index 0000000000..1674a6734f --- /dev/null +++ b/packager/third_party/protobuf/csharp/src/Google.Protobuf.Conformance/Conformance.cs @@ -0,0 +1,3708 @@ +// Generated by the protocol buffer compiler. DO NOT EDIT! 
+// source: conformance.proto +#pragma warning disable 1591, 0612, 3021 +#region Designer generated code + +using pb = global::Google.Protobuf; +using pbc = global::Google.Protobuf.Collections; +using pbr = global::Google.Protobuf.Reflection; +using scg = global::System.Collections.Generic; +namespace Conformance { + + /// Holder for reflection information generated from conformance.proto + [global::System.Diagnostics.DebuggerNonUserCodeAttribute()] + public static partial class ConformanceReflection { + + #region Descriptor + /// File descriptor for conformance.proto + public static pbr::FileDescriptor Descriptor { + get { return descriptor; } + } + private static pbr::FileDescriptor descriptor; + + static ConformanceReflection() { + byte[] descriptorData = global::System.Convert.FromBase64String( + string.Concat( + "ChFjb25mb3JtYW5jZS5wcm90bxILY29uZm9ybWFuY2UaGWdvb2dsZS9wcm90", + "b2J1Zi9hbnkucHJvdG8aHmdvb2dsZS9wcm90b2J1Zi9kdXJhdGlvbi5wcm90", + "bxogZ29vZ2xlL3Byb3RvYnVmL2ZpZWxkX21hc2sucHJvdG8aHGdvb2dsZS9w", + "cm90b2J1Zi9zdHJ1Y3QucHJvdG8aH2dvb2dsZS9wcm90b2J1Zi90aW1lc3Rh", + "bXAucHJvdG8aHmdvb2dsZS9wcm90b2J1Zi93cmFwcGVycy5wcm90byKNAQoS", + "Q29uZm9ybWFuY2VSZXF1ZXN0EhoKEHByb3RvYnVmX3BheWxvYWQYASABKAxI", + "ABIWCgxqc29uX3BheWxvYWQYAiABKAlIABI4ChdyZXF1ZXN0ZWRfb3V0cHV0", + "X2Zvcm1hdBgDIAEoDjIXLmNvbmZvcm1hbmNlLldpcmVGb3JtYXRCCQoHcGF5", + "bG9hZCKxAQoTQ29uZm9ybWFuY2VSZXNwb25zZRIVCgtwYXJzZV9lcnJvchgB", + "IAEoCUgAEhkKD3NlcmlhbGl6ZV9lcnJvchgGIAEoCUgAEhcKDXJ1bnRpbWVf", + "ZXJyb3IYAiABKAlIABIaChBwcm90b2J1Zl9wYXlsb2FkGAMgASgMSAASFgoM", + "anNvbl9wYXlsb2FkGAQgASgJSAASEQoHc2tpcHBlZBgFIAEoCUgAQggKBnJl", + "c3VsdCLVMgoMVGVzdEFsbFR5cGVzEhYKDm9wdGlvbmFsX2ludDMyGAEgASgF", + "EhYKDm9wdGlvbmFsX2ludDY0GAIgASgDEhcKD29wdGlvbmFsX3VpbnQzMhgD", + "IAEoDRIXCg9vcHRpb25hbF91aW50NjQYBCABKAQSFwoPb3B0aW9uYWxfc2lu", + "dDMyGAUgASgREhcKD29wdGlvbmFsX3NpbnQ2NBgGIAEoEhIYChBvcHRpb25h", + "bF9maXhlZDMyGAcgASgHEhgKEG9wdGlvbmFsX2ZpeGVkNjQYCCABKAYSGQoR", + "b3B0aW9uYWxfc2ZpeGVkMzIYCSABKA8SGQoRb3B0aW9uYWxfc2ZpeGVkNjQY", + "CiABKBASFgoOb3B0aW9uYWxfZmxvYXQYCyABKAISFwoPb3B0aW9uYWxfZG91", + "YmxlGAwgASgBEhUKDW9wdGlvbmFsX2Jvb2wYDSABKAgSFwoPb3B0aW9uYWxf", + "c3RyaW5nGA4gASgJEhYKDm9wdGlvbmFsX2J5dGVzGA8gASgMEkgKF29wdGlv", + "bmFsX25lc3RlZF9tZXNzYWdlGBIgASgLMicuY29uZm9ybWFuY2UuVGVzdEFs", + "bFR5cGVzLk5lc3RlZE1lc3NhZ2USPQoYb3B0aW9uYWxfZm9yZWlnbl9tZXNz", + "YWdlGBMgASgLMhsuY29uZm9ybWFuY2UuRm9yZWlnbk1lc3NhZ2USQgoUb3B0", + "aW9uYWxfbmVzdGVkX2VudW0YFSABKA4yJC5jb25mb3JtYW5jZS5UZXN0QWxs", + "VHlwZXMuTmVzdGVkRW51bRI3ChVvcHRpb25hbF9mb3JlaWduX2VudW0YFiAB", + "KA4yGC5jb25mb3JtYW5jZS5Gb3JlaWduRW51bRIhChVvcHRpb25hbF9zdHJp", + "bmdfcGllY2UYGCABKAlCAggCEhkKDW9wdGlvbmFsX2NvcmQYGSABKAlCAggB", + "EjQKEXJlY3Vyc2l2ZV9tZXNzYWdlGBsgASgLMhkuY29uZm9ybWFuY2UuVGVz", + "dEFsbFR5cGVzEhYKDnJlcGVhdGVkX2ludDMyGB8gAygFEhYKDnJlcGVhdGVk", + "X2ludDY0GCAgAygDEhcKD3JlcGVhdGVkX3VpbnQzMhghIAMoDRIXCg9yZXBl", + "YXRlZF91aW50NjQYIiADKAQSFwoPcmVwZWF0ZWRfc2ludDMyGCMgAygREhcK", + "D3JlcGVhdGVkX3NpbnQ2NBgkIAMoEhIYChByZXBlYXRlZF9maXhlZDMyGCUg", + "AygHEhgKEHJlcGVhdGVkX2ZpeGVkNjQYJiADKAYSGQoRcmVwZWF0ZWRfc2Zp", + "eGVkMzIYJyADKA8SGQoRcmVwZWF0ZWRfc2ZpeGVkNjQYKCADKBASFgoOcmVw", + "ZWF0ZWRfZmxvYXQYKSADKAISFwoPcmVwZWF0ZWRfZG91YmxlGCogAygBEhUK", + "DXJlcGVhdGVkX2Jvb2wYKyADKAgSFwoPcmVwZWF0ZWRfc3RyaW5nGCwgAygJ", + "EhYKDnJlcGVhdGVkX2J5dGVzGC0gAygMEkgKF3JlcGVhdGVkX25lc3RlZF9t", + "ZXNzYWdlGDAgAygLMicuY29uZm9ybWFuY2UuVGVzdEFsbFR5cGVzLk5lc3Rl", + "ZE1lc3NhZ2USPQoYcmVwZWF0ZWRfZm9yZWlnbl9tZXNzYWdlGDEgAygLMhsu", + "Y29uZm9ybWFuY2UuRm9yZWlnbk1lc3NhZ2USQgoUcmVwZWF0ZWRfbmVzdGVk", + 
"X2VudW0YMyADKA4yJC5jb25mb3JtYW5jZS5UZXN0QWxsVHlwZXMuTmVzdGVk", + "RW51bRI3ChVyZXBlYXRlZF9mb3JlaWduX2VudW0YNCADKA4yGC5jb25mb3Jt", + "YW5jZS5Gb3JlaWduRW51bRIhChVyZXBlYXRlZF9zdHJpbmdfcGllY2UYNiAD", + "KAlCAggCEhkKDXJlcGVhdGVkX2NvcmQYNyADKAlCAggBEkUKD21hcF9pbnQz", + "Ml9pbnQzMhg4IAMoCzIsLmNvbmZvcm1hbmNlLlRlc3RBbGxUeXBlcy5NYXBJ", + "bnQzMkludDMyRW50cnkSRQoPbWFwX2ludDY0X2ludDY0GDkgAygLMiwuY29u", + "Zm9ybWFuY2UuVGVzdEFsbFR5cGVzLk1hcEludDY0SW50NjRFbnRyeRJJChFt", + "YXBfdWludDMyX3VpbnQzMhg6IAMoCzIuLmNvbmZvcm1hbmNlLlRlc3RBbGxU", + "eXBlcy5NYXBVaW50MzJVaW50MzJFbnRyeRJJChFtYXBfdWludDY0X3VpbnQ2", + "NBg7IAMoCzIuLmNvbmZvcm1hbmNlLlRlc3RBbGxUeXBlcy5NYXBVaW50NjRV", + "aW50NjRFbnRyeRJJChFtYXBfc2ludDMyX3NpbnQzMhg8IAMoCzIuLmNvbmZv", + "cm1hbmNlLlRlc3RBbGxUeXBlcy5NYXBTaW50MzJTaW50MzJFbnRyeRJJChFt", + "YXBfc2ludDY0X3NpbnQ2NBg9IAMoCzIuLmNvbmZvcm1hbmNlLlRlc3RBbGxU", + "eXBlcy5NYXBTaW50NjRTaW50NjRFbnRyeRJNChNtYXBfZml4ZWQzMl9maXhl", + "ZDMyGD4gAygLMjAuY29uZm9ybWFuY2UuVGVzdEFsbFR5cGVzLk1hcEZpeGVk", + "MzJGaXhlZDMyRW50cnkSTQoTbWFwX2ZpeGVkNjRfZml4ZWQ2NBg/IAMoCzIw", + "LmNvbmZvcm1hbmNlLlRlc3RBbGxUeXBlcy5NYXBGaXhlZDY0Rml4ZWQ2NEVu", + "dHJ5ElEKFW1hcF9zZml4ZWQzMl9zZml4ZWQzMhhAIAMoCzIyLmNvbmZvcm1h", + "bmNlLlRlc3RBbGxUeXBlcy5NYXBTZml4ZWQzMlNmaXhlZDMyRW50cnkSUQoV", + "bWFwX3NmaXhlZDY0X3NmaXhlZDY0GEEgAygLMjIuY29uZm9ybWFuY2UuVGVz", + "dEFsbFR5cGVzLk1hcFNmaXhlZDY0U2ZpeGVkNjRFbnRyeRJFCg9tYXBfaW50", + "MzJfZmxvYXQYQiADKAsyLC5jb25mb3JtYW5jZS5UZXN0QWxsVHlwZXMuTWFw", + "SW50MzJGbG9hdEVudHJ5EkcKEG1hcF9pbnQzMl9kb3VibGUYQyADKAsyLS5j", + "b25mb3JtYW5jZS5UZXN0QWxsVHlwZXMuTWFwSW50MzJEb3VibGVFbnRyeRJB", + "Cg1tYXBfYm9vbF9ib29sGEQgAygLMiouY29uZm9ybWFuY2UuVGVzdEFsbFR5", + "cGVzLk1hcEJvb2xCb29sRW50cnkSSQoRbWFwX3N0cmluZ19zdHJpbmcYRSAD", + "KAsyLi5jb25mb3JtYW5jZS5UZXN0QWxsVHlwZXMuTWFwU3RyaW5nU3RyaW5n", + "RW50cnkSRwoQbWFwX3N0cmluZ19ieXRlcxhGIAMoCzItLmNvbmZvcm1hbmNl", + "LlRlc3RBbGxUeXBlcy5NYXBTdHJpbmdCeXRlc0VudHJ5ElgKGW1hcF9zdHJp", + "bmdfbmVzdGVkX21lc3NhZ2UYRyADKAsyNS5jb25mb3JtYW5jZS5UZXN0QWxs", + "VHlwZXMuTWFwU3RyaW5nTmVzdGVkTWVzc2FnZUVudHJ5EloKGm1hcF9zdHJp", + "bmdfZm9yZWlnbl9tZXNzYWdlGEggAygLMjYuY29uZm9ybWFuY2UuVGVzdEFs", + "bFR5cGVzLk1hcFN0cmluZ0ZvcmVpZ25NZXNzYWdlRW50cnkSUgoWbWFwX3N0", + "cmluZ19uZXN0ZWRfZW51bRhJIAMoCzIyLmNvbmZvcm1hbmNlLlRlc3RBbGxU", + "eXBlcy5NYXBTdHJpbmdOZXN0ZWRFbnVtRW50cnkSVAoXbWFwX3N0cmluZ19m", + "b3JlaWduX2VudW0YSiADKAsyMy5jb25mb3JtYW5jZS5UZXN0QWxsVHlwZXMu", + "TWFwU3RyaW5nRm9yZWlnbkVudW1FbnRyeRIWCgxvbmVvZl91aW50MzIYbyAB", + "KA1IABJHChRvbmVvZl9uZXN0ZWRfbWVzc2FnZRhwIAEoCzInLmNvbmZvcm1h", + "bmNlLlRlc3RBbGxUeXBlcy5OZXN0ZWRNZXNzYWdlSAASFgoMb25lb2Zfc3Ry", + "aW5nGHEgASgJSAASFQoLb25lb2ZfYnl0ZXMYciABKAxIABI6ChVvcHRpb25h", + "bF9ib29sX3dyYXBwZXIYyQEgASgLMhouZ29vZ2xlLnByb3RvYnVmLkJvb2xW", + "YWx1ZRI8ChZvcHRpb25hbF9pbnQzMl93cmFwcGVyGMoBIAEoCzIbLmdvb2ds", + "ZS5wcm90b2J1Zi5JbnQzMlZhbHVlEjwKFm9wdGlvbmFsX2ludDY0X3dyYXBw", + "ZXIYywEgASgLMhsuZ29vZ2xlLnByb3RvYnVmLkludDY0VmFsdWUSPgoXb3B0", + "aW9uYWxfdWludDMyX3dyYXBwZXIYzAEgASgLMhwuZ29vZ2xlLnByb3RvYnVm", + "LlVJbnQzMlZhbHVlEj4KF29wdGlvbmFsX3VpbnQ2NF93cmFwcGVyGM0BIAEo", + "CzIcLmdvb2dsZS5wcm90b2J1Zi5VSW50NjRWYWx1ZRI8ChZvcHRpb25hbF9m", + "bG9hdF93cmFwcGVyGM4BIAEoCzIbLmdvb2dsZS5wcm90b2J1Zi5GbG9hdFZh", + "bHVlEj4KF29wdGlvbmFsX2RvdWJsZV93cmFwcGVyGM8BIAEoCzIcLmdvb2ds", + "ZS5wcm90b2J1Zi5Eb3VibGVWYWx1ZRI+ChdvcHRpb25hbF9zdHJpbmdfd3Jh", + "cHBlchjQASABKAsyHC5nb29nbGUucHJvdG9idWYuU3RyaW5nVmFsdWUSPAoW", + "b3B0aW9uYWxfYnl0ZXNfd3JhcHBlchjRASABKAsyGy5nb29nbGUucHJvdG9i", + "dWYuQnl0ZXNWYWx1ZRI6ChVyZXBlYXRlZF9ib29sX3dyYXBwZXIY0wEgAygL", + 
"MhouZ29vZ2xlLnByb3RvYnVmLkJvb2xWYWx1ZRI8ChZyZXBlYXRlZF9pbnQz", + "Ml93cmFwcGVyGNQBIAMoCzIbLmdvb2dsZS5wcm90b2J1Zi5JbnQzMlZhbHVl", + "EjwKFnJlcGVhdGVkX2ludDY0X3dyYXBwZXIY1QEgAygLMhsuZ29vZ2xlLnBy", + "b3RvYnVmLkludDY0VmFsdWUSPgoXcmVwZWF0ZWRfdWludDMyX3dyYXBwZXIY", + "1gEgAygLMhwuZ29vZ2xlLnByb3RvYnVmLlVJbnQzMlZhbHVlEj4KF3JlcGVh", + "dGVkX3VpbnQ2NF93cmFwcGVyGNcBIAMoCzIcLmdvb2dsZS5wcm90b2J1Zi5V", + "SW50NjRWYWx1ZRI8ChZyZXBlYXRlZF9mbG9hdF93cmFwcGVyGNgBIAMoCzIb", + "Lmdvb2dsZS5wcm90b2J1Zi5GbG9hdFZhbHVlEj4KF3JlcGVhdGVkX2RvdWJs", + "ZV93cmFwcGVyGNkBIAMoCzIcLmdvb2dsZS5wcm90b2J1Zi5Eb3VibGVWYWx1", + "ZRI+ChdyZXBlYXRlZF9zdHJpbmdfd3JhcHBlchjaASADKAsyHC5nb29nbGUu", + "cHJvdG9idWYuU3RyaW5nVmFsdWUSPAoWcmVwZWF0ZWRfYnl0ZXNfd3JhcHBl", + "chjbASADKAsyGy5nb29nbGUucHJvdG9idWYuQnl0ZXNWYWx1ZRI1ChFvcHRp", + "b25hbF9kdXJhdGlvbhitAiABKAsyGS5nb29nbGUucHJvdG9idWYuRHVyYXRp", + "b24SNwoSb3B0aW9uYWxfdGltZXN0YW1wGK4CIAEoCzIaLmdvb2dsZS5wcm90", + "b2J1Zi5UaW1lc3RhbXASOAoTb3B0aW9uYWxfZmllbGRfbWFzaxivAiABKAsy", + "Gi5nb29nbGUucHJvdG9idWYuRmllbGRNYXNrEjEKD29wdGlvbmFsX3N0cnVj", + "dBiwAiABKAsyFy5nb29nbGUucHJvdG9idWYuU3RydWN0EisKDG9wdGlvbmFs", + "X2FueRixAiABKAsyFC5nb29nbGUucHJvdG9idWYuQW55Ei8KDm9wdGlvbmFs", + "X3ZhbHVlGLICIAEoCzIWLmdvb2dsZS5wcm90b2J1Zi5WYWx1ZRI1ChFyZXBl", + "YXRlZF9kdXJhdGlvbhi3AiADKAsyGS5nb29nbGUucHJvdG9idWYuRHVyYXRp", + "b24SNwoScmVwZWF0ZWRfdGltZXN0YW1wGLgCIAMoCzIaLmdvb2dsZS5wcm90", + "b2J1Zi5UaW1lc3RhbXASNwoScmVwZWF0ZWRfZmllbGRtYXNrGLkCIAMoCzIa", + "Lmdvb2dsZS5wcm90b2J1Zi5GaWVsZE1hc2sSMQoPcmVwZWF0ZWRfc3RydWN0", + "GMQCIAMoCzIXLmdvb2dsZS5wcm90b2J1Zi5TdHJ1Y3QSKwoMcmVwZWF0ZWRf", + "YW55GLsCIAMoCzIULmdvb2dsZS5wcm90b2J1Zi5BbnkSLwoOcmVwZWF0ZWRf", + "dmFsdWUYvAIgAygLMhYuZ29vZ2xlLnByb3RvYnVmLlZhbHVlEhMKCmZpZWxk", + "bmFtZTEYkQMgASgFEhQKC2ZpZWxkX25hbWUyGJIDIAEoBRIVCgxfZmllbGRf", + "bmFtZTMYkwMgASgFEhYKDWZpZWxkX19uYW1lNF8YlAMgASgFEhQKC2ZpZWxk", + "MG5hbWU1GJUDIAEoBRIWCg1maWVsZF8wX25hbWU2GJYDIAEoBRITCgpmaWVs", + "ZE5hbWU3GJcDIAEoBRITCgpGaWVsZE5hbWU4GJgDIAEoBRIUCgtmaWVsZF9O", + "YW1lORiZAyABKAUSFQoMRmllbGRfTmFtZTEwGJoDIAEoBRIVCgxGSUVMRF9O", + "QU1FMTEYmwMgASgFEhUKDEZJRUxEX25hbWUxMhicAyABKAUaSgoNTmVzdGVk", + "TWVzc2FnZRIJCgFhGAEgASgFEi4KC2NvcmVjdXJzaXZlGAIgASgLMhkuY29u", + "Zm9ybWFuY2UuVGVzdEFsbFR5cGVzGjQKEk1hcEludDMySW50MzJFbnRyeRIL", + "CgNrZXkYASABKAUSDQoFdmFsdWUYAiABKAU6AjgBGjQKEk1hcEludDY0SW50", + "NjRFbnRyeRILCgNrZXkYASABKAMSDQoFdmFsdWUYAiABKAM6AjgBGjYKFE1h", + "cFVpbnQzMlVpbnQzMkVudHJ5EgsKA2tleRgBIAEoDRINCgV2YWx1ZRgCIAEo", + "DToCOAEaNgoUTWFwVWludDY0VWludDY0RW50cnkSCwoDa2V5GAEgASgEEg0K", + "BXZhbHVlGAIgASgEOgI4ARo2ChRNYXBTaW50MzJTaW50MzJFbnRyeRILCgNr", + "ZXkYASABKBESDQoFdmFsdWUYAiABKBE6AjgBGjYKFE1hcFNpbnQ2NFNpbnQ2", + "NEVudHJ5EgsKA2tleRgBIAEoEhINCgV2YWx1ZRgCIAEoEjoCOAEaOAoWTWFw", + "Rml4ZWQzMkZpeGVkMzJFbnRyeRILCgNrZXkYASABKAcSDQoFdmFsdWUYAiAB", + "KAc6AjgBGjgKFk1hcEZpeGVkNjRGaXhlZDY0RW50cnkSCwoDa2V5GAEgASgG", + "Eg0KBXZhbHVlGAIgASgGOgI4ARo6ChhNYXBTZml4ZWQzMlNmaXhlZDMyRW50", + "cnkSCwoDa2V5GAEgASgPEg0KBXZhbHVlGAIgASgPOgI4ARo6ChhNYXBTZml4", + "ZWQ2NFNmaXhlZDY0RW50cnkSCwoDa2V5GAEgASgQEg0KBXZhbHVlGAIgASgQ", + "OgI4ARo0ChJNYXBJbnQzMkZsb2F0RW50cnkSCwoDa2V5GAEgASgFEg0KBXZh", + "bHVlGAIgASgCOgI4ARo1ChNNYXBJbnQzMkRvdWJsZUVudHJ5EgsKA2tleRgB", + "IAEoBRINCgV2YWx1ZRgCIAEoAToCOAEaMgoQTWFwQm9vbEJvb2xFbnRyeRIL", + "CgNrZXkYASABKAgSDQoFdmFsdWUYAiABKAg6AjgBGjYKFE1hcFN0cmluZ1N0", + "cmluZ0VudHJ5EgsKA2tleRgBIAEoCRINCgV2YWx1ZRgCIAEoCToCOAEaNQoT", + "TWFwU3RyaW5nQnl0ZXNFbnRyeRILCgNrZXkYASABKAkSDQoFdmFsdWUYAiAB", + "KAw6AjgBGmYKG01hcFN0cmluZ05lc3RlZE1lc3NhZ2VFbnRyeRILCgNrZXkY", + 
"ASABKAkSNgoFdmFsdWUYAiABKAsyJy5jb25mb3JtYW5jZS5UZXN0QWxsVHlw", + "ZXMuTmVzdGVkTWVzc2FnZToCOAEaWwocTWFwU3RyaW5nRm9yZWlnbk1lc3Nh", + "Z2VFbnRyeRILCgNrZXkYASABKAkSKgoFdmFsdWUYAiABKAsyGy5jb25mb3Jt", + "YW5jZS5Gb3JlaWduTWVzc2FnZToCOAEaYAoYTWFwU3RyaW5nTmVzdGVkRW51", + "bUVudHJ5EgsKA2tleRgBIAEoCRIzCgV2YWx1ZRgCIAEoDjIkLmNvbmZvcm1h", + "bmNlLlRlc3RBbGxUeXBlcy5OZXN0ZWRFbnVtOgI4ARpVChlNYXBTdHJpbmdG", + "b3JlaWduRW51bUVudHJ5EgsKA2tleRgBIAEoCRInCgV2YWx1ZRgCIAEoDjIY", + "LmNvbmZvcm1hbmNlLkZvcmVpZ25FbnVtOgI4ASI5CgpOZXN0ZWRFbnVtEgcK", + "A0ZPTxAAEgcKA0JBUhABEgcKA0JBWhACEhAKA05FRxD///////////8BQg0K", + "C29uZW9mX2ZpZWxkIhsKDkZvcmVpZ25NZXNzYWdlEgkKAWMYASABKAUqNQoK", + "V2lyZUZvcm1hdBIPCgtVTlNQRUNJRklFRBAAEgwKCFBST1RPQlVGEAESCAoE", + "SlNPThACKkAKC0ZvcmVpZ25FbnVtEg8KC0ZPUkVJR05fRk9PEAASDwoLRk9S", + "RUlHTl9CQVIQARIPCgtGT1JFSUdOX0JBWhACQiEKH2NvbS5nb29nbGUucHJv", + "dG9idWYuY29uZm9ybWFuY2ViBnByb3RvMw==")); + descriptor = pbr::FileDescriptor.FromGeneratedCode(descriptorData, + new pbr::FileDescriptor[] { global::Google.Protobuf.WellKnownTypes.AnyReflection.Descriptor, global::Google.Protobuf.WellKnownTypes.DurationReflection.Descriptor, global::Google.Protobuf.WellKnownTypes.FieldMaskReflection.Descriptor, global::Google.Protobuf.WellKnownTypes.StructReflection.Descriptor, global::Google.Protobuf.WellKnownTypes.TimestampReflection.Descriptor, global::Google.Protobuf.WellKnownTypes.WrappersReflection.Descriptor, }, + new pbr::GeneratedClrTypeInfo(new[] {typeof(global::Conformance.WireFormat), typeof(global::Conformance.ForeignEnum), }, new pbr::GeneratedClrTypeInfo[] { + new pbr::GeneratedClrTypeInfo(typeof(global::Conformance.ConformanceRequest), global::Conformance.ConformanceRequest.Parser, new[]{ "ProtobufPayload", "JsonPayload", "RequestedOutputFormat" }, new[]{ "Payload" }, null, null), + new pbr::GeneratedClrTypeInfo(typeof(global::Conformance.ConformanceResponse), global::Conformance.ConformanceResponse.Parser, new[]{ "ParseError", "SerializeError", "RuntimeError", "ProtobufPayload", "JsonPayload", "Skipped" }, new[]{ "Result" }, null, null), + new pbr::GeneratedClrTypeInfo(typeof(global::Conformance.TestAllTypes), global::Conformance.TestAllTypes.Parser, new[]{ "OptionalInt32", "OptionalInt64", "OptionalUint32", "OptionalUint64", "OptionalSint32", "OptionalSint64", "OptionalFixed32", "OptionalFixed64", "OptionalSfixed32", "OptionalSfixed64", "OptionalFloat", "OptionalDouble", "OptionalBool", "OptionalString", "OptionalBytes", "OptionalNestedMessage", "OptionalForeignMessage", "OptionalNestedEnum", "OptionalForeignEnum", "OptionalStringPiece", "OptionalCord", "RecursiveMessage", "RepeatedInt32", "RepeatedInt64", "RepeatedUint32", "RepeatedUint64", "RepeatedSint32", "RepeatedSint64", "RepeatedFixed32", "RepeatedFixed64", "RepeatedSfixed32", "RepeatedSfixed64", "RepeatedFloat", "RepeatedDouble", "RepeatedBool", "RepeatedString", "RepeatedBytes", "RepeatedNestedMessage", "RepeatedForeignMessage", "RepeatedNestedEnum", "RepeatedForeignEnum", "RepeatedStringPiece", "RepeatedCord", "MapInt32Int32", "MapInt64Int64", "MapUint32Uint32", "MapUint64Uint64", "MapSint32Sint32", "MapSint64Sint64", "MapFixed32Fixed32", "MapFixed64Fixed64", "MapSfixed32Sfixed32", "MapSfixed64Sfixed64", "MapInt32Float", "MapInt32Double", "MapBoolBool", "MapStringString", "MapStringBytes", "MapStringNestedMessage", "MapStringForeignMessage", "MapStringNestedEnum", "MapStringForeignEnum", "OneofUint32", "OneofNestedMessage", "OneofString", "OneofBytes", "OptionalBoolWrapper", "OptionalInt32Wrapper", "OptionalInt64Wrapper", 
"OptionalUint32Wrapper", "OptionalUint64Wrapper", "OptionalFloatWrapper", "OptionalDoubleWrapper", "OptionalStringWrapper", "OptionalBytesWrapper", "RepeatedBoolWrapper", "RepeatedInt32Wrapper", "RepeatedInt64Wrapper", "RepeatedUint32Wrapper", "RepeatedUint64Wrapper", "RepeatedFloatWrapper", "RepeatedDoubleWrapper", "RepeatedStringWrapper", "RepeatedBytesWrapper", "OptionalDuration", "OptionalTimestamp", "OptionalFieldMask", "OptionalStruct", "OptionalAny", "OptionalValue", "RepeatedDuration", "RepeatedTimestamp", "RepeatedFieldmask", "RepeatedStruct", "RepeatedAny", "RepeatedValue", "Fieldname1", "FieldName2", "FieldName3", "FieldName4", "Field0Name5", "Field0Name6", "FieldName7", "FieldName8", "FieldName9", "FieldName10", "FIELDNAME11", "FIELDName12" }, new[]{ "OneofField" }, new[]{ typeof(global::Conformance.TestAllTypes.Types.NestedEnum) }, new pbr::GeneratedClrTypeInfo[] { new pbr::GeneratedClrTypeInfo(typeof(global::Conformance.TestAllTypes.Types.NestedMessage), global::Conformance.TestAllTypes.Types.NestedMessage.Parser, new[]{ "A", "Corecursive" }, null, null, null), + null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, }), + new pbr::GeneratedClrTypeInfo(typeof(global::Conformance.ForeignMessage), global::Conformance.ForeignMessage.Parser, new[]{ "C" }, null, null, null) + })); + } + #endregion + + } + #region Enums + public enum WireFormat { + [pbr::OriginalName("UNSPECIFIED")] Unspecified = 0, + [pbr::OriginalName("PROTOBUF")] Protobuf = 1, + [pbr::OriginalName("JSON")] Json = 2, + } + + public enum ForeignEnum { + [pbr::OriginalName("FOREIGN_FOO")] ForeignFoo = 0, + [pbr::OriginalName("FOREIGN_BAR")] ForeignBar = 1, + [pbr::OriginalName("FOREIGN_BAZ")] ForeignBaz = 2, + } + + #endregion + + #region Messages + /// + /// Represents a single test case's input. The testee should: + /// + /// 1. parse this proto (which should always succeed) + /// 2. parse the protobuf or JSON payload in "payload" (which may fail) + /// 3. if the parse succeeded, serialize the message in the requested format. + /// + [global::System.Diagnostics.DebuggerNonUserCodeAttribute()] + public sealed partial class ConformanceRequest : pb::IMessage { + private static readonly pb::MessageParser _parser = new pb::MessageParser(() => new ConformanceRequest()); + public static pb::MessageParser Parser { get { return _parser; } } + + public static pbr::MessageDescriptor Descriptor { + get { return global::Conformance.ConformanceReflection.Descriptor.MessageTypes[0]; } + } + + pbr::MessageDescriptor pb::IMessage.Descriptor { + get { return Descriptor; } + } + + public ConformanceRequest() { + OnConstruction(); + } + + partial void OnConstruction(); + + public ConformanceRequest(ConformanceRequest other) : this() { + requestedOutputFormat_ = other.requestedOutputFormat_; + switch (other.PayloadCase) { + case PayloadOneofCase.ProtobufPayload: + ProtobufPayload = other.ProtobufPayload; + break; + case PayloadOneofCase.JsonPayload: + JsonPayload = other.JsonPayload; + break; + } + + } + + public ConformanceRequest Clone() { + return new ConformanceRequest(this); + } + + /// Field number for the "protobuf_payload" field. + public const int ProtobufPayloadFieldNumber = 1; + public pb::ByteString ProtobufPayload { + get { return payloadCase_ == PayloadOneofCase.ProtobufPayload ? 
(pb::ByteString) payload_ : pb::ByteString.Empty; } + set { + payload_ = pb::ProtoPreconditions.CheckNotNull(value, "value"); + payloadCase_ = PayloadOneofCase.ProtobufPayload; + } + } + + /// Field number for the "json_payload" field. + public const int JsonPayloadFieldNumber = 2; + public string JsonPayload { + get { return payloadCase_ == PayloadOneofCase.JsonPayload ? (string) payload_ : ""; } + set { + payload_ = pb::ProtoPreconditions.CheckNotNull(value, "value"); + payloadCase_ = PayloadOneofCase.JsonPayload; + } + } + + /// Field number for the "requested_output_format" field. + public const int RequestedOutputFormatFieldNumber = 3; + private global::Conformance.WireFormat requestedOutputFormat_ = 0; + /// + /// Which format should the testee serialize its message to? + /// + public global::Conformance.WireFormat RequestedOutputFormat { + get { return requestedOutputFormat_; } + set { + requestedOutputFormat_ = value; + } + } + + private object payload_; + /// Enum of possible cases for the "payload" oneof. + public enum PayloadOneofCase { + None = 0, + ProtobufPayload = 1, + JsonPayload = 2, + } + private PayloadOneofCase payloadCase_ = PayloadOneofCase.None; + public PayloadOneofCase PayloadCase { + get { return payloadCase_; } + } + + public void ClearPayload() { + payloadCase_ = PayloadOneofCase.None; + payload_ = null; + } + + public override bool Equals(object other) { + return Equals(other as ConformanceRequest); + } + + public bool Equals(ConformanceRequest other) { + if (ReferenceEquals(other, null)) { + return false; + } + if (ReferenceEquals(other, this)) { + return true; + } + if (ProtobufPayload != other.ProtobufPayload) return false; + if (JsonPayload != other.JsonPayload) return false; + if (RequestedOutputFormat != other.RequestedOutputFormat) return false; + if (PayloadCase != other.PayloadCase) return false; + return true; + } + + public override int GetHashCode() { + int hash = 1; + if (payloadCase_ == PayloadOneofCase.ProtobufPayload) hash ^= ProtobufPayload.GetHashCode(); + if (payloadCase_ == PayloadOneofCase.JsonPayload) hash ^= JsonPayload.GetHashCode(); + if (RequestedOutputFormat != 0) hash ^= RequestedOutputFormat.GetHashCode(); + hash ^= (int) payloadCase_; + return hash; + } + + public override string ToString() { + return pb::JsonFormatter.ToDiagnosticString(this); + } + + public void WriteTo(pb::CodedOutputStream output) { + if (payloadCase_ == PayloadOneofCase.ProtobufPayload) { + output.WriteRawTag(10); + output.WriteBytes(ProtobufPayload); + } + if (payloadCase_ == PayloadOneofCase.JsonPayload) { + output.WriteRawTag(18); + output.WriteString(JsonPayload); + } + if (RequestedOutputFormat != 0) { + output.WriteRawTag(24); + output.WriteEnum((int) RequestedOutputFormat); + } + } + + public int CalculateSize() { + int size = 0; + if (payloadCase_ == PayloadOneofCase.ProtobufPayload) { + size += 1 + pb::CodedOutputStream.ComputeBytesSize(ProtobufPayload); + } + if (payloadCase_ == PayloadOneofCase.JsonPayload) { + size += 1 + pb::CodedOutputStream.ComputeStringSize(JsonPayload); + } + if (RequestedOutputFormat != 0) { + size += 1 + pb::CodedOutputStream.ComputeEnumSize((int) RequestedOutputFormat); + } + return size; + } + + public void MergeFrom(ConformanceRequest other) { + if (other == null) { + return; + } + if (other.RequestedOutputFormat != 0) { + RequestedOutputFormat = other.RequestedOutputFormat; + } + switch (other.PayloadCase) { + case PayloadOneofCase.ProtobufPayload: + ProtobufPayload = other.ProtobufPayload; + break; + case 
PayloadOneofCase.JsonPayload: + JsonPayload = other.JsonPayload; + break; + } + + } + + public void MergeFrom(pb::CodedInputStream input) { + uint tag; + while ((tag = input.ReadTag()) != 0) { + switch(tag) { + default: + input.SkipLastField(); + break; + case 10: { + ProtobufPayload = input.ReadBytes(); + break; + } + case 18: { + JsonPayload = input.ReadString(); + break; + } + case 24: { + requestedOutputFormat_ = (global::Conformance.WireFormat) input.ReadEnum(); + break; + } + } + } + } + + } + + /// + /// Represents a single test case's output. + /// + [global::System.Diagnostics.DebuggerNonUserCodeAttribute()] + public sealed partial class ConformanceResponse : pb::IMessage { + private static readonly pb::MessageParser _parser = new pb::MessageParser(() => new ConformanceResponse()); + public static pb::MessageParser Parser { get { return _parser; } } + + public static pbr::MessageDescriptor Descriptor { + get { return global::Conformance.ConformanceReflection.Descriptor.MessageTypes[1]; } + } + + pbr::MessageDescriptor pb::IMessage.Descriptor { + get { return Descriptor; } + } + + public ConformanceResponse() { + OnConstruction(); + } + + partial void OnConstruction(); + + public ConformanceResponse(ConformanceResponse other) : this() { + switch (other.ResultCase) { + case ResultOneofCase.ParseError: + ParseError = other.ParseError; + break; + case ResultOneofCase.SerializeError: + SerializeError = other.SerializeError; + break; + case ResultOneofCase.RuntimeError: + RuntimeError = other.RuntimeError; + break; + case ResultOneofCase.ProtobufPayload: + ProtobufPayload = other.ProtobufPayload; + break; + case ResultOneofCase.JsonPayload: + JsonPayload = other.JsonPayload; + break; + case ResultOneofCase.Skipped: + Skipped = other.Skipped; + break; + } + + } + + public ConformanceResponse Clone() { + return new ConformanceResponse(this); + } + + /// Field number for the "parse_error" field. + public const int ParseErrorFieldNumber = 1; + /// + /// This string should be set to indicate parsing failed. The string can + /// provide more information about the parse error if it is available. + /// + /// Setting this string does not necessarily mean the testee failed the + /// test. Some of the test cases are intentionally invalid input. + /// + public string ParseError { + get { return resultCase_ == ResultOneofCase.ParseError ? (string) result_ : ""; } + set { + result_ = pb::ProtoPreconditions.CheckNotNull(value, "value"); + resultCase_ = ResultOneofCase.ParseError; + } + } + + /// Field number for the "serialize_error" field. + public const int SerializeErrorFieldNumber = 6; + /// + /// If the input was successfully parsed but errors occurred when + /// serializing it to the requested output format, set the error message in + /// this field. + /// + public string SerializeError { + get { return resultCase_ == ResultOneofCase.SerializeError ? (string) result_ : ""; } + set { + result_ = pb::ProtoPreconditions.CheckNotNull(value, "value"); + resultCase_ = ResultOneofCase.SerializeError; + } + } + + /// Field number for the "runtime_error" field. + public const int RuntimeErrorFieldNumber = 2; + /// + /// This should be set if some other error occurred. This will always + /// indicate that the test failed. The string can provide more information + /// about the failure. + /// + public string RuntimeError { + get { return resultCase_ == ResultOneofCase.RuntimeError ? 
(string) result_ : ""; } + set { + result_ = pb::ProtoPreconditions.CheckNotNull(value, "value"); + resultCase_ = ResultOneofCase.RuntimeError; + } + } + + /// Field number for the "protobuf_payload" field. + public const int ProtobufPayloadFieldNumber = 3; + /// + /// If the input was successfully parsed and the requested output was + /// protobuf, serialize it to protobuf and set it in this field. + /// + public pb::ByteString ProtobufPayload { + get { return resultCase_ == ResultOneofCase.ProtobufPayload ? (pb::ByteString) result_ : pb::ByteString.Empty; } + set { + result_ = pb::ProtoPreconditions.CheckNotNull(value, "value"); + resultCase_ = ResultOneofCase.ProtobufPayload; + } + } + + /// Field number for the "json_payload" field. + public const int JsonPayloadFieldNumber = 4; + /// + /// If the input was successfully parsed and the requested output was JSON, + /// serialize to JSON and set it in this field. + /// + public string JsonPayload { + get { return resultCase_ == ResultOneofCase.JsonPayload ? (string) result_ : ""; } + set { + result_ = pb::ProtoPreconditions.CheckNotNull(value, "value"); + resultCase_ = ResultOneofCase.JsonPayload; + } + } + + /// Field number for the "skipped" field. + public const int SkippedFieldNumber = 5; + /// + /// For when the testee skipped the test, likely because a certain feature + /// wasn't supported, like JSON input/output. + /// + public string Skipped { + get { return resultCase_ == ResultOneofCase.Skipped ? (string) result_ : ""; } + set { + result_ = pb::ProtoPreconditions.CheckNotNull(value, "value"); + resultCase_ = ResultOneofCase.Skipped; + } + } + + private object result_; + /// Enum of possible cases for the "result" oneof. + public enum ResultOneofCase { + None = 0, + ParseError = 1, + SerializeError = 6, + RuntimeError = 2, + ProtobufPayload = 3, + JsonPayload = 4, + Skipped = 5, + } + private ResultOneofCase resultCase_ = ResultOneofCase.None; + public ResultOneofCase ResultCase { + get { return resultCase_; } + } + + public void ClearResult() { + resultCase_ = ResultOneofCase.None; + result_ = null; + } + + public override bool Equals(object other) { + return Equals(other as ConformanceResponse); + } + + public bool Equals(ConformanceResponse other) { + if (ReferenceEquals(other, null)) { + return false; + } + if (ReferenceEquals(other, this)) { + return true; + } + if (ParseError != other.ParseError) return false; + if (SerializeError != other.SerializeError) return false; + if (RuntimeError != other.RuntimeError) return false; + if (ProtobufPayload != other.ProtobufPayload) return false; + if (JsonPayload != other.JsonPayload) return false; + if (Skipped != other.Skipped) return false; + if (ResultCase != other.ResultCase) return false; + return true; + } + + public override int GetHashCode() { + int hash = 1; + if (resultCase_ == ResultOneofCase.ParseError) hash ^= ParseError.GetHashCode(); + if (resultCase_ == ResultOneofCase.SerializeError) hash ^= SerializeError.GetHashCode(); + if (resultCase_ == ResultOneofCase.RuntimeError) hash ^= RuntimeError.GetHashCode(); + if (resultCase_ == ResultOneofCase.ProtobufPayload) hash ^= ProtobufPayload.GetHashCode(); + if (resultCase_ == ResultOneofCase.JsonPayload) hash ^= JsonPayload.GetHashCode(); + if (resultCase_ == ResultOneofCase.Skipped) hash ^= Skipped.GetHashCode(); + hash ^= (int) resultCase_; + return hash; + } + + public override string ToString() { + return pb::JsonFormatter.ToDiagnosticString(this); + } + + public void WriteTo(pb::CodedOutputStream output) { + if 
(resultCase_ == ResultOneofCase.ParseError) { + output.WriteRawTag(10); + output.WriteString(ParseError); + } + if (resultCase_ == ResultOneofCase.RuntimeError) { + output.WriteRawTag(18); + output.WriteString(RuntimeError); + } + if (resultCase_ == ResultOneofCase.ProtobufPayload) { + output.WriteRawTag(26); + output.WriteBytes(ProtobufPayload); + } + if (resultCase_ == ResultOneofCase.JsonPayload) { + output.WriteRawTag(34); + output.WriteString(JsonPayload); + } + if (resultCase_ == ResultOneofCase.Skipped) { + output.WriteRawTag(42); + output.WriteString(Skipped); + } + if (resultCase_ == ResultOneofCase.SerializeError) { + output.WriteRawTag(50); + output.WriteString(SerializeError); + } + } + + public int CalculateSize() { + int size = 0; + if (resultCase_ == ResultOneofCase.ParseError) { + size += 1 + pb::CodedOutputStream.ComputeStringSize(ParseError); + } + if (resultCase_ == ResultOneofCase.SerializeError) { + size += 1 + pb::CodedOutputStream.ComputeStringSize(SerializeError); + } + if (resultCase_ == ResultOneofCase.RuntimeError) { + size += 1 + pb::CodedOutputStream.ComputeStringSize(RuntimeError); + } + if (resultCase_ == ResultOneofCase.ProtobufPayload) { + size += 1 + pb::CodedOutputStream.ComputeBytesSize(ProtobufPayload); + } + if (resultCase_ == ResultOneofCase.JsonPayload) { + size += 1 + pb::CodedOutputStream.ComputeStringSize(JsonPayload); + } + if (resultCase_ == ResultOneofCase.Skipped) { + size += 1 + pb::CodedOutputStream.ComputeStringSize(Skipped); + } + return size; + } + + public void MergeFrom(ConformanceResponse other) { + if (other == null) { + return; + } + switch (other.ResultCase) { + case ResultOneofCase.ParseError: + ParseError = other.ParseError; + break; + case ResultOneofCase.SerializeError: + SerializeError = other.SerializeError; + break; + case ResultOneofCase.RuntimeError: + RuntimeError = other.RuntimeError; + break; + case ResultOneofCase.ProtobufPayload: + ProtobufPayload = other.ProtobufPayload; + break; + case ResultOneofCase.JsonPayload: + JsonPayload = other.JsonPayload; + break; + case ResultOneofCase.Skipped: + Skipped = other.Skipped; + break; + } + + } + + public void MergeFrom(pb::CodedInputStream input) { + uint tag; + while ((tag = input.ReadTag()) != 0) { + switch(tag) { + default: + input.SkipLastField(); + break; + case 10: { + ParseError = input.ReadString(); + break; + } + case 18: { + RuntimeError = input.ReadString(); + break; + } + case 26: { + ProtobufPayload = input.ReadBytes(); + break; + } + case 34: { + JsonPayload = input.ReadString(); + break; + } + case 42: { + Skipped = input.ReadString(); + break; + } + case 50: { + SerializeError = input.ReadString(); + break; + } + } + } + } + + } + + /// + /// This proto includes every type of field in both singular and repeated + /// forms. 
+ /// + [global::System.Diagnostics.DebuggerNonUserCodeAttribute()] + public sealed partial class TestAllTypes : pb::IMessage { + private static readonly pb::MessageParser _parser = new pb::MessageParser(() => new TestAllTypes()); + public static pb::MessageParser Parser { get { return _parser; } } + + public static pbr::MessageDescriptor Descriptor { + get { return global::Conformance.ConformanceReflection.Descriptor.MessageTypes[2]; } + } + + pbr::MessageDescriptor pb::IMessage.Descriptor { + get { return Descriptor; } + } + + public TestAllTypes() { + OnConstruction(); + } + + partial void OnConstruction(); + + public TestAllTypes(TestAllTypes other) : this() { + optionalInt32_ = other.optionalInt32_; + optionalInt64_ = other.optionalInt64_; + optionalUint32_ = other.optionalUint32_; + optionalUint64_ = other.optionalUint64_; + optionalSint32_ = other.optionalSint32_; + optionalSint64_ = other.optionalSint64_; + optionalFixed32_ = other.optionalFixed32_; + optionalFixed64_ = other.optionalFixed64_; + optionalSfixed32_ = other.optionalSfixed32_; + optionalSfixed64_ = other.optionalSfixed64_; + optionalFloat_ = other.optionalFloat_; + optionalDouble_ = other.optionalDouble_; + optionalBool_ = other.optionalBool_; + optionalString_ = other.optionalString_; + optionalBytes_ = other.optionalBytes_; + OptionalNestedMessage = other.optionalNestedMessage_ != null ? other.OptionalNestedMessage.Clone() : null; + OptionalForeignMessage = other.optionalForeignMessage_ != null ? other.OptionalForeignMessage.Clone() : null; + optionalNestedEnum_ = other.optionalNestedEnum_; + optionalForeignEnum_ = other.optionalForeignEnum_; + optionalStringPiece_ = other.optionalStringPiece_; + optionalCord_ = other.optionalCord_; + RecursiveMessage = other.recursiveMessage_ != null ? 
other.RecursiveMessage.Clone() : null; + repeatedInt32_ = other.repeatedInt32_.Clone(); + repeatedInt64_ = other.repeatedInt64_.Clone(); + repeatedUint32_ = other.repeatedUint32_.Clone(); + repeatedUint64_ = other.repeatedUint64_.Clone(); + repeatedSint32_ = other.repeatedSint32_.Clone(); + repeatedSint64_ = other.repeatedSint64_.Clone(); + repeatedFixed32_ = other.repeatedFixed32_.Clone(); + repeatedFixed64_ = other.repeatedFixed64_.Clone(); + repeatedSfixed32_ = other.repeatedSfixed32_.Clone(); + repeatedSfixed64_ = other.repeatedSfixed64_.Clone(); + repeatedFloat_ = other.repeatedFloat_.Clone(); + repeatedDouble_ = other.repeatedDouble_.Clone(); + repeatedBool_ = other.repeatedBool_.Clone(); + repeatedString_ = other.repeatedString_.Clone(); + repeatedBytes_ = other.repeatedBytes_.Clone(); + repeatedNestedMessage_ = other.repeatedNestedMessage_.Clone(); + repeatedForeignMessage_ = other.repeatedForeignMessage_.Clone(); + repeatedNestedEnum_ = other.repeatedNestedEnum_.Clone(); + repeatedForeignEnum_ = other.repeatedForeignEnum_.Clone(); + repeatedStringPiece_ = other.repeatedStringPiece_.Clone(); + repeatedCord_ = other.repeatedCord_.Clone(); + mapInt32Int32_ = other.mapInt32Int32_.Clone(); + mapInt64Int64_ = other.mapInt64Int64_.Clone(); + mapUint32Uint32_ = other.mapUint32Uint32_.Clone(); + mapUint64Uint64_ = other.mapUint64Uint64_.Clone(); + mapSint32Sint32_ = other.mapSint32Sint32_.Clone(); + mapSint64Sint64_ = other.mapSint64Sint64_.Clone(); + mapFixed32Fixed32_ = other.mapFixed32Fixed32_.Clone(); + mapFixed64Fixed64_ = other.mapFixed64Fixed64_.Clone(); + mapSfixed32Sfixed32_ = other.mapSfixed32Sfixed32_.Clone(); + mapSfixed64Sfixed64_ = other.mapSfixed64Sfixed64_.Clone(); + mapInt32Float_ = other.mapInt32Float_.Clone(); + mapInt32Double_ = other.mapInt32Double_.Clone(); + mapBoolBool_ = other.mapBoolBool_.Clone(); + mapStringString_ = other.mapStringString_.Clone(); + mapStringBytes_ = other.mapStringBytes_.Clone(); + mapStringNestedMessage_ = other.mapStringNestedMessage_.Clone(); + mapStringForeignMessage_ = other.mapStringForeignMessage_.Clone(); + mapStringNestedEnum_ = other.mapStringNestedEnum_.Clone(); + mapStringForeignEnum_ = other.mapStringForeignEnum_.Clone(); + OptionalBoolWrapper = other.OptionalBoolWrapper; + OptionalInt32Wrapper = other.OptionalInt32Wrapper; + OptionalInt64Wrapper = other.OptionalInt64Wrapper; + OptionalUint32Wrapper = other.OptionalUint32Wrapper; + OptionalUint64Wrapper = other.OptionalUint64Wrapper; + OptionalFloatWrapper = other.OptionalFloatWrapper; + OptionalDoubleWrapper = other.OptionalDoubleWrapper; + OptionalStringWrapper = other.OptionalStringWrapper; + OptionalBytesWrapper = other.OptionalBytesWrapper; + repeatedBoolWrapper_ = other.repeatedBoolWrapper_.Clone(); + repeatedInt32Wrapper_ = other.repeatedInt32Wrapper_.Clone(); + repeatedInt64Wrapper_ = other.repeatedInt64Wrapper_.Clone(); + repeatedUint32Wrapper_ = other.repeatedUint32Wrapper_.Clone(); + repeatedUint64Wrapper_ = other.repeatedUint64Wrapper_.Clone(); + repeatedFloatWrapper_ = other.repeatedFloatWrapper_.Clone(); + repeatedDoubleWrapper_ = other.repeatedDoubleWrapper_.Clone(); + repeatedStringWrapper_ = other.repeatedStringWrapper_.Clone(); + repeatedBytesWrapper_ = other.repeatedBytesWrapper_.Clone(); + OptionalDuration = other.optionalDuration_ != null ? other.OptionalDuration.Clone() : null; + OptionalTimestamp = other.optionalTimestamp_ != null ? other.OptionalTimestamp.Clone() : null; + OptionalFieldMask = other.optionalFieldMask_ != null ? 
other.OptionalFieldMask.Clone() : null; + OptionalStruct = other.optionalStruct_ != null ? other.OptionalStruct.Clone() : null; + OptionalAny = other.optionalAny_ != null ? other.OptionalAny.Clone() : null; + OptionalValue = other.optionalValue_ != null ? other.OptionalValue.Clone() : null; + repeatedDuration_ = other.repeatedDuration_.Clone(); + repeatedTimestamp_ = other.repeatedTimestamp_.Clone(); + repeatedFieldmask_ = other.repeatedFieldmask_.Clone(); + repeatedStruct_ = other.repeatedStruct_.Clone(); + repeatedAny_ = other.repeatedAny_.Clone(); + repeatedValue_ = other.repeatedValue_.Clone(); + fieldname1_ = other.fieldname1_; + fieldName2_ = other.fieldName2_; + FieldName3_ = other.FieldName3_; + fieldName4_ = other.fieldName4_; + field0Name5_ = other.field0Name5_; + field0Name6_ = other.field0Name6_; + fieldName7_ = other.fieldName7_; + fieldName8_ = other.fieldName8_; + fieldName9_ = other.fieldName9_; + fieldName10_ = other.fieldName10_; + fIELDNAME11_ = other.fIELDNAME11_; + fIELDName12_ = other.fIELDName12_; + switch (other.OneofFieldCase) { + case OneofFieldOneofCase.OneofUint32: + OneofUint32 = other.OneofUint32; + break; + case OneofFieldOneofCase.OneofNestedMessage: + OneofNestedMessage = other.OneofNestedMessage.Clone(); + break; + case OneofFieldOneofCase.OneofString: + OneofString = other.OneofString; + break; + case OneofFieldOneofCase.OneofBytes: + OneofBytes = other.OneofBytes; + break; + } + + } + + public TestAllTypes Clone() { + return new TestAllTypes(this); + } + + /// Field number for the "optional_int32" field. + public const int OptionalInt32FieldNumber = 1; + private int optionalInt32_; + /// + /// Singular + /// + public int OptionalInt32 { + get { return optionalInt32_; } + set { + optionalInt32_ = value; + } + } + + /// Field number for the "optional_int64" field. + public const int OptionalInt64FieldNumber = 2; + private long optionalInt64_; + public long OptionalInt64 { + get { return optionalInt64_; } + set { + optionalInt64_ = value; + } + } + + /// Field number for the "optional_uint32" field. + public const int OptionalUint32FieldNumber = 3; + private uint optionalUint32_; + public uint OptionalUint32 { + get { return optionalUint32_; } + set { + optionalUint32_ = value; + } + } + + /// Field number for the "optional_uint64" field. + public const int OptionalUint64FieldNumber = 4; + private ulong optionalUint64_; + public ulong OptionalUint64 { + get { return optionalUint64_; } + set { + optionalUint64_ = value; + } + } + + /// Field number for the "optional_sint32" field. + public const int OptionalSint32FieldNumber = 5; + private int optionalSint32_; + public int OptionalSint32 { + get { return optionalSint32_; } + set { + optionalSint32_ = value; + } + } + + /// Field number for the "optional_sint64" field. + public const int OptionalSint64FieldNumber = 6; + private long optionalSint64_; + public long OptionalSint64 { + get { return optionalSint64_; } + set { + optionalSint64_ = value; + } + } + + /// Field number for the "optional_fixed32" field. + public const int OptionalFixed32FieldNumber = 7; + private uint optionalFixed32_; + public uint OptionalFixed32 { + get { return optionalFixed32_; } + set { + optionalFixed32_ = value; + } + } + + /// Field number for the "optional_fixed64" field. + public const int OptionalFixed64FieldNumber = 8; + private ulong optionalFixed64_; + public ulong OptionalFixed64 { + get { return optionalFixed64_; } + set { + optionalFixed64_ = value; + } + } + + /// Field number for the "optional_sfixed32" field. 
+ public const int OptionalSfixed32FieldNumber = 9; + private int optionalSfixed32_; + public int OptionalSfixed32 { + get { return optionalSfixed32_; } + set { + optionalSfixed32_ = value; + } + } + + /// Field number for the "optional_sfixed64" field. + public const int OptionalSfixed64FieldNumber = 10; + private long optionalSfixed64_; + public long OptionalSfixed64 { + get { return optionalSfixed64_; } + set { + optionalSfixed64_ = value; + } + } + + /// Field number for the "optional_float" field. + public const int OptionalFloatFieldNumber = 11; + private float optionalFloat_; + public float OptionalFloat { + get { return optionalFloat_; } + set { + optionalFloat_ = value; + } + } + + /// Field number for the "optional_double" field. + public const int OptionalDoubleFieldNumber = 12; + private double optionalDouble_; + public double OptionalDouble { + get { return optionalDouble_; } + set { + optionalDouble_ = value; + } + } + + /// Field number for the "optional_bool" field. + public const int OptionalBoolFieldNumber = 13; + private bool optionalBool_; + public bool OptionalBool { + get { return optionalBool_; } + set { + optionalBool_ = value; + } + } + + /// Field number for the "optional_string" field. + public const int OptionalStringFieldNumber = 14; + private string optionalString_ = ""; + public string OptionalString { + get { return optionalString_; } + set { + optionalString_ = pb::ProtoPreconditions.CheckNotNull(value, "value"); + } + } + + /// Field number for the "optional_bytes" field. + public const int OptionalBytesFieldNumber = 15; + private pb::ByteString optionalBytes_ = pb::ByteString.Empty; + public pb::ByteString OptionalBytes { + get { return optionalBytes_; } + set { + optionalBytes_ = pb::ProtoPreconditions.CheckNotNull(value, "value"); + } + } + + /// Field number for the "optional_nested_message" field. + public const int OptionalNestedMessageFieldNumber = 18; + private global::Conformance.TestAllTypes.Types.NestedMessage optionalNestedMessage_; + public global::Conformance.TestAllTypes.Types.NestedMessage OptionalNestedMessage { + get { return optionalNestedMessage_; } + set { + optionalNestedMessage_ = value; + } + } + + /// Field number for the "optional_foreign_message" field. + public const int OptionalForeignMessageFieldNumber = 19; + private global::Conformance.ForeignMessage optionalForeignMessage_; + public global::Conformance.ForeignMessage OptionalForeignMessage { + get { return optionalForeignMessage_; } + set { + optionalForeignMessage_ = value; + } + } + + /// Field number for the "optional_nested_enum" field. + public const int OptionalNestedEnumFieldNumber = 21; + private global::Conformance.TestAllTypes.Types.NestedEnum optionalNestedEnum_ = 0; + public global::Conformance.TestAllTypes.Types.NestedEnum OptionalNestedEnum { + get { return optionalNestedEnum_; } + set { + optionalNestedEnum_ = value; + } + } + + /// Field number for the "optional_foreign_enum" field. + public const int OptionalForeignEnumFieldNumber = 22; + private global::Conformance.ForeignEnum optionalForeignEnum_ = 0; + public global::Conformance.ForeignEnum OptionalForeignEnum { + get { return optionalForeignEnum_; } + set { + optionalForeignEnum_ = value; + } + } + + /// Field number for the "optional_string_piece" field. 
+ public const int OptionalStringPieceFieldNumber = 24; + private string optionalStringPiece_ = ""; + public string OptionalStringPiece { + get { return optionalStringPiece_; } + set { + optionalStringPiece_ = pb::ProtoPreconditions.CheckNotNull(value, "value"); + } + } + + /// Field number for the "optional_cord" field. + public const int OptionalCordFieldNumber = 25; + private string optionalCord_ = ""; + public string OptionalCord { + get { return optionalCord_; } + set { + optionalCord_ = pb::ProtoPreconditions.CheckNotNull(value, "value"); + } + } + + /// Field number for the "recursive_message" field. + public const int RecursiveMessageFieldNumber = 27; + private global::Conformance.TestAllTypes recursiveMessage_; + public global::Conformance.TestAllTypes RecursiveMessage { + get { return recursiveMessage_; } + set { + recursiveMessage_ = value; + } + } + + /// Field number for the "repeated_int32" field. + public const int RepeatedInt32FieldNumber = 31; + private static readonly pb::FieldCodec _repeated_repeatedInt32_codec + = pb::FieldCodec.ForInt32(250); + private readonly pbc::RepeatedField repeatedInt32_ = new pbc::RepeatedField(); + /// + /// Repeated + /// + public pbc::RepeatedField RepeatedInt32 { + get { return repeatedInt32_; } + } + + /// Field number for the "repeated_int64" field. + public const int RepeatedInt64FieldNumber = 32; + private static readonly pb::FieldCodec _repeated_repeatedInt64_codec + = pb::FieldCodec.ForInt64(258); + private readonly pbc::RepeatedField repeatedInt64_ = new pbc::RepeatedField(); + public pbc::RepeatedField RepeatedInt64 { + get { return repeatedInt64_; } + } + + /// Field number for the "repeated_uint32" field. + public const int RepeatedUint32FieldNumber = 33; + private static readonly pb::FieldCodec _repeated_repeatedUint32_codec + = pb::FieldCodec.ForUInt32(266); + private readonly pbc::RepeatedField repeatedUint32_ = new pbc::RepeatedField(); + public pbc::RepeatedField RepeatedUint32 { + get { return repeatedUint32_; } + } + + /// Field number for the "repeated_uint64" field. + public const int RepeatedUint64FieldNumber = 34; + private static readonly pb::FieldCodec _repeated_repeatedUint64_codec + = pb::FieldCodec.ForUInt64(274); + private readonly pbc::RepeatedField repeatedUint64_ = new pbc::RepeatedField(); + public pbc::RepeatedField RepeatedUint64 { + get { return repeatedUint64_; } + } + + /// Field number for the "repeated_sint32" field. + public const int RepeatedSint32FieldNumber = 35; + private static readonly pb::FieldCodec _repeated_repeatedSint32_codec + = pb::FieldCodec.ForSInt32(282); + private readonly pbc::RepeatedField repeatedSint32_ = new pbc::RepeatedField(); + public pbc::RepeatedField RepeatedSint32 { + get { return repeatedSint32_; } + } + + /// Field number for the "repeated_sint64" field. + public const int RepeatedSint64FieldNumber = 36; + private static readonly pb::FieldCodec _repeated_repeatedSint64_codec + = pb::FieldCodec.ForSInt64(290); + private readonly pbc::RepeatedField repeatedSint64_ = new pbc::RepeatedField(); + public pbc::RepeatedField RepeatedSint64 { + get { return repeatedSint64_; } + } + + /// Field number for the "repeated_fixed32" field. 
+ public const int RepeatedFixed32FieldNumber = 37; + private static readonly pb::FieldCodec _repeated_repeatedFixed32_codec + = pb::FieldCodec.ForFixed32(298); + private readonly pbc::RepeatedField repeatedFixed32_ = new pbc::RepeatedField(); + public pbc::RepeatedField RepeatedFixed32 { + get { return repeatedFixed32_; } + } + + /// Field number for the "repeated_fixed64" field. + public const int RepeatedFixed64FieldNumber = 38; + private static readonly pb::FieldCodec _repeated_repeatedFixed64_codec + = pb::FieldCodec.ForFixed64(306); + private readonly pbc::RepeatedField repeatedFixed64_ = new pbc::RepeatedField(); + public pbc::RepeatedField RepeatedFixed64 { + get { return repeatedFixed64_; } + } + + /// Field number for the "repeated_sfixed32" field. + public const int RepeatedSfixed32FieldNumber = 39; + private static readonly pb::FieldCodec _repeated_repeatedSfixed32_codec + = pb::FieldCodec.ForSFixed32(314); + private readonly pbc::RepeatedField repeatedSfixed32_ = new pbc::RepeatedField(); + public pbc::RepeatedField RepeatedSfixed32 { + get { return repeatedSfixed32_; } + } + + /// Field number for the "repeated_sfixed64" field. + public const int RepeatedSfixed64FieldNumber = 40; + private static readonly pb::FieldCodec _repeated_repeatedSfixed64_codec + = pb::FieldCodec.ForSFixed64(322); + private readonly pbc::RepeatedField repeatedSfixed64_ = new pbc::RepeatedField(); + public pbc::RepeatedField RepeatedSfixed64 { + get { return repeatedSfixed64_; } + } + + /// Field number for the "repeated_float" field. + public const int RepeatedFloatFieldNumber = 41; + private static readonly pb::FieldCodec _repeated_repeatedFloat_codec + = pb::FieldCodec.ForFloat(330); + private readonly pbc::RepeatedField repeatedFloat_ = new pbc::RepeatedField(); + public pbc::RepeatedField RepeatedFloat { + get { return repeatedFloat_; } + } + + /// Field number for the "repeated_double" field. + public const int RepeatedDoubleFieldNumber = 42; + private static readonly pb::FieldCodec _repeated_repeatedDouble_codec + = pb::FieldCodec.ForDouble(338); + private readonly pbc::RepeatedField repeatedDouble_ = new pbc::RepeatedField(); + public pbc::RepeatedField RepeatedDouble { + get { return repeatedDouble_; } + } + + /// Field number for the "repeated_bool" field. + public const int RepeatedBoolFieldNumber = 43; + private static readonly pb::FieldCodec _repeated_repeatedBool_codec + = pb::FieldCodec.ForBool(346); + private readonly pbc::RepeatedField repeatedBool_ = new pbc::RepeatedField(); + public pbc::RepeatedField RepeatedBool { + get { return repeatedBool_; } + } + + /// Field number for the "repeated_string" field. + public const int RepeatedStringFieldNumber = 44; + private static readonly pb::FieldCodec _repeated_repeatedString_codec + = pb::FieldCodec.ForString(354); + private readonly pbc::RepeatedField repeatedString_ = new pbc::RepeatedField(); + public pbc::RepeatedField RepeatedString { + get { return repeatedString_; } + } + + /// Field number for the "repeated_bytes" field. + public const int RepeatedBytesFieldNumber = 45; + private static readonly pb::FieldCodec _repeated_repeatedBytes_codec + = pb::FieldCodec.ForBytes(362); + private readonly pbc::RepeatedField repeatedBytes_ = new pbc::RepeatedField(); + public pbc::RepeatedField RepeatedBytes { + get { return repeatedBytes_; } + } + + /// Field number for the "repeated_nested_message" field. 
+ public const int RepeatedNestedMessageFieldNumber = 48; + private static readonly pb::FieldCodec _repeated_repeatedNestedMessage_codec + = pb::FieldCodec.ForMessage(386, global::Conformance.TestAllTypes.Types.NestedMessage.Parser); + private readonly pbc::RepeatedField repeatedNestedMessage_ = new pbc::RepeatedField(); + public pbc::RepeatedField RepeatedNestedMessage { + get { return repeatedNestedMessage_; } + } + + /// Field number for the "repeated_foreign_message" field. + public const int RepeatedForeignMessageFieldNumber = 49; + private static readonly pb::FieldCodec _repeated_repeatedForeignMessage_codec + = pb::FieldCodec.ForMessage(394, global::Conformance.ForeignMessage.Parser); + private readonly pbc::RepeatedField repeatedForeignMessage_ = new pbc::RepeatedField(); + public pbc::RepeatedField RepeatedForeignMessage { + get { return repeatedForeignMessage_; } + } + + /// Field number for the "repeated_nested_enum" field. + public const int RepeatedNestedEnumFieldNumber = 51; + private static readonly pb::FieldCodec _repeated_repeatedNestedEnum_codec + = pb::FieldCodec.ForEnum(410, x => (int) x, x => (global::Conformance.TestAllTypes.Types.NestedEnum) x); + private readonly pbc::RepeatedField repeatedNestedEnum_ = new pbc::RepeatedField(); + public pbc::RepeatedField RepeatedNestedEnum { + get { return repeatedNestedEnum_; } + } + + /// Field number for the "repeated_foreign_enum" field. + public const int RepeatedForeignEnumFieldNumber = 52; + private static readonly pb::FieldCodec _repeated_repeatedForeignEnum_codec + = pb::FieldCodec.ForEnum(418, x => (int) x, x => (global::Conformance.ForeignEnum) x); + private readonly pbc::RepeatedField repeatedForeignEnum_ = new pbc::RepeatedField(); + public pbc::RepeatedField RepeatedForeignEnum { + get { return repeatedForeignEnum_; } + } + + /// Field number for the "repeated_string_piece" field. + public const int RepeatedStringPieceFieldNumber = 54; + private static readonly pb::FieldCodec _repeated_repeatedStringPiece_codec + = pb::FieldCodec.ForString(434); + private readonly pbc::RepeatedField repeatedStringPiece_ = new pbc::RepeatedField(); + public pbc::RepeatedField RepeatedStringPiece { + get { return repeatedStringPiece_; } + } + + /// Field number for the "repeated_cord" field. + public const int RepeatedCordFieldNumber = 55; + private static readonly pb::FieldCodec _repeated_repeatedCord_codec + = pb::FieldCodec.ForString(442); + private readonly pbc::RepeatedField repeatedCord_ = new pbc::RepeatedField(); + public pbc::RepeatedField RepeatedCord { + get { return repeatedCord_; } + } + + /// Field number for the "map_int32_int32" field. + public const int MapInt32Int32FieldNumber = 56; + private static readonly pbc::MapField.Codec _map_mapInt32Int32_codec + = new pbc::MapField.Codec(pb::FieldCodec.ForInt32(8), pb::FieldCodec.ForInt32(16), 450); + private readonly pbc::MapField mapInt32Int32_ = new pbc::MapField(); + /// + /// Map + /// + public pbc::MapField MapInt32Int32 { + get { return mapInt32Int32_; } + } + + /// Field number for the "map_int64_int64" field. + public const int MapInt64Int64FieldNumber = 57; + private static readonly pbc::MapField.Codec _map_mapInt64Int64_codec + = new pbc::MapField.Codec(pb::FieldCodec.ForInt64(8), pb::FieldCodec.ForInt64(16), 458); + private readonly pbc::MapField mapInt64Int64_ = new pbc::MapField(); + public pbc::MapField MapInt64Int64 { + get { return mapInt64Int64_; } + } + + /// Field number for the "map_uint32_uint32" field. 
+ public const int MapUint32Uint32FieldNumber = 58; + private static readonly pbc::MapField.Codec _map_mapUint32Uint32_codec + = new pbc::MapField.Codec(pb::FieldCodec.ForUInt32(8), pb::FieldCodec.ForUInt32(16), 466); + private readonly pbc::MapField mapUint32Uint32_ = new pbc::MapField(); + public pbc::MapField MapUint32Uint32 { + get { return mapUint32Uint32_; } + } + + /// Field number for the "map_uint64_uint64" field. + public const int MapUint64Uint64FieldNumber = 59; + private static readonly pbc::MapField.Codec _map_mapUint64Uint64_codec + = new pbc::MapField.Codec(pb::FieldCodec.ForUInt64(8), pb::FieldCodec.ForUInt64(16), 474); + private readonly pbc::MapField mapUint64Uint64_ = new pbc::MapField(); + public pbc::MapField MapUint64Uint64 { + get { return mapUint64Uint64_; } + } + + /// Field number for the "map_sint32_sint32" field. + public const int MapSint32Sint32FieldNumber = 60; + private static readonly pbc::MapField.Codec _map_mapSint32Sint32_codec + = new pbc::MapField.Codec(pb::FieldCodec.ForSInt32(8), pb::FieldCodec.ForSInt32(16), 482); + private readonly pbc::MapField mapSint32Sint32_ = new pbc::MapField(); + public pbc::MapField MapSint32Sint32 { + get { return mapSint32Sint32_; } + } + + /// Field number for the "map_sint64_sint64" field. + public const int MapSint64Sint64FieldNumber = 61; + private static readonly pbc::MapField.Codec _map_mapSint64Sint64_codec + = new pbc::MapField.Codec(pb::FieldCodec.ForSInt64(8), pb::FieldCodec.ForSInt64(16), 490); + private readonly pbc::MapField mapSint64Sint64_ = new pbc::MapField(); + public pbc::MapField MapSint64Sint64 { + get { return mapSint64Sint64_; } + } + + /// Field number for the "map_fixed32_fixed32" field. + public const int MapFixed32Fixed32FieldNumber = 62; + private static readonly pbc::MapField.Codec _map_mapFixed32Fixed32_codec + = new pbc::MapField.Codec(pb::FieldCodec.ForFixed32(13), pb::FieldCodec.ForFixed32(21), 498); + private readonly pbc::MapField mapFixed32Fixed32_ = new pbc::MapField(); + public pbc::MapField MapFixed32Fixed32 { + get { return mapFixed32Fixed32_; } + } + + /// Field number for the "map_fixed64_fixed64" field. + public const int MapFixed64Fixed64FieldNumber = 63; + private static readonly pbc::MapField.Codec _map_mapFixed64Fixed64_codec + = new pbc::MapField.Codec(pb::FieldCodec.ForFixed64(9), pb::FieldCodec.ForFixed64(17), 506); + private readonly pbc::MapField mapFixed64Fixed64_ = new pbc::MapField(); + public pbc::MapField MapFixed64Fixed64 { + get { return mapFixed64Fixed64_; } + } + + /// Field number for the "map_sfixed32_sfixed32" field. + public const int MapSfixed32Sfixed32FieldNumber = 64; + private static readonly pbc::MapField.Codec _map_mapSfixed32Sfixed32_codec + = new pbc::MapField.Codec(pb::FieldCodec.ForSFixed32(13), pb::FieldCodec.ForSFixed32(21), 514); + private readonly pbc::MapField mapSfixed32Sfixed32_ = new pbc::MapField(); + public pbc::MapField MapSfixed32Sfixed32 { + get { return mapSfixed32Sfixed32_; } + } + + /// Field number for the "map_sfixed64_sfixed64" field. + public const int MapSfixed64Sfixed64FieldNumber = 65; + private static readonly pbc::MapField.Codec _map_mapSfixed64Sfixed64_codec + = new pbc::MapField.Codec(pb::FieldCodec.ForSFixed64(9), pb::FieldCodec.ForSFixed64(17), 522); + private readonly pbc::MapField mapSfixed64Sfixed64_ = new pbc::MapField(); + public pbc::MapField MapSfixed64Sfixed64 { + get { return mapSfixed64Sfixed64_; } + } + + /// Field number for the "map_int32_float" field. 
+ public const int MapInt32FloatFieldNumber = 66; + private static readonly pbc::MapField.Codec _map_mapInt32Float_codec + = new pbc::MapField.Codec(pb::FieldCodec.ForInt32(8), pb::FieldCodec.ForFloat(21), 530); + private readonly pbc::MapField mapInt32Float_ = new pbc::MapField(); + public pbc::MapField MapInt32Float { + get { return mapInt32Float_; } + } + + /// Field number for the "map_int32_double" field. + public const int MapInt32DoubleFieldNumber = 67; + private static readonly pbc::MapField.Codec _map_mapInt32Double_codec + = new pbc::MapField.Codec(pb::FieldCodec.ForInt32(8), pb::FieldCodec.ForDouble(17), 538); + private readonly pbc::MapField mapInt32Double_ = new pbc::MapField(); + public pbc::MapField MapInt32Double { + get { return mapInt32Double_; } + } + + /// Field number for the "map_bool_bool" field. + public const int MapBoolBoolFieldNumber = 68; + private static readonly pbc::MapField.Codec _map_mapBoolBool_codec + = new pbc::MapField.Codec(pb::FieldCodec.ForBool(8), pb::FieldCodec.ForBool(16), 546); + private readonly pbc::MapField mapBoolBool_ = new pbc::MapField(); + public pbc::MapField MapBoolBool { + get { return mapBoolBool_; } + } + + /// Field number for the "map_string_string" field. + public const int MapStringStringFieldNumber = 69; + private static readonly pbc::MapField.Codec _map_mapStringString_codec + = new pbc::MapField.Codec(pb::FieldCodec.ForString(10), pb::FieldCodec.ForString(18), 554); + private readonly pbc::MapField mapStringString_ = new pbc::MapField(); + public pbc::MapField MapStringString { + get { return mapStringString_; } + } + + /// Field number for the "map_string_bytes" field. + public const int MapStringBytesFieldNumber = 70; + private static readonly pbc::MapField.Codec _map_mapStringBytes_codec + = new pbc::MapField.Codec(pb::FieldCodec.ForString(10), pb::FieldCodec.ForBytes(18), 562); + private readonly pbc::MapField mapStringBytes_ = new pbc::MapField(); + public pbc::MapField MapStringBytes { + get { return mapStringBytes_; } + } + + /// Field number for the "map_string_nested_message" field. + public const int MapStringNestedMessageFieldNumber = 71; + private static readonly pbc::MapField.Codec _map_mapStringNestedMessage_codec + = new pbc::MapField.Codec(pb::FieldCodec.ForString(10), pb::FieldCodec.ForMessage(18, global::Conformance.TestAllTypes.Types.NestedMessage.Parser), 570); + private readonly pbc::MapField mapStringNestedMessage_ = new pbc::MapField(); + public pbc::MapField MapStringNestedMessage { + get { return mapStringNestedMessage_; } + } + + /// Field number for the "map_string_foreign_message" field. + public const int MapStringForeignMessageFieldNumber = 72; + private static readonly pbc::MapField.Codec _map_mapStringForeignMessage_codec + = new pbc::MapField.Codec(pb::FieldCodec.ForString(10), pb::FieldCodec.ForMessage(18, global::Conformance.ForeignMessage.Parser), 578); + private readonly pbc::MapField mapStringForeignMessage_ = new pbc::MapField(); + public pbc::MapField MapStringForeignMessage { + get { return mapStringForeignMessage_; } + } + + /// Field number for the "map_string_nested_enum" field. 
+ public const int MapStringNestedEnumFieldNumber = 73; + private static readonly pbc::MapField.Codec _map_mapStringNestedEnum_codec + = new pbc::MapField.Codec(pb::FieldCodec.ForString(10), pb::FieldCodec.ForEnum(16, x => (int) x, x => (global::Conformance.TestAllTypes.Types.NestedEnum) x), 586); + private readonly pbc::MapField mapStringNestedEnum_ = new pbc::MapField(); + public pbc::MapField MapStringNestedEnum { + get { return mapStringNestedEnum_; } + } + + /// Field number for the "map_string_foreign_enum" field. + public const int MapStringForeignEnumFieldNumber = 74; + private static readonly pbc::MapField.Codec _map_mapStringForeignEnum_codec + = new pbc::MapField.Codec(pb::FieldCodec.ForString(10), pb::FieldCodec.ForEnum(16, x => (int) x, x => (global::Conformance.ForeignEnum) x), 594); + private readonly pbc::MapField mapStringForeignEnum_ = new pbc::MapField(); + public pbc::MapField MapStringForeignEnum { + get { return mapStringForeignEnum_; } + } + + /// Field number for the "oneof_uint32" field. + public const int OneofUint32FieldNumber = 111; + public uint OneofUint32 { + get { return oneofFieldCase_ == OneofFieldOneofCase.OneofUint32 ? (uint) oneofField_ : 0; } + set { + oneofField_ = value; + oneofFieldCase_ = OneofFieldOneofCase.OneofUint32; + } + } + + /// Field number for the "oneof_nested_message" field. + public const int OneofNestedMessageFieldNumber = 112; + public global::Conformance.TestAllTypes.Types.NestedMessage OneofNestedMessage { + get { return oneofFieldCase_ == OneofFieldOneofCase.OneofNestedMessage ? (global::Conformance.TestAllTypes.Types.NestedMessage) oneofField_ : null; } + set { + oneofField_ = value; + oneofFieldCase_ = value == null ? OneofFieldOneofCase.None : OneofFieldOneofCase.OneofNestedMessage; + } + } + + /// Field number for the "oneof_string" field. + public const int OneofStringFieldNumber = 113; + public string OneofString { + get { return oneofFieldCase_ == OneofFieldOneofCase.OneofString ? (string) oneofField_ : ""; } + set { + oneofField_ = pb::ProtoPreconditions.CheckNotNull(value, "value"); + oneofFieldCase_ = OneofFieldOneofCase.OneofString; + } + } + + /// Field number for the "oneof_bytes" field. + public const int OneofBytesFieldNumber = 114; + public pb::ByteString OneofBytes { + get { return oneofFieldCase_ == OneofFieldOneofCase.OneofBytes ? (pb::ByteString) oneofField_ : pb::ByteString.Empty; } + set { + oneofField_ = pb::ProtoPreconditions.CheckNotNull(value, "value"); + oneofFieldCase_ = OneofFieldOneofCase.OneofBytes; + } + } + + /// Field number for the "optional_bool_wrapper" field. + public const int OptionalBoolWrapperFieldNumber = 201; + private static readonly pb::FieldCodec _single_optionalBoolWrapper_codec = pb::FieldCodec.ForStructWrapper(1610); + private bool? optionalBoolWrapper_; + /// + /// Well-known types + /// + public bool? OptionalBoolWrapper { + get { return optionalBoolWrapper_; } + set { + optionalBoolWrapper_ = value; + } + } + + /// Field number for the "optional_int32_wrapper" field. + public const int OptionalInt32WrapperFieldNumber = 202; + private static readonly pb::FieldCodec _single_optionalInt32Wrapper_codec = pb::FieldCodec.ForStructWrapper(1618); + private int? optionalInt32Wrapper_; + public int? OptionalInt32Wrapper { + get { return optionalInt32Wrapper_; } + set { + optionalInt32Wrapper_ = value; + } + } + + /// Field number for the "optional_int64_wrapper" field. 
+ public const int OptionalInt64WrapperFieldNumber = 203; + private static readonly pb::FieldCodec _single_optionalInt64Wrapper_codec = pb::FieldCodec.ForStructWrapper(1626); + private long? optionalInt64Wrapper_; + public long? OptionalInt64Wrapper { + get { return optionalInt64Wrapper_; } + set { + optionalInt64Wrapper_ = value; + } + } + + /// Field number for the "optional_uint32_wrapper" field. + public const int OptionalUint32WrapperFieldNumber = 204; + private static readonly pb::FieldCodec _single_optionalUint32Wrapper_codec = pb::FieldCodec.ForStructWrapper(1634); + private uint? optionalUint32Wrapper_; + public uint? OptionalUint32Wrapper { + get { return optionalUint32Wrapper_; } + set { + optionalUint32Wrapper_ = value; + } + } + + /// Field number for the "optional_uint64_wrapper" field. + public const int OptionalUint64WrapperFieldNumber = 205; + private static readonly pb::FieldCodec _single_optionalUint64Wrapper_codec = pb::FieldCodec.ForStructWrapper(1642); + private ulong? optionalUint64Wrapper_; + public ulong? OptionalUint64Wrapper { + get { return optionalUint64Wrapper_; } + set { + optionalUint64Wrapper_ = value; + } + } + + /// Field number for the "optional_float_wrapper" field. + public const int OptionalFloatWrapperFieldNumber = 206; + private static readonly pb::FieldCodec _single_optionalFloatWrapper_codec = pb::FieldCodec.ForStructWrapper(1650); + private float? optionalFloatWrapper_; + public float? OptionalFloatWrapper { + get { return optionalFloatWrapper_; } + set { + optionalFloatWrapper_ = value; + } + } + + /// Field number for the "optional_double_wrapper" field. + public const int OptionalDoubleWrapperFieldNumber = 207; + private static readonly pb::FieldCodec _single_optionalDoubleWrapper_codec = pb::FieldCodec.ForStructWrapper(1658); + private double? optionalDoubleWrapper_; + public double? OptionalDoubleWrapper { + get { return optionalDoubleWrapper_; } + set { + optionalDoubleWrapper_ = value; + } + } + + /// Field number for the "optional_string_wrapper" field. + public const int OptionalStringWrapperFieldNumber = 208; + private static readonly pb::FieldCodec _single_optionalStringWrapper_codec = pb::FieldCodec.ForClassWrapper(1666); + private string optionalStringWrapper_; + public string OptionalStringWrapper { + get { return optionalStringWrapper_; } + set { + optionalStringWrapper_ = value; + } + } + + /// Field number for the "optional_bytes_wrapper" field. + public const int OptionalBytesWrapperFieldNumber = 209; + private static readonly pb::FieldCodec _single_optionalBytesWrapper_codec = pb::FieldCodec.ForClassWrapper(1674); + private pb::ByteString optionalBytesWrapper_; + public pb::ByteString OptionalBytesWrapper { + get { return optionalBytesWrapper_; } + set { + optionalBytesWrapper_ = value; + } + } + + /// Field number for the "repeated_bool_wrapper" field. + public const int RepeatedBoolWrapperFieldNumber = 211; + private static readonly pb::FieldCodec _repeated_repeatedBoolWrapper_codec + = pb::FieldCodec.ForStructWrapper(1690); + private readonly pbc::RepeatedField repeatedBoolWrapper_ = new pbc::RepeatedField(); + public pbc::RepeatedField RepeatedBoolWrapper { + get { return repeatedBoolWrapper_; } + } + + /// Field number for the "repeated_int32_wrapper" field. 
+ public const int RepeatedInt32WrapperFieldNumber = 212; + private static readonly pb::FieldCodec _repeated_repeatedInt32Wrapper_codec + = pb::FieldCodec.ForStructWrapper(1698); + private readonly pbc::RepeatedField repeatedInt32Wrapper_ = new pbc::RepeatedField(); + public pbc::RepeatedField RepeatedInt32Wrapper { + get { return repeatedInt32Wrapper_; } + } + + /// Field number for the "repeated_int64_wrapper" field. + public const int RepeatedInt64WrapperFieldNumber = 213; + private static readonly pb::FieldCodec _repeated_repeatedInt64Wrapper_codec + = pb::FieldCodec.ForStructWrapper(1706); + private readonly pbc::RepeatedField repeatedInt64Wrapper_ = new pbc::RepeatedField(); + public pbc::RepeatedField RepeatedInt64Wrapper { + get { return repeatedInt64Wrapper_; } + } + + /// Field number for the "repeated_uint32_wrapper" field. + public const int RepeatedUint32WrapperFieldNumber = 214; + private static readonly pb::FieldCodec _repeated_repeatedUint32Wrapper_codec + = pb::FieldCodec.ForStructWrapper(1714); + private readonly pbc::RepeatedField repeatedUint32Wrapper_ = new pbc::RepeatedField(); + public pbc::RepeatedField RepeatedUint32Wrapper { + get { return repeatedUint32Wrapper_; } + } + + /// Field number for the "repeated_uint64_wrapper" field. + public const int RepeatedUint64WrapperFieldNumber = 215; + private static readonly pb::FieldCodec _repeated_repeatedUint64Wrapper_codec + = pb::FieldCodec.ForStructWrapper(1722); + private readonly pbc::RepeatedField repeatedUint64Wrapper_ = new pbc::RepeatedField(); + public pbc::RepeatedField RepeatedUint64Wrapper { + get { return repeatedUint64Wrapper_; } + } + + /// Field number for the "repeated_float_wrapper" field. + public const int RepeatedFloatWrapperFieldNumber = 216; + private static readonly pb::FieldCodec _repeated_repeatedFloatWrapper_codec + = pb::FieldCodec.ForStructWrapper(1730); + private readonly pbc::RepeatedField repeatedFloatWrapper_ = new pbc::RepeatedField(); + public pbc::RepeatedField RepeatedFloatWrapper { + get { return repeatedFloatWrapper_; } + } + + /// Field number for the "repeated_double_wrapper" field. + public const int RepeatedDoubleWrapperFieldNumber = 217; + private static readonly pb::FieldCodec _repeated_repeatedDoubleWrapper_codec + = pb::FieldCodec.ForStructWrapper(1738); + private readonly pbc::RepeatedField repeatedDoubleWrapper_ = new pbc::RepeatedField(); + public pbc::RepeatedField RepeatedDoubleWrapper { + get { return repeatedDoubleWrapper_; } + } + + /// Field number for the "repeated_string_wrapper" field. + public const int RepeatedStringWrapperFieldNumber = 218; + private static readonly pb::FieldCodec _repeated_repeatedStringWrapper_codec + = pb::FieldCodec.ForClassWrapper(1746); + private readonly pbc::RepeatedField repeatedStringWrapper_ = new pbc::RepeatedField(); + public pbc::RepeatedField RepeatedStringWrapper { + get { return repeatedStringWrapper_; } + } + + /// Field number for the "repeated_bytes_wrapper" field. + public const int RepeatedBytesWrapperFieldNumber = 219; + private static readonly pb::FieldCodec _repeated_repeatedBytesWrapper_codec + = pb::FieldCodec.ForClassWrapper(1754); + private readonly pbc::RepeatedField repeatedBytesWrapper_ = new pbc::RepeatedField(); + public pbc::RepeatedField RepeatedBytesWrapper { + get { return repeatedBytesWrapper_; } + } + + /// Field number for the "optional_duration" field. 
+ public const int OptionalDurationFieldNumber = 301; + private global::Google.Protobuf.WellKnownTypes.Duration optionalDuration_; + public global::Google.Protobuf.WellKnownTypes.Duration OptionalDuration { + get { return optionalDuration_; } + set { + optionalDuration_ = value; + } + } + + /// Field number for the "optional_timestamp" field. + public const int OptionalTimestampFieldNumber = 302; + private global::Google.Protobuf.WellKnownTypes.Timestamp optionalTimestamp_; + public global::Google.Protobuf.WellKnownTypes.Timestamp OptionalTimestamp { + get { return optionalTimestamp_; } + set { + optionalTimestamp_ = value; + } + } + + /// Field number for the "optional_field_mask" field. + public const int OptionalFieldMaskFieldNumber = 303; + private global::Google.Protobuf.WellKnownTypes.FieldMask optionalFieldMask_; + public global::Google.Protobuf.WellKnownTypes.FieldMask OptionalFieldMask { + get { return optionalFieldMask_; } + set { + optionalFieldMask_ = value; + } + } + + /// Field number for the "optional_struct" field. + public const int OptionalStructFieldNumber = 304; + private global::Google.Protobuf.WellKnownTypes.Struct optionalStruct_; + public global::Google.Protobuf.WellKnownTypes.Struct OptionalStruct { + get { return optionalStruct_; } + set { + optionalStruct_ = value; + } + } + + /// Field number for the "optional_any" field. + public const int OptionalAnyFieldNumber = 305; + private global::Google.Protobuf.WellKnownTypes.Any optionalAny_; + public global::Google.Protobuf.WellKnownTypes.Any OptionalAny { + get { return optionalAny_; } + set { + optionalAny_ = value; + } + } + + /// Field number for the "optional_value" field. + public const int OptionalValueFieldNumber = 306; + private global::Google.Protobuf.WellKnownTypes.Value optionalValue_; + public global::Google.Protobuf.WellKnownTypes.Value OptionalValue { + get { return optionalValue_; } + set { + optionalValue_ = value; + } + } + + /// Field number for the "repeated_duration" field. + public const int RepeatedDurationFieldNumber = 311; + private static readonly pb::FieldCodec _repeated_repeatedDuration_codec + = pb::FieldCodec.ForMessage(2490, global::Google.Protobuf.WellKnownTypes.Duration.Parser); + private readonly pbc::RepeatedField repeatedDuration_ = new pbc::RepeatedField(); + public pbc::RepeatedField RepeatedDuration { + get { return repeatedDuration_; } + } + + /// Field number for the "repeated_timestamp" field. + public const int RepeatedTimestampFieldNumber = 312; + private static readonly pb::FieldCodec _repeated_repeatedTimestamp_codec + = pb::FieldCodec.ForMessage(2498, global::Google.Protobuf.WellKnownTypes.Timestamp.Parser); + private readonly pbc::RepeatedField repeatedTimestamp_ = new pbc::RepeatedField(); + public pbc::RepeatedField RepeatedTimestamp { + get { return repeatedTimestamp_; } + } + + /// Field number for the "repeated_fieldmask" field. + public const int RepeatedFieldmaskFieldNumber = 313; + private static readonly pb::FieldCodec _repeated_repeatedFieldmask_codec + = pb::FieldCodec.ForMessage(2506, global::Google.Protobuf.WellKnownTypes.FieldMask.Parser); + private readonly pbc::RepeatedField repeatedFieldmask_ = new pbc::RepeatedField(); + public pbc::RepeatedField RepeatedFieldmask { + get { return repeatedFieldmask_; } + } + + /// Field number for the "repeated_struct" field. 
+ public const int RepeatedStructFieldNumber = 324; + private static readonly pb::FieldCodec _repeated_repeatedStruct_codec + = pb::FieldCodec.ForMessage(2594, global::Google.Protobuf.WellKnownTypes.Struct.Parser); + private readonly pbc::RepeatedField repeatedStruct_ = new pbc::RepeatedField(); + public pbc::RepeatedField RepeatedStruct { + get { return repeatedStruct_; } + } + + /// Field number for the "repeated_any" field. + public const int RepeatedAnyFieldNumber = 315; + private static readonly pb::FieldCodec _repeated_repeatedAny_codec + = pb::FieldCodec.ForMessage(2522, global::Google.Protobuf.WellKnownTypes.Any.Parser); + private readonly pbc::RepeatedField repeatedAny_ = new pbc::RepeatedField(); + public pbc::RepeatedField RepeatedAny { + get { return repeatedAny_; } + } + + /// Field number for the "repeated_value" field. + public const int RepeatedValueFieldNumber = 316; + private static readonly pb::FieldCodec _repeated_repeatedValue_codec + = pb::FieldCodec.ForMessage(2530, global::Google.Protobuf.WellKnownTypes.Value.Parser); + private readonly pbc::RepeatedField repeatedValue_ = new pbc::RepeatedField(); + public pbc::RepeatedField RepeatedValue { + get { return repeatedValue_; } + } + + /// Field number for the "fieldname1" field. + public const int Fieldname1FieldNumber = 401; + private int fieldname1_; + /// + /// Test field-name-to-JSON-name convention. + /// + public int Fieldname1 { + get { return fieldname1_; } + set { + fieldname1_ = value; + } + } + + /// Field number for the "field_name2" field. + public const int FieldName2FieldNumber = 402; + private int fieldName2_; + public int FieldName2 { + get { return fieldName2_; } + set { + fieldName2_ = value; + } + } + + /// Field number for the "_field_name3" field. + public const int FieldName3FieldNumber = 403; + private int FieldName3_; + public int FieldName3 { + get { return FieldName3_; } + set { + FieldName3_ = value; + } + } + + /// Field number for the "field__name4_" field. + public const int FieldName4FieldNumber = 404; + private int fieldName4_; + public int FieldName4 { + get { return fieldName4_; } + set { + fieldName4_ = value; + } + } + + /// Field number for the "field0name5" field. + public const int Field0Name5FieldNumber = 405; + private int field0Name5_; + public int Field0Name5 { + get { return field0Name5_; } + set { + field0Name5_ = value; + } + } + + /// Field number for the "field_0_name6" field. + public const int Field0Name6FieldNumber = 406; + private int field0Name6_; + public int Field0Name6 { + get { return field0Name6_; } + set { + field0Name6_ = value; + } + } + + /// Field number for the "fieldName7" field. + public const int FieldName7FieldNumber = 407; + private int fieldName7_; + public int FieldName7 { + get { return fieldName7_; } + set { + fieldName7_ = value; + } + } + + /// Field number for the "FieldName8" field. + public const int FieldName8FieldNumber = 408; + private int fieldName8_; + public int FieldName8 { + get { return fieldName8_; } + set { + fieldName8_ = value; + } + } + + /// Field number for the "field_Name9" field. + public const int FieldName9FieldNumber = 409; + private int fieldName9_; + public int FieldName9 { + get { return fieldName9_; } + set { + fieldName9_ = value; + } + } + + /// Field number for the "Field_Name10" field. + public const int FieldName10FieldNumber = 410; + private int fieldName10_; + public int FieldName10 { + get { return fieldName10_; } + set { + fieldName10_ = value; + } + } + + /// Field number for the "FIELD_NAME11" field. 
+ public const int FIELDNAME11FieldNumber = 411; + private int fIELDNAME11_; + public int FIELDNAME11 { + get { return fIELDNAME11_; } + set { + fIELDNAME11_ = value; + } + } + + /// Field number for the "FIELD_name12" field. + public const int FIELDName12FieldNumber = 412; + private int fIELDName12_; + public int FIELDName12 { + get { return fIELDName12_; } + set { + fIELDName12_ = value; + } + } + + private object oneofField_; + /// Enum of possible cases for the "oneof_field" oneof. + public enum OneofFieldOneofCase { + None = 0, + OneofUint32 = 111, + OneofNestedMessage = 112, + OneofString = 113, + OneofBytes = 114, + } + private OneofFieldOneofCase oneofFieldCase_ = OneofFieldOneofCase.None; + public OneofFieldOneofCase OneofFieldCase { + get { return oneofFieldCase_; } + } + + public void ClearOneofField() { + oneofFieldCase_ = OneofFieldOneofCase.None; + oneofField_ = null; + } + + public override bool Equals(object other) { + return Equals(other as TestAllTypes); + } + + public bool Equals(TestAllTypes other) { + if (ReferenceEquals(other, null)) { + return false; + } + if (ReferenceEquals(other, this)) { + return true; + } + if (OptionalInt32 != other.OptionalInt32) return false; + if (OptionalInt64 != other.OptionalInt64) return false; + if (OptionalUint32 != other.OptionalUint32) return false; + if (OptionalUint64 != other.OptionalUint64) return false; + if (OptionalSint32 != other.OptionalSint32) return false; + if (OptionalSint64 != other.OptionalSint64) return false; + if (OptionalFixed32 != other.OptionalFixed32) return false; + if (OptionalFixed64 != other.OptionalFixed64) return false; + if (OptionalSfixed32 != other.OptionalSfixed32) return false; + if (OptionalSfixed64 != other.OptionalSfixed64) return false; + if (OptionalFloat != other.OptionalFloat) return false; + if (OptionalDouble != other.OptionalDouble) return false; + if (OptionalBool != other.OptionalBool) return false; + if (OptionalString != other.OptionalString) return false; + if (OptionalBytes != other.OptionalBytes) return false; + if (!object.Equals(OptionalNestedMessage, other.OptionalNestedMessage)) return false; + if (!object.Equals(OptionalForeignMessage, other.OptionalForeignMessage)) return false; + if (OptionalNestedEnum != other.OptionalNestedEnum) return false; + if (OptionalForeignEnum != other.OptionalForeignEnum) return false; + if (OptionalStringPiece != other.OptionalStringPiece) return false; + if (OptionalCord != other.OptionalCord) return false; + if (!object.Equals(RecursiveMessage, other.RecursiveMessage)) return false; + if(!repeatedInt32_.Equals(other.repeatedInt32_)) return false; + if(!repeatedInt64_.Equals(other.repeatedInt64_)) return false; + if(!repeatedUint32_.Equals(other.repeatedUint32_)) return false; + if(!repeatedUint64_.Equals(other.repeatedUint64_)) return false; + if(!repeatedSint32_.Equals(other.repeatedSint32_)) return false; + if(!repeatedSint64_.Equals(other.repeatedSint64_)) return false; + if(!repeatedFixed32_.Equals(other.repeatedFixed32_)) return false; + if(!repeatedFixed64_.Equals(other.repeatedFixed64_)) return false; + if(!repeatedSfixed32_.Equals(other.repeatedSfixed32_)) return false; + if(!repeatedSfixed64_.Equals(other.repeatedSfixed64_)) return false; + if(!repeatedFloat_.Equals(other.repeatedFloat_)) return false; + if(!repeatedDouble_.Equals(other.repeatedDouble_)) return false; + if(!repeatedBool_.Equals(other.repeatedBool_)) return false; + if(!repeatedString_.Equals(other.repeatedString_)) return false; + 
if(!repeatedBytes_.Equals(other.repeatedBytes_)) return false; + if(!repeatedNestedMessage_.Equals(other.repeatedNestedMessage_)) return false; + if(!repeatedForeignMessage_.Equals(other.repeatedForeignMessage_)) return false; + if(!repeatedNestedEnum_.Equals(other.repeatedNestedEnum_)) return false; + if(!repeatedForeignEnum_.Equals(other.repeatedForeignEnum_)) return false; + if(!repeatedStringPiece_.Equals(other.repeatedStringPiece_)) return false; + if(!repeatedCord_.Equals(other.repeatedCord_)) return false; + if (!MapInt32Int32.Equals(other.MapInt32Int32)) return false; + if (!MapInt64Int64.Equals(other.MapInt64Int64)) return false; + if (!MapUint32Uint32.Equals(other.MapUint32Uint32)) return false; + if (!MapUint64Uint64.Equals(other.MapUint64Uint64)) return false; + if (!MapSint32Sint32.Equals(other.MapSint32Sint32)) return false; + if (!MapSint64Sint64.Equals(other.MapSint64Sint64)) return false; + if (!MapFixed32Fixed32.Equals(other.MapFixed32Fixed32)) return false; + if (!MapFixed64Fixed64.Equals(other.MapFixed64Fixed64)) return false; + if (!MapSfixed32Sfixed32.Equals(other.MapSfixed32Sfixed32)) return false; + if (!MapSfixed64Sfixed64.Equals(other.MapSfixed64Sfixed64)) return false; + if (!MapInt32Float.Equals(other.MapInt32Float)) return false; + if (!MapInt32Double.Equals(other.MapInt32Double)) return false; + if (!MapBoolBool.Equals(other.MapBoolBool)) return false; + if (!MapStringString.Equals(other.MapStringString)) return false; + if (!MapStringBytes.Equals(other.MapStringBytes)) return false; + if (!MapStringNestedMessage.Equals(other.MapStringNestedMessage)) return false; + if (!MapStringForeignMessage.Equals(other.MapStringForeignMessage)) return false; + if (!MapStringNestedEnum.Equals(other.MapStringNestedEnum)) return false; + if (!MapStringForeignEnum.Equals(other.MapStringForeignEnum)) return false; + if (OneofUint32 != other.OneofUint32) return false; + if (!object.Equals(OneofNestedMessage, other.OneofNestedMessage)) return false; + if (OneofString != other.OneofString) return false; + if (OneofBytes != other.OneofBytes) return false; + if (OptionalBoolWrapper != other.OptionalBoolWrapper) return false; + if (OptionalInt32Wrapper != other.OptionalInt32Wrapper) return false; + if (OptionalInt64Wrapper != other.OptionalInt64Wrapper) return false; + if (OptionalUint32Wrapper != other.OptionalUint32Wrapper) return false; + if (OptionalUint64Wrapper != other.OptionalUint64Wrapper) return false; + if (OptionalFloatWrapper != other.OptionalFloatWrapper) return false; + if (OptionalDoubleWrapper != other.OptionalDoubleWrapper) return false; + if (OptionalStringWrapper != other.OptionalStringWrapper) return false; + if (OptionalBytesWrapper != other.OptionalBytesWrapper) return false; + if(!repeatedBoolWrapper_.Equals(other.repeatedBoolWrapper_)) return false; + if(!repeatedInt32Wrapper_.Equals(other.repeatedInt32Wrapper_)) return false; + if(!repeatedInt64Wrapper_.Equals(other.repeatedInt64Wrapper_)) return false; + if(!repeatedUint32Wrapper_.Equals(other.repeatedUint32Wrapper_)) return false; + if(!repeatedUint64Wrapper_.Equals(other.repeatedUint64Wrapper_)) return false; + if(!repeatedFloatWrapper_.Equals(other.repeatedFloatWrapper_)) return false; + if(!repeatedDoubleWrapper_.Equals(other.repeatedDoubleWrapper_)) return false; + if(!repeatedStringWrapper_.Equals(other.repeatedStringWrapper_)) return false; + if(!repeatedBytesWrapper_.Equals(other.repeatedBytesWrapper_)) return false; + if (!object.Equals(OptionalDuration, other.OptionalDuration)) return false; + if 
(!object.Equals(OptionalTimestamp, other.OptionalTimestamp)) return false; + if (!object.Equals(OptionalFieldMask, other.OptionalFieldMask)) return false; + if (!object.Equals(OptionalStruct, other.OptionalStruct)) return false; + if (!object.Equals(OptionalAny, other.OptionalAny)) return false; + if (!object.Equals(OptionalValue, other.OptionalValue)) return false; + if(!repeatedDuration_.Equals(other.repeatedDuration_)) return false; + if(!repeatedTimestamp_.Equals(other.repeatedTimestamp_)) return false; + if(!repeatedFieldmask_.Equals(other.repeatedFieldmask_)) return false; + if(!repeatedStruct_.Equals(other.repeatedStruct_)) return false; + if(!repeatedAny_.Equals(other.repeatedAny_)) return false; + if(!repeatedValue_.Equals(other.repeatedValue_)) return false; + if (Fieldname1 != other.Fieldname1) return false; + if (FieldName2 != other.FieldName2) return false; + if (FieldName3 != other.FieldName3) return false; + if (FieldName4 != other.FieldName4) return false; + if (Field0Name5 != other.Field0Name5) return false; + if (Field0Name6 != other.Field0Name6) return false; + if (FieldName7 != other.FieldName7) return false; + if (FieldName8 != other.FieldName8) return false; + if (FieldName9 != other.FieldName9) return false; + if (FieldName10 != other.FieldName10) return false; + if (FIELDNAME11 != other.FIELDNAME11) return false; + if (FIELDName12 != other.FIELDName12) return false; + if (OneofFieldCase != other.OneofFieldCase) return false; + return true; + } + + public override int GetHashCode() { + int hash = 1; + if (OptionalInt32 != 0) hash ^= OptionalInt32.GetHashCode(); + if (OptionalInt64 != 0L) hash ^= OptionalInt64.GetHashCode(); + if (OptionalUint32 != 0) hash ^= OptionalUint32.GetHashCode(); + if (OptionalUint64 != 0UL) hash ^= OptionalUint64.GetHashCode(); + if (OptionalSint32 != 0) hash ^= OptionalSint32.GetHashCode(); + if (OptionalSint64 != 0L) hash ^= OptionalSint64.GetHashCode(); + if (OptionalFixed32 != 0) hash ^= OptionalFixed32.GetHashCode(); + if (OptionalFixed64 != 0UL) hash ^= OptionalFixed64.GetHashCode(); + if (OptionalSfixed32 != 0) hash ^= OptionalSfixed32.GetHashCode(); + if (OptionalSfixed64 != 0L) hash ^= OptionalSfixed64.GetHashCode(); + if (OptionalFloat != 0F) hash ^= OptionalFloat.GetHashCode(); + if (OptionalDouble != 0D) hash ^= OptionalDouble.GetHashCode(); + if (OptionalBool != false) hash ^= OptionalBool.GetHashCode(); + if (OptionalString.Length != 0) hash ^= OptionalString.GetHashCode(); + if (OptionalBytes.Length != 0) hash ^= OptionalBytes.GetHashCode(); + if (optionalNestedMessage_ != null) hash ^= OptionalNestedMessage.GetHashCode(); + if (optionalForeignMessage_ != null) hash ^= OptionalForeignMessage.GetHashCode(); + if (OptionalNestedEnum != 0) hash ^= OptionalNestedEnum.GetHashCode(); + if (OptionalForeignEnum != 0) hash ^= OptionalForeignEnum.GetHashCode(); + if (OptionalStringPiece.Length != 0) hash ^= OptionalStringPiece.GetHashCode(); + if (OptionalCord.Length != 0) hash ^= OptionalCord.GetHashCode(); + if (recursiveMessage_ != null) hash ^= RecursiveMessage.GetHashCode(); + hash ^= repeatedInt32_.GetHashCode(); + hash ^= repeatedInt64_.GetHashCode(); + hash ^= repeatedUint32_.GetHashCode(); + hash ^= repeatedUint64_.GetHashCode(); + hash ^= repeatedSint32_.GetHashCode(); + hash ^= repeatedSint64_.GetHashCode(); + hash ^= repeatedFixed32_.GetHashCode(); + hash ^= repeatedFixed64_.GetHashCode(); + hash ^= repeatedSfixed32_.GetHashCode(); + hash ^= repeatedSfixed64_.GetHashCode(); + hash ^= repeatedFloat_.GetHashCode(); + hash ^= 
repeatedDouble_.GetHashCode(); + hash ^= repeatedBool_.GetHashCode(); + hash ^= repeatedString_.GetHashCode(); + hash ^= repeatedBytes_.GetHashCode(); + hash ^= repeatedNestedMessage_.GetHashCode(); + hash ^= repeatedForeignMessage_.GetHashCode(); + hash ^= repeatedNestedEnum_.GetHashCode(); + hash ^= repeatedForeignEnum_.GetHashCode(); + hash ^= repeatedStringPiece_.GetHashCode(); + hash ^= repeatedCord_.GetHashCode(); + hash ^= MapInt32Int32.GetHashCode(); + hash ^= MapInt64Int64.GetHashCode(); + hash ^= MapUint32Uint32.GetHashCode(); + hash ^= MapUint64Uint64.GetHashCode(); + hash ^= MapSint32Sint32.GetHashCode(); + hash ^= MapSint64Sint64.GetHashCode(); + hash ^= MapFixed32Fixed32.GetHashCode(); + hash ^= MapFixed64Fixed64.GetHashCode(); + hash ^= MapSfixed32Sfixed32.GetHashCode(); + hash ^= MapSfixed64Sfixed64.GetHashCode(); + hash ^= MapInt32Float.GetHashCode(); + hash ^= MapInt32Double.GetHashCode(); + hash ^= MapBoolBool.GetHashCode(); + hash ^= MapStringString.GetHashCode(); + hash ^= MapStringBytes.GetHashCode(); + hash ^= MapStringNestedMessage.GetHashCode(); + hash ^= MapStringForeignMessage.GetHashCode(); + hash ^= MapStringNestedEnum.GetHashCode(); + hash ^= MapStringForeignEnum.GetHashCode(); + if (oneofFieldCase_ == OneofFieldOneofCase.OneofUint32) hash ^= OneofUint32.GetHashCode(); + if (oneofFieldCase_ == OneofFieldOneofCase.OneofNestedMessage) hash ^= OneofNestedMessage.GetHashCode(); + if (oneofFieldCase_ == OneofFieldOneofCase.OneofString) hash ^= OneofString.GetHashCode(); + if (oneofFieldCase_ == OneofFieldOneofCase.OneofBytes) hash ^= OneofBytes.GetHashCode(); + if (optionalBoolWrapper_ != null) hash ^= OptionalBoolWrapper.GetHashCode(); + if (optionalInt32Wrapper_ != null) hash ^= OptionalInt32Wrapper.GetHashCode(); + if (optionalInt64Wrapper_ != null) hash ^= OptionalInt64Wrapper.GetHashCode(); + if (optionalUint32Wrapper_ != null) hash ^= OptionalUint32Wrapper.GetHashCode(); + if (optionalUint64Wrapper_ != null) hash ^= OptionalUint64Wrapper.GetHashCode(); + if (optionalFloatWrapper_ != null) hash ^= OptionalFloatWrapper.GetHashCode(); + if (optionalDoubleWrapper_ != null) hash ^= OptionalDoubleWrapper.GetHashCode(); + if (optionalStringWrapper_ != null) hash ^= OptionalStringWrapper.GetHashCode(); + if (optionalBytesWrapper_ != null) hash ^= OptionalBytesWrapper.GetHashCode(); + hash ^= repeatedBoolWrapper_.GetHashCode(); + hash ^= repeatedInt32Wrapper_.GetHashCode(); + hash ^= repeatedInt64Wrapper_.GetHashCode(); + hash ^= repeatedUint32Wrapper_.GetHashCode(); + hash ^= repeatedUint64Wrapper_.GetHashCode(); + hash ^= repeatedFloatWrapper_.GetHashCode(); + hash ^= repeatedDoubleWrapper_.GetHashCode(); + hash ^= repeatedStringWrapper_.GetHashCode(); + hash ^= repeatedBytesWrapper_.GetHashCode(); + if (optionalDuration_ != null) hash ^= OptionalDuration.GetHashCode(); + if (optionalTimestamp_ != null) hash ^= OptionalTimestamp.GetHashCode(); + if (optionalFieldMask_ != null) hash ^= OptionalFieldMask.GetHashCode(); + if (optionalStruct_ != null) hash ^= OptionalStruct.GetHashCode(); + if (optionalAny_ != null) hash ^= OptionalAny.GetHashCode(); + if (optionalValue_ != null) hash ^= OptionalValue.GetHashCode(); + hash ^= repeatedDuration_.GetHashCode(); + hash ^= repeatedTimestamp_.GetHashCode(); + hash ^= repeatedFieldmask_.GetHashCode(); + hash ^= repeatedStruct_.GetHashCode(); + hash ^= repeatedAny_.GetHashCode(); + hash ^= repeatedValue_.GetHashCode(); + if (Fieldname1 != 0) hash ^= Fieldname1.GetHashCode(); + if (FieldName2 != 0) hash ^= 
FieldName2.GetHashCode(); + if (FieldName3 != 0) hash ^= FieldName3.GetHashCode(); + if (FieldName4 != 0) hash ^= FieldName4.GetHashCode(); + if (Field0Name5 != 0) hash ^= Field0Name5.GetHashCode(); + if (Field0Name6 != 0) hash ^= Field0Name6.GetHashCode(); + if (FieldName7 != 0) hash ^= FieldName7.GetHashCode(); + if (FieldName8 != 0) hash ^= FieldName8.GetHashCode(); + if (FieldName9 != 0) hash ^= FieldName9.GetHashCode(); + if (FieldName10 != 0) hash ^= FieldName10.GetHashCode(); + if (FIELDNAME11 != 0) hash ^= FIELDNAME11.GetHashCode(); + if (FIELDName12 != 0) hash ^= FIELDName12.GetHashCode(); + hash ^= (int) oneofFieldCase_; + return hash; + } + + public override string ToString() { + return pb::JsonFormatter.ToDiagnosticString(this); + } + + public void WriteTo(pb::CodedOutputStream output) { + if (OptionalInt32 != 0) { + output.WriteRawTag(8); + output.WriteInt32(OptionalInt32); + } + if (OptionalInt64 != 0L) { + output.WriteRawTag(16); + output.WriteInt64(OptionalInt64); + } + if (OptionalUint32 != 0) { + output.WriteRawTag(24); + output.WriteUInt32(OptionalUint32); + } + if (OptionalUint64 != 0UL) { + output.WriteRawTag(32); + output.WriteUInt64(OptionalUint64); + } + if (OptionalSint32 != 0) { + output.WriteRawTag(40); + output.WriteSInt32(OptionalSint32); + } + if (OptionalSint64 != 0L) { + output.WriteRawTag(48); + output.WriteSInt64(OptionalSint64); + } + if (OptionalFixed32 != 0) { + output.WriteRawTag(61); + output.WriteFixed32(OptionalFixed32); + } + if (OptionalFixed64 != 0UL) { + output.WriteRawTag(65); + output.WriteFixed64(OptionalFixed64); + } + if (OptionalSfixed32 != 0) { + output.WriteRawTag(77); + output.WriteSFixed32(OptionalSfixed32); + } + if (OptionalSfixed64 != 0L) { + output.WriteRawTag(81); + output.WriteSFixed64(OptionalSfixed64); + } + if (OptionalFloat != 0F) { + output.WriteRawTag(93); + output.WriteFloat(OptionalFloat); + } + if (OptionalDouble != 0D) { + output.WriteRawTag(97); + output.WriteDouble(OptionalDouble); + } + if (OptionalBool != false) { + output.WriteRawTag(104); + output.WriteBool(OptionalBool); + } + if (OptionalString.Length != 0) { + output.WriteRawTag(114); + output.WriteString(OptionalString); + } + if (OptionalBytes.Length != 0) { + output.WriteRawTag(122); + output.WriteBytes(OptionalBytes); + } + if (optionalNestedMessage_ != null) { + output.WriteRawTag(146, 1); + output.WriteMessage(OptionalNestedMessage); + } + if (optionalForeignMessage_ != null) { + output.WriteRawTag(154, 1); + output.WriteMessage(OptionalForeignMessage); + } + if (OptionalNestedEnum != 0) { + output.WriteRawTag(168, 1); + output.WriteEnum((int) OptionalNestedEnum); + } + if (OptionalForeignEnum != 0) { + output.WriteRawTag(176, 1); + output.WriteEnum((int) OptionalForeignEnum); + } + if (OptionalStringPiece.Length != 0) { + output.WriteRawTag(194, 1); + output.WriteString(OptionalStringPiece); + } + if (OptionalCord.Length != 0) { + output.WriteRawTag(202, 1); + output.WriteString(OptionalCord); + } + if (recursiveMessage_ != null) { + output.WriteRawTag(218, 1); + output.WriteMessage(RecursiveMessage); + } + repeatedInt32_.WriteTo(output, _repeated_repeatedInt32_codec); + repeatedInt64_.WriteTo(output, _repeated_repeatedInt64_codec); + repeatedUint32_.WriteTo(output, _repeated_repeatedUint32_codec); + repeatedUint64_.WriteTo(output, _repeated_repeatedUint64_codec); + repeatedSint32_.WriteTo(output, _repeated_repeatedSint32_codec); + repeatedSint64_.WriteTo(output, _repeated_repeatedSint64_codec); + repeatedFixed32_.WriteTo(output, 
_repeated_repeatedFixed32_codec); + repeatedFixed64_.WriteTo(output, _repeated_repeatedFixed64_codec); + repeatedSfixed32_.WriteTo(output, _repeated_repeatedSfixed32_codec); + repeatedSfixed64_.WriteTo(output, _repeated_repeatedSfixed64_codec); + repeatedFloat_.WriteTo(output, _repeated_repeatedFloat_codec); + repeatedDouble_.WriteTo(output, _repeated_repeatedDouble_codec); + repeatedBool_.WriteTo(output, _repeated_repeatedBool_codec); + repeatedString_.WriteTo(output, _repeated_repeatedString_codec); + repeatedBytes_.WriteTo(output, _repeated_repeatedBytes_codec); + repeatedNestedMessage_.WriteTo(output, _repeated_repeatedNestedMessage_codec); + repeatedForeignMessage_.WriteTo(output, _repeated_repeatedForeignMessage_codec); + repeatedNestedEnum_.WriteTo(output, _repeated_repeatedNestedEnum_codec); + repeatedForeignEnum_.WriteTo(output, _repeated_repeatedForeignEnum_codec); + repeatedStringPiece_.WriteTo(output, _repeated_repeatedStringPiece_codec); + repeatedCord_.WriteTo(output, _repeated_repeatedCord_codec); + mapInt32Int32_.WriteTo(output, _map_mapInt32Int32_codec); + mapInt64Int64_.WriteTo(output, _map_mapInt64Int64_codec); + mapUint32Uint32_.WriteTo(output, _map_mapUint32Uint32_codec); + mapUint64Uint64_.WriteTo(output, _map_mapUint64Uint64_codec); + mapSint32Sint32_.WriteTo(output, _map_mapSint32Sint32_codec); + mapSint64Sint64_.WriteTo(output, _map_mapSint64Sint64_codec); + mapFixed32Fixed32_.WriteTo(output, _map_mapFixed32Fixed32_codec); + mapFixed64Fixed64_.WriteTo(output, _map_mapFixed64Fixed64_codec); + mapSfixed32Sfixed32_.WriteTo(output, _map_mapSfixed32Sfixed32_codec); + mapSfixed64Sfixed64_.WriteTo(output, _map_mapSfixed64Sfixed64_codec); + mapInt32Float_.WriteTo(output, _map_mapInt32Float_codec); + mapInt32Double_.WriteTo(output, _map_mapInt32Double_codec); + mapBoolBool_.WriteTo(output, _map_mapBoolBool_codec); + mapStringString_.WriteTo(output, _map_mapStringString_codec); + mapStringBytes_.WriteTo(output, _map_mapStringBytes_codec); + mapStringNestedMessage_.WriteTo(output, _map_mapStringNestedMessage_codec); + mapStringForeignMessage_.WriteTo(output, _map_mapStringForeignMessage_codec); + mapStringNestedEnum_.WriteTo(output, _map_mapStringNestedEnum_codec); + mapStringForeignEnum_.WriteTo(output, _map_mapStringForeignEnum_codec); + if (oneofFieldCase_ == OneofFieldOneofCase.OneofUint32) { + output.WriteRawTag(248, 6); + output.WriteUInt32(OneofUint32); + } + if (oneofFieldCase_ == OneofFieldOneofCase.OneofNestedMessage) { + output.WriteRawTag(130, 7); + output.WriteMessage(OneofNestedMessage); + } + if (oneofFieldCase_ == OneofFieldOneofCase.OneofString) { + output.WriteRawTag(138, 7); + output.WriteString(OneofString); + } + if (oneofFieldCase_ == OneofFieldOneofCase.OneofBytes) { + output.WriteRawTag(146, 7); + output.WriteBytes(OneofBytes); + } + if (optionalBoolWrapper_ != null) { + _single_optionalBoolWrapper_codec.WriteTagAndValue(output, OptionalBoolWrapper); + } + if (optionalInt32Wrapper_ != null) { + _single_optionalInt32Wrapper_codec.WriteTagAndValue(output, OptionalInt32Wrapper); + } + if (optionalInt64Wrapper_ != null) { + _single_optionalInt64Wrapper_codec.WriteTagAndValue(output, OptionalInt64Wrapper); + } + if (optionalUint32Wrapper_ != null) { + _single_optionalUint32Wrapper_codec.WriteTagAndValue(output, OptionalUint32Wrapper); + } + if (optionalUint64Wrapper_ != null) { + _single_optionalUint64Wrapper_codec.WriteTagAndValue(output, OptionalUint64Wrapper); + } + if (optionalFloatWrapper_ != null) { + 
_single_optionalFloatWrapper_codec.WriteTagAndValue(output, OptionalFloatWrapper); + } + if (optionalDoubleWrapper_ != null) { + _single_optionalDoubleWrapper_codec.WriteTagAndValue(output, OptionalDoubleWrapper); + } + if (optionalStringWrapper_ != null) { + _single_optionalStringWrapper_codec.WriteTagAndValue(output, OptionalStringWrapper); + } + if (optionalBytesWrapper_ != null) { + _single_optionalBytesWrapper_codec.WriteTagAndValue(output, OptionalBytesWrapper); + } + repeatedBoolWrapper_.WriteTo(output, _repeated_repeatedBoolWrapper_codec); + repeatedInt32Wrapper_.WriteTo(output, _repeated_repeatedInt32Wrapper_codec); + repeatedInt64Wrapper_.WriteTo(output, _repeated_repeatedInt64Wrapper_codec); + repeatedUint32Wrapper_.WriteTo(output, _repeated_repeatedUint32Wrapper_codec); + repeatedUint64Wrapper_.WriteTo(output, _repeated_repeatedUint64Wrapper_codec); + repeatedFloatWrapper_.WriteTo(output, _repeated_repeatedFloatWrapper_codec); + repeatedDoubleWrapper_.WriteTo(output, _repeated_repeatedDoubleWrapper_codec); + repeatedStringWrapper_.WriteTo(output, _repeated_repeatedStringWrapper_codec); + repeatedBytesWrapper_.WriteTo(output, _repeated_repeatedBytesWrapper_codec); + if (optionalDuration_ != null) { + output.WriteRawTag(234, 18); + output.WriteMessage(OptionalDuration); + } + if (optionalTimestamp_ != null) { + output.WriteRawTag(242, 18); + output.WriteMessage(OptionalTimestamp); + } + if (optionalFieldMask_ != null) { + output.WriteRawTag(250, 18); + output.WriteMessage(OptionalFieldMask); + } + if (optionalStruct_ != null) { + output.WriteRawTag(130, 19); + output.WriteMessage(OptionalStruct); + } + if (optionalAny_ != null) { + output.WriteRawTag(138, 19); + output.WriteMessage(OptionalAny); + } + if (optionalValue_ != null) { + output.WriteRawTag(146, 19); + output.WriteMessage(OptionalValue); + } + repeatedDuration_.WriteTo(output, _repeated_repeatedDuration_codec); + repeatedTimestamp_.WriteTo(output, _repeated_repeatedTimestamp_codec); + repeatedFieldmask_.WriteTo(output, _repeated_repeatedFieldmask_codec); + repeatedAny_.WriteTo(output, _repeated_repeatedAny_codec); + repeatedValue_.WriteTo(output, _repeated_repeatedValue_codec); + repeatedStruct_.WriteTo(output, _repeated_repeatedStruct_codec); + if (Fieldname1 != 0) { + output.WriteRawTag(136, 25); + output.WriteInt32(Fieldname1); + } + if (FieldName2 != 0) { + output.WriteRawTag(144, 25); + output.WriteInt32(FieldName2); + } + if (FieldName3 != 0) { + output.WriteRawTag(152, 25); + output.WriteInt32(FieldName3); + } + if (FieldName4 != 0) { + output.WriteRawTag(160, 25); + output.WriteInt32(FieldName4); + } + if (Field0Name5 != 0) { + output.WriteRawTag(168, 25); + output.WriteInt32(Field0Name5); + } + if (Field0Name6 != 0) { + output.WriteRawTag(176, 25); + output.WriteInt32(Field0Name6); + } + if (FieldName7 != 0) { + output.WriteRawTag(184, 25); + output.WriteInt32(FieldName7); + } + if (FieldName8 != 0) { + output.WriteRawTag(192, 25); + output.WriteInt32(FieldName8); + } + if (FieldName9 != 0) { + output.WriteRawTag(200, 25); + output.WriteInt32(FieldName9); + } + if (FieldName10 != 0) { + output.WriteRawTag(208, 25); + output.WriteInt32(FieldName10); + } + if (FIELDNAME11 != 0) { + output.WriteRawTag(216, 25); + output.WriteInt32(FIELDNAME11); + } + if (FIELDName12 != 0) { + output.WriteRawTag(224, 25); + output.WriteInt32(FIELDName12); + } + } + + public int CalculateSize() { + int size = 0; + if (OptionalInt32 != 0) { + size += 1 + pb::CodedOutputStream.ComputeInt32Size(OptionalInt32); + } + if (OptionalInt64 
!= 0L) { + size += 1 + pb::CodedOutputStream.ComputeInt64Size(OptionalInt64); + } + if (OptionalUint32 != 0) { + size += 1 + pb::CodedOutputStream.ComputeUInt32Size(OptionalUint32); + } + if (OptionalUint64 != 0UL) { + size += 1 + pb::CodedOutputStream.ComputeUInt64Size(OptionalUint64); + } + if (OptionalSint32 != 0) { + size += 1 + pb::CodedOutputStream.ComputeSInt32Size(OptionalSint32); + } + if (OptionalSint64 != 0L) { + size += 1 + pb::CodedOutputStream.ComputeSInt64Size(OptionalSint64); + } + if (OptionalFixed32 != 0) { + size += 1 + 4; + } + if (OptionalFixed64 != 0UL) { + size += 1 + 8; + } + if (OptionalSfixed32 != 0) { + size += 1 + 4; + } + if (OptionalSfixed64 != 0L) { + size += 1 + 8; + } + if (OptionalFloat != 0F) { + size += 1 + 4; + } + if (OptionalDouble != 0D) { + size += 1 + 8; + } + if (OptionalBool != false) { + size += 1 + 1; + } + if (OptionalString.Length != 0) { + size += 1 + pb::CodedOutputStream.ComputeStringSize(OptionalString); + } + if (OptionalBytes.Length != 0) { + size += 1 + pb::CodedOutputStream.ComputeBytesSize(OptionalBytes); + } + if (optionalNestedMessage_ != null) { + size += 2 + pb::CodedOutputStream.ComputeMessageSize(OptionalNestedMessage); + } + if (optionalForeignMessage_ != null) { + size += 2 + pb::CodedOutputStream.ComputeMessageSize(OptionalForeignMessage); + } + if (OptionalNestedEnum != 0) { + size += 2 + pb::CodedOutputStream.ComputeEnumSize((int) OptionalNestedEnum); + } + if (OptionalForeignEnum != 0) { + size += 2 + pb::CodedOutputStream.ComputeEnumSize((int) OptionalForeignEnum); + } + if (OptionalStringPiece.Length != 0) { + size += 2 + pb::CodedOutputStream.ComputeStringSize(OptionalStringPiece); + } + if (OptionalCord.Length != 0) { + size += 2 + pb::CodedOutputStream.ComputeStringSize(OptionalCord); + } + if (recursiveMessage_ != null) { + size += 2 + pb::CodedOutputStream.ComputeMessageSize(RecursiveMessage); + } + size += repeatedInt32_.CalculateSize(_repeated_repeatedInt32_codec); + size += repeatedInt64_.CalculateSize(_repeated_repeatedInt64_codec); + size += repeatedUint32_.CalculateSize(_repeated_repeatedUint32_codec); + size += repeatedUint64_.CalculateSize(_repeated_repeatedUint64_codec); + size += repeatedSint32_.CalculateSize(_repeated_repeatedSint32_codec); + size += repeatedSint64_.CalculateSize(_repeated_repeatedSint64_codec); + size += repeatedFixed32_.CalculateSize(_repeated_repeatedFixed32_codec); + size += repeatedFixed64_.CalculateSize(_repeated_repeatedFixed64_codec); + size += repeatedSfixed32_.CalculateSize(_repeated_repeatedSfixed32_codec); + size += repeatedSfixed64_.CalculateSize(_repeated_repeatedSfixed64_codec); + size += repeatedFloat_.CalculateSize(_repeated_repeatedFloat_codec); + size += repeatedDouble_.CalculateSize(_repeated_repeatedDouble_codec); + size += repeatedBool_.CalculateSize(_repeated_repeatedBool_codec); + size += repeatedString_.CalculateSize(_repeated_repeatedString_codec); + size += repeatedBytes_.CalculateSize(_repeated_repeatedBytes_codec); + size += repeatedNestedMessage_.CalculateSize(_repeated_repeatedNestedMessage_codec); + size += repeatedForeignMessage_.CalculateSize(_repeated_repeatedForeignMessage_codec); + size += repeatedNestedEnum_.CalculateSize(_repeated_repeatedNestedEnum_codec); + size += repeatedForeignEnum_.CalculateSize(_repeated_repeatedForeignEnum_codec); + size += repeatedStringPiece_.CalculateSize(_repeated_repeatedStringPiece_codec); + size += repeatedCord_.CalculateSize(_repeated_repeatedCord_codec); + size += 
mapInt32Int32_.CalculateSize(_map_mapInt32Int32_codec); + size += mapInt64Int64_.CalculateSize(_map_mapInt64Int64_codec); + size += mapUint32Uint32_.CalculateSize(_map_mapUint32Uint32_codec); + size += mapUint64Uint64_.CalculateSize(_map_mapUint64Uint64_codec); + size += mapSint32Sint32_.CalculateSize(_map_mapSint32Sint32_codec); + size += mapSint64Sint64_.CalculateSize(_map_mapSint64Sint64_codec); + size += mapFixed32Fixed32_.CalculateSize(_map_mapFixed32Fixed32_codec); + size += mapFixed64Fixed64_.CalculateSize(_map_mapFixed64Fixed64_codec); + size += mapSfixed32Sfixed32_.CalculateSize(_map_mapSfixed32Sfixed32_codec); + size += mapSfixed64Sfixed64_.CalculateSize(_map_mapSfixed64Sfixed64_codec); + size += mapInt32Float_.CalculateSize(_map_mapInt32Float_codec); + size += mapInt32Double_.CalculateSize(_map_mapInt32Double_codec); + size += mapBoolBool_.CalculateSize(_map_mapBoolBool_codec); + size += mapStringString_.CalculateSize(_map_mapStringString_codec); + size += mapStringBytes_.CalculateSize(_map_mapStringBytes_codec); + size += mapStringNestedMessage_.CalculateSize(_map_mapStringNestedMessage_codec); + size += mapStringForeignMessage_.CalculateSize(_map_mapStringForeignMessage_codec); + size += mapStringNestedEnum_.CalculateSize(_map_mapStringNestedEnum_codec); + size += mapStringForeignEnum_.CalculateSize(_map_mapStringForeignEnum_codec); + if (oneofFieldCase_ == OneofFieldOneofCase.OneofUint32) { + size += 2 + pb::CodedOutputStream.ComputeUInt32Size(OneofUint32); + } + if (oneofFieldCase_ == OneofFieldOneofCase.OneofNestedMessage) { + size += 2 + pb::CodedOutputStream.ComputeMessageSize(OneofNestedMessage); + } + if (oneofFieldCase_ == OneofFieldOneofCase.OneofString) { + size += 2 + pb::CodedOutputStream.ComputeStringSize(OneofString); + } + if (oneofFieldCase_ == OneofFieldOneofCase.OneofBytes) { + size += 2 + pb::CodedOutputStream.ComputeBytesSize(OneofBytes); + } + if (optionalBoolWrapper_ != null) { + size += _single_optionalBoolWrapper_codec.CalculateSizeWithTag(OptionalBoolWrapper); + } + if (optionalInt32Wrapper_ != null) { + size += _single_optionalInt32Wrapper_codec.CalculateSizeWithTag(OptionalInt32Wrapper); + } + if (optionalInt64Wrapper_ != null) { + size += _single_optionalInt64Wrapper_codec.CalculateSizeWithTag(OptionalInt64Wrapper); + } + if (optionalUint32Wrapper_ != null) { + size += _single_optionalUint32Wrapper_codec.CalculateSizeWithTag(OptionalUint32Wrapper); + } + if (optionalUint64Wrapper_ != null) { + size += _single_optionalUint64Wrapper_codec.CalculateSizeWithTag(OptionalUint64Wrapper); + } + if (optionalFloatWrapper_ != null) { + size += _single_optionalFloatWrapper_codec.CalculateSizeWithTag(OptionalFloatWrapper); + } + if (optionalDoubleWrapper_ != null) { + size += _single_optionalDoubleWrapper_codec.CalculateSizeWithTag(OptionalDoubleWrapper); + } + if (optionalStringWrapper_ != null) { + size += _single_optionalStringWrapper_codec.CalculateSizeWithTag(OptionalStringWrapper); + } + if (optionalBytesWrapper_ != null) { + size += _single_optionalBytesWrapper_codec.CalculateSizeWithTag(OptionalBytesWrapper); + } + size += repeatedBoolWrapper_.CalculateSize(_repeated_repeatedBoolWrapper_codec); + size += repeatedInt32Wrapper_.CalculateSize(_repeated_repeatedInt32Wrapper_codec); + size += repeatedInt64Wrapper_.CalculateSize(_repeated_repeatedInt64Wrapper_codec); + size += repeatedUint32Wrapper_.CalculateSize(_repeated_repeatedUint32Wrapper_codec); + size += repeatedUint64Wrapper_.CalculateSize(_repeated_repeatedUint64Wrapper_codec); + size += 
repeatedFloatWrapper_.CalculateSize(_repeated_repeatedFloatWrapper_codec); + size += repeatedDoubleWrapper_.CalculateSize(_repeated_repeatedDoubleWrapper_codec); + size += repeatedStringWrapper_.CalculateSize(_repeated_repeatedStringWrapper_codec); + size += repeatedBytesWrapper_.CalculateSize(_repeated_repeatedBytesWrapper_codec); + if (optionalDuration_ != null) { + size += 2 + pb::CodedOutputStream.ComputeMessageSize(OptionalDuration); + } + if (optionalTimestamp_ != null) { + size += 2 + pb::CodedOutputStream.ComputeMessageSize(OptionalTimestamp); + } + if (optionalFieldMask_ != null) { + size += 2 + pb::CodedOutputStream.ComputeMessageSize(OptionalFieldMask); + } + if (optionalStruct_ != null) { + size += 2 + pb::CodedOutputStream.ComputeMessageSize(OptionalStruct); + } + if (optionalAny_ != null) { + size += 2 + pb::CodedOutputStream.ComputeMessageSize(OptionalAny); + } + if (optionalValue_ != null) { + size += 2 + pb::CodedOutputStream.ComputeMessageSize(OptionalValue); + } + size += repeatedDuration_.CalculateSize(_repeated_repeatedDuration_codec); + size += repeatedTimestamp_.CalculateSize(_repeated_repeatedTimestamp_codec); + size += repeatedFieldmask_.CalculateSize(_repeated_repeatedFieldmask_codec); + size += repeatedStruct_.CalculateSize(_repeated_repeatedStruct_codec); + size += repeatedAny_.CalculateSize(_repeated_repeatedAny_codec); + size += repeatedValue_.CalculateSize(_repeated_repeatedValue_codec); + if (Fieldname1 != 0) { + size += 2 + pb::CodedOutputStream.ComputeInt32Size(Fieldname1); + } + if (FieldName2 != 0) { + size += 2 + pb::CodedOutputStream.ComputeInt32Size(FieldName2); + } + if (FieldName3 != 0) { + size += 2 + pb::CodedOutputStream.ComputeInt32Size(FieldName3); + } + if (FieldName4 != 0) { + size += 2 + pb::CodedOutputStream.ComputeInt32Size(FieldName4); + } + if (Field0Name5 != 0) { + size += 2 + pb::CodedOutputStream.ComputeInt32Size(Field0Name5); + } + if (Field0Name6 != 0) { + size += 2 + pb::CodedOutputStream.ComputeInt32Size(Field0Name6); + } + if (FieldName7 != 0) { + size += 2 + pb::CodedOutputStream.ComputeInt32Size(FieldName7); + } + if (FieldName8 != 0) { + size += 2 + pb::CodedOutputStream.ComputeInt32Size(FieldName8); + } + if (FieldName9 != 0) { + size += 2 + pb::CodedOutputStream.ComputeInt32Size(FieldName9); + } + if (FieldName10 != 0) { + size += 2 + pb::CodedOutputStream.ComputeInt32Size(FieldName10); + } + if (FIELDNAME11 != 0) { + size += 2 + pb::CodedOutputStream.ComputeInt32Size(FIELDNAME11); + } + if (FIELDName12 != 0) { + size += 2 + pb::CodedOutputStream.ComputeInt32Size(FIELDName12); + } + return size; + } + + public void MergeFrom(TestAllTypes other) { + if (other == null) { + return; + } + if (other.OptionalInt32 != 0) { + OptionalInt32 = other.OptionalInt32; + } + if (other.OptionalInt64 != 0L) { + OptionalInt64 = other.OptionalInt64; + } + if (other.OptionalUint32 != 0) { + OptionalUint32 = other.OptionalUint32; + } + if (other.OptionalUint64 != 0UL) { + OptionalUint64 = other.OptionalUint64; + } + if (other.OptionalSint32 != 0) { + OptionalSint32 = other.OptionalSint32; + } + if (other.OptionalSint64 != 0L) { + OptionalSint64 = other.OptionalSint64; + } + if (other.OptionalFixed32 != 0) { + OptionalFixed32 = other.OptionalFixed32; + } + if (other.OptionalFixed64 != 0UL) { + OptionalFixed64 = other.OptionalFixed64; + } + if (other.OptionalSfixed32 != 0) { + OptionalSfixed32 = other.OptionalSfixed32; + } + if (other.OptionalSfixed64 != 0L) { + OptionalSfixed64 = other.OptionalSfixed64; + } + if (other.OptionalFloat != 0F) { + 
OptionalFloat = other.OptionalFloat; + } + if (other.OptionalDouble != 0D) { + OptionalDouble = other.OptionalDouble; + } + if (other.OptionalBool != false) { + OptionalBool = other.OptionalBool; + } + if (other.OptionalString.Length != 0) { + OptionalString = other.OptionalString; + } + if (other.OptionalBytes.Length != 0) { + OptionalBytes = other.OptionalBytes; + } + if (other.optionalNestedMessage_ != null) { + if (optionalNestedMessage_ == null) { + optionalNestedMessage_ = new global::Conformance.TestAllTypes.Types.NestedMessage(); + } + OptionalNestedMessage.MergeFrom(other.OptionalNestedMessage); + } + if (other.optionalForeignMessage_ != null) { + if (optionalForeignMessage_ == null) { + optionalForeignMessage_ = new global::Conformance.ForeignMessage(); + } + OptionalForeignMessage.MergeFrom(other.OptionalForeignMessage); + } + if (other.OptionalNestedEnum != 0) { + OptionalNestedEnum = other.OptionalNestedEnum; + } + if (other.OptionalForeignEnum != 0) { + OptionalForeignEnum = other.OptionalForeignEnum; + } + if (other.OptionalStringPiece.Length != 0) { + OptionalStringPiece = other.OptionalStringPiece; + } + if (other.OptionalCord.Length != 0) { + OptionalCord = other.OptionalCord; + } + if (other.recursiveMessage_ != null) { + if (recursiveMessage_ == null) { + recursiveMessage_ = new global::Conformance.TestAllTypes(); + } + RecursiveMessage.MergeFrom(other.RecursiveMessage); + } + repeatedInt32_.Add(other.repeatedInt32_); + repeatedInt64_.Add(other.repeatedInt64_); + repeatedUint32_.Add(other.repeatedUint32_); + repeatedUint64_.Add(other.repeatedUint64_); + repeatedSint32_.Add(other.repeatedSint32_); + repeatedSint64_.Add(other.repeatedSint64_); + repeatedFixed32_.Add(other.repeatedFixed32_); + repeatedFixed64_.Add(other.repeatedFixed64_); + repeatedSfixed32_.Add(other.repeatedSfixed32_); + repeatedSfixed64_.Add(other.repeatedSfixed64_); + repeatedFloat_.Add(other.repeatedFloat_); + repeatedDouble_.Add(other.repeatedDouble_); + repeatedBool_.Add(other.repeatedBool_); + repeatedString_.Add(other.repeatedString_); + repeatedBytes_.Add(other.repeatedBytes_); + repeatedNestedMessage_.Add(other.repeatedNestedMessage_); + repeatedForeignMessage_.Add(other.repeatedForeignMessage_); + repeatedNestedEnum_.Add(other.repeatedNestedEnum_); + repeatedForeignEnum_.Add(other.repeatedForeignEnum_); + repeatedStringPiece_.Add(other.repeatedStringPiece_); + repeatedCord_.Add(other.repeatedCord_); + mapInt32Int32_.Add(other.mapInt32Int32_); + mapInt64Int64_.Add(other.mapInt64Int64_); + mapUint32Uint32_.Add(other.mapUint32Uint32_); + mapUint64Uint64_.Add(other.mapUint64Uint64_); + mapSint32Sint32_.Add(other.mapSint32Sint32_); + mapSint64Sint64_.Add(other.mapSint64Sint64_); + mapFixed32Fixed32_.Add(other.mapFixed32Fixed32_); + mapFixed64Fixed64_.Add(other.mapFixed64Fixed64_); + mapSfixed32Sfixed32_.Add(other.mapSfixed32Sfixed32_); + mapSfixed64Sfixed64_.Add(other.mapSfixed64Sfixed64_); + mapInt32Float_.Add(other.mapInt32Float_); + mapInt32Double_.Add(other.mapInt32Double_); + mapBoolBool_.Add(other.mapBoolBool_); + mapStringString_.Add(other.mapStringString_); + mapStringBytes_.Add(other.mapStringBytes_); + mapStringNestedMessage_.Add(other.mapStringNestedMessage_); + mapStringForeignMessage_.Add(other.mapStringForeignMessage_); + mapStringNestedEnum_.Add(other.mapStringNestedEnum_); + mapStringForeignEnum_.Add(other.mapStringForeignEnum_); + if (other.optionalBoolWrapper_ != null) { + if (optionalBoolWrapper_ == null || other.OptionalBoolWrapper != false) { + OptionalBoolWrapper = 
other.OptionalBoolWrapper; + } + } + if (other.optionalInt32Wrapper_ != null) { + if (optionalInt32Wrapper_ == null || other.OptionalInt32Wrapper != 0) { + OptionalInt32Wrapper = other.OptionalInt32Wrapper; + } + } + if (other.optionalInt64Wrapper_ != null) { + if (optionalInt64Wrapper_ == null || other.OptionalInt64Wrapper != 0L) { + OptionalInt64Wrapper = other.OptionalInt64Wrapper; + } + } + if (other.optionalUint32Wrapper_ != null) { + if (optionalUint32Wrapper_ == null || other.OptionalUint32Wrapper != 0) { + OptionalUint32Wrapper = other.OptionalUint32Wrapper; + } + } + if (other.optionalUint64Wrapper_ != null) { + if (optionalUint64Wrapper_ == null || other.OptionalUint64Wrapper != 0UL) { + OptionalUint64Wrapper = other.OptionalUint64Wrapper; + } + } + if (other.optionalFloatWrapper_ != null) { + if (optionalFloatWrapper_ == null || other.OptionalFloatWrapper != 0F) { + OptionalFloatWrapper = other.OptionalFloatWrapper; + } + } + if (other.optionalDoubleWrapper_ != null) { + if (optionalDoubleWrapper_ == null || other.OptionalDoubleWrapper != 0D) { + OptionalDoubleWrapper = other.OptionalDoubleWrapper; + } + } + if (other.optionalStringWrapper_ != null) { + if (optionalStringWrapper_ == null || other.OptionalStringWrapper != "") { + OptionalStringWrapper = other.OptionalStringWrapper; + } + } + if (other.optionalBytesWrapper_ != null) { + if (optionalBytesWrapper_ == null || other.OptionalBytesWrapper != pb::ByteString.Empty) { + OptionalBytesWrapper = other.OptionalBytesWrapper; + } + } + repeatedBoolWrapper_.Add(other.repeatedBoolWrapper_); + repeatedInt32Wrapper_.Add(other.repeatedInt32Wrapper_); + repeatedInt64Wrapper_.Add(other.repeatedInt64Wrapper_); + repeatedUint32Wrapper_.Add(other.repeatedUint32Wrapper_); + repeatedUint64Wrapper_.Add(other.repeatedUint64Wrapper_); + repeatedFloatWrapper_.Add(other.repeatedFloatWrapper_); + repeatedDoubleWrapper_.Add(other.repeatedDoubleWrapper_); + repeatedStringWrapper_.Add(other.repeatedStringWrapper_); + repeatedBytesWrapper_.Add(other.repeatedBytesWrapper_); + if (other.optionalDuration_ != null) { + if (optionalDuration_ == null) { + optionalDuration_ = new global::Google.Protobuf.WellKnownTypes.Duration(); + } + OptionalDuration.MergeFrom(other.OptionalDuration); + } + if (other.optionalTimestamp_ != null) { + if (optionalTimestamp_ == null) { + optionalTimestamp_ = new global::Google.Protobuf.WellKnownTypes.Timestamp(); + } + OptionalTimestamp.MergeFrom(other.OptionalTimestamp); + } + if (other.optionalFieldMask_ != null) { + if (optionalFieldMask_ == null) { + optionalFieldMask_ = new global::Google.Protobuf.WellKnownTypes.FieldMask(); + } + OptionalFieldMask.MergeFrom(other.OptionalFieldMask); + } + if (other.optionalStruct_ != null) { + if (optionalStruct_ == null) { + optionalStruct_ = new global::Google.Protobuf.WellKnownTypes.Struct(); + } + OptionalStruct.MergeFrom(other.OptionalStruct); + } + if (other.optionalAny_ != null) { + if (optionalAny_ == null) { + optionalAny_ = new global::Google.Protobuf.WellKnownTypes.Any(); + } + OptionalAny.MergeFrom(other.OptionalAny); + } + if (other.optionalValue_ != null) { + if (optionalValue_ == null) { + optionalValue_ = new global::Google.Protobuf.WellKnownTypes.Value(); + } + OptionalValue.MergeFrom(other.OptionalValue); + } + repeatedDuration_.Add(other.repeatedDuration_); + repeatedTimestamp_.Add(other.repeatedTimestamp_); + repeatedFieldmask_.Add(other.repeatedFieldmask_); + repeatedStruct_.Add(other.repeatedStruct_); + repeatedAny_.Add(other.repeatedAny_); + 
repeatedValue_.Add(other.repeatedValue_); + if (other.Fieldname1 != 0) { + Fieldname1 = other.Fieldname1; + } + if (other.FieldName2 != 0) { + FieldName2 = other.FieldName2; + } + if (other.FieldName3 != 0) { + FieldName3 = other.FieldName3; + } + if (other.FieldName4 != 0) { + FieldName4 = other.FieldName4; + } + if (other.Field0Name5 != 0) { + Field0Name5 = other.Field0Name5; + } + if (other.Field0Name6 != 0) { + Field0Name6 = other.Field0Name6; + } + if (other.FieldName7 != 0) { + FieldName7 = other.FieldName7; + } + if (other.FieldName8 != 0) { + FieldName8 = other.FieldName8; + } + if (other.FieldName9 != 0) { + FieldName9 = other.FieldName9; + } + if (other.FieldName10 != 0) { + FieldName10 = other.FieldName10; + } + if (other.FIELDNAME11 != 0) { + FIELDNAME11 = other.FIELDNAME11; + } + if (other.FIELDName12 != 0) { + FIELDName12 = other.FIELDName12; + } + switch (other.OneofFieldCase) { + case OneofFieldOneofCase.OneofUint32: + OneofUint32 = other.OneofUint32; + break; + case OneofFieldOneofCase.OneofNestedMessage: + OneofNestedMessage = other.OneofNestedMessage; + break; + case OneofFieldOneofCase.OneofString: + OneofString = other.OneofString; + break; + case OneofFieldOneofCase.OneofBytes: + OneofBytes = other.OneofBytes; + break; + } + + } + + public void MergeFrom(pb::CodedInputStream input) { + uint tag; + while ((tag = input.ReadTag()) != 0) { + switch(tag) { + default: + input.SkipLastField(); + break; + case 8: { + OptionalInt32 = input.ReadInt32(); + break; + } + case 16: { + OptionalInt64 = input.ReadInt64(); + break; + } + case 24: { + OptionalUint32 = input.ReadUInt32(); + break; + } + case 32: { + OptionalUint64 = input.ReadUInt64(); + break; + } + case 40: { + OptionalSint32 = input.ReadSInt32(); + break; + } + case 48: { + OptionalSint64 = input.ReadSInt64(); + break; + } + case 61: { + OptionalFixed32 = input.ReadFixed32(); + break; + } + case 65: { + OptionalFixed64 = input.ReadFixed64(); + break; + } + case 77: { + OptionalSfixed32 = input.ReadSFixed32(); + break; + } + case 81: { + OptionalSfixed64 = input.ReadSFixed64(); + break; + } + case 93: { + OptionalFloat = input.ReadFloat(); + break; + } + case 97: { + OptionalDouble = input.ReadDouble(); + break; + } + case 104: { + OptionalBool = input.ReadBool(); + break; + } + case 114: { + OptionalString = input.ReadString(); + break; + } + case 122: { + OptionalBytes = input.ReadBytes(); + break; + } + case 146: { + if (optionalNestedMessage_ == null) { + optionalNestedMessage_ = new global::Conformance.TestAllTypes.Types.NestedMessage(); + } + input.ReadMessage(optionalNestedMessage_); + break; + } + case 154: { + if (optionalForeignMessage_ == null) { + optionalForeignMessage_ = new global::Conformance.ForeignMessage(); + } + input.ReadMessage(optionalForeignMessage_); + break; + } + case 168: { + optionalNestedEnum_ = (global::Conformance.TestAllTypes.Types.NestedEnum) input.ReadEnum(); + break; + } + case 176: { + optionalForeignEnum_ = (global::Conformance.ForeignEnum) input.ReadEnum(); + break; + } + case 194: { + OptionalStringPiece = input.ReadString(); + break; + } + case 202: { + OptionalCord = input.ReadString(); + break; + } + case 218: { + if (recursiveMessage_ == null) { + recursiveMessage_ = new global::Conformance.TestAllTypes(); + } + input.ReadMessage(recursiveMessage_); + break; + } + case 250: + case 248: { + repeatedInt32_.AddEntriesFrom(input, _repeated_repeatedInt32_codec); + break; + } + case 258: + case 256: { + repeatedInt64_.AddEntriesFrom(input, _repeated_repeatedInt64_codec); + break; + 
} + case 266: + case 264: { + repeatedUint32_.AddEntriesFrom(input, _repeated_repeatedUint32_codec); + break; + } + case 274: + case 272: { + repeatedUint64_.AddEntriesFrom(input, _repeated_repeatedUint64_codec); + break; + } + case 282: + case 280: { + repeatedSint32_.AddEntriesFrom(input, _repeated_repeatedSint32_codec); + break; + } + case 290: + case 288: { + repeatedSint64_.AddEntriesFrom(input, _repeated_repeatedSint64_codec); + break; + } + case 298: + case 301: { + repeatedFixed32_.AddEntriesFrom(input, _repeated_repeatedFixed32_codec); + break; + } + case 306: + case 305: { + repeatedFixed64_.AddEntriesFrom(input, _repeated_repeatedFixed64_codec); + break; + } + case 314: + case 317: { + repeatedSfixed32_.AddEntriesFrom(input, _repeated_repeatedSfixed32_codec); + break; + } + case 322: + case 321: { + repeatedSfixed64_.AddEntriesFrom(input, _repeated_repeatedSfixed64_codec); + break; + } + case 330: + case 333: { + repeatedFloat_.AddEntriesFrom(input, _repeated_repeatedFloat_codec); + break; + } + case 338: + case 337: { + repeatedDouble_.AddEntriesFrom(input, _repeated_repeatedDouble_codec); + break; + } + case 346: + case 344: { + repeatedBool_.AddEntriesFrom(input, _repeated_repeatedBool_codec); + break; + } + case 354: { + repeatedString_.AddEntriesFrom(input, _repeated_repeatedString_codec); + break; + } + case 362: { + repeatedBytes_.AddEntriesFrom(input, _repeated_repeatedBytes_codec); + break; + } + case 386: { + repeatedNestedMessage_.AddEntriesFrom(input, _repeated_repeatedNestedMessage_codec); + break; + } + case 394: { + repeatedForeignMessage_.AddEntriesFrom(input, _repeated_repeatedForeignMessage_codec); + break; + } + case 410: + case 408: { + repeatedNestedEnum_.AddEntriesFrom(input, _repeated_repeatedNestedEnum_codec); + break; + } + case 418: + case 416: { + repeatedForeignEnum_.AddEntriesFrom(input, _repeated_repeatedForeignEnum_codec); + break; + } + case 434: { + repeatedStringPiece_.AddEntriesFrom(input, _repeated_repeatedStringPiece_codec); + break; + } + case 442: { + repeatedCord_.AddEntriesFrom(input, _repeated_repeatedCord_codec); + break; + } + case 450: { + mapInt32Int32_.AddEntriesFrom(input, _map_mapInt32Int32_codec); + break; + } + case 458: { + mapInt64Int64_.AddEntriesFrom(input, _map_mapInt64Int64_codec); + break; + } + case 466: { + mapUint32Uint32_.AddEntriesFrom(input, _map_mapUint32Uint32_codec); + break; + } + case 474: { + mapUint64Uint64_.AddEntriesFrom(input, _map_mapUint64Uint64_codec); + break; + } + case 482: { + mapSint32Sint32_.AddEntriesFrom(input, _map_mapSint32Sint32_codec); + break; + } + case 490: { + mapSint64Sint64_.AddEntriesFrom(input, _map_mapSint64Sint64_codec); + break; + } + case 498: { + mapFixed32Fixed32_.AddEntriesFrom(input, _map_mapFixed32Fixed32_codec); + break; + } + case 506: { + mapFixed64Fixed64_.AddEntriesFrom(input, _map_mapFixed64Fixed64_codec); + break; + } + case 514: { + mapSfixed32Sfixed32_.AddEntriesFrom(input, _map_mapSfixed32Sfixed32_codec); + break; + } + case 522: { + mapSfixed64Sfixed64_.AddEntriesFrom(input, _map_mapSfixed64Sfixed64_codec); + break; + } + case 530: { + mapInt32Float_.AddEntriesFrom(input, _map_mapInt32Float_codec); + break; + } + case 538: { + mapInt32Double_.AddEntriesFrom(input, _map_mapInt32Double_codec); + break; + } + case 546: { + mapBoolBool_.AddEntriesFrom(input, _map_mapBoolBool_codec); + break; + } + case 554: { + mapStringString_.AddEntriesFrom(input, _map_mapStringString_codec); + break; + } + case 562: { + mapStringBytes_.AddEntriesFrom(input, 
_map_mapStringBytes_codec); + break; + } + case 570: { + mapStringNestedMessage_.AddEntriesFrom(input, _map_mapStringNestedMessage_codec); + break; + } + case 578: { + mapStringForeignMessage_.AddEntriesFrom(input, _map_mapStringForeignMessage_codec); + break; + } + case 586: { + mapStringNestedEnum_.AddEntriesFrom(input, _map_mapStringNestedEnum_codec); + break; + } + case 594: { + mapStringForeignEnum_.AddEntriesFrom(input, _map_mapStringForeignEnum_codec); + break; + } + case 888: { + OneofUint32 = input.ReadUInt32(); + break; + } + case 898: { + global::Conformance.TestAllTypes.Types.NestedMessage subBuilder = new global::Conformance.TestAllTypes.Types.NestedMessage(); + if (oneofFieldCase_ == OneofFieldOneofCase.OneofNestedMessage) { + subBuilder.MergeFrom(OneofNestedMessage); + } + input.ReadMessage(subBuilder); + OneofNestedMessage = subBuilder; + break; + } + case 906: { + OneofString = input.ReadString(); + break; + } + case 914: { + OneofBytes = input.ReadBytes(); + break; + } + case 1610: { + bool? value = _single_optionalBoolWrapper_codec.Read(input); + if (optionalBoolWrapper_ == null || value != false) { + OptionalBoolWrapper = value; + } + break; + } + case 1618: { + int? value = _single_optionalInt32Wrapper_codec.Read(input); + if (optionalInt32Wrapper_ == null || value != 0) { + OptionalInt32Wrapper = value; + } + break; + } + case 1626: { + long? value = _single_optionalInt64Wrapper_codec.Read(input); + if (optionalInt64Wrapper_ == null || value != 0L) { + OptionalInt64Wrapper = value; + } + break; + } + case 1634: { + uint? value = _single_optionalUint32Wrapper_codec.Read(input); + if (optionalUint32Wrapper_ == null || value != 0) { + OptionalUint32Wrapper = value; + } + break; + } + case 1642: { + ulong? value = _single_optionalUint64Wrapper_codec.Read(input); + if (optionalUint64Wrapper_ == null || value != 0UL) { + OptionalUint64Wrapper = value; + } + break; + } + case 1650: { + float? value = _single_optionalFloatWrapper_codec.Read(input); + if (optionalFloatWrapper_ == null || value != 0F) { + OptionalFloatWrapper = value; + } + break; + } + case 1658: { + double? 
value = _single_optionalDoubleWrapper_codec.Read(input); + if (optionalDoubleWrapper_ == null || value != 0D) { + OptionalDoubleWrapper = value; + } + break; + } + case 1666: { + string value = _single_optionalStringWrapper_codec.Read(input); + if (optionalStringWrapper_ == null || value != "") { + OptionalStringWrapper = value; + } + break; + } + case 1674: { + pb::ByteString value = _single_optionalBytesWrapper_codec.Read(input); + if (optionalBytesWrapper_ == null || value != pb::ByteString.Empty) { + OptionalBytesWrapper = value; + } + break; + } + case 1690: { + repeatedBoolWrapper_.AddEntriesFrom(input, _repeated_repeatedBoolWrapper_codec); + break; + } + case 1698: { + repeatedInt32Wrapper_.AddEntriesFrom(input, _repeated_repeatedInt32Wrapper_codec); + break; + } + case 1706: { + repeatedInt64Wrapper_.AddEntriesFrom(input, _repeated_repeatedInt64Wrapper_codec); + break; + } + case 1714: { + repeatedUint32Wrapper_.AddEntriesFrom(input, _repeated_repeatedUint32Wrapper_codec); + break; + } + case 1722: { + repeatedUint64Wrapper_.AddEntriesFrom(input, _repeated_repeatedUint64Wrapper_codec); + break; + } + case 1730: { + repeatedFloatWrapper_.AddEntriesFrom(input, _repeated_repeatedFloatWrapper_codec); + break; + } + case 1738: { + repeatedDoubleWrapper_.AddEntriesFrom(input, _repeated_repeatedDoubleWrapper_codec); + break; + } + case 1746: { + repeatedStringWrapper_.AddEntriesFrom(input, _repeated_repeatedStringWrapper_codec); + break; + } + case 1754: { + repeatedBytesWrapper_.AddEntriesFrom(input, _repeated_repeatedBytesWrapper_codec); + break; + } + case 2410: { + if (optionalDuration_ == null) { + optionalDuration_ = new global::Google.Protobuf.WellKnownTypes.Duration(); + } + input.ReadMessage(optionalDuration_); + break; + } + case 2418: { + if (optionalTimestamp_ == null) { + optionalTimestamp_ = new global::Google.Protobuf.WellKnownTypes.Timestamp(); + } + input.ReadMessage(optionalTimestamp_); + break; + } + case 2426: { + if (optionalFieldMask_ == null) { + optionalFieldMask_ = new global::Google.Protobuf.WellKnownTypes.FieldMask(); + } + input.ReadMessage(optionalFieldMask_); + break; + } + case 2434: { + if (optionalStruct_ == null) { + optionalStruct_ = new global::Google.Protobuf.WellKnownTypes.Struct(); + } + input.ReadMessage(optionalStruct_); + break; + } + case 2442: { + if (optionalAny_ == null) { + optionalAny_ = new global::Google.Protobuf.WellKnownTypes.Any(); + } + input.ReadMessage(optionalAny_); + break; + } + case 2450: { + if (optionalValue_ == null) { + optionalValue_ = new global::Google.Protobuf.WellKnownTypes.Value(); + } + input.ReadMessage(optionalValue_); + break; + } + case 2490: { + repeatedDuration_.AddEntriesFrom(input, _repeated_repeatedDuration_codec); + break; + } + case 2498: { + repeatedTimestamp_.AddEntriesFrom(input, _repeated_repeatedTimestamp_codec); + break; + } + case 2506: { + repeatedFieldmask_.AddEntriesFrom(input, _repeated_repeatedFieldmask_codec); + break; + } + case 2522: { + repeatedAny_.AddEntriesFrom(input, _repeated_repeatedAny_codec); + break; + } + case 2530: { + repeatedValue_.AddEntriesFrom(input, _repeated_repeatedValue_codec); + break; + } + case 2594: { + repeatedStruct_.AddEntriesFrom(input, _repeated_repeatedStruct_codec); + break; + } + case 3208: { + Fieldname1 = input.ReadInt32(); + break; + } + case 3216: { + FieldName2 = input.ReadInt32(); + break; + } + case 3224: { + FieldName3 = input.ReadInt32(); + break; + } + case 3232: { + FieldName4 = input.ReadInt32(); + break; + } + case 3240: { + Field0Name5 = 
input.ReadInt32(); + break; + } + case 3248: { + Field0Name6 = input.ReadInt32(); + break; + } + case 3256: { + FieldName7 = input.ReadInt32(); + break; + } + case 3264: { + FieldName8 = input.ReadInt32(); + break; + } + case 3272: { + FieldName9 = input.ReadInt32(); + break; + } + case 3280: { + FieldName10 = input.ReadInt32(); + break; + } + case 3288: { + FIELDNAME11 = input.ReadInt32(); + break; + } + case 3296: { + FIELDName12 = input.ReadInt32(); + break; + } + } + } + } + + #region Nested types + /// Container for nested types declared in the TestAllTypes message type. + [global::System.Diagnostics.DebuggerNonUserCodeAttribute()] + public static partial class Types { + public enum NestedEnum { + [pbr::OriginalName("FOO")] Foo = 0, + [pbr::OriginalName("BAR")] Bar = 1, + [pbr::OriginalName("BAZ")] Baz = 2, + /// + /// Intentionally negative. + /// + [pbr::OriginalName("NEG")] Neg = -1, + } + + [global::System.Diagnostics.DebuggerNonUserCodeAttribute()] + public sealed partial class NestedMessage : pb::IMessage { + private static readonly pb::MessageParser _parser = new pb::MessageParser(() => new NestedMessage()); + public static pb::MessageParser Parser { get { return _parser; } } + + public static pbr::MessageDescriptor Descriptor { + get { return global::Conformance.TestAllTypes.Descriptor.NestedTypes[0]; } + } + + pbr::MessageDescriptor pb::IMessage.Descriptor { + get { return Descriptor; } + } + + public NestedMessage() { + OnConstruction(); + } + + partial void OnConstruction(); + + public NestedMessage(NestedMessage other) : this() { + a_ = other.a_; + Corecursive = other.corecursive_ != null ? other.Corecursive.Clone() : null; + } + + public NestedMessage Clone() { + return new NestedMessage(this); + } + + /// Field number for the "a" field. + public const int AFieldNumber = 1; + private int a_; + public int A { + get { return a_; } + set { + a_ = value; + } + } + + /// Field number for the "corecursive" field. 
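
The one- and two-byte constants passed to WriteRawTag and the case labels in the MergeFrom switch above are precomputed wire-format tags rather than bare field numbers. A minimal illustrative sketch follows (the MakeTag helper is an assumption for illustration only, not part of the generated file): a tag is (field_number << 3) | wire_type, varint-encoded into one or two raw bytes.

    // Illustrative only — not part of conformance.cs. Shows how the raw tag constants
    // used in WriteTo/MergeFrom above are derived from field number and wire type.
    static uint MakeTag(int fieldNumber, int wireType) => (uint) ((fieldNumber << 3) | wireType);

    // MakeTag(1, 0)   ==   8 -> WriteRawTag(8)         and "case 8"   (optional_int32, varint)
    // MakeTag(18, 2)  == 146 -> varint bytes 0x92 0x01 -> WriteRawTag(146, 1) and "case 146"
    // MakeTag(111, 0) == 888 -> varint bytes 0xF8 0x06 -> WriteRawTag(248, 6) and "case 888"
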
+ public const int CorecursiveFieldNumber = 2; + private global::Conformance.TestAllTypes corecursive_; + public global::Conformance.TestAllTypes Corecursive { + get { return corecursive_; } + set { + corecursive_ = value; + } + } + + public override bool Equals(object other) { + return Equals(other as NestedMessage); + } + + public bool Equals(NestedMessage other) { + if (ReferenceEquals(other, null)) { + return false; + } + if (ReferenceEquals(other, this)) { + return true; + } + if (A != other.A) return false; + if (!object.Equals(Corecursive, other.Corecursive)) return false; + return true; + } + + public override int GetHashCode() { + int hash = 1; + if (A != 0) hash ^= A.GetHashCode(); + if (corecursive_ != null) hash ^= Corecursive.GetHashCode(); + return hash; + } + + public override string ToString() { + return pb::JsonFormatter.ToDiagnosticString(this); + } + + public void WriteTo(pb::CodedOutputStream output) { + if (A != 0) { + output.WriteRawTag(8); + output.WriteInt32(A); + } + if (corecursive_ != null) { + output.WriteRawTag(18); + output.WriteMessage(Corecursive); + } + } + + public int CalculateSize() { + int size = 0; + if (A != 0) { + size += 1 + pb::CodedOutputStream.ComputeInt32Size(A); + } + if (corecursive_ != null) { + size += 1 + pb::CodedOutputStream.ComputeMessageSize(Corecursive); + } + return size; + } + + public void MergeFrom(NestedMessage other) { + if (other == null) { + return; + } + if (other.A != 0) { + A = other.A; + } + if (other.corecursive_ != null) { + if (corecursive_ == null) { + corecursive_ = new global::Conformance.TestAllTypes(); + } + Corecursive.MergeFrom(other.Corecursive); + } + } + + public void MergeFrom(pb::CodedInputStream input) { + uint tag; + while ((tag = input.ReadTag()) != 0) { + switch(tag) { + default: + input.SkipLastField(); + break; + case 8: { + A = input.ReadInt32(); + break; + } + case 18: { + if (corecursive_ == null) { + corecursive_ = new global::Conformance.TestAllTypes(); + } + input.ReadMessage(corecursive_); + break; + } + } + } + } + + } + + } + #endregion + + } + + [global::System.Diagnostics.DebuggerNonUserCodeAttribute()] + public sealed partial class ForeignMessage : pb::IMessage { + private static readonly pb::MessageParser _parser = new pb::MessageParser(() => new ForeignMessage()); + public static pb::MessageParser Parser { get { return _parser; } } + + public static pbr::MessageDescriptor Descriptor { + get { return global::Conformance.ConformanceReflection.Descriptor.MessageTypes[3]; } + } + + pbr::MessageDescriptor pb::IMessage.Descriptor { + get { return Descriptor; } + } + + public ForeignMessage() { + OnConstruction(); + } + + partial void OnConstruction(); + + public ForeignMessage(ForeignMessage other) : this() { + c_ = other.c_; + } + + public ForeignMessage Clone() { + return new ForeignMessage(this); + } + + /// Field number for the "c" field. 
+ public const int CFieldNumber = 1; + private int c_; + public int C { + get { return c_; } + set { + c_ = value; + } + } + + public override bool Equals(object other) { + return Equals(other as ForeignMessage); + } + + public bool Equals(ForeignMessage other) { + if (ReferenceEquals(other, null)) { + return false; + } + if (ReferenceEquals(other, this)) { + return true; + } + if (C != other.C) return false; + return true; + } + + public override int GetHashCode() { + int hash = 1; + if (C != 0) hash ^= C.GetHashCode(); + return hash; + } + + public override string ToString() { + return pb::JsonFormatter.ToDiagnosticString(this); + } + + public void WriteTo(pb::CodedOutputStream output) { + if (C != 0) { + output.WriteRawTag(8); + output.WriteInt32(C); + } + } + + public int CalculateSize() { + int size = 0; + if (C != 0) { + size += 1 + pb::CodedOutputStream.ComputeInt32Size(C); + } + return size; + } + + public void MergeFrom(ForeignMessage other) { + if (other == null) { + return; + } + if (other.C != 0) { + C = other.C; + } + } + + public void MergeFrom(pb::CodedInputStream input) { + uint tag; + while ((tag = input.ReadTag()) != 0) { + switch(tag) { + default: + input.SkipLastField(); + break; + case 8: { + C = input.ReadInt32(); + break; + } + } + } + } + + } + + #endregion + +} + +#endregion Designer generated code diff --git a/packager/third_party/protobuf/csharp/src/Google.Protobuf.Conformance/Google.Protobuf.Conformance.csproj b/packager/third_party/protobuf/csharp/src/Google.Protobuf.Conformance/Google.Protobuf.Conformance.csproj new file mode 100644 index 0000000000..82f728d192 --- /dev/null +++ b/packager/third_party/protobuf/csharp/src/Google.Protobuf.Conformance/Google.Protobuf.Conformance.csproj @@ -0,0 +1,61 @@ + + + + + Debug + AnyCPU + {0607D1B8-80D6-4B35-9857-1263C1B32B94} + Exe + Properties + Google.Protobuf.Conformance + Google.Protobuf.Conformance + v4.5 + 512 + + + AnyCPU + true + full + false + bin\Debug\ + DEBUG;TRACE + prompt + 4 + + + AnyCPU + pdbonly + true + bin\Release\ + TRACE + prompt + 4 + + + + + + + + + + + + + + + + + {6908bdce-d925-43f3-94ac-a531e6df2591} + Google.Protobuf + + + + + \ No newline at end of file diff --git a/packager/third_party/protobuf/csharp/src/Google.Protobuf.Conformance/Program.cs b/packager/third_party/protobuf/csharp/src/Google.Protobuf.Conformance/Program.cs new file mode 100644 index 0000000000..19827c4844 --- /dev/null +++ b/packager/third_party/protobuf/csharp/src/Google.Protobuf.Conformance/Program.cs @@ -0,0 +1,142 @@ +#region Copyright notice and license +// Protocol Buffers - Google's data interchange format +// Copyright 2015 Google Inc. All rights reserved. +// https://developers.google.com/protocol-buffers/ +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. 
+// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +#endregion + +using Conformance; +using Google.Protobuf.Reflection; +using System; +using System.IO; + +namespace Google.Protobuf.Conformance +{ + /// + /// Conformance tests. The test runner will provide JSON or proto data on stdin, + /// and this program will produce its output on stdout. + /// + class Program + { + private static void Main(string[] args) + { + // This way we get the binary streams instead of readers/writers. + var input = new BinaryReader(Console.OpenStandardInput()); + var output = new BinaryWriter(Console.OpenStandardOutput()); + var typeRegistry = TypeRegistry.FromMessages(TestAllTypes.Descriptor); + + int count = 0; + while (RunTest(input, output, typeRegistry)) + { + count++; + } + Console.Error.WriteLine("Received EOF after {0} tests", count); + } + + private static bool RunTest(BinaryReader input, BinaryWriter output, TypeRegistry typeRegistry) + { + int? size = ReadInt32(input); + if (size == null) + { + return false; + } + byte[] inputData = input.ReadBytes(size.Value); + if (inputData.Length != size.Value) + { + throw new EndOfStreamException("Read " + inputData.Length + " bytes of data when expecting " + size); + } + ConformanceRequest request = ConformanceRequest.Parser.ParseFrom(inputData); + ConformanceResponse response = PerformRequest(request, typeRegistry); + byte[] outputData = response.ToByteArray(); + output.Write(outputData.Length); + output.Write(outputData); + // Ready for another test... 
+ return true; + } + + private static ConformanceResponse PerformRequest(ConformanceRequest request, TypeRegistry typeRegistry) + { + TestAllTypes message; + try + { + switch (request.PayloadCase) + { + case ConformanceRequest.PayloadOneofCase.JsonPayload: + var parser = new JsonParser(new JsonParser.Settings(20, typeRegistry)); + message = parser.Parse(request.JsonPayload); + break; + case ConformanceRequest.PayloadOneofCase.ProtobufPayload: + message = TestAllTypes.Parser.ParseFrom(request.ProtobufPayload); + break; + default: + throw new Exception("Unsupported request payload: " + request.PayloadCase); + } + } + catch (InvalidProtocolBufferException e) + { + return new ConformanceResponse { ParseError = e.Message }; + } + catch (InvalidJsonException e) + { + return new ConformanceResponse { ParseError = e.Message }; + } + try + { + switch (request.RequestedOutputFormat) + { + case global::Conformance.WireFormat.Json: + var formatter = new JsonFormatter(new JsonFormatter.Settings(false, typeRegistry)); + return new ConformanceResponse { JsonPayload = formatter.Format(message) }; + case global::Conformance.WireFormat.Protobuf: + return new ConformanceResponse { ProtobufPayload = message.ToByteString() }; + default: + throw new Exception("Unsupported request output format: " + request.PayloadCase); + } + } + catch (InvalidOperationException e) + { + return new ConformanceResponse { SerializeError = e.Message }; + } + } + + private static int? ReadInt32(BinaryReader input) + { + byte[] bytes = input.ReadBytes(4); + if (bytes.Length == 0) + { + // Cleanly reached the end of the stream + return null; + } + if (bytes.Length != 4) + { + throw new EndOfStreamException("Read " + bytes.Length + " bytes of size when expecting 4"); + } + return bytes[0] | (bytes[1] << 8) | (bytes[2] << 16) | (bytes[3] << 24); + } + } +} diff --git a/packager/third_party/protobuf/csharp/src/Google.Protobuf.Conformance/Properties/AssemblyInfo.cs b/packager/third_party/protobuf/csharp/src/Google.Protobuf.Conformance/Properties/AssemblyInfo.cs new file mode 100644 index 0000000000..d22e90fd8f --- /dev/null +++ b/packager/third_party/protobuf/csharp/src/Google.Protobuf.Conformance/Properties/AssemblyInfo.cs @@ -0,0 +1,48 @@ +#region Copyright notice and license +// Protocol Buffers - Google's data interchange format +// Copyright 2015 Google Inc. All rights reserved. +// https://developers.google.com/protocol-buffers/ +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. 
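
For context, the conformance program above speaks a simple length-prefixed protocol on stdin/stdout: each request and each response is a serialized message preceded by a 4-byte little-endian length, which is what RunTest and ReadInt32 implement. The following runner-side sketch is hypothetical (the RunnerFramingSketch name and stream parameters are assumptions, not part of this change) and only illustrates the framing:

    // Hypothetical illustration of the framing used by the conformance testee above.
    using System;
    using System.IO;
    using Conformance;
    using Google.Protobuf;

    static class RunnerFramingSketch
    {
        // Sends one ConformanceRequest to the child process and reads back one
        // ConformanceResponse, using a 4-byte little-endian length prefix both ways.
        public static ConformanceResponse Exchange(Stream toChild, Stream fromChild, ConformanceRequest request)
        {
            byte[] payload = request.ToByteArray();
            WriteLength(toChild, payload.Length);
            toChild.Write(payload, 0, payload.Length);
            toChild.Flush();

            int size = ReadLength(fromChild);
            byte[] responseBytes = ReadExactly(fromChild, size);
            return ConformanceResponse.Parser.ParseFrom(responseBytes);
        }

        private static void WriteLength(Stream stream, int length)
        {
            // Little-endian, byte by byte, so the sketch does not depend on host endianness.
            stream.Write(new byte[] {
                (byte) length, (byte) (length >> 8), (byte) (length >> 16), (byte) (length >> 24) }, 0, 4);
        }

        private static int ReadLength(Stream stream)
        {
            byte[] b = ReadExactly(stream, 4);
            return b[0] | (b[1] << 8) | (b[2] << 16) | (b[3] << 24);
        }

        private static byte[] ReadExactly(Stream stream, int count)
        {
            byte[] buffer = new byte[count];
            int offset = 0;
            while (offset < count)
            {
                int read = stream.Read(buffer, offset, count - offset);
                if (read <= 0) throw new EndOfStreamException("Unexpected EOF from conformance testee");
                offset += read;
            }
            return buffer;
        }
    }
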
IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +#endregion + +using System.Reflection; + +// General Information about an assembly is controlled through the following +// set of attributes. Change these attribute values to modify the information +// associated with an assembly. +[assembly: AssemblyTitle("Google.Protobuf.Conformance")] +[assembly: AssemblyDescription("")] +[assembly: AssemblyConfiguration("")] +[assembly: AssemblyCompany("")] +[assembly: AssemblyProduct("Google.Protobuf.Conformance")] +[assembly: AssemblyCopyright("Copyright © 2015")] +[assembly: AssemblyTrademark("")] +[assembly: AssemblyCulture("")] + +[assembly: AssemblyVersion("3.0.0.0")] +[assembly: AssemblyFileVersion("3.0.0.0")] diff --git a/packager/third_party/protobuf/csharp/src/Google.Protobuf.JsonDump/Google.Protobuf.JsonDump.csproj b/packager/third_party/protobuf/csharp/src/Google.Protobuf.JsonDump/Google.Protobuf.JsonDump.csproj new file mode 100644 index 0000000000..67acf66d4b --- /dev/null +++ b/packager/third_party/protobuf/csharp/src/Google.Protobuf.JsonDump/Google.Protobuf.JsonDump.csproj @@ -0,0 +1,68 @@ + + + + Debug + AnyCPU + 9.0.30729 + 2.0 + {D7282E99-2DC3-405B-946F-177DB2FD2AE2} + Exe + Properties + Google.Protobuf.JsonDump + Google.Protobuf.JsonDump + v4.5 + 512 + + + + + true + full + false + bin\Debug + obj\Debug\ + DEBUG;TRACE + prompt + 4 + true + Off + false + + + pdbonly + true + bin\Release + obj\Release\ + TRACE + prompt + 4 + true + Off + false + + + + + + + + + + + + {6908BDCE-D925-43F3-94AC-A531E6DF2591} + Google.Protobuf + + + + + + + + \ No newline at end of file diff --git a/packager/third_party/protobuf/csharp/src/Google.Protobuf.JsonDump/Program.cs b/packager/third_party/protobuf/csharp/src/Google.Protobuf.JsonDump/Program.cs new file mode 100644 index 0000000000..e8a6073e2b --- /dev/null +++ b/packager/third_party/protobuf/csharp/src/Google.Protobuf.JsonDump/Program.cs @@ -0,0 +1,72 @@ +#region Copyright notice and license +// Protocol Buffers - Google's data interchange format +// Copyright 2008 Google Inc. All rights reserved. +// https://developers.google.com/protocol-buffers/ +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. 
+// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +#endregion + +using System; +using System.IO; + +namespace Google.Protobuf.ProtoDump +{ + /// + /// Small utility to load a binary message and dump it in JSON format. + /// + internal class Program + { + private static int Main(string[] args) + { + if (args.Length != 2) + { + Console.Error.WriteLine("Usage: Google.Protobuf.JsonDump "); + Console.Error.WriteLine("The descriptor type name is the fully-qualified message name,"); + Console.Error.WriteLine("including assembly e.g. ProjectNamespace.Message,Company.Project"); + return 1; + } + Type type = Type.GetType(args[0]); + if (type == null) + { + Console.Error.WriteLine("Unable to load type {0}.", args[0]); + return 1; + } + if (!typeof(IMessage).IsAssignableFrom(type)) + { + Console.Error.WriteLine("Type {0} doesn't implement IMessage.", args[0]); + return 1; + } + IMessage message = (IMessage) Activator.CreateInstance(type); + using (var input = File.OpenRead(args[1])) + { + message.MergeFrom(input); + } + Console.WriteLine(message); + return 0; + } + } +} \ No newline at end of file diff --git a/packager/third_party/protobuf/csharp/src/Google.Protobuf.JsonDump/Properties/AssemblyInfo.cs b/packager/third_party/protobuf/csharp/src/Google.Protobuf.JsonDump/Properties/AssemblyInfo.cs new file mode 100644 index 0000000000..0a29d2d472 --- /dev/null +++ b/packager/third_party/protobuf/csharp/src/Google.Protobuf.JsonDump/Properties/AssemblyInfo.cs @@ -0,0 +1,19 @@ +using System.Reflection; +using System.Runtime.CompilerServices; +using System.Runtime.InteropServices; + +// General Information about an assembly is controlled through the following +// set of attributes. Change these attribute values to modify the information +// associated with an assembly. 
+ +[assembly: AssemblyTitle("ProtoDump")] +[assembly: AssemblyDescription("")] +[assembly: AssemblyConfiguration("")] +[assembly: AssemblyCompany("")] +[assembly: AssemblyProduct("ProtoDump")] +[assembly: AssemblyCopyright("Copyright © 2015")] +[assembly: AssemblyTrademark("")] +[assembly: AssemblyCulture("")] + +[assembly: AssemblyVersion("3.0.0.0")] +[assembly: AssemblyFileVersion("3.0.0.0")] diff --git a/packager/third_party/protobuf/csharp/src/Google.Protobuf.JsonDump/app.config b/packager/third_party/protobuf/csharp/src/Google.Protobuf.JsonDump/app.config new file mode 100644 index 0000000000..51278a4563 --- /dev/null +++ b/packager/third_party/protobuf/csharp/src/Google.Protobuf.JsonDump/app.config @@ -0,0 +1,3 @@ + + + diff --git a/packager/third_party/protobuf/csharp/src/Google.Protobuf.Test/ByteStringTest.cs b/packager/third_party/protobuf/csharp/src/Google.Protobuf.Test/ByteStringTest.cs new file mode 100644 index 0000000000..8935b7829d --- /dev/null +++ b/packager/third_party/protobuf/csharp/src/Google.Protobuf.Test/ByteStringTest.cs @@ -0,0 +1,171 @@ +#region Copyright notice and license +// Protocol Buffers - Google's data interchange format +// Copyright 2008 Google Inc. All rights reserved. +// https://developers.google.com/protocol-buffers/ +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
+#endregion + +using System; +using System.Text; +using NUnit.Framework; + +namespace Google.Protobuf +{ + public class ByteStringTest + { + [Test] + public void Equality() + { + ByteString b1 = ByteString.CopyFrom(1, 2, 3); + ByteString b2 = ByteString.CopyFrom(1, 2, 3); + ByteString b3 = ByteString.CopyFrom(1, 2, 4); + ByteString b4 = ByteString.CopyFrom(1, 2, 3, 4); + EqualityTester.AssertEquality(b1, b1); + EqualityTester.AssertEquality(b1, b2); + EqualityTester.AssertInequality(b1, b3); + EqualityTester.AssertInequality(b1, b4); + EqualityTester.AssertInequality(b1, null); +#pragma warning disable 1718 // Deliberately calling ==(b1, b1) and !=(b1, b1) + Assert.IsTrue(b1 == b1); + Assert.IsTrue(b1 == b2); + Assert.IsFalse(b1 == b3); + Assert.IsFalse(b1 == b4); + Assert.IsFalse(b1 == null); + Assert.IsTrue((ByteString) null == null); + Assert.IsFalse(b1 != b1); + Assert.IsFalse(b1 != b2); +#pragma warning disable 1718 + Assert.IsTrue(b1 != b3); + Assert.IsTrue(b1 != b4); + Assert.IsTrue(b1 != null); + Assert.IsFalse((ByteString) null != null); + } + + [Test] + public void EmptyByteStringHasZeroSize() + { + Assert.AreEqual(0, ByteString.Empty.Length); + } + + [Test] + public void CopyFromStringWithExplicitEncoding() + { + ByteString bs = ByteString.CopyFrom("AB", Encoding.Unicode); + Assert.AreEqual(4, bs.Length); + Assert.AreEqual(65, bs[0]); + Assert.AreEqual(0, bs[1]); + Assert.AreEqual(66, bs[2]); + Assert.AreEqual(0, bs[3]); + } + + [Test] + public void IsEmptyWhenEmpty() + { + Assert.IsTrue(ByteString.CopyFromUtf8("").IsEmpty); + } + + [Test] + public void IsEmptyWhenNotEmpty() + { + Assert.IsFalse(ByteString.CopyFromUtf8("X").IsEmpty); + } + + [Test] + public void CopyFromByteArrayCopiesContents() + { + byte[] data = new byte[1]; + data[0] = 10; + ByteString bs = ByteString.CopyFrom(data); + Assert.AreEqual(10, bs[0]); + data[0] = 5; + Assert.AreEqual(10, bs[0]); + } + + [Test] + public void ToByteArrayCopiesContents() + { + ByteString bs = ByteString.CopyFromUtf8("Hello"); + byte[] data = bs.ToByteArray(); + Assert.AreEqual((byte)'H', data[0]); + Assert.AreEqual((byte)'H', bs[0]); + data[0] = 0; + Assert.AreEqual(0, data[0]); + Assert.AreEqual((byte)'H', bs[0]); + } + + [Test] + public void CopyFromUtf8UsesUtf8() + { + ByteString bs = ByteString.CopyFromUtf8("\u20ac"); + Assert.AreEqual(3, bs.Length); + Assert.AreEqual(0xe2, bs[0]); + Assert.AreEqual(0x82, bs[1]); + Assert.AreEqual(0xac, bs[2]); + } + + [Test] + public void CopyFromPortion() + { + byte[] data = new byte[] {0, 1, 2, 3, 4, 5, 6}; + ByteString bs = ByteString.CopyFrom(data, 2, 3); + Assert.AreEqual(3, bs.Length); + Assert.AreEqual(2, bs[0]); + Assert.AreEqual(3, bs[1]); + } + + [Test] + public void ToStringUtf8() + { + ByteString bs = ByteString.CopyFromUtf8("\u20ac"); + Assert.AreEqual("\u20ac", bs.ToStringUtf8()); + } + + [Test] + public void ToStringWithExplicitEncoding() + { + ByteString bs = ByteString.CopyFrom("\u20ac", Encoding.Unicode); + Assert.AreEqual("\u20ac", bs.ToString(Encoding.Unicode)); + } + + [Test] + public void FromBase64_WithText() + { + byte[] data = new byte[] {0, 1, 2, 3, 4, 5, 6}; + string base64 = Convert.ToBase64String(data); + ByteString bs = ByteString.FromBase64(base64); + Assert.AreEqual(data, bs.ToByteArray()); + } + + [Test] + public void FromBase64_Empty() + { + // Optimization which also fixes issue 61. 
+ Assert.AreSame(ByteString.Empty, ByteString.FromBase64("")); + } + } +} \ No newline at end of file diff --git a/packager/third_party/protobuf/csharp/src/Google.Protobuf.Test/CodedInputStreamExtensions.cs b/packager/third_party/protobuf/csharp/src/Google.Protobuf.Test/CodedInputStreamExtensions.cs new file mode 100644 index 0000000000..23af28870a --- /dev/null +++ b/packager/third_party/protobuf/csharp/src/Google.Protobuf.Test/CodedInputStreamExtensions.cs @@ -0,0 +1,53 @@ +#region Copyright notice and license +// Protocol Buffers - Google's data interchange format +// Copyright 2015 Google Inc. All rights reserved. +// https://developers.google.com/protocol-buffers/ +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +#endregion + +using NUnit.Framework; + +namespace Google.Protobuf +{ + internal static class CodedInputStreamExtensions + { + public static void AssertNextTag(this CodedInputStream input, uint expectedTag) + { + uint tag = input.ReadTag(); + Assert.AreEqual(expectedTag, tag); + } + + public static T ReadMessage(this CodedInputStream stream, MessageParser parser) + where T : IMessage + { + var message = parser.CreateTemplate(); + stream.ReadMessage(message); + return message; + } + } +} diff --git a/packager/third_party/protobuf/csharp/src/Google.Protobuf.Test/CodedInputStreamTest.cs b/packager/third_party/protobuf/csharp/src/Google.Protobuf.Test/CodedInputStreamTest.cs new file mode 100644 index 0000000000..d628ecceb6 --- /dev/null +++ b/packager/third_party/protobuf/csharp/src/Google.Protobuf.Test/CodedInputStreamTest.cs @@ -0,0 +1,598 @@ +#region Copyright notice and license +// Protocol Buffers - Google's data interchange format +// Copyright 2008 Google Inc. All rights reserved. +// https://developers.google.com/protocol-buffers/ +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. 
+// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +#endregion + +using System; +using System.IO; +using Google.Protobuf.TestProtos; +using NUnit.Framework; + +namespace Google.Protobuf +{ + public class CodedInputStreamTest + { + /// + /// Helper to construct a byte array from a bunch of bytes. The inputs are + /// actually ints so that I can use hex notation and not get stupid errors + /// about precision. + /// + private static byte[] Bytes(params int[] bytesAsInts) + { + byte[] bytes = new byte[bytesAsInts.Length]; + for (int i = 0; i < bytesAsInts.Length; i++) + { + bytes[i] = (byte) bytesAsInts[i]; + } + return bytes; + } + + /// + /// Parses the given bytes using ReadRawVarint32() and ReadRawVarint64() + /// + private static void AssertReadVarint(byte[] data, ulong value) + { + CodedInputStream input = new CodedInputStream(data); + Assert.AreEqual((uint) value, input.ReadRawVarint32()); + + input = new CodedInputStream(data); + Assert.AreEqual(value, input.ReadRawVarint64()); + Assert.IsTrue(input.IsAtEnd); + + // Try different block sizes. + for (int bufferSize = 1; bufferSize <= 16; bufferSize *= 2) + { + input = new CodedInputStream(new SmallBlockInputStream(data, bufferSize)); + Assert.AreEqual((uint) value, input.ReadRawVarint32()); + + input = new CodedInputStream(new SmallBlockInputStream(data, bufferSize)); + Assert.AreEqual(value, input.ReadRawVarint64()); + Assert.IsTrue(input.IsAtEnd); + } + + // Try reading directly from a MemoryStream. We want to verify that it + // doesn't read past the end of the input, so write an extra byte - this + // lets us test the position at the end. + MemoryStream memoryStream = new MemoryStream(); + memoryStream.Write(data, 0, data.Length); + memoryStream.WriteByte(0); + memoryStream.Position = 0; + Assert.AreEqual((uint) value, CodedInputStream.ReadRawVarint32(memoryStream)); + Assert.AreEqual(data.Length, memoryStream.Position); + } + + /// + /// Parses the given bytes using ReadRawVarint32() and ReadRawVarint64() and + /// expects them to fail with an InvalidProtocolBufferException whose + /// description matches the given one. 
+ /// + private static void AssertReadVarintFailure(InvalidProtocolBufferException expected, byte[] data) + { + CodedInputStream input = new CodedInputStream(data); + var exception = Assert.Throws(() => input.ReadRawVarint32()); + Assert.AreEqual(expected.Message, exception.Message); + + input = new CodedInputStream(data); + exception = Assert.Throws(() => input.ReadRawVarint64()); + Assert.AreEqual(expected.Message, exception.Message); + + // Make sure we get the same error when reading directly from a Stream. + exception = Assert.Throws(() => CodedInputStream.ReadRawVarint32(new MemoryStream(data))); + Assert.AreEqual(expected.Message, exception.Message); + } + + [Test] + public void ReadVarint() + { + AssertReadVarint(Bytes(0x00), 0); + AssertReadVarint(Bytes(0x01), 1); + AssertReadVarint(Bytes(0x7f), 127); + // 14882 + AssertReadVarint(Bytes(0xa2, 0x74), (0x22 << 0) | (0x74 << 7)); + // 2961488830 + AssertReadVarint(Bytes(0xbe, 0xf7, 0x92, 0x84, 0x0b), + (0x3e << 0) | (0x77 << 7) | (0x12 << 14) | (0x04 << 21) | + (0x0bL << 28)); + + // 64-bit + // 7256456126 + AssertReadVarint(Bytes(0xbe, 0xf7, 0x92, 0x84, 0x1b), + (0x3e << 0) | (0x77 << 7) | (0x12 << 14) | (0x04 << 21) | + (0x1bL << 28)); + // 41256202580718336 + AssertReadVarint(Bytes(0x80, 0xe6, 0xeb, 0x9c, 0xc3, 0xc9, 0xa4, 0x49), + (0x00 << 0) | (0x66 << 7) | (0x6b << 14) | (0x1c << 21) | + (0x43L << 28) | (0x49L << 35) | (0x24L << 42) | (0x49L << 49)); + // 11964378330978735131 + AssertReadVarint(Bytes(0x9b, 0xa8, 0xf9, 0xc2, 0xbb, 0xd6, 0x80, 0x85, 0xa6, 0x01), + (0x1b << 0) | (0x28 << 7) | (0x79 << 14) | (0x42 << 21) | + (0x3bUL << 28) | (0x56UL << 35) | (0x00UL << 42) | + (0x05UL << 49) | (0x26UL << 56) | (0x01UL << 63)); + + // Failures + AssertReadVarintFailure( + InvalidProtocolBufferException.MalformedVarint(), + Bytes(0x80, 0x80, 0x80, 0x80, 0x80, 0x80, 0x80, 0x80, 0x80, 0x80, + 0x00)); + AssertReadVarintFailure( + InvalidProtocolBufferException.TruncatedMessage(), + Bytes(0x80)); + } + + /// + /// Parses the given bytes using ReadRawLittleEndian32() and checks + /// that the result matches the given value. + /// + private static void AssertReadLittleEndian32(byte[] data, uint value) + { + CodedInputStream input = new CodedInputStream(data); + Assert.AreEqual(value, input.ReadRawLittleEndian32()); + Assert.IsTrue(input.IsAtEnd); + + // Try different block sizes. + for (int blockSize = 1; blockSize <= 16; blockSize *= 2) + { + input = new CodedInputStream( + new SmallBlockInputStream(data, blockSize)); + Assert.AreEqual(value, input.ReadRawLittleEndian32()); + Assert.IsTrue(input.IsAtEnd); + } + } + + /// + /// Parses the given bytes using ReadRawLittleEndian64() and checks + /// that the result matches the given value. + /// + private static void AssertReadLittleEndian64(byte[] data, ulong value) + { + CodedInputStream input = new CodedInputStream(data); + Assert.AreEqual(value, input.ReadRawLittleEndian64()); + Assert.IsTrue(input.IsAtEnd); + + // Try different block sizes. 
+ for (int blockSize = 1; blockSize <= 16; blockSize *= 2) + { + input = new CodedInputStream( + new SmallBlockInputStream(data, blockSize)); + Assert.AreEqual(value, input.ReadRawLittleEndian64()); + Assert.IsTrue(input.IsAtEnd); + } + } + + [Test] + public void ReadLittleEndian() + { + AssertReadLittleEndian32(Bytes(0x78, 0x56, 0x34, 0x12), 0x12345678); + AssertReadLittleEndian32(Bytes(0xf0, 0xde, 0xbc, 0x9a), 0x9abcdef0); + + AssertReadLittleEndian64(Bytes(0xf0, 0xde, 0xbc, 0x9a, 0x78, 0x56, 0x34, 0x12), + 0x123456789abcdef0L); + AssertReadLittleEndian64( + Bytes(0x78, 0x56, 0x34, 0x12, 0xf0, 0xde, 0xbc, 0x9a), 0x9abcdef012345678UL); + } + + [Test] + public void DecodeZigZag32() + { + Assert.AreEqual(0, CodedInputStream.DecodeZigZag32(0)); + Assert.AreEqual(-1, CodedInputStream.DecodeZigZag32(1)); + Assert.AreEqual(1, CodedInputStream.DecodeZigZag32(2)); + Assert.AreEqual(-2, CodedInputStream.DecodeZigZag32(3)); + Assert.AreEqual(0x3FFFFFFF, CodedInputStream.DecodeZigZag32(0x7FFFFFFE)); + Assert.AreEqual(unchecked((int) 0xC0000000), CodedInputStream.DecodeZigZag32(0x7FFFFFFF)); + Assert.AreEqual(0x7FFFFFFF, CodedInputStream.DecodeZigZag32(0xFFFFFFFE)); + Assert.AreEqual(unchecked((int) 0x80000000), CodedInputStream.DecodeZigZag32(0xFFFFFFFF)); + } + + [Test] + public void DecodeZigZag64() + { + Assert.AreEqual(0, CodedInputStream.DecodeZigZag64(0)); + Assert.AreEqual(-1, CodedInputStream.DecodeZigZag64(1)); + Assert.AreEqual(1, CodedInputStream.DecodeZigZag64(2)); + Assert.AreEqual(-2, CodedInputStream.DecodeZigZag64(3)); + Assert.AreEqual(0x000000003FFFFFFFL, CodedInputStream.DecodeZigZag64(0x000000007FFFFFFEL)); + Assert.AreEqual(unchecked((long) 0xFFFFFFFFC0000000L), CodedInputStream.DecodeZigZag64(0x000000007FFFFFFFL)); + Assert.AreEqual(0x000000007FFFFFFFL, CodedInputStream.DecodeZigZag64(0x00000000FFFFFFFEL)); + Assert.AreEqual(unchecked((long) 0xFFFFFFFF80000000L), CodedInputStream.DecodeZigZag64(0x00000000FFFFFFFFL)); + Assert.AreEqual(0x7FFFFFFFFFFFFFFFL, CodedInputStream.DecodeZigZag64(0xFFFFFFFFFFFFFFFEL)); + Assert.AreEqual(unchecked((long) 0x8000000000000000L), CodedInputStream.DecodeZigZag64(0xFFFFFFFFFFFFFFFFL)); + } + + [Test] + public void ReadWholeMessage_VaryingBlockSizes() + { + TestAllTypes message = SampleMessages.CreateFullTestAllTypes(); + + byte[] rawBytes = message.ToByteArray(); + Assert.AreEqual(rawBytes.Length, message.CalculateSize()); + TestAllTypes message2 = TestAllTypes.Parser.ParseFrom(rawBytes); + Assert.AreEqual(message, message2); + + // Try different block sizes. + for (int blockSize = 1; blockSize < 256; blockSize *= 2) + { + message2 = TestAllTypes.Parser.ParseFrom(new SmallBlockInputStream(rawBytes, blockSize)); + Assert.AreEqual(message, message2); + } + } + + [Test] + public void ReadHugeBlob() + { + // Allocate and initialize a 1MB blob. + byte[] blob = new byte[1 << 20]; + for (int i = 0; i < blob.Length; i++) + { + blob[i] = (byte) i; + } + + // Make a message containing it. + var message = new TestAllTypes { SingleBytes = ByteString.CopyFrom(blob) }; + + // Serialize and parse it. Make sure to parse from an InputStream, not + // directly from a ByteString, so that CodedInputStream uses buffered + // reading. 
+ TestAllTypes message2 = TestAllTypes.Parser.ParseFrom(message.ToByteString()); + + Assert.AreEqual(message, message2); + } + + [Test] + public void ReadMaliciouslyLargeBlob() + { + MemoryStream ms = new MemoryStream(); + CodedOutputStream output = new CodedOutputStream(ms); + + uint tag = WireFormat.MakeTag(1, WireFormat.WireType.LengthDelimited); + output.WriteRawVarint32(tag); + output.WriteRawVarint32(0x7FFFFFFF); + output.WriteRawBytes(new byte[32]); // Pad with a few random bytes. + output.Flush(); + ms.Position = 0; + + CodedInputStream input = new CodedInputStream(ms); + Assert.AreEqual(tag, input.ReadTag()); + + Assert.Throws(() => input.ReadBytes()); + } + + internal static TestRecursiveMessage MakeRecursiveMessage(int depth) + { + if (depth == 0) + { + return new TestRecursiveMessage { I = 5 }; + } + else + { + return new TestRecursiveMessage { A = MakeRecursiveMessage(depth - 1) }; + } + } + + internal static void AssertMessageDepth(TestRecursiveMessage message, int depth) + { + if (depth == 0) + { + Assert.IsNull(message.A); + Assert.AreEqual(5, message.I); + } + else + { + Assert.IsNotNull(message.A); + AssertMessageDepth(message.A, depth - 1); + } + } + + [Test] + public void MaliciousRecursion() + { + ByteString data64 = MakeRecursiveMessage(64).ToByteString(); + ByteString data65 = MakeRecursiveMessage(65).ToByteString(); + + AssertMessageDepth(TestRecursiveMessage.Parser.ParseFrom(data64), 64); + + Assert.Throws(() => TestRecursiveMessage.Parser.ParseFrom(data65)); + + CodedInputStream input = CodedInputStream.CreateWithLimits(new MemoryStream(data64.ToByteArray()), 1000000, 63); + Assert.Throws(() => TestRecursiveMessage.Parser.ParseFrom(input)); + } + + [Test] + public void SizeLimit() + { + // Have to use a Stream rather than ByteString.CreateCodedInput as SizeLimit doesn't + // apply to the latter case. + MemoryStream ms = new MemoryStream(SampleMessages.CreateFullTestAllTypes().ToByteArray()); + CodedInputStream input = CodedInputStream.CreateWithLimits(ms, 16, 100); + Assert.Throws(() => TestAllTypes.Parser.ParseFrom(input)); + } + + /// + /// Tests that if we read an string that contains invalid UTF-8, no exception + /// is thrown. Instead, the invalid bytes are replaced with the Unicode + /// "replacement character" U+FFFD. + /// + [Test] + public void ReadInvalidUtf8() + { + MemoryStream ms = new MemoryStream(); + CodedOutputStream output = new CodedOutputStream(ms); + + uint tag = WireFormat.MakeTag(1, WireFormat.WireType.LengthDelimited); + output.WriteRawVarint32(tag); + output.WriteRawVarint32(1); + output.WriteRawBytes(new byte[] {0x80}); + output.Flush(); + ms.Position = 0; + + CodedInputStream input = new CodedInputStream(ms); + + Assert.AreEqual(tag, input.ReadTag()); + string text = input.ReadString(); + Assert.AreEqual('\ufffd', text[0]); + } + + /// + /// A stream which limits the number of bytes it reads at a time. + /// We use this to make sure that CodedInputStream doesn't screw up when + /// reading in small blocks. 
+ /// + private sealed class SmallBlockInputStream : MemoryStream + { + private readonly int blockSize; + + public SmallBlockInputStream(byte[] data, int blockSize) + : base(data) + { + this.blockSize = blockSize; + } + + public override int Read(byte[] buffer, int offset, int count) + { + return base.Read(buffer, offset, Math.Min(count, blockSize)); + } + } + + [Test] + public void TestNegativeEnum() + { + byte[] bytes = { 0xFE, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0x01 }; + CodedInputStream input = new CodedInputStream(bytes); + Assert.AreEqual((int)SampleEnum.NegativeValue, input.ReadEnum()); + Assert.IsTrue(input.IsAtEnd); + } + + //Issue 71: CodedInputStream.ReadBytes go to slow path unnecessarily + [Test] + public void TestSlowPathAvoidance() + { + using (var ms = new MemoryStream()) + { + CodedOutputStream output = new CodedOutputStream(ms); + output.WriteTag(1, WireFormat.WireType.LengthDelimited); + output.WriteBytes(ByteString.CopyFrom(new byte[100])); + output.WriteTag(2, WireFormat.WireType.LengthDelimited); + output.WriteBytes(ByteString.CopyFrom(new byte[100])); + output.Flush(); + + ms.Position = 0; + CodedInputStream input = new CodedInputStream(ms, new byte[ms.Length / 2], 0, 0); + + uint tag = input.ReadTag(); + Assert.AreEqual(1, WireFormat.GetTagFieldNumber(tag)); + Assert.AreEqual(100, input.ReadBytes().Length); + + tag = input.ReadTag(); + Assert.AreEqual(2, WireFormat.GetTagFieldNumber(tag)); + Assert.AreEqual(100, input.ReadBytes().Length); + } + } + + [Test] + public void Tag0Throws() + { + var input = new CodedInputStream(new byte[] { 0 }); + Assert.Throws(() => input.ReadTag()); + } + + [Test] + public void SkipGroup() + { + // Create an output stream with a group in: + // Field 1: string "field 1" + // Field 2: group containing: + // Field 1: fixed int32 value 100 + // Field 2: string "ignore me" + // Field 3: nested group containing + // Field 1: fixed int64 value 1000 + // Field 3: string "field 3" + var stream = new MemoryStream(); + var output = new CodedOutputStream(stream); + output.WriteTag(1, WireFormat.WireType.LengthDelimited); + output.WriteString("field 1"); + + // The outer group... + output.WriteTag(2, WireFormat.WireType.StartGroup); + output.WriteTag(1, WireFormat.WireType.Fixed32); + output.WriteFixed32(100); + output.WriteTag(2, WireFormat.WireType.LengthDelimited); + output.WriteString("ignore me"); + // The nested group... + output.WriteTag(3, WireFormat.WireType.StartGroup); + output.WriteTag(1, WireFormat.WireType.Fixed64); + output.WriteFixed64(1000); + // Note: Not sure the field number is relevant for end group... + output.WriteTag(3, WireFormat.WireType.EndGroup); + + // End the outer group + output.WriteTag(2, WireFormat.WireType.EndGroup); + + output.WriteTag(3, WireFormat.WireType.LengthDelimited); + output.WriteString("field 3"); + output.Flush(); + stream.Position = 0; + + // Now act like a generated client + var input = new CodedInputStream(stream); + Assert.AreEqual(WireFormat.MakeTag(1, WireFormat.WireType.LengthDelimited), input.ReadTag()); + Assert.AreEqual("field 1", input.ReadString()); + Assert.AreEqual(WireFormat.MakeTag(2, WireFormat.WireType.StartGroup), input.ReadTag()); + input.SkipLastField(); // Should consume the whole group, including the nested one. 
+ Assert.AreEqual(WireFormat.MakeTag(3, WireFormat.WireType.LengthDelimited), input.ReadTag()); + Assert.AreEqual("field 3", input.ReadString()); + } + + [Test] + public void SkipGroup_WrongEndGroupTag() + { + // Create an output stream with: + // Field 1: string "field 1" + // Start group 2 + // Field 3: fixed int32 + // End group 4 (should give an error) + var stream = new MemoryStream(); + var output = new CodedOutputStream(stream); + output.WriteTag(1, WireFormat.WireType.LengthDelimited); + output.WriteString("field 1"); + + // The outer group... + output.WriteTag(2, WireFormat.WireType.StartGroup); + output.WriteTag(3, WireFormat.WireType.Fixed32); + output.WriteFixed32(100); + output.WriteTag(4, WireFormat.WireType.EndGroup); + output.Flush(); + stream.Position = 0; + + // Now act like a generated client + var input = new CodedInputStream(stream); + Assert.AreEqual(WireFormat.MakeTag(1, WireFormat.WireType.LengthDelimited), input.ReadTag()); + Assert.AreEqual("field 1", input.ReadString()); + Assert.AreEqual(WireFormat.MakeTag(2, WireFormat.WireType.StartGroup), input.ReadTag()); + Assert.Throws(input.SkipLastField); + } + + [Test] + public void RogueEndGroupTag() + { + // If we have an end-group tag without a leading start-group tag, generated + // code will just call SkipLastField... so that should fail. + + var stream = new MemoryStream(); + var output = new CodedOutputStream(stream); + output.WriteTag(1, WireFormat.WireType.EndGroup); + output.Flush(); + stream.Position = 0; + + var input = new CodedInputStream(stream); + Assert.AreEqual(WireFormat.MakeTag(1, WireFormat.WireType.EndGroup), input.ReadTag()); + Assert.Throws(input.SkipLastField); + } + + [Test] + public void EndOfStreamReachedWhileSkippingGroup() + { + var stream = new MemoryStream(); + var output = new CodedOutputStream(stream); + output.WriteTag(1, WireFormat.WireType.StartGroup); + output.WriteTag(2, WireFormat.WireType.StartGroup); + output.WriteTag(2, WireFormat.WireType.EndGroup); + + output.Flush(); + stream.Position = 0; + + // Now act like a generated client + var input = new CodedInputStream(stream); + input.ReadTag(); + Assert.Throws(input.SkipLastField); + } + + [Test] + public void RecursionLimitAppliedWhileSkippingGroup() + { + var stream = new MemoryStream(); + var output = new CodedOutputStream(stream); + for (int i = 0; i < CodedInputStream.DefaultRecursionLimit + 1; i++) + { + output.WriteTag(1, WireFormat.WireType.StartGroup); + } + for (int i = 0; i < CodedInputStream.DefaultRecursionLimit + 1; i++) + { + output.WriteTag(1, WireFormat.WireType.EndGroup); + } + output.Flush(); + stream.Position = 0; + + // Now act like a generated client + var input = new CodedInputStream(stream); + Assert.AreEqual(WireFormat.MakeTag(1, WireFormat.WireType.StartGroup), input.ReadTag()); + Assert.Throws(input.SkipLastField); + } + + [Test] + public void Construction_Invalid() + { + Assert.Throws(() => new CodedInputStream((byte[]) null)); + Assert.Throws(() => new CodedInputStream(null, 0, 0)); + Assert.Throws(() => new CodedInputStream((Stream) null)); + Assert.Throws(() => new CodedInputStream(new byte[10], 100, 0)); + Assert.Throws(() => new CodedInputStream(new byte[10], 5, 10)); + } + + [Test] + public void CreateWithLimits_InvalidLimits() + { + var stream = new MemoryStream(); + Assert.Throws(() => CodedInputStream.CreateWithLimits(stream, 0, 1)); + Assert.Throws(() => CodedInputStream.CreateWithLimits(stream, 1, 0)); + } + + [Test] + public void Dispose_DisposesUnderlyingStream() + { + var memoryStream = 
new MemoryStream(); + Assert.IsTrue(memoryStream.CanRead); + using (var cis = new CodedInputStream(memoryStream)) + { + } + Assert.IsFalse(memoryStream.CanRead); // Disposed + } + + [Test] + public void Dispose_WithLeaveOpen() + { + var memoryStream = new MemoryStream(); + Assert.IsTrue(memoryStream.CanRead); + using (var cis = new CodedInputStream(memoryStream, true)) + { + } + Assert.IsTrue(memoryStream.CanRead); // We left the stream open + } + } +} \ No newline at end of file diff --git a/packager/third_party/protobuf/csharp/src/Google.Protobuf.Test/CodedOutputStreamTest.cs b/packager/third_party/protobuf/csharp/src/Google.Protobuf.Test/CodedOutputStreamTest.cs new file mode 100644 index 0000000000..83bcb17606 --- /dev/null +++ b/packager/third_party/protobuf/csharp/src/Google.Protobuf.Test/CodedOutputStreamTest.cs @@ -0,0 +1,419 @@ +#region Copyright notice and license +// Protocol Buffers - Google's data interchange format +// Copyright 2008 Google Inc. All rights reserved. +// https://developers.google.com/protocol-buffers/ +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +#endregion + +using System; +using System.IO; +using Google.Protobuf.TestProtos; +using NUnit.Framework; + +namespace Google.Protobuf +{ + public class CodedOutputStreamTest + { + /// + /// Writes the given value using WriteRawVarint32() and WriteRawVarint64() and + /// checks that the result matches the given bytes + /// + private static void AssertWriteVarint(byte[] data, ulong value) + { + // Only do 32-bit write if the value fits in 32 bits. + if ((value >> 32) == 0) + { + MemoryStream rawOutput = new MemoryStream(); + CodedOutputStream output = new CodedOutputStream(rawOutput); + output.WriteRawVarint32((uint) value); + output.Flush(); + Assert.AreEqual(data, rawOutput.ToArray()); + // Also try computing size. 
+ Assert.AreEqual(data.Length, CodedOutputStream.ComputeRawVarint32Size((uint) value)); + } + + { + MemoryStream rawOutput = new MemoryStream(); + CodedOutputStream output = new CodedOutputStream(rawOutput); + output.WriteRawVarint64(value); + output.Flush(); + Assert.AreEqual(data, rawOutput.ToArray()); + + // Also try computing size. + Assert.AreEqual(data.Length, CodedOutputStream.ComputeRawVarint64Size(value)); + } + + // Try different buffer sizes. + for (int bufferSize = 1; bufferSize <= 16; bufferSize *= 2) + { + // Only do 32-bit write if the value fits in 32 bits. + if ((value >> 32) == 0) + { + MemoryStream rawOutput = new MemoryStream(); + CodedOutputStream output = + new CodedOutputStream(rawOutput, bufferSize); + output.WriteRawVarint32((uint) value); + output.Flush(); + Assert.AreEqual(data, rawOutput.ToArray()); + } + + { + MemoryStream rawOutput = new MemoryStream(); + CodedOutputStream output = new CodedOutputStream(rawOutput, bufferSize); + output.WriteRawVarint64(value); + output.Flush(); + Assert.AreEqual(data, rawOutput.ToArray()); + } + } + } + + /// + /// Tests WriteRawVarint32() and WriteRawVarint64() + /// + [Test] + public void WriteVarint() + { + AssertWriteVarint(new byte[] {0x00}, 0); + AssertWriteVarint(new byte[] {0x01}, 1); + AssertWriteVarint(new byte[] {0x7f}, 127); + // 14882 + AssertWriteVarint(new byte[] {0xa2, 0x74}, (0x22 << 0) | (0x74 << 7)); + // 2961488830 + AssertWriteVarint(new byte[] {0xbe, 0xf7, 0x92, 0x84, 0x0b}, + (0x3e << 0) | (0x77 << 7) | (0x12 << 14) | (0x04 << 21) | + (0x0bL << 28)); + + // 64-bit + // 7256456126 + AssertWriteVarint(new byte[] {0xbe, 0xf7, 0x92, 0x84, 0x1b}, + (0x3e << 0) | (0x77 << 7) | (0x12 << 14) | (0x04 << 21) | + (0x1bL << 28)); + // 41256202580718336 + AssertWriteVarint( + new byte[] {0x80, 0xe6, 0xeb, 0x9c, 0xc3, 0xc9, 0xa4, 0x49}, + (0x00 << 0) | (0x66 << 7) | (0x6b << 14) | (0x1c << 21) | + (0x43UL << 28) | (0x49L << 35) | (0x24UL << 42) | (0x49UL << 49)); + // 11964378330978735131 + AssertWriteVarint( + new byte[] {0x9b, 0xa8, 0xf9, 0xc2, 0xbb, 0xd6, 0x80, 0x85, 0xa6, 0x01}, + unchecked((ulong) + ((0x1b << 0) | (0x28 << 7) | (0x79 << 14) | (0x42 << 21) | + (0x3bL << 28) | (0x56L << 35) | (0x00L << 42) | + (0x05L << 49) | (0x26L << 56) | (0x01L << 63)))); + } + + /// + /// Parses the given bytes using WriteRawLittleEndian32() and checks + /// that the result matches the given value. + /// + private static void AssertWriteLittleEndian32(byte[] data, uint value) + { + MemoryStream rawOutput = new MemoryStream(); + CodedOutputStream output = new CodedOutputStream(rawOutput); + output.WriteRawLittleEndian32(value); + output.Flush(); + Assert.AreEqual(data, rawOutput.ToArray()); + + // Try different buffer sizes. + for (int bufferSize = 1; bufferSize <= 16; bufferSize *= 2) + { + rawOutput = new MemoryStream(); + output = new CodedOutputStream(rawOutput, bufferSize); + output.WriteRawLittleEndian32(value); + output.Flush(); + Assert.AreEqual(data, rawOutput.ToArray()); + } + } + + /// + /// Parses the given bytes using WriteRawLittleEndian64() and checks + /// that the result matches the given value. + /// + private static void AssertWriteLittleEndian64(byte[] data, ulong value) + { + MemoryStream rawOutput = new MemoryStream(); + CodedOutputStream output = new CodedOutputStream(rawOutput); + output.WriteRawLittleEndian64(value); + output.Flush(); + Assert.AreEqual(data, rawOutput.ToArray()); + + // Try different block sizes. 
+ for (int blockSize = 1; blockSize <= 16; blockSize *= 2) + { + rawOutput = new MemoryStream(); + output = new CodedOutputStream(rawOutput, blockSize); + output.WriteRawLittleEndian64(value); + output.Flush(); + Assert.AreEqual(data, rawOutput.ToArray()); + } + } + + /// + /// Tests writeRawLittleEndian32() and writeRawLittleEndian64(). + /// + [Test] + public void WriteLittleEndian() + { + AssertWriteLittleEndian32(new byte[] {0x78, 0x56, 0x34, 0x12}, 0x12345678); + AssertWriteLittleEndian32(new byte[] {0xf0, 0xde, 0xbc, 0x9a}, 0x9abcdef0); + + AssertWriteLittleEndian64( + new byte[] {0xf0, 0xde, 0xbc, 0x9a, 0x78, 0x56, 0x34, 0x12}, + 0x123456789abcdef0L); + AssertWriteLittleEndian64( + new byte[] {0x78, 0x56, 0x34, 0x12, 0xf0, 0xde, 0xbc, 0x9a}, + 0x9abcdef012345678UL); + } + + [Test] + public void WriteWholeMessage_VaryingBlockSizes() + { + TestAllTypes message = SampleMessages.CreateFullTestAllTypes(); + + byte[] rawBytes = message.ToByteArray(); + + // Try different block sizes. + for (int blockSize = 1; blockSize < 256; blockSize *= 2) + { + MemoryStream rawOutput = new MemoryStream(); + CodedOutputStream output = new CodedOutputStream(rawOutput, blockSize); + message.WriteTo(output); + output.Flush(); + Assert.AreEqual(rawBytes, rawOutput.ToArray()); + } + } + + [Test] + public void EncodeZigZag32() + { + Assert.AreEqual(0u, CodedOutputStream.EncodeZigZag32(0)); + Assert.AreEqual(1u, CodedOutputStream.EncodeZigZag32(-1)); + Assert.AreEqual(2u, CodedOutputStream.EncodeZigZag32(1)); + Assert.AreEqual(3u, CodedOutputStream.EncodeZigZag32(-2)); + Assert.AreEqual(0x7FFFFFFEu, CodedOutputStream.EncodeZigZag32(0x3FFFFFFF)); + Assert.AreEqual(0x7FFFFFFFu, CodedOutputStream.EncodeZigZag32(unchecked((int) 0xC0000000))); + Assert.AreEqual(0xFFFFFFFEu, CodedOutputStream.EncodeZigZag32(0x7FFFFFFF)); + Assert.AreEqual(0xFFFFFFFFu, CodedOutputStream.EncodeZigZag32(unchecked((int) 0x80000000))); + } + + [Test] + public void EncodeZigZag64() + { + Assert.AreEqual(0u, CodedOutputStream.EncodeZigZag64(0)); + Assert.AreEqual(1u, CodedOutputStream.EncodeZigZag64(-1)); + Assert.AreEqual(2u, CodedOutputStream.EncodeZigZag64(1)); + Assert.AreEqual(3u, CodedOutputStream.EncodeZigZag64(-2)); + Assert.AreEqual(0x000000007FFFFFFEuL, + CodedOutputStream.EncodeZigZag64(unchecked((long) 0x000000003FFFFFFFUL))); + Assert.AreEqual(0x000000007FFFFFFFuL, + CodedOutputStream.EncodeZigZag64(unchecked((long) 0xFFFFFFFFC0000000UL))); + Assert.AreEqual(0x00000000FFFFFFFEuL, + CodedOutputStream.EncodeZigZag64(unchecked((long) 0x000000007FFFFFFFUL))); + Assert.AreEqual(0x00000000FFFFFFFFuL, + CodedOutputStream.EncodeZigZag64(unchecked((long) 0xFFFFFFFF80000000UL))); + Assert.AreEqual(0xFFFFFFFFFFFFFFFEL, + CodedOutputStream.EncodeZigZag64(unchecked((long) 0x7FFFFFFFFFFFFFFFUL))); + Assert.AreEqual(0xFFFFFFFFFFFFFFFFL, + CodedOutputStream.EncodeZigZag64(unchecked((long) 0x8000000000000000UL))); + } + + [Test] + public void RoundTripZigZag32() + { + // Some easier-to-verify round-trip tests. The inputs (other than 0, 1, -1) + // were chosen semi-randomly via keyboard bashing. 
+ Assert.AreEqual(0, CodedInputStream.DecodeZigZag32(CodedOutputStream.EncodeZigZag32(0))); + Assert.AreEqual(1, CodedInputStream.DecodeZigZag32(CodedOutputStream.EncodeZigZag32(1))); + Assert.AreEqual(-1, CodedInputStream.DecodeZigZag32(CodedOutputStream.EncodeZigZag32(-1))); + Assert.AreEqual(14927, CodedInputStream.DecodeZigZag32(CodedOutputStream.EncodeZigZag32(14927))); + Assert.AreEqual(-3612, CodedInputStream.DecodeZigZag32(CodedOutputStream.EncodeZigZag32(-3612))); + } + + [Test] + public void RoundTripZigZag64() + { + Assert.AreEqual(0, CodedInputStream.DecodeZigZag64(CodedOutputStream.EncodeZigZag64(0))); + Assert.AreEqual(1, CodedInputStream.DecodeZigZag64(CodedOutputStream.EncodeZigZag64(1))); + Assert.AreEqual(-1, CodedInputStream.DecodeZigZag64(CodedOutputStream.EncodeZigZag64(-1))); + Assert.AreEqual(14927, CodedInputStream.DecodeZigZag64(CodedOutputStream.EncodeZigZag64(14927))); + Assert.AreEqual(-3612, CodedInputStream.DecodeZigZag64(CodedOutputStream.EncodeZigZag64(-3612))); + + Assert.AreEqual(856912304801416L, + CodedInputStream.DecodeZigZag64(CodedOutputStream.EncodeZigZag64(856912304801416L))); + Assert.AreEqual(-75123905439571256L, + CodedInputStream.DecodeZigZag64(CodedOutputStream.EncodeZigZag64(-75123905439571256L))); + } + + [Test] + public void TestNegativeEnumNoTag() + { + Assert.AreEqual(10, CodedOutputStream.ComputeInt32Size(-2)); + Assert.AreEqual(10, CodedOutputStream.ComputeEnumSize((int) SampleEnum.NegativeValue)); + + byte[] bytes = new byte[10]; + CodedOutputStream output = new CodedOutputStream(bytes); + output.WriteEnum((int) SampleEnum.NegativeValue); + + Assert.AreEqual(0, output.SpaceLeft); + Assert.AreEqual("FE-FF-FF-FF-FF-FF-FF-FF-FF-01", BitConverter.ToString(bytes)); + } + + [Test] + public void TestCodedInputOutputPosition() + { + byte[] content = new byte[110]; + for (int i = 0; i < content.Length; i++) + content[i] = (byte)i; + + byte[] child = new byte[120]; + { + MemoryStream ms = new MemoryStream(child); + CodedOutputStream cout = new CodedOutputStream(ms, 20); + // Field 11: numeric value: 500 + cout.WriteTag(11, WireFormat.WireType.Varint); + Assert.AreEqual(1, cout.Position); + cout.WriteInt32(500); + Assert.AreEqual(3, cout.Position); + //Field 12: length delimited 120 bytes + cout.WriteTag(12, WireFormat.WireType.LengthDelimited); + Assert.AreEqual(4, cout.Position); + cout.WriteBytes(ByteString.CopyFrom(content)); + Assert.AreEqual(115, cout.Position); + // Field 13: fixed numeric value: 501 + cout.WriteTag(13, WireFormat.WireType.Fixed32); + Assert.AreEqual(116, cout.Position); + cout.WriteSFixed32(501); + Assert.AreEqual(120, cout.Position); + cout.Flush(); + } + + byte[] bytes = new byte[130]; + { + CodedOutputStream cout = new CodedOutputStream(bytes); + // Field 1: numeric value: 500 + cout.WriteTag(1, WireFormat.WireType.Varint); + Assert.AreEqual(1, cout.Position); + cout.WriteInt32(500); + Assert.AreEqual(3, cout.Position); + //Field 2: length delimited 120 bytes + cout.WriteTag(2, WireFormat.WireType.LengthDelimited); + Assert.AreEqual(4, cout.Position); + cout.WriteBytes(ByteString.CopyFrom(child)); + Assert.AreEqual(125, cout.Position); + // Field 3: fixed numeric value: 500 + cout.WriteTag(3, WireFormat.WireType.Fixed32); + Assert.AreEqual(126, cout.Position); + cout.WriteSFixed32(501); + Assert.AreEqual(130, cout.Position); + cout.Flush(); + } + // Now test Input stream: + { + CodedInputStream cin = new CodedInputStream(new MemoryStream(bytes), new byte[50], 0, 0); + Assert.AreEqual(0, cin.Position); + // Field 1: + 
uint tag = cin.ReadTag(); + Assert.AreEqual(1, tag >> 3); + Assert.AreEqual(1, cin.Position); + Assert.AreEqual(500, cin.ReadInt32()); + Assert.AreEqual(3, cin.Position); + //Field 2: + tag = cin.ReadTag(); + Assert.AreEqual(2, tag >> 3); + Assert.AreEqual(4, cin.Position); + int childlen = cin.ReadLength(); + Assert.AreEqual(120, childlen); + Assert.AreEqual(5, cin.Position); + int oldlimit = cin.PushLimit((int)childlen); + Assert.AreEqual(5, cin.Position); + // Now we are reading child message + { + // Field 11: numeric value: 500 + tag = cin.ReadTag(); + Assert.AreEqual(11, tag >> 3); + Assert.AreEqual(6, cin.Position); + Assert.AreEqual(500, cin.ReadInt32()); + Assert.AreEqual(8, cin.Position); + //Field 12: length delimited 120 bytes + tag = cin.ReadTag(); + Assert.AreEqual(12, tag >> 3); + Assert.AreEqual(9, cin.Position); + ByteString bstr = cin.ReadBytes(); + Assert.AreEqual(110, bstr.Length); + Assert.AreEqual((byte) 109, bstr[109]); + Assert.AreEqual(120, cin.Position); + // Field 13: fixed numeric value: 501 + tag = cin.ReadTag(); + Assert.AreEqual(13, tag >> 3); + // ROK - Previously broken here, this returned 126 failing to account for bufferSizeAfterLimit + Assert.AreEqual(121, cin.Position); + Assert.AreEqual(501, cin.ReadSFixed32()); + Assert.AreEqual(125, cin.Position); + Assert.IsTrue(cin.IsAtEnd); + } + cin.PopLimit(oldlimit); + Assert.AreEqual(125, cin.Position); + // Field 3: fixed numeric value: 501 + tag = cin.ReadTag(); + Assert.AreEqual(3, tag >> 3); + Assert.AreEqual(126, cin.Position); + Assert.AreEqual(501, cin.ReadSFixed32()); + Assert.AreEqual(130, cin.Position); + Assert.IsTrue(cin.IsAtEnd); + } + } + + [Test] + public void Dispose_DisposesUnderlyingStream() + { + var memoryStream = new MemoryStream(); + Assert.IsTrue(memoryStream.CanWrite); + using (var cos = new CodedOutputStream(memoryStream)) + { + cos.WriteRawByte(0); + Assert.AreEqual(0, memoryStream.Position); // Not flushed yet + } + Assert.AreEqual(1, memoryStream.ToArray().Length); // Flushed data from CodedOutputStream to MemoryStream + Assert.IsFalse(memoryStream.CanWrite); // Disposed + } + + [Test] + public void Dispose_WithLeaveOpen() + { + var memoryStream = new MemoryStream(); + Assert.IsTrue(memoryStream.CanWrite); + using (var cos = new CodedOutputStream(memoryStream, true)) + { + cos.WriteRawByte(0); + Assert.AreEqual(0, memoryStream.Position); // Not flushed yet + } + Assert.AreEqual(1, memoryStream.Position); // Flushed data from CodedOutputStream to MemoryStream + Assert.IsTrue(memoryStream.CanWrite); // We left the stream open + } + } +} \ No newline at end of file diff --git a/packager/third_party/protobuf/csharp/src/Google.Protobuf.Test/Collections/MapFieldTest.cs b/packager/third_party/protobuf/csharp/src/Google.Protobuf.Test/Collections/MapFieldTest.cs new file mode 100644 index 0000000000..9c8459073c --- /dev/null +++ b/packager/third_party/protobuf/csharp/src/Google.Protobuf.Test/Collections/MapFieldTest.cs @@ -0,0 +1,532 @@ +#region Copyright notice and license +// Protocol Buffers - Google's data interchange format +// Copyright 2015 Google Inc. All rights reserved. +// https://developers.google.com/protocol-buffers/ +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. 
+// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +#endregion + +using System; +using System.Collections.Generic; +using Google.Protobuf.TestProtos; +using NUnit.Framework; +using System.Collections; +using System.Linq; + +namespace Google.Protobuf.Collections +{ + /// + /// Tests for MapField which aren't reliant on the encoded format - + /// tests for serialization/deserialization are part of GeneratedMessageTest. + /// + public class MapFieldTest + { + [Test] + public void Clone_ClonesMessages() + { + var message = new ForeignMessage { C = 20 }; + var map = new MapField { { "x", message } }; + var clone = map.Clone(); + map["x"].C = 30; + Assert.AreEqual(20, clone["x"].C); + } + + [Test] + public void NullValuesProhibited() + { + TestNullValues(0); + TestNullValues(""); + TestNullValues(new TestAllTypes()); + } + + private void TestNullValues(T nonNullValue) + { + var map = new MapField(); + var nullValue = (T) (object) null; + Assert.Throws(() => map.Add(0, nullValue)); + Assert.Throws(() => map[0] = nullValue); + map.Add(1, nonNullValue); + map[1] = nonNullValue; + } + + [Test] + public void Add_ForbidsNullKeys() + { + var map = new MapField(); + Assert.Throws(() => map.Add(null, new ForeignMessage())); + } + + [Test] + public void Indexer_ForbidsNullKeys() + { + var map = new MapField(); + Assert.Throws(() => map[null] = new ForeignMessage()); + } + + [Test] + public void AddPreservesInsertionOrder() + { + var map = new MapField(); + map.Add("a", "v1"); + map.Add("b", "v2"); + map.Add("c", "v3"); + map.Remove("b"); + map.Add("d", "v4"); + CollectionAssert.AreEqual(new[] { "a", "c", "d" }, map.Keys); + CollectionAssert.AreEqual(new[] { "v1", "v3", "v4" }, map.Values); + } + + [Test] + public void EqualityIsOrderInsensitive() + { + var map1 = new MapField(); + map1.Add("a", "v1"); + map1.Add("b", "v2"); + + var map2 = new MapField(); + map2.Add("b", "v2"); + map2.Add("a", "v1"); + + EqualityTester.AssertEquality(map1, map2); + } + + [Test] + public void EqualityIsKeySensitive() + { + var map1 = new MapField(); + map1.Add("first key", "v1"); + map1.Add("second key", "v2"); + + var map2 = new MapField(); + map2.Add("third key", "v1"); + map2.Add("fourth key", "v2"); + + EqualityTester.AssertInequality(map1, map2); + } + + [Test] + public void Equality_Simple() + { + var map = new MapField(); + EqualityTester.AssertEquality(map, map); + 
EqualityTester.AssertInequality(map, null); + Assert.IsFalse(map.Equals(new object())); + } + + [Test] + public void EqualityIsValueSensitive() + { + // Note: Without some care, it's a little easier than one might + // hope to see hash collisions, but only in some environments... + var map1 = new MapField(); + map1.Add("a", "first value"); + map1.Add("b", "second value"); + + var map2 = new MapField(); + map2.Add("a", "third value"); + map2.Add("b", "fourth value"); + + EqualityTester.AssertInequality(map1, map2); + } + + [Test] + public void Add_Dictionary() + { + var map1 = new MapField + { + { "x", "y" }, + { "a", "b" } + }; + var map2 = new MapField + { + { "before", "" }, + map1, + { "after", "" } + }; + var expected = new MapField + { + { "before", "" }, + { "x", "y" }, + { "a", "b" }, + { "after", "" } + }; + Assert.AreEqual(expected, map2); + CollectionAssert.AreEqual(new[] { "before", "x", "a", "after" }, map2.Keys); + } + + // General IDictionary behavior tests + [Test] + public void Add_KeyAlreadyExists() + { + var map = new MapField(); + map.Add("foo", "bar"); + Assert.Throws(() => map.Add("foo", "baz")); + } + + [Test] + public void Add_Pair() + { + var map = new MapField(); + ICollection> collection = map; + collection.Add(NewKeyValuePair("x", "y")); + Assert.AreEqual("y", map["x"]); + Assert.Throws(() => collection.Add(NewKeyValuePair("x", "z"))); + } + + [Test] + public void Contains_Pair() + { + var map = new MapField { { "x", "y" } }; + ICollection> collection = map; + Assert.IsTrue(collection.Contains(NewKeyValuePair("x", "y"))); + Assert.IsFalse(collection.Contains(NewKeyValuePair("x", "z"))); + Assert.IsFalse(collection.Contains(NewKeyValuePair("z", "y"))); + } + + [Test] + public void Remove_Key() + { + var map = new MapField(); + map.Add("foo", "bar"); + Assert.AreEqual(1, map.Count); + Assert.IsFalse(map.Remove("missing")); + Assert.AreEqual(1, map.Count); + Assert.IsTrue(map.Remove("foo")); + Assert.AreEqual(0, map.Count); + Assert.Throws(() => map.Remove(null)); + } + + [Test] + public void Remove_Pair() + { + var map = new MapField(); + map.Add("foo", "bar"); + ICollection> collection = map; + Assert.AreEqual(1, map.Count); + Assert.IsFalse(collection.Remove(NewKeyValuePair("wrong key", "bar"))); + Assert.AreEqual(1, map.Count); + Assert.IsFalse(collection.Remove(NewKeyValuePair("foo", "wrong value"))); + Assert.AreEqual(1, map.Count); + Assert.IsTrue(collection.Remove(NewKeyValuePair("foo", "bar"))); + Assert.AreEqual(0, map.Count); + Assert.Throws(() => collection.Remove(new KeyValuePair(null, ""))); + } + + [Test] + public void CopyTo_Pair() + { + var map = new MapField(); + map.Add("foo", "bar"); + ICollection> collection = map; + KeyValuePair[] array = new KeyValuePair[3]; + collection.CopyTo(array, 1); + Assert.AreEqual(NewKeyValuePair("foo", "bar"), array[1]); + } + + [Test] + public void Clear() + { + var map = new MapField { { "x", "y" } }; + Assert.AreEqual(1, map.Count); + map.Clear(); + Assert.AreEqual(0, map.Count); + map.Add("x", "y"); + Assert.AreEqual(1, map.Count); + } + + [Test] + public void Indexer_Get() + { + var map = new MapField { { "x", "y" } }; + Assert.AreEqual("y", map["x"]); + Assert.Throws(() => { var ignored = map["z"]; }); + } + + [Test] + public void Indexer_Set() + { + var map = new MapField(); + map["x"] = "y"; + Assert.AreEqual("y", map["x"]); + map["x"] = "z"; // This won't throw, unlike Add. 
+ Assert.AreEqual("z", map["x"]); + } + + [Test] + public void GetEnumerator_NonGeneric() + { + IEnumerable map = new MapField { { "x", "y" } }; + CollectionAssert.AreEqual(new[] { new KeyValuePair("x", "y") }, + map.Cast().ToList()); + } + + // Test for the explicitly-implemented non-generic IDictionary interface + [Test] + public void IDictionary_GetEnumerator() + { + IDictionary map = new MapField { { "x", "y" } }; + var enumerator = map.GetEnumerator(); + + // Commented assertions show an ideal situation - it looks like + // the LinkedList enumerator doesn't throw when you ask for the current entry + // at an inappropriate time; fixing this would be more work than it's worth. + // Assert.Throws(() => enumerator.Current.GetHashCode()); + Assert.IsTrue(enumerator.MoveNext()); + Assert.AreEqual("x", enumerator.Key); + Assert.AreEqual("y", enumerator.Value); + Assert.AreEqual(new DictionaryEntry("x", "y"), enumerator.Current); + Assert.AreEqual(new DictionaryEntry("x", "y"), enumerator.Entry); + Assert.IsFalse(enumerator.MoveNext()); + // Assert.Throws(() => enumerator.Current.GetHashCode()); + enumerator.Reset(); + // Assert.Throws(() => enumerator.Current.GetHashCode()); + Assert.IsTrue(enumerator.MoveNext()); + Assert.AreEqual("x", enumerator.Key); // Assume the rest are okay + } + + [Test] + public void IDictionary_Add() + { + var map = new MapField { { "x", "y" } }; + IDictionary dictionary = map; + dictionary.Add("a", "b"); + Assert.AreEqual("b", map["a"]); + Assert.Throws(() => dictionary.Add("a", "duplicate")); + Assert.Throws(() => dictionary.Add(new object(), "key is bad")); + Assert.Throws(() => dictionary.Add("value is bad", new object())); + } + + [Test] + public void IDictionary_Contains() + { + var map = new MapField { { "x", "y" } }; + IDictionary dictionary = map; + + Assert.IsFalse(dictionary.Contains("a")); + Assert.IsFalse(dictionary.Contains(5)); + // Surprising, but IDictionary.Contains is only about keys. 
+ Assert.IsFalse(dictionary.Contains(new DictionaryEntry("x", "y"))); + Assert.IsTrue(dictionary.Contains("x")); + } + + [Test] + public void IDictionary_Remove() + { + var map = new MapField { { "x", "y" } }; + IDictionary dictionary = map; + dictionary.Remove("a"); + Assert.AreEqual(1, dictionary.Count); + dictionary.Remove(5); + Assert.AreEqual(1, dictionary.Count); + dictionary.Remove(new DictionaryEntry("x", "y")); + Assert.AreEqual(1, dictionary.Count); + dictionary.Remove("x"); + Assert.AreEqual(0, dictionary.Count); + Assert.Throws(() => dictionary.Remove(null)); + } + + [Test] + public void IDictionary_CopyTo() + { + var map = new MapField { { "x", "y" } }; + IDictionary dictionary = map; + var array = new DictionaryEntry[3]; + dictionary.CopyTo(array, 1); + CollectionAssert.AreEqual(new[] { default(DictionaryEntry), new DictionaryEntry("x", "y"), default(DictionaryEntry) }, + array); + var objectArray = new object[3]; + dictionary.CopyTo(objectArray, 1); + CollectionAssert.AreEqual(new object[] { null, new DictionaryEntry("x", "y"), null }, + objectArray); + } + + [Test] + public void IDictionary_IsFixedSize() + { + var map = new MapField { { "x", "y" } }; + IDictionary dictionary = map; + Assert.IsFalse(dictionary.IsFixedSize); + } + + [Test] + public void IDictionary_Keys() + { + IDictionary dictionary = new MapField { { "x", "y" } }; + CollectionAssert.AreEqual(new[] { "x" }, dictionary.Keys); + } + + [Test] + public void IDictionary_Values() + { + IDictionary dictionary = new MapField { { "x", "y" } }; + CollectionAssert.AreEqual(new[] { "y" }, dictionary.Values); + } + + [Test] + public void IDictionary_IsSynchronized() + { + IDictionary dictionary = new MapField { { "x", "y" } }; + Assert.IsFalse(dictionary.IsSynchronized); + } + + [Test] + public void IDictionary_SyncRoot() + { + IDictionary dictionary = new MapField { { "x", "y" } }; + Assert.AreSame(dictionary, dictionary.SyncRoot); + } + + [Test] + public void IDictionary_Indexer_Get() + { + IDictionary dictionary = new MapField { { "x", "y" } }; + Assert.AreEqual("y", dictionary["x"]); + Assert.IsNull(dictionary["a"]); + Assert.IsNull(dictionary[5]); + Assert.Throws(() => dictionary[null].GetHashCode()); + } + + [Test] + public void IDictionary_Indexer_Set() + { + var map = new MapField { { "x", "y" } }; + IDictionary dictionary = map; + map["a"] = "b"; + Assert.AreEqual("b", map["a"]); + map["a"] = "c"; + Assert.AreEqual("c", map["a"]); + Assert.Throws(() => dictionary[5] = "x"); + Assert.Throws(() => dictionary["x"] = 5); + Assert.Throws(() => dictionary[null] = "z"); + Assert.Throws(() => dictionary["x"] = null); + } + + [Test] + public void KeysReturnsLiveView() + { + var map = new MapField(); + var keys = map.Keys; + CollectionAssert.AreEqual(new string[0], keys); + map["foo"] = "bar"; + map["x"] = "y"; + CollectionAssert.AreEqual(new[] { "foo", "x" }, keys); + } + + [Test] + public void ValuesReturnsLiveView() + { + var map = new MapField(); + var values = map.Values; + CollectionAssert.AreEqual(new string[0], values); + map["foo"] = "bar"; + map["x"] = "y"; + CollectionAssert.AreEqual(new[] { "bar", "y" }, values); + } + + // Just test keys - we know the implementation is the same for values + [Test] + public void ViewsAreReadOnly() + { + var map = new MapField(); + var keys = map.Keys; + Assert.IsTrue(keys.IsReadOnly); + Assert.Throws(() => keys.Clear()); + Assert.Throws(() => keys.Remove("a")); + Assert.Throws(() => keys.Add("a")); + } + + // Just test keys - we know the implementation is the same for values + 
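+        // The copy below should leave the untouched slots of the target array as null, while
+        // out-of-range start indexes are rejected up front.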
[Test] + public void ViewCopyTo() + { + var map = new MapField { { "foo", "bar" }, { "x", "y" } }; + var keys = map.Keys; + var array = new string[4]; + Assert.Throws(() => keys.CopyTo(array, 3)); + Assert.Throws(() => keys.CopyTo(array, -1)); + keys.CopyTo(array, 1); + CollectionAssert.AreEqual(new[] { null, "foo", "x", null }, array); + } + + // Just test keys - we know the implementation is the same for values + [Test] + public void NonGenericViewCopyTo() + { + IDictionary map = new MapField { { "foo", "bar" }, { "x", "y" } }; + ICollection keys = map.Keys; + // Note the use of the Array type here rather than string[] + Array array = new string[4]; + Assert.Throws(() => keys.CopyTo(array, 3)); + Assert.Throws(() => keys.CopyTo(array, -1)); + keys.CopyTo(array, 1); + CollectionAssert.AreEqual(new[] { null, "foo", "x", null }, array); + } + + [Test] + public void KeysContains() + { + var map = new MapField { { "foo", "bar" }, { "x", "y" } }; + var keys = map.Keys; + Assert.IsTrue(keys.Contains("foo")); + Assert.IsFalse(keys.Contains("bar")); // It's a value! + Assert.IsFalse(keys.Contains("1")); + // Keys can't be null, so we should prevent contains check + Assert.Throws(() => keys.Contains(null)); + } + + [Test] + public void ValuesContains() + { + var map = new MapField { { "foo", "bar" }, { "x", "y" } }; + var values = map.Values; + Assert.IsTrue(values.Contains("bar")); + Assert.IsFalse(values.Contains("foo")); // It's a key! + Assert.IsFalse(values.Contains("1")); + // Values can be null, so this makes sense + Assert.IsFalse(values.Contains(null)); + } + + [Test] + public void ToString_StringToString() + { + var map = new MapField { { "foo", "bar" }, { "x", "y" } }; + Assert.AreEqual("{ \"foo\": \"bar\", \"x\": \"y\" }", map.ToString()); + } + + [Test] + public void ToString_UnsupportedKeyType() + { + var map = new MapField { { 10, "foo" } }; + Assert.Throws(() => map.ToString()); + } + + private static KeyValuePair NewKeyValuePair(TKey key, TValue value) + { + return new KeyValuePair(key, value); + } + } +} diff --git a/packager/third_party/protobuf/csharp/src/Google.Protobuf.Test/Collections/RepeatedFieldTest.cs b/packager/third_party/protobuf/csharp/src/Google.Protobuf.Test/Collections/RepeatedFieldTest.cs new file mode 100644 index 0000000000..8ed54cfb48 --- /dev/null +++ b/packager/third_party/protobuf/csharp/src/Google.Protobuf.Test/Collections/RepeatedFieldTest.cs @@ -0,0 +1,660 @@ +#region Copyright notice and license +// Protocol Buffers - Google's data interchange format +// Copyright 2015 Google Inc. All rights reserved. +// https://developers.google.com/protocol-buffers/ +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. 
+// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +#endregion + +using System; +using System.Collections; +using System.Collections.Generic; +using System.IO; +using System.Linq; +using System.Text; +using Google.Protobuf.TestProtos; +using Google.Protobuf.WellKnownTypes; +using NUnit.Framework; + +namespace Google.Protobuf.Collections +{ + public class RepeatedFieldTest + { + [Test] + public void NullValuesRejected() + { + var list = new RepeatedField(); + Assert.Throws(() => list.Add((string)null)); + Assert.Throws(() => list.Add((IEnumerable)null)); + Assert.Throws(() => list.Add((RepeatedField)null)); + Assert.Throws(() => list.Contains(null)); + Assert.Throws(() => list.IndexOf(null)); + } + + [Test] + public void Add_SingleItem() + { + var list = new RepeatedField(); + list.Add("foo"); + Assert.AreEqual(1, list.Count); + Assert.AreEqual("foo", list[0]); + } + + [Test] + public void Add_Sequence() + { + var list = new RepeatedField(); + list.Add(new[] { "foo", "bar" }); + Assert.AreEqual(2, list.Count); + Assert.AreEqual("foo", list[0]); + Assert.AreEqual("bar", list[1]); + } + + [Test] + public void Add_RepeatedField() + { + var list = new RepeatedField { "original" }; + list.Add(new RepeatedField { "foo", "bar" }); + Assert.AreEqual(3, list.Count); + Assert.AreEqual("original", list[0]); + Assert.AreEqual("foo", list[1]); + Assert.AreEqual("bar", list[2]); + } + + [Test] + public void RemoveAt_Valid() + { + var list = new RepeatedField { "first", "second", "third" }; + list.RemoveAt(1); + CollectionAssert.AreEqual(new[] { "first", "third" }, list); + // Just check that these don't throw... + list.RemoveAt(list.Count - 1); // Now the count will be 1... 
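+            // Removing the last remaining element (index 0) should leave the list empty.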
+ list.RemoveAt(0); + Assert.AreEqual(0, list.Count); + } + + [Test] + public void RemoveAt_Invalid() + { + var list = new RepeatedField { "first", "second", "third" }; + Assert.Throws(() => list.RemoveAt(-1)); + Assert.Throws(() => list.RemoveAt(3)); + } + + [Test] + public void Insert_Valid() + { + var list = new RepeatedField { "first", "second" }; + list.Insert(1, "middle"); + CollectionAssert.AreEqual(new[] { "first", "middle", "second" }, list); + list.Insert(3, "end"); + CollectionAssert.AreEqual(new[] { "first", "middle", "second", "end" }, list); + list.Insert(0, "start"); + CollectionAssert.AreEqual(new[] { "start", "first", "middle", "second", "end" }, list); + } + + [Test] + public void Insert_Invalid() + { + var list = new RepeatedField { "first", "second" }; + Assert.Throws(() => list.Insert(-1, "foo")); + Assert.Throws(() => list.Insert(3, "foo")); + Assert.Throws(() => list.Insert(0, null)); + } + + [Test] + public void Equals_RepeatedField() + { + var list = new RepeatedField { "first", "second" }; + Assert.IsFalse(list.Equals((RepeatedField) null)); + Assert.IsTrue(list.Equals(list)); + Assert.IsFalse(list.Equals(new RepeatedField { "first", "third" })); + Assert.IsFalse(list.Equals(new RepeatedField { "first" })); + Assert.IsTrue(list.Equals(new RepeatedField { "first", "second" })); + } + + [Test] + public void Equals_Object() + { + var list = new RepeatedField { "first", "second" }; + Assert.IsFalse(list.Equals((object) null)); + Assert.IsTrue(list.Equals((object) list)); + Assert.IsFalse(list.Equals((object) new RepeatedField { "first", "third" })); + Assert.IsFalse(list.Equals((object) new RepeatedField { "first" })); + Assert.IsTrue(list.Equals((object) new RepeatedField { "first", "second" })); + Assert.IsFalse(list.Equals(new object())); + } + + [Test] + public void GetEnumerator_GenericInterface() + { + IEnumerable list = new RepeatedField { "first", "second" }; + // Select gets rid of the optimizations in ToList... 
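+            // ToList can bulk-copy anything implementing ICollection<T>; the identity Select forces it
+            // down the plain enumerator path, which is the code this test is meant to exercise.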
+ CollectionAssert.AreEqual(new[] { "first", "second" }, list.Select(x => x).ToList()); + } + + [Test] + public void GetEnumerator_NonGenericInterface() + { + IEnumerable list = new RepeatedField { "first", "second" }; + CollectionAssert.AreEqual(new[] { "first", "second" }, list.Cast().ToList()); + } + + [Test] + public void CopyTo() + { + var list = new RepeatedField { "first", "second" }; + string[] stringArray = new string[4]; + list.CopyTo(stringArray, 1); + CollectionAssert.AreEqual(new[] { null, "first", "second", null }, stringArray); + } + + [Test] + public void Indexer_Get() + { + var list = new RepeatedField { "first", "second" }; + Assert.AreEqual("first", list[0]); + Assert.AreEqual("second", list[1]); + Assert.Throws(() => list[-1].GetHashCode()); + Assert.Throws(() => list[2].GetHashCode()); + } + + [Test] + public void Indexer_Set() + { + var list = new RepeatedField { "first", "second" }; + list[0] = "changed"; + Assert.AreEqual("changed", list[0]); + Assert.Throws(() => list[0] = null); + Assert.Throws(() => list[-1] = "bad"); + Assert.Throws(() => list[2] = "bad"); + } + + [Test] + public void Clone_ReturnsMutable() + { + var list = new RepeatedField { 0 }; + var clone = list.Clone(); + clone[0] = 1; + } + + [Test] + public void Enumerator() + { + var list = new RepeatedField { "first", "second" }; + using (var enumerator = list.GetEnumerator()) + { + Assert.IsTrue(enumerator.MoveNext()); + Assert.AreEqual("first", enumerator.Current); + Assert.IsTrue(enumerator.MoveNext()); + Assert.AreEqual("second", enumerator.Current); + Assert.IsFalse(enumerator.MoveNext()); + Assert.IsFalse(enumerator.MoveNext()); + } + } + + [Test] + public void AddEntriesFrom_PackedInt32() + { + uint packedTag = WireFormat.MakeTag(10, WireFormat.WireType.LengthDelimited); + var stream = new MemoryStream(); + var output = new CodedOutputStream(stream); + var length = CodedOutputStream.ComputeInt32Size(10) + + CodedOutputStream.ComputeInt32Size(999) + + CodedOutputStream.ComputeInt32Size(-1000); + output.WriteTag(packedTag); + output.WriteRawVarint32((uint) length); + output.WriteInt32(10); + output.WriteInt32(999); + output.WriteInt32(-1000); + output.Flush(); + stream.Position = 0; + + // Deliberately "expecting" a non-packed tag, but we detect that the data is + // actually packed. + uint nonPackedTag = WireFormat.MakeTag(10, WireFormat.WireType.LengthDelimited); + var field = new RepeatedField(); + var input = new CodedInputStream(stream); + input.AssertNextTag(packedTag); + field.AddEntriesFrom(input, FieldCodec.ForInt32(nonPackedTag)); + CollectionAssert.AreEqual(new[] { 10, 999, -1000 }, field); + Assert.IsTrue(input.IsAtEnd); + } + + [Test] + public void AddEntriesFrom_NonPackedInt32() + { + uint nonPackedTag = WireFormat.MakeTag(10, WireFormat.WireType.Varint); + var stream = new MemoryStream(); + var output = new CodedOutputStream(stream); + output.WriteTag(nonPackedTag); + output.WriteInt32(10); + output.WriteTag(nonPackedTag); + output.WriteInt32(999); + output.WriteTag(nonPackedTag); + output.WriteInt32(-1000); // Just for variety... + output.Flush(); + stream.Position = 0; + + // Deliberately "expecting" a packed tag, but we detect that the data is + // actually not packed. 
+ uint packedTag = WireFormat.MakeTag(10, WireFormat.WireType.LengthDelimited); + var field = new RepeatedField(); + var input = new CodedInputStream(stream); + input.AssertNextTag(nonPackedTag); + field.AddEntriesFrom(input, FieldCodec.ForInt32(packedTag)); + CollectionAssert.AreEqual(new[] { 10, 999, -1000 }, field); + Assert.IsTrue(input.IsAtEnd); + } + + [Test] + public void AddEntriesFrom_String() + { + uint tag = WireFormat.MakeTag(10, WireFormat.WireType.LengthDelimited); + var stream = new MemoryStream(); + var output = new CodedOutputStream(stream); + output.WriteTag(tag); + output.WriteString("Foo"); + output.WriteTag(tag); + output.WriteString(""); + output.WriteTag(tag); + output.WriteString("Bar"); + output.Flush(); + stream.Position = 0; + + var field = new RepeatedField(); + var input = new CodedInputStream(stream); + input.AssertNextTag(tag); + field.AddEntriesFrom(input, FieldCodec.ForString(tag)); + CollectionAssert.AreEqual(new[] { "Foo", "", "Bar" }, field); + Assert.IsTrue(input.IsAtEnd); + } + + [Test] + public void AddEntriesFrom_Message() + { + var message1 = new ForeignMessage { C = 2000 }; + var message2 = new ForeignMessage { C = -250 }; + + uint tag = WireFormat.MakeTag(10, WireFormat.WireType.LengthDelimited); + var stream = new MemoryStream(); + var output = new CodedOutputStream(stream); + output.WriteTag(tag); + output.WriteMessage(message1); + output.WriteTag(tag); + output.WriteMessage(message2); + output.Flush(); + stream.Position = 0; + + var field = new RepeatedField(); + var input = new CodedInputStream(stream); + input.AssertNextTag(tag); + field.AddEntriesFrom(input, FieldCodec.ForMessage(tag, ForeignMessage.Parser)); + CollectionAssert.AreEqual(new[] { message1, message2}, field); + Assert.IsTrue(input.IsAtEnd); + } + + [Test] + public void WriteTo_PackedInt32() + { + uint tag = WireFormat.MakeTag(10, WireFormat.WireType.LengthDelimited); + var field = new RepeatedField { 10, 1000, 1000000 }; + var stream = new MemoryStream(); + var output = new CodedOutputStream(stream); + field.WriteTo(output, FieldCodec.ForInt32(tag)); + output.Flush(); + stream.Position = 0; + + var input = new CodedInputStream(stream); + input.AssertNextTag(tag); + var length = input.ReadLength(); + Assert.AreEqual(10, input.ReadInt32()); + Assert.AreEqual(1000, input.ReadInt32()); + Assert.AreEqual(1000000, input.ReadInt32()); + Assert.IsTrue(input.IsAtEnd); + Assert.AreEqual(1 + CodedOutputStream.ComputeLengthSize(length) + length, stream.Length); + } + + [Test] + public void WriteTo_NonPackedInt32() + { + uint tag = WireFormat.MakeTag(10, WireFormat.WireType.Varint); + var field = new RepeatedField { 10, 1000, 1000000}; + var stream = new MemoryStream(); + var output = new CodedOutputStream(stream); + field.WriteTo(output, FieldCodec.ForInt32(tag)); + output.Flush(); + stream.Position = 0; + + var input = new CodedInputStream(stream); + input.AssertNextTag(tag); + Assert.AreEqual(10, input.ReadInt32()); + input.AssertNextTag(tag); + Assert.AreEqual(1000, input.ReadInt32()); + input.AssertNextTag(tag); + Assert.AreEqual(1000000, input.ReadInt32()); + Assert.IsTrue(input.IsAtEnd); + } + + [Test] + public void WriteTo_String() + { + uint tag = WireFormat.MakeTag(10, WireFormat.WireType.LengthDelimited); + var field = new RepeatedField { "Foo", "", "Bar" }; + var stream = new MemoryStream(); + var output = new CodedOutputStream(stream); + field.WriteTo(output, FieldCodec.ForString(tag)); + output.Flush(); + stream.Position = 0; + + var input = new CodedInputStream(stream); + 
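+            // Each string should have been written as its own tag/length/UTF-8 bytes triple, so the
+            // reader expects the tag to repeat before every value, including the empty string.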
input.AssertNextTag(tag); + Assert.AreEqual("Foo", input.ReadString()); + input.AssertNextTag(tag); + Assert.AreEqual("", input.ReadString()); + input.AssertNextTag(tag); + Assert.AreEqual("Bar", input.ReadString()); + Assert.IsTrue(input.IsAtEnd); + } + + [Test] + public void WriteTo_Message() + { + var message1 = new ForeignMessage { C = 20 }; + var message2 = new ForeignMessage { C = 25 }; + uint tag = WireFormat.MakeTag(10, WireFormat.WireType.LengthDelimited); + var field = new RepeatedField { message1, message2 }; + var stream = new MemoryStream(); + var output = new CodedOutputStream(stream); + field.WriteTo(output, FieldCodec.ForMessage(tag, ForeignMessage.Parser)); + output.Flush(); + stream.Position = 0; + + var input = new CodedInputStream(stream); + input.AssertNextTag(tag); + Assert.AreEqual(message1, input.ReadMessage(ForeignMessage.Parser)); + input.AssertNextTag(tag); + Assert.AreEqual(message2, input.ReadMessage(ForeignMessage.Parser)); + Assert.IsTrue(input.IsAtEnd); + } + + [Test] + public void CalculateSize_VariableSizeNonPacked() + { + var list = new RepeatedField { 1, 500, 1 }; + var tag = WireFormat.MakeTag(1, WireFormat.WireType.Varint); + // 2 bytes for the first entry, 3 bytes for the second, 2 bytes for the third + Assert.AreEqual(7, list.CalculateSize(FieldCodec.ForInt32(tag))); + } + + [Test] + public void CalculateSize_FixedSizeNonPacked() + { + var list = new RepeatedField { 1, 500, 1 }; + var tag = WireFormat.MakeTag(1, WireFormat.WireType.Fixed32); + // 5 bytes for the each entry + Assert.AreEqual(15, list.CalculateSize(FieldCodec.ForSFixed32(tag))); + } + + [Test] + public void CalculateSize_VariableSizePacked() + { + var list = new RepeatedField { 1, 500, 1}; + var tag = WireFormat.MakeTag(1, WireFormat.WireType.LengthDelimited); + // 1 byte for the tag, 1 byte for the length, + // 1 byte for the first entry, 2 bytes for the second, 1 byte for the third + Assert.AreEqual(6, list.CalculateSize(FieldCodec.ForInt32(tag))); + } + + [Test] + public void CalculateSize_FixedSizePacked() + { + var list = new RepeatedField { 1, 500, 1 }; + var tag = WireFormat.MakeTag(1, WireFormat.WireType.LengthDelimited); + // 1 byte for the tag, 1 byte for the length, 4 bytes per entry + Assert.AreEqual(14, list.CalculateSize(FieldCodec.ForSFixed32(tag))); + } + + [Test] + public void TestNegativeEnumArray() + { + int arraySize = 1 + 1 + (11 * 5); + int msgSize = arraySize; + byte[] bytes = new byte[msgSize]; + CodedOutputStream output = new CodedOutputStream(bytes); + uint tag = WireFormat.MakeTag(8, WireFormat.WireType.Varint); + for (int i = 0; i >= -5; i--) + { + output.WriteTag(tag); + output.WriteEnum(i); + } + + Assert.AreEqual(0, output.SpaceLeft); + + CodedInputStream input = new CodedInputStream(bytes); + tag = input.ReadTag(); + + RepeatedField values = new RepeatedField(); + values.AddEntriesFrom(input, FieldCodec.ForEnum(tag, x => (int)x, x => (SampleEnum)x)); + + Assert.AreEqual(6, values.Count); + Assert.AreEqual(SampleEnum.None, values[0]); + Assert.AreEqual(((SampleEnum)(-1)), values[1]); + Assert.AreEqual(SampleEnum.NegativeValue, values[2]); + Assert.AreEqual(((SampleEnum)(-3)), values[3]); + Assert.AreEqual(((SampleEnum)(-4)), values[4]); + Assert.AreEqual(((SampleEnum)(-5)), values[5]); + } + + + [Test] + public void TestNegativeEnumPackedArray() + { + int arraySize = 1 + (10 * 5); + int msgSize = 1 + 1 + arraySize; + byte[] bytes = new byte[msgSize]; + CodedOutputStream output = new CodedOutputStream(bytes); + // Length-delimited to show we want the 
packed representation + uint tag = WireFormat.MakeTag(8, WireFormat.WireType.LengthDelimited); + output.WriteTag(tag); + int size = 0; + for (int i = 0; i >= -5; i--) + { + size += CodedOutputStream.ComputeEnumSize(i); + } + output.WriteRawVarint32((uint)size); + for (int i = 0; i >= -5; i--) + { + output.WriteEnum(i); + } + Assert.AreEqual(0, output.SpaceLeft); + + CodedInputStream input = new CodedInputStream(bytes); + tag = input.ReadTag(); + + RepeatedField values = new RepeatedField(); + values.AddEntriesFrom(input, FieldCodec.ForEnum(tag, x => (int)x, x => (SampleEnum)x)); + + Assert.AreEqual(6, values.Count); + Assert.AreEqual(SampleEnum.None, values[0]); + Assert.AreEqual(((SampleEnum)(-1)), values[1]); + Assert.AreEqual(SampleEnum.NegativeValue, values[2]); + Assert.AreEqual(((SampleEnum)(-3)), values[3]); + Assert.AreEqual(((SampleEnum)(-4)), values[4]); + Assert.AreEqual(((SampleEnum)(-5)), values[5]); + } + + // Fairly perfunctory tests for the non-generic IList implementation + [Test] + public void IList_Indexer() + { + var field = new RepeatedField { "first", "second" }; + IList list = field; + Assert.AreEqual("first", list[0]); + list[1] = "changed"; + Assert.AreEqual("changed", field[1]); + } + + [Test] + public void IList_Contains() + { + IList list = new RepeatedField { "first", "second" }; + Assert.IsTrue(list.Contains("second")); + Assert.IsFalse(list.Contains("third")); + Assert.IsFalse(list.Contains(new object())); + } + + [Test] + public void IList_Add() + { + IList list = new RepeatedField { "first", "second" }; + list.Add("third"); + CollectionAssert.AreEqual(new[] { "first", "second", "third" }, list); + } + + [Test] + public void IList_Remove() + { + IList list = new RepeatedField { "first", "second" }; + list.Remove("third"); // No-op, no exception + list.Remove(new object()); // No-op, no exception + list.Remove("first"); + CollectionAssert.AreEqual(new[] { "second" }, list); + } + + [Test] + public void IList_IsFixedSize() + { + var field = new RepeatedField { "first", "second" }; + IList list = field; + Assert.IsFalse(list.IsFixedSize); + } + + [Test] + public void IList_IndexOf() + { + IList list = new RepeatedField { "first", "second" }; + Assert.AreEqual(1, list.IndexOf("second")); + Assert.AreEqual(-1, list.IndexOf("third")); + Assert.AreEqual(-1, list.IndexOf(new object())); + } + + [Test] + public void IList_SyncRoot() + { + IList list = new RepeatedField { "first", "second" }; + Assert.AreSame(list, list.SyncRoot); + } + + [Test] + public void IList_CopyTo() + { + IList list = new RepeatedField { "first", "second" }; + string[] stringArray = new string[4]; + list.CopyTo(stringArray, 1); + CollectionAssert.AreEqual(new[] { null, "first", "second", null }, stringArray); + + object[] objectArray = new object[4]; + list.CopyTo(objectArray, 1); + CollectionAssert.AreEqual(new[] { null, "first", "second", null }, objectArray); + + Assert.Throws(() => list.CopyTo(new StringBuilder[4], 1)); + Assert.Throws(() => list.CopyTo(new int[4], 1)); + } + + [Test] + public void IList_IsSynchronized() + { + IList list = new RepeatedField { "first", "second" }; + Assert.IsFalse(list.IsSynchronized); + } + + [Test] + public void IList_Insert() + { + IList list = new RepeatedField { "first", "second" }; + list.Insert(1, "middle"); + CollectionAssert.AreEqual(new[] { "first", "middle", "second" }, list); + } + + [Test] + public void ToString_Integers() + { + var list = new RepeatedField { 5, 10, 20 }; + var text = list.ToString(); + Assert.AreEqual("[ 5, 10, 20 ]", text); + 
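+            // ToString on a RepeatedField produces JSON-style output: bracketed, comma-separated, with
+            // strings quoted, as the following tests also assert.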
} + + [Test] + public void ToString_Strings() + { + var list = new RepeatedField { "x", "y", "z" }; + var text = list.ToString(); + Assert.AreEqual("[ \"x\", \"y\", \"z\" ]", text); + } + + [Test] + public void ToString_Messages() + { + var list = new RepeatedField { new TestAllTypes { SingleDouble = 1.5 }, new TestAllTypes { SingleInt32 = 10 } }; + var text = list.ToString(); + Assert.AreEqual("[ { \"singleDouble\": 1.5 }, { \"singleInt32\": 10 } ]", text); + } + + [Test] + public void ToString_Empty() + { + var list = new RepeatedField { }; + var text = list.ToString(); + Assert.AreEqual("[ ]", text); + } + + [Test] + public void ToString_InvalidElementType() + { + var list = new RepeatedField { 15m }; + Assert.Throws(() => list.ToString()); + } + + [Test] + public void ToString_Timestamp() + { + var list = new RepeatedField { Timestamp.FromDateTime(new DateTime(2015, 10, 1, 12, 34, 56, DateTimeKind.Utc)) }; + var text = list.ToString(); + Assert.AreEqual("[ \"2015-10-01T12:34:56Z\" ]", text); + } + + [Test] + public void ToString_Struct() + { + var message = new Struct { Fields = { { "foo", new Value { NumberValue = 20 } } } }; + var list = new RepeatedField { message }; + var text = list.ToString(); + Assert.AreEqual(text, "[ { \"foo\": 20 } ]", message.ToString()); + } + } +} diff --git a/packager/third_party/protobuf/csharp/src/Google.Protobuf.Test/Compatibility/PropertyInfoExtensionsTest.cs b/packager/third_party/protobuf/csharp/src/Google.Protobuf.Test/Compatibility/PropertyInfoExtensionsTest.cs new file mode 100644 index 0000000000..df23a09cd4 --- /dev/null +++ b/packager/third_party/protobuf/csharp/src/Google.Protobuf.Test/Compatibility/PropertyInfoExtensionsTest.cs @@ -0,0 +1,98 @@ +#region Copyright notice and license +// Protocol Buffers - Google's data interchange format +// Copyright 2015 Google Inc. All rights reserved. +// https://developers.google.com/protocol-buffers/ +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
+#endregion + +using NUnit.Framework; +using System.Reflection; + +namespace Google.Protobuf.Compatibility +{ + public class PropertyInfoExtensionsTest + { + public string PublicReadWrite { get; set; } + private string PrivateReadWrite { get; set; } + public string PublicReadPrivateWrite { get; private set; } + public string PrivateReadPublicWrite { private get; set; } + public string PublicReadOnly { get { return null; } } + private string PrivateReadOnly { get { return null; } } + public string PublicWriteOnly { set { } } + private string PrivateWriteOnly { set { } } + + [Test] + [TestCase("PublicReadWrite")] + [TestCase("PublicReadPrivateWrite")] + [TestCase("PublicReadOnly")] + public void GetGetMethod_Success(string name) + { + var propertyInfo = typeof(PropertyInfoExtensionsTest) + .GetProperty(name, BindingFlags.Instance | BindingFlags.NonPublic | BindingFlags.Public); + Assert.IsNotNull(PropertyInfoExtensions.GetGetMethod(propertyInfo)); + } + + [Test] + [TestCase("PrivateReadWrite")] + [TestCase("PrivateReadPublicWrite")] + [TestCase("PrivateReadOnly")] + [TestCase("PublicWriteOnly")] + [TestCase("PrivateWriteOnly")] + public void GetGetMethod_NoAccessibleGetter(string name) + { + var propertyInfo = typeof(PropertyInfoExtensionsTest) + .GetProperty(name, BindingFlags.Instance | BindingFlags.NonPublic | BindingFlags.Public); + Assert.IsNull(PropertyInfoExtensions.GetGetMethod(propertyInfo)); + } + + [Test] + [TestCase("PublicReadWrite")] + [TestCase("PrivateReadPublicWrite")] + [TestCase("PublicWriteOnly")] + public void GetSetMethod_Success(string name) + { + var propertyInfo = typeof(PropertyInfoExtensionsTest) + .GetProperty(name, BindingFlags.Instance | BindingFlags.NonPublic | BindingFlags.Public); + Assert.IsNotNull(PropertyInfoExtensions.GetSetMethod(propertyInfo)); + } + + [Test] + [TestCase("PublicReadPrivateWrite")] + [TestCase("PrivateReadWrite")] + [TestCase("PrivateReadOnly")] + [TestCase("PublicReadOnly")] + [TestCase("PrivateWriteOnly")] + public void GetSetMethod_NoAccessibleGetter(string name) + { + var propertyInfo = typeof(PropertyInfoExtensionsTest) + .GetProperty(name, BindingFlags.Instance | BindingFlags.NonPublic | BindingFlags.Public); + Assert.IsNull(PropertyInfoExtensions.GetSetMethod(propertyInfo)); + } + } + +} diff --git a/packager/third_party/protobuf/csharp/src/Google.Protobuf.Test/Compatibility/TypeExtensionsTest.cs b/packager/third_party/protobuf/csharp/src/Google.Protobuf.Test/Compatibility/TypeExtensionsTest.cs new file mode 100644 index 0000000000..f0c8d3bcc6 --- /dev/null +++ b/packager/third_party/protobuf/csharp/src/Google.Protobuf.Test/Compatibility/TypeExtensionsTest.cs @@ -0,0 +1,133 @@ +#region Copyright notice and license +// Protocol Buffers - Google's data interchange format +// Copyright 2015 Google Inc. All rights reserved. +// https://developers.google.com/protocol-buffers/ +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. 
nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +#endregion +using NUnit.Framework; +using System; +using System.Collections.Generic; +using System.Reflection; + +namespace Google.Protobuf.Compatibility +{ + public class TypeExtensionsTest + { + public class DerivedList : List { } + public string PublicProperty { get; set; } + private string PrivateProperty { get; set; } + + public void PublicMethod() + { + } + + private void PrivateMethod() + { + } + + [Test] + [TestCase(typeof(int), true)] + [TestCase(typeof(int?), true)] + [TestCase(typeof(Nullable<>), true)] + [TestCase(typeof(WireFormat.WireType), true)] + [TestCase(typeof(string), false)] + [TestCase(typeof(object), false)] + [TestCase(typeof(Enum), false)] + [TestCase(typeof(ValueType), false)] + [TestCase(typeof(TypeExtensionsTest), false)] + [TestCase(typeof(Action), false)] + [TestCase(typeof(Action<>), false)] + [TestCase(typeof(IDisposable), false)] + public void IsValueType(Type type, bool expected) + { + Assert.AreEqual(expected, TypeExtensions.IsValueType(type)); + } + + [Test] + [TestCase(typeof(object), typeof(string), true)] + [TestCase(typeof(object), typeof(int), true)] + [TestCase(typeof(string), typeof(string), true)] + [TestCase(typeof(string), typeof(object), false)] + [TestCase(typeof(string), typeof(int), false)] + [TestCase(typeof(int), typeof(int), true)] + [TestCase(typeof(ValueType), typeof(int), true)] + [TestCase(typeof(long), typeof(int), false)] // + public void IsAssignableFrom(Type target, Type argument, bool expected) + { + Assert.AreEqual(expected, TypeExtensions.IsAssignableFrom(target, argument)); + } + + [Test] + [TestCase(typeof(DerivedList), "Count")] // Go up the type hierarchy + [TestCase(typeof(List), "Count")] + [TestCase(typeof(List<>), "Count")] + [TestCase(typeof(TypeExtensionsTest), "PublicProperty")] + public void GetProperty_Success(Type type, string name) + { + var property = TypeExtensions.GetProperty(type, name); + Assert.IsNotNull(property); + Assert.AreEqual(name, property.Name); + } + + [Test] + [TestCase(typeof(TypeExtensionsTest), "PrivateProperty")] + [TestCase(typeof(TypeExtensionsTest), "Garbage")] + public void GetProperty_NoSuchProperty(Type type, string name) + { + var property = TypeExtensions.GetProperty(type, name); + Assert.IsNull(property); + } + + [Test] + [TestCase(typeof(DerivedList), "RemoveAt")] // Go up the type hierarchy + [TestCase(typeof(List<>), "RemoveAt")] + [TestCase(typeof(TypeExtensionsTest), "PublicMethod")] + public void GetMethod_Success(Type type, string name) + { + var method = TypeExtensions.GetMethod(type, name); + Assert.IsNotNull(method); + 
Assert.AreEqual(name, method.Name); + } + + [Test] + [TestCase(typeof(TypeExtensionsTest), "PrivateMethod")] + [TestCase(typeof(TypeExtensionsTest), "GarbageMethod")] + public void GetMethod_NoSuchMethod(Type type, string name) + { + var method = TypeExtensions.GetMethod(type, name); + Assert.IsNull(method); + } + + [Test] + [TestCase(typeof(List), "IndexOf")] + public void GetMethod_Ambiguous(Type type, string name) + { + Assert.Throws(() => TypeExtensions.GetMethod(type, name)); + } + } +} diff --git a/packager/third_party/protobuf/csharp/src/Google.Protobuf.Test/DeprecatedMemberTest.cs b/packager/third_party/protobuf/csharp/src/Google.Protobuf.Test/DeprecatedMemberTest.cs new file mode 100644 index 0000000000..8dfad8b331 --- /dev/null +++ b/packager/third_party/protobuf/csharp/src/Google.Protobuf.Test/DeprecatedMemberTest.cs @@ -0,0 +1,55 @@ +#region Copyright notice and license +// Protocol Buffers - Google's data interchange format +// Copyright 2015 Google Inc. All rights reserved. +// https://developers.google.com/protocol-buffers/ +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
+#endregion + +using System; +using System.Reflection; +using Google.Protobuf.TestProtos; +using NUnit.Framework; + +namespace Google.Protobuf +{ + public class DeprecatedMemberTest + { + private static void AssertIsDeprecated(MemberInfo member) + { + Assert.NotNull(member); + Assert.IsTrue(member.IsDefined(typeof(ObsoleteAttribute), false), "Member not obsolete: " + member); + } + + [Test] + public void TestDepreatedPrimitiveValue() + { + AssertIsDeprecated(typeof(TestDeprecatedFields).GetProperty("DeprecatedInt32")); + } + + } +} diff --git a/packager/third_party/protobuf/csharp/src/Google.Protobuf.Test/EqualityTester.cs b/packager/third_party/protobuf/csharp/src/Google.Protobuf.Test/EqualityTester.cs new file mode 100644 index 0000000000..a669baba17 --- /dev/null +++ b/packager/third_party/protobuf/csharp/src/Google.Protobuf.Test/EqualityTester.cs @@ -0,0 +1,64 @@ +#region Copyright notice and license +// Protocol Buffers - Google's data interchange format +// Copyright 2015 Google Inc. All rights reserved. +// https://developers.google.com/protocol-buffers/ +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +#endregion + +using System; +using NUnit.Framework; + +namespace Google.Protobuf +{ + /// + /// Helper methods when testing equality. NUnit's Assert.AreEqual and + /// Assert.AreNotEqual methods try to be clever with collections, which can + /// be annoying... + /// + internal static class EqualityTester + { + public static void AssertEquality(T first, T second) where T : IEquatable + { + Assert.IsTrue(first.Equals(second)); + Assert.IsTrue(first.Equals((object) second)); + Assert.AreEqual(first.GetHashCode(), second.GetHashCode()); + } + + public static void AssertInequality(T first, T second) where T : IEquatable + { + Assert.IsFalse(first.Equals(second)); + Assert.IsFalse(first.Equals((object) second)); + // While this isn't a requirement, the chances of this test failing due to + // coincidence rather than a bug are very small. 
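+            // GetHashCode only requires equal values to hash equally; unequal values may legitimately
+            // collide. For the hand-picked inputs used in these tests, though, a collision almost
+            // certainly indicates a broken hash implementation, so the stricter check is worthwhile.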
+ if (first != null && second != null) + { + Assert.AreNotEqual(first.GetHashCode(), second.GetHashCode()); + } + } + } +} diff --git a/packager/third_party/protobuf/csharp/src/Google.Protobuf.Test/FieldCodecTest.cs b/packager/third_party/protobuf/csharp/src/Google.Protobuf.Test/FieldCodecTest.cs new file mode 100644 index 0000000000..c616470e81 --- /dev/null +++ b/packager/third_party/protobuf/csharp/src/Google.Protobuf.Test/FieldCodecTest.cs @@ -0,0 +1,195 @@ +#region Copyright notice and license +// Protocol Buffers - Google's data interchange format +// Copyright 2015 Google Inc. All rights reserved. +// https://developers.google.com/protocol-buffers/ +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +#endregion + +using System.Collections.Generic; +using System.IO; +using Google.Protobuf.TestProtos; +using NUnit.Framework; + +namespace Google.Protobuf +{ + public class FieldCodecTest + { +#pragma warning disable 0414 // Used by tests via reflection - do not remove! 
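+        // NUnit discovers this field via [TestCaseSource("Codecs")] below; since the compiler sees no
+        // direct reads it would otherwise report CS0414 ("field assigned but never used").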
+ private static readonly List Codecs = new List + { + new FieldCodecTestData(FieldCodec.ForBool(100), true, "Bool"), + new FieldCodecTestData(FieldCodec.ForString(100), "sample", "String"), + new FieldCodecTestData(FieldCodec.ForBytes(100), ByteString.CopyFrom(1, 2, 3), "Bytes"), + new FieldCodecTestData(FieldCodec.ForInt32(100), -1000, "Int32"), + new FieldCodecTestData(FieldCodec.ForSInt32(100), -1000, "SInt32"), + new FieldCodecTestData(FieldCodec.ForSFixed32(100), -1000, "SFixed32"), + new FieldCodecTestData(FieldCodec.ForUInt32(100), 1234, "UInt32"), + new FieldCodecTestData(FieldCodec.ForFixed32(100), 1234, "Fixed32"), + new FieldCodecTestData(FieldCodec.ForInt64(100), -1000, "Int64"), + new FieldCodecTestData(FieldCodec.ForSInt64(100), -1000, "SInt64"), + new FieldCodecTestData(FieldCodec.ForSFixed64(100), -1000, "SFixed64"), + new FieldCodecTestData(FieldCodec.ForUInt64(100), 1234, "UInt64"), + new FieldCodecTestData(FieldCodec.ForFixed64(100), 1234, "Fixed64"), + new FieldCodecTestData(FieldCodec.ForFloat(100), 1234.5f, "Float"), + new FieldCodecTestData(FieldCodec.ForDouble(100), 1234567890.5d, "Double"), + new FieldCodecTestData( + FieldCodec.ForEnum(100, t => (int) t, t => (ForeignEnum) t), ForeignEnum.ForeignBaz, "Enum"), + new FieldCodecTestData( + FieldCodec.ForMessage(100, ForeignMessage.Parser), new ForeignMessage { C = 10 }, "Message"), + }; +#pragma warning restore 0414 + + [Test, TestCaseSource("Codecs")] + public void RoundTripWithTag(ICodecTestData codec) + { + codec.TestRoundTripWithTag(); + } + + [Test, TestCaseSource("Codecs")] + public void RoundTripRaw(ICodecTestData codec) + { + codec.TestRoundTripRaw(); + } + + [Test, TestCaseSource("Codecs")] + public void CalculateSize(ICodecTestData codec) + { + codec.TestCalculateSizeWithTag(); + } + + [Test, TestCaseSource("Codecs")] + public void DefaultValue(ICodecTestData codec) + { + codec.TestDefaultValue(); + } + + [Test, TestCaseSource("Codecs")] + public void FixedSize(ICodecTestData codec) + { + codec.TestFixedSize(); + } + + // This is ugly, but it means we can have a non-generic interface. + // It feels like NUnit should support this better, but I don't know + // of any better ways right now. 
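+        // Each FieldCodecTestData<T> hides its type parameter behind this non-generic interface so that
+        // one heterogeneous TestCaseSource list can drive all of the generic round-trip tests above.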
+ public interface ICodecTestData + { + void TestRoundTripRaw(); + void TestRoundTripWithTag(); + void TestCalculateSizeWithTag(); + void TestDefaultValue(); + void TestFixedSize(); + } + + public class FieldCodecTestData : ICodecTestData + { + private readonly FieldCodec codec; + private readonly T sampleValue; + private readonly string name; + + public FieldCodecTestData(FieldCodec codec, T sampleValue, string name) + { + this.codec = codec; + this.sampleValue = sampleValue; + this.name = name; + } + + public void TestRoundTripRaw() + { + var stream = new MemoryStream(); + var codedOutput = new CodedOutputStream(stream); + codec.ValueWriter(codedOutput, sampleValue); + codedOutput.Flush(); + stream.Position = 0; + var codedInput = new CodedInputStream(stream); + Assert.AreEqual(sampleValue, codec.ValueReader(codedInput)); + Assert.IsTrue(codedInput.IsAtEnd); + } + + public void TestRoundTripWithTag() + { + var stream = new MemoryStream(); + var codedOutput = new CodedOutputStream(stream); + codec.WriteTagAndValue(codedOutput, sampleValue); + codedOutput.Flush(); + stream.Position = 0; + var codedInput = new CodedInputStream(stream); + codedInput.AssertNextTag(codec.Tag); + Assert.AreEqual(sampleValue, codec.Read(codedInput)); + Assert.IsTrue(codedInput.IsAtEnd); + } + + public void TestCalculateSizeWithTag() + { + var stream = new MemoryStream(); + var codedOutput = new CodedOutputStream(stream); + codec.WriteTagAndValue(codedOutput, sampleValue); + codedOutput.Flush(); + Assert.AreEqual(stream.Position, codec.CalculateSizeWithTag(sampleValue)); + } + + public void TestDefaultValue() + { + // WriteTagAndValue ignores default values + var stream = new MemoryStream(); + var codedOutput = new CodedOutputStream(stream); + codec.WriteTagAndValue(codedOutput, codec.DefaultValue); + codedOutput.Flush(); + Assert.AreEqual(0, stream.Position); + Assert.AreEqual(0, codec.CalculateSizeWithTag(codec.DefaultValue)); + if (typeof(T).IsValueType) + { + Assert.AreEqual(default(T), codec.DefaultValue); + } + + // The plain ValueWriter/ValueReader delegates don't. + if (codec.DefaultValue != null) // This part isn't appropriate for message types. + { + codedOutput = new CodedOutputStream(stream); + codec.ValueWriter(codedOutput, codec.DefaultValue); + codedOutput.Flush(); + Assert.AreNotEqual(0, stream.Position); + Assert.AreEqual(stream.Position, codec.ValueSizeCalculator(codec.DefaultValue)); + stream.Position = 0; + var codedInput = new CodedInputStream(stream); + Assert.AreEqual(codec.DefaultValue, codec.ValueReader(codedInput)); + } + } + + public void TestFixedSize() + { + Assert.AreEqual(name.Contains("Fixed"), codec.FixedSize != 0); + } + + public override string ToString() + { + return name; + } + } + } +} diff --git a/packager/third_party/protobuf/csharp/src/Google.Protobuf.Test/GeneratedMessageTest.cs b/packager/third_party/protobuf/csharp/src/Google.Protobuf.Test/GeneratedMessageTest.cs new file mode 100644 index 0000000000..b029551c0b --- /dev/null +++ b/packager/third_party/protobuf/csharp/src/Google.Protobuf.Test/GeneratedMessageTest.cs @@ -0,0 +1,723 @@ +#region Copyright notice and license +// Protocol Buffers - Google's data interchange format +// Copyright 2015 Google Inc. All rights reserved. 
+// https://developers.google.com/protocol-buffers/ +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +#endregion + +using System; +using System.IO; +using Google.Protobuf.TestProtos; +using NUnit.Framework; +using System.Collections; +using System.Collections.Generic; +using System.Linq; +using Google.Protobuf.WellKnownTypes; + +namespace Google.Protobuf +{ + /// + /// Tests around the generated TestAllTypes message. + /// + public class GeneratedMessageTest + { + [Test] + public void EmptyMessageFieldDistinctFromMissingMessageField() + { + // This demonstrates what we're really interested in... 
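+            // In proto3, a message field set to an empty submessage is serialized as a zero-length,
+            // length-delimited field, while an unset message field writes nothing at all, so the two
+            // objects below must compare unequal.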
+ var message1 = new TestAllTypes { SingleForeignMessage = new ForeignMessage() }; + var message2 = new TestAllTypes(); // SingleForeignMessage is null + EqualityTester.AssertInequality(message1, message2); + } + + [Test] + public void DefaultValues() + { + // Single fields + var message = new TestAllTypes(); + Assert.AreEqual(false, message.SingleBool); + Assert.AreEqual(ByteString.Empty, message.SingleBytes); + Assert.AreEqual(0.0, message.SingleDouble); + Assert.AreEqual(0, message.SingleFixed32); + Assert.AreEqual(0L, message.SingleFixed64); + Assert.AreEqual(0.0f, message.SingleFloat); + Assert.AreEqual(ForeignEnum.ForeignUnspecified, message.SingleForeignEnum); + Assert.IsNull(message.SingleForeignMessage); + Assert.AreEqual(ImportEnum.Unspecified, message.SingleImportEnum); + Assert.IsNull(message.SingleImportMessage); + Assert.AreEqual(0, message.SingleInt32); + Assert.AreEqual(0L, message.SingleInt64); + Assert.AreEqual(TestAllTypes.Types.NestedEnum.Unspecified, message.SingleNestedEnum); + Assert.IsNull(message.SingleNestedMessage); + Assert.IsNull(message.SinglePublicImportMessage); + Assert.AreEqual(0, message.SingleSfixed32); + Assert.AreEqual(0L, message.SingleSfixed64); + Assert.AreEqual(0, message.SingleSint32); + Assert.AreEqual(0L, message.SingleSint64); + Assert.AreEqual("", message.SingleString); + Assert.AreEqual(0U, message.SingleUint32); + Assert.AreEqual(0UL, message.SingleUint64); + + // Repeated fields + Assert.AreEqual(0, message.RepeatedBool.Count); + Assert.AreEqual(0, message.RepeatedBytes.Count); + Assert.AreEqual(0, message.RepeatedDouble.Count); + Assert.AreEqual(0, message.RepeatedFixed32.Count); + Assert.AreEqual(0, message.RepeatedFixed64.Count); + Assert.AreEqual(0, message.RepeatedFloat.Count); + Assert.AreEqual(0, message.RepeatedForeignEnum.Count); + Assert.AreEqual(0, message.RepeatedForeignMessage.Count); + Assert.AreEqual(0, message.RepeatedImportEnum.Count); + Assert.AreEqual(0, message.RepeatedImportMessage.Count); + Assert.AreEqual(0, message.RepeatedNestedEnum.Count); + Assert.AreEqual(0, message.RepeatedNestedMessage.Count); + Assert.AreEqual(0, message.RepeatedPublicImportMessage.Count); + Assert.AreEqual(0, message.RepeatedSfixed32.Count); + Assert.AreEqual(0, message.RepeatedSfixed64.Count); + Assert.AreEqual(0, message.RepeatedSint32.Count); + Assert.AreEqual(0, message.RepeatedSint64.Count); + Assert.AreEqual(0, message.RepeatedString.Count); + Assert.AreEqual(0, message.RepeatedUint32.Count); + Assert.AreEqual(0, message.RepeatedUint64.Count); + + // Oneof fields + Assert.AreEqual(TestAllTypes.OneofFieldOneofCase.None, message.OneofFieldCase); + Assert.AreEqual(0, message.OneofUint32); + Assert.AreEqual("", message.OneofString); + Assert.AreEqual(ByteString.Empty, message.OneofBytes); + Assert.IsNull(message.OneofNestedMessage); + } + + [Test] + public void NullStringAndBytesRejected() + { + var message = new TestAllTypes(); + Assert.Throws(() => message.SingleString = null); + Assert.Throws(() => message.OneofString = null); + Assert.Throws(() => message.SingleBytes = null); + Assert.Throws(() => message.OneofBytes = null); + } + + [Test] + public void RoundTrip_Empty() + { + var message = new TestAllTypes(); + // Without setting any values, there's nothing to write. 
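+            // Proto3 omits fields holding their default values, so an untouched message should
+            // round-trip through a zero-length byte array.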
+ byte[] bytes = message.ToByteArray(); + Assert.AreEqual(0, bytes.Length); + TestAllTypes parsed = TestAllTypes.Parser.ParseFrom(bytes); + Assert.AreEqual(message, parsed); + } + + [Test] + public void RoundTrip_SingleValues() + { + var message = new TestAllTypes + { + SingleBool = true, + SingleBytes = ByteString.CopyFrom(1, 2, 3, 4), + SingleDouble = 23.5, + SingleFixed32 = 23, + SingleFixed64 = 1234567890123, + SingleFloat = 12.25f, + SingleForeignEnum = ForeignEnum.ForeignBar, + SingleForeignMessage = new ForeignMessage { C = 10 }, + SingleImportEnum = ImportEnum.ImportBaz, + SingleImportMessage = new ImportMessage { D = 20 }, + SingleInt32 = 100, + SingleInt64 = 3210987654321, + SingleNestedEnum = TestAllTypes.Types.NestedEnum.Foo, + SingleNestedMessage = new TestAllTypes.Types.NestedMessage { Bb = 35 }, + SinglePublicImportMessage = new PublicImportMessage { E = 54 }, + SingleSfixed32 = -123, + SingleSfixed64 = -12345678901234, + SingleSint32 = -456, + SingleSint64 = -12345678901235, + SingleString = "test", + SingleUint32 = uint.MaxValue, + SingleUint64 = ulong.MaxValue + }; + + byte[] bytes = message.ToByteArray(); + TestAllTypes parsed = TestAllTypes.Parser.ParseFrom(bytes); + Assert.AreEqual(message, parsed); + } + + [Test] + public void RoundTrip_RepeatedValues() + { + var message = new TestAllTypes + { + RepeatedBool = { true, false }, + RepeatedBytes = { ByteString.CopyFrom(1, 2, 3, 4), ByteString.CopyFrom(5, 6) }, + RepeatedDouble = { -12.25, 23.5 }, + RepeatedFixed32 = { uint.MaxValue, 23 }, + RepeatedFixed64 = { ulong.MaxValue, 1234567890123 }, + RepeatedFloat = { 100f, 12.25f }, + RepeatedForeignEnum = { ForeignEnum.ForeignFoo, ForeignEnum.ForeignBar }, + RepeatedForeignMessage = { new ForeignMessage(), new ForeignMessage { C = 10 } }, + RepeatedImportEnum = { ImportEnum.ImportBaz, ImportEnum.Unspecified }, + RepeatedImportMessage = { new ImportMessage { D = 20 }, new ImportMessage { D = 25 } }, + RepeatedInt32 = { 100, 200 }, + RepeatedInt64 = { 3210987654321, long.MaxValue }, + RepeatedNestedEnum = { TestAllTypes.Types.NestedEnum.Foo, TestAllTypes.Types.NestedEnum.Neg }, + RepeatedNestedMessage = { new TestAllTypes.Types.NestedMessage { Bb = 35 }, new TestAllTypes.Types.NestedMessage { Bb = 10 } }, + RepeatedPublicImportMessage = { new PublicImportMessage { E = 54 }, new PublicImportMessage { E = -1 } }, + RepeatedSfixed32 = { -123, 123 }, + RepeatedSfixed64 = { -12345678901234, 12345678901234 }, + RepeatedSint32 = { -456, 100 }, + RepeatedSint64 = { -12345678901235, 123 }, + RepeatedString = { "foo", "bar" }, + RepeatedUint32 = { uint.MaxValue, uint.MinValue }, + RepeatedUint64 = { ulong.MaxValue, uint.MinValue } + }; + + byte[] bytes = message.ToByteArray(); + TestAllTypes parsed = TestAllTypes.Parser.ParseFrom(bytes); + Assert.AreEqual(message, parsed); + } + + // Note that not every map within map_unittest_proto3 is used. They all go through very + // similar code paths. The fact that all maps are present is validation that we have codecs + // for every type. 
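+        // On the wire, each map field is equivalent to a repeated entry message whose field 1 is the
+        // key and field 2 is the value; the hand-crafted-stream tests further down rely on exactly
+        // that layout.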
+ [Test] + public void RoundTrip_Maps() + { + var message = new TestMap + { + MapBoolBool = { + { false, true }, + { true, false } + }, + MapInt32Bytes = { + { 5, ByteString.CopyFrom(6, 7, 8) }, + { 25, ByteString.CopyFrom(1, 2, 3, 4, 5) }, + { 10, ByteString.Empty } + }, + MapInt32ForeignMessage = { + { 0, new ForeignMessage { C = 10 } }, + { 5, new ForeignMessage() }, + }, + MapInt32Enum = { + { 1, MapEnum.Bar }, + { 2000, MapEnum.Foo } + } + }; + + byte[] bytes = message.ToByteArray(); + TestMap parsed = TestMap.Parser.ParseFrom(bytes); + Assert.AreEqual(message, parsed); + } + + [Test] + public void MapWithEmptyEntry() + { + var message = new TestMap + { + MapInt32Bytes = { { 0, ByteString.Empty } } + }; + + byte[] bytes = message.ToByteArray(); + Assert.AreEqual(2, bytes.Length); // Tag for field entry (1 byte), length of entry (0; 1 byte) + + var parsed = TestMap.Parser.ParseFrom(bytes); + Assert.AreEqual(1, parsed.MapInt32Bytes.Count); + Assert.AreEqual(ByteString.Empty, parsed.MapInt32Bytes[0]); + } + + [Test] + public void MapWithOnlyValue() + { + // Hand-craft the stream to contain a single entry with just a value. + var memoryStream = new MemoryStream(); + var output = new CodedOutputStream(memoryStream); + output.WriteTag(TestMap.MapInt32ForeignMessageFieldNumber, WireFormat.WireType.LengthDelimited); + var nestedMessage = new ForeignMessage { C = 20 }; + // Size of the entry (tag, size written by WriteMessage, data written by WriteMessage) + output.WriteLength(2 + nestedMessage.CalculateSize()); + output.WriteTag(2, WireFormat.WireType.LengthDelimited); + output.WriteMessage(nestedMessage); + output.Flush(); + + var parsed = TestMap.Parser.ParseFrom(memoryStream.ToArray()); + Assert.AreEqual(nestedMessage, parsed.MapInt32ForeignMessage[0]); + } + + [Test] + public void MapWithOnlyKey_PrimitiveValue() + { + // Hand-craft the stream to contain a single entry with just a key. + var memoryStream = new MemoryStream(); + var output = new CodedOutputStream(memoryStream); + output.WriteTag(TestMap.MapInt32DoubleFieldNumber, WireFormat.WireType.LengthDelimited); + int key = 10; + output.WriteLength(1 + CodedOutputStream.ComputeInt32Size(key)); + output.WriteTag(1, WireFormat.WireType.Varint); + output.WriteInt32(key); + output.Flush(); + + var parsed = TestMap.Parser.ParseFrom(memoryStream.ToArray()); + Assert.AreEqual(0.0, parsed.MapInt32Double[key]); + } + + [Test] + public void MapWithOnlyKey_MessageValue() + { + // Hand-craft the stream to contain a single entry with just a key. + var memoryStream = new MemoryStream(); + var output = new CodedOutputStream(memoryStream); + output.WriteTag(TestMap.MapInt32ForeignMessageFieldNumber, WireFormat.WireType.LengthDelimited); + int key = 10; + output.WriteLength(1 + CodedOutputStream.ComputeInt32Size(key)); + output.WriteTag(1, WireFormat.WireType.Varint); + output.WriteInt32(key); + output.Flush(); + + var parsed = TestMap.Parser.ParseFrom(memoryStream.ToArray()); + Assert.AreEqual(new ForeignMessage(), parsed.MapInt32ForeignMessage[key]); + } + + [Test] + public void MapIgnoresExtraFieldsWithinEntryMessages() + { + // Hand-craft the stream to contain a single entry with three fields + var memoryStream = new MemoryStream(); + var output = new CodedOutputStream(memoryStream); + + output.WriteTag(TestMap.MapInt32Int32FieldNumber, WireFormat.WireType.LengthDelimited); + + var key = 10; // Field 1 + var value = 20; // Field 2 + var extra = 30; // Field 3 + + // Each field can be represented in a single byte, with a single byte tag. 
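+            // (A field's tag is (field_number << 3) | wire_type, so fields 1-3 with wire type 0
+            // each encode as one byte, and the values 10, 20 and 30 are single-byte varints.)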
+ // Total message size: 6 bytes. + output.WriteLength(6); + output.WriteTag(1, WireFormat.WireType.Varint); + output.WriteInt32(key); + output.WriteTag(2, WireFormat.WireType.Varint); + output.WriteInt32(value); + output.WriteTag(3, WireFormat.WireType.Varint); + output.WriteInt32(extra); + output.Flush(); + + var parsed = TestMap.Parser.ParseFrom(memoryStream.ToArray()); + Assert.AreEqual(value, parsed.MapInt32Int32[key]); + } + + [Test] + public void MapFieldOrderIsIrrelevant() + { + var memoryStream = new MemoryStream(); + var output = new CodedOutputStream(memoryStream); + + output.WriteTag(TestMap.MapInt32Int32FieldNumber, WireFormat.WireType.LengthDelimited); + + var key = 10; + var value = 20; + + // Each field can be represented in a single byte, with a single byte tag. + // Total message size: 4 bytes. + output.WriteLength(4); + output.WriteTag(2, WireFormat.WireType.Varint); + output.WriteInt32(value); + output.WriteTag(1, WireFormat.WireType.Varint); + output.WriteInt32(key); + output.Flush(); + + var parsed = TestMap.Parser.ParseFrom(memoryStream.ToArray()); + Assert.AreEqual(value, parsed.MapInt32Int32[key]); + } + + [Test] + public void MapNonContiguousEntries() + { + var memoryStream = new MemoryStream(); + var output = new CodedOutputStream(memoryStream); + + // Message structure: + // Entry for MapInt32Int32 + // Entry for MapStringString + // Entry for MapInt32Int32 + + // First entry + var key1 = 10; + var value1 = 20; + output.WriteTag(TestMap.MapInt32Int32FieldNumber, WireFormat.WireType.LengthDelimited); + output.WriteLength(4); + output.WriteTag(1, WireFormat.WireType.Varint); + output.WriteInt32(key1); + output.WriteTag(2, WireFormat.WireType.Varint); + output.WriteInt32(value1); + + // Second entry + var key2 = "a"; + var value2 = "b"; + output.WriteTag(TestMap.MapStringStringFieldNumber, WireFormat.WireType.LengthDelimited); + output.WriteLength(6); // 3 bytes per entry: tag, size, character + output.WriteTag(1, WireFormat.WireType.LengthDelimited); + output.WriteString(key2); + output.WriteTag(2, WireFormat.WireType.LengthDelimited); + output.WriteString(value2); + + // Third entry + var key3 = 15; + var value3 = 25; + output.WriteTag(TestMap.MapInt32Int32FieldNumber, WireFormat.WireType.LengthDelimited); + output.WriteLength(4); + output.WriteTag(1, WireFormat.WireType.Varint); + output.WriteInt32(key3); + output.WriteTag(2, WireFormat.WireType.Varint); + output.WriteInt32(value3); + + output.Flush(); + var parsed = TestMap.Parser.ParseFrom(memoryStream.ToArray()); + var expected = new TestMap + { + MapInt32Int32 = { { key1, value1 }, { key3, value3 } }, + MapStringString = { { key2, value2 } } + }; + Assert.AreEqual(expected, parsed); + } + + [Test] + public void DuplicateKeys_LastEntryWins() + { + var memoryStream = new MemoryStream(); + var output = new CodedOutputStream(memoryStream); + + var key = 10; + var value1 = 20; + var value2 = 30; + + // First entry + output.WriteTag(TestMap.MapInt32Int32FieldNumber, WireFormat.WireType.LengthDelimited); + output.WriteLength(4); + output.WriteTag(1, WireFormat.WireType.Varint); + output.WriteInt32(key); + output.WriteTag(2, WireFormat.WireType.Varint); + output.WriteInt32(value1); + + // Second entry - same key, different value + output.WriteTag(TestMap.MapInt32Int32FieldNumber, WireFormat.WireType.LengthDelimited); + output.WriteLength(4); + output.WriteTag(1, WireFormat.WireType.Varint); + output.WriteInt32(key); + output.WriteTag(2, WireFormat.WireType.Varint); + output.WriteInt32(value2); + output.Flush(); + + 
var parsed = TestMap.Parser.ParseFrom(memoryStream.ToArray()); + Assert.AreEqual(value2, parsed.MapInt32Int32[key]); + } + + [Test] + public void CloneSingleNonMessageValues() + { + var original = new TestAllTypes + { + SingleBool = true, + SingleBytes = ByteString.CopyFrom(1, 2, 3, 4), + SingleDouble = 23.5, + SingleFixed32 = 23, + SingleFixed64 = 1234567890123, + SingleFloat = 12.25f, + SingleInt32 = 100, + SingleInt64 = 3210987654321, + SingleNestedEnum = TestAllTypes.Types.NestedEnum.Foo, + SingleSfixed32 = -123, + SingleSfixed64 = -12345678901234, + SingleSint32 = -456, + SingleSint64 = -12345678901235, + SingleString = "test", + SingleUint32 = uint.MaxValue, + SingleUint64 = ulong.MaxValue + }; + var clone = original.Clone(); + Assert.AreNotSame(original, clone); + Assert.AreEqual(original, clone); + // Just as a single example + clone.SingleInt32 = 150; + Assert.AreNotEqual(original, clone); + } + + [Test] + public void CloneRepeatedNonMessageValues() + { + var original = new TestAllTypes + { + RepeatedBool = { true, false }, + RepeatedBytes = { ByteString.CopyFrom(1, 2, 3, 4), ByteString.CopyFrom(5, 6) }, + RepeatedDouble = { -12.25, 23.5 }, + RepeatedFixed32 = { uint.MaxValue, 23 }, + RepeatedFixed64 = { ulong.MaxValue, 1234567890123 }, + RepeatedFloat = { 100f, 12.25f }, + RepeatedInt32 = { 100, 200 }, + RepeatedInt64 = { 3210987654321, long.MaxValue }, + RepeatedNestedEnum = { TestAllTypes.Types.NestedEnum.Foo, TestAllTypes.Types.NestedEnum.Neg }, + RepeatedSfixed32 = { -123, 123 }, + RepeatedSfixed64 = { -12345678901234, 12345678901234 }, + RepeatedSint32 = { -456, 100 }, + RepeatedSint64 = { -12345678901235, 123 }, + RepeatedString = { "foo", "bar" }, + RepeatedUint32 = { uint.MaxValue, uint.MinValue }, + RepeatedUint64 = { ulong.MaxValue, uint.MinValue } + }; + + var clone = original.Clone(); + Assert.AreNotSame(original, clone); + Assert.AreEqual(original, clone); + // Just as a single example + clone.RepeatedDouble.Add(25.5); + Assert.AreNotEqual(original, clone); + } + + [Test] + public void CloneSingleMessageField() + { + var original = new TestAllTypes + { + SingleNestedMessage = new TestAllTypes.Types.NestedMessage { Bb = 20 } + }; + + var clone = original.Clone(); + Assert.AreNotSame(original, clone); + Assert.AreNotSame(original.SingleNestedMessage, clone.SingleNestedMessage); + Assert.AreEqual(original, clone); + + clone.SingleNestedMessage.Bb = 30; + Assert.AreNotEqual(original, clone); + } + + [Test] + public void CloneRepeatedMessageField() + { + var original = new TestAllTypes + { + RepeatedNestedMessage = { new TestAllTypes.Types.NestedMessage { Bb = 20 } } + }; + + var clone = original.Clone(); + Assert.AreNotSame(original, clone); + Assert.AreNotSame(original.RepeatedNestedMessage, clone.RepeatedNestedMessage); + Assert.AreNotSame(original.RepeatedNestedMessage[0], clone.RepeatedNestedMessage[0]); + Assert.AreEqual(original, clone); + + clone.RepeatedNestedMessage[0].Bb = 30; + Assert.AreNotEqual(original, clone); + } + + [Test] + public void CloneOneofField() + { + var original = new TestAllTypes + { + OneofNestedMessage = new TestAllTypes.Types.NestedMessage { Bb = 20 } + }; + + var clone = original.Clone(); + Assert.AreNotSame(original, clone); + Assert.AreEqual(original, clone); + + // We should have cloned the message + original.OneofNestedMessage.Bb = 30; + Assert.AreNotEqual(original, clone); + } + + [Test] + public void OneofProperties() + { + // Switch the oneof case between each of the different options, and check everything behaves + // as expected 
in each case. + var message = new TestAllTypes(); + Assert.AreEqual("", message.OneofString); + Assert.AreEqual(0, message.OneofUint32); + Assert.AreEqual(ByteString.Empty, message.OneofBytes); + Assert.IsNull(message.OneofNestedMessage); + Assert.AreEqual(TestAllTypes.OneofFieldOneofCase.None, message.OneofFieldCase); + + message.OneofString = "sample"; + Assert.AreEqual("sample", message.OneofString); + Assert.AreEqual(0, message.OneofUint32); + Assert.AreEqual(ByteString.Empty, message.OneofBytes); + Assert.IsNull(message.OneofNestedMessage); + Assert.AreEqual(TestAllTypes.OneofFieldOneofCase.OneofString, message.OneofFieldCase); + + var bytes = ByteString.CopyFrom(1, 2, 3); + message.OneofBytes = bytes; + Assert.AreEqual("", message.OneofString); + Assert.AreEqual(0, message.OneofUint32); + Assert.AreEqual(bytes, message.OneofBytes); + Assert.IsNull(message.OneofNestedMessage); + Assert.AreEqual(TestAllTypes.OneofFieldOneofCase.OneofBytes, message.OneofFieldCase); + + message.OneofUint32 = 20; + Assert.AreEqual("", message.OneofString); + Assert.AreEqual(20, message.OneofUint32); + Assert.AreEqual(ByteString.Empty, message.OneofBytes); + Assert.IsNull(message.OneofNestedMessage); + Assert.AreEqual(TestAllTypes.OneofFieldOneofCase.OneofUint32, message.OneofFieldCase); + + var nestedMessage = new TestAllTypes.Types.NestedMessage { Bb = 25 }; + message.OneofNestedMessage = nestedMessage; + Assert.AreEqual("", message.OneofString); + Assert.AreEqual(0, message.OneofUint32); + Assert.AreEqual(ByteString.Empty, message.OneofBytes); + Assert.AreEqual(nestedMessage, message.OneofNestedMessage); + Assert.AreEqual(TestAllTypes.OneofFieldOneofCase.OneofNestedMessage, message.OneofFieldCase); + + message.ClearOneofField(); + Assert.AreEqual("", message.OneofString); + Assert.AreEqual(0, message.OneofUint32); + Assert.AreEqual(ByteString.Empty, message.OneofBytes); + Assert.IsNull(message.OneofNestedMessage); + Assert.AreEqual(TestAllTypes.OneofFieldOneofCase.None, message.OneofFieldCase); + } + + [Test] + public void Oneof_DefaultValuesNotEqual() + { + var message1 = new TestAllTypes { OneofString = "" }; + var message2 = new TestAllTypes { OneofUint32 = 0 }; + Assert.AreEqual(TestAllTypes.OneofFieldOneofCase.OneofString, message1.OneofFieldCase); + Assert.AreEqual(TestAllTypes.OneofFieldOneofCase.OneofUint32, message2.OneofFieldCase); + Assert.AreNotEqual(message1, message2); + } + + [Test] + public void OneofSerialization_NonDefaultValue() + { + var message = new TestAllTypes(); + message.OneofString = "this would take a bit of space"; + message.OneofUint32 = 10; + var bytes = message.ToByteArray(); + Assert.AreEqual(3, bytes.Length); // 2 bytes for the tag + 1 for the value - no string! 
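+            // (Tags are varints of (field_number << 3) | wire_type; field numbers above 15 no
+            // longer fit in a single byte, hence the two-byte tag here.)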
+ + var message2 = TestAllTypes.Parser.ParseFrom(bytes); + Assert.AreEqual(message, message2); + Assert.AreEqual(TestAllTypes.OneofFieldOneofCase.OneofUint32, message2.OneofFieldCase); + } + + [Test] + public void OneofSerialization_DefaultValue() + { + var message = new TestAllTypes(); + message.OneofString = "this would take a bit of space"; + message.OneofUint32 = 0; // This is the default value for UInt32; normally wouldn't be serialized + var bytes = message.ToByteArray(); + Assert.AreEqual(3, bytes.Length); // 2 bytes for the tag + 1 for the value - it's still serialized + + var message2 = TestAllTypes.Parser.ParseFrom(bytes); + Assert.AreEqual(message, message2); + Assert.AreEqual(TestAllTypes.OneofFieldOneofCase.OneofUint32, message2.OneofFieldCase); + } + + [Test] + public void IgnoreUnknownFields_RealDataStillRead() + { + var message = SampleMessages.CreateFullTestAllTypes(); + var stream = new MemoryStream(); + var output = new CodedOutputStream(stream); + var unusedFieldNumber = 23456; + Assert.IsFalse(TestAllTypes.Descriptor.Fields.InDeclarationOrder().Select(x => x.FieldNumber).Contains(unusedFieldNumber)); + output.WriteTag(unusedFieldNumber, WireFormat.WireType.LengthDelimited); + output.WriteString("ignore me"); + message.WriteTo(output); + output.Flush(); + + stream.Position = 0; + var parsed = TestAllTypes.Parser.ParseFrom(stream); + Assert.AreEqual(message, parsed); + } + + [Test] + public void IgnoreUnknownFields_AllTypes() + { + // Simple way of ensuring we can skip all kinds of fields. + var data = SampleMessages.CreateFullTestAllTypes().ToByteArray(); + var empty = Empty.Parser.ParseFrom(data); + Assert.AreEqual(new Empty(), empty); + } + + // This was originally seen as a conformance test failure. + [Test] + public void TruncatedMessageFieldThrows() + { + // 130, 3 is the message tag + // 1 is the data length - but there's no data. + var data = new byte[] { 130, 3, 1 }; + Assert.Throws(() => TestAllTypes.Parser.ParseFrom(data)); + } + + /// + /// Demonstrates current behaviour with an extraneous end group tag - see issue 688 + /// for details; we may want to change this. 
+ /// + [Test] + public void ExtraEndGroupThrows() + { + var message = SampleMessages.CreateFullTestAllTypes(); + var stream = new MemoryStream(); + var output = new CodedOutputStream(stream); + + output.WriteTag(TestAllTypes.SingleFixed32FieldNumber, WireFormat.WireType.Fixed32); + output.WriteFixed32(123); + output.WriteTag(100, WireFormat.WireType.EndGroup); + + output.Flush(); + + stream.Position = 0; + Assert.Throws(() => TestAllTypes.Parser.ParseFrom(stream)); + } + + [Test] + public void CustomDiagnosticMessage_DirectToStringCall() + { + var message = new ForeignMessage { C = 31 }; + Assert.AreEqual("{ \"c\": 31, \"@cInHex\": \"1f\" }", message.ToString()); + Assert.AreEqual("{ \"c\": 31 }", JsonFormatter.Default.Format(message)); + } + + [Test] + public void CustomDiagnosticMessage_Nested() + { + var message = new TestAllTypes { SingleForeignMessage = new ForeignMessage { C = 16 } }; + Assert.AreEqual("{ \"singleForeignMessage\": { \"c\": 16, \"@cInHex\": \"10\" } }", message.ToString()); + Assert.AreEqual("{ \"singleForeignMessage\": { \"c\": 16 } }", JsonFormatter.Default.Format(message)); + } + + [Test] + public void CustomDiagnosticMessage_DirectToTextWriterCall() + { + var message = new ForeignMessage { C = 31 }; + var writer = new StringWriter(); + JsonFormatter.Default.Format(message, writer); + Assert.AreEqual("{ \"c\": 31 }", writer.ToString()); + } + } +} \ No newline at end of file diff --git a/packager/third_party/protobuf/csharp/src/Google.Protobuf.Test/Google.Protobuf.Test.csproj b/packager/third_party/protobuf/csharp/src/Google.Protobuf.Test/Google.Protobuf.Test.csproj new file mode 100644 index 0000000000..5d3de4a436 --- /dev/null +++ b/packager/third_party/protobuf/csharp/src/Google.Protobuf.Test/Google.Protobuf.Test.csproj @@ -0,0 +1,143 @@ + + + + Debug + AnyCPU + 9.0.30729 + 2.0 + {DD01ED24-3750-4567-9A23-1DB676A15610} + Library + Properties + Google.Protobuf + Google.Protobuf.Test + v4.5 + 512 + 3.5 + + + + + + + true + full + false + bin\Debug + obj\Debug\ + DEBUG;TRACE;$(EnvironmentFlavor);$(EnvironmentTemplate) + prompt + 4 + true + Off + false + + + pdbonly + true + bin\Release + obj\Release\ + TRACE;$(EnvironmentFlavor);$(EnvironmentTemplate) + prompt + 4 + true + Off + false + + + pdbonly + true + bin\ReleaseSigned + obj\ReleaseSigned\ + TRACE;$(EnvironmentFlavor);$(EnvironmentTemplate) + prompt + 4 + true + Off + false + True + ..\..\keys\Google.Protobuf.snk + + + + + ..\packages\NUnitTestAdapter.2.0.0\lib\nunit.core.dll + True + + + ..\packages\NUnitTestAdapter.2.0.0\lib\nunit.core.interfaces.dll + True + + + ..\packages\NUnit.2.6.4\lib\nunit.framework.dll + True + + + ..\packages\NUnitTestAdapter.2.0.0\lib\nunit.util.dll + True + + + ..\packages\NUnitTestAdapter.2.0.0\lib\NUnit.VisualStudio.TestAdapter.dll + True + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + {6908BDCE-D925-43F3-94AC-A531E6DF2591} + Google.Protobuf + + + + + + + + + + + + \ No newline at end of file diff --git a/packager/third_party/protobuf/csharp/src/Google.Protobuf.Test/IssuesTest.cs b/packager/third_party/protobuf/csharp/src/Google.Protobuf.Test/IssuesTest.cs new file mode 100644 index 0000000000..ddb23aa685 --- /dev/null +++ b/packager/third_party/protobuf/csharp/src/Google.Protobuf.Test/IssuesTest.cs @@ -0,0 +1,82 @@ +#region Copyright notice and license +// Protocol Buffers - Google's data interchange format +// Copyright 2008 Google Inc. All rights reserved. 
+// https://developers.google.com/protocol-buffers/ +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +#endregion + +using Google.Protobuf.Reflection; +using UnitTest.Issues.TestProtos; +using NUnit.Framework; + + +namespace Google.Protobuf +{ + /// + /// Tests for issues which aren't easily compartmentalized into other unit tests. + /// + public class IssuesTest + { + // Issue 45 + [Test] + public void FieldCalledItem() + { + ItemField message = new ItemField { Item = 3 }; + FieldDescriptor field = ItemField.Descriptor.FindFieldByName("item"); + Assert.NotNull(field); + Assert.AreEqual(3, (int)field.Accessor.GetValue(message)); + } + + [Test] + public void ReservedNames() + { + var message = new ReservedNames { Types_ = 10, Descriptor_ = 20 }; + // Underscores aren't reflected in the JSON. 
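+            // (The generated properties get a trailing underscore because the original names
+            // would collide with members every generated message already has, such as
+            // Descriptor; the JSON keys still use the proto field names.)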
+ Assert.AreEqual("{ \"types\": 10, \"descriptor\": 20 }", message.ToString()); + } + + [Test] + public void JsonNameParseTest() + { + var settings = new JsonParser.Settings(10, TypeRegistry.FromFiles(UnittestIssuesReflection.Descriptor)); + var parser = new JsonParser(settings); + + // It is safe to use either original field name or explicitly specified json_name + Assert.AreEqual(new TestJsonName { Name = "test", Description = "test2", Guid = "test3" }, + parser.Parse("{ \"name\": \"test\", \"desc\": \"test2\", \"guid\": \"test3\" }")); + } + + [Test] + public void JsonNameFormatTest() + { + var message = new TestJsonName { Name = "test", Description = "test2", Guid = "test3" }; + Assert.AreEqual("{ \"name\": \"test\", \"desc\": \"test2\", \"exid\": \"test3\" }", + JsonFormatter.Default.Format(message)); + } + } +} diff --git a/packager/third_party/protobuf/csharp/src/Google.Protobuf.Test/JsonFormatterTest.cs b/packager/third_party/protobuf/csharp/src/Google.Protobuf.Test/JsonFormatterTest.cs new file mode 100644 index 0000000000..827a75956f --- /dev/null +++ b/packager/third_party/protobuf/csharp/src/Google.Protobuf.Test/JsonFormatterTest.cs @@ -0,0 +1,526 @@ +#region Copyright notice and license +// Protocol Buffers - Google's data interchange format +// Copyright 2008 Google Inc. All rights reserved. +// https://developers.google.com/protocol-buffers/ +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +#endregion + +using System; +using Google.Protobuf.TestProtos; +using NUnit.Framework; +using UnitTest.Issues.TestProtos; +using Google.Protobuf.WellKnownTypes; +using Google.Protobuf.Reflection; + +using static Google.Protobuf.JsonParserTest; // For WrapInQuotes + +namespace Google.Protobuf +{ + /// + /// Tests for the JSON formatter. Note that in these tests, double quotes are replaced with apostrophes + /// for the sake of readability (embedding \" everywhere is painful). See the AssertJson method for details. 
+ /// + public class JsonFormatterTest + { + [Test] + public void DefaultValues_WhenOmitted() + { + var formatter = new JsonFormatter(new JsonFormatter.Settings(formatDefaultValues: false)); + + AssertJson("{ }", formatter.Format(new ForeignMessage())); + AssertJson("{ }", formatter.Format(new TestAllTypes())); + AssertJson("{ }", formatter.Format(new TestMap())); + } + + [Test] + public void DefaultValues_WhenIncluded() + { + var formatter = new JsonFormatter(new JsonFormatter.Settings(formatDefaultValues: true)); + AssertJson("{ 'c': 0 }", formatter.Format(new ForeignMessage())); + } + + [Test] + public void AllSingleFields() + { + var message = new TestAllTypes + { + SingleBool = true, + SingleBytes = ByteString.CopyFrom(1, 2, 3, 4), + SingleDouble = 23.5, + SingleFixed32 = 23, + SingleFixed64 = 1234567890123, + SingleFloat = 12.25f, + SingleForeignEnum = ForeignEnum.ForeignBar, + SingleForeignMessage = new ForeignMessage { C = 10 }, + SingleImportEnum = ImportEnum.ImportBaz, + SingleImportMessage = new ImportMessage { D = 20 }, + SingleInt32 = 100, + SingleInt64 = 3210987654321, + SingleNestedEnum = TestAllTypes.Types.NestedEnum.Foo, + SingleNestedMessage = new TestAllTypes.Types.NestedMessage { Bb = 35 }, + SinglePublicImportMessage = new PublicImportMessage { E = 54 }, + SingleSfixed32 = -123, + SingleSfixed64 = -12345678901234, + SingleSint32 = -456, + SingleSint64 = -12345678901235, + SingleString = "test\twith\ttabs", + SingleUint32 = uint.MaxValue, + SingleUint64 = ulong.MaxValue, + }; + var actualText = JsonFormatter.Default.Format(message); + + // Fields in numeric order + var expectedText = "{ " + + "'singleInt32': 100, " + + "'singleInt64': '3210987654321', " + + "'singleUint32': 4294967295, " + + "'singleUint64': '18446744073709551615', " + + "'singleSint32': -456, " + + "'singleSint64': '-12345678901235', " + + "'singleFixed32': 23, " + + "'singleFixed64': '1234567890123', " + + "'singleSfixed32': -123, " + + "'singleSfixed64': '-12345678901234', " + + "'singleFloat': 12.25, " + + "'singleDouble': 23.5, " + + "'singleBool': true, " + + "'singleString': 'test\\twith\\ttabs', " + + "'singleBytes': 'AQIDBA==', " + + "'singleNestedMessage': { 'bb': 35 }, " + + "'singleForeignMessage': { 'c': 10 }, " + + "'singleImportMessage': { 'd': 20 }, " + + "'singleNestedEnum': 'FOO', " + + "'singleForeignEnum': 'FOREIGN_BAR', " + + "'singleImportEnum': 'IMPORT_BAZ', " + + "'singlePublicImportMessage': { 'e': 54 }" + + " }"; + AssertJson(expectedText, actualText); + } + + [Test] + public void RepeatedField() + { + AssertJson("{ 'repeatedInt32': [ 1, 2, 3, 4, 5 ] }", + JsonFormatter.Default.Format(new TestAllTypes { RepeatedInt32 = { 1, 2, 3, 4, 5 } })); + } + + [Test] + public void MapField_StringString() + { + AssertJson("{ 'mapStringString': { 'with spaces': 'bar', 'a': 'b' } }", + JsonFormatter.Default.Format(new TestMap { MapStringString = { { "with spaces", "bar" }, { "a", "b" } } })); + } + + [Test] + public void MapField_Int32Int32() + { + // The keys are quoted, but the values aren't. + AssertJson("{ 'mapInt32Int32': { '0': 1, '2': 3 } }", + JsonFormatter.Default.Format(new TestMap { MapInt32Int32 = { { 0, 1 }, { 2, 3 } } })); + } + + [Test] + public void MapField_BoolBool() + { + // The keys are quoted, but the values aren't. 
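+            // (Proto3 JSON always represents map keys as strings, regardless of the key's
+            // proto type; values keep their usual JSON representation.)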
+ AssertJson("{ 'mapBoolBool': { 'false': true, 'true': false } }", + JsonFormatter.Default.Format(new TestMap { MapBoolBool = { { false, true }, { true, false } } })); + } + + [TestCase(1.0, "1")] + [TestCase(double.NaN, "'NaN'")] + [TestCase(double.PositiveInfinity, "'Infinity'")] + [TestCase(double.NegativeInfinity, "'-Infinity'")] + public void DoubleRepresentations(double value, string expectedValueText) + { + var message = new TestAllTypes { SingleDouble = value }; + string actualText = JsonFormatter.Default.Format(message); + string expectedText = "{ 'singleDouble': " + expectedValueText + " }"; + AssertJson(expectedText, actualText); + } + + [Test] + public void UnknownEnumValueNumeric_SingleField() + { + var message = new TestAllTypes { SingleForeignEnum = (ForeignEnum) 100 }; + AssertJson("{ 'singleForeignEnum': 100 }", JsonFormatter.Default.Format(message)); + } + + [Test] + public void UnknownEnumValueNumeric_RepeatedField() + { + var message = new TestAllTypes { RepeatedForeignEnum = { ForeignEnum.ForeignBaz, (ForeignEnum) 100, ForeignEnum.ForeignFoo } }; + AssertJson("{ 'repeatedForeignEnum': [ 'FOREIGN_BAZ', 100, 'FOREIGN_FOO' ] }", JsonFormatter.Default.Format(message)); + } + + [Test] + public void UnknownEnumValueNumeric_MapField() + { + var message = new TestMap { MapInt32Enum = { { 1, MapEnum.Foo }, { 2, (MapEnum) 100 }, { 3, MapEnum.Bar } } }; + AssertJson("{ 'mapInt32Enum': { '1': 'MAP_ENUM_FOO', '2': 100, '3': 'MAP_ENUM_BAR' } }", JsonFormatter.Default.Format(message)); + } + + [Test] + public void UnknownEnumValue_RepeatedField_AllEntriesUnknown() + { + var message = new TestAllTypes { RepeatedForeignEnum = { (ForeignEnum) 200, (ForeignEnum) 100 } }; + AssertJson("{ 'repeatedForeignEnum': [ 200, 100 ] }", JsonFormatter.Default.Format(message)); + } + + [Test] + [TestCase("a\u17b4b", "a\\u17b4b")] // Explicit + [TestCase("a\u0601b", "a\\u0601b")] // Ranged + [TestCase("a\u0605b", "a\u0605b")] // Passthrough (note lack of double backslash...) 
+ public void SimpleNonAscii(string text, string encoded) + { + var message = new TestAllTypes { SingleString = text }; + AssertJson("{ 'singleString': '" + encoded + "' }", JsonFormatter.Default.Format(message)); + } + + [Test] + public void SurrogatePairEscaping() + { + var message = new TestAllTypes { SingleString = "a\uD801\uDC01b" }; + AssertJson("{ 'singleString': 'a\\ud801\\udc01b' }", JsonFormatter.Default.Format(message)); + } + + [Test] + public void InvalidSurrogatePairsFail() + { + // Note: don't use TestCase for these, as the strings can't be reliably represented + // See http://codeblog.jonskeet.uk/2014/11/07/when-is-a-string-not-a-string/ + + // Lone low surrogate + var message = new TestAllTypes { SingleString = "a\uDC01b" }; + Assert.Throws(() => JsonFormatter.Default.Format(message)); + + // Lone high surrogate + message = new TestAllTypes { SingleString = "a\uD801b" }; + Assert.Throws(() => JsonFormatter.Default.Format(message)); + } + + [Test] + [TestCase("foo_bar", "fooBar")] + [TestCase("bananaBanana", "bananaBanana")] + [TestCase("BANANABanana", "bananaBanana")] + public void ToCamelCase(string original, string expected) + { + Assert.AreEqual(expected, JsonFormatter.ToCamelCase(original)); + } + + [Test] + [TestCase(null, "{ }")] + [TestCase("x", "{ 'fooString': 'x' }")] + [TestCase("", "{ 'fooString': '' }")] + public void Oneof(string fooStringValue, string expectedJson) + { + var message = new TestOneof(); + if (fooStringValue != null) + { + message.FooString = fooStringValue; + } + + // We should get the same result both with and without "format default values". + var formatter = new JsonFormatter(new JsonFormatter.Settings(false)); + AssertJson(expectedJson, formatter.Format(message)); + formatter = new JsonFormatter(new JsonFormatter.Settings(true)); + AssertJson(expectedJson, formatter.Format(message)); + } + + [Test] + public void WrapperFormatting_Single() + { + // Just a few examples, handling both classes and value types, and + // default vs non-default values + var message = new TestWellKnownTypes + { + Int64Field = 10, + Int32Field = 0, + BytesField = ByteString.FromBase64("ABCD"), + StringField = "" + }; + var expectedJson = "{ 'int64Field': '10', 'int32Field': 0, 'stringField': '', 'bytesField': 'ABCD' }"; + AssertJson(expectedJson, JsonFormatter.Default.Format(message)); + } + + [Test] + public void WrapperFormatting_Message() + { + Assert.AreEqual("\"\"", JsonFormatter.Default.Format(new StringValue())); + Assert.AreEqual("0", JsonFormatter.Default.Format(new Int32Value())); + } + + [Test] + public void WrapperFormatting_IncludeNull() + { + // The actual JSON here is very large because there are lots of fields. Just test a couple of them. 
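+            // (With default-value formatting enabled, wrapper fields that are unset come out as
+            // an explicit JSON null, while wrappers that have been set print their inner value.)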
+ var message = new TestWellKnownTypes { Int32Field = 10 }; + var formatter = new JsonFormatter(new JsonFormatter.Settings(true)); + var actualJson = formatter.Format(message); + Assert.IsTrue(actualJson.Contains("\"int64Field\": null")); + Assert.IsFalse(actualJson.Contains("\"int32Field\": null")); + } + + [Test] + public void OutputIsInNumericFieldOrder_NoDefaults() + { + var formatter = new JsonFormatter(new JsonFormatter.Settings(false)); + var message = new TestJsonFieldOrdering { PlainString = "p1", PlainInt32 = 2 }; + AssertJson("{ 'plainString': 'p1', 'plainInt32': 2 }", formatter.Format(message)); + message = new TestJsonFieldOrdering { O1Int32 = 5, O2String = "o2", PlainInt32 = 10, PlainString = "plain" }; + AssertJson("{ 'plainString': 'plain', 'o2String': 'o2', 'plainInt32': 10, 'o1Int32': 5 }", formatter.Format(message)); + message = new TestJsonFieldOrdering { O1String = "", O2Int32 = 0, PlainInt32 = 10, PlainString = "plain" }; + AssertJson("{ 'plainString': 'plain', 'o1String': '', 'plainInt32': 10, 'o2Int32': 0 }", formatter.Format(message)); + } + + [Test] + public void OutputIsInNumericFieldOrder_WithDefaults() + { + var formatter = new JsonFormatter(new JsonFormatter.Settings(true)); + var message = new TestJsonFieldOrdering(); + AssertJson("{ 'plainString': '', 'plainInt32': 0 }", formatter.Format(message)); + message = new TestJsonFieldOrdering { O1Int32 = 5, O2String = "o2", PlainInt32 = 10, PlainString = "plain" }; + AssertJson("{ 'plainString': 'plain', 'o2String': 'o2', 'plainInt32': 10, 'o1Int32': 5 }", formatter.Format(message)); + message = new TestJsonFieldOrdering { O1String = "", O2Int32 = 0, PlainInt32 = 10, PlainString = "plain" }; + AssertJson("{ 'plainString': 'plain', 'o1String': '', 'plainInt32': 10, 'o2Int32': 0 }", formatter.Format(message)); + } + + [Test] + [TestCase("1970-01-01T00:00:00Z", 0)] + [TestCase("1970-01-01T00:00:00.000000001Z", 1)] + [TestCase("1970-01-01T00:00:00.000000010Z", 10)] + [TestCase("1970-01-01T00:00:00.000000100Z", 100)] + [TestCase("1970-01-01T00:00:00.000001Z", 1000)] + [TestCase("1970-01-01T00:00:00.000010Z", 10000)] + [TestCase("1970-01-01T00:00:00.000100Z", 100000)] + [TestCase("1970-01-01T00:00:00.001Z", 1000000)] + [TestCase("1970-01-01T00:00:00.010Z", 10000000)] + [TestCase("1970-01-01T00:00:00.100Z", 100000000)] + [TestCase("1970-01-01T00:00:00.120Z", 120000000)] + [TestCase("1970-01-01T00:00:00.123Z", 123000000)] + [TestCase("1970-01-01T00:00:00.123400Z", 123400000)] + [TestCase("1970-01-01T00:00:00.123450Z", 123450000)] + [TestCase("1970-01-01T00:00:00.123456Z", 123456000)] + [TestCase("1970-01-01T00:00:00.123456700Z", 123456700)] + [TestCase("1970-01-01T00:00:00.123456780Z", 123456780)] + [TestCase("1970-01-01T00:00:00.123456789Z", 123456789)] + public void TimestampStandalone(string expected, int nanos) + { + Assert.AreEqual(WrapInQuotes(expected), new Timestamp { Nanos = nanos }.ToString()); + } + + [Test] + public void TimestampStandalone_FromDateTime() + { + // One before and one after the Unix epoch, more easily represented via DateTime. 
+ Assert.AreEqual("\"1673-06-19T12:34:56Z\"", + new DateTime(1673, 6, 19, 12, 34, 56, DateTimeKind.Utc).ToTimestamp().ToString()); + Assert.AreEqual("\"2015-07-31T10:29:34Z\"", + new DateTime(2015, 7, 31, 10, 29, 34, DateTimeKind.Utc).ToTimestamp().ToString()); + } + + [Test] + [TestCase(-1, -1)] // Would be valid as duration + [TestCase(1, Timestamp.MaxNanos + 1)] + [TestCase(Timestamp.UnixSecondsAtBclMaxValue + 1, 0)] + [TestCase(Timestamp.UnixSecondsAtBclMinValue - 1, 0)] + public void TimestampStandalone_NonNormalized(long seconds, int nanoseconds) + { + var timestamp = new Timestamp { Seconds = seconds, Nanos = nanoseconds }; + Assert.Throws(() => JsonFormatter.Default.Format(timestamp)); + } + + [Test] + public void TimestampField() + { + var message = new TestWellKnownTypes { TimestampField = new Timestamp() }; + AssertJson("{ 'timestampField': '1970-01-01T00:00:00Z' }", JsonFormatter.Default.Format(message)); + } + + [Test] + [TestCase(0, 0, "0s")] + [TestCase(1, 0, "1s")] + [TestCase(-1, 0, "-1s")] + [TestCase(0, 1, "0.000000001s")] + [TestCase(0, 10, "0.000000010s")] + [TestCase(0, 100, "0.000000100s")] + [TestCase(0, 1000, "0.000001s")] + [TestCase(0, 10000, "0.000010s")] + [TestCase(0, 100000, "0.000100s")] + [TestCase(0, 1000000, "0.001s")] + [TestCase(0, 10000000, "0.010s")] + [TestCase(0, 100000000, "0.100s")] + [TestCase(0, 120000000, "0.120s")] + [TestCase(0, 123000000, "0.123s")] + [TestCase(0, 123400000, "0.123400s")] + [TestCase(0, 123450000, "0.123450s")] + [TestCase(0, 123456000, "0.123456s")] + [TestCase(0, 123456700, "0.123456700s")] + [TestCase(0, 123456780, "0.123456780s")] + [TestCase(0, 123456789, "0.123456789s")] + [TestCase(0, -100000000, "-0.100s")] + [TestCase(1, 100000000, "1.100s")] + [TestCase(-1, -100000000, "-1.100s")] + public void DurationStandalone(long seconds, int nanoseconds, string expected) + { + var json = JsonFormatter.Default.Format(new Duration { Seconds = seconds, Nanos = nanoseconds }); + Assert.AreEqual(WrapInQuotes(expected), json); + } + + [Test] + [TestCase(1, 2123456789)] + [TestCase(1, -100000000)] + public void DurationStandalone_NonNormalized(long seconds, int nanoseconds) + { + var duration = new Duration { Seconds = seconds, Nanos = nanoseconds }; + Assert.Throws(() => JsonFormatter.Default.Format(duration)); + } + + [Test] + public void DurationField() + { + var message = new TestWellKnownTypes { DurationField = new Duration() }; + AssertJson("{ 'durationField': '0s' }", JsonFormatter.Default.Format(message)); + } + + [Test] + public void StructSample() + { + var message = new Struct + { + Fields = + { + { "a", Value.ForNull() }, + { "b", Value.ForBool(false) }, + { "c", Value.ForNumber(10.5) }, + { "d", Value.ForString("text") }, + { "e", Value.ForList(Value.ForString("t1"), Value.ForNumber(5)) }, + { "f", Value.ForStruct(new Struct { Fields = { { "nested", Value.ForString("value") } } }) } + } + }; + AssertJson("{ 'a': null, 'b': false, 'c': 10.5, 'd': 'text', 'e': [ 't1', 5 ], 'f': { 'nested': 'value' } }", message.ToString()); + } + + [Test] + [TestCase("foo__bar")] + [TestCase("foo_3_ar")] + [TestCase("fooBar")] + public void FieldMaskInvalid(string input) + { + var mask = new FieldMask { Paths = { input } }; + Assert.Throws(() => JsonFormatter.Default.Format(mask)); + } + + [Test] + public void FieldMaskStandalone() + { + var fieldMask = new FieldMask { Paths = { "", "single", "with_underscore", "nested.field.name", "nested..double_dot" } }; + 
Assert.AreEqual("\",single,withUnderscore,nested.field.name,nested..doubleDot\"", fieldMask.ToString()); + + // Invalid, but we shouldn't create broken JSON... + fieldMask = new FieldMask { Paths = { "x\\y" } }; + Assert.AreEqual(@"""x\\y""", fieldMask.ToString()); + } + + [Test] + public void FieldMaskField() + { + var message = new TestWellKnownTypes { FieldMaskField = new FieldMask { Paths = { "user.display_name", "photo" } } }; + AssertJson("{ 'fieldMaskField': 'user.displayName,photo' }", JsonFormatter.Default.Format(message)); + } + + // SourceContext is an example of a well-known type with no special JSON handling + [Test] + public void SourceContextStandalone() + { + var message = new SourceContext { FileName = "foo.proto" }; + AssertJson("{ 'fileName': 'foo.proto' }", JsonFormatter.Default.Format(message)); + } + + [Test] + public void AnyWellKnownType() + { + var formatter = new JsonFormatter(new JsonFormatter.Settings(false, TypeRegistry.FromMessages(Timestamp.Descriptor))); + var timestamp = new DateTime(1673, 6, 19, 12, 34, 56, DateTimeKind.Utc).ToTimestamp(); + var any = Any.Pack(timestamp); + AssertJson("{ '@type': 'type.googleapis.com/google.protobuf.Timestamp', 'value': '1673-06-19T12:34:56Z' }", formatter.Format(any)); + } + + [Test] + public void AnyMessageType() + { + var formatter = new JsonFormatter(new JsonFormatter.Settings(false, TypeRegistry.FromMessages(TestAllTypes.Descriptor))); + var message = new TestAllTypes { SingleInt32 = 10, SingleNestedMessage = new TestAllTypes.Types.NestedMessage { Bb = 20 } }; + var any = Any.Pack(message); + AssertJson("{ '@type': 'type.googleapis.com/protobuf_unittest.TestAllTypes', 'singleInt32': 10, 'singleNestedMessage': { 'bb': 20 } }", formatter.Format(any)); + } + + [Test] + public void AnyMessageType_CustomPrefix() + { + var formatter = new JsonFormatter(new JsonFormatter.Settings(false, TypeRegistry.FromMessages(TestAllTypes.Descriptor))); + var message = new TestAllTypes { SingleInt32 = 10 }; + var any = Any.Pack(message, "foo.bar/baz"); + AssertJson("{ '@type': 'foo.bar/baz/protobuf_unittest.TestAllTypes', 'singleInt32': 10 }", formatter.Format(any)); + } + + [Test] + public void AnyNested() + { + var registry = TypeRegistry.FromMessages(TestWellKnownTypes.Descriptor, TestAllTypes.Descriptor); + var formatter = new JsonFormatter(new JsonFormatter.Settings(false, registry)); + + // Nest an Any as the value of an Any. + var doubleNestedMessage = new TestAllTypes { SingleInt32 = 20 }; + var nestedMessage = Any.Pack(doubleNestedMessage); + var message = new TestWellKnownTypes { AnyField = Any.Pack(nestedMessage) }; + AssertJson("{ 'anyField': { '@type': 'type.googleapis.com/google.protobuf.Any', 'value': { '@type': 'type.googleapis.com/protobuf_unittest.TestAllTypes', 'singleInt32': 20 } } }", + formatter.Format(message)); + } + + [Test] + public void AnyUnknownType() + { + // The default type registry doesn't have any types in it. + var message = new TestAllTypes(); + var any = Any.Pack(message); + Assert.Throws(() => JsonFormatter.Default.Format(any)); + } + + /// + /// Checks that the actual JSON is the same as the expected JSON - but after replacing + /// all apostrophes in the expected JSON with double quotes. This basically makes the tests easier + /// to read. 
+ /// + private static void AssertJson(string expectedJsonWithApostrophes, string actualJson) + { + var expectedJson = expectedJsonWithApostrophes.Replace("'", "\""); + Assert.AreEqual(expectedJson, actualJson); + } + } +} diff --git a/packager/third_party/protobuf/csharp/src/Google.Protobuf.Test/JsonParserTest.cs b/packager/third_party/protobuf/csharp/src/Google.Protobuf.Test/JsonParserTest.cs new file mode 100644 index 0000000000..c3ad851b8a --- /dev/null +++ b/packager/third_party/protobuf/csharp/src/Google.Protobuf.Test/JsonParserTest.cs @@ -0,0 +1,936 @@ +#region Copyright notice and license +// Protocol Buffers - Google's data interchange format +// Copyright 2008 Google Inc. All rights reserved. +// https://developers.google.com/protocol-buffers/ +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +#endregion + +using Google.Protobuf.Reflection; +using Google.Protobuf.TestProtos; +using Google.Protobuf.WellKnownTypes; +using NUnit.Framework; +using System; + +namespace Google.Protobuf +{ + /// + /// Unit tests for JSON parsing. + /// + public class JsonParserTest + { + // Sanity smoke test + [Test] + public void AllTypesRoundtrip() + { + AssertRoundtrip(SampleMessages.CreateFullTestAllTypes()); + } + + [Test] + public void Maps() + { + AssertRoundtrip(new TestMap { MapStringString = { { "with spaces", "bar" }, { "a", "b" } } }); + AssertRoundtrip(new TestMap { MapInt32Int32 = { { 0, 1 }, { 2, 3 } } }); + AssertRoundtrip(new TestMap { MapBoolBool = { { false, true }, { true, false } } }); + } + + [Test] + [TestCase(" 1 ")] + [TestCase("+1")] + [TestCase("1,000")] + [TestCase("1.5")] + public void IntegerMapKeysAreStrict(string keyText) + { + // Test that integer parsing is strict. We assume that if this is correct for int32, + // it's correct for other numeric key types. 
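+            // (Strict means the key must be a plain decimal string: no surrounding whitespace,
+            // no leading '+', no thousands separators and no fractional part.)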
+ var json = "{ \"mapInt32Int32\": { \"" + keyText + "\" : \"1\" } }"; + Assert.Throws(() => JsonParser.Default.Parse(json)); + } + + [Test] + public void OriginalFieldNameAccepted() + { + var json = "{ \"single_int32\": 10 }"; + var expected = new TestAllTypes { SingleInt32 = 10 }; + Assert.AreEqual(expected, TestAllTypes.Parser.ParseJson(json)); + } + + [Test] + public void SourceContextRoundtrip() + { + AssertRoundtrip(new SourceContext { FileName = "foo.proto" }); + } + + [Test] + public void SingularWrappers_DefaultNonNullValues() + { + var message = new TestWellKnownTypes + { + StringField = "", + BytesField = ByteString.Empty, + BoolField = false, + FloatField = 0f, + DoubleField = 0d, + Int32Field = 0, + Int64Field = 0, + Uint32Field = 0, + Uint64Field = 0 + }; + AssertRoundtrip(message); + } + + [Test] + public void SingularWrappers_NonDefaultValues() + { + var message = new TestWellKnownTypes + { + StringField = "x", + BytesField = ByteString.CopyFrom(1, 2, 3), + BoolField = true, + FloatField = 12.5f, + DoubleField = 12.25d, + Int32Field = 1, + Int64Field = 2, + Uint32Field = 3, + Uint64Field = 4 + }; + AssertRoundtrip(message); + } + + [Test] + public void SingularWrappers_ExplicitNulls() + { + // When we parse the "valueField": null part, we remember it... basically, it's one case + // where explicit default values don't fully roundtrip. + var message = new TestWellKnownTypes { ValueField = Value.ForNull() }; + var json = new JsonFormatter(new JsonFormatter.Settings(true)).Format(message); + var parsed = JsonParser.Default.Parse(json); + Assert.AreEqual(message, parsed); + } + + [Test] + [TestCase(typeof(Int32Value), "32", 32)] + [TestCase(typeof(Int64Value), "32", 32L)] + [TestCase(typeof(UInt32Value), "32", 32U)] + [TestCase(typeof(UInt64Value), "32", 32UL)] + [TestCase(typeof(StringValue), "\"foo\"", "foo")] + [TestCase(typeof(FloatValue), "1.5", 1.5f)] + [TestCase(typeof(DoubleValue), "1.5", 1.5d)] + public void Wrappers_Standalone(System.Type wrapperType, string json, object expectedValue) + { + IMessage parsed = (IMessage)Activator.CreateInstance(wrapperType); + IMessage expected = (IMessage)Activator.CreateInstance(wrapperType); + JsonParser.Default.Merge(parsed, "null"); + Assert.AreEqual(expected, parsed); + + JsonParser.Default.Merge(parsed, json); + expected.Descriptor.Fields[WrappersReflection.WrapperValueFieldNumber].Accessor.SetValue(expected, expectedValue); + Assert.AreEqual(expected, parsed); + } + + [Test] + public void ExplicitNullValue() + { + string json = "{\"valueField\": null}"; + var message = JsonParser.Default.Parse(json); + Assert.AreEqual(new TestWellKnownTypes { ValueField = Value.ForNull() }, message); + } + + [Test] + public void BytesWrapper_Standalone() + { + ByteString data = ByteString.CopyFrom(1, 2, 3); + // Can't do this with attributes... 
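+            // (ByteString values aren't compile-time constants, so unlike the other wrapper
+            // types this case can't be expressed as a [TestCase] attribute.)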
+ var parsed = JsonParser.Default.Parse(WrapInQuotes(data.ToBase64())); + var expected = new BytesValue { Value = data }; + Assert.AreEqual(expected, parsed); + } + + [Test] + public void RepeatedWrappers() + { + var message = new RepeatedWellKnownTypes + { + BoolField = { true, false }, + BytesField = { ByteString.CopyFrom(1, 2, 3), ByteString.CopyFrom(4, 5, 6), ByteString.Empty }, + DoubleField = { 12.5, -1.5, 0d }, + FloatField = { 123.25f, -20f, 0f }, + Int32Field = { int.MaxValue, int.MinValue, 0 }, + Int64Field = { long.MaxValue, long.MinValue, 0L }, + StringField = { "First", "Second", "" }, + Uint32Field = { uint.MaxValue, uint.MinValue, 0U }, + Uint64Field = { ulong.MaxValue, ulong.MinValue, 0UL }, + }; + AssertRoundtrip(message); + } + + [Test] + public void RepeatedField_NullElementProhibited() + { + string json = "{ \"repeated_foreign_message\": [null] }"; + Assert.Throws(() => TestAllTypes.Parser.ParseJson(json)); + } + + [Test] + public void RepeatedField_NullOverallValueAllowed() + { + string json = "{ \"repeated_foreign_message\": null }"; + Assert.AreEqual(new TestAllTypes(), TestAllTypes.Parser.ParseJson(json)); + } + + [Test] + [TestCase("{ \"mapInt32Int32\": { \"10\": null }")] + [TestCase("{ \"mapStringString\": { \"abc\": null }")] + [TestCase("{ \"mapInt32ForeignMessage\": { \"10\": null }")] + public void MapField_NullValueProhibited(string json) + { + Assert.Throws(() => TestMap.Parser.ParseJson(json)); + } + + [Test] + public void MapField_NullOverallValueAllowed() + { + string json = "{ \"mapInt32Int32\": null }"; + Assert.AreEqual(new TestMap(), TestMap.Parser.ParseJson(json)); + } + + [Test] + public void IndividualWrapperTypes() + { + Assert.AreEqual(new StringValue { Value = "foo" }, StringValue.Parser.ParseJson("\"foo\"")); + Assert.AreEqual(new Int32Value { Value = 1 }, Int32Value.Parser.ParseJson("1")); + // Can parse strings directly too + Assert.AreEqual(new Int32Value { Value = 1 }, Int32Value.Parser.ParseJson("\"1\"")); + } + + private static void AssertRoundtrip(T message) where T : IMessage, new() + { + var clone = message.Clone(); + var json = JsonFormatter.Default.Format(message); + var parsed = JsonParser.Default.Parse(json); + Assert.AreEqual(clone, parsed); + } + + [Test] + [TestCase("0", 0)] + [TestCase("-0", 0)] // Not entirely clear whether we intend to allow this... 
+ [TestCase("1", 1)] + [TestCase("-1", -1)] + [TestCase("2147483647", 2147483647)] + [TestCase("-2147483648", -2147483648)] + public void StringToInt32_Valid(string jsonValue, int expectedParsedValue) + { + string json = "{ \"singleInt32\": \"" + jsonValue + "\"}"; + var parsed = TestAllTypes.Parser.ParseJson(json); + Assert.AreEqual(expectedParsedValue, parsed.SingleInt32); + } + + [Test] + [TestCase("+0")] + [TestCase(" 1")] + [TestCase("1 ")] + [TestCase("00")] + [TestCase("-00")] + [TestCase("--1")] + [TestCase("+1")] + [TestCase("1.5")] + [TestCase("1e10")] + [TestCase("2147483648")] + [TestCase("-2147483649")] + public void StringToInt32_Invalid(string jsonValue) + { + string json = "{ \"singleInt32\": \"" + jsonValue + "\"}"; + Assert.Throws(() => TestAllTypes.Parser.ParseJson(json)); + } + + [Test] + [TestCase("0", 0U)] + [TestCase("1", 1U)] + [TestCase("4294967295", 4294967295U)] + public void StringToUInt32_Valid(string jsonValue, uint expectedParsedValue) + { + string json = "{ \"singleUint32\": \"" + jsonValue + "\"}"; + var parsed = TestAllTypes.Parser.ParseJson(json); + Assert.AreEqual(expectedParsedValue, parsed.SingleUint32); + } + + // Assume that anything non-bounds-related is covered in the Int32 case + [Test] + [TestCase("-1")] + [TestCase("4294967296")] + public void StringToUInt32_Invalid(string jsonValue) + { + string json = "{ \"singleUint32\": \"" + jsonValue + "\"}"; + Assert.Throws(() => TestAllTypes.Parser.ParseJson(json)); + } + + [Test] + [TestCase("0", 0L)] + [TestCase("1", 1L)] + [TestCase("-1", -1L)] + [TestCase("9223372036854775807", 9223372036854775807)] + [TestCase("-9223372036854775808", -9223372036854775808)] + public void StringToInt64_Valid(string jsonValue, long expectedParsedValue) + { + string json = "{ \"singleInt64\": \"" + jsonValue + "\"}"; + var parsed = TestAllTypes.Parser.ParseJson(json); + Assert.AreEqual(expectedParsedValue, parsed.SingleInt64); + } + + // Assume that anything non-bounds-related is covered in the Int32 case + [Test] + [TestCase("-9223372036854775809")] + [TestCase("9223372036854775808")] + public void StringToInt64_Invalid(string jsonValue) + { + string json = "{ \"singleInt64\": \"" + jsonValue + "\"}"; + Assert.Throws(() => TestAllTypes.Parser.ParseJson(json)); + } + + [Test] + [TestCase("0", 0UL)] + [TestCase("1", 1UL)] + [TestCase("18446744073709551615", 18446744073709551615)] + public void StringToUInt64_Valid(string jsonValue, ulong expectedParsedValue) + { + string json = "{ \"singleUint64\": \"" + jsonValue + "\"}"; + var parsed = TestAllTypes.Parser.ParseJson(json); + Assert.AreEqual(expectedParsedValue, parsed.SingleUint64); + } + + // Assume that anything non-bounds-related is covered in the Int32 case + [Test] + [TestCase("-1")] + [TestCase("18446744073709551616")] + public void StringToUInt64_Invalid(string jsonValue) + { + string json = "{ \"singleUint64\": \"" + jsonValue + "\"}"; + Assert.Throws(() => TestAllTypes.Parser.ParseJson(json)); + } + + [Test] + [TestCase("0", 0d)] + [TestCase("1", 1d)] + [TestCase("1.000000", 1d)] + [TestCase("1.0000000000000000000000001", 1d)] // We don't notice that we haven't preserved the exact value + [TestCase("-1", -1d)] + [TestCase("1e1", 10d)] + [TestCase("1e01", 10d)] // Leading decimals are allowed in exponents + [TestCase("1E1", 10d)] // Either case is fine + [TestCase("-1e1", -10d)] + [TestCase("1.5e1", 15d)] + [TestCase("-1.5e1", -15d)] + [TestCase("15e-1", 1.5d)] + [TestCase("-15e-1", -1.5d)] + [TestCase("1.79769e308", 1.79769e308)] + [TestCase("-1.79769e308", 
-1.79769e308)] + [TestCase("Infinity", double.PositiveInfinity)] + [TestCase("-Infinity", double.NegativeInfinity)] + [TestCase("NaN", double.NaN)] + public void StringToDouble_Valid(string jsonValue, double expectedParsedValue) + { + string json = "{ \"singleDouble\": \"" + jsonValue + "\"}"; + var parsed = TestAllTypes.Parser.ParseJson(json); + Assert.AreEqual(expectedParsedValue, parsed.SingleDouble); + } + + [Test] + [TestCase("1.7977e308")] + [TestCase("-1.7977e308")] + [TestCase("1e309")] + [TestCase("1,0")] + [TestCase("1.0.0")] + [TestCase("+1")] + [TestCase("00")] + [TestCase("01")] + [TestCase("-00")] + [TestCase("-01")] + [TestCase("--1")] + [TestCase(" Infinity")] + [TestCase(" -Infinity")] + [TestCase("NaN ")] + [TestCase("Infinity ")] + [TestCase("-Infinity ")] + [TestCase(" NaN")] + [TestCase("INFINITY")] + [TestCase("nan")] + [TestCase("\u00BD")] // 1/2 as a single Unicode character. Just sanity checking... + public void StringToDouble_Invalid(string jsonValue) + { + string json = "{ \"singleDouble\": \"" + jsonValue + "\"}"; + Assert.Throws(() => TestAllTypes.Parser.ParseJson(json)); + } + + [Test] + [TestCase("0", 0f)] + [TestCase("1", 1f)] + [TestCase("1.000000", 1f)] + [TestCase("-1", -1f)] + [TestCase("3.402823e38", 3.402823e38f)] + [TestCase("-3.402823e38", -3.402823e38f)] + [TestCase("1.5e1", 15f)] + [TestCase("15e-1", 1.5f)] + public void StringToFloat_Valid(string jsonValue, float expectedParsedValue) + { + string json = "{ \"singleFloat\": \"" + jsonValue + "\"}"; + var parsed = TestAllTypes.Parser.ParseJson(json); + Assert.AreEqual(expectedParsedValue, parsed.SingleFloat); + } + + [Test] + [TestCase("3.402824e38")] + [TestCase("-3.402824e38")] + [TestCase("1,0")] + [TestCase("1.0.0")] + [TestCase("+1")] + [TestCase("00")] + [TestCase("--1")] + public void StringToFloat_Invalid(string jsonValue) + { + string json = "{ \"singleFloat\": \"" + jsonValue + "\"}"; + Assert.Throws(() => TestAllTypes.Parser.ParseJson(json)); + } + + [Test] + [TestCase("0", 0)] + [TestCase("-0", 0)] // Not entirely clear whether we intend to allow this... 
+ [TestCase("1", 1)] + [TestCase("-1", -1)] + [TestCase("2147483647", 2147483647)] + [TestCase("-2147483648", -2147483648)] + [TestCase("1e1", 10)] + [TestCase("-1e1", -10)] + [TestCase("10.00", 10)] + [TestCase("-10.00", -10)] + public void NumberToInt32_Valid(string jsonValue, int expectedParsedValue) + { + string json = "{ \"singleInt32\": " + jsonValue + "}"; + var parsed = TestAllTypes.Parser.ParseJson(json); + Assert.AreEqual(expectedParsedValue, parsed.SingleInt32); + } + + [Test] + [TestCase("+0", typeof(InvalidJsonException))] + [TestCase("00", typeof(InvalidJsonException))] + [TestCase("-00", typeof(InvalidJsonException))] + [TestCase("--1", typeof(InvalidJsonException))] + [TestCase("+1", typeof(InvalidJsonException))] + [TestCase("1.5", typeof(InvalidProtocolBufferException))] + // Value is out of range + [TestCase("1e10", typeof(InvalidProtocolBufferException))] + [TestCase("2147483648", typeof(InvalidProtocolBufferException))] + [TestCase("-2147483649", typeof(InvalidProtocolBufferException))] + public void NumberToInt32_Invalid(string jsonValue, System.Type expectedExceptionType) + { + string json = "{ \"singleInt32\": " + jsonValue + "}"; + Assert.Throws(expectedExceptionType, () => TestAllTypes.Parser.ParseJson(json)); + } + + [Test] + [TestCase("0", 0U)] + [TestCase("1", 1U)] + [TestCase("4294967295", 4294967295U)] + public void NumberToUInt32_Valid(string jsonValue, uint expectedParsedValue) + { + string json = "{ \"singleUint32\": " + jsonValue + "}"; + var parsed = TestAllTypes.Parser.ParseJson(json); + Assert.AreEqual(expectedParsedValue, parsed.SingleUint32); + } + + // Assume that anything non-bounds-related is covered in the Int32 case + [Test] + [TestCase("-1")] + [TestCase("4294967296")] + public void NumberToUInt32_Invalid(string jsonValue) + { + string json = "{ \"singleUint32\": " + jsonValue + "}"; + Assert.Throws(() => TestAllTypes.Parser.ParseJson(json)); + } + + [Test] + [TestCase("0", 0L)] + [TestCase("1", 1L)] + [TestCase("-1", -1L)] + // long.MaxValue isn't actually representable as a double. This string value is the highest + // representable value which isn't greater than long.MaxValue. + [TestCase("9223372036854774784", 9223372036854774784)] + [TestCase("-9223372036854775808", -9223372036854775808)] + public void NumberToInt64_Valid(string jsonValue, long expectedParsedValue) + { + string json = "{ \"singleInt64\": " + jsonValue + "}"; + var parsed = TestAllTypes.Parser.ParseJson(json); + Assert.AreEqual(expectedParsedValue, parsed.SingleInt64); + } + + // Assume that anything non-bounds-related is covered in the Int32 case + [Test] + [TestCase("9223372036854775808")] + // Theoretical bound would be -9223372036854775809, but when that is parsed to a double + // we end up with the exact value of long.MinValue due to lack of precision. The value here + // is the "next double down". + [TestCase("-9223372036854780000")] + public void NumberToInt64_Invalid(string jsonValue) + { + string json = "{ \"singleInt64\": " + jsonValue + "}"; + Assert.Throws(() => TestAllTypes.Parser.ParseJson(json)); + } + + [Test] + [TestCase("0", 0UL)] + [TestCase("1", 1UL)] + // ulong.MaxValue isn't representable as a double. This value is the largest double within + // the range of ulong. 
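+        // (Doubles have a 53-bit significand, so integers between 2^63 and 2^64 are spaced
+        // 2048 apart; 18446744073709549568 is 2^64 - 2048, the largest double below 2^64.)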
+ [TestCase("18446744073709549568", 18446744073709549568UL)] + public void NumberToUInt64_Valid(string jsonValue, ulong expectedParsedValue) + { + string json = "{ \"singleUint64\": " + jsonValue + "}"; + var parsed = TestAllTypes.Parser.ParseJson(json); + Assert.AreEqual(expectedParsedValue, parsed.SingleUint64); + } + + // Assume that anything non-bounds-related is covered in the Int32 case + [Test] + [TestCase("-1")] + [TestCase("18446744073709551616")] + public void NumberToUInt64_Invalid(string jsonValue) + { + string json = "{ \"singleUint64\": " + jsonValue + "}"; + Assert.Throws(() => TestAllTypes.Parser.ParseJson(json)); + } + + [Test] + [TestCase("0", 0d)] + [TestCase("1", 1d)] + [TestCase("1.000000", 1d)] + [TestCase("1.0000000000000000000000001", 1d)] // We don't notice that we haven't preserved the exact value + [TestCase("-1", -1d)] + [TestCase("1e1", 10d)] + [TestCase("1e01", 10d)] // Leading decimals are allowed in exponents + [TestCase("1E1", 10d)] // Either case is fine + [TestCase("-1e1", -10d)] + [TestCase("1.5e1", 15d)] + [TestCase("-1.5e1", -15d)] + [TestCase("15e-1", 1.5d)] + [TestCase("-15e-1", -1.5d)] + [TestCase("1.79769e308", 1.79769e308)] + [TestCase("-1.79769e308", -1.79769e308)] + public void NumberToDouble_Valid(string jsonValue, double expectedParsedValue) + { + string json = "{ \"singleDouble\": " + jsonValue + "}"; + var parsed = TestAllTypes.Parser.ParseJson(json); + Assert.AreEqual(expectedParsedValue, parsed.SingleDouble); + } + + [Test] + [TestCase("1.7977e308")] + [TestCase("-1.7977e308")] + [TestCase("1e309")] + [TestCase("1,0")] + [TestCase("1.0.0")] + [TestCase("+1")] + [TestCase("00")] + [TestCase("--1")] + [TestCase("\u00BD")] // 1/2 as a single Unicode character. Just sanity checking... + public void NumberToDouble_Invalid(string jsonValue) + { + string json = "{ \"singleDouble\": " + jsonValue + "}"; + Assert.Throws(() => TestAllTypes.Parser.ParseJson(json)); + } + + [Test] + [TestCase("0", 0f)] + [TestCase("1", 1f)] + [TestCase("1.000000", 1f)] + [TestCase("-1", -1f)] + [TestCase("3.402823e38", 3.402823e38f)] + [TestCase("-3.402823e38", -3.402823e38f)] + [TestCase("1.5e1", 15f)] + [TestCase("15e-1", 1.5f)] + public void NumberToFloat_Valid(string jsonValue, float expectedParsedValue) + { + string json = "{ \"singleFloat\": " + jsonValue + "}"; + var parsed = TestAllTypes.Parser.ParseJson(json); + Assert.AreEqual(expectedParsedValue, parsed.SingleFloat); + } + + [Test] + [TestCase("3.402824e38", typeof(InvalidProtocolBufferException))] + [TestCase("-3.402824e38", typeof(InvalidProtocolBufferException))] + [TestCase("1,0", typeof(InvalidJsonException))] + [TestCase("1.0.0", typeof(InvalidJsonException))] + [TestCase("+1", typeof(InvalidJsonException))] + [TestCase("00", typeof(InvalidJsonException))] + [TestCase("--1", typeof(InvalidJsonException))] + public void NumberToFloat_Invalid(string jsonValue, System.Type expectedExceptionType) + { + string json = "{ \"singleFloat\": " + jsonValue + "}"; + Assert.Throws(expectedExceptionType, () => TestAllTypes.Parser.ParseJson(json)); + } + + // The simplest way of testing that the value has parsed correctly is to reformat it, + // as we trust the formatting. In many cases that will give the same result as the input, + // so in those cases we accept an expectedFormatted value of null. Sometimes the results + // will be different though, due to a different number of digits being provided. 
+ [Test] + // Z offset + [TestCase("2015-10-09T14:46:23.123456789Z", null)] + [TestCase("2015-10-09T14:46:23.123456Z", null)] + [TestCase("2015-10-09T14:46:23.123Z", null)] + [TestCase("2015-10-09T14:46:23Z", null)] + [TestCase("2015-10-09T14:46:23.123456000Z", "2015-10-09T14:46:23.123456Z")] + [TestCase("2015-10-09T14:46:23.1234560Z", "2015-10-09T14:46:23.123456Z")] + [TestCase("2015-10-09T14:46:23.123000000Z", "2015-10-09T14:46:23.123Z")] + [TestCase("2015-10-09T14:46:23.1230Z", "2015-10-09T14:46:23.123Z")] + [TestCase("2015-10-09T14:46:23.00Z", "2015-10-09T14:46:23Z")] + + // +00:00 offset + [TestCase("2015-10-09T14:46:23.123456789+00:00", "2015-10-09T14:46:23.123456789Z")] + [TestCase("2015-10-09T14:46:23.123456+00:00", "2015-10-09T14:46:23.123456Z")] + [TestCase("2015-10-09T14:46:23.123+00:00", "2015-10-09T14:46:23.123Z")] + [TestCase("2015-10-09T14:46:23+00:00", "2015-10-09T14:46:23Z")] + [TestCase("2015-10-09T14:46:23.123456000+00:00", "2015-10-09T14:46:23.123456Z")] + [TestCase("2015-10-09T14:46:23.1234560+00:00", "2015-10-09T14:46:23.123456Z")] + [TestCase("2015-10-09T14:46:23.123000000+00:00", "2015-10-09T14:46:23.123Z")] + [TestCase("2015-10-09T14:46:23.1230+00:00", "2015-10-09T14:46:23.123Z")] + [TestCase("2015-10-09T14:46:23.00+00:00", "2015-10-09T14:46:23Z")] + + // Other offsets (assume by now that the subsecond handling is okay) + [TestCase("2015-10-09T15:46:23.123456789+01:00", "2015-10-09T14:46:23.123456789Z")] + [TestCase("2015-10-09T13:46:23.123456789-01:00", "2015-10-09T14:46:23.123456789Z")] + [TestCase("2015-10-09T15:16:23.123456789+00:30", "2015-10-09T14:46:23.123456789Z")] + [TestCase("2015-10-09T14:16:23.123456789-00:30", "2015-10-09T14:46:23.123456789Z")] + [TestCase("2015-10-09T16:31:23.123456789+01:45", "2015-10-09T14:46:23.123456789Z")] + [TestCase("2015-10-09T13:01:23.123456789-01:45", "2015-10-09T14:46:23.123456789Z")] + [TestCase("2015-10-10T08:46:23.123456789+18:00", "2015-10-09T14:46:23.123456789Z")] + [TestCase("2015-10-08T20:46:23.123456789-18:00", "2015-10-09T14:46:23.123456789Z")] + + // Leap years and min/max + [TestCase("2016-02-29T14:46:23.123456789Z", null)] + [TestCase("2000-02-29T14:46:23.123456789Z", null)] + [TestCase("0001-01-01T00:00:00Z", null)] + [TestCase("9999-12-31T23:59:59.999999999Z", null)] + public void Timestamp_Valid(string jsonValue, string expectedFormatted) + { + expectedFormatted = expectedFormatted ?? 
jsonValue; + string json = WrapInQuotes(jsonValue); + var parsed = Timestamp.Parser.ParseJson(json); + Assert.AreEqual(WrapInQuotes(expectedFormatted), parsed.ToString()); + } + + [Test] + [TestCase("2015-10-09 14:46:23.123456789Z", Description = "No T between date and time")] + [TestCase("2015/10/09T14:46:23.123456789Z", Description = "Wrong date separators")] + [TestCase("2015-10-09T14.46.23.123456789Z", Description = "Wrong time separators")] + [TestCase("2015-10-09T14:46:23,123456789Z", Description = "Wrong fractional second separators (valid ISO-8601 though)")] + [TestCase(" 2015-10-09T14:46:23.123456789Z", Description = "Whitespace at start")] + [TestCase("2015-10-09T14:46:23.123456789Z ", Description = "Whitespace at end")] + [TestCase("2015-10-09T14:46:23.1234567890", Description = "Too many digits")] + [TestCase("2015-10-09T14:46:23.123456789", Description = "No offset")] + [TestCase("2015-13-09T14:46:23.123456789Z", Description = "Invalid month")] + [TestCase("2015-10-32T14:46:23.123456789Z", Description = "Invalid day")] + [TestCase("2015-10-09T24:00:00.000000000Z", Description = "Invalid hour (valid ISO-8601 though)")] + [TestCase("2015-10-09T14:60:23.123456789Z", Description = "Invalid minutes")] + [TestCase("2015-10-09T14:46:60.123456789Z", Description = "Invalid seconds")] + [TestCase("2015-10-09T14:46:23.123456789+18:01", Description = "Offset too large (positive)")] + [TestCase("2015-10-09T14:46:23.123456789-18:01", Description = "Offset too large (negative)")] + [TestCase("2015-10-09T14:46:23.123456789-00:00", Description = "Local offset (-00:00) makes no sense here")] + [TestCase("0001-01-01T00:00:00+00:01", Description = "Value before earliest when offset applied")] + [TestCase("9999-12-31T23:59:59.999999999-00:01", Description = "Value after latest when offset applied")] + [TestCase("2100-02-29T14:46:23.123456789Z", Description = "Feb 29th on a non-leap-year")] + public void Timestamp_Invalid(string jsonValue) + { + string json = WrapInQuotes(jsonValue); + Assert.Throws(() => Timestamp.Parser.ParseJson(json)); + } + + [Test] + public void StructValue_Null() + { + Assert.AreEqual(new Value { NullValue = 0 }, Value.Parser.ParseJson("null")); + } + + [Test] + public void StructValue_String() + { + Assert.AreEqual(new Value { StringValue = "hi" }, Value.Parser.ParseJson("\"hi\"")); + } + + [Test] + public void StructValue_Bool() + { + Assert.AreEqual(new Value { BoolValue = true }, Value.Parser.ParseJson("true")); + Assert.AreEqual(new Value { BoolValue = false }, Value.Parser.ParseJson("false")); + } + + [Test] + public void StructValue_List() + { + Assert.AreEqual(Value.ForList(Value.ForNumber(1), Value.ForString("x")), Value.Parser.ParseJson("[1, \"x\"]")); + } + + [Test] + public void ParseListValue() + { + Assert.AreEqual(new ListValue { Values = { Value.ForNumber(1), Value.ForString("x") } }, ListValue.Parser.ParseJson("[1, \"x\"]")); + } + + [Test] + public void StructValue_Struct() + { + Assert.AreEqual( + Value.ForStruct(new Struct { Fields = { { "x", Value.ForNumber(1) }, { "y", Value.ForString("z") } } }), + Value.Parser.ParseJson("{ \"x\": 1, \"y\": \"z\" }")); + } + + [Test] + public void ParseStruct() + { + Assert.AreEqual(new Struct { Fields = { { "x", Value.ForNumber(1) }, { "y", Value.ForString("z") } } }, + Struct.Parser.ParseJson("{ \"x\": 1, \"y\": \"z\" }")); + } + + // TODO for duration parsing: upper and lower bounds. 
+ // +/- 315576000000 seconds + + [Test] + [TestCase("1.123456789s", null)] + [TestCase("1.123456s", null)] + [TestCase("1.123s", null)] + [TestCase("1.12300s", "1.123s")] + [TestCase("1.12345s", "1.123450s")] + [TestCase("1s", null)] + [TestCase("-1.123456789s", null)] + [TestCase("-1.123456s", null)] + [TestCase("-1.123s", null)] + [TestCase("-1s", null)] + [TestCase("0.123s", null)] + [TestCase("-0.123s", null)] + [TestCase("123456.123s", null)] + [TestCase("-123456.123s", null)] + // Upper and lower bounds + [TestCase("315576000000s", null)] + [TestCase("-315576000000s", null)] + public void Duration_Valid(string jsonValue, string expectedFormatted) + { + expectedFormatted = expectedFormatted ?? jsonValue; + string json = WrapInQuotes(jsonValue); + var parsed = Duration.Parser.ParseJson(json); + Assert.AreEqual(WrapInQuotes(expectedFormatted), parsed.ToString()); + } + + // The simplest way of testing that the value has parsed correctly is to reformat it, + // as we trust the formatting. In many cases that will give the same result as the input, + // so in those cases we accept an expectedFormatted value of null. Sometimes the results + // will be different though, due to a different number of digits being provided. + [Test] + [TestCase("1.1234567890s", Description = "Too many digits")] + [TestCase("1.123456789", Description = "No suffix")] + [TestCase("1.123456789ss", Description = "Too much suffix")] + [TestCase("1.123456789S", Description = "Upper case suffix")] + [TestCase("+1.123456789s", Description = "Leading +")] + [TestCase(".123456789s", Description = "No integer before the fraction")] + [TestCase("1,123456789s", Description = "Comma as decimal separator")] + [TestCase("1x1.123456789s", Description = "Non-digit in integer part")] + [TestCase("1.1x3456789s", Description = "Non-digit in fractional part")] + [TestCase(" 1.123456789s", Description = "Whitespace before fraction")] + [TestCase("1.123456789s ", Description = "Whitespace after value")] + [TestCase("01.123456789s", Description = "Leading zero (positive)")] + [TestCase("-01.123456789s", Description = "Leading zero (negative)")] + [TestCase("--0.123456789s", Description = "Double minus sign")] + // Violate upper/lower bounds in various ways + [TestCase("315576000001s", Description = "Integer part too large")] + [TestCase("3155760000000s", Description = "Integer part too long (positive)")] + [TestCase("-3155760000000s", Description = "Integer part too long (negative)")] + public void Duration_Invalid(string jsonValue) + { + string json = WrapInQuotes(jsonValue); + Assert.Throws(() => Duration.Parser.ParseJson(json)); + } + + // Not as many tests for field masks as I'd like; more to be added when we have more + // detailed specifications. 
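+ // Illustrative sketch (not part of the upstream test data): the JSON form of a FieldMask uses
+ // camelCase path segments, while FieldMask.Paths keeps the snake_case proto field paths.
+ // Assuming the behaviour exercised by the cases below:
+ //   var mask = FieldMask.Parser.ParseJson("\"fooBar.bazQux,foo\"");
+ //   // mask.Paths would then be expected to contain "foo_bar.baz_qux" and "foo".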
+ + [Test] + [TestCase("")] + [TestCase("foo", "foo")] + [TestCase("foo,bar", "foo", "bar")] + [TestCase("foo.bar", "foo.bar")] + [TestCase("fooBar", "foo_bar")] + [TestCase("fooBar.bazQux", "foo_bar.baz_qux")] + public void FieldMask_Valid(string jsonValue, params string[] expectedPaths) + { + string json = WrapInQuotes(jsonValue); + var parsed = FieldMask.Parser.ParseJson(json); + CollectionAssert.AreEqual(expectedPaths, parsed.Paths); + } + + [Test] + [TestCase("foo_bar")] + public void FieldMask_Invalid(string jsonValue) + { + string json = WrapInQuotes(jsonValue); + Assert.Throws(() => FieldMask.Parser.ParseJson(json)); + } + + [Test] + public void Any_RegularMessage() + { + var registry = TypeRegistry.FromMessages(TestAllTypes.Descriptor); + var formatter = new JsonFormatter(new JsonFormatter.Settings(false, TypeRegistry.FromMessages(TestAllTypes.Descriptor))); + var message = new TestAllTypes { SingleInt32 = 10, SingleNestedMessage = new TestAllTypes.Types.NestedMessage { Bb = 20 } }; + var original = Any.Pack(message); + var json = formatter.Format(original); // This is tested in JsonFormatterTest + var parser = new JsonParser(new JsonParser.Settings(10, registry)); + Assert.AreEqual(original, parser.Parse(json)); + string valueFirstJson = "{ \"singleInt32\": 10, \"singleNestedMessage\": { \"bb\": 20 }, \"@type\": \"type.googleapis.com/protobuf_unittest.TestAllTypes\" }"; + Assert.AreEqual(original, parser.Parse(valueFirstJson)); + } + + [Test] + public void Any_CustomPrefix() + { + var registry = TypeRegistry.FromMessages(TestAllTypes.Descriptor); + var message = new TestAllTypes { SingleInt32 = 10 }; + var original = Any.Pack(message, "custom.prefix/middle-part"); + var parser = new JsonParser(new JsonParser.Settings(10, registry)); + string json = "{ \"@type\": \"custom.prefix/middle-part/protobuf_unittest.TestAllTypes\", \"singleInt32\": 10 }"; + Assert.AreEqual(original, parser.Parse(json)); + } + + [Test] + public void Any_UnknownType() + { + string json = "{ \"@type\": \"type.googleapis.com/bogus\" }"; + Assert.Throws(() => Any.Parser.ParseJson(json)); + } + + [Test] + public void Any_NoTypeUrl() + { + string json = "{ \"foo\": \"bar\" }"; + Assert.Throws(() => Any.Parser.ParseJson(json)); + } + + [Test] + public void Any_WellKnownType() + { + var registry = TypeRegistry.FromMessages(Timestamp.Descriptor); + var formatter = new JsonFormatter(new JsonFormatter.Settings(false, registry)); + var timestamp = new DateTime(1673, 6, 19, 12, 34, 56, DateTimeKind.Utc).ToTimestamp(); + var original = Any.Pack(timestamp); + var json = formatter.Format(original); // This is tested in JsonFormatterTest + var parser = new JsonParser(new JsonParser.Settings(10, registry)); + Assert.AreEqual(original, parser.Parse(json)); + string valueFirstJson = "{ \"value\": \"1673-06-19T12:34:56Z\", \"@type\": \"type.googleapis.com/google.protobuf.Timestamp\" }"; + Assert.AreEqual(original, parser.Parse(valueFirstJson)); + } + + [Test] + public void Any_Nested() + { + var registry = TypeRegistry.FromMessages(TestWellKnownTypes.Descriptor, TestAllTypes.Descriptor); + var formatter = new JsonFormatter(new JsonFormatter.Settings(false, registry)); + var parser = new JsonParser(new JsonParser.Settings(10, registry)); + var doubleNestedMessage = new TestAllTypes { SingleInt32 = 20 }; + var nestedMessage = Any.Pack(doubleNestedMessage); + var message = new TestWellKnownTypes { AnyField = Any.Pack(nestedMessage) }; + var json = formatter.Format(message); + // Use the descriptor-based parser just for a change. 
+ Assert.AreEqual(message, parser.Parse(json, TestWellKnownTypes.Descriptor)); + } + + [Test] + public void DataAfterObject() + { + string json = "{} 10"; + Assert.Throws(() => TestAllTypes.Parser.ParseJson(json)); + } + + /// + /// JSON equivalent to + /// + [Test] + public void MaliciousRecursion() + { + string data64 = CodedInputStreamTest.MakeRecursiveMessage(64).ToString(); + string data65 = CodedInputStreamTest.MakeRecursiveMessage(65).ToString(); + + var parser64 = new JsonParser(new JsonParser.Settings(64)); + CodedInputStreamTest.AssertMessageDepth(parser64.Parse(data64), 64); + Assert.Throws(() => parser64.Parse(data65)); + + var parser63 = new JsonParser(new JsonParser.Settings(63)); + Assert.Throws(() => parser63.Parse(data64)); + } + + [Test] + [TestCase("AQI")] + [TestCase("_-==")] + public void Bytes_InvalidBase64(string badBase64) + { + string json = "{ \"singleBytes\": \"" + badBase64 + "\" }"; + Assert.Throws(() => TestAllTypes.Parser.ParseJson(json)); + } + + [Test] + [TestCase("\"FOREIGN_BAR\"", ForeignEnum.ForeignBar)] + [TestCase("5", ForeignEnum.ForeignBar)] + [TestCase("100", (ForeignEnum)100)] + public void EnumValid(string value, ForeignEnum expectedValue) + { + string json = "{ \"singleForeignEnum\": " + value + " }"; + var parsed = TestAllTypes.Parser.ParseJson(json); + Assert.AreEqual(new TestAllTypes { SingleForeignEnum = expectedValue }, parsed); + } + + [Test] + [TestCase("\"NOT_A_VALID_VALUE\"")] + [TestCase("5.5")] + public void Enum_Invalid(string value) + { + string json = "{ \"singleForeignEnum\": " + value + " }"; + Assert.Throws(() => TestAllTypes.Parser.ParseJson(json)); + } + + [Test] + public void OneofDuplicate_Invalid() + { + string json = "{ \"oneofString\": \"x\", \"oneofUint32\": 10 }"; + Assert.Throws(() => TestAllTypes.Parser.ParseJson(json)); + } + + /// + /// Various tests use strings which have quotes round them for parsing or as the result + /// of formatting, but without those quotes being specified in the tests (for the sake of readability). + /// This method simply returns the input, wrapped in double quotes. + /// + internal static string WrapInQuotes(string text) + { + return '"' + text + '"'; + } + } +} \ No newline at end of file diff --git a/packager/third_party/protobuf/csharp/src/Google.Protobuf.Test/JsonTokenizerTest.cs b/packager/third_party/protobuf/csharp/src/Google.Protobuf.Test/JsonTokenizerTest.cs new file mode 100644 index 0000000000..527ab3361e --- /dev/null +++ b/packager/third_party/protobuf/csharp/src/Google.Protobuf.Test/JsonTokenizerTest.cs @@ -0,0 +1,408 @@ +#region Copyright notice and license +// Protocol Buffers - Google's data interchange format +// Copyright 2008 Google Inc. All rights reserved. +// https://developers.google.com/protocol-buffers/ +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. 
+// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +#endregion +using NUnit.Framework; +using System; +using System.IO; + +namespace Google.Protobuf +{ + public class JsonTokenizerTest + { + [Test] + public void EmptyObjectValue() + { + AssertTokens("{}", JsonToken.StartObject, JsonToken.EndObject); + } + + [Test] + public void EmptyArrayValue() + { + AssertTokens("[]", JsonToken.StartArray, JsonToken.EndArray); + } + + [Test] + [TestCase("foo", "foo")] + [TestCase("tab\\t", "tab\t")] + [TestCase("line\\nfeed", "line\nfeed")] + [TestCase("carriage\\rreturn", "carriage\rreturn")] + [TestCase("back\\bspace", "back\bspace")] + [TestCase("form\\ffeed", "form\ffeed")] + [TestCase("escaped\\/slash", "escaped/slash")] + [TestCase("escaped\\\\backslash", "escaped\\backslash")] + [TestCase("escaped\\\"quote", "escaped\"quote")] + [TestCase("foo {}[] bar", "foo {}[] bar")] + [TestCase("foo\\u09aFbar", "foo\u09afbar")] // Digits, upper hex, lower hex + [TestCase("ab\ud800\udc00cd", "ab\ud800\udc00cd")] + [TestCase("ab\\ud800\\udc00cd", "ab\ud800\udc00cd")] + public void StringValue(string json, string expectedValue) + { + AssertTokensNoReplacement("\"" + json + "\"", JsonToken.Value(expectedValue)); + } + + // Valid surrogate pairs, with mixed escaping. These test cases can't be expressed + // using TestCase as they have no valid UTF-8 representation. + // It's unclear exactly how we should handle a mixture of escaped or not: that can't + // come from UTF-8 text, but could come from a .NET string. For the moment, + // treat it as valid in the obvious way. + [Test] + public void MixedSurrogatePairs() + { + string expected = "\ud800\udc00"; + AssertTokens("'\\ud800\udc00'", JsonToken.Value(expected)); + AssertTokens("'\ud800\\udc00'", JsonToken.Value(expected)); + } + + [Test] + public void ObjectDepth() + { + string json = "{ \"foo\": { \"x\": 1, \"y\": [ 0 ] } }"; + var tokenizer = JsonTokenizer.FromTextReader(new StringReader(json)); + // If we had more tests like this, I'd introduce a helper method... but for one test, it's not worth it. 
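+ // Reading aid (annotation only, no extra test logic): ObjectDepth counts enclosing JSON
+ // objects but not arrays, so for { "foo": { "x": 1, "y": [ 0 ] } } the depth is expected to
+ // rise 0 -> 1 at the outer StartObject, 1 -> 2 at the inner StartObject, stay at 2 across the
+ // array tokens, and fall back to 0 as the two EndObject tokens are consumed.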
+ Assert.AreEqual(0, tokenizer.ObjectDepth); + Assert.AreEqual(JsonToken.StartObject, tokenizer.Next()); + Assert.AreEqual(1, tokenizer.ObjectDepth); + Assert.AreEqual(JsonToken.Name("foo"), tokenizer.Next()); + Assert.AreEqual(1, tokenizer.ObjectDepth); + Assert.AreEqual(JsonToken.StartObject, tokenizer.Next()); + Assert.AreEqual(2, tokenizer.ObjectDepth); + Assert.AreEqual(JsonToken.Name("x"), tokenizer.Next()); + Assert.AreEqual(2, tokenizer.ObjectDepth); + Assert.AreEqual(JsonToken.Value(1), tokenizer.Next()); + Assert.AreEqual(2, tokenizer.ObjectDepth); + Assert.AreEqual(JsonToken.Name("y"), tokenizer.Next()); + Assert.AreEqual(2, tokenizer.ObjectDepth); + Assert.AreEqual(JsonToken.StartArray, tokenizer.Next()); + Assert.AreEqual(2, tokenizer.ObjectDepth); // Depth hasn't changed in array + Assert.AreEqual(JsonToken.Value(0), tokenizer.Next()); + Assert.AreEqual(2, tokenizer.ObjectDepth); + Assert.AreEqual(JsonToken.EndArray, tokenizer.Next()); + Assert.AreEqual(2, tokenizer.ObjectDepth); + Assert.AreEqual(JsonToken.EndObject, tokenizer.Next()); + Assert.AreEqual(1, tokenizer.ObjectDepth); + Assert.AreEqual(JsonToken.EndObject, tokenizer.Next()); + Assert.AreEqual(0, tokenizer.ObjectDepth); + Assert.AreEqual(JsonToken.EndDocument, tokenizer.Next()); + Assert.AreEqual(0, tokenizer.ObjectDepth); + } + + [Test] + public void ObjectDepth_WithPushBack() + { + string json = "{}"; + var tokenizer = JsonTokenizer.FromTextReader(new StringReader(json)); + Assert.AreEqual(0, tokenizer.ObjectDepth); + var token = tokenizer.Next(); + Assert.AreEqual(1, tokenizer.ObjectDepth); + // When we push back a "start object", we should effectively be back to the previous depth. + tokenizer.PushBack(token); + Assert.AreEqual(0, tokenizer.ObjectDepth); + // Read the same token again, and get back to depth 1 + token = tokenizer.Next(); + Assert.AreEqual(1, tokenizer.ObjectDepth); + + // Now the same in reverse, with EndObject + token = tokenizer.Next(); + Assert.AreEqual(0, tokenizer.ObjectDepth); + tokenizer.PushBack(token); + Assert.AreEqual(1, tokenizer.ObjectDepth); + tokenizer.Next(); + Assert.AreEqual(0, tokenizer.ObjectDepth); + } + + [Test] + [TestCase("embedded tab\t")] + [TestCase("embedded CR\r")] + [TestCase("embedded LF\n")] + [TestCase("embedded bell\u0007")] + [TestCase("bad escape\\a")] + [TestCase("incomplete escape\\")] + [TestCase("incomplete Unicode escape\\u000")] + [TestCase("invalid Unicode escape\\u000H")] + // Surrogate pair handling, both in raw .NET strings and escaped. We only need + // to detect this in strings, as non-ASCII characters anywhere other than in strings + // will already lead to parsing errors. + [TestCase("\\ud800")] + [TestCase("\\udc00")] + [TestCase("\\ud800x")] + [TestCase("\\udc00x")] + [TestCase("\\udc00\\ud800y")] + public void InvalidStringValue(string json) + { + AssertThrowsAfter("\"" + json + "\""); + } + + // Tests for invalid strings that can't be expressed in attributes, + // as the constants can't be expressed as UTF-8 strings. + [Test] + public void InvalidSurrogatePairs() + { + AssertThrowsAfter("\"\ud800x\""); + AssertThrowsAfter("\"\udc00y\""); + AssertThrowsAfter("\"\udc00\ud800y\""); + } + + [Test] + [TestCase("0", 0)] + [TestCase("-0", 0)] // We don't distinguish between positive and negative 0 + [TestCase("1", 1)] + [TestCase("-1", -1)] + // From here on, assume leading sign is okay... 
+ [TestCase("1.125", 1.125)] + [TestCase("1.0", 1)] + [TestCase("1e5", 100000)] + [TestCase("1e000000", 1)] // Weird, but not prohibited by the spec + [TestCase("1E5", 100000)] + [TestCase("1e+5", 100000)] + [TestCase("1E-5", 0.00001)] + [TestCase("123E-2", 1.23)] + [TestCase("123.45E3", 123450)] + [TestCase(" 1 ", 1)] + public void NumberValue(string json, double expectedValue) + { + AssertTokens(json, JsonToken.Value(expectedValue)); + } + + [Test] + [TestCase("00")] + [TestCase(".5")] + [TestCase("1.")] + [TestCase("1e")] + [TestCase("1e-")] + [TestCase("--")] + [TestCase("--1")] + [TestCase("-1.7977e308")] + [TestCase("1.7977e308")] + public void InvalidNumberValue(string json) + { + AssertThrowsAfter(json); + } + + [Test] + [TestCase("nul")] + [TestCase("nothing")] + [TestCase("truth")] + [TestCase("fALSEhood")] + public void InvalidLiterals(string json) + { + AssertThrowsAfter(json); + } + + [Test] + public void NullValue() + { + AssertTokens("null", JsonToken.Null); + } + + [Test] + public void TrueValue() + { + AssertTokens("true", JsonToken.True); + } + + [Test] + public void FalseValue() + { + AssertTokens("false", JsonToken.False); + } + + [Test] + public void SimpleObject() + { + AssertTokens("{'x': 'y'}", + JsonToken.StartObject, JsonToken.Name("x"), JsonToken.Value("y"), JsonToken.EndObject); + } + + [Test] + [TestCase("[10, 20", 3)] + [TestCase("[10,", 2)] + [TestCase("[10:20]", 2)] + [TestCase("[", 1)] + [TestCase("[,", 1)] + [TestCase("{", 1)] + [TestCase("{,", 1)] + [TestCase("{[", 1)] + [TestCase("{{", 1)] + [TestCase("{0", 1)] + [TestCase("{null", 1)] + [TestCase("{false", 1)] + [TestCase("{true", 1)] + [TestCase("}", 0)] + [TestCase("]", 0)] + [TestCase(",", 0)] + [TestCase("'foo' 'bar'", 1)] + [TestCase(":", 0)] + [TestCase("'foo", 0)] // Incomplete string + [TestCase("{ 'foo' }", 2)] + [TestCase("{ x:1", 1)] // Property names must be quoted + [TestCase("{]", 1)] + [TestCase("[}", 1)] + [TestCase("[1,", 2)] + [TestCase("{'x':0]", 3)] + [TestCase("{ 'foo': }", 2)] + [TestCase("{ 'foo':'bar', }", 3)] + public void InvalidStructure(string json, int expectedValidTokens) + { + // Note: we don't test that the earlier tokens are exactly as expected, + // partly because that's hard to parameterize. 
+ var reader = new StringReader(json.Replace('\'', '"')); + var tokenizer = JsonTokenizer.FromTextReader(reader); + for (int i = 0; i < expectedValidTokens; i++) + { + Assert.IsNotNull(tokenizer.Next()); + } + Assert.Throws(() => tokenizer.Next()); + } + + [Test] + public void ArrayMixedType() + { + AssertTokens("[1, 'foo', null, false, true, [2], {'x':'y' }]", + JsonToken.StartArray, + JsonToken.Value(1), + JsonToken.Value("foo"), + JsonToken.Null, + JsonToken.False, + JsonToken.True, + JsonToken.StartArray, + JsonToken.Value(2), + JsonToken.EndArray, + JsonToken.StartObject, + JsonToken.Name("x"), + JsonToken.Value("y"), + JsonToken.EndObject, + JsonToken.EndArray); + } + + [Test] + public void ObjectMixedType() + { + AssertTokens(@"{'a': 1, 'b': 'bar', 'c': null, 'd': false, 'e': true, + 'f': [2], 'g': {'x':'y' }}", + JsonToken.StartObject, + JsonToken.Name("a"), + JsonToken.Value(1), + JsonToken.Name("b"), + JsonToken.Value("bar"), + JsonToken.Name("c"), + JsonToken.Null, + JsonToken.Name("d"), + JsonToken.False, + JsonToken.Name("e"), + JsonToken.True, + JsonToken.Name("f"), + JsonToken.StartArray, + JsonToken.Value(2), + JsonToken.EndArray, + JsonToken.Name("g"), + JsonToken.StartObject, + JsonToken.Name("x"), + JsonToken.Value("y"), + JsonToken.EndObject, + JsonToken.EndObject); + } + + [Test] + public void NextAfterEndDocumentThrows() + { + var tokenizer = JsonTokenizer.FromTextReader(new StringReader("null")); + Assert.AreEqual(JsonToken.Null, tokenizer.Next()); + Assert.AreEqual(JsonToken.EndDocument, tokenizer.Next()); + Assert.Throws(() => tokenizer.Next()); + } + + [Test] + public void CanPushBackEndDocument() + { + var tokenizer = JsonTokenizer.FromTextReader(new StringReader("null")); + Assert.AreEqual(JsonToken.Null, tokenizer.Next()); + Assert.AreEqual(JsonToken.EndDocument, tokenizer.Next()); + tokenizer.PushBack(JsonToken.EndDocument); + Assert.AreEqual(JsonToken.EndDocument, tokenizer.Next()); + Assert.Throws(() => tokenizer.Next()); + } + + /// + /// Asserts that the specified JSON is tokenized into the given sequence of tokens. + /// All apostrophes are first converted to double quotes, allowing any tests + /// that don't need to check actual apostrophe handling to use apostrophes in the JSON, avoiding + /// messy string literal escaping. The "end document" token is not specified in the list of + /// expected tokens, but is implicit. + /// + private static void AssertTokens(string json, params JsonToken[] expectedTokens) + { + AssertTokensNoReplacement(json.Replace('\'', '"'), expectedTokens); + } + + /// + /// Asserts that the specified JSON is tokenized into the given sequence of tokens. + /// Unlike , this does not perform any character + /// replacement on the specified JSON, and should be used when the text contains apostrophes which + /// are expected to be used *as* apostrophes. The "end document" token is not specified in the list of + /// expected tokens, but is implicit. 
+ /// + private static void AssertTokensNoReplacement(string json, params JsonToken[] expectedTokens) + { + var reader = new StringReader(json); + var tokenizer = JsonTokenizer.FromTextReader(reader); + for (int i = 0; i < expectedTokens.Length; i++) + { + var actualToken = tokenizer.Next(); + if (actualToken == JsonToken.EndDocument) + { + Assert.Fail("Expected {0} but reached end of token stream", expectedTokens[i]); + } + Assert.AreEqual(expectedTokens[i], actualToken); + } + var finalToken = tokenizer.Next(); + if (finalToken != JsonToken.EndDocument) + { + Assert.Fail("Expected token stream to be exhausted; received {0}", finalToken); + } + } + + private static void AssertThrowsAfter(string json, params JsonToken[] expectedTokens) + { + var reader = new StringReader(json); + var tokenizer = JsonTokenizer.FromTextReader(reader); + for (int i = 0; i < expectedTokens.Length; i++) + { + var actualToken = tokenizer.Next(); + if (actualToken == JsonToken.EndDocument) + { + Assert.Fail("Expected {0} but reached end of document", expectedTokens[i]); + } + Assert.AreEqual(expectedTokens[i], actualToken); + } + Assert.Throws(() => tokenizer.Next()); + } + } +} diff --git a/packager/third_party/protobuf/csharp/src/Google.Protobuf.Test/Properties/AppManifest.xml b/packager/third_party/protobuf/csharp/src/Google.Protobuf.Test/Properties/AppManifest.xml new file mode 100644 index 0000000000..6712a11783 --- /dev/null +++ b/packager/third_party/protobuf/csharp/src/Google.Protobuf.Test/Properties/AppManifest.xml @@ -0,0 +1,6 @@ + + + + diff --git a/packager/third_party/protobuf/csharp/src/Google.Protobuf.Test/Properties/AssemblyInfo.cs b/packager/third_party/protobuf/csharp/src/Google.Protobuf.Test/Properties/AssemblyInfo.cs new file mode 100644 index 0000000000..f50940e3a6 --- /dev/null +++ b/packager/third_party/protobuf/csharp/src/Google.Protobuf.Test/Properties/AssemblyInfo.cs @@ -0,0 +1,20 @@ +using System; +using System.Reflection; +using System.Runtime.CompilerServices; +using System.Runtime.InteropServices; + +// General Information about an assembly is controlled through the following +// set of attributes. Change these attribute values to modify the information +// associated with an assembly. + +[assembly: AssemblyTitle("Google.Protobuf.Test")] +[assembly: AssemblyDescription("")] +[assembly: AssemblyConfiguration("")] +[assembly: AssemblyCompany("")] +[assembly: AssemblyProduct("Google.Protobuf.Test")] +[assembly: AssemblyCopyright("Copyright © 2015")] +[assembly: AssemblyTrademark("")] +[assembly: AssemblyCulture("")] + +[assembly: AssemblyVersion("3.0.0.0")] +[assembly: AssemblyFileVersion("3.0.0.0")] diff --git a/packager/third_party/protobuf/csharp/src/Google.Protobuf.Test/Reflection/DescriptorsTest.cs b/packager/third_party/protobuf/csharp/src/Google.Protobuf.Test/Reflection/DescriptorsTest.cs new file mode 100644 index 0000000000..52d5a67697 --- /dev/null +++ b/packager/third_party/protobuf/csharp/src/Google.Protobuf.Test/Reflection/DescriptorsTest.cs @@ -0,0 +1,259 @@ +#region Copyright notice and license +// Protocol Buffers - Google's data interchange format +// Copyright 2008 Google Inc. All rights reserved. +// https://developers.google.com/protocol-buffers/ +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. 
+// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +#endregion + +using System.Linq; +using Google.Protobuf.TestProtos; +using NUnit.Framework; +using UnitTest.Issues.TestProtos; + +namespace Google.Protobuf.Reflection +{ + /// + /// Tests for descriptors. (Not in its own namespace or broken up into individual classes as the + /// size doesn't warrant it. On the other hand, this makes me feel a bit dirty...) + /// + public class DescriptorsTest + { + [Test] + public void FileDescriptor() + { + FileDescriptor file = UnittestProto3Reflection.Descriptor; + + Assert.AreEqual("google/protobuf/unittest_proto3.proto", file.Name); + Assert.AreEqual("protobuf_unittest", file.Package); + + Assert.AreEqual("UnittestProto", file.Proto.Options.JavaOuterClassname); + Assert.AreEqual("google/protobuf/unittest_proto3.proto", file.Proto.Name); + + // unittest.proto doesn't have any public imports, but unittest_import.proto does. 
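+ // In other words (annotation only): Dependencies lists every direct import of the file, while
+ // PublicDependencies lists only those re-exported via "import public", which is why the counts
+ // asserted below are 0 for unittest_proto3 itself and 1 for unittest_import_proto3.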
+ Assert.AreEqual(0, file.PublicDependencies.Count); + Assert.AreEqual(1, UnittestImportProto3Reflection.Descriptor.PublicDependencies.Count); + Assert.AreEqual(UnittestImportPublicProto3Reflection.Descriptor, UnittestImportProto3Reflection.Descriptor.PublicDependencies[0]); + + Assert.AreEqual(1, file.Dependencies.Count); + Assert.AreEqual(UnittestImportProto3Reflection.Descriptor, file.Dependencies[0]); + + MessageDescriptor messageType = TestAllTypes.Descriptor; + Assert.AreSame(typeof(TestAllTypes), messageType.ClrType); + Assert.AreSame(TestAllTypes.Parser, messageType.Parser); + Assert.AreEqual(messageType, file.MessageTypes[0]); + Assert.AreEqual(messageType, file.FindTypeByName("TestAllTypes")); + Assert.Null(file.FindTypeByName("NoSuchType")); + Assert.Null(file.FindTypeByName("protobuf_unittest.TestAllTypes")); + for (int i = 0; i < file.MessageTypes.Count; i++) + { + Assert.AreEqual(i, file.MessageTypes[i].Index); + } + + Assert.AreEqual(file.EnumTypes[0], file.FindTypeByName("ForeignEnum")); + Assert.Null(file.FindTypeByName("NoSuchType")); + Assert.Null(file.FindTypeByName("protobuf_unittest.ForeignEnum")); + Assert.AreEqual(1, UnittestImportProto3Reflection.Descriptor.EnumTypes.Count); + Assert.AreEqual("ImportEnum", UnittestImportProto3Reflection.Descriptor.EnumTypes[0].Name); + for (int i = 0; i < file.EnumTypes.Count; i++) + { + Assert.AreEqual(i, file.EnumTypes[i].Index); + } + + Assert.AreEqual(10, file.SerializedData[0]); + } + + [Test] + public void MessageDescriptor() + { + MessageDescriptor messageType = TestAllTypes.Descriptor; + MessageDescriptor nestedType = TestAllTypes.Types.NestedMessage.Descriptor; + + Assert.AreEqual("TestAllTypes", messageType.Name); + Assert.AreEqual("protobuf_unittest.TestAllTypes", messageType.FullName); + Assert.AreEqual(UnittestProto3Reflection.Descriptor, messageType.File); + Assert.IsNull(messageType.ContainingType); + Assert.IsNull(messageType.Proto.Options); + + Assert.AreEqual("TestAllTypes", messageType.Name); + + Assert.AreEqual("NestedMessage", nestedType.Name); + Assert.AreEqual("protobuf_unittest.TestAllTypes.NestedMessage", nestedType.FullName); + Assert.AreEqual(UnittestProto3Reflection.Descriptor, nestedType.File); + Assert.AreEqual(messageType, nestedType.ContainingType); + + FieldDescriptor field = messageType.Fields.InDeclarationOrder()[0]; + Assert.AreEqual("single_int32", field.Name); + Assert.AreEqual(field, messageType.FindDescriptor("single_int32")); + Assert.Null(messageType.FindDescriptor("no_such_field")); + Assert.AreEqual(field, messageType.FindFieldByNumber(1)); + Assert.Null(messageType.FindFieldByNumber(571283)); + var fieldsInDeclarationOrder = messageType.Fields.InDeclarationOrder(); + for (int i = 0; i < fieldsInDeclarationOrder.Count; i++) + { + Assert.AreEqual(i, fieldsInDeclarationOrder[i].Index); + } + + Assert.AreEqual(nestedType, messageType.NestedTypes[0]); + Assert.AreEqual(nestedType, messageType.FindDescriptor("NestedMessage")); + Assert.Null(messageType.FindDescriptor("NoSuchType")); + for (int i = 0; i < messageType.NestedTypes.Count; i++) + { + Assert.AreEqual(i, messageType.NestedTypes[i].Index); + } + + Assert.AreEqual(messageType.EnumTypes[0], messageType.FindDescriptor("NestedEnum")); + Assert.Null(messageType.FindDescriptor("NoSuchType")); + for (int i = 0; i < messageType.EnumTypes.Count; i++) + { + Assert.AreEqual(i, messageType.EnumTypes[i].Index); + } + } + + [Test] + public void FieldDescriptor() + { + MessageDescriptor messageType = TestAllTypes.Descriptor; + FieldDescriptor 
primitiveField = messageType.FindDescriptor("single_int32"); + FieldDescriptor enumField = messageType.FindDescriptor("single_nested_enum"); + FieldDescriptor messageField = messageType.FindDescriptor("single_foreign_message"); + + Assert.AreEqual("single_int32", primitiveField.Name); + Assert.AreEqual("protobuf_unittest.TestAllTypes.single_int32", + primitiveField.FullName); + Assert.AreEqual(1, primitiveField.FieldNumber); + Assert.AreEqual(messageType, primitiveField.ContainingType); + Assert.AreEqual(UnittestProto3Reflection.Descriptor, primitiveField.File); + Assert.AreEqual(FieldType.Int32, primitiveField.FieldType); + Assert.IsNull(primitiveField.Proto.Options); + + Assert.AreEqual("single_nested_enum", enumField.Name); + Assert.AreEqual(FieldType.Enum, enumField.FieldType); + // Assert.AreEqual(TestAllTypes.Types.NestedEnum.DescriptorProtoFile, enumField.EnumType); + + Assert.AreEqual("single_foreign_message", messageField.Name); + Assert.AreEqual(FieldType.Message, messageField.FieldType); + Assert.AreEqual(ForeignMessage.Descriptor, messageField.MessageType); + } + + [Test] + public void FieldDescriptorLabel() + { + FieldDescriptor singleField = + TestAllTypes.Descriptor.FindDescriptor("single_int32"); + FieldDescriptor repeatedField = + TestAllTypes.Descriptor.FindDescriptor("repeated_int32"); + + Assert.IsFalse(singleField.IsRepeated); + Assert.IsTrue(repeatedField.IsRepeated); + } + + [Test] + public void EnumDescriptor() + { + // Note: this test is a bit different to the Java version because there's no static way of getting to the descriptor + EnumDescriptor enumType = UnittestProto3Reflection.Descriptor.FindTypeByName("ForeignEnum"); + EnumDescriptor nestedType = TestAllTypes.Descriptor.FindDescriptor("NestedEnum"); + + Assert.AreEqual("ForeignEnum", enumType.Name); + Assert.AreEqual("protobuf_unittest.ForeignEnum", enumType.FullName); + Assert.AreEqual(UnittestProto3Reflection.Descriptor, enumType.File); + Assert.Null(enumType.ContainingType); + Assert.Null(enumType.Proto.Options); + + Assert.AreEqual("NestedEnum", nestedType.Name); + Assert.AreEqual("protobuf_unittest.TestAllTypes.NestedEnum", + nestedType.FullName); + Assert.AreEqual(UnittestProto3Reflection.Descriptor, nestedType.File); + Assert.AreEqual(TestAllTypes.Descriptor, nestedType.ContainingType); + + EnumValueDescriptor value = enumType.FindValueByName("FOREIGN_FOO"); + Assert.AreEqual(value, enumType.Values[1]); + Assert.AreEqual("FOREIGN_FOO", value.Name); + Assert.AreEqual(4, value.Number); + Assert.AreEqual((int) ForeignEnum.ForeignFoo, value.Number); + Assert.AreEqual(value, enumType.FindValueByNumber(4)); + Assert.Null(enumType.FindValueByName("NO_SUCH_VALUE")); + for (int i = 0; i < enumType.Values.Count; i++) + { + Assert.AreEqual(i, enumType.Values[i].Index); + } + } + + [Test] + public void OneofDescriptor() + { + OneofDescriptor descriptor = TestAllTypes.Descriptor.FindDescriptor("oneof_field"); + Assert.AreEqual("oneof_field", descriptor.Name); + Assert.AreEqual("protobuf_unittest.TestAllTypes.oneof_field", descriptor.FullName); + + var expectedFields = new[] { + TestAllTypes.OneofBytesFieldNumber, + TestAllTypes.OneofNestedMessageFieldNumber, + TestAllTypes.OneofStringFieldNumber, + TestAllTypes.OneofUint32FieldNumber } + .Select(fieldNumber => TestAllTypes.Descriptor.FindFieldByNumber(fieldNumber)) + .ToList(); + foreach (var field in expectedFields) + { + Assert.AreSame(descriptor, field.ContainingOneof); + } + + CollectionAssert.AreEquivalent(expectedFields, descriptor.Fields); + } + + [Test] + 
public void MapEntryMessageDescriptor() + { + var descriptor = MapWellKnownTypes.Descriptor.NestedTypes[0]; + Assert.IsNull(descriptor.Parser); + Assert.IsNull(descriptor.ClrType); + Assert.IsNull(descriptor.Fields[1].Accessor); + } + + // From TestFieldOrdering: + // string my_string = 11; + // int64 my_int = 1; + // float my_float = 101; + // NestedMessage single_nested_message = 200; + [Test] + public void FieldListOrderings() + { + var fields = TestFieldOrderings.Descriptor.Fields; + Assert.AreEqual(new[] { 11, 1, 101, 200 }, fields.InDeclarationOrder().Select(x => x.FieldNumber)); + Assert.AreEqual(new[] { 1, 11, 101, 200 }, fields.InFieldNumberOrder().Select(x => x.FieldNumber)); + } + + + [Test] + public void DescriptorProtoFileDescriptor() + { + var descriptor = Google.Protobuf.Reflection.FileDescriptor.DescriptorProtoFileDescriptor; + Assert.AreEqual("google/protobuf/descriptor.proto", descriptor.Name); + } + } +} diff --git a/packager/third_party/protobuf/csharp/src/Google.Protobuf.Test/Reflection/FieldAccessTest.cs b/packager/third_party/protobuf/csharp/src/Google.Protobuf.Test/Reflection/FieldAccessTest.cs new file mode 100644 index 0000000000..a488af30d4 --- /dev/null +++ b/packager/third_party/protobuf/csharp/src/Google.Protobuf.Test/Reflection/FieldAccessTest.cs @@ -0,0 +1,218 @@ +#region Copyright notice and license +// Protocol Buffers - Google's data interchange format +// Copyright 2015 Google Inc. All rights reserved. +// https://developers.google.com/protocol-buffers/ +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
+#endregion + +using Google.Protobuf.TestProtos; +using NUnit.Framework; +using System; +using System.Collections; +using System.Collections.Generic; + +namespace Google.Protobuf.Reflection +{ + public class FieldAccessTest + { + [Test] + public void GetValue() + { + var message = SampleMessages.CreateFullTestAllTypes(); + var fields = TestAllTypes.Descriptor.Fields; + Assert.AreEqual(message.SingleBool, fields[TestAllTypes.SingleBoolFieldNumber].Accessor.GetValue(message)); + Assert.AreEqual(message.SingleBytes, fields[TestAllTypes.SingleBytesFieldNumber].Accessor.GetValue(message)); + Assert.AreEqual(message.SingleDouble, fields[TestAllTypes.SingleDoubleFieldNumber].Accessor.GetValue(message)); + Assert.AreEqual(message.SingleFixed32, fields[TestAllTypes.SingleFixed32FieldNumber].Accessor.GetValue(message)); + Assert.AreEqual(message.SingleFixed64, fields[TestAllTypes.SingleFixed64FieldNumber].Accessor.GetValue(message)); + Assert.AreEqual(message.SingleFloat, fields[TestAllTypes.SingleFloatFieldNumber].Accessor.GetValue(message)); + Assert.AreEqual(message.SingleForeignEnum, fields[TestAllTypes.SingleForeignEnumFieldNumber].Accessor.GetValue(message)); + Assert.AreEqual(message.SingleForeignMessage, fields[TestAllTypes.SingleForeignMessageFieldNumber].Accessor.GetValue(message)); + Assert.AreEqual(message.SingleImportEnum, fields[TestAllTypes.SingleImportEnumFieldNumber].Accessor.GetValue(message)); + Assert.AreEqual(message.SingleImportMessage, fields[TestAllTypes.SingleImportMessageFieldNumber].Accessor.GetValue(message)); + Assert.AreEqual(message.SingleInt32, fields[TestAllTypes.SingleInt32FieldNumber].Accessor.GetValue(message)); + Assert.AreEqual(message.SingleInt64, fields[TestAllTypes.SingleInt64FieldNumber].Accessor.GetValue(message)); + Assert.AreEqual(message.SingleNestedEnum, fields[TestAllTypes.SingleNestedEnumFieldNumber].Accessor.GetValue(message)); + Assert.AreEqual(message.SingleNestedMessage, fields[TestAllTypes.SingleNestedMessageFieldNumber].Accessor.GetValue(message)); + Assert.AreEqual(message.SinglePublicImportMessage, fields[TestAllTypes.SinglePublicImportMessageFieldNumber].Accessor.GetValue(message)); + Assert.AreEqual(message.SingleSint32, fields[TestAllTypes.SingleSint32FieldNumber].Accessor.GetValue(message)); + Assert.AreEqual(message.SingleSint64, fields[TestAllTypes.SingleSint64FieldNumber].Accessor.GetValue(message)); + Assert.AreEqual(message.SingleString, fields[TestAllTypes.SingleStringFieldNumber].Accessor.GetValue(message)); + Assert.AreEqual(message.SingleSfixed32, fields[TestAllTypes.SingleSfixed32FieldNumber].Accessor.GetValue(message)); + Assert.AreEqual(message.SingleSfixed64, fields[TestAllTypes.SingleSfixed64FieldNumber].Accessor.GetValue(message)); + Assert.AreEqual(message.SingleUint32, fields[TestAllTypes.SingleUint32FieldNumber].Accessor.GetValue(message)); + Assert.AreEqual(message.SingleUint64, fields[TestAllTypes.SingleUint64FieldNumber].Accessor.GetValue(message)); + Assert.AreEqual(message.OneofBytes, fields[TestAllTypes.OneofBytesFieldNumber].Accessor.GetValue(message)); + Assert.AreEqual(message.OneofString, fields[TestAllTypes.OneofStringFieldNumber].Accessor.GetValue(message)); + Assert.AreEqual(message.OneofNestedMessage, fields[TestAllTypes.OneofNestedMessageFieldNumber].Accessor.GetValue(message)); + Assert.AreEqual(message.OneofUint32, fields[TestAllTypes.OneofUint32FieldNumber].Accessor.GetValue(message)); + + // Just one example for repeated fields - they're all just returning the list + var list = (IList) 
fields[TestAllTypes.RepeatedInt32FieldNumber].Accessor.GetValue(message); + Assert.AreEqual(message.RepeatedInt32, list); + Assert.AreEqual(message.RepeatedInt32[0], list[0]); // Just in case there was any doubt... + + // Just a single map field, for the same reason + var mapMessage = new TestMap { MapStringString = { { "key1", "value1" }, { "key2", "value2" } } }; + fields = TestMap.Descriptor.Fields; + var dictionary = (IDictionary) fields[TestMap.MapStringStringFieldNumber].Accessor.GetValue(mapMessage); + Assert.AreEqual(mapMessage.MapStringString, dictionary); + Assert.AreEqual("value1", dictionary["key1"]); + } + + [Test] + public void Clear() + { + var message = SampleMessages.CreateFullTestAllTypes(); + var fields = TestAllTypes.Descriptor.Fields; + fields[TestAllTypes.SingleBoolFieldNumber].Accessor.Clear(message); + fields[TestAllTypes.SingleInt32FieldNumber].Accessor.Clear(message); + fields[TestAllTypes.SingleStringFieldNumber].Accessor.Clear(message); + fields[TestAllTypes.SingleBytesFieldNumber].Accessor.Clear(message); + fields[TestAllTypes.SingleForeignEnumFieldNumber].Accessor.Clear(message); + fields[TestAllTypes.SingleForeignMessageFieldNumber].Accessor.Clear(message); + fields[TestAllTypes.RepeatedDoubleFieldNumber].Accessor.Clear(message); + + var expected = new TestAllTypes(SampleMessages.CreateFullTestAllTypes()) + { + SingleBool = false, + SingleInt32 = 0, + SingleString = "", + SingleBytes = ByteString.Empty, + SingleForeignEnum = 0, + SingleForeignMessage = null, + }; + expected.RepeatedDouble.Clear(); + + Assert.AreEqual(expected, message); + + // Separately, maps. + var mapMessage = new TestMap { MapStringString = { { "key1", "value1" }, { "key2", "value2" } } }; + fields = TestMap.Descriptor.Fields; + fields[TestMap.MapStringStringFieldNumber].Accessor.Clear(mapMessage); + Assert.AreEqual(0, mapMessage.MapStringString.Count); + } + + [Test] + public void SetValue_SingleFields() + { + // Just a sample (primitives, messages, enums, strings, byte strings) + var message = SampleMessages.CreateFullTestAllTypes(); + var fields = TestAllTypes.Descriptor.Fields; + fields[TestAllTypes.SingleBoolFieldNumber].Accessor.SetValue(message, false); + fields[TestAllTypes.SingleInt32FieldNumber].Accessor.SetValue(message, 500); + fields[TestAllTypes.SingleStringFieldNumber].Accessor.SetValue(message, "It's a string"); + fields[TestAllTypes.SingleBytesFieldNumber].Accessor.SetValue(message, ByteString.CopyFrom(99, 98, 97)); + fields[TestAllTypes.SingleForeignEnumFieldNumber].Accessor.SetValue(message, ForeignEnum.ForeignFoo); + fields[TestAllTypes.SingleForeignMessageFieldNumber].Accessor.SetValue(message, new ForeignMessage { C = 12345 }); + fields[TestAllTypes.SingleDoubleFieldNumber].Accessor.SetValue(message, 20150701.5); + + var expected = new TestAllTypes(SampleMessages.CreateFullTestAllTypes()) + { + SingleBool = false, + SingleInt32 = 500, + SingleString = "It's a string", + SingleBytes = ByteString.CopyFrom(99, 98, 97), + SingleForeignEnum = ForeignEnum.ForeignFoo, + SingleForeignMessage = new ForeignMessage { C = 12345 }, + SingleDouble = 20150701.5 + }; + + Assert.AreEqual(expected, message); + } + + [Test] + public void SetValue_SingleFields_WrongType() + { + IMessage message = SampleMessages.CreateFullTestAllTypes(); + var fields = message.Descriptor.Fields; + Assert.Throws(() => fields[TestAllTypes.SingleBoolFieldNumber].Accessor.SetValue(message, "This isn't a bool")); + } + + [Test] + public void SetValue_MapFields() + { + IMessage message = new TestMap(); + var 
fields = message.Descriptor.Fields; + Assert.Throws(() => fields[TestMap.MapStringStringFieldNumber].Accessor.SetValue(message, new Dictionary())); + } + + [Test] + public void SetValue_RepeatedFields() + { + IMessage message = SampleMessages.CreateFullTestAllTypes(); + var fields = message.Descriptor.Fields; + Assert.Throws(() => fields[TestAllTypes.RepeatedDoubleFieldNumber].Accessor.SetValue(message, new double[10])); + } + + [Test] + public void GetValue_IncorrectType() + { + IMessage message = SampleMessages.CreateFullTestAllTypes(); + var fields = message.Descriptor.Fields; + Assert.Throws(() => fields[TestAllTypes.SingleBoolFieldNumber].Accessor.GetValue(new TestMap())); + } + + [Test] + public void Oneof() + { + var message = new TestAllTypes(); + var descriptor = TestAllTypes.Descriptor; + Assert.AreEqual(1, descriptor.Oneofs.Count); + var oneof = descriptor.Oneofs[0]; + Assert.AreEqual("oneof_field", oneof.Name); + Assert.IsNull(oneof.Accessor.GetCaseFieldDescriptor(message)); + + message.OneofString = "foo"; + Assert.AreSame(descriptor.Fields[TestAllTypes.OneofStringFieldNumber], oneof.Accessor.GetCaseFieldDescriptor(message)); + + message.OneofUint32 = 10; + Assert.AreSame(descriptor.Fields[TestAllTypes.OneofUint32FieldNumber], oneof.Accessor.GetCaseFieldDescriptor(message)); + + oneof.Accessor.Clear(message); + Assert.AreEqual(TestAllTypes.OneofFieldOneofCase.None, message.OneofFieldCase); + } + + [Test] + public void FieldDescriptor_ByName() + { + var descriptor = TestAllTypes.Descriptor; + Assert.AreSame( + descriptor.Fields[TestAllTypes.SingleBoolFieldNumber], + descriptor.Fields["single_bool"]); + } + + [Test] + public void FieldDescriptor_NotFound() + { + var descriptor = TestAllTypes.Descriptor; + Assert.Throws(() => descriptor.Fields[999999].ToString()); + Assert.Throws(() => descriptor.Fields["not found"].ToString()); + } + } +} diff --git a/packager/third_party/protobuf/csharp/src/Google.Protobuf.Test/Reflection/TypeRegistryTest.cs b/packager/third_party/protobuf/csharp/src/Google.Protobuf.Test/Reflection/TypeRegistryTest.cs new file mode 100644 index 0000000000..5be7ca2361 --- /dev/null +++ b/packager/third_party/protobuf/csharp/src/Google.Protobuf.Test/Reflection/TypeRegistryTest.cs @@ -0,0 +1,94 @@ +#region Copyright notice and license +// Protocol Buffers - Google's data interchange format +// Copyright 2015 Google Inc. All rights reserved. +// https://developers.google.com/protocol-buffers/ +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +#endregion + +using Google.Protobuf.TestProtos; +using Google.Protobuf.WellKnownTypes; +using NUnit.Framework; + +namespace Google.Protobuf.Reflection +{ + public class TypeRegistryTest + { + // Most of our tests use messages. Simple test that we really can use files... + [Test] + public void CreateWithFileDescriptor() + { + var registry = TypeRegistry.FromFiles(DurationReflection.Descriptor, StructReflection.Descriptor); + AssertDescriptorPresent(registry, Duration.Descriptor); + AssertDescriptorPresent(registry, ListValue.Descriptor); + AssertDescriptorAbsent(registry, Timestamp.Descriptor); + } + + [Test] + public void TypesFromSameFile() + { + // Just for kicks, let's start with a nested type + var registry = TypeRegistry.FromMessages(TestAllTypes.Types.NestedMessage.Descriptor); + // Top-level... + AssertDescriptorPresent(registry, TestFieldOrderings.Descriptor); + // ... and nested (not the same as the original NestedMessage!) + AssertDescriptorPresent(registry, TestFieldOrderings.Types.NestedMessage.Descriptor); + } + + [Test] + public void DependenciesAreIncluded() + { + var registry = TypeRegistry.FromMessages(TestAllTypes.Descriptor); + // Direct dependencies + AssertDescriptorPresent(registry, ImportMessage.Descriptor); + // Public dependencies + AssertDescriptorPresent(registry, PublicImportMessage.Descriptor); + } + + [Test] + public void DuplicateFiles() + { + // Duplicates via dependencies and simply via repetition + var registry = TypeRegistry.FromFiles( + UnittestProto3Reflection.Descriptor, UnittestImportProto3Reflection.Descriptor, + TimestampReflection.Descriptor, TimestampReflection.Descriptor); + AssertDescriptorPresent(registry, TestAllTypes.Descriptor); + AssertDescriptorPresent(registry, ImportMessage.Descriptor); + AssertDescriptorPresent(registry, Timestamp.Descriptor); + } + + private static void AssertDescriptorPresent(TypeRegistry registry, MessageDescriptor descriptor) + { + Assert.AreSame(descriptor, registry.Find(descriptor.FullName)); + } + + private static void AssertDescriptorAbsent(TypeRegistry registry, MessageDescriptor descriptor) + { + Assert.IsNull(registry.Find(descriptor.FullName)); + } + } +} diff --git a/packager/third_party/protobuf/csharp/src/Google.Protobuf.Test/SampleEnum.cs b/packager/third_party/protobuf/csharp/src/Google.Protobuf.Test/SampleEnum.cs new file mode 100644 index 0000000000..77447afa12 --- /dev/null +++ b/packager/third_party/protobuf/csharp/src/Google.Protobuf.Test/SampleEnum.cs @@ -0,0 +1,42 @@ +#region Copyright notice and license +// Protocol Buffers - Google's data interchange format +// Copyright 2015 Google Inc. All rights reserved. +// https://developers.google.com/protocol-buffers/ +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. 
+// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +#endregion + +namespace Google.Protobuf +{ + // Just a sample enum with positive and negative values to be used in tests. + internal enum SampleEnum + { + NegativeValue = -2, + None = 0, + PositiveValue = 3 + } +} diff --git a/packager/third_party/protobuf/csharp/src/Google.Protobuf.Test/SampleMessages.cs b/packager/third_party/protobuf/csharp/src/Google.Protobuf.Test/SampleMessages.cs new file mode 100644 index 0000000000..ffa4e2a7c2 --- /dev/null +++ b/packager/third_party/protobuf/csharp/src/Google.Protobuf.Test/SampleMessages.cs @@ -0,0 +1,99 @@ +#region Copyright notice and license +// Protocol Buffers - Google's data interchange format +// Copyright 2015 Google Inc. All rights reserved. +// https://developers.google.com/protocol-buffers/ +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
+#endregion + +using System; +using Google.Protobuf.TestProtos; + +namespace Google.Protobuf +{ + /// + /// Helper methods to create sample instances of types generated from unit test messages. + /// + public class SampleMessages + { + /// + /// Creates a new sample TestAllTypes message with all fields populated. + /// The "oneof" field is populated with the string property (OneofString). + /// + public static TestAllTypes CreateFullTestAllTypes() + { + return new TestAllTypes + { + SingleBool = true, + SingleBytes = ByteString.CopyFrom(1, 2, 3, 4), + SingleDouble = 23.5, + SingleFixed32 = 23, + SingleFixed64 = 1234567890123, + SingleFloat = 12.25f, + SingleForeignEnum = ForeignEnum.ForeignBar, + SingleForeignMessage = new ForeignMessage { C = 10 }, + SingleImportEnum = ImportEnum.ImportBaz, + SingleImportMessage = new ImportMessage { D = 20 }, + SingleInt32 = 100, + SingleInt64 = 3210987654321, + SingleNestedEnum = TestAllTypes.Types.NestedEnum.Foo, + SingleNestedMessage = new TestAllTypes.Types.NestedMessage { Bb = 35 }, + SinglePublicImportMessage = new PublicImportMessage { E = 54 }, + SingleSfixed32 = -123, + SingleSfixed64 = -12345678901234, + SingleSint32 = -456, + SingleSint64 = -12345678901235, + SingleString = "test", + SingleUint32 = UInt32.MaxValue, + SingleUint64 = UInt64.MaxValue, + RepeatedBool = { true, false }, + RepeatedBytes = { ByteString.CopyFrom(1, 2, 3, 4), ByteString.CopyFrom(5, 6), ByteString.CopyFrom(new byte[1000]) }, + RepeatedDouble = { -12.25, 23.5 }, + RepeatedFixed32 = { UInt32.MaxValue, 23 }, + RepeatedFixed64 = { UInt64.MaxValue, 1234567890123 }, + RepeatedFloat = { 100f, 12.25f }, + RepeatedForeignEnum = { ForeignEnum.ForeignFoo, ForeignEnum.ForeignBar }, + RepeatedForeignMessage = { new ForeignMessage(), new ForeignMessage { C = 10 } }, + RepeatedImportEnum = { ImportEnum.ImportBaz, ImportEnum.Unspecified }, + RepeatedImportMessage = { new ImportMessage { D = 20 }, new ImportMessage { D = 25 } }, + RepeatedInt32 = { 100, 200 }, + RepeatedInt64 = { 3210987654321, Int64.MaxValue }, + RepeatedNestedEnum = { TestAllTypes.Types.NestedEnum.Foo, TestAllTypes.Types.NestedEnum.Neg }, + RepeatedNestedMessage = { new TestAllTypes.Types.NestedMessage { Bb = 35 }, new TestAllTypes.Types.NestedMessage { Bb = 10 } }, + RepeatedPublicImportMessage = { new PublicImportMessage { E = 54 }, new PublicImportMessage { E = -1 } }, + RepeatedSfixed32 = { -123, 123 }, + RepeatedSfixed64 = { -12345678901234, 12345678901234 }, + RepeatedSint32 = { -456, 100 }, + RepeatedSint64 = { -12345678901235, 123 }, + RepeatedString = { "foo", "bar" }, + RepeatedUint32 = { UInt32.MaxValue, UInt32.MinValue }, + RepeatedUint64 = { UInt64.MaxValue, UInt32.MinValue }, + OneofString = "Oneof string" + }; + } + } +} \ No newline at end of file diff --git a/packager/third_party/protobuf/csharp/src/Google.Protobuf.Test/TestCornerCases.cs b/packager/third_party/protobuf/csharp/src/Google.Protobuf.Test/TestCornerCases.cs new file mode 100644 index 0000000000..fd75b19f42 --- /dev/null +++ b/packager/third_party/protobuf/csharp/src/Google.Protobuf.Test/TestCornerCases.cs @@ -0,0 +1,62 @@ +#region Copyright notice and license +// Protocol Buffers - Google's data interchange format +// Copyright 2008 Google Inc. All rights reserved. 
+// https://developers.google.com/protocol-buffers/ +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +#endregion + +using UnitTest.Issues.TestProtos; +using NUnit.Framework; + +namespace Google.Protobuf +{ + public class TestCornerCases + { + [Test] + public void TestRoundTripNegativeEnums() + { + NegativeEnumMessage msg = new NegativeEnumMessage + { + Value = NegativeEnum.MinusOne, + Values = { NegativeEnum.Zero, NegativeEnum.MinusOne, NegativeEnum.FiveBelow }, + PackedValues = { NegativeEnum.Zero, NegativeEnum.MinusOne, NegativeEnum.FiveBelow } + }; + + Assert.AreEqual(58, msg.CalculateSize()); + + byte[] bytes = new byte[58]; + CodedOutputStream output = new CodedOutputStream(bytes); + + msg.WriteTo(output); + Assert.AreEqual(0, output.SpaceLeft); + + NegativeEnumMessage copy = NegativeEnumMessage.Parser.ParseFrom(bytes); + Assert.AreEqual(msg, copy); + } + } +} diff --git a/packager/third_party/protobuf/csharp/src/Google.Protobuf.Test/TestProtos/ForeignMessagePartial.cs b/packager/third_party/protobuf/csharp/src/Google.Protobuf.Test/TestProtos/ForeignMessagePartial.cs new file mode 100644 index 0000000000..5663a69902 --- /dev/null +++ b/packager/third_party/protobuf/csharp/src/Google.Protobuf.Test/TestProtos/ForeignMessagePartial.cs @@ -0,0 +1,45 @@ +#region Copyright notice and license +// Protocol Buffers - Google's data interchange format +// Copyright 2016 Google Inc. All rights reserved. +// https://developers.google.com/protocol-buffers/ +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. 
nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +#endregion + +namespace Google.Protobuf.TestProtos +{ + /// + /// A message with custom diagnostics (to test that they work). + /// + public partial class ForeignMessage : ICustomDiagnosticMessage + { + public string ToDiagnosticString() + { + return $"{{ \"c\": {C}, \"@cInHex\": \"{C:x}\" }}"; + } + } +} diff --git a/packager/third_party/protobuf/csharp/src/Google.Protobuf.Test/TestProtos/MapUnittestProto3.cs b/packager/third_party/protobuf/csharp/src/Google.Protobuf.Test/TestProtos/MapUnittestProto3.cs new file mode 100644 index 0000000000..3ba4a2b663 --- /dev/null +++ b/packager/third_party/protobuf/csharp/src/Google.Protobuf.Test/TestProtos/MapUnittestProto3.cs @@ -0,0 +1,1471 @@ +// Generated by the protocol buffer compiler. DO NOT EDIT! +// source: google/protobuf/map_unittest_proto3.proto +#pragma warning disable 1591, 0612, 3021 +#region Designer generated code + +using pb = global::Google.Protobuf; +using pbc = global::Google.Protobuf.Collections; +using pbr = global::Google.Protobuf.Reflection; +using scg = global::System.Collections.Generic; +namespace Google.Protobuf.TestProtos { + + /// Holder for reflection information generated from google/protobuf/map_unittest_proto3.proto + [global::System.Diagnostics.DebuggerNonUserCodeAttribute()] + public static partial class MapUnittestProto3Reflection { + + #region Descriptor + /// File descriptor for google/protobuf/map_unittest_proto3.proto + public static pbr::FileDescriptor Descriptor { + get { return descriptor; } + } + private static pbr::FileDescriptor descriptor; + + static MapUnittestProto3Reflection() { + byte[] descriptorData = global::System.Convert.FromBase64String( + string.Concat( + "Cilnb29nbGUvcHJvdG9idWYvbWFwX3VuaXR0ZXN0X3Byb3RvMy5wcm90bxIR", + "cHJvdG9idWZfdW5pdHRlc3QaJWdvb2dsZS9wcm90b2J1Zi91bml0dGVzdF9w", + "cm90bzMucHJvdG8ilhIKB1Rlc3RNYXASRgoPbWFwX2ludDMyX2ludDMyGAEg", + "AygLMi0ucHJvdG9idWZfdW5pdHRlc3QuVGVzdE1hcC5NYXBJbnQzMkludDMy", + "RW50cnkSRgoPbWFwX2ludDY0X2ludDY0GAIgAygLMi0ucHJvdG9idWZfdW5p", + "dHRlc3QuVGVzdE1hcC5NYXBJbnQ2NEludDY0RW50cnkSSgoRbWFwX3VpbnQz", + "Ml91aW50MzIYAyADKAsyLy5wcm90b2J1Zl91bml0dGVzdC5UZXN0TWFwLk1h", + "cFVpbnQzMlVpbnQzMkVudHJ5EkoKEW1hcF91aW50NjRfdWludDY0GAQgAygL", + "Mi8ucHJvdG9idWZfdW5pdHRlc3QuVGVzdE1hcC5NYXBVaW50NjRVaW50NjRF", + "bnRyeRJKChFtYXBfc2ludDMyX3NpbnQzMhgFIAMoCzIvLnByb3RvYnVmX3Vu", + "aXR0ZXN0LlRlc3RNYXAuTWFwU2ludDMyU2ludDMyRW50cnkSSgoRbWFwX3Np", + "bnQ2NF9zaW50NjQYBiADKAsyLy5wcm90b2J1Zl91bml0dGVzdC5UZXN0TWFw", + "Lk1hcFNpbnQ2NFNpbnQ2NEVudHJ5Ek4KE21hcF9maXhlZDMyX2ZpeGVkMzIY", + 
"ByADKAsyMS5wcm90b2J1Zl91bml0dGVzdC5UZXN0TWFwLk1hcEZpeGVkMzJG", + "aXhlZDMyRW50cnkSTgoTbWFwX2ZpeGVkNjRfZml4ZWQ2NBgIIAMoCzIxLnBy", + "b3RvYnVmX3VuaXR0ZXN0LlRlc3RNYXAuTWFwRml4ZWQ2NEZpeGVkNjRFbnRy", + "eRJSChVtYXBfc2ZpeGVkMzJfc2ZpeGVkMzIYCSADKAsyMy5wcm90b2J1Zl91", + "bml0dGVzdC5UZXN0TWFwLk1hcFNmaXhlZDMyU2ZpeGVkMzJFbnRyeRJSChVt", + "YXBfc2ZpeGVkNjRfc2ZpeGVkNjQYCiADKAsyMy5wcm90b2J1Zl91bml0dGVz", + "dC5UZXN0TWFwLk1hcFNmaXhlZDY0U2ZpeGVkNjRFbnRyeRJGCg9tYXBfaW50", + "MzJfZmxvYXQYCyADKAsyLS5wcm90b2J1Zl91bml0dGVzdC5UZXN0TWFwLk1h", + "cEludDMyRmxvYXRFbnRyeRJIChBtYXBfaW50MzJfZG91YmxlGAwgAygLMi4u", + "cHJvdG9idWZfdW5pdHRlc3QuVGVzdE1hcC5NYXBJbnQzMkRvdWJsZUVudHJ5", + "EkIKDW1hcF9ib29sX2Jvb2wYDSADKAsyKy5wcm90b2J1Zl91bml0dGVzdC5U", + "ZXN0TWFwLk1hcEJvb2xCb29sRW50cnkSSgoRbWFwX3N0cmluZ19zdHJpbmcY", + "DiADKAsyLy5wcm90b2J1Zl91bml0dGVzdC5UZXN0TWFwLk1hcFN0cmluZ1N0", + "cmluZ0VudHJ5EkYKD21hcF9pbnQzMl9ieXRlcxgPIAMoCzItLnByb3RvYnVm", + "X3VuaXR0ZXN0LlRlc3RNYXAuTWFwSW50MzJCeXRlc0VudHJ5EkQKDm1hcF9p", + "bnQzMl9lbnVtGBAgAygLMiwucHJvdG9idWZfdW5pdHRlc3QuVGVzdE1hcC5N", + "YXBJbnQzMkVudW1FbnRyeRJZChltYXBfaW50MzJfZm9yZWlnbl9tZXNzYWdl", + "GBEgAygLMjYucHJvdG9idWZfdW5pdHRlc3QuVGVzdE1hcC5NYXBJbnQzMkZv", + "cmVpZ25NZXNzYWdlRW50cnkaNAoSTWFwSW50MzJJbnQzMkVudHJ5EgsKA2tl", + "eRgBIAEoBRINCgV2YWx1ZRgCIAEoBToCOAEaNAoSTWFwSW50NjRJbnQ2NEVu", + "dHJ5EgsKA2tleRgBIAEoAxINCgV2YWx1ZRgCIAEoAzoCOAEaNgoUTWFwVWlu", + "dDMyVWludDMyRW50cnkSCwoDa2V5GAEgASgNEg0KBXZhbHVlGAIgASgNOgI4", + "ARo2ChRNYXBVaW50NjRVaW50NjRFbnRyeRILCgNrZXkYASABKAQSDQoFdmFs", + "dWUYAiABKAQ6AjgBGjYKFE1hcFNpbnQzMlNpbnQzMkVudHJ5EgsKA2tleRgB", + "IAEoERINCgV2YWx1ZRgCIAEoEToCOAEaNgoUTWFwU2ludDY0U2ludDY0RW50", + "cnkSCwoDa2V5GAEgASgSEg0KBXZhbHVlGAIgASgSOgI4ARo4ChZNYXBGaXhl", + "ZDMyRml4ZWQzMkVudHJ5EgsKA2tleRgBIAEoBxINCgV2YWx1ZRgCIAEoBzoC", + "OAEaOAoWTWFwRml4ZWQ2NEZpeGVkNjRFbnRyeRILCgNrZXkYASABKAYSDQoF", + "dmFsdWUYAiABKAY6AjgBGjoKGE1hcFNmaXhlZDMyU2ZpeGVkMzJFbnRyeRIL", + "CgNrZXkYASABKA8SDQoFdmFsdWUYAiABKA86AjgBGjoKGE1hcFNmaXhlZDY0", + "U2ZpeGVkNjRFbnRyeRILCgNrZXkYASABKBASDQoFdmFsdWUYAiABKBA6AjgB", + "GjQKEk1hcEludDMyRmxvYXRFbnRyeRILCgNrZXkYASABKAUSDQoFdmFsdWUY", + "AiABKAI6AjgBGjUKE01hcEludDMyRG91YmxlRW50cnkSCwoDa2V5GAEgASgF", + "Eg0KBXZhbHVlGAIgASgBOgI4ARoyChBNYXBCb29sQm9vbEVudHJ5EgsKA2tl", + "eRgBIAEoCBINCgV2YWx1ZRgCIAEoCDoCOAEaNgoUTWFwU3RyaW5nU3RyaW5n", + "RW50cnkSCwoDa2V5GAEgASgJEg0KBXZhbHVlGAIgASgJOgI4ARo0ChJNYXBJ", + "bnQzMkJ5dGVzRW50cnkSCwoDa2V5GAEgASgFEg0KBXZhbHVlGAIgASgMOgI4", + "ARpPChFNYXBJbnQzMkVudW1FbnRyeRILCgNrZXkYASABKAUSKQoFdmFsdWUY", + "AiABKA4yGi5wcm90b2J1Zl91bml0dGVzdC5NYXBFbnVtOgI4ARpgChtNYXBJ", + "bnQzMkZvcmVpZ25NZXNzYWdlRW50cnkSCwoDa2V5GAEgASgFEjAKBXZhbHVl", + "GAIgASgLMiEucHJvdG9idWZfdW5pdHRlc3QuRm9yZWlnbk1lc3NhZ2U6AjgB", + "IkEKEVRlc3RNYXBTdWJtZXNzYWdlEiwKCHRlc3RfbWFwGAEgASgLMhoucHJv", + "dG9idWZfdW5pdHRlc3QuVGVzdE1hcCK8AQoOVGVzdE1lc3NhZ2VNYXASUQoR", + "bWFwX2ludDMyX21lc3NhZ2UYASADKAsyNi5wcm90b2J1Zl91bml0dGVzdC5U", + "ZXN0TWVzc2FnZU1hcC5NYXBJbnQzMk1lc3NhZ2VFbnRyeRpXChRNYXBJbnQz", + "Mk1lc3NhZ2VFbnRyeRILCgNrZXkYASABKAUSLgoFdmFsdWUYAiABKAsyHy5w", + "cm90b2J1Zl91bml0dGVzdC5UZXN0QWxsVHlwZXM6AjgBIuMBCg9UZXN0U2Ft", + "ZVR5cGVNYXASOgoEbWFwMRgBIAMoCzIsLnByb3RvYnVmX3VuaXR0ZXN0LlRl", + "c3RTYW1lVHlwZU1hcC5NYXAxRW50cnkSOgoEbWFwMhgCIAMoCzIsLnByb3Rv", + "YnVmX3VuaXR0ZXN0LlRlc3RTYW1lVHlwZU1hcC5NYXAyRW50cnkaKwoJTWFw", + "MUVudHJ5EgsKA2tleRgBIAEoBRINCgV2YWx1ZRgCIAEoBToCOAEaKwoJTWFw", + "MkVudHJ5EgsKA2tleRgBIAEoBRINCgV2YWx1ZRgCIAEoBToCOAEi5BAKDFRl", + "c3RBcmVuYU1hcBJLCg9tYXBfaW50MzJfaW50MzIYASADKAsyMi5wcm90b2J1", + 
"Zl91bml0dGVzdC5UZXN0QXJlbmFNYXAuTWFwSW50MzJJbnQzMkVudHJ5EksK", + "D21hcF9pbnQ2NF9pbnQ2NBgCIAMoCzIyLnByb3RvYnVmX3VuaXR0ZXN0LlRl", + "c3RBcmVuYU1hcC5NYXBJbnQ2NEludDY0RW50cnkSTwoRbWFwX3VpbnQzMl91", + "aW50MzIYAyADKAsyNC5wcm90b2J1Zl91bml0dGVzdC5UZXN0QXJlbmFNYXAu", + "TWFwVWludDMyVWludDMyRW50cnkSTwoRbWFwX3VpbnQ2NF91aW50NjQYBCAD", + "KAsyNC5wcm90b2J1Zl91bml0dGVzdC5UZXN0QXJlbmFNYXAuTWFwVWludDY0", + "VWludDY0RW50cnkSTwoRbWFwX3NpbnQzMl9zaW50MzIYBSADKAsyNC5wcm90", + "b2J1Zl91bml0dGVzdC5UZXN0QXJlbmFNYXAuTWFwU2ludDMyU2ludDMyRW50", + "cnkSTwoRbWFwX3NpbnQ2NF9zaW50NjQYBiADKAsyNC5wcm90b2J1Zl91bml0", + "dGVzdC5UZXN0QXJlbmFNYXAuTWFwU2ludDY0U2ludDY0RW50cnkSUwoTbWFw", + "X2ZpeGVkMzJfZml4ZWQzMhgHIAMoCzI2LnByb3RvYnVmX3VuaXR0ZXN0LlRl", + "c3RBcmVuYU1hcC5NYXBGaXhlZDMyRml4ZWQzMkVudHJ5ElMKE21hcF9maXhl", + "ZDY0X2ZpeGVkNjQYCCADKAsyNi5wcm90b2J1Zl91bml0dGVzdC5UZXN0QXJl", + "bmFNYXAuTWFwRml4ZWQ2NEZpeGVkNjRFbnRyeRJXChVtYXBfc2ZpeGVkMzJf", + "c2ZpeGVkMzIYCSADKAsyOC5wcm90b2J1Zl91bml0dGVzdC5UZXN0QXJlbmFN", + "YXAuTWFwU2ZpeGVkMzJTZml4ZWQzMkVudHJ5ElcKFW1hcF9zZml4ZWQ2NF9z", + "Zml4ZWQ2NBgKIAMoCzI4LnByb3RvYnVmX3VuaXR0ZXN0LlRlc3RBcmVuYU1h", + "cC5NYXBTZml4ZWQ2NFNmaXhlZDY0RW50cnkSSwoPbWFwX2ludDMyX2Zsb2F0", + "GAsgAygLMjIucHJvdG9idWZfdW5pdHRlc3QuVGVzdEFyZW5hTWFwLk1hcElu", + "dDMyRmxvYXRFbnRyeRJNChBtYXBfaW50MzJfZG91YmxlGAwgAygLMjMucHJv", + "dG9idWZfdW5pdHRlc3QuVGVzdEFyZW5hTWFwLk1hcEludDMyRG91YmxlRW50", + "cnkSRwoNbWFwX2Jvb2xfYm9vbBgNIAMoCzIwLnByb3RvYnVmX3VuaXR0ZXN0", + "LlRlc3RBcmVuYU1hcC5NYXBCb29sQm9vbEVudHJ5EkkKDm1hcF9pbnQzMl9l", + "bnVtGA4gAygLMjEucHJvdG9idWZfdW5pdHRlc3QuVGVzdEFyZW5hTWFwLk1h", + "cEludDMyRW51bUVudHJ5El4KGW1hcF9pbnQzMl9mb3JlaWduX21lc3NhZ2UY", + "DyADKAsyOy5wcm90b2J1Zl91bml0dGVzdC5UZXN0QXJlbmFNYXAuTWFwSW50", + "MzJGb3JlaWduTWVzc2FnZUVudHJ5GjQKEk1hcEludDMySW50MzJFbnRyeRIL", + "CgNrZXkYASABKAUSDQoFdmFsdWUYAiABKAU6AjgBGjQKEk1hcEludDY0SW50", + "NjRFbnRyeRILCgNrZXkYASABKAMSDQoFdmFsdWUYAiABKAM6AjgBGjYKFE1h", + "cFVpbnQzMlVpbnQzMkVudHJ5EgsKA2tleRgBIAEoDRINCgV2YWx1ZRgCIAEo", + "DToCOAEaNgoUTWFwVWludDY0VWludDY0RW50cnkSCwoDa2V5GAEgASgEEg0K", + "BXZhbHVlGAIgASgEOgI4ARo2ChRNYXBTaW50MzJTaW50MzJFbnRyeRILCgNr", + "ZXkYASABKBESDQoFdmFsdWUYAiABKBE6AjgBGjYKFE1hcFNpbnQ2NFNpbnQ2", + "NEVudHJ5EgsKA2tleRgBIAEoEhINCgV2YWx1ZRgCIAEoEjoCOAEaOAoWTWFw", + "Rml4ZWQzMkZpeGVkMzJFbnRyeRILCgNrZXkYASABKAcSDQoFdmFsdWUYAiAB", + "KAc6AjgBGjgKFk1hcEZpeGVkNjRGaXhlZDY0RW50cnkSCwoDa2V5GAEgASgG", + "Eg0KBXZhbHVlGAIgASgGOgI4ARo6ChhNYXBTZml4ZWQzMlNmaXhlZDMyRW50", + "cnkSCwoDa2V5GAEgASgPEg0KBXZhbHVlGAIgASgPOgI4ARo6ChhNYXBTZml4", + "ZWQ2NFNmaXhlZDY0RW50cnkSCwoDa2V5GAEgASgQEg0KBXZhbHVlGAIgASgQ", + "OgI4ARo0ChJNYXBJbnQzMkZsb2F0RW50cnkSCwoDa2V5GAEgASgFEg0KBXZh", + "bHVlGAIgASgCOgI4ARo1ChNNYXBJbnQzMkRvdWJsZUVudHJ5EgsKA2tleRgB", + "IAEoBRINCgV2YWx1ZRgCIAEoAToCOAEaMgoQTWFwQm9vbEJvb2xFbnRyeRIL", + "CgNrZXkYASABKAgSDQoFdmFsdWUYAiABKAg6AjgBGk8KEU1hcEludDMyRW51", + "bUVudHJ5EgsKA2tleRgBIAEoBRIpCgV2YWx1ZRgCIAEoDjIaLnByb3RvYnVm", + "X3VuaXR0ZXN0Lk1hcEVudW06AjgBGmAKG01hcEludDMyRm9yZWlnbk1lc3Nh", + "Z2VFbnRyeRILCgNrZXkYASABKAUSMAoFdmFsdWUYAiABKAsyIS5wcm90b2J1", + "Zl91bml0dGVzdC5Gb3JlaWduTWVzc2FnZToCOAEi5AEKH01lc3NhZ2VDb250", + "YWluaW5nRW51bUNhbGxlZFR5cGUSSgoEdHlwZRgBIAMoCzI8LnByb3RvYnVm", + "X3VuaXR0ZXN0Lk1lc3NhZ2VDb250YWluaW5nRW51bUNhbGxlZFR5cGUuVHlw", + "ZUVudHJ5Gl8KCVR5cGVFbnRyeRILCgNrZXkYASABKAUSQQoFdmFsdWUYAiAB", + "KAsyMi5wcm90b2J1Zl91bml0dGVzdC5NZXNzYWdlQ29udGFpbmluZ0VudW1D", + "YWxsZWRUeXBlOgI4ASIUCgRUeXBlEgwKCFRZUEVfRk9PEAAinQEKH01lc3Nh", + "Z2VDb250YWluaW5nTWFwQ2FsbGVkRW50cnkSTAoFZW50cnkYASADKAsyPS5w", + 
"cm90b2J1Zl91bml0dGVzdC5NZXNzYWdlQ29udGFpbmluZ01hcENhbGxlZEVu", + "dHJ5LkVudHJ5RW50cnkaLAoKRW50cnlFbnRyeRILCgNrZXkYASABKAUSDQoF", + "dmFsdWUYAiABKAU6AjgBKj8KB01hcEVudW0SEAoMTUFQX0VOVU1fRk9PEAAS", + "EAoMTUFQX0VOVU1fQkFSEAESEAoMTUFQX0VOVU1fQkFaEAJCIPgBAaoCGkdv", + "b2dsZS5Qcm90b2J1Zi5UZXN0UHJvdG9zYgZwcm90bzM=")); + descriptor = pbr::FileDescriptor.FromGeneratedCode(descriptorData, + new pbr::FileDescriptor[] { global::Google.Protobuf.TestProtos.UnittestProto3Reflection.Descriptor, }, + new pbr::GeneratedClrTypeInfo(new[] {typeof(global::Google.Protobuf.TestProtos.MapEnum), }, new pbr::GeneratedClrTypeInfo[] { + new pbr::GeneratedClrTypeInfo(typeof(global::Google.Protobuf.TestProtos.TestMap), global::Google.Protobuf.TestProtos.TestMap.Parser, new[]{ "MapInt32Int32", "MapInt64Int64", "MapUint32Uint32", "MapUint64Uint64", "MapSint32Sint32", "MapSint64Sint64", "MapFixed32Fixed32", "MapFixed64Fixed64", "MapSfixed32Sfixed32", "MapSfixed64Sfixed64", "MapInt32Float", "MapInt32Double", "MapBoolBool", "MapStringString", "MapInt32Bytes", "MapInt32Enum", "MapInt32ForeignMessage" }, null, null, new pbr::GeneratedClrTypeInfo[] { null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, }), + new pbr::GeneratedClrTypeInfo(typeof(global::Google.Protobuf.TestProtos.TestMapSubmessage), global::Google.Protobuf.TestProtos.TestMapSubmessage.Parser, new[]{ "TestMap" }, null, null, null), + new pbr::GeneratedClrTypeInfo(typeof(global::Google.Protobuf.TestProtos.TestMessageMap), global::Google.Protobuf.TestProtos.TestMessageMap.Parser, new[]{ "MapInt32Message" }, null, null, new pbr::GeneratedClrTypeInfo[] { null, }), + new pbr::GeneratedClrTypeInfo(typeof(global::Google.Protobuf.TestProtos.TestSameTypeMap), global::Google.Protobuf.TestProtos.TestSameTypeMap.Parser, new[]{ "Map1", "Map2" }, null, null, new pbr::GeneratedClrTypeInfo[] { null, null, }), + new pbr::GeneratedClrTypeInfo(typeof(global::Google.Protobuf.TestProtos.TestArenaMap), global::Google.Protobuf.TestProtos.TestArenaMap.Parser, new[]{ "MapInt32Int32", "MapInt64Int64", "MapUint32Uint32", "MapUint64Uint64", "MapSint32Sint32", "MapSint64Sint64", "MapFixed32Fixed32", "MapFixed64Fixed64", "MapSfixed32Sfixed32", "MapSfixed64Sfixed64", "MapInt32Float", "MapInt32Double", "MapBoolBool", "MapInt32Enum", "MapInt32ForeignMessage" }, null, null, new pbr::GeneratedClrTypeInfo[] { null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, }), + new pbr::GeneratedClrTypeInfo(typeof(global::Google.Protobuf.TestProtos.MessageContainingEnumCalledType), global::Google.Protobuf.TestProtos.MessageContainingEnumCalledType.Parser, new[]{ "Type" }, null, new[]{ typeof(global::Google.Protobuf.TestProtos.MessageContainingEnumCalledType.Types.Type) }, new pbr::GeneratedClrTypeInfo[] { null, }), + new pbr::GeneratedClrTypeInfo(typeof(global::Google.Protobuf.TestProtos.MessageContainingMapCalledEntry), global::Google.Protobuf.TestProtos.MessageContainingMapCalledEntry.Parser, new[]{ "Entry" }, null, null, new pbr::GeneratedClrTypeInfo[] { null, }) + })); + } + #endregion + + } + #region Enums + public enum MapEnum { + [pbr::OriginalName("MAP_ENUM_FOO")] Foo = 0, + [pbr::OriginalName("MAP_ENUM_BAR")] Bar = 1, + [pbr::OriginalName("MAP_ENUM_BAZ")] Baz = 2, + } + + #endregion + + #region Messages + /// + /// Tests maps. 
+ /// + [global::System.Diagnostics.DebuggerNonUserCodeAttribute()] + public sealed partial class TestMap : pb::IMessage { + private static readonly pb::MessageParser _parser = new pb::MessageParser(() => new TestMap()); + public static pb::MessageParser Parser { get { return _parser; } } + + public static pbr::MessageDescriptor Descriptor { + get { return global::Google.Protobuf.TestProtos.MapUnittestProto3Reflection.Descriptor.MessageTypes[0]; } + } + + pbr::MessageDescriptor pb::IMessage.Descriptor { + get { return Descriptor; } + } + + public TestMap() { + OnConstruction(); + } + + partial void OnConstruction(); + + public TestMap(TestMap other) : this() { + mapInt32Int32_ = other.mapInt32Int32_.Clone(); + mapInt64Int64_ = other.mapInt64Int64_.Clone(); + mapUint32Uint32_ = other.mapUint32Uint32_.Clone(); + mapUint64Uint64_ = other.mapUint64Uint64_.Clone(); + mapSint32Sint32_ = other.mapSint32Sint32_.Clone(); + mapSint64Sint64_ = other.mapSint64Sint64_.Clone(); + mapFixed32Fixed32_ = other.mapFixed32Fixed32_.Clone(); + mapFixed64Fixed64_ = other.mapFixed64Fixed64_.Clone(); + mapSfixed32Sfixed32_ = other.mapSfixed32Sfixed32_.Clone(); + mapSfixed64Sfixed64_ = other.mapSfixed64Sfixed64_.Clone(); + mapInt32Float_ = other.mapInt32Float_.Clone(); + mapInt32Double_ = other.mapInt32Double_.Clone(); + mapBoolBool_ = other.mapBoolBool_.Clone(); + mapStringString_ = other.mapStringString_.Clone(); + mapInt32Bytes_ = other.mapInt32Bytes_.Clone(); + mapInt32Enum_ = other.mapInt32Enum_.Clone(); + mapInt32ForeignMessage_ = other.mapInt32ForeignMessage_.Clone(); + } + + public TestMap Clone() { + return new TestMap(this); + } + + /// Field number for the "map_int32_int32" field. + public const int MapInt32Int32FieldNumber = 1; + private static readonly pbc::MapField.Codec _map_mapInt32Int32_codec + = new pbc::MapField.Codec(pb::FieldCodec.ForInt32(8), pb::FieldCodec.ForInt32(16), 10); + private readonly pbc::MapField mapInt32Int32_ = new pbc::MapField(); + public pbc::MapField MapInt32Int32 { + get { return mapInt32Int32_; } + } + + /// Field number for the "map_int64_int64" field. + public const int MapInt64Int64FieldNumber = 2; + private static readonly pbc::MapField.Codec _map_mapInt64Int64_codec + = new pbc::MapField.Codec(pb::FieldCodec.ForInt64(8), pb::FieldCodec.ForInt64(16), 18); + private readonly pbc::MapField mapInt64Int64_ = new pbc::MapField(); + public pbc::MapField MapInt64Int64 { + get { return mapInt64Int64_; } + } + + /// Field number for the "map_uint32_uint32" field. + public const int MapUint32Uint32FieldNumber = 3; + private static readonly pbc::MapField.Codec _map_mapUint32Uint32_codec + = new pbc::MapField.Codec(pb::FieldCodec.ForUInt32(8), pb::FieldCodec.ForUInt32(16), 26); + private readonly pbc::MapField mapUint32Uint32_ = new pbc::MapField(); + public pbc::MapField MapUint32Uint32 { + get { return mapUint32Uint32_; } + } + + /// Field number for the "map_uint64_uint64" field. + public const int MapUint64Uint64FieldNumber = 4; + private static readonly pbc::MapField.Codec _map_mapUint64Uint64_codec + = new pbc::MapField.Codec(pb::FieldCodec.ForUInt64(8), pb::FieldCodec.ForUInt64(16), 34); + private readonly pbc::MapField mapUint64Uint64_ = new pbc::MapField(); + public pbc::MapField MapUint64Uint64 { + get { return mapUint64Uint64_; } + } + + /// Field number for the "map_sint32_sint32" field. 
+ public const int MapSint32Sint32FieldNumber = 5; + private static readonly pbc::MapField.Codec _map_mapSint32Sint32_codec + = new pbc::MapField.Codec(pb::FieldCodec.ForSInt32(8), pb::FieldCodec.ForSInt32(16), 42); + private readonly pbc::MapField mapSint32Sint32_ = new pbc::MapField(); + public pbc::MapField MapSint32Sint32 { + get { return mapSint32Sint32_; } + } + + /// Field number for the "map_sint64_sint64" field. + public const int MapSint64Sint64FieldNumber = 6; + private static readonly pbc::MapField.Codec _map_mapSint64Sint64_codec + = new pbc::MapField.Codec(pb::FieldCodec.ForSInt64(8), pb::FieldCodec.ForSInt64(16), 50); + private readonly pbc::MapField mapSint64Sint64_ = new pbc::MapField(); + public pbc::MapField MapSint64Sint64 { + get { return mapSint64Sint64_; } + } + + /// Field number for the "map_fixed32_fixed32" field. + public const int MapFixed32Fixed32FieldNumber = 7; + private static readonly pbc::MapField.Codec _map_mapFixed32Fixed32_codec + = new pbc::MapField.Codec(pb::FieldCodec.ForFixed32(13), pb::FieldCodec.ForFixed32(21), 58); + private readonly pbc::MapField mapFixed32Fixed32_ = new pbc::MapField(); + public pbc::MapField MapFixed32Fixed32 { + get { return mapFixed32Fixed32_; } + } + + /// Field number for the "map_fixed64_fixed64" field. + public const int MapFixed64Fixed64FieldNumber = 8; + private static readonly pbc::MapField.Codec _map_mapFixed64Fixed64_codec + = new pbc::MapField.Codec(pb::FieldCodec.ForFixed64(9), pb::FieldCodec.ForFixed64(17), 66); + private readonly pbc::MapField mapFixed64Fixed64_ = new pbc::MapField(); + public pbc::MapField MapFixed64Fixed64 { + get { return mapFixed64Fixed64_; } + } + + /// Field number for the "map_sfixed32_sfixed32" field. + public const int MapSfixed32Sfixed32FieldNumber = 9; + private static readonly pbc::MapField.Codec _map_mapSfixed32Sfixed32_codec + = new pbc::MapField.Codec(pb::FieldCodec.ForSFixed32(13), pb::FieldCodec.ForSFixed32(21), 74); + private readonly pbc::MapField mapSfixed32Sfixed32_ = new pbc::MapField(); + public pbc::MapField MapSfixed32Sfixed32 { + get { return mapSfixed32Sfixed32_; } + } + + /// Field number for the "map_sfixed64_sfixed64" field. + public const int MapSfixed64Sfixed64FieldNumber = 10; + private static readonly pbc::MapField.Codec _map_mapSfixed64Sfixed64_codec + = new pbc::MapField.Codec(pb::FieldCodec.ForSFixed64(9), pb::FieldCodec.ForSFixed64(17), 82); + private readonly pbc::MapField mapSfixed64Sfixed64_ = new pbc::MapField(); + public pbc::MapField MapSfixed64Sfixed64 { + get { return mapSfixed64Sfixed64_; } + } + + /// Field number for the "map_int32_float" field. + public const int MapInt32FloatFieldNumber = 11; + private static readonly pbc::MapField.Codec _map_mapInt32Float_codec + = new pbc::MapField.Codec(pb::FieldCodec.ForInt32(8), pb::FieldCodec.ForFloat(21), 90); + private readonly pbc::MapField mapInt32Float_ = new pbc::MapField(); + public pbc::MapField MapInt32Float { + get { return mapInt32Float_; } + } + + /// Field number for the "map_int32_double" field. + public const int MapInt32DoubleFieldNumber = 12; + private static readonly pbc::MapField.Codec _map_mapInt32Double_codec + = new pbc::MapField.Codec(pb::FieldCodec.ForInt32(8), pb::FieldCodec.ForDouble(17), 98); + private readonly pbc::MapField mapInt32Double_ = new pbc::MapField(); + public pbc::MapField MapInt32Double { + get { return mapInt32Double_; } + } + + /// Field number for the "map_bool_bool" field. 
+ public const int MapBoolBoolFieldNumber = 13; + private static readonly pbc::MapField.Codec _map_mapBoolBool_codec + = new pbc::MapField.Codec(pb::FieldCodec.ForBool(8), pb::FieldCodec.ForBool(16), 106); + private readonly pbc::MapField mapBoolBool_ = new pbc::MapField(); + public pbc::MapField MapBoolBool { + get { return mapBoolBool_; } + } + + /// Field number for the "map_string_string" field. + public const int MapStringStringFieldNumber = 14; + private static readonly pbc::MapField.Codec _map_mapStringString_codec + = new pbc::MapField.Codec(pb::FieldCodec.ForString(10), pb::FieldCodec.ForString(18), 114); + private readonly pbc::MapField mapStringString_ = new pbc::MapField(); + public pbc::MapField MapStringString { + get { return mapStringString_; } + } + + /// Field number for the "map_int32_bytes" field. + public const int MapInt32BytesFieldNumber = 15; + private static readonly pbc::MapField.Codec _map_mapInt32Bytes_codec + = new pbc::MapField.Codec(pb::FieldCodec.ForInt32(8), pb::FieldCodec.ForBytes(18), 122); + private readonly pbc::MapField mapInt32Bytes_ = new pbc::MapField(); + public pbc::MapField MapInt32Bytes { + get { return mapInt32Bytes_; } + } + + /// Field number for the "map_int32_enum" field. + public const int MapInt32EnumFieldNumber = 16; + private static readonly pbc::MapField.Codec _map_mapInt32Enum_codec + = new pbc::MapField.Codec(pb::FieldCodec.ForInt32(8), pb::FieldCodec.ForEnum(16, x => (int) x, x => (global::Google.Protobuf.TestProtos.MapEnum) x), 130); + private readonly pbc::MapField mapInt32Enum_ = new pbc::MapField(); + public pbc::MapField MapInt32Enum { + get { return mapInt32Enum_; } + } + + /// Field number for the "map_int32_foreign_message" field. + public const int MapInt32ForeignMessageFieldNumber = 17; + private static readonly pbc::MapField.Codec _map_mapInt32ForeignMessage_codec + = new pbc::MapField.Codec(pb::FieldCodec.ForInt32(8), pb::FieldCodec.ForMessage(18, global::Google.Protobuf.TestProtos.ForeignMessage.Parser), 138); + private readonly pbc::MapField mapInt32ForeignMessage_ = new pbc::MapField(); + public pbc::MapField MapInt32ForeignMessage { + get { return mapInt32ForeignMessage_; } + } + + public override bool Equals(object other) { + return Equals(other as TestMap); + } + + public bool Equals(TestMap other) { + if (ReferenceEquals(other, null)) { + return false; + } + if (ReferenceEquals(other, this)) { + return true; + } + if (!MapInt32Int32.Equals(other.MapInt32Int32)) return false; + if (!MapInt64Int64.Equals(other.MapInt64Int64)) return false; + if (!MapUint32Uint32.Equals(other.MapUint32Uint32)) return false; + if (!MapUint64Uint64.Equals(other.MapUint64Uint64)) return false; + if (!MapSint32Sint32.Equals(other.MapSint32Sint32)) return false; + if (!MapSint64Sint64.Equals(other.MapSint64Sint64)) return false; + if (!MapFixed32Fixed32.Equals(other.MapFixed32Fixed32)) return false; + if (!MapFixed64Fixed64.Equals(other.MapFixed64Fixed64)) return false; + if (!MapSfixed32Sfixed32.Equals(other.MapSfixed32Sfixed32)) return false; + if (!MapSfixed64Sfixed64.Equals(other.MapSfixed64Sfixed64)) return false; + if (!MapInt32Float.Equals(other.MapInt32Float)) return false; + if (!MapInt32Double.Equals(other.MapInt32Double)) return false; + if (!MapBoolBool.Equals(other.MapBoolBool)) return false; + if (!MapStringString.Equals(other.MapStringString)) return false; + if (!MapInt32Bytes.Equals(other.MapInt32Bytes)) return false; + if (!MapInt32Enum.Equals(other.MapInt32Enum)) return false; + if 
(!MapInt32ForeignMessage.Equals(other.MapInt32ForeignMessage)) return false; + return true; + } + + public override int GetHashCode() { + int hash = 1; + hash ^= MapInt32Int32.GetHashCode(); + hash ^= MapInt64Int64.GetHashCode(); + hash ^= MapUint32Uint32.GetHashCode(); + hash ^= MapUint64Uint64.GetHashCode(); + hash ^= MapSint32Sint32.GetHashCode(); + hash ^= MapSint64Sint64.GetHashCode(); + hash ^= MapFixed32Fixed32.GetHashCode(); + hash ^= MapFixed64Fixed64.GetHashCode(); + hash ^= MapSfixed32Sfixed32.GetHashCode(); + hash ^= MapSfixed64Sfixed64.GetHashCode(); + hash ^= MapInt32Float.GetHashCode(); + hash ^= MapInt32Double.GetHashCode(); + hash ^= MapBoolBool.GetHashCode(); + hash ^= MapStringString.GetHashCode(); + hash ^= MapInt32Bytes.GetHashCode(); + hash ^= MapInt32Enum.GetHashCode(); + hash ^= MapInt32ForeignMessage.GetHashCode(); + return hash; + } + + public override string ToString() { + return pb::JsonFormatter.ToDiagnosticString(this); + } + + public void WriteTo(pb::CodedOutputStream output) { + mapInt32Int32_.WriteTo(output, _map_mapInt32Int32_codec); + mapInt64Int64_.WriteTo(output, _map_mapInt64Int64_codec); + mapUint32Uint32_.WriteTo(output, _map_mapUint32Uint32_codec); + mapUint64Uint64_.WriteTo(output, _map_mapUint64Uint64_codec); + mapSint32Sint32_.WriteTo(output, _map_mapSint32Sint32_codec); + mapSint64Sint64_.WriteTo(output, _map_mapSint64Sint64_codec); + mapFixed32Fixed32_.WriteTo(output, _map_mapFixed32Fixed32_codec); + mapFixed64Fixed64_.WriteTo(output, _map_mapFixed64Fixed64_codec); + mapSfixed32Sfixed32_.WriteTo(output, _map_mapSfixed32Sfixed32_codec); + mapSfixed64Sfixed64_.WriteTo(output, _map_mapSfixed64Sfixed64_codec); + mapInt32Float_.WriteTo(output, _map_mapInt32Float_codec); + mapInt32Double_.WriteTo(output, _map_mapInt32Double_codec); + mapBoolBool_.WriteTo(output, _map_mapBoolBool_codec); + mapStringString_.WriteTo(output, _map_mapStringString_codec); + mapInt32Bytes_.WriteTo(output, _map_mapInt32Bytes_codec); + mapInt32Enum_.WriteTo(output, _map_mapInt32Enum_codec); + mapInt32ForeignMessage_.WriteTo(output, _map_mapInt32ForeignMessage_codec); + } + + public int CalculateSize() { + int size = 0; + size += mapInt32Int32_.CalculateSize(_map_mapInt32Int32_codec); + size += mapInt64Int64_.CalculateSize(_map_mapInt64Int64_codec); + size += mapUint32Uint32_.CalculateSize(_map_mapUint32Uint32_codec); + size += mapUint64Uint64_.CalculateSize(_map_mapUint64Uint64_codec); + size += mapSint32Sint32_.CalculateSize(_map_mapSint32Sint32_codec); + size += mapSint64Sint64_.CalculateSize(_map_mapSint64Sint64_codec); + size += mapFixed32Fixed32_.CalculateSize(_map_mapFixed32Fixed32_codec); + size += mapFixed64Fixed64_.CalculateSize(_map_mapFixed64Fixed64_codec); + size += mapSfixed32Sfixed32_.CalculateSize(_map_mapSfixed32Sfixed32_codec); + size += mapSfixed64Sfixed64_.CalculateSize(_map_mapSfixed64Sfixed64_codec); + size += mapInt32Float_.CalculateSize(_map_mapInt32Float_codec); + size += mapInt32Double_.CalculateSize(_map_mapInt32Double_codec); + size += mapBoolBool_.CalculateSize(_map_mapBoolBool_codec); + size += mapStringString_.CalculateSize(_map_mapStringString_codec); + size += mapInt32Bytes_.CalculateSize(_map_mapInt32Bytes_codec); + size += mapInt32Enum_.CalculateSize(_map_mapInt32Enum_codec); + size += mapInt32ForeignMessage_.CalculateSize(_map_mapInt32ForeignMessage_codec); + return size; + } + + public void MergeFrom(TestMap other) { + if (other == null) { + return; + } + mapInt32Int32_.Add(other.mapInt32Int32_); + mapInt64Int64_.Add(other.mapInt64Int64_); 
+ mapUint32Uint32_.Add(other.mapUint32Uint32_); + mapUint64Uint64_.Add(other.mapUint64Uint64_); + mapSint32Sint32_.Add(other.mapSint32Sint32_); + mapSint64Sint64_.Add(other.mapSint64Sint64_); + mapFixed32Fixed32_.Add(other.mapFixed32Fixed32_); + mapFixed64Fixed64_.Add(other.mapFixed64Fixed64_); + mapSfixed32Sfixed32_.Add(other.mapSfixed32Sfixed32_); + mapSfixed64Sfixed64_.Add(other.mapSfixed64Sfixed64_); + mapInt32Float_.Add(other.mapInt32Float_); + mapInt32Double_.Add(other.mapInt32Double_); + mapBoolBool_.Add(other.mapBoolBool_); + mapStringString_.Add(other.mapStringString_); + mapInt32Bytes_.Add(other.mapInt32Bytes_); + mapInt32Enum_.Add(other.mapInt32Enum_); + mapInt32ForeignMessage_.Add(other.mapInt32ForeignMessage_); + } + + public void MergeFrom(pb::CodedInputStream input) { + uint tag; + while ((tag = input.ReadTag()) != 0) { + switch(tag) { + default: + input.SkipLastField(); + break; + case 10: { + mapInt32Int32_.AddEntriesFrom(input, _map_mapInt32Int32_codec); + break; + } + case 18: { + mapInt64Int64_.AddEntriesFrom(input, _map_mapInt64Int64_codec); + break; + } + case 26: { + mapUint32Uint32_.AddEntriesFrom(input, _map_mapUint32Uint32_codec); + break; + } + case 34: { + mapUint64Uint64_.AddEntriesFrom(input, _map_mapUint64Uint64_codec); + break; + } + case 42: { + mapSint32Sint32_.AddEntriesFrom(input, _map_mapSint32Sint32_codec); + break; + } + case 50: { + mapSint64Sint64_.AddEntriesFrom(input, _map_mapSint64Sint64_codec); + break; + } + case 58: { + mapFixed32Fixed32_.AddEntriesFrom(input, _map_mapFixed32Fixed32_codec); + break; + } + case 66: { + mapFixed64Fixed64_.AddEntriesFrom(input, _map_mapFixed64Fixed64_codec); + break; + } + case 74: { + mapSfixed32Sfixed32_.AddEntriesFrom(input, _map_mapSfixed32Sfixed32_codec); + break; + } + case 82: { + mapSfixed64Sfixed64_.AddEntriesFrom(input, _map_mapSfixed64Sfixed64_codec); + break; + } + case 90: { + mapInt32Float_.AddEntriesFrom(input, _map_mapInt32Float_codec); + break; + } + case 98: { + mapInt32Double_.AddEntriesFrom(input, _map_mapInt32Double_codec); + break; + } + case 106: { + mapBoolBool_.AddEntriesFrom(input, _map_mapBoolBool_codec); + break; + } + case 114: { + mapStringString_.AddEntriesFrom(input, _map_mapStringString_codec); + break; + } + case 122: { + mapInt32Bytes_.AddEntriesFrom(input, _map_mapInt32Bytes_codec); + break; + } + case 130: { + mapInt32Enum_.AddEntriesFrom(input, _map_mapInt32Enum_codec); + break; + } + case 138: { + mapInt32ForeignMessage_.AddEntriesFrom(input, _map_mapInt32ForeignMessage_codec); + break; + } + } + } + } + + } + + [global::System.Diagnostics.DebuggerNonUserCodeAttribute()] + public sealed partial class TestMapSubmessage : pb::IMessage { + private static readonly pb::MessageParser _parser = new pb::MessageParser(() => new TestMapSubmessage()); + public static pb::MessageParser Parser { get { return _parser; } } + + public static pbr::MessageDescriptor Descriptor { + get { return global::Google.Protobuf.TestProtos.MapUnittestProto3Reflection.Descriptor.MessageTypes[1]; } + } + + pbr::MessageDescriptor pb::IMessage.Descriptor { + get { return Descriptor; } + } + + public TestMapSubmessage() { + OnConstruction(); + } + + partial void OnConstruction(); + + public TestMapSubmessage(TestMapSubmessage other) : this() { + TestMap = other.testMap_ != null ? other.TestMap.Clone() : null; + } + + public TestMapSubmessage Clone() { + return new TestMapSubmessage(this); + } + + /// Field number for the "test_map" field. 
+ public const int TestMapFieldNumber = 1; + private global::Google.Protobuf.TestProtos.TestMap testMap_; + public global::Google.Protobuf.TestProtos.TestMap TestMap { + get { return testMap_; } + set { + testMap_ = value; + } + } + + public override bool Equals(object other) { + return Equals(other as TestMapSubmessage); + } + + public bool Equals(TestMapSubmessage other) { + if (ReferenceEquals(other, null)) { + return false; + } + if (ReferenceEquals(other, this)) { + return true; + } + if (!object.Equals(TestMap, other.TestMap)) return false; + return true; + } + + public override int GetHashCode() { + int hash = 1; + if (testMap_ != null) hash ^= TestMap.GetHashCode(); + return hash; + } + + public override string ToString() { + return pb::JsonFormatter.ToDiagnosticString(this); + } + + public void WriteTo(pb::CodedOutputStream output) { + if (testMap_ != null) { + output.WriteRawTag(10); + output.WriteMessage(TestMap); + } + } + + public int CalculateSize() { + int size = 0; + if (testMap_ != null) { + size += 1 + pb::CodedOutputStream.ComputeMessageSize(TestMap); + } + return size; + } + + public void MergeFrom(TestMapSubmessage other) { + if (other == null) { + return; + } + if (other.testMap_ != null) { + if (testMap_ == null) { + testMap_ = new global::Google.Protobuf.TestProtos.TestMap(); + } + TestMap.MergeFrom(other.TestMap); + } + } + + public void MergeFrom(pb::CodedInputStream input) { + uint tag; + while ((tag = input.ReadTag()) != 0) { + switch(tag) { + default: + input.SkipLastField(); + break; + case 10: { + if (testMap_ == null) { + testMap_ = new global::Google.Protobuf.TestProtos.TestMap(); + } + input.ReadMessage(testMap_); + break; + } + } + } + } + + } + + [global::System.Diagnostics.DebuggerNonUserCodeAttribute()] + public sealed partial class TestMessageMap : pb::IMessage { + private static readonly pb::MessageParser _parser = new pb::MessageParser(() => new TestMessageMap()); + public static pb::MessageParser Parser { get { return _parser; } } + + public static pbr::MessageDescriptor Descriptor { + get { return global::Google.Protobuf.TestProtos.MapUnittestProto3Reflection.Descriptor.MessageTypes[2]; } + } + + pbr::MessageDescriptor pb::IMessage.Descriptor { + get { return Descriptor; } + } + + public TestMessageMap() { + OnConstruction(); + } + + partial void OnConstruction(); + + public TestMessageMap(TestMessageMap other) : this() { + mapInt32Message_ = other.mapInt32Message_.Clone(); + } + + public TestMessageMap Clone() { + return new TestMessageMap(this); + } + + /// Field number for the "map_int32_message" field. 
+ public const int MapInt32MessageFieldNumber = 1; + private static readonly pbc::MapField.Codec _map_mapInt32Message_codec + = new pbc::MapField.Codec(pb::FieldCodec.ForInt32(8), pb::FieldCodec.ForMessage(18, global::Google.Protobuf.TestProtos.TestAllTypes.Parser), 10); + private readonly pbc::MapField mapInt32Message_ = new pbc::MapField(); + public pbc::MapField MapInt32Message { + get { return mapInt32Message_; } + } + + public override bool Equals(object other) { + return Equals(other as TestMessageMap); + } + + public bool Equals(TestMessageMap other) { + if (ReferenceEquals(other, null)) { + return false; + } + if (ReferenceEquals(other, this)) { + return true; + } + if (!MapInt32Message.Equals(other.MapInt32Message)) return false; + return true; + } + + public override int GetHashCode() { + int hash = 1; + hash ^= MapInt32Message.GetHashCode(); + return hash; + } + + public override string ToString() { + return pb::JsonFormatter.ToDiagnosticString(this); + } + + public void WriteTo(pb::CodedOutputStream output) { + mapInt32Message_.WriteTo(output, _map_mapInt32Message_codec); + } + + public int CalculateSize() { + int size = 0; + size += mapInt32Message_.CalculateSize(_map_mapInt32Message_codec); + return size; + } + + public void MergeFrom(TestMessageMap other) { + if (other == null) { + return; + } + mapInt32Message_.Add(other.mapInt32Message_); + } + + public void MergeFrom(pb::CodedInputStream input) { + uint tag; + while ((tag = input.ReadTag()) != 0) { + switch(tag) { + default: + input.SkipLastField(); + break; + case 10: { + mapInt32Message_.AddEntriesFrom(input, _map_mapInt32Message_codec); + break; + } + } + } + } + + } + + /// + /// Two map fields share the same entry default instance. + /// + [global::System.Diagnostics.DebuggerNonUserCodeAttribute()] + public sealed partial class TestSameTypeMap : pb::IMessage { + private static readonly pb::MessageParser _parser = new pb::MessageParser(() => new TestSameTypeMap()); + public static pb::MessageParser Parser { get { return _parser; } } + + public static pbr::MessageDescriptor Descriptor { + get { return global::Google.Protobuf.TestProtos.MapUnittestProto3Reflection.Descriptor.MessageTypes[3]; } + } + + pbr::MessageDescriptor pb::IMessage.Descriptor { + get { return Descriptor; } + } + + public TestSameTypeMap() { + OnConstruction(); + } + + partial void OnConstruction(); + + public TestSameTypeMap(TestSameTypeMap other) : this() { + map1_ = other.map1_.Clone(); + map2_ = other.map2_.Clone(); + } + + public TestSameTypeMap Clone() { + return new TestSameTypeMap(this); + } + + /// Field number for the "map1" field. + public const int Map1FieldNumber = 1; + private static readonly pbc::MapField.Codec _map_map1_codec + = new pbc::MapField.Codec(pb::FieldCodec.ForInt32(8), pb::FieldCodec.ForInt32(16), 10); + private readonly pbc::MapField map1_ = new pbc::MapField(); + public pbc::MapField Map1 { + get { return map1_; } + } + + /// Field number for the "map2" field. 
+ public const int Map2FieldNumber = 2; + private static readonly pbc::MapField.Codec _map_map2_codec + = new pbc::MapField.Codec(pb::FieldCodec.ForInt32(8), pb::FieldCodec.ForInt32(16), 18); + private readonly pbc::MapField map2_ = new pbc::MapField(); + public pbc::MapField Map2 { + get { return map2_; } + } + + public override bool Equals(object other) { + return Equals(other as TestSameTypeMap); + } + + public bool Equals(TestSameTypeMap other) { + if (ReferenceEquals(other, null)) { + return false; + } + if (ReferenceEquals(other, this)) { + return true; + } + if (!Map1.Equals(other.Map1)) return false; + if (!Map2.Equals(other.Map2)) return false; + return true; + } + + public override int GetHashCode() { + int hash = 1; + hash ^= Map1.GetHashCode(); + hash ^= Map2.GetHashCode(); + return hash; + } + + public override string ToString() { + return pb::JsonFormatter.ToDiagnosticString(this); + } + + public void WriteTo(pb::CodedOutputStream output) { + map1_.WriteTo(output, _map_map1_codec); + map2_.WriteTo(output, _map_map2_codec); + } + + public int CalculateSize() { + int size = 0; + size += map1_.CalculateSize(_map_map1_codec); + size += map2_.CalculateSize(_map_map2_codec); + return size; + } + + public void MergeFrom(TestSameTypeMap other) { + if (other == null) { + return; + } + map1_.Add(other.map1_); + map2_.Add(other.map2_); + } + + public void MergeFrom(pb::CodedInputStream input) { + uint tag; + while ((tag = input.ReadTag()) != 0) { + switch(tag) { + default: + input.SkipLastField(); + break; + case 10: { + map1_.AddEntriesFrom(input, _map_map1_codec); + break; + } + case 18: { + map2_.AddEntriesFrom(input, _map_map2_codec); + break; + } + } + } + } + + } + + [global::System.Diagnostics.DebuggerNonUserCodeAttribute()] + public sealed partial class TestArenaMap : pb::IMessage { + private static readonly pb::MessageParser _parser = new pb::MessageParser(() => new TestArenaMap()); + public static pb::MessageParser Parser { get { return _parser; } } + + public static pbr::MessageDescriptor Descriptor { + get { return global::Google.Protobuf.TestProtos.MapUnittestProto3Reflection.Descriptor.MessageTypes[4]; } + } + + pbr::MessageDescriptor pb::IMessage.Descriptor { + get { return Descriptor; } + } + + public TestArenaMap() { + OnConstruction(); + } + + partial void OnConstruction(); + + public TestArenaMap(TestArenaMap other) : this() { + mapInt32Int32_ = other.mapInt32Int32_.Clone(); + mapInt64Int64_ = other.mapInt64Int64_.Clone(); + mapUint32Uint32_ = other.mapUint32Uint32_.Clone(); + mapUint64Uint64_ = other.mapUint64Uint64_.Clone(); + mapSint32Sint32_ = other.mapSint32Sint32_.Clone(); + mapSint64Sint64_ = other.mapSint64Sint64_.Clone(); + mapFixed32Fixed32_ = other.mapFixed32Fixed32_.Clone(); + mapFixed64Fixed64_ = other.mapFixed64Fixed64_.Clone(); + mapSfixed32Sfixed32_ = other.mapSfixed32Sfixed32_.Clone(); + mapSfixed64Sfixed64_ = other.mapSfixed64Sfixed64_.Clone(); + mapInt32Float_ = other.mapInt32Float_.Clone(); + mapInt32Double_ = other.mapInt32Double_.Clone(); + mapBoolBool_ = other.mapBoolBool_.Clone(); + mapInt32Enum_ = other.mapInt32Enum_.Clone(); + mapInt32ForeignMessage_ = other.mapInt32ForeignMessage_.Clone(); + } + + public TestArenaMap Clone() { + return new TestArenaMap(this); + } + + /// Field number for the "map_int32_int32" field. 
+ public const int MapInt32Int32FieldNumber = 1; + private static readonly pbc::MapField.Codec _map_mapInt32Int32_codec + = new pbc::MapField.Codec(pb::FieldCodec.ForInt32(8), pb::FieldCodec.ForInt32(16), 10); + private readonly pbc::MapField mapInt32Int32_ = new pbc::MapField(); + public pbc::MapField MapInt32Int32 { + get { return mapInt32Int32_; } + } + + /// Field number for the "map_int64_int64" field. + public const int MapInt64Int64FieldNumber = 2; + private static readonly pbc::MapField.Codec _map_mapInt64Int64_codec + = new pbc::MapField.Codec(pb::FieldCodec.ForInt64(8), pb::FieldCodec.ForInt64(16), 18); + private readonly pbc::MapField mapInt64Int64_ = new pbc::MapField(); + public pbc::MapField MapInt64Int64 { + get { return mapInt64Int64_; } + } + + /// Field number for the "map_uint32_uint32" field. + public const int MapUint32Uint32FieldNumber = 3; + private static readonly pbc::MapField.Codec _map_mapUint32Uint32_codec + = new pbc::MapField.Codec(pb::FieldCodec.ForUInt32(8), pb::FieldCodec.ForUInt32(16), 26); + private readonly pbc::MapField mapUint32Uint32_ = new pbc::MapField(); + public pbc::MapField MapUint32Uint32 { + get { return mapUint32Uint32_; } + } + + /// Field number for the "map_uint64_uint64" field. + public const int MapUint64Uint64FieldNumber = 4; + private static readonly pbc::MapField.Codec _map_mapUint64Uint64_codec + = new pbc::MapField.Codec(pb::FieldCodec.ForUInt64(8), pb::FieldCodec.ForUInt64(16), 34); + private readonly pbc::MapField mapUint64Uint64_ = new pbc::MapField(); + public pbc::MapField MapUint64Uint64 { + get { return mapUint64Uint64_; } + } + + /// Field number for the "map_sint32_sint32" field. + public const int MapSint32Sint32FieldNumber = 5; + private static readonly pbc::MapField.Codec _map_mapSint32Sint32_codec + = new pbc::MapField.Codec(pb::FieldCodec.ForSInt32(8), pb::FieldCodec.ForSInt32(16), 42); + private readonly pbc::MapField mapSint32Sint32_ = new pbc::MapField(); + public pbc::MapField MapSint32Sint32 { + get { return mapSint32Sint32_; } + } + + /// Field number for the "map_sint64_sint64" field. + public const int MapSint64Sint64FieldNumber = 6; + private static readonly pbc::MapField.Codec _map_mapSint64Sint64_codec + = new pbc::MapField.Codec(pb::FieldCodec.ForSInt64(8), pb::FieldCodec.ForSInt64(16), 50); + private readonly pbc::MapField mapSint64Sint64_ = new pbc::MapField(); + public pbc::MapField MapSint64Sint64 { + get { return mapSint64Sint64_; } + } + + /// Field number for the "map_fixed32_fixed32" field. + public const int MapFixed32Fixed32FieldNumber = 7; + private static readonly pbc::MapField.Codec _map_mapFixed32Fixed32_codec + = new pbc::MapField.Codec(pb::FieldCodec.ForFixed32(13), pb::FieldCodec.ForFixed32(21), 58); + private readonly pbc::MapField mapFixed32Fixed32_ = new pbc::MapField(); + public pbc::MapField MapFixed32Fixed32 { + get { return mapFixed32Fixed32_; } + } + + /// Field number for the "map_fixed64_fixed64" field. + public const int MapFixed64Fixed64FieldNumber = 8; + private static readonly pbc::MapField.Codec _map_mapFixed64Fixed64_codec + = new pbc::MapField.Codec(pb::FieldCodec.ForFixed64(9), pb::FieldCodec.ForFixed64(17), 66); + private readonly pbc::MapField mapFixed64Fixed64_ = new pbc::MapField(); + public pbc::MapField MapFixed64Fixed64 { + get { return mapFixed64Fixed64_; } + } + + /// Field number for the "map_sfixed32_sfixed32" field. 
+ public const int MapSfixed32Sfixed32FieldNumber = 9; + private static readonly pbc::MapField.Codec _map_mapSfixed32Sfixed32_codec + = new pbc::MapField.Codec(pb::FieldCodec.ForSFixed32(13), pb::FieldCodec.ForSFixed32(21), 74); + private readonly pbc::MapField mapSfixed32Sfixed32_ = new pbc::MapField(); + public pbc::MapField MapSfixed32Sfixed32 { + get { return mapSfixed32Sfixed32_; } + } + + /// Field number for the "map_sfixed64_sfixed64" field. + public const int MapSfixed64Sfixed64FieldNumber = 10; + private static readonly pbc::MapField.Codec _map_mapSfixed64Sfixed64_codec + = new pbc::MapField.Codec(pb::FieldCodec.ForSFixed64(9), pb::FieldCodec.ForSFixed64(17), 82); + private readonly pbc::MapField mapSfixed64Sfixed64_ = new pbc::MapField(); + public pbc::MapField MapSfixed64Sfixed64 { + get { return mapSfixed64Sfixed64_; } + } + + /// Field number for the "map_int32_float" field. + public const int MapInt32FloatFieldNumber = 11; + private static readonly pbc::MapField.Codec _map_mapInt32Float_codec + = new pbc::MapField.Codec(pb::FieldCodec.ForInt32(8), pb::FieldCodec.ForFloat(21), 90); + private readonly pbc::MapField mapInt32Float_ = new pbc::MapField(); + public pbc::MapField MapInt32Float { + get { return mapInt32Float_; } + } + + /// Field number for the "map_int32_double" field. + public const int MapInt32DoubleFieldNumber = 12; + private static readonly pbc::MapField.Codec _map_mapInt32Double_codec + = new pbc::MapField.Codec(pb::FieldCodec.ForInt32(8), pb::FieldCodec.ForDouble(17), 98); + private readonly pbc::MapField mapInt32Double_ = new pbc::MapField(); + public pbc::MapField MapInt32Double { + get { return mapInt32Double_; } + } + + /// Field number for the "map_bool_bool" field. + public const int MapBoolBoolFieldNumber = 13; + private static readonly pbc::MapField.Codec _map_mapBoolBool_codec + = new pbc::MapField.Codec(pb::FieldCodec.ForBool(8), pb::FieldCodec.ForBool(16), 106); + private readonly pbc::MapField mapBoolBool_ = new pbc::MapField(); + public pbc::MapField MapBoolBool { + get { return mapBoolBool_; } + } + + /// Field number for the "map_int32_enum" field. + public const int MapInt32EnumFieldNumber = 14; + private static readonly pbc::MapField.Codec _map_mapInt32Enum_codec + = new pbc::MapField.Codec(pb::FieldCodec.ForInt32(8), pb::FieldCodec.ForEnum(16, x => (int) x, x => (global::Google.Protobuf.TestProtos.MapEnum) x), 114); + private readonly pbc::MapField mapInt32Enum_ = new pbc::MapField(); + public pbc::MapField MapInt32Enum { + get { return mapInt32Enum_; } + } + + /// Field number for the "map_int32_foreign_message" field. 
+ public const int MapInt32ForeignMessageFieldNumber = 15; + private static readonly pbc::MapField.Codec _map_mapInt32ForeignMessage_codec + = new pbc::MapField.Codec(pb::FieldCodec.ForInt32(8), pb::FieldCodec.ForMessage(18, global::Google.Protobuf.TestProtos.ForeignMessage.Parser), 122); + private readonly pbc::MapField mapInt32ForeignMessage_ = new pbc::MapField(); + public pbc::MapField MapInt32ForeignMessage { + get { return mapInt32ForeignMessage_; } + } + + public override bool Equals(object other) { + return Equals(other as TestArenaMap); + } + + public bool Equals(TestArenaMap other) { + if (ReferenceEquals(other, null)) { + return false; + } + if (ReferenceEquals(other, this)) { + return true; + } + if (!MapInt32Int32.Equals(other.MapInt32Int32)) return false; + if (!MapInt64Int64.Equals(other.MapInt64Int64)) return false; + if (!MapUint32Uint32.Equals(other.MapUint32Uint32)) return false; + if (!MapUint64Uint64.Equals(other.MapUint64Uint64)) return false; + if (!MapSint32Sint32.Equals(other.MapSint32Sint32)) return false; + if (!MapSint64Sint64.Equals(other.MapSint64Sint64)) return false; + if (!MapFixed32Fixed32.Equals(other.MapFixed32Fixed32)) return false; + if (!MapFixed64Fixed64.Equals(other.MapFixed64Fixed64)) return false; + if (!MapSfixed32Sfixed32.Equals(other.MapSfixed32Sfixed32)) return false; + if (!MapSfixed64Sfixed64.Equals(other.MapSfixed64Sfixed64)) return false; + if (!MapInt32Float.Equals(other.MapInt32Float)) return false; + if (!MapInt32Double.Equals(other.MapInt32Double)) return false; + if (!MapBoolBool.Equals(other.MapBoolBool)) return false; + if (!MapInt32Enum.Equals(other.MapInt32Enum)) return false; + if (!MapInt32ForeignMessage.Equals(other.MapInt32ForeignMessage)) return false; + return true; + } + + public override int GetHashCode() { + int hash = 1; + hash ^= MapInt32Int32.GetHashCode(); + hash ^= MapInt64Int64.GetHashCode(); + hash ^= MapUint32Uint32.GetHashCode(); + hash ^= MapUint64Uint64.GetHashCode(); + hash ^= MapSint32Sint32.GetHashCode(); + hash ^= MapSint64Sint64.GetHashCode(); + hash ^= MapFixed32Fixed32.GetHashCode(); + hash ^= MapFixed64Fixed64.GetHashCode(); + hash ^= MapSfixed32Sfixed32.GetHashCode(); + hash ^= MapSfixed64Sfixed64.GetHashCode(); + hash ^= MapInt32Float.GetHashCode(); + hash ^= MapInt32Double.GetHashCode(); + hash ^= MapBoolBool.GetHashCode(); + hash ^= MapInt32Enum.GetHashCode(); + hash ^= MapInt32ForeignMessage.GetHashCode(); + return hash; + } + + public override string ToString() { + return pb::JsonFormatter.ToDiagnosticString(this); + } + + public void WriteTo(pb::CodedOutputStream output) { + mapInt32Int32_.WriteTo(output, _map_mapInt32Int32_codec); + mapInt64Int64_.WriteTo(output, _map_mapInt64Int64_codec); + mapUint32Uint32_.WriteTo(output, _map_mapUint32Uint32_codec); + mapUint64Uint64_.WriteTo(output, _map_mapUint64Uint64_codec); + mapSint32Sint32_.WriteTo(output, _map_mapSint32Sint32_codec); + mapSint64Sint64_.WriteTo(output, _map_mapSint64Sint64_codec); + mapFixed32Fixed32_.WriteTo(output, _map_mapFixed32Fixed32_codec); + mapFixed64Fixed64_.WriteTo(output, _map_mapFixed64Fixed64_codec); + mapSfixed32Sfixed32_.WriteTo(output, _map_mapSfixed32Sfixed32_codec); + mapSfixed64Sfixed64_.WriteTo(output, _map_mapSfixed64Sfixed64_codec); + mapInt32Float_.WriteTo(output, _map_mapInt32Float_codec); + mapInt32Double_.WriteTo(output, _map_mapInt32Double_codec); + mapBoolBool_.WriteTo(output, _map_mapBoolBool_codec); + mapInt32Enum_.WriteTo(output, _map_mapInt32Enum_codec); + mapInt32ForeignMessage_.WriteTo(output, 
_map_mapInt32ForeignMessage_codec); + } + + public int CalculateSize() { + int size = 0; + size += mapInt32Int32_.CalculateSize(_map_mapInt32Int32_codec); + size += mapInt64Int64_.CalculateSize(_map_mapInt64Int64_codec); + size += mapUint32Uint32_.CalculateSize(_map_mapUint32Uint32_codec); + size += mapUint64Uint64_.CalculateSize(_map_mapUint64Uint64_codec); + size += mapSint32Sint32_.CalculateSize(_map_mapSint32Sint32_codec); + size += mapSint64Sint64_.CalculateSize(_map_mapSint64Sint64_codec); + size += mapFixed32Fixed32_.CalculateSize(_map_mapFixed32Fixed32_codec); + size += mapFixed64Fixed64_.CalculateSize(_map_mapFixed64Fixed64_codec); + size += mapSfixed32Sfixed32_.CalculateSize(_map_mapSfixed32Sfixed32_codec); + size += mapSfixed64Sfixed64_.CalculateSize(_map_mapSfixed64Sfixed64_codec); + size += mapInt32Float_.CalculateSize(_map_mapInt32Float_codec); + size += mapInt32Double_.CalculateSize(_map_mapInt32Double_codec); + size += mapBoolBool_.CalculateSize(_map_mapBoolBool_codec); + size += mapInt32Enum_.CalculateSize(_map_mapInt32Enum_codec); + size += mapInt32ForeignMessage_.CalculateSize(_map_mapInt32ForeignMessage_codec); + return size; + } + + public void MergeFrom(TestArenaMap other) { + if (other == null) { + return; + } + mapInt32Int32_.Add(other.mapInt32Int32_); + mapInt64Int64_.Add(other.mapInt64Int64_); + mapUint32Uint32_.Add(other.mapUint32Uint32_); + mapUint64Uint64_.Add(other.mapUint64Uint64_); + mapSint32Sint32_.Add(other.mapSint32Sint32_); + mapSint64Sint64_.Add(other.mapSint64Sint64_); + mapFixed32Fixed32_.Add(other.mapFixed32Fixed32_); + mapFixed64Fixed64_.Add(other.mapFixed64Fixed64_); + mapSfixed32Sfixed32_.Add(other.mapSfixed32Sfixed32_); + mapSfixed64Sfixed64_.Add(other.mapSfixed64Sfixed64_); + mapInt32Float_.Add(other.mapInt32Float_); + mapInt32Double_.Add(other.mapInt32Double_); + mapBoolBool_.Add(other.mapBoolBool_); + mapInt32Enum_.Add(other.mapInt32Enum_); + mapInt32ForeignMessage_.Add(other.mapInt32ForeignMessage_); + } + + public void MergeFrom(pb::CodedInputStream input) { + uint tag; + while ((tag = input.ReadTag()) != 0) { + switch(tag) { + default: + input.SkipLastField(); + break; + case 10: { + mapInt32Int32_.AddEntriesFrom(input, _map_mapInt32Int32_codec); + break; + } + case 18: { + mapInt64Int64_.AddEntriesFrom(input, _map_mapInt64Int64_codec); + break; + } + case 26: { + mapUint32Uint32_.AddEntriesFrom(input, _map_mapUint32Uint32_codec); + break; + } + case 34: { + mapUint64Uint64_.AddEntriesFrom(input, _map_mapUint64Uint64_codec); + break; + } + case 42: { + mapSint32Sint32_.AddEntriesFrom(input, _map_mapSint32Sint32_codec); + break; + } + case 50: { + mapSint64Sint64_.AddEntriesFrom(input, _map_mapSint64Sint64_codec); + break; + } + case 58: { + mapFixed32Fixed32_.AddEntriesFrom(input, _map_mapFixed32Fixed32_codec); + break; + } + case 66: { + mapFixed64Fixed64_.AddEntriesFrom(input, _map_mapFixed64Fixed64_codec); + break; + } + case 74: { + mapSfixed32Sfixed32_.AddEntriesFrom(input, _map_mapSfixed32Sfixed32_codec); + break; + } + case 82: { + mapSfixed64Sfixed64_.AddEntriesFrom(input, _map_mapSfixed64Sfixed64_codec); + break; + } + case 90: { + mapInt32Float_.AddEntriesFrom(input, _map_mapInt32Float_codec); + break; + } + case 98: { + mapInt32Double_.AddEntriesFrom(input, _map_mapInt32Double_codec); + break; + } + case 106: { + mapBoolBool_.AddEntriesFrom(input, _map_mapBoolBool_codec); + break; + } + case 114: { + mapInt32Enum_.AddEntriesFrom(input, _map_mapInt32Enum_codec); + break; + } + case 122: { + 
mapInt32ForeignMessage_.AddEntriesFrom(input, _map_mapInt32ForeignMessage_codec); + break; + } + } + } + } + + } + + /// + /// Previously, message containing enum called Type cannot be used as value of + /// map field. + /// + [global::System.Diagnostics.DebuggerNonUserCodeAttribute()] + public sealed partial class MessageContainingEnumCalledType : pb::IMessage { + private static readonly pb::MessageParser _parser = new pb::MessageParser(() => new MessageContainingEnumCalledType()); + public static pb::MessageParser Parser { get { return _parser; } } + + public static pbr::MessageDescriptor Descriptor { + get { return global::Google.Protobuf.TestProtos.MapUnittestProto3Reflection.Descriptor.MessageTypes[5]; } + } + + pbr::MessageDescriptor pb::IMessage.Descriptor { + get { return Descriptor; } + } + + public MessageContainingEnumCalledType() { + OnConstruction(); + } + + partial void OnConstruction(); + + public MessageContainingEnumCalledType(MessageContainingEnumCalledType other) : this() { + type_ = other.type_.Clone(); + } + + public MessageContainingEnumCalledType Clone() { + return new MessageContainingEnumCalledType(this); + } + + /// Field number for the "type" field. + public const int TypeFieldNumber = 1; + private static readonly pbc::MapField.Codec _map_type_codec + = new pbc::MapField.Codec(pb::FieldCodec.ForInt32(8), pb::FieldCodec.ForMessage(18, global::Google.Protobuf.TestProtos.MessageContainingEnumCalledType.Parser), 10); + private readonly pbc::MapField type_ = new pbc::MapField(); + public pbc::MapField Type { + get { return type_; } + } + + public override bool Equals(object other) { + return Equals(other as MessageContainingEnumCalledType); + } + + public bool Equals(MessageContainingEnumCalledType other) { + if (ReferenceEquals(other, null)) { + return false; + } + if (ReferenceEquals(other, this)) { + return true; + } + if (!Type.Equals(other.Type)) return false; + return true; + } + + public override int GetHashCode() { + int hash = 1; + hash ^= Type.GetHashCode(); + return hash; + } + + public override string ToString() { + return pb::JsonFormatter.ToDiagnosticString(this); + } + + public void WriteTo(pb::CodedOutputStream output) { + type_.WriteTo(output, _map_type_codec); + } + + public int CalculateSize() { + int size = 0; + size += type_.CalculateSize(_map_type_codec); + return size; + } + + public void MergeFrom(MessageContainingEnumCalledType other) { + if (other == null) { + return; + } + type_.Add(other.type_); + } + + public void MergeFrom(pb::CodedInputStream input) { + uint tag; + while ((tag = input.ReadTag()) != 0) { + switch(tag) { + default: + input.SkipLastField(); + break; + case 10: { + type_.AddEntriesFrom(input, _map_type_codec); + break; + } + } + } + } + + #region Nested types + /// Container for nested types declared in the MessageContainingEnumCalledType message type. + [global::System.Diagnostics.DebuggerNonUserCodeAttribute()] + public static partial class Types { + public enum Type { + [pbr::OriginalName("TYPE_FOO")] Foo = 0, + } + + } + #endregion + + } + + /// + /// Previously, message cannot contain map field called "entry". 
+ /// + [global::System.Diagnostics.DebuggerNonUserCodeAttribute()] + public sealed partial class MessageContainingMapCalledEntry : pb::IMessage { + private static readonly pb::MessageParser _parser = new pb::MessageParser(() => new MessageContainingMapCalledEntry()); + public static pb::MessageParser Parser { get { return _parser; } } + + public static pbr::MessageDescriptor Descriptor { + get { return global::Google.Protobuf.TestProtos.MapUnittestProto3Reflection.Descriptor.MessageTypes[6]; } + } + + pbr::MessageDescriptor pb::IMessage.Descriptor { + get { return Descriptor; } + } + + public MessageContainingMapCalledEntry() { + OnConstruction(); + } + + partial void OnConstruction(); + + public MessageContainingMapCalledEntry(MessageContainingMapCalledEntry other) : this() { + entry_ = other.entry_.Clone(); + } + + public MessageContainingMapCalledEntry Clone() { + return new MessageContainingMapCalledEntry(this); + } + + /// Field number for the "entry" field. + public const int EntryFieldNumber = 1; + private static readonly pbc::MapField.Codec _map_entry_codec + = new pbc::MapField.Codec(pb::FieldCodec.ForInt32(8), pb::FieldCodec.ForInt32(16), 10); + private readonly pbc::MapField entry_ = new pbc::MapField(); + public pbc::MapField Entry { + get { return entry_; } + } + + public override bool Equals(object other) { + return Equals(other as MessageContainingMapCalledEntry); + } + + public bool Equals(MessageContainingMapCalledEntry other) { + if (ReferenceEquals(other, null)) { + return false; + } + if (ReferenceEquals(other, this)) { + return true; + } + if (!Entry.Equals(other.Entry)) return false; + return true; + } + + public override int GetHashCode() { + int hash = 1; + hash ^= Entry.GetHashCode(); + return hash; + } + + public override string ToString() { + return pb::JsonFormatter.ToDiagnosticString(this); + } + + public void WriteTo(pb::CodedOutputStream output) { + entry_.WriteTo(output, _map_entry_codec); + } + + public int CalculateSize() { + int size = 0; + size += entry_.CalculateSize(_map_entry_codec); + return size; + } + + public void MergeFrom(MessageContainingMapCalledEntry other) { + if (other == null) { + return; + } + entry_.Add(other.entry_); + } + + public void MergeFrom(pb::CodedInputStream input) { + uint tag; + while ((tag = input.ReadTag()) != 0) { + switch(tag) { + default: + input.SkipLastField(); + break; + case 10: { + entry_.AddEntriesFrom(input, _map_entry_codec); + break; + } + } + } + } + + } + + #endregion + +} + +#endregion Designer generated code diff --git a/packager/third_party/protobuf/csharp/src/Google.Protobuf.Test/TestProtos/UnittestImportProto3.cs b/packager/third_party/protobuf/csharp/src/Google.Protobuf.Test/TestProtos/UnittestImportProto3.cs new file mode 100644 index 0000000000..263e17c021 --- /dev/null +++ b/packager/third_party/protobuf/csharp/src/Google.Protobuf.Test/TestProtos/UnittestImportProto3.cs @@ -0,0 +1,161 @@ +// Generated by the protocol buffer compiler. DO NOT EDIT! 
+// source: google/protobuf/unittest_import_proto3.proto +#pragma warning disable 1591, 0612, 3021 +#region Designer generated code + +using pb = global::Google.Protobuf; +using pbc = global::Google.Protobuf.Collections; +using pbr = global::Google.Protobuf.Reflection; +using scg = global::System.Collections.Generic; +namespace Google.Protobuf.TestProtos { + + /// Holder for reflection information generated from google/protobuf/unittest_import_proto3.proto + [global::System.Diagnostics.DebuggerNonUserCodeAttribute()] + public static partial class UnittestImportProto3Reflection { + + #region Descriptor + /// File descriptor for google/protobuf/unittest_import_proto3.proto + public static pbr::FileDescriptor Descriptor { + get { return descriptor; } + } + private static pbr::FileDescriptor descriptor; + + static UnittestImportProto3Reflection() { + byte[] descriptorData = global::System.Convert.FromBase64String( + string.Concat( + "Cixnb29nbGUvcHJvdG9idWYvdW5pdHRlc3RfaW1wb3J0X3Byb3RvMy5wcm90", + "bxIYcHJvdG9idWZfdW5pdHRlc3RfaW1wb3J0GjNnb29nbGUvcHJvdG9idWYv", + "dW5pdHRlc3RfaW1wb3J0X3B1YmxpY19wcm90bzMucHJvdG8iGgoNSW1wb3J0", + "TWVzc2FnZRIJCgFkGAEgASgFKlkKCkltcG9ydEVudW0SGwoXSU1QT1JUX0VO", + "VU1fVU5TUEVDSUZJRUQQABIOCgpJTVBPUlRfRk9PEAcSDgoKSU1QT1JUX0JB", + "UhAIEg4KCklNUE9SVF9CQVoQCUI8Chhjb20uZ29vZ2xlLnByb3RvYnVmLnRl", + "c3RIAfgBAaoCGkdvb2dsZS5Qcm90b2J1Zi5UZXN0UHJvdG9zUABiBnByb3Rv", + "Mw==")); + descriptor = pbr::FileDescriptor.FromGeneratedCode(descriptorData, + new pbr::FileDescriptor[] { global::Google.Protobuf.TestProtos.UnittestImportPublicProto3Reflection.Descriptor, }, + new pbr::GeneratedClrTypeInfo(new[] {typeof(global::Google.Protobuf.TestProtos.ImportEnum), }, new pbr::GeneratedClrTypeInfo[] { + new pbr::GeneratedClrTypeInfo(typeof(global::Google.Protobuf.TestProtos.ImportMessage), global::Google.Protobuf.TestProtos.ImportMessage.Parser, new[]{ "D" }, null, null, null) + })); + } + #endregion + + } + #region Enums + public enum ImportEnum { + [pbr::OriginalName("IMPORT_ENUM_UNSPECIFIED")] Unspecified = 0, + [pbr::OriginalName("IMPORT_FOO")] ImportFoo = 7, + [pbr::OriginalName("IMPORT_BAR")] ImportBar = 8, + [pbr::OriginalName("IMPORT_BAZ")] ImportBaz = 9, + } + + #endregion + + #region Messages + [global::System.Diagnostics.DebuggerNonUserCodeAttribute()] + public sealed partial class ImportMessage : pb::IMessage { + private static readonly pb::MessageParser _parser = new pb::MessageParser(() => new ImportMessage()); + public static pb::MessageParser Parser { get { return _parser; } } + + public static pbr::MessageDescriptor Descriptor { + get { return global::Google.Protobuf.TestProtos.UnittestImportProto3Reflection.Descriptor.MessageTypes[0]; } + } + + pbr::MessageDescriptor pb::IMessage.Descriptor { + get { return Descriptor; } + } + + public ImportMessage() { + OnConstruction(); + } + + partial void OnConstruction(); + + public ImportMessage(ImportMessage other) : this() { + d_ = other.d_; + } + + public ImportMessage Clone() { + return new ImportMessage(this); + } + + /// Field number for the "d" field. 
+ public const int DFieldNumber = 1; + private int d_; + public int D { + get { return d_; } + set { + d_ = value; + } + } + + public override bool Equals(object other) { + return Equals(other as ImportMessage); + } + + public bool Equals(ImportMessage other) { + if (ReferenceEquals(other, null)) { + return false; + } + if (ReferenceEquals(other, this)) { + return true; + } + if (D != other.D) return false; + return true; + } + + public override int GetHashCode() { + int hash = 1; + if (D != 0) hash ^= D.GetHashCode(); + return hash; + } + + public override string ToString() { + return pb::JsonFormatter.ToDiagnosticString(this); + } + + public void WriteTo(pb::CodedOutputStream output) { + if (D != 0) { + output.WriteRawTag(8); + output.WriteInt32(D); + } + } + + public int CalculateSize() { + int size = 0; + if (D != 0) { + size += 1 + pb::CodedOutputStream.ComputeInt32Size(D); + } + return size; + } + + public void MergeFrom(ImportMessage other) { + if (other == null) { + return; + } + if (other.D != 0) { + D = other.D; + } + } + + public void MergeFrom(pb::CodedInputStream input) { + uint tag; + while ((tag = input.ReadTag()) != 0) { + switch(tag) { + default: + input.SkipLastField(); + break; + case 8: { + D = input.ReadInt32(); + break; + } + } + } + } + + } + + #endregion + +} + +#endregion Designer generated code diff --git a/packager/third_party/protobuf/csharp/src/Google.Protobuf.Test/TestProtos/UnittestImportPublicProto3.cs b/packager/third_party/protobuf/csharp/src/Google.Protobuf.Test/TestProtos/UnittestImportPublicProto3.cs new file mode 100644 index 0000000000..b471a8cf1a --- /dev/null +++ b/packager/third_party/protobuf/csharp/src/Google.Protobuf.Test/TestProtos/UnittestImportPublicProto3.cs @@ -0,0 +1,147 @@ +// Generated by the protocol buffer compiler. DO NOT EDIT! 
+// source: google/protobuf/unittest_import_public_proto3.proto +#pragma warning disable 1591, 0612, 3021 +#region Designer generated code + +using pb = global::Google.Protobuf; +using pbc = global::Google.Protobuf.Collections; +using pbr = global::Google.Protobuf.Reflection; +using scg = global::System.Collections.Generic; +namespace Google.Protobuf.TestProtos { + + /// Holder for reflection information generated from google/protobuf/unittest_import_public_proto3.proto + [global::System.Diagnostics.DebuggerNonUserCodeAttribute()] + public static partial class UnittestImportPublicProto3Reflection { + + #region Descriptor + /// File descriptor for google/protobuf/unittest_import_public_proto3.proto + public static pbr::FileDescriptor Descriptor { + get { return descriptor; } + } + private static pbr::FileDescriptor descriptor; + + static UnittestImportPublicProto3Reflection() { + byte[] descriptorData = global::System.Convert.FromBase64String( + string.Concat( + "CjNnb29nbGUvcHJvdG9idWYvdW5pdHRlc3RfaW1wb3J0X3B1YmxpY19wcm90", + "bzMucHJvdG8SGHByb3RvYnVmX3VuaXR0ZXN0X2ltcG9ydCIgChNQdWJsaWNJ", + "bXBvcnRNZXNzYWdlEgkKAWUYASABKAVCNwoYY29tLmdvb2dsZS5wcm90b2J1", + "Zi50ZXN0qgIaR29vZ2xlLlByb3RvYnVmLlRlc3RQcm90b3NiBnByb3RvMw==")); + descriptor = pbr::FileDescriptor.FromGeneratedCode(descriptorData, + new pbr::FileDescriptor[] { }, + new pbr::GeneratedClrTypeInfo(null, new pbr::GeneratedClrTypeInfo[] { + new pbr::GeneratedClrTypeInfo(typeof(global::Google.Protobuf.TestProtos.PublicImportMessage), global::Google.Protobuf.TestProtos.PublicImportMessage.Parser, new[]{ "E" }, null, null, null) + })); + } + #endregion + + } + #region Messages + [global::System.Diagnostics.DebuggerNonUserCodeAttribute()] + public sealed partial class PublicImportMessage : pb::IMessage { + private static readonly pb::MessageParser _parser = new pb::MessageParser(() => new PublicImportMessage()); + public static pb::MessageParser Parser { get { return _parser; } } + + public static pbr::MessageDescriptor Descriptor { + get { return global::Google.Protobuf.TestProtos.UnittestImportPublicProto3Reflection.Descriptor.MessageTypes[0]; } + } + + pbr::MessageDescriptor pb::IMessage.Descriptor { + get { return Descriptor; } + } + + public PublicImportMessage() { + OnConstruction(); + } + + partial void OnConstruction(); + + public PublicImportMessage(PublicImportMessage other) : this() { + e_ = other.e_; + } + + public PublicImportMessage Clone() { + return new PublicImportMessage(this); + } + + /// Field number for the "e" field. 
+ public const int EFieldNumber = 1; + private int e_; + public int E { + get { return e_; } + set { + e_ = value; + } + } + + public override bool Equals(object other) { + return Equals(other as PublicImportMessage); + } + + public bool Equals(PublicImportMessage other) { + if (ReferenceEquals(other, null)) { + return false; + } + if (ReferenceEquals(other, this)) { + return true; + } + if (E != other.E) return false; + return true; + } + + public override int GetHashCode() { + int hash = 1; + if (E != 0) hash ^= E.GetHashCode(); + return hash; + } + + public override string ToString() { + return pb::JsonFormatter.ToDiagnosticString(this); + } + + public void WriteTo(pb::CodedOutputStream output) { + if (E != 0) { + output.WriteRawTag(8); + output.WriteInt32(E); + } + } + + public int CalculateSize() { + int size = 0; + if (E != 0) { + size += 1 + pb::CodedOutputStream.ComputeInt32Size(E); + } + return size; + } + + public void MergeFrom(PublicImportMessage other) { + if (other == null) { + return; + } + if (other.E != 0) { + E = other.E; + } + } + + public void MergeFrom(pb::CodedInputStream input) { + uint tag; + while ((tag = input.ReadTag()) != 0) { + switch(tag) { + default: + input.SkipLastField(); + break; + case 8: { + E = input.ReadInt32(); + break; + } + } + } + } + + } + + #endregion + +} + +#endregion Designer generated code diff --git a/packager/third_party/protobuf/csharp/src/Google.Protobuf.Test/TestProtos/UnittestIssues.cs b/packager/third_party/protobuf/csharp/src/Google.Protobuf.Test/TestProtos/UnittestIssues.cs new file mode 100644 index 0000000000..7d4451b093 --- /dev/null +++ b/packager/third_party/protobuf/csharp/src/Google.Protobuf.Test/TestProtos/UnittestIssues.cs @@ -0,0 +1,1569 @@ +// Generated by the protocol buffer compiler. DO NOT EDIT! 
+// source: unittest_issues.proto +#pragma warning disable 1591, 0612, 3021 +#region Designer generated code + +using pb = global::Google.Protobuf; +using pbc = global::Google.Protobuf.Collections; +using pbr = global::Google.Protobuf.Reflection; +using scg = global::System.Collections.Generic; +namespace UnitTest.Issues.TestProtos { + + /// Holder for reflection information generated from unittest_issues.proto + [global::System.Diagnostics.DebuggerNonUserCodeAttribute()] + public static partial class UnittestIssuesReflection { + + #region Descriptor + /// File descriptor for unittest_issues.proto + public static pbr::FileDescriptor Descriptor { + get { return descriptor; } + } + private static pbr::FileDescriptor descriptor; + + static UnittestIssuesReflection() { + byte[] descriptorData = global::System.Convert.FromBase64String( + string.Concat( + "ChV1bml0dGVzdF9pc3N1ZXMucHJvdG8SD3VuaXR0ZXN0X2lzc3VlcyInCghJ", + "c3N1ZTMwNxobCgpOZXN0ZWRPbmNlGg0KC05lc3RlZFR3aWNlIrABChNOZWdh", + "dGl2ZUVudW1NZXNzYWdlEiwKBXZhbHVlGAEgASgOMh0udW5pdHRlc3RfaXNz", + "dWVzLk5lZ2F0aXZlRW51bRIxCgZ2YWx1ZXMYAiADKA4yHS51bml0dGVzdF9p", + "c3N1ZXMuTmVnYXRpdmVFbnVtQgIQABI4Cg1wYWNrZWRfdmFsdWVzGAMgAygO", + "Mh0udW5pdHRlc3RfaXNzdWVzLk5lZ2F0aXZlRW51bUICEAEiEQoPRGVwcmVj", + "YXRlZENoaWxkIrkCChdEZXByZWNhdGVkRmllbGRzTWVzc2FnZRIaCg5Qcmlt", + "aXRpdmVWYWx1ZRgBIAEoBUICGAESGgoOUHJpbWl0aXZlQXJyYXkYAiADKAVC", + "AhgBEjoKDE1lc3NhZ2VWYWx1ZRgDIAEoCzIgLnVuaXR0ZXN0X2lzc3Vlcy5E", + "ZXByZWNhdGVkQ2hpbGRCAhgBEjoKDE1lc3NhZ2VBcnJheRgEIAMoCzIgLnVu", + "aXR0ZXN0X2lzc3Vlcy5EZXByZWNhdGVkQ2hpbGRCAhgBEjYKCUVudW1WYWx1", + "ZRgFIAEoDjIfLnVuaXR0ZXN0X2lzc3Vlcy5EZXByZWNhdGVkRW51bUICGAES", + "NgoJRW51bUFycmF5GAYgAygOMh8udW5pdHRlc3RfaXNzdWVzLkRlcHJlY2F0", + "ZWRFbnVtQgIYASIZCglJdGVtRmllbGQSDAoEaXRlbRgBIAEoBSJECg1SZXNl", + "cnZlZE5hbWVzEg0KBXR5cGVzGAEgASgFEhIKCmRlc2NyaXB0b3IYAiABKAUa", + "EAoOU29tZU5lc3RlZFR5cGUioAEKFVRlc3RKc29uRmllbGRPcmRlcmluZxIT", + "CgtwbGFpbl9pbnQzMhgEIAEoBRITCglvMV9zdHJpbmcYAiABKAlIABISCghv", + "MV9pbnQzMhgFIAEoBUgAEhQKDHBsYWluX3N0cmluZxgBIAEoCRISCghvMl9p", + "bnQzMhgGIAEoBUgBEhMKCW8yX3N0cmluZxgDIAEoCUgBQgQKAm8xQgQKAm8y", + "IksKDFRlc3RKc29uTmFtZRIMCgRuYW1lGAEgASgJEhkKC2Rlc2NyaXB0aW9u", + "GAIgASgJUgRkZXNjEhIKBGd1aWQYAyABKAlSBGV4aWQqVQoMTmVnYXRpdmVF", + "bnVtEhYKEk5FR0FUSVZFX0VOVU1fWkVSTxAAEhYKCUZpdmVCZWxvdxD7////", + "//////8BEhUKCE1pbnVzT25lEP///////////wEqLgoORGVwcmVjYXRlZEVu", + "dW0SEwoPREVQUkVDQVRFRF9aRVJPEAASBwoDb25lEAFCH0gBqgIaVW5pdFRl", + "c3QuSXNzdWVzLlRlc3RQcm90b3NiBnByb3RvMw==")); + descriptor = pbr::FileDescriptor.FromGeneratedCode(descriptorData, + new pbr::FileDescriptor[] { }, + new pbr::GeneratedClrTypeInfo(new[] {typeof(global::UnitTest.Issues.TestProtos.NegativeEnum), typeof(global::UnitTest.Issues.TestProtos.DeprecatedEnum), }, new pbr::GeneratedClrTypeInfo[] { + new pbr::GeneratedClrTypeInfo(typeof(global::UnitTest.Issues.TestProtos.Issue307), global::UnitTest.Issues.TestProtos.Issue307.Parser, null, null, null, new pbr::GeneratedClrTypeInfo[] { new pbr::GeneratedClrTypeInfo(typeof(global::UnitTest.Issues.TestProtos.Issue307.Types.NestedOnce), global::UnitTest.Issues.TestProtos.Issue307.Types.NestedOnce.Parser, null, null, null, new pbr::GeneratedClrTypeInfo[] { new pbr::GeneratedClrTypeInfo(typeof(global::UnitTest.Issues.TestProtos.Issue307.Types.NestedOnce.Types.NestedTwice), global::UnitTest.Issues.TestProtos.Issue307.Types.NestedOnce.Types.NestedTwice.Parser, null, null, null, null)})}), + new pbr::GeneratedClrTypeInfo(typeof(global::UnitTest.Issues.TestProtos.NegativeEnumMessage), 
global::UnitTest.Issues.TestProtos.NegativeEnumMessage.Parser, new[]{ "Value", "Values", "PackedValues" }, null, null, null), + new pbr::GeneratedClrTypeInfo(typeof(global::UnitTest.Issues.TestProtos.DeprecatedChild), global::UnitTest.Issues.TestProtos.DeprecatedChild.Parser, null, null, null, null), + new pbr::GeneratedClrTypeInfo(typeof(global::UnitTest.Issues.TestProtos.DeprecatedFieldsMessage), global::UnitTest.Issues.TestProtos.DeprecatedFieldsMessage.Parser, new[]{ "PrimitiveValue", "PrimitiveArray", "MessageValue", "MessageArray", "EnumValue", "EnumArray" }, null, null, null), + new pbr::GeneratedClrTypeInfo(typeof(global::UnitTest.Issues.TestProtos.ItemField), global::UnitTest.Issues.TestProtos.ItemField.Parser, new[]{ "Item" }, null, null, null), + new pbr::GeneratedClrTypeInfo(typeof(global::UnitTest.Issues.TestProtos.ReservedNames), global::UnitTest.Issues.TestProtos.ReservedNames.Parser, new[]{ "Types_", "Descriptor_" }, null, null, new pbr::GeneratedClrTypeInfo[] { new pbr::GeneratedClrTypeInfo(typeof(global::UnitTest.Issues.TestProtos.ReservedNames.Types.SomeNestedType), global::UnitTest.Issues.TestProtos.ReservedNames.Types.SomeNestedType.Parser, null, null, null, null)}), + new pbr::GeneratedClrTypeInfo(typeof(global::UnitTest.Issues.TestProtos.TestJsonFieldOrdering), global::UnitTest.Issues.TestProtos.TestJsonFieldOrdering.Parser, new[]{ "PlainInt32", "O1String", "O1Int32", "PlainString", "O2Int32", "O2String" }, new[]{ "O1", "O2" }, null, null), + new pbr::GeneratedClrTypeInfo(typeof(global::UnitTest.Issues.TestProtos.TestJsonName), global::UnitTest.Issues.TestProtos.TestJsonName.Parser, new[]{ "Name", "Description", "Guid" }, null, null, null) + })); + } + #endregion + + } + #region Enums + public enum NegativeEnum { + [pbr::OriginalName("NEGATIVE_ENUM_ZERO")] Zero = 0, + [pbr::OriginalName("FiveBelow")] FiveBelow = -5, + [pbr::OriginalName("MinusOne")] MinusOne = -1, + } + + public enum DeprecatedEnum { + [pbr::OriginalName("DEPRECATED_ZERO")] DeprecatedZero = 0, + [pbr::OriginalName("one")] One = 1, + } + + #endregion + + #region Messages + /// + /// Issue 307: when generating doubly-nested types, any references + /// should be of the form A.Types.B.Types.C. 
+ /// + [global::System.Diagnostics.DebuggerNonUserCodeAttribute()] + public sealed partial class Issue307 : pb::IMessage { + private static readonly pb::MessageParser _parser = new pb::MessageParser(() => new Issue307()); + public static pb::MessageParser Parser { get { return _parser; } } + + public static pbr::MessageDescriptor Descriptor { + get { return global::UnitTest.Issues.TestProtos.UnittestIssuesReflection.Descriptor.MessageTypes[0]; } + } + + pbr::MessageDescriptor pb::IMessage.Descriptor { + get { return Descriptor; } + } + + public Issue307() { + OnConstruction(); + } + + partial void OnConstruction(); + + public Issue307(Issue307 other) : this() { + } + + public Issue307 Clone() { + return new Issue307(this); + } + + public override bool Equals(object other) { + return Equals(other as Issue307); + } + + public bool Equals(Issue307 other) { + if (ReferenceEquals(other, null)) { + return false; + } + if (ReferenceEquals(other, this)) { + return true; + } + return true; + } + + public override int GetHashCode() { + int hash = 1; + return hash; + } + + public override string ToString() { + return pb::JsonFormatter.ToDiagnosticString(this); + } + + public void WriteTo(pb::CodedOutputStream output) { + } + + public int CalculateSize() { + int size = 0; + return size; + } + + public void MergeFrom(Issue307 other) { + if (other == null) { + return; + } + } + + public void MergeFrom(pb::CodedInputStream input) { + uint tag; + while ((tag = input.ReadTag()) != 0) { + switch(tag) { + default: + input.SkipLastField(); + break; + } + } + } + + #region Nested types + /// Container for nested types declared in the Issue307 message type. + [global::System.Diagnostics.DebuggerNonUserCodeAttribute()] + public static partial class Types { + [global::System.Diagnostics.DebuggerNonUserCodeAttribute()] + public sealed partial class NestedOnce : pb::IMessage { + private static readonly pb::MessageParser _parser = new pb::MessageParser(() => new NestedOnce()); + public static pb::MessageParser Parser { get { return _parser; } } + + public static pbr::MessageDescriptor Descriptor { + get { return global::UnitTest.Issues.TestProtos.Issue307.Descriptor.NestedTypes[0]; } + } + + pbr::MessageDescriptor pb::IMessage.Descriptor { + get { return Descriptor; } + } + + public NestedOnce() { + OnConstruction(); + } + + partial void OnConstruction(); + + public NestedOnce(NestedOnce other) : this() { + } + + public NestedOnce Clone() { + return new NestedOnce(this); + } + + public override bool Equals(object other) { + return Equals(other as NestedOnce); + } + + public bool Equals(NestedOnce other) { + if (ReferenceEquals(other, null)) { + return false; + } + if (ReferenceEquals(other, this)) { + return true; + } + return true; + } + + public override int GetHashCode() { + int hash = 1; + return hash; + } + + public override string ToString() { + return pb::JsonFormatter.ToDiagnosticString(this); + } + + public void WriteTo(pb::CodedOutputStream output) { + } + + public int CalculateSize() { + int size = 0; + return size; + } + + public void MergeFrom(NestedOnce other) { + if (other == null) { + return; + } + } + + public void MergeFrom(pb::CodedInputStream input) { + uint tag; + while ((tag = input.ReadTag()) != 0) { + switch(tag) { + default: + input.SkipLastField(); + break; + } + } + } + + #region Nested types + /// Container for nested types declared in the NestedOnce message type. 
+ [global::System.Diagnostics.DebuggerNonUserCodeAttribute()] + public static partial class Types { + [global::System.Diagnostics.DebuggerNonUserCodeAttribute()] + public sealed partial class NestedTwice : pb::IMessage { + private static readonly pb::MessageParser _parser = new pb::MessageParser(() => new NestedTwice()); + public static pb::MessageParser Parser { get { return _parser; } } + + public static pbr::MessageDescriptor Descriptor { + get { return global::UnitTest.Issues.TestProtos.Issue307.Types.NestedOnce.Descriptor.NestedTypes[0]; } + } + + pbr::MessageDescriptor pb::IMessage.Descriptor { + get { return Descriptor; } + } + + public NestedTwice() { + OnConstruction(); + } + + partial void OnConstruction(); + + public NestedTwice(NestedTwice other) : this() { + } + + public NestedTwice Clone() { + return new NestedTwice(this); + } + + public override bool Equals(object other) { + return Equals(other as NestedTwice); + } + + public bool Equals(NestedTwice other) { + if (ReferenceEquals(other, null)) { + return false; + } + if (ReferenceEquals(other, this)) { + return true; + } + return true; + } + + public override int GetHashCode() { + int hash = 1; + return hash; + } + + public override string ToString() { + return pb::JsonFormatter.ToDiagnosticString(this); + } + + public void WriteTo(pb::CodedOutputStream output) { + } + + public int CalculateSize() { + int size = 0; + return size; + } + + public void MergeFrom(NestedTwice other) { + if (other == null) { + return; + } + } + + public void MergeFrom(pb::CodedInputStream input) { + uint tag; + while ((tag = input.ReadTag()) != 0) { + switch(tag) { + default: + input.SkipLastField(); + break; + } + } + } + + } + + } + #endregion + + } + + } + #endregion + + } + + [global::System.Diagnostics.DebuggerNonUserCodeAttribute()] + public sealed partial class NegativeEnumMessage : pb::IMessage { + private static readonly pb::MessageParser _parser = new pb::MessageParser(() => new NegativeEnumMessage()); + public static pb::MessageParser Parser { get { return _parser; } } + + public static pbr::MessageDescriptor Descriptor { + get { return global::UnitTest.Issues.TestProtos.UnittestIssuesReflection.Descriptor.MessageTypes[1]; } + } + + pbr::MessageDescriptor pb::IMessage.Descriptor { + get { return Descriptor; } + } + + public NegativeEnumMessage() { + OnConstruction(); + } + + partial void OnConstruction(); + + public NegativeEnumMessage(NegativeEnumMessage other) : this() { + value_ = other.value_; + values_ = other.values_.Clone(); + packedValues_ = other.packedValues_.Clone(); + } + + public NegativeEnumMessage Clone() { + return new NegativeEnumMessage(this); + } + + /// Field number for the "value" field. + public const int ValueFieldNumber = 1; + private global::UnitTest.Issues.TestProtos.NegativeEnum value_ = 0; + public global::UnitTest.Issues.TestProtos.NegativeEnum Value { + get { return value_; } + set { + value_ = value; + } + } + + /// Field number for the "values" field. + public const int ValuesFieldNumber = 2; + private static readonly pb::FieldCodec _repeated_values_codec + = pb::FieldCodec.ForEnum(16, x => (int) x, x => (global::UnitTest.Issues.TestProtos.NegativeEnum) x); + private readonly pbc::RepeatedField values_ = new pbc::RepeatedField(); + public pbc::RepeatedField Values { + get { return values_; } + } + + /// Field number for the "packed_values" field. 
+ public const int PackedValuesFieldNumber = 3; + private static readonly pb::FieldCodec _repeated_packedValues_codec + = pb::FieldCodec.ForEnum(26, x => (int) x, x => (global::UnitTest.Issues.TestProtos.NegativeEnum) x); + private readonly pbc::RepeatedField packedValues_ = new pbc::RepeatedField(); + public pbc::RepeatedField PackedValues { + get { return packedValues_; } + } + + public override bool Equals(object other) { + return Equals(other as NegativeEnumMessage); + } + + public bool Equals(NegativeEnumMessage other) { + if (ReferenceEquals(other, null)) { + return false; + } + if (ReferenceEquals(other, this)) { + return true; + } + if (Value != other.Value) return false; + if(!values_.Equals(other.values_)) return false; + if(!packedValues_.Equals(other.packedValues_)) return false; + return true; + } + + public override int GetHashCode() { + int hash = 1; + if (Value != 0) hash ^= Value.GetHashCode(); + hash ^= values_.GetHashCode(); + hash ^= packedValues_.GetHashCode(); + return hash; + } + + public override string ToString() { + return pb::JsonFormatter.ToDiagnosticString(this); + } + + public void WriteTo(pb::CodedOutputStream output) { + if (Value != 0) { + output.WriteRawTag(8); + output.WriteEnum((int) Value); + } + values_.WriteTo(output, _repeated_values_codec); + packedValues_.WriteTo(output, _repeated_packedValues_codec); + } + + public int CalculateSize() { + int size = 0; + if (Value != 0) { + size += 1 + pb::CodedOutputStream.ComputeEnumSize((int) Value); + } + size += values_.CalculateSize(_repeated_values_codec); + size += packedValues_.CalculateSize(_repeated_packedValues_codec); + return size; + } + + public void MergeFrom(NegativeEnumMessage other) { + if (other == null) { + return; + } + if (other.Value != 0) { + Value = other.Value; + } + values_.Add(other.values_); + packedValues_.Add(other.packedValues_); + } + + public void MergeFrom(pb::CodedInputStream input) { + uint tag; + while ((tag = input.ReadTag()) != 0) { + switch(tag) { + default: + input.SkipLastField(); + break; + case 8: { + value_ = (global::UnitTest.Issues.TestProtos.NegativeEnum) input.ReadEnum(); + break; + } + case 18: + case 16: { + values_.AddEntriesFrom(input, _repeated_values_codec); + break; + } + case 26: + case 24: { + packedValues_.AddEntriesFrom(input, _repeated_packedValues_codec); + break; + } + } + } + } + + } + + [global::System.Diagnostics.DebuggerNonUserCodeAttribute()] + public sealed partial class DeprecatedChild : pb::IMessage { + private static readonly pb::MessageParser _parser = new pb::MessageParser(() => new DeprecatedChild()); + public static pb::MessageParser Parser { get { return _parser; } } + + public static pbr::MessageDescriptor Descriptor { + get { return global::UnitTest.Issues.TestProtos.UnittestIssuesReflection.Descriptor.MessageTypes[2]; } + } + + pbr::MessageDescriptor pb::IMessage.Descriptor { + get { return Descriptor; } + } + + public DeprecatedChild() { + OnConstruction(); + } + + partial void OnConstruction(); + + public DeprecatedChild(DeprecatedChild other) : this() { + } + + public DeprecatedChild Clone() { + return new DeprecatedChild(this); + } + + public override bool Equals(object other) { + return Equals(other as DeprecatedChild); + } + + public bool Equals(DeprecatedChild other) { + if (ReferenceEquals(other, null)) { + return false; + } + if (ReferenceEquals(other, this)) { + return true; + } + return true; + } + + public override int GetHashCode() { + int hash = 1; + return hash; + } + + public override string ToString() { + return 
pb::JsonFormatter.ToDiagnosticString(this); + } + + public void WriteTo(pb::CodedOutputStream output) { + } + + public int CalculateSize() { + int size = 0; + return size; + } + + public void MergeFrom(DeprecatedChild other) { + if (other == null) { + return; + } + } + + public void MergeFrom(pb::CodedInputStream input) { + uint tag; + while ((tag = input.ReadTag()) != 0) { + switch(tag) { + default: + input.SkipLastField(); + break; + } + } + } + + } + + [global::System.Diagnostics.DebuggerNonUserCodeAttribute()] + public sealed partial class DeprecatedFieldsMessage : pb::IMessage { + private static readonly pb::MessageParser _parser = new pb::MessageParser(() => new DeprecatedFieldsMessage()); + public static pb::MessageParser Parser { get { return _parser; } } + + public static pbr::MessageDescriptor Descriptor { + get { return global::UnitTest.Issues.TestProtos.UnittestIssuesReflection.Descriptor.MessageTypes[3]; } + } + + pbr::MessageDescriptor pb::IMessage.Descriptor { + get { return Descriptor; } + } + + public DeprecatedFieldsMessage() { + OnConstruction(); + } + + partial void OnConstruction(); + + public DeprecatedFieldsMessage(DeprecatedFieldsMessage other) : this() { + primitiveValue_ = other.primitiveValue_; + primitiveArray_ = other.primitiveArray_.Clone(); + MessageValue = other.messageValue_ != null ? other.MessageValue.Clone() : null; + messageArray_ = other.messageArray_.Clone(); + enumValue_ = other.enumValue_; + enumArray_ = other.enumArray_.Clone(); + } + + public DeprecatedFieldsMessage Clone() { + return new DeprecatedFieldsMessage(this); + } + + /// Field number for the "PrimitiveValue" field. + public const int PrimitiveValueFieldNumber = 1; + private int primitiveValue_; + [global::System.ObsoleteAttribute()] + public int PrimitiveValue { + get { return primitiveValue_; } + set { + primitiveValue_ = value; + } + } + + /// Field number for the "PrimitiveArray" field. + public const int PrimitiveArrayFieldNumber = 2; + private static readonly pb::FieldCodec _repeated_primitiveArray_codec + = pb::FieldCodec.ForInt32(18); + private readonly pbc::RepeatedField primitiveArray_ = new pbc::RepeatedField(); + [global::System.ObsoleteAttribute()] + public pbc::RepeatedField PrimitiveArray { + get { return primitiveArray_; } + } + + /// Field number for the "MessageValue" field. + public const int MessageValueFieldNumber = 3; + private global::UnitTest.Issues.TestProtos.DeprecatedChild messageValue_; + [global::System.ObsoleteAttribute()] + public global::UnitTest.Issues.TestProtos.DeprecatedChild MessageValue { + get { return messageValue_; } + set { + messageValue_ = value; + } + } + + /// Field number for the "MessageArray" field. + public const int MessageArrayFieldNumber = 4; + private static readonly pb::FieldCodec _repeated_messageArray_codec + = pb::FieldCodec.ForMessage(34, global::UnitTest.Issues.TestProtos.DeprecatedChild.Parser); + private readonly pbc::RepeatedField messageArray_ = new pbc::RepeatedField(); + [global::System.ObsoleteAttribute()] + public pbc::RepeatedField MessageArray { + get { return messageArray_; } + } + + /// Field number for the "EnumValue" field. + public const int EnumValueFieldNumber = 5; + private global::UnitTest.Issues.TestProtos.DeprecatedEnum enumValue_ = 0; + [global::System.ObsoleteAttribute()] + public global::UnitTest.Issues.TestProtos.DeprecatedEnum EnumValue { + get { return enumValue_; } + set { + enumValue_ = value; + } + } + + /// Field number for the "EnumArray" field. 
+ public const int EnumArrayFieldNumber = 6; + private static readonly pb::FieldCodec _repeated_enumArray_codec + = pb::FieldCodec.ForEnum(50, x => (int) x, x => (global::UnitTest.Issues.TestProtos.DeprecatedEnum) x); + private readonly pbc::RepeatedField enumArray_ = new pbc::RepeatedField(); + [global::System.ObsoleteAttribute()] + public pbc::RepeatedField EnumArray { + get { return enumArray_; } + } + + public override bool Equals(object other) { + return Equals(other as DeprecatedFieldsMessage); + } + + public bool Equals(DeprecatedFieldsMessage other) { + if (ReferenceEquals(other, null)) { + return false; + } + if (ReferenceEquals(other, this)) { + return true; + } + if (PrimitiveValue != other.PrimitiveValue) return false; + if(!primitiveArray_.Equals(other.primitiveArray_)) return false; + if (!object.Equals(MessageValue, other.MessageValue)) return false; + if(!messageArray_.Equals(other.messageArray_)) return false; + if (EnumValue != other.EnumValue) return false; + if(!enumArray_.Equals(other.enumArray_)) return false; + return true; + } + + public override int GetHashCode() { + int hash = 1; + if (PrimitiveValue != 0) hash ^= PrimitiveValue.GetHashCode(); + hash ^= primitiveArray_.GetHashCode(); + if (messageValue_ != null) hash ^= MessageValue.GetHashCode(); + hash ^= messageArray_.GetHashCode(); + if (EnumValue != 0) hash ^= EnumValue.GetHashCode(); + hash ^= enumArray_.GetHashCode(); + return hash; + } + + public override string ToString() { + return pb::JsonFormatter.ToDiagnosticString(this); + } + + public void WriteTo(pb::CodedOutputStream output) { + if (PrimitiveValue != 0) { + output.WriteRawTag(8); + output.WriteInt32(PrimitiveValue); + } + primitiveArray_.WriteTo(output, _repeated_primitiveArray_codec); + if (messageValue_ != null) { + output.WriteRawTag(26); + output.WriteMessage(MessageValue); + } + messageArray_.WriteTo(output, _repeated_messageArray_codec); + if (EnumValue != 0) { + output.WriteRawTag(40); + output.WriteEnum((int) EnumValue); + } + enumArray_.WriteTo(output, _repeated_enumArray_codec); + } + + public int CalculateSize() { + int size = 0; + if (PrimitiveValue != 0) { + size += 1 + pb::CodedOutputStream.ComputeInt32Size(PrimitiveValue); + } + size += primitiveArray_.CalculateSize(_repeated_primitiveArray_codec); + if (messageValue_ != null) { + size += 1 + pb::CodedOutputStream.ComputeMessageSize(MessageValue); + } + size += messageArray_.CalculateSize(_repeated_messageArray_codec); + if (EnumValue != 0) { + size += 1 + pb::CodedOutputStream.ComputeEnumSize((int) EnumValue); + } + size += enumArray_.CalculateSize(_repeated_enumArray_codec); + return size; + } + + public void MergeFrom(DeprecatedFieldsMessage other) { + if (other == null) { + return; + } + if (other.PrimitiveValue != 0) { + PrimitiveValue = other.PrimitiveValue; + } + primitiveArray_.Add(other.primitiveArray_); + if (other.messageValue_ != null) { + if (messageValue_ == null) { + messageValue_ = new global::UnitTest.Issues.TestProtos.DeprecatedChild(); + } + MessageValue.MergeFrom(other.MessageValue); + } + messageArray_.Add(other.messageArray_); + if (other.EnumValue != 0) { + EnumValue = other.EnumValue; + } + enumArray_.Add(other.enumArray_); + } + + public void MergeFrom(pb::CodedInputStream input) { + uint tag; + while ((tag = input.ReadTag()) != 0) { + switch(tag) { + default: + input.SkipLastField(); + break; + case 8: { + PrimitiveValue = input.ReadInt32(); + break; + } + case 18: + case 16: { + primitiveArray_.AddEntriesFrom(input, _repeated_primitiveArray_codec); + 
break; + } + case 26: { + if (messageValue_ == null) { + messageValue_ = new global::UnitTest.Issues.TestProtos.DeprecatedChild(); + } + input.ReadMessage(messageValue_); + break; + } + case 34: { + messageArray_.AddEntriesFrom(input, _repeated_messageArray_codec); + break; + } + case 40: { + enumValue_ = (global::UnitTest.Issues.TestProtos.DeprecatedEnum) input.ReadEnum(); + break; + } + case 50: + case 48: { + enumArray_.AddEntriesFrom(input, _repeated_enumArray_codec); + break; + } + } + } + } + + } + + /// + /// Issue 45: http://code.google.com/p/protobuf-csharp-port/issues/detail?id=45 + /// + [global::System.Diagnostics.DebuggerNonUserCodeAttribute()] + public sealed partial class ItemField : pb::IMessage { + private static readonly pb::MessageParser _parser = new pb::MessageParser(() => new ItemField()); + public static pb::MessageParser Parser { get { return _parser; } } + + public static pbr::MessageDescriptor Descriptor { + get { return global::UnitTest.Issues.TestProtos.UnittestIssuesReflection.Descriptor.MessageTypes[4]; } + } + + pbr::MessageDescriptor pb::IMessage.Descriptor { + get { return Descriptor; } + } + + public ItemField() { + OnConstruction(); + } + + partial void OnConstruction(); + + public ItemField(ItemField other) : this() { + item_ = other.item_; + } + + public ItemField Clone() { + return new ItemField(this); + } + + /// Field number for the "item" field. + public const int ItemFieldNumber = 1; + private int item_; + public int Item { + get { return item_; } + set { + item_ = value; + } + } + + public override bool Equals(object other) { + return Equals(other as ItemField); + } + + public bool Equals(ItemField other) { + if (ReferenceEquals(other, null)) { + return false; + } + if (ReferenceEquals(other, this)) { + return true; + } + if (Item != other.Item) return false; + return true; + } + + public override int GetHashCode() { + int hash = 1; + if (Item != 0) hash ^= Item.GetHashCode(); + return hash; + } + + public override string ToString() { + return pb::JsonFormatter.ToDiagnosticString(this); + } + + public void WriteTo(pb::CodedOutputStream output) { + if (Item != 0) { + output.WriteRawTag(8); + output.WriteInt32(Item); + } + } + + public int CalculateSize() { + int size = 0; + if (Item != 0) { + size += 1 + pb::CodedOutputStream.ComputeInt32Size(Item); + } + return size; + } + + public void MergeFrom(ItemField other) { + if (other == null) { + return; + } + if (other.Item != 0) { + Item = other.Item; + } + } + + public void MergeFrom(pb::CodedInputStream input) { + uint tag; + while ((tag = input.ReadTag()) != 0) { + switch(tag) { + default: + input.SkipLastField(); + break; + case 8: { + Item = input.ReadInt32(); + break; + } + } + } + } + + } + + [global::System.Diagnostics.DebuggerNonUserCodeAttribute()] + public sealed partial class ReservedNames : pb::IMessage { + private static readonly pb::MessageParser _parser = new pb::MessageParser(() => new ReservedNames()); + public static pb::MessageParser Parser { get { return _parser; } } + + public static pbr::MessageDescriptor Descriptor { + get { return global::UnitTest.Issues.TestProtos.UnittestIssuesReflection.Descriptor.MessageTypes[5]; } + } + + pbr::MessageDescriptor pb::IMessage.Descriptor { + get { return Descriptor; } + } + + public ReservedNames() { + OnConstruction(); + } + + partial void OnConstruction(); + + public ReservedNames(ReservedNames other) : this() { + types_ = other.types_; + descriptor_ = other.descriptor_; + } + + public ReservedNames Clone() { + return new 
ReservedNames(this); + } + + /// Field number for the "types" field. + public const int Types_FieldNumber = 1; + private int types_; + public int Types_ { + get { return types_; } + set { + types_ = value; + } + } + + /// Field number for the "descriptor" field. + public const int Descriptor_FieldNumber = 2; + private int descriptor_; + public int Descriptor_ { + get { return descriptor_; } + set { + descriptor_ = value; + } + } + + public override bool Equals(object other) { + return Equals(other as ReservedNames); + } + + public bool Equals(ReservedNames other) { + if (ReferenceEquals(other, null)) { + return false; + } + if (ReferenceEquals(other, this)) { + return true; + } + if (Types_ != other.Types_) return false; + if (Descriptor_ != other.Descriptor_) return false; + return true; + } + + public override int GetHashCode() { + int hash = 1; + if (Types_ != 0) hash ^= Types_.GetHashCode(); + if (Descriptor_ != 0) hash ^= Descriptor_.GetHashCode(); + return hash; + } + + public override string ToString() { + return pb::JsonFormatter.ToDiagnosticString(this); + } + + public void WriteTo(pb::CodedOutputStream output) { + if (Types_ != 0) { + output.WriteRawTag(8); + output.WriteInt32(Types_); + } + if (Descriptor_ != 0) { + output.WriteRawTag(16); + output.WriteInt32(Descriptor_); + } + } + + public int CalculateSize() { + int size = 0; + if (Types_ != 0) { + size += 1 + pb::CodedOutputStream.ComputeInt32Size(Types_); + } + if (Descriptor_ != 0) { + size += 1 + pb::CodedOutputStream.ComputeInt32Size(Descriptor_); + } + return size; + } + + public void MergeFrom(ReservedNames other) { + if (other == null) { + return; + } + if (other.Types_ != 0) { + Types_ = other.Types_; + } + if (other.Descriptor_ != 0) { + Descriptor_ = other.Descriptor_; + } + } + + public void MergeFrom(pb::CodedInputStream input) { + uint tag; + while ((tag = input.ReadTag()) != 0) { + switch(tag) { + default: + input.SkipLastField(); + break; + case 8: { + Types_ = input.ReadInt32(); + break; + } + case 16: { + Descriptor_ = input.ReadInt32(); + break; + } + } + } + } + + #region Nested types + /// Container for nested types declared in the ReservedNames message type. 
+ [global::System.Diagnostics.DebuggerNonUserCodeAttribute()] + public static partial class Types { + /// + /// Force a nested type called Types + /// + [global::System.Diagnostics.DebuggerNonUserCodeAttribute()] + public sealed partial class SomeNestedType : pb::IMessage { + private static readonly pb::MessageParser _parser = new pb::MessageParser(() => new SomeNestedType()); + public static pb::MessageParser Parser { get { return _parser; } } + + public static pbr::MessageDescriptor Descriptor { + get { return global::UnitTest.Issues.TestProtos.ReservedNames.Descriptor.NestedTypes[0]; } + } + + pbr::MessageDescriptor pb::IMessage.Descriptor { + get { return Descriptor; } + } + + public SomeNestedType() { + OnConstruction(); + } + + partial void OnConstruction(); + + public SomeNestedType(SomeNestedType other) : this() { + } + + public SomeNestedType Clone() { + return new SomeNestedType(this); + } + + public override bool Equals(object other) { + return Equals(other as SomeNestedType); + } + + public bool Equals(SomeNestedType other) { + if (ReferenceEquals(other, null)) { + return false; + } + if (ReferenceEquals(other, this)) { + return true; + } + return true; + } + + public override int GetHashCode() { + int hash = 1; + return hash; + } + + public override string ToString() { + return pb::JsonFormatter.ToDiagnosticString(this); + } + + public void WriteTo(pb::CodedOutputStream output) { + } + + public int CalculateSize() { + int size = 0; + return size; + } + + public void MergeFrom(SomeNestedType other) { + if (other == null) { + return; + } + } + + public void MergeFrom(pb::CodedInputStream input) { + uint tag; + while ((tag = input.ReadTag()) != 0) { + switch(tag) { + default: + input.SkipLastField(); + break; + } + } + } + + } + + } + #endregion + + } + + /// + /// These fields are deliberately not declared in numeric + /// order, and the oneof fields aren't contiguous either. + /// This allows for reasonably robust tests of JSON output + /// ordering. + /// TestFieldOrderings in unittest_proto3.proto is similar, + /// but doesn't include oneofs. + /// TODO: Consider adding oneofs to TestFieldOrderings, although + /// that will require fixing other tests in multiple platforms. + /// Alternatively, consider just adding this to + /// unittest_proto3.proto if multiple platforms want it. 
+ /// + [global::System.Diagnostics.DebuggerNonUserCodeAttribute()] + public sealed partial class TestJsonFieldOrdering : pb::IMessage { + private static readonly pb::MessageParser _parser = new pb::MessageParser(() => new TestJsonFieldOrdering()); + public static pb::MessageParser Parser { get { return _parser; } } + + public static pbr::MessageDescriptor Descriptor { + get { return global::UnitTest.Issues.TestProtos.UnittestIssuesReflection.Descriptor.MessageTypes[6]; } + } + + pbr::MessageDescriptor pb::IMessage.Descriptor { + get { return Descriptor; } + } + + public TestJsonFieldOrdering() { + OnConstruction(); + } + + partial void OnConstruction(); + + public TestJsonFieldOrdering(TestJsonFieldOrdering other) : this() { + plainInt32_ = other.plainInt32_; + plainString_ = other.plainString_; + switch (other.O1Case) { + case O1OneofCase.O1String: + O1String = other.O1String; + break; + case O1OneofCase.O1Int32: + O1Int32 = other.O1Int32; + break; + } + + switch (other.O2Case) { + case O2OneofCase.O2Int32: + O2Int32 = other.O2Int32; + break; + case O2OneofCase.O2String: + O2String = other.O2String; + break; + } + + } + + public TestJsonFieldOrdering Clone() { + return new TestJsonFieldOrdering(this); + } + + /// Field number for the "plain_int32" field. + public const int PlainInt32FieldNumber = 4; + private int plainInt32_; + public int PlainInt32 { + get { return plainInt32_; } + set { + plainInt32_ = value; + } + } + + /// Field number for the "o1_string" field. + public const int O1StringFieldNumber = 2; + public string O1String { + get { return o1Case_ == O1OneofCase.O1String ? (string) o1_ : ""; } + set { + o1_ = pb::ProtoPreconditions.CheckNotNull(value, "value"); + o1Case_ = O1OneofCase.O1String; + } + } + + /// Field number for the "o1_int32" field. + public const int O1Int32FieldNumber = 5; + public int O1Int32 { + get { return o1Case_ == O1OneofCase.O1Int32 ? (int) o1_ : 0; } + set { + o1_ = value; + o1Case_ = O1OneofCase.O1Int32; + } + } + + /// Field number for the "plain_string" field. + public const int PlainStringFieldNumber = 1; + private string plainString_ = ""; + public string PlainString { + get { return plainString_; } + set { + plainString_ = pb::ProtoPreconditions.CheckNotNull(value, "value"); + } + } + + /// Field number for the "o2_int32" field. + public const int O2Int32FieldNumber = 6; + public int O2Int32 { + get { return o2Case_ == O2OneofCase.O2Int32 ? (int) o2_ : 0; } + set { + o2_ = value; + o2Case_ = O2OneofCase.O2Int32; + } + } + + /// Field number for the "o2_string" field. + public const int O2StringFieldNumber = 3; + public string O2String { + get { return o2Case_ == O2OneofCase.O2String ? (string) o2_ : ""; } + set { + o2_ = pb::ProtoPreconditions.CheckNotNull(value, "value"); + o2Case_ = O2OneofCase.O2String; + } + } + + private object o1_; + /// Enum of possible cases for the "o1" oneof. + public enum O1OneofCase { + None = 0, + O1String = 2, + O1Int32 = 5, + } + private O1OneofCase o1Case_ = O1OneofCase.None; + public O1OneofCase O1Case { + get { return o1Case_; } + } + + public void ClearO1() { + o1Case_ = O1OneofCase.None; + o1_ = null; + } + + private object o2_; + /// Enum of possible cases for the "o2" oneof. 
+ public enum O2OneofCase { + None = 0, + O2Int32 = 6, + O2String = 3, + } + private O2OneofCase o2Case_ = O2OneofCase.None; + public O2OneofCase O2Case { + get { return o2Case_; } + } + + public void ClearO2() { + o2Case_ = O2OneofCase.None; + o2_ = null; + } + + public override bool Equals(object other) { + return Equals(other as TestJsonFieldOrdering); + } + + public bool Equals(TestJsonFieldOrdering other) { + if (ReferenceEquals(other, null)) { + return false; + } + if (ReferenceEquals(other, this)) { + return true; + } + if (PlainInt32 != other.PlainInt32) return false; + if (O1String != other.O1String) return false; + if (O1Int32 != other.O1Int32) return false; + if (PlainString != other.PlainString) return false; + if (O2Int32 != other.O2Int32) return false; + if (O2String != other.O2String) return false; + if (O1Case != other.O1Case) return false; + if (O2Case != other.O2Case) return false; + return true; + } + + public override int GetHashCode() { + int hash = 1; + if (PlainInt32 != 0) hash ^= PlainInt32.GetHashCode(); + if (o1Case_ == O1OneofCase.O1String) hash ^= O1String.GetHashCode(); + if (o1Case_ == O1OneofCase.O1Int32) hash ^= O1Int32.GetHashCode(); + if (PlainString.Length != 0) hash ^= PlainString.GetHashCode(); + if (o2Case_ == O2OneofCase.O2Int32) hash ^= O2Int32.GetHashCode(); + if (o2Case_ == O2OneofCase.O2String) hash ^= O2String.GetHashCode(); + hash ^= (int) o1Case_; + hash ^= (int) o2Case_; + return hash; + } + + public override string ToString() { + return pb::JsonFormatter.ToDiagnosticString(this); + } + + public void WriteTo(pb::CodedOutputStream output) { + if (PlainString.Length != 0) { + output.WriteRawTag(10); + output.WriteString(PlainString); + } + if (o1Case_ == O1OneofCase.O1String) { + output.WriteRawTag(18); + output.WriteString(O1String); + } + if (o2Case_ == O2OneofCase.O2String) { + output.WriteRawTag(26); + output.WriteString(O2String); + } + if (PlainInt32 != 0) { + output.WriteRawTag(32); + output.WriteInt32(PlainInt32); + } + if (o1Case_ == O1OneofCase.O1Int32) { + output.WriteRawTag(40); + output.WriteInt32(O1Int32); + } + if (o2Case_ == O2OneofCase.O2Int32) { + output.WriteRawTag(48); + output.WriteInt32(O2Int32); + } + } + + public int CalculateSize() { + int size = 0; + if (PlainInt32 != 0) { + size += 1 + pb::CodedOutputStream.ComputeInt32Size(PlainInt32); + } + if (o1Case_ == O1OneofCase.O1String) { + size += 1 + pb::CodedOutputStream.ComputeStringSize(O1String); + } + if (o1Case_ == O1OneofCase.O1Int32) { + size += 1 + pb::CodedOutputStream.ComputeInt32Size(O1Int32); + } + if (PlainString.Length != 0) { + size += 1 + pb::CodedOutputStream.ComputeStringSize(PlainString); + } + if (o2Case_ == O2OneofCase.O2Int32) { + size += 1 + pb::CodedOutputStream.ComputeInt32Size(O2Int32); + } + if (o2Case_ == O2OneofCase.O2String) { + size += 1 + pb::CodedOutputStream.ComputeStringSize(O2String); + } + return size; + } + + public void MergeFrom(TestJsonFieldOrdering other) { + if (other == null) { + return; + } + if (other.PlainInt32 != 0) { + PlainInt32 = other.PlainInt32; + } + if (other.PlainString.Length != 0) { + PlainString = other.PlainString; + } + switch (other.O1Case) { + case O1OneofCase.O1String: + O1String = other.O1String; + break; + case O1OneofCase.O1Int32: + O1Int32 = other.O1Int32; + break; + } + + switch (other.O2Case) { + case O2OneofCase.O2Int32: + O2Int32 = other.O2Int32; + break; + case O2OneofCase.O2String: + O2String = other.O2String; + break; + } + + } + + public void MergeFrom(pb::CodedInputStream input) { + uint tag; + 
while ((tag = input.ReadTag()) != 0) { + switch(tag) { + default: + input.SkipLastField(); + break; + case 10: { + PlainString = input.ReadString(); + break; + } + case 18: { + O1String = input.ReadString(); + break; + } + case 26: { + O2String = input.ReadString(); + break; + } + case 32: { + PlainInt32 = input.ReadInt32(); + break; + } + case 40: { + O1Int32 = input.ReadInt32(); + break; + } + case 48: { + O2Int32 = input.ReadInt32(); + break; + } + } + } + } + + } + + [global::System.Diagnostics.DebuggerNonUserCodeAttribute()] + public sealed partial class TestJsonName : pb::IMessage { + private static readonly pb::MessageParser _parser = new pb::MessageParser(() => new TestJsonName()); + public static pb::MessageParser Parser { get { return _parser; } } + + public static pbr::MessageDescriptor Descriptor { + get { return global::UnitTest.Issues.TestProtos.UnittestIssuesReflection.Descriptor.MessageTypes[7]; } + } + + pbr::MessageDescriptor pb::IMessage.Descriptor { + get { return Descriptor; } + } + + public TestJsonName() { + OnConstruction(); + } + + partial void OnConstruction(); + + public TestJsonName(TestJsonName other) : this() { + name_ = other.name_; + description_ = other.description_; + guid_ = other.guid_; + } + + public TestJsonName Clone() { + return new TestJsonName(this); + } + + /// Field number for the "name" field. + public const int NameFieldNumber = 1; + private string name_ = ""; + /// + /// Message for testing the effects for of the json_name option + /// + public string Name { + get { return name_; } + set { + name_ = pb::ProtoPreconditions.CheckNotNull(value, "value"); + } + } + + /// Field number for the "description" field. + public const int DescriptionFieldNumber = 2; + private string description_ = ""; + public string Description { + get { return description_; } + set { + description_ = pb::ProtoPreconditions.CheckNotNull(value, "value"); + } + } + + /// Field number for the "guid" field. 
+ public const int GuidFieldNumber = 3; + private string guid_ = ""; + public string Guid { + get { return guid_; } + set { + guid_ = pb::ProtoPreconditions.CheckNotNull(value, "value"); + } + } + + public override bool Equals(object other) { + return Equals(other as TestJsonName); + } + + public bool Equals(TestJsonName other) { + if (ReferenceEquals(other, null)) { + return false; + } + if (ReferenceEquals(other, this)) { + return true; + } + if (Name != other.Name) return false; + if (Description != other.Description) return false; + if (Guid != other.Guid) return false; + return true; + } + + public override int GetHashCode() { + int hash = 1; + if (Name.Length != 0) hash ^= Name.GetHashCode(); + if (Description.Length != 0) hash ^= Description.GetHashCode(); + if (Guid.Length != 0) hash ^= Guid.GetHashCode(); + return hash; + } + + public override string ToString() { + return pb::JsonFormatter.ToDiagnosticString(this); + } + + public void WriteTo(pb::CodedOutputStream output) { + if (Name.Length != 0) { + output.WriteRawTag(10); + output.WriteString(Name); + } + if (Description.Length != 0) { + output.WriteRawTag(18); + output.WriteString(Description); + } + if (Guid.Length != 0) { + output.WriteRawTag(26); + output.WriteString(Guid); + } + } + + public int CalculateSize() { + int size = 0; + if (Name.Length != 0) { + size += 1 + pb::CodedOutputStream.ComputeStringSize(Name); + } + if (Description.Length != 0) { + size += 1 + pb::CodedOutputStream.ComputeStringSize(Description); + } + if (Guid.Length != 0) { + size += 1 + pb::CodedOutputStream.ComputeStringSize(Guid); + } + return size; + } + + public void MergeFrom(TestJsonName other) { + if (other == null) { + return; + } + if (other.Name.Length != 0) { + Name = other.Name; + } + if (other.Description.Length != 0) { + Description = other.Description; + } + if (other.Guid.Length != 0) { + Guid = other.Guid; + } + } + + public void MergeFrom(pb::CodedInputStream input) { + uint tag; + while ((tag = input.ReadTag()) != 0) { + switch(tag) { + default: + input.SkipLastField(); + break; + case 10: { + Name = input.ReadString(); + break; + } + case 18: { + Description = input.ReadString(); + break; + } + case 26: { + Guid = input.ReadString(); + break; + } + } + } + } + + } + + #endregion + +} + +#endregion Designer generated code diff --git a/packager/third_party/protobuf/csharp/src/Google.Protobuf.Test/TestProtos/UnittestProto3.cs b/packager/third_party/protobuf/csharp/src/Google.Protobuf.Test/TestProtos/UnittestProto3.cs new file mode 100644 index 0000000000..b8d159bb5a --- /dev/null +++ b/packager/third_party/protobuf/csharp/src/Google.Protobuf.Test/TestProtos/UnittestProto3.cs @@ -0,0 +1,6064 @@ +// Generated by the protocol buffer compiler. DO NOT EDIT! 
+// source: google/protobuf/unittest_proto3.proto +#pragma warning disable 1591, 0612, 3021 +#region Designer generated code + +using pb = global::Google.Protobuf; +using pbc = global::Google.Protobuf.Collections; +using pbr = global::Google.Protobuf.Reflection; +using scg = global::System.Collections.Generic; +namespace Google.Protobuf.TestProtos { + + /// Holder for reflection information generated from google/protobuf/unittest_proto3.proto + [global::System.Diagnostics.DebuggerNonUserCodeAttribute()] + public static partial class UnittestProto3Reflection { + + #region Descriptor + /// File descriptor for google/protobuf/unittest_proto3.proto + public static pbr::FileDescriptor Descriptor { + get { return descriptor; } + } + private static pbr::FileDescriptor descriptor; + + static UnittestProto3Reflection() { + byte[] descriptorData = global::System.Convert.FromBase64String( + string.Concat( + "CiVnb29nbGUvcHJvdG9idWYvdW5pdHRlc3RfcHJvdG8zLnByb3RvEhFwcm90", + "b2J1Zl91bml0dGVzdBosZ29vZ2xlL3Byb3RvYnVmL3VuaXR0ZXN0X2ltcG9y", + "dF9wcm90bzMucHJvdG8i8A8KDFRlc3RBbGxUeXBlcxIUCgxzaW5nbGVfaW50", + "MzIYASABKAUSFAoMc2luZ2xlX2ludDY0GAIgASgDEhUKDXNpbmdsZV91aW50", + "MzIYAyABKA0SFQoNc2luZ2xlX3VpbnQ2NBgEIAEoBBIVCg1zaW5nbGVfc2lu", + "dDMyGAUgASgREhUKDXNpbmdsZV9zaW50NjQYBiABKBISFgoOc2luZ2xlX2Zp", + "eGVkMzIYByABKAcSFgoOc2luZ2xlX2ZpeGVkNjQYCCABKAYSFwoPc2luZ2xl", + "X3NmaXhlZDMyGAkgASgPEhcKD3NpbmdsZV9zZml4ZWQ2NBgKIAEoEBIUCgxz", + "aW5nbGVfZmxvYXQYCyABKAISFQoNc2luZ2xlX2RvdWJsZRgMIAEoARITCgtz", + "aW5nbGVfYm9vbBgNIAEoCBIVCg1zaW5nbGVfc3RyaW5nGA4gASgJEhQKDHNp", + "bmdsZV9ieXRlcxgPIAEoDBJMChVzaW5nbGVfbmVzdGVkX21lc3NhZ2UYEiAB", + "KAsyLS5wcm90b2J1Zl91bml0dGVzdC5UZXN0QWxsVHlwZXMuTmVzdGVkTWVz", + "c2FnZRJBChZzaW5nbGVfZm9yZWlnbl9tZXNzYWdlGBMgASgLMiEucHJvdG9i", + "dWZfdW5pdHRlc3QuRm9yZWlnbk1lc3NhZ2USRgoVc2luZ2xlX2ltcG9ydF9t", + "ZXNzYWdlGBQgASgLMicucHJvdG9idWZfdW5pdHRlc3RfaW1wb3J0LkltcG9y", + "dE1lc3NhZ2USRgoSc2luZ2xlX25lc3RlZF9lbnVtGBUgASgOMioucHJvdG9i", + "dWZfdW5pdHRlc3QuVGVzdEFsbFR5cGVzLk5lc3RlZEVudW0SOwoTc2luZ2xl", + "X2ZvcmVpZ25fZW51bRgWIAEoDjIeLnByb3RvYnVmX3VuaXR0ZXN0LkZvcmVp", + "Z25FbnVtEkAKEnNpbmdsZV9pbXBvcnRfZW51bRgXIAEoDjIkLnByb3RvYnVm", + "X3VuaXR0ZXN0X2ltcG9ydC5JbXBvcnRFbnVtElMKHHNpbmdsZV9wdWJsaWNf", + "aW1wb3J0X21lc3NhZ2UYGiABKAsyLS5wcm90b2J1Zl91bml0dGVzdF9pbXBv", + "cnQuUHVibGljSW1wb3J0TWVzc2FnZRIWCg5yZXBlYXRlZF9pbnQzMhgfIAMo", + "BRIWCg5yZXBlYXRlZF9pbnQ2NBggIAMoAxIXCg9yZXBlYXRlZF91aW50MzIY", + "ISADKA0SFwoPcmVwZWF0ZWRfdWludDY0GCIgAygEEhcKD3JlcGVhdGVkX3Np", + "bnQzMhgjIAMoERIXCg9yZXBlYXRlZF9zaW50NjQYJCADKBISGAoQcmVwZWF0", + "ZWRfZml4ZWQzMhglIAMoBxIYChByZXBlYXRlZF9maXhlZDY0GCYgAygGEhkK", + "EXJlcGVhdGVkX3NmaXhlZDMyGCcgAygPEhkKEXJlcGVhdGVkX3NmaXhlZDY0", + "GCggAygQEhYKDnJlcGVhdGVkX2Zsb2F0GCkgAygCEhcKD3JlcGVhdGVkX2Rv", + "dWJsZRgqIAMoARIVCg1yZXBlYXRlZF9ib29sGCsgAygIEhcKD3JlcGVhdGVk", + "X3N0cmluZxgsIAMoCRIWCg5yZXBlYXRlZF9ieXRlcxgtIAMoDBJOChdyZXBl", + "YXRlZF9uZXN0ZWRfbWVzc2FnZRgwIAMoCzItLnByb3RvYnVmX3VuaXR0ZXN0", + "LlRlc3RBbGxUeXBlcy5OZXN0ZWRNZXNzYWdlEkMKGHJlcGVhdGVkX2ZvcmVp", + "Z25fbWVzc2FnZRgxIAMoCzIhLnByb3RvYnVmX3VuaXR0ZXN0LkZvcmVpZ25N", + "ZXNzYWdlEkgKF3JlcGVhdGVkX2ltcG9ydF9tZXNzYWdlGDIgAygLMicucHJv", + "dG9idWZfdW5pdHRlc3RfaW1wb3J0LkltcG9ydE1lc3NhZ2USSAoUcmVwZWF0", + "ZWRfbmVzdGVkX2VudW0YMyADKA4yKi5wcm90b2J1Zl91bml0dGVzdC5UZXN0", + "QWxsVHlwZXMuTmVzdGVkRW51bRI9ChVyZXBlYXRlZF9mb3JlaWduX2VudW0Y", + "NCADKA4yHi5wcm90b2J1Zl91bml0dGVzdC5Gb3JlaWduRW51bRJCChRyZXBl", + "YXRlZF9pbXBvcnRfZW51bRg1IAMoDjIkLnByb3RvYnVmX3VuaXR0ZXN0X2lt", + "cG9ydC5JbXBvcnRFbnVtElUKHnJlcGVhdGVkX3B1YmxpY19pbXBvcnRfbWVz", + 
"c2FnZRg2IAMoCzItLnByb3RvYnVmX3VuaXR0ZXN0X2ltcG9ydC5QdWJsaWNJ", + "bXBvcnRNZXNzYWdlEhYKDG9uZW9mX3VpbnQzMhhvIAEoDUgAEk0KFG9uZW9m", + "X25lc3RlZF9tZXNzYWdlGHAgASgLMi0ucHJvdG9idWZfdW5pdHRlc3QuVGVz", + "dEFsbFR5cGVzLk5lc3RlZE1lc3NhZ2VIABIWCgxvbmVvZl9zdHJpbmcYcSAB", + "KAlIABIVCgtvbmVvZl9ieXRlcxhyIAEoDEgAGhsKDU5lc3RlZE1lc3NhZ2US", + "CgoCYmIYASABKAUiVgoKTmVzdGVkRW51bRIbChdORVNURURfRU5VTV9VTlNQ", + "RUNJRklFRBAAEgcKA0ZPTxABEgcKA0JBUhACEgcKA0JBWhADEhAKA05FRxD/", + "//////////8BQg0KC29uZW9mX2ZpZWxkIrsBChJOZXN0ZWRUZXN0QWxsVHlw", + "ZXMSNAoFY2hpbGQYASABKAsyJS5wcm90b2J1Zl91bml0dGVzdC5OZXN0ZWRU", + "ZXN0QWxsVHlwZXMSMAoHcGF5bG9hZBgCIAEoCzIfLnByb3RvYnVmX3VuaXR0", + "ZXN0LlRlc3RBbGxUeXBlcxI9Cg5yZXBlYXRlZF9jaGlsZBgDIAMoCzIlLnBy", + "b3RvYnVmX3VuaXR0ZXN0Lk5lc3RlZFRlc3RBbGxUeXBlcyI0ChRUZXN0RGVw", + "cmVjYXRlZEZpZWxkcxIcChBkZXByZWNhdGVkX2ludDMyGAEgASgFQgIYASIb", + "Cg5Gb3JlaWduTWVzc2FnZRIJCgFjGAEgASgFIjAKElRlc3RSZXNlcnZlZEZp", + "ZWxkc0oECAIQA0oECA8QEEoECAkQDFIDYmFyUgNiYXoiWgoRVGVzdEZvcmVp", + "Z25OZXN0ZWQSRQoOZm9yZWlnbl9uZXN0ZWQYASABKAsyLS5wcm90b2J1Zl91", + "bml0dGVzdC5UZXN0QWxsVHlwZXMuTmVzdGVkTWVzc2FnZSI0ChhUZXN0UmVh", + "bGx5TGFyZ2VUYWdOdW1iZXISCQoBYRgBIAEoBRINCgJiYhj///9/IAEoBSJV", + "ChRUZXN0UmVjdXJzaXZlTWVzc2FnZRIyCgFhGAEgASgLMicucHJvdG9idWZf", + "dW5pdHRlc3QuVGVzdFJlY3Vyc2l2ZU1lc3NhZ2USCQoBaRgCIAEoBSJLChRU", + "ZXN0TXV0dWFsUmVjdXJzaW9uQRIzCgJiYhgBIAEoCzInLnByb3RvYnVmX3Vu", + "aXR0ZXN0LlRlc3RNdXR1YWxSZWN1cnNpb25CImIKFFRlc3RNdXR1YWxSZWN1", + "cnNpb25CEjIKAWEYASABKAsyJy5wcm90b2J1Zl91bml0dGVzdC5UZXN0TXV0", + "dWFsUmVjdXJzaW9uQRIWCg5vcHRpb25hbF9pbnQzMhgCIAEoBSLrAgoXVGVz", + "dENhbWVsQ2FzZUZpZWxkTmFtZXMSFgoOUHJpbWl0aXZlRmllbGQYASABKAUS", + "EwoLU3RyaW5nRmllbGQYAiABKAkSMQoJRW51bUZpZWxkGAMgASgOMh4ucHJv", + "dG9idWZfdW5pdHRlc3QuRm9yZWlnbkVudW0SNwoMTWVzc2FnZUZpZWxkGAQg", + "ASgLMiEucHJvdG9idWZfdW5pdHRlc3QuRm9yZWlnbk1lc3NhZ2USHgoWUmVw", + "ZWF0ZWRQcmltaXRpdmVGaWVsZBgHIAMoBRIbChNSZXBlYXRlZFN0cmluZ0Zp", + "ZWxkGAggAygJEjkKEVJlcGVhdGVkRW51bUZpZWxkGAkgAygOMh4ucHJvdG9i", + "dWZfdW5pdHRlc3QuRm9yZWlnbkVudW0SPwoUUmVwZWF0ZWRNZXNzYWdlRmll", + "bGQYCiADKAsyIS5wcm90b2J1Zl91bml0dGVzdC5Gb3JlaWduTWVzc2FnZSLH", + "AQoSVGVzdEZpZWxkT3JkZXJpbmdzEhEKCW15X3N0cmluZxgLIAEoCRIOCgZt", + "eV9pbnQYASABKAMSEAoIbXlfZmxvYXQYZSABKAISUwoVc2luZ2xlX25lc3Rl", + "ZF9tZXNzYWdlGMgBIAEoCzIzLnByb3RvYnVmX3VuaXR0ZXN0LlRlc3RGaWVs", + "ZE9yZGVyaW5ncy5OZXN0ZWRNZXNzYWdlGicKDU5lc3RlZE1lc3NhZ2USCgoC", + "b28YAiABKAMSCgoCYmIYASABKAUiSwoRU3BhcnNlRW51bU1lc3NhZ2USNgoL", + "c3BhcnNlX2VudW0YASABKA4yIS5wcm90b2J1Zl91bml0dGVzdC5UZXN0U3Bh", + "cnNlRW51bSIZCglPbmVTdHJpbmcSDAoEZGF0YRgBIAEoCSIaCgpNb3JlU3Ry", + "aW5nEgwKBGRhdGEYASADKAkiGAoIT25lQnl0ZXMSDAoEZGF0YRgBIAEoDCIZ", + "CglNb3JlQnl0ZXMSDAoEZGF0YRgBIAEoDCIcCgxJbnQzMk1lc3NhZ2USDAoE", + "ZGF0YRgBIAEoBSIdCg1VaW50MzJNZXNzYWdlEgwKBGRhdGEYASABKA0iHAoM", + "SW50NjRNZXNzYWdlEgwKBGRhdGEYASABKAMiHQoNVWludDY0TWVzc2FnZRIM", + "CgRkYXRhGAEgASgEIhsKC0Jvb2xNZXNzYWdlEgwKBGRhdGEYASABKAgicwoJ", + "VGVzdE9uZW9mEhEKB2Zvb19pbnQYASABKAVIABIUCgpmb29fc3RyaW5nGAIg", + "ASgJSAASNgoLZm9vX21lc3NhZ2UYAyABKAsyHy5wcm90b2J1Zl91bml0dGVz", + "dC5UZXN0QWxsVHlwZXNIAEIFCgNmb28iqgMKD1Rlc3RQYWNrZWRUeXBlcxIY", + "CgxwYWNrZWRfaW50MzIYWiADKAVCAhABEhgKDHBhY2tlZF9pbnQ2NBhbIAMo", + "A0ICEAESGQoNcGFja2VkX3VpbnQzMhhcIAMoDUICEAESGQoNcGFja2VkX3Vp", + "bnQ2NBhdIAMoBEICEAESGQoNcGFja2VkX3NpbnQzMhheIAMoEUICEAESGQoN", + "cGFja2VkX3NpbnQ2NBhfIAMoEkICEAESGgoOcGFja2VkX2ZpeGVkMzIYYCAD", + "KAdCAhABEhoKDnBhY2tlZF9maXhlZDY0GGEgAygGQgIQARIbCg9wYWNrZWRf", + "c2ZpeGVkMzIYYiADKA9CAhABEhsKD3BhY2tlZF9zZml4ZWQ2NBhjIAMoEEIC", + 
"EAESGAoMcGFja2VkX2Zsb2F0GGQgAygCQgIQARIZCg1wYWNrZWRfZG91Ymxl", + "GGUgAygBQgIQARIXCgtwYWNrZWRfYm9vbBhmIAMoCEICEAESNwoLcGFja2Vk", + "X2VudW0YZyADKA4yHi5wcm90b2J1Zl91bml0dGVzdC5Gb3JlaWduRW51bUIC", + "EAEiyAMKEVRlc3RVbnBhY2tlZFR5cGVzEhoKDnVucGFja2VkX2ludDMyGFog", + "AygFQgIQABIaCg51bnBhY2tlZF9pbnQ2NBhbIAMoA0ICEAASGwoPdW5wYWNr", + "ZWRfdWludDMyGFwgAygNQgIQABIbCg91bnBhY2tlZF91aW50NjQYXSADKARC", + "AhAAEhsKD3VucGFja2VkX3NpbnQzMhheIAMoEUICEAASGwoPdW5wYWNrZWRf", + "c2ludDY0GF8gAygSQgIQABIcChB1bnBhY2tlZF9maXhlZDMyGGAgAygHQgIQ", + "ABIcChB1bnBhY2tlZF9maXhlZDY0GGEgAygGQgIQABIdChF1bnBhY2tlZF9z", + "Zml4ZWQzMhhiIAMoD0ICEAASHQoRdW5wYWNrZWRfc2ZpeGVkNjQYYyADKBBC", + "AhAAEhoKDnVucGFja2VkX2Zsb2F0GGQgAygCQgIQABIbCg91bnBhY2tlZF9k", + "b3VibGUYZSADKAFCAhAAEhkKDXVucGFja2VkX2Jvb2wYZiADKAhCAhAAEjkK", + "DXVucGFja2VkX2VudW0YZyADKA4yHi5wcm90b2J1Zl91bml0dGVzdC5Gb3Jl", + "aWduRW51bUICEAAiwAEKI1Rlc3RSZXBlYXRlZFNjYWxhckRpZmZlcmVudFRh", + "Z1NpemVzEhgKEHJlcGVhdGVkX2ZpeGVkMzIYDCADKAcSFgoOcmVwZWF0ZWRf", + "aW50MzIYDSADKAUSGQoQcmVwZWF0ZWRfZml4ZWQ2NBj+DyADKAYSFwoOcmVw", + "ZWF0ZWRfaW50NjQY/w8gAygDEhgKDnJlcGVhdGVkX2Zsb2F0GP7/DyADKAIS", + "GQoPcmVwZWF0ZWRfdWludDY0GP//DyADKAQiKAobVGVzdENvbW1lbnRJbmpl", + "Y3Rpb25NZXNzYWdlEgkKAWEYASABKAkiDAoKRm9vUmVxdWVzdCINCgtGb29S", + "ZXNwb25zZSISChBGb29DbGllbnRNZXNzYWdlIhIKEEZvb1NlcnZlck1lc3Nh", + "Z2UiDAoKQmFyUmVxdWVzdCINCgtCYXJSZXNwb25zZSpZCgtGb3JlaWduRW51", + "bRIXChNGT1JFSUdOX1VOU1BFQ0lGSUVEEAASDwoLRk9SRUlHTl9GT08QBBIP", + "CgtGT1JFSUdOX0JBUhAFEg8KC0ZPUkVJR05fQkFaEAYqdQoUVGVzdEVudW1X", + "aXRoRHVwVmFsdWUSKAokVEVTVF9FTlVNX1dJVEhfRFVQX1ZBTFVFX1VOU1BF", + "Q0lGSUVEEAASCAoERk9PMRABEggKBEJBUjEQAhIHCgNCQVoQAxIICgRGT08y", + "EAESCAoEQkFSMhACGgIQASqdAQoOVGVzdFNwYXJzZUVudW0SIAocVEVTVF9T", + "UEFSU0VfRU5VTV9VTlNQRUNJRklFRBAAEgwKCFNQQVJTRV9BEHsSDgoIU1BB", + "UlNFX0IQpucDEg8KCFNQQVJTRV9DELKxgAYSFQoIU1BBUlNFX0QQ8f//////", + "////ARIVCghTUEFSU0VfRRC03vz///////8BEgwKCFNQQVJTRV9HEAIymQEK", + "C1Rlc3RTZXJ2aWNlEkQKA0ZvbxIdLnByb3RvYnVmX3VuaXR0ZXN0LkZvb1Jl", + "cXVlc3QaHi5wcm90b2J1Zl91bml0dGVzdC5Gb29SZXNwb25zZRJECgNCYXIS", + "HS5wcm90b2J1Zl91bml0dGVzdC5CYXJSZXF1ZXN0Gh4ucHJvdG9idWZfdW5p", + "dHRlc3QuQmFyUmVzcG9uc2VCOkINVW5pdHRlc3RQcm90b0gBgAEBiAEBkAEB", + "+AEBqgIaR29vZ2xlLlByb3RvYnVmLlRlc3RQcm90b3NiBnByb3RvMw==")); + descriptor = pbr::FileDescriptor.FromGeneratedCode(descriptorData, + new pbr::FileDescriptor[] { global::Google.Protobuf.TestProtos.UnittestImportProto3Reflection.Descriptor, }, + new pbr::GeneratedClrTypeInfo(new[] {typeof(global::Google.Protobuf.TestProtos.ForeignEnum), typeof(global::Google.Protobuf.TestProtos.TestEnumWithDupValue), typeof(global::Google.Protobuf.TestProtos.TestSparseEnum), }, new pbr::GeneratedClrTypeInfo[] { + new pbr::GeneratedClrTypeInfo(typeof(global::Google.Protobuf.TestProtos.TestAllTypes), global::Google.Protobuf.TestProtos.TestAllTypes.Parser, new[]{ "SingleInt32", "SingleInt64", "SingleUint32", "SingleUint64", "SingleSint32", "SingleSint64", "SingleFixed32", "SingleFixed64", "SingleSfixed32", "SingleSfixed64", "SingleFloat", "SingleDouble", "SingleBool", "SingleString", "SingleBytes", "SingleNestedMessage", "SingleForeignMessage", "SingleImportMessage", "SingleNestedEnum", "SingleForeignEnum", "SingleImportEnum", "SinglePublicImportMessage", "RepeatedInt32", "RepeatedInt64", "RepeatedUint32", "RepeatedUint64", "RepeatedSint32", "RepeatedSint64", "RepeatedFixed32", "RepeatedFixed64", "RepeatedSfixed32", "RepeatedSfixed64", "RepeatedFloat", "RepeatedDouble", "RepeatedBool", "RepeatedString", "RepeatedBytes", "RepeatedNestedMessage", "RepeatedForeignMessage", 
"RepeatedImportMessage", "RepeatedNestedEnum", "RepeatedForeignEnum", "RepeatedImportEnum", "RepeatedPublicImportMessage", "OneofUint32", "OneofNestedMessage", "OneofString", "OneofBytes" }, new[]{ "OneofField" }, new[]{ typeof(global::Google.Protobuf.TestProtos.TestAllTypes.Types.NestedEnum) }, new pbr::GeneratedClrTypeInfo[] { new pbr::GeneratedClrTypeInfo(typeof(global::Google.Protobuf.TestProtos.TestAllTypes.Types.NestedMessage), global::Google.Protobuf.TestProtos.TestAllTypes.Types.NestedMessage.Parser, new[]{ "Bb" }, null, null, null)}), + new pbr::GeneratedClrTypeInfo(typeof(global::Google.Protobuf.TestProtos.NestedTestAllTypes), global::Google.Protobuf.TestProtos.NestedTestAllTypes.Parser, new[]{ "Child", "Payload", "RepeatedChild" }, null, null, null), + new pbr::GeneratedClrTypeInfo(typeof(global::Google.Protobuf.TestProtos.TestDeprecatedFields), global::Google.Protobuf.TestProtos.TestDeprecatedFields.Parser, new[]{ "DeprecatedInt32" }, null, null, null), + new pbr::GeneratedClrTypeInfo(typeof(global::Google.Protobuf.TestProtos.ForeignMessage), global::Google.Protobuf.TestProtos.ForeignMessage.Parser, new[]{ "C" }, null, null, null), + new pbr::GeneratedClrTypeInfo(typeof(global::Google.Protobuf.TestProtos.TestReservedFields), global::Google.Protobuf.TestProtos.TestReservedFields.Parser, null, null, null, null), + new pbr::GeneratedClrTypeInfo(typeof(global::Google.Protobuf.TestProtos.TestForeignNested), global::Google.Protobuf.TestProtos.TestForeignNested.Parser, new[]{ "ForeignNested" }, null, null, null), + new pbr::GeneratedClrTypeInfo(typeof(global::Google.Protobuf.TestProtos.TestReallyLargeTagNumber), global::Google.Protobuf.TestProtos.TestReallyLargeTagNumber.Parser, new[]{ "A", "Bb" }, null, null, null), + new pbr::GeneratedClrTypeInfo(typeof(global::Google.Protobuf.TestProtos.TestRecursiveMessage), global::Google.Protobuf.TestProtos.TestRecursiveMessage.Parser, new[]{ "A", "I" }, null, null, null), + new pbr::GeneratedClrTypeInfo(typeof(global::Google.Protobuf.TestProtos.TestMutualRecursionA), global::Google.Protobuf.TestProtos.TestMutualRecursionA.Parser, new[]{ "Bb" }, null, null, null), + new pbr::GeneratedClrTypeInfo(typeof(global::Google.Protobuf.TestProtos.TestMutualRecursionB), global::Google.Protobuf.TestProtos.TestMutualRecursionB.Parser, new[]{ "A", "OptionalInt32" }, null, null, null), + new pbr::GeneratedClrTypeInfo(typeof(global::Google.Protobuf.TestProtos.TestCamelCaseFieldNames), global::Google.Protobuf.TestProtos.TestCamelCaseFieldNames.Parser, new[]{ "PrimitiveField", "StringField", "EnumField", "MessageField", "RepeatedPrimitiveField", "RepeatedStringField", "RepeatedEnumField", "RepeatedMessageField" }, null, null, null), + new pbr::GeneratedClrTypeInfo(typeof(global::Google.Protobuf.TestProtos.TestFieldOrderings), global::Google.Protobuf.TestProtos.TestFieldOrderings.Parser, new[]{ "MyString", "MyInt", "MyFloat", "SingleNestedMessage" }, null, null, new pbr::GeneratedClrTypeInfo[] { new pbr::GeneratedClrTypeInfo(typeof(global::Google.Protobuf.TestProtos.TestFieldOrderings.Types.NestedMessage), global::Google.Protobuf.TestProtos.TestFieldOrderings.Types.NestedMessage.Parser, new[]{ "Oo", "Bb" }, null, null, null)}), + new pbr::GeneratedClrTypeInfo(typeof(global::Google.Protobuf.TestProtos.SparseEnumMessage), global::Google.Protobuf.TestProtos.SparseEnumMessage.Parser, new[]{ "SparseEnum" }, null, null, null), + new pbr::GeneratedClrTypeInfo(typeof(global::Google.Protobuf.TestProtos.OneString), global::Google.Protobuf.TestProtos.OneString.Parser, new[]{ 
"Data" }, null, null, null), + new pbr::GeneratedClrTypeInfo(typeof(global::Google.Protobuf.TestProtos.MoreString), global::Google.Protobuf.TestProtos.MoreString.Parser, new[]{ "Data" }, null, null, null), + new pbr::GeneratedClrTypeInfo(typeof(global::Google.Protobuf.TestProtos.OneBytes), global::Google.Protobuf.TestProtos.OneBytes.Parser, new[]{ "Data" }, null, null, null), + new pbr::GeneratedClrTypeInfo(typeof(global::Google.Protobuf.TestProtos.MoreBytes), global::Google.Protobuf.TestProtos.MoreBytes.Parser, new[]{ "Data" }, null, null, null), + new pbr::GeneratedClrTypeInfo(typeof(global::Google.Protobuf.TestProtos.Int32Message), global::Google.Protobuf.TestProtos.Int32Message.Parser, new[]{ "Data" }, null, null, null), + new pbr::GeneratedClrTypeInfo(typeof(global::Google.Protobuf.TestProtos.Uint32Message), global::Google.Protobuf.TestProtos.Uint32Message.Parser, new[]{ "Data" }, null, null, null), + new pbr::GeneratedClrTypeInfo(typeof(global::Google.Protobuf.TestProtos.Int64Message), global::Google.Protobuf.TestProtos.Int64Message.Parser, new[]{ "Data" }, null, null, null), + new pbr::GeneratedClrTypeInfo(typeof(global::Google.Protobuf.TestProtos.Uint64Message), global::Google.Protobuf.TestProtos.Uint64Message.Parser, new[]{ "Data" }, null, null, null), + new pbr::GeneratedClrTypeInfo(typeof(global::Google.Protobuf.TestProtos.BoolMessage), global::Google.Protobuf.TestProtos.BoolMessage.Parser, new[]{ "Data" }, null, null, null), + new pbr::GeneratedClrTypeInfo(typeof(global::Google.Protobuf.TestProtos.TestOneof), global::Google.Protobuf.TestProtos.TestOneof.Parser, new[]{ "FooInt", "FooString", "FooMessage" }, new[]{ "Foo" }, null, null), + new pbr::GeneratedClrTypeInfo(typeof(global::Google.Protobuf.TestProtos.TestPackedTypes), global::Google.Protobuf.TestProtos.TestPackedTypes.Parser, new[]{ "PackedInt32", "PackedInt64", "PackedUint32", "PackedUint64", "PackedSint32", "PackedSint64", "PackedFixed32", "PackedFixed64", "PackedSfixed32", "PackedSfixed64", "PackedFloat", "PackedDouble", "PackedBool", "PackedEnum" }, null, null, null), + new pbr::GeneratedClrTypeInfo(typeof(global::Google.Protobuf.TestProtos.TestUnpackedTypes), global::Google.Protobuf.TestProtos.TestUnpackedTypes.Parser, new[]{ "UnpackedInt32", "UnpackedInt64", "UnpackedUint32", "UnpackedUint64", "UnpackedSint32", "UnpackedSint64", "UnpackedFixed32", "UnpackedFixed64", "UnpackedSfixed32", "UnpackedSfixed64", "UnpackedFloat", "UnpackedDouble", "UnpackedBool", "UnpackedEnum" }, null, null, null), + new pbr::GeneratedClrTypeInfo(typeof(global::Google.Protobuf.TestProtos.TestRepeatedScalarDifferentTagSizes), global::Google.Protobuf.TestProtos.TestRepeatedScalarDifferentTagSizes.Parser, new[]{ "RepeatedFixed32", "RepeatedInt32", "RepeatedFixed64", "RepeatedInt64", "RepeatedFloat", "RepeatedUint64" }, null, null, null), + new pbr::GeneratedClrTypeInfo(typeof(global::Google.Protobuf.TestProtos.TestCommentInjectionMessage), global::Google.Protobuf.TestProtos.TestCommentInjectionMessage.Parser, new[]{ "A" }, null, null, null), + new pbr::GeneratedClrTypeInfo(typeof(global::Google.Protobuf.TestProtos.FooRequest), global::Google.Protobuf.TestProtos.FooRequest.Parser, null, null, null, null), + new pbr::GeneratedClrTypeInfo(typeof(global::Google.Protobuf.TestProtos.FooResponse), global::Google.Protobuf.TestProtos.FooResponse.Parser, null, null, null, null), + new pbr::GeneratedClrTypeInfo(typeof(global::Google.Protobuf.TestProtos.FooClientMessage), global::Google.Protobuf.TestProtos.FooClientMessage.Parser, null, null, null, 
null),
+        new pbr::GeneratedClrTypeInfo(typeof(global::Google.Protobuf.TestProtos.FooServerMessage), global::Google.Protobuf.TestProtos.FooServerMessage.Parser, null, null, null, null),
+        new pbr::GeneratedClrTypeInfo(typeof(global::Google.Protobuf.TestProtos.BarRequest), global::Google.Protobuf.TestProtos.BarRequest.Parser, null, null, null, null),
+        new pbr::GeneratedClrTypeInfo(typeof(global::Google.Protobuf.TestProtos.BarResponse), global::Google.Protobuf.TestProtos.BarResponse.Parser, null, null, null, null)
+      }));
+    }
+    #endregion
+
+  }
+  #region Enums
+  public enum ForeignEnum {
+    [pbr::OriginalName("FOREIGN_UNSPECIFIED")] ForeignUnspecified = 0,
+    [pbr::OriginalName("FOREIGN_FOO")] ForeignFoo = 4,
+    [pbr::OriginalName("FOREIGN_BAR")] ForeignBar = 5,
+    [pbr::OriginalName("FOREIGN_BAZ")] ForeignBaz = 6,
+  }
+
+  /// <summary>
+  ///  Test an enum that has multiple values with the same number.
+  /// </summary>
+  public enum TestEnumWithDupValue {
+    [pbr::OriginalName("TEST_ENUM_WITH_DUP_VALUE_UNSPECIFIED")] Unspecified = 0,
+    [pbr::OriginalName("FOO1")] Foo1 = 1,
+    [pbr::OriginalName("BAR1")] Bar1 = 2,
+    [pbr::OriginalName("BAZ")] Baz = 3,
+    [pbr::OriginalName("FOO2")] Foo2 = 1,
+    [pbr::OriginalName("BAR2")] Bar2 = 2,
+  }
+
+  /// <summary>
+  ///  Test an enum with large, unordered values.
+  /// </summary>
+  public enum TestSparseEnum {
+    [pbr::OriginalName("TEST_SPARSE_ENUM_UNSPECIFIED")] Unspecified = 0,
+    [pbr::OriginalName("SPARSE_A")] SparseA = 123,
+    [pbr::OriginalName("SPARSE_B")] SparseB = 62374,
+    [pbr::OriginalName("SPARSE_C")] SparseC = 12589234,
+    [pbr::OriginalName("SPARSE_D")] SparseD = -15,
+    [pbr::OriginalName("SPARSE_E")] SparseE = -53452,
+    /// <summary>
+    ///  In proto3, value 0 must be the first one specified
+    ///  SPARSE_F = 0;
+    /// </summary>
+    [pbr::OriginalName("SPARSE_G")] SparseG = 2,
+  }
+
+  #endregion
+
+  #region Messages
+  /// <summary>
+  ///  This proto includes every type of field in both singular and repeated
+  ///  forms.
+  /// </summary>
+  [global::System.Diagnostics.DebuggerNonUserCodeAttribute()]
+  public sealed partial class TestAllTypes : pb::IMessage<TestAllTypes> {
+    private static readonly pb::MessageParser<TestAllTypes> _parser = new pb::MessageParser<TestAllTypes>(() => new TestAllTypes());
+    public static pb::MessageParser<TestAllTypes> Parser { get { return _parser; } }
+
+    public static pbr::MessageDescriptor Descriptor {
+      get { return global::Google.Protobuf.TestProtos.UnittestProto3Reflection.Descriptor.MessageTypes[0]; }
+    }
+
+    pbr::MessageDescriptor pb::IMessage.Descriptor {
+      get { return Descriptor; }
+    }
+
+    public TestAllTypes() {
+      OnConstruction();
+    }
+
+    partial void OnConstruction();
+
+    public TestAllTypes(TestAllTypes other) : this() {
+      singleInt32_ = other.singleInt32_;
+      singleInt64_ = other.singleInt64_;
+      singleUint32_ = other.singleUint32_;
+      singleUint64_ = other.singleUint64_;
+      singleSint32_ = other.singleSint32_;
+      singleSint64_ = other.singleSint64_;
+      singleFixed32_ = other.singleFixed32_;
+      singleFixed64_ = other.singleFixed64_;
+      singleSfixed32_ = other.singleSfixed32_;
+      singleSfixed64_ = other.singleSfixed64_;
+      singleFloat_ = other.singleFloat_;
+      singleDouble_ = other.singleDouble_;
+      singleBool_ = other.singleBool_;
+      singleString_ = other.singleString_;
+      singleBytes_ = other.singleBytes_;
+      SingleNestedMessage = other.singleNestedMessage_ != null ? other.SingleNestedMessage.Clone() : null;
+      SingleForeignMessage = other.singleForeignMessage_ != null ? other.SingleForeignMessage.Clone() : null;
+      SingleImportMessage = other.singleImportMessage_ != null ?
other.SingleImportMessage.Clone() : null; + singleNestedEnum_ = other.singleNestedEnum_; + singleForeignEnum_ = other.singleForeignEnum_; + singleImportEnum_ = other.singleImportEnum_; + SinglePublicImportMessage = other.singlePublicImportMessage_ != null ? other.SinglePublicImportMessage.Clone() : null; + repeatedInt32_ = other.repeatedInt32_.Clone(); + repeatedInt64_ = other.repeatedInt64_.Clone(); + repeatedUint32_ = other.repeatedUint32_.Clone(); + repeatedUint64_ = other.repeatedUint64_.Clone(); + repeatedSint32_ = other.repeatedSint32_.Clone(); + repeatedSint64_ = other.repeatedSint64_.Clone(); + repeatedFixed32_ = other.repeatedFixed32_.Clone(); + repeatedFixed64_ = other.repeatedFixed64_.Clone(); + repeatedSfixed32_ = other.repeatedSfixed32_.Clone(); + repeatedSfixed64_ = other.repeatedSfixed64_.Clone(); + repeatedFloat_ = other.repeatedFloat_.Clone(); + repeatedDouble_ = other.repeatedDouble_.Clone(); + repeatedBool_ = other.repeatedBool_.Clone(); + repeatedString_ = other.repeatedString_.Clone(); + repeatedBytes_ = other.repeatedBytes_.Clone(); + repeatedNestedMessage_ = other.repeatedNestedMessage_.Clone(); + repeatedForeignMessage_ = other.repeatedForeignMessage_.Clone(); + repeatedImportMessage_ = other.repeatedImportMessage_.Clone(); + repeatedNestedEnum_ = other.repeatedNestedEnum_.Clone(); + repeatedForeignEnum_ = other.repeatedForeignEnum_.Clone(); + repeatedImportEnum_ = other.repeatedImportEnum_.Clone(); + repeatedPublicImportMessage_ = other.repeatedPublicImportMessage_.Clone(); + switch (other.OneofFieldCase) { + case OneofFieldOneofCase.OneofUint32: + OneofUint32 = other.OneofUint32; + break; + case OneofFieldOneofCase.OneofNestedMessage: + OneofNestedMessage = other.OneofNestedMessage.Clone(); + break; + case OneofFieldOneofCase.OneofString: + OneofString = other.OneofString; + break; + case OneofFieldOneofCase.OneofBytes: + OneofBytes = other.OneofBytes; + break; + } + + } + + public TestAllTypes Clone() { + return new TestAllTypes(this); + } + + /// Field number for the "single_int32" field. + public const int SingleInt32FieldNumber = 1; + private int singleInt32_; + /// + /// Singular + /// + public int SingleInt32 { + get { return singleInt32_; } + set { + singleInt32_ = value; + } + } + + /// Field number for the "single_int64" field. + public const int SingleInt64FieldNumber = 2; + private long singleInt64_; + public long SingleInt64 { + get { return singleInt64_; } + set { + singleInt64_ = value; + } + } + + /// Field number for the "single_uint32" field. + public const int SingleUint32FieldNumber = 3; + private uint singleUint32_; + public uint SingleUint32 { + get { return singleUint32_; } + set { + singleUint32_ = value; + } + } + + /// Field number for the "single_uint64" field. + public const int SingleUint64FieldNumber = 4; + private ulong singleUint64_; + public ulong SingleUint64 { + get { return singleUint64_; } + set { + singleUint64_ = value; + } + } + + /// Field number for the "single_sint32" field. + public const int SingleSint32FieldNumber = 5; + private int singleSint32_; + public int SingleSint32 { + get { return singleSint32_; } + set { + singleSint32_ = value; + } + } + + /// Field number for the "single_sint64" field. + public const int SingleSint64FieldNumber = 6; + private long singleSint64_; + public long SingleSint64 { + get { return singleSint64_; } + set { + singleSint64_ = value; + } + } + + /// Field number for the "single_fixed32" field. 
+ public const int SingleFixed32FieldNumber = 7; + private uint singleFixed32_; + public uint SingleFixed32 { + get { return singleFixed32_; } + set { + singleFixed32_ = value; + } + } + + /// Field number for the "single_fixed64" field. + public const int SingleFixed64FieldNumber = 8; + private ulong singleFixed64_; + public ulong SingleFixed64 { + get { return singleFixed64_; } + set { + singleFixed64_ = value; + } + } + + /// Field number for the "single_sfixed32" field. + public const int SingleSfixed32FieldNumber = 9; + private int singleSfixed32_; + public int SingleSfixed32 { + get { return singleSfixed32_; } + set { + singleSfixed32_ = value; + } + } + + /// Field number for the "single_sfixed64" field. + public const int SingleSfixed64FieldNumber = 10; + private long singleSfixed64_; + public long SingleSfixed64 { + get { return singleSfixed64_; } + set { + singleSfixed64_ = value; + } + } + + /// Field number for the "single_float" field. + public const int SingleFloatFieldNumber = 11; + private float singleFloat_; + public float SingleFloat { + get { return singleFloat_; } + set { + singleFloat_ = value; + } + } + + /// Field number for the "single_double" field. + public const int SingleDoubleFieldNumber = 12; + private double singleDouble_; + public double SingleDouble { + get { return singleDouble_; } + set { + singleDouble_ = value; + } + } + + /// Field number for the "single_bool" field. + public const int SingleBoolFieldNumber = 13; + private bool singleBool_; + public bool SingleBool { + get { return singleBool_; } + set { + singleBool_ = value; + } + } + + /// Field number for the "single_string" field. + public const int SingleStringFieldNumber = 14; + private string singleString_ = ""; + public string SingleString { + get { return singleString_; } + set { + singleString_ = pb::ProtoPreconditions.CheckNotNull(value, "value"); + } + } + + /// Field number for the "single_bytes" field. + public const int SingleBytesFieldNumber = 15; + private pb::ByteString singleBytes_ = pb::ByteString.Empty; + public pb::ByteString SingleBytes { + get { return singleBytes_; } + set { + singleBytes_ = pb::ProtoPreconditions.CheckNotNull(value, "value"); + } + } + + /// Field number for the "single_nested_message" field. + public const int SingleNestedMessageFieldNumber = 18; + private global::Google.Protobuf.TestProtos.TestAllTypes.Types.NestedMessage singleNestedMessage_; + public global::Google.Protobuf.TestProtos.TestAllTypes.Types.NestedMessage SingleNestedMessage { + get { return singleNestedMessage_; } + set { + singleNestedMessage_ = value; + } + } + + /// Field number for the "single_foreign_message" field. + public const int SingleForeignMessageFieldNumber = 19; + private global::Google.Protobuf.TestProtos.ForeignMessage singleForeignMessage_; + public global::Google.Protobuf.TestProtos.ForeignMessage SingleForeignMessage { + get { return singleForeignMessage_; } + set { + singleForeignMessage_ = value; + } + } + + /// Field number for the "single_import_message" field. + public const int SingleImportMessageFieldNumber = 20; + private global::Google.Protobuf.TestProtos.ImportMessage singleImportMessage_; + public global::Google.Protobuf.TestProtos.ImportMessage SingleImportMessage { + get { return singleImportMessage_; } + set { + singleImportMessage_ = value; + } + } + + /// Field number for the "single_nested_enum" field. 
+    public const int SingleNestedEnumFieldNumber = 21;
+    private global::Google.Protobuf.TestProtos.TestAllTypes.Types.NestedEnum singleNestedEnum_ = 0;
+    public global::Google.Protobuf.TestProtos.TestAllTypes.Types.NestedEnum SingleNestedEnum {
+      get { return singleNestedEnum_; }
+      set {
+        singleNestedEnum_ = value;
+      }
+    }
+
+    /// <summary>Field number for the "single_foreign_enum" field.</summary>
+    public const int SingleForeignEnumFieldNumber = 22;
+    private global::Google.Protobuf.TestProtos.ForeignEnum singleForeignEnum_ = 0;
+    public global::Google.Protobuf.TestProtos.ForeignEnum SingleForeignEnum {
+      get { return singleForeignEnum_; }
+      set {
+        singleForeignEnum_ = value;
+      }
+    }
+
+    /// <summary>Field number for the "single_import_enum" field.</summary>
+    public const int SingleImportEnumFieldNumber = 23;
+    private global::Google.Protobuf.TestProtos.ImportEnum singleImportEnum_ = 0;
+    public global::Google.Protobuf.TestProtos.ImportEnum SingleImportEnum {
+      get { return singleImportEnum_; }
+      set {
+        singleImportEnum_ = value;
+      }
+    }
+
+    /// <summary>Field number for the "single_public_import_message" field.</summary>
+    public const int SinglePublicImportMessageFieldNumber = 26;
+    private global::Google.Protobuf.TestProtos.PublicImportMessage singlePublicImportMessage_;
+    /// <summary>
+    ///  Defined in unittest_import_public.proto
+    /// </summary>
+    public global::Google.Protobuf.TestProtos.PublicImportMessage SinglePublicImportMessage {
+      get { return singlePublicImportMessage_; }
+      set {
+        singlePublicImportMessage_ = value;
+      }
+    }
+
+    /// <summary>Field number for the "repeated_int32" field.</summary>
+    public const int RepeatedInt32FieldNumber = 31;
+    private static readonly pb::FieldCodec<int> _repeated_repeatedInt32_codec
+        = pb::FieldCodec.ForInt32(250);
+    private readonly pbc::RepeatedField<int> repeatedInt32_ = new pbc::RepeatedField<int>();
+    /// <summary>
+    ///  Repeated
+    /// </summary>
+    public pbc::RepeatedField<int> RepeatedInt32 {
+      get { return repeatedInt32_; }
+    }
+
+    /// <summary>Field number for the "repeated_int64" field.</summary>
+    public const int RepeatedInt64FieldNumber = 32;
+    private static readonly pb::FieldCodec<long> _repeated_repeatedInt64_codec
+        = pb::FieldCodec.ForInt64(258);
+    private readonly pbc::RepeatedField<long> repeatedInt64_ = new pbc::RepeatedField<long>();
+    public pbc::RepeatedField<long> RepeatedInt64 {
+      get { return repeatedInt64_; }
+    }
+
+    /// <summary>Field number for the "repeated_uint32" field.</summary>
+    public const int RepeatedUint32FieldNumber = 33;
+    private static readonly pb::FieldCodec<uint> _repeated_repeatedUint32_codec
+        = pb::FieldCodec.ForUInt32(266);
+    private readonly pbc::RepeatedField<uint> repeatedUint32_ = new pbc::RepeatedField<uint>();
+    public pbc::RepeatedField<uint> RepeatedUint32 {
+      get { return repeatedUint32_; }
+    }
+
+    /// <summary>Field number for the "repeated_uint64" field.</summary>
+    public const int RepeatedUint64FieldNumber = 34;
+    private static readonly pb::FieldCodec<ulong> _repeated_repeatedUint64_codec
+        = pb::FieldCodec.ForUInt64(274);
+    private readonly pbc::RepeatedField<ulong> repeatedUint64_ = new pbc::RepeatedField<ulong>();
+    public pbc::RepeatedField<ulong> RepeatedUint64 {
+      get { return repeatedUint64_; }
+    }
+
+    /// <summary>Field number for the "repeated_sint32" field.</summary>
+    public const int RepeatedSint32FieldNumber = 35;
+    private static readonly pb::FieldCodec<int> _repeated_repeatedSint32_codec
+        = pb::FieldCodec.ForSInt32(282);
+    private readonly pbc::RepeatedField<int> repeatedSint32_ = new pbc::RepeatedField<int>();
+    public pbc::RepeatedField<int> RepeatedSint32 {
+      get { return repeatedSint32_; }
+    }
+
+    /// <summary>Field number for the "repeated_sint64" field.</summary>
+    public const int RepeatedSint64FieldNumber = 36;
+    private static readonly pb::FieldCodec<long> _repeated_repeatedSint64_codec
+        = pb::FieldCodec.ForSInt64(290);
+    private readonly pbc::RepeatedField<long> repeatedSint64_ = new pbc::RepeatedField<long>();
+    public pbc::RepeatedField<long> RepeatedSint64 {
+      get { return repeatedSint64_; }
+    }
+
+    /// <summary>Field number for the "repeated_fixed32" field.</summary>
+    public const int RepeatedFixed32FieldNumber = 37;
+    private static readonly pb::FieldCodec<uint> _repeated_repeatedFixed32_codec
+        = pb::FieldCodec.ForFixed32(298);
+    private readonly pbc::RepeatedField<uint> repeatedFixed32_ = new pbc::RepeatedField<uint>();
+    public pbc::RepeatedField<uint> RepeatedFixed32 {
+      get { return repeatedFixed32_; }
+    }
+
+    /// <summary>Field number for the "repeated_fixed64" field.</summary>
+    public const int RepeatedFixed64FieldNumber = 38;
+    private static readonly pb::FieldCodec<ulong> _repeated_repeatedFixed64_codec
+        = pb::FieldCodec.ForFixed64(306);
+    private readonly pbc::RepeatedField<ulong> repeatedFixed64_ = new pbc::RepeatedField<ulong>();
+    public pbc::RepeatedField<ulong> RepeatedFixed64 {
+      get { return repeatedFixed64_; }
+    }
+
+    /// <summary>Field number for the "repeated_sfixed32" field.</summary>
+    public const int RepeatedSfixed32FieldNumber = 39;
+    private static readonly pb::FieldCodec<int> _repeated_repeatedSfixed32_codec
+        = pb::FieldCodec.ForSFixed32(314);
+    private readonly pbc::RepeatedField<int> repeatedSfixed32_ = new pbc::RepeatedField<int>();
+    public pbc::RepeatedField<int> RepeatedSfixed32 {
+      get { return repeatedSfixed32_; }
+    }
+
+    /// <summary>Field number for the "repeated_sfixed64" field.</summary>
+    public const int RepeatedSfixed64FieldNumber = 40;
+    private static readonly pb::FieldCodec<long> _repeated_repeatedSfixed64_codec
+        = pb::FieldCodec.ForSFixed64(322);
+    private readonly pbc::RepeatedField<long> repeatedSfixed64_ = new pbc::RepeatedField<long>();
+    public pbc::RepeatedField<long> RepeatedSfixed64 {
+      get { return repeatedSfixed64_; }
+    }
+
+    /// <summary>Field number for the "repeated_float" field.</summary>
+    public const int RepeatedFloatFieldNumber = 41;
+    private static readonly pb::FieldCodec<float> _repeated_repeatedFloat_codec
+        = pb::FieldCodec.ForFloat(330);
+    private readonly pbc::RepeatedField<float> repeatedFloat_ = new pbc::RepeatedField<float>();
+    public pbc::RepeatedField<float> RepeatedFloat {
+      get { return repeatedFloat_; }
+    }
+
+    /// <summary>Field number for the "repeated_double" field.</summary>
+    public const int RepeatedDoubleFieldNumber = 42;
+    private static readonly pb::FieldCodec<double> _repeated_repeatedDouble_codec
+        = pb::FieldCodec.ForDouble(338);
+    private readonly pbc::RepeatedField<double> repeatedDouble_ = new pbc::RepeatedField<double>();
+    public pbc::RepeatedField<double> RepeatedDouble {
+      get { return repeatedDouble_; }
+    }
+
+    /// <summary>Field number for the "repeated_bool" field.</summary>
+    public const int RepeatedBoolFieldNumber = 43;
+    private static readonly pb::FieldCodec<bool> _repeated_repeatedBool_codec
+        = pb::FieldCodec.ForBool(346);
+    private readonly pbc::RepeatedField<bool> repeatedBool_ = new pbc::RepeatedField<bool>();
+    public pbc::RepeatedField<bool> RepeatedBool {
+      get { return repeatedBool_; }
+    }
+
+    /// <summary>Field number for the "repeated_string" field.</summary>
+    public const int RepeatedStringFieldNumber = 44;
+    private static readonly pb::FieldCodec<string> _repeated_repeatedString_codec
+        = pb::FieldCodec.ForString(354);
+    private readonly pbc::RepeatedField<string> repeatedString_ = new pbc::RepeatedField<string>();
+    public pbc::RepeatedField<string> RepeatedString {
+      get { return repeatedString_; }
+    }
+
+    /// <summary>Field number for the "repeated_bytes" field.</summary>
+    public const int RepeatedBytesFieldNumber = 45;
+    private static readonly pb::FieldCodec<pb::ByteString> _repeated_repeatedBytes_codec
+        = pb::FieldCodec.ForBytes(362);
+    private readonly pbc::RepeatedField<pb::ByteString> repeatedBytes_ = new pbc::RepeatedField<pb::ByteString>();
+    public pbc::RepeatedField<pb::ByteString> RepeatedBytes {
+      get { return repeatedBytes_; }
+    }
+
+    /// <summary>Field number for the "repeated_nested_message" field.</summary>
+    public const int RepeatedNestedMessageFieldNumber = 48;
+    private static readonly pb::FieldCodec<global::Google.Protobuf.TestProtos.TestAllTypes.Types.NestedMessage> _repeated_repeatedNestedMessage_codec
+        = pb::FieldCodec.ForMessage(386, global::Google.Protobuf.TestProtos.TestAllTypes.Types.NestedMessage.Parser);
+    private readonly pbc::RepeatedField<global::Google.Protobuf.TestProtos.TestAllTypes.Types.NestedMessage> repeatedNestedMessage_ = new pbc::RepeatedField<global::Google.Protobuf.TestProtos.TestAllTypes.Types.NestedMessage>();
+    public pbc::RepeatedField<global::Google.Protobuf.TestProtos.TestAllTypes.Types.NestedMessage> RepeatedNestedMessage {
+      get { return repeatedNestedMessage_; }
+    }
+
+    /// <summary>Field number for the "repeated_foreign_message" field.</summary>
+    public const int RepeatedForeignMessageFieldNumber = 49;
+    private static readonly pb::FieldCodec<global::Google.Protobuf.TestProtos.ForeignMessage> _repeated_repeatedForeignMessage_codec
+        = pb::FieldCodec.ForMessage(394, global::Google.Protobuf.TestProtos.ForeignMessage.Parser);
+    private readonly pbc::RepeatedField<global::Google.Protobuf.TestProtos.ForeignMessage> repeatedForeignMessage_ = new pbc::RepeatedField<global::Google.Protobuf.TestProtos.ForeignMessage>();
+    public pbc::RepeatedField<global::Google.Protobuf.TestProtos.ForeignMessage> RepeatedForeignMessage {
+      get { return repeatedForeignMessage_; }
+    }
+
+    /// <summary>Field number for the "repeated_import_message" field.</summary>
+    public const int RepeatedImportMessageFieldNumber = 50;
+    private static readonly pb::FieldCodec<global::Google.Protobuf.TestProtos.ImportMessage> _repeated_repeatedImportMessage_codec
+        = pb::FieldCodec.ForMessage(402, global::Google.Protobuf.TestProtos.ImportMessage.Parser);
+    private readonly pbc::RepeatedField<global::Google.Protobuf.TestProtos.ImportMessage> repeatedImportMessage_ = new pbc::RepeatedField<global::Google.Protobuf.TestProtos.ImportMessage>();
+    public pbc::RepeatedField<global::Google.Protobuf.TestProtos.ImportMessage> RepeatedImportMessage {
+      get { return repeatedImportMessage_; }
+    }
+
+    /// <summary>Field number for the "repeated_nested_enum" field.</summary>
+    public const int RepeatedNestedEnumFieldNumber = 51;
+    private static readonly pb::FieldCodec<global::Google.Protobuf.TestProtos.TestAllTypes.Types.NestedEnum> _repeated_repeatedNestedEnum_codec
+        = pb::FieldCodec.ForEnum(410, x => (int) x, x => (global::Google.Protobuf.TestProtos.TestAllTypes.Types.NestedEnum) x);
+    private readonly pbc::RepeatedField<global::Google.Protobuf.TestProtos.TestAllTypes.Types.NestedEnum> repeatedNestedEnum_ = new pbc::RepeatedField<global::Google.Protobuf.TestProtos.TestAllTypes.Types.NestedEnum>();
+    public pbc::RepeatedField<global::Google.Protobuf.TestProtos.TestAllTypes.Types.NestedEnum> RepeatedNestedEnum {
+      get { return repeatedNestedEnum_; }
+    }
+
+    /// <summary>Field number for the "repeated_foreign_enum" field.</summary>
+    public const int RepeatedForeignEnumFieldNumber = 52;
+    private static readonly pb::FieldCodec<global::Google.Protobuf.TestProtos.ForeignEnum> _repeated_repeatedForeignEnum_codec
+        = pb::FieldCodec.ForEnum(418, x => (int) x, x => (global::Google.Protobuf.TestProtos.ForeignEnum) x);
+    private readonly pbc::RepeatedField<global::Google.Protobuf.TestProtos.ForeignEnum> repeatedForeignEnum_ = new pbc::RepeatedField<global::Google.Protobuf.TestProtos.ForeignEnum>();
+    public pbc::RepeatedField<global::Google.Protobuf.TestProtos.ForeignEnum> RepeatedForeignEnum {
+      get { return repeatedForeignEnum_; }
+    }
+
+    /// <summary>Field number for the "repeated_import_enum" field.</summary>
+    public const int RepeatedImportEnumFieldNumber = 53;
+    private static readonly pb::FieldCodec<global::Google.Protobuf.TestProtos.ImportEnum> _repeated_repeatedImportEnum_codec
+        = pb::FieldCodec.ForEnum(426, x => (int) x, x => (global::Google.Protobuf.TestProtos.ImportEnum) x);
+    private readonly pbc::RepeatedField<global::Google.Protobuf.TestProtos.ImportEnum> repeatedImportEnum_ = new pbc::RepeatedField<global::Google.Protobuf.TestProtos.ImportEnum>();
+    public pbc::RepeatedField<global::Google.Protobuf.TestProtos.ImportEnum> RepeatedImportEnum {
+      get { return repeatedImportEnum_; }
+    }
+
+    /// <summary>Field number for the "repeated_public_import_message" field.</summary>
+    public const int RepeatedPublicImportMessageFieldNumber = 54;
+    private static readonly pb::FieldCodec<global::Google.Protobuf.TestProtos.PublicImportMessage> _repeated_repeatedPublicImportMessage_codec
+        = pb::FieldCodec.ForMessage(434, global::Google.Protobuf.TestProtos.PublicImportMessage.Parser);
+    private readonly pbc::RepeatedField<global::Google.Protobuf.TestProtos.PublicImportMessage> repeatedPublicImportMessage_ = new pbc::RepeatedField<global::Google.Protobuf.TestProtos.PublicImportMessage>();
+    /// <summary>
+    ///  Defined in unittest_import_public.proto
+    /// </summary>
+    public pbc::RepeatedField<global::Google.Protobuf.TestProtos.PublicImportMessage> RepeatedPublicImportMessage {
+      get { return repeatedPublicImportMessage_; }
+    }
+
+    /// <summary>Field number for the "oneof_uint32" field.</summary>
+    public const int OneofUint32FieldNumber = 111;
+    public uint OneofUint32 {
+      get { return oneofFieldCase_ == OneofFieldOneofCase.OneofUint32 ? (uint) oneofField_ : 0; }
+      set {
+        oneofField_ = value;
+        oneofFieldCase_ = OneofFieldOneofCase.OneofUint32;
+      }
+    }
+
+    /// <summary>Field number for the "oneof_nested_message" field.</summary>
+    public const int OneofNestedMessageFieldNumber = 112;
+    public global::Google.Protobuf.TestProtos.TestAllTypes.Types.NestedMessage OneofNestedMessage {
+      get { return oneofFieldCase_ == OneofFieldOneofCase.OneofNestedMessage ? (global::Google.Protobuf.TestProtos.TestAllTypes.Types.NestedMessage) oneofField_ : null; }
+      set {
+        oneofField_ = value;
+        oneofFieldCase_ = value == null ? OneofFieldOneofCase.None : OneofFieldOneofCase.OneofNestedMessage;
+      }
+    }
+
+    /// <summary>Field number for the "oneof_string" field.</summary>
+    public const int OneofStringFieldNumber = 113;
+    public string OneofString {
+      get { return oneofFieldCase_ == OneofFieldOneofCase.OneofString ? (string) oneofField_ : ""; }
+      set {
+        oneofField_ = pb::ProtoPreconditions.CheckNotNull(value, "value");
+        oneofFieldCase_ = OneofFieldOneofCase.OneofString;
+      }
+    }
+
+    /// <summary>Field number for the "oneof_bytes" field.</summary>
+    public const int OneofBytesFieldNumber = 114;
+    public pb::ByteString OneofBytes {
+      get { return oneofFieldCase_ == OneofFieldOneofCase.OneofBytes ? (pb::ByteString) oneofField_ : pb::ByteString.Empty; }
+      set {
+        oneofField_ = pb::ProtoPreconditions.CheckNotNull(value, "value");
+        oneofFieldCase_ = OneofFieldOneofCase.OneofBytes;
+      }
+    }
+
+    private object oneofField_;
+    /// <summary>Enum of possible cases for the "oneof_field" oneof.</summary>
+ public enum OneofFieldOneofCase { + None = 0, + OneofUint32 = 111, + OneofNestedMessage = 112, + OneofString = 113, + OneofBytes = 114, + } + private OneofFieldOneofCase oneofFieldCase_ = OneofFieldOneofCase.None; + public OneofFieldOneofCase OneofFieldCase { + get { return oneofFieldCase_; } + } + + public void ClearOneofField() { + oneofFieldCase_ = OneofFieldOneofCase.None; + oneofField_ = null; + } + + public override bool Equals(object other) { + return Equals(other as TestAllTypes); + } + + public bool Equals(TestAllTypes other) { + if (ReferenceEquals(other, null)) { + return false; + } + if (ReferenceEquals(other, this)) { + return true; + } + if (SingleInt32 != other.SingleInt32) return false; + if (SingleInt64 != other.SingleInt64) return false; + if (SingleUint32 != other.SingleUint32) return false; + if (SingleUint64 != other.SingleUint64) return false; + if (SingleSint32 != other.SingleSint32) return false; + if (SingleSint64 != other.SingleSint64) return false; + if (SingleFixed32 != other.SingleFixed32) return false; + if (SingleFixed64 != other.SingleFixed64) return false; + if (SingleSfixed32 != other.SingleSfixed32) return false; + if (SingleSfixed64 != other.SingleSfixed64) return false; + if (SingleFloat != other.SingleFloat) return false; + if (SingleDouble != other.SingleDouble) return false; + if (SingleBool != other.SingleBool) return false; + if (SingleString != other.SingleString) return false; + if (SingleBytes != other.SingleBytes) return false; + if (!object.Equals(SingleNestedMessage, other.SingleNestedMessage)) return false; + if (!object.Equals(SingleForeignMessage, other.SingleForeignMessage)) return false; + if (!object.Equals(SingleImportMessage, other.SingleImportMessage)) return false; + if (SingleNestedEnum != other.SingleNestedEnum) return false; + if (SingleForeignEnum != other.SingleForeignEnum) return false; + if (SingleImportEnum != other.SingleImportEnum) return false; + if (!object.Equals(SinglePublicImportMessage, other.SinglePublicImportMessage)) return false; + if(!repeatedInt32_.Equals(other.repeatedInt32_)) return false; + if(!repeatedInt64_.Equals(other.repeatedInt64_)) return false; + if(!repeatedUint32_.Equals(other.repeatedUint32_)) return false; + if(!repeatedUint64_.Equals(other.repeatedUint64_)) return false; + if(!repeatedSint32_.Equals(other.repeatedSint32_)) return false; + if(!repeatedSint64_.Equals(other.repeatedSint64_)) return false; + if(!repeatedFixed32_.Equals(other.repeatedFixed32_)) return false; + if(!repeatedFixed64_.Equals(other.repeatedFixed64_)) return false; + if(!repeatedSfixed32_.Equals(other.repeatedSfixed32_)) return false; + if(!repeatedSfixed64_.Equals(other.repeatedSfixed64_)) return false; + if(!repeatedFloat_.Equals(other.repeatedFloat_)) return false; + if(!repeatedDouble_.Equals(other.repeatedDouble_)) return false; + if(!repeatedBool_.Equals(other.repeatedBool_)) return false; + if(!repeatedString_.Equals(other.repeatedString_)) return false; + if(!repeatedBytes_.Equals(other.repeatedBytes_)) return false; + if(!repeatedNestedMessage_.Equals(other.repeatedNestedMessage_)) return false; + if(!repeatedForeignMessage_.Equals(other.repeatedForeignMessage_)) return false; + if(!repeatedImportMessage_.Equals(other.repeatedImportMessage_)) return false; + if(!repeatedNestedEnum_.Equals(other.repeatedNestedEnum_)) return false; + if(!repeatedForeignEnum_.Equals(other.repeatedForeignEnum_)) return false; + if(!repeatedImportEnum_.Equals(other.repeatedImportEnum_)) return false; + 
if(!repeatedPublicImportMessage_.Equals(other.repeatedPublicImportMessage_)) return false; + if (OneofUint32 != other.OneofUint32) return false; + if (!object.Equals(OneofNestedMessage, other.OneofNestedMessage)) return false; + if (OneofString != other.OneofString) return false; + if (OneofBytes != other.OneofBytes) return false; + if (OneofFieldCase != other.OneofFieldCase) return false; + return true; + } + + public override int GetHashCode() { + int hash = 1; + if (SingleInt32 != 0) hash ^= SingleInt32.GetHashCode(); + if (SingleInt64 != 0L) hash ^= SingleInt64.GetHashCode(); + if (SingleUint32 != 0) hash ^= SingleUint32.GetHashCode(); + if (SingleUint64 != 0UL) hash ^= SingleUint64.GetHashCode(); + if (SingleSint32 != 0) hash ^= SingleSint32.GetHashCode(); + if (SingleSint64 != 0L) hash ^= SingleSint64.GetHashCode(); + if (SingleFixed32 != 0) hash ^= SingleFixed32.GetHashCode(); + if (SingleFixed64 != 0UL) hash ^= SingleFixed64.GetHashCode(); + if (SingleSfixed32 != 0) hash ^= SingleSfixed32.GetHashCode(); + if (SingleSfixed64 != 0L) hash ^= SingleSfixed64.GetHashCode(); + if (SingleFloat != 0F) hash ^= SingleFloat.GetHashCode(); + if (SingleDouble != 0D) hash ^= SingleDouble.GetHashCode(); + if (SingleBool != false) hash ^= SingleBool.GetHashCode(); + if (SingleString.Length != 0) hash ^= SingleString.GetHashCode(); + if (SingleBytes.Length != 0) hash ^= SingleBytes.GetHashCode(); + if (singleNestedMessage_ != null) hash ^= SingleNestedMessage.GetHashCode(); + if (singleForeignMessage_ != null) hash ^= SingleForeignMessage.GetHashCode(); + if (singleImportMessage_ != null) hash ^= SingleImportMessage.GetHashCode(); + if (SingleNestedEnum != 0) hash ^= SingleNestedEnum.GetHashCode(); + if (SingleForeignEnum != 0) hash ^= SingleForeignEnum.GetHashCode(); + if (SingleImportEnum != 0) hash ^= SingleImportEnum.GetHashCode(); + if (singlePublicImportMessage_ != null) hash ^= SinglePublicImportMessage.GetHashCode(); + hash ^= repeatedInt32_.GetHashCode(); + hash ^= repeatedInt64_.GetHashCode(); + hash ^= repeatedUint32_.GetHashCode(); + hash ^= repeatedUint64_.GetHashCode(); + hash ^= repeatedSint32_.GetHashCode(); + hash ^= repeatedSint64_.GetHashCode(); + hash ^= repeatedFixed32_.GetHashCode(); + hash ^= repeatedFixed64_.GetHashCode(); + hash ^= repeatedSfixed32_.GetHashCode(); + hash ^= repeatedSfixed64_.GetHashCode(); + hash ^= repeatedFloat_.GetHashCode(); + hash ^= repeatedDouble_.GetHashCode(); + hash ^= repeatedBool_.GetHashCode(); + hash ^= repeatedString_.GetHashCode(); + hash ^= repeatedBytes_.GetHashCode(); + hash ^= repeatedNestedMessage_.GetHashCode(); + hash ^= repeatedForeignMessage_.GetHashCode(); + hash ^= repeatedImportMessage_.GetHashCode(); + hash ^= repeatedNestedEnum_.GetHashCode(); + hash ^= repeatedForeignEnum_.GetHashCode(); + hash ^= repeatedImportEnum_.GetHashCode(); + hash ^= repeatedPublicImportMessage_.GetHashCode(); + if (oneofFieldCase_ == OneofFieldOneofCase.OneofUint32) hash ^= OneofUint32.GetHashCode(); + if (oneofFieldCase_ == OneofFieldOneofCase.OneofNestedMessage) hash ^= OneofNestedMessage.GetHashCode(); + if (oneofFieldCase_ == OneofFieldOneofCase.OneofString) hash ^= OneofString.GetHashCode(); + if (oneofFieldCase_ == OneofFieldOneofCase.OneofBytes) hash ^= OneofBytes.GetHashCode(); + hash ^= (int) oneofFieldCase_; + return hash; + } + + public override string ToString() { + return pb::JsonFormatter.ToDiagnosticString(this); + } + + public void WriteTo(pb::CodedOutputStream output) { + if (SingleInt32 != 0) { + output.WriteRawTag(8); + 
output.WriteInt32(SingleInt32); + } + if (SingleInt64 != 0L) { + output.WriteRawTag(16); + output.WriteInt64(SingleInt64); + } + if (SingleUint32 != 0) { + output.WriteRawTag(24); + output.WriteUInt32(SingleUint32); + } + if (SingleUint64 != 0UL) { + output.WriteRawTag(32); + output.WriteUInt64(SingleUint64); + } + if (SingleSint32 != 0) { + output.WriteRawTag(40); + output.WriteSInt32(SingleSint32); + } + if (SingleSint64 != 0L) { + output.WriteRawTag(48); + output.WriteSInt64(SingleSint64); + } + if (SingleFixed32 != 0) { + output.WriteRawTag(61); + output.WriteFixed32(SingleFixed32); + } + if (SingleFixed64 != 0UL) { + output.WriteRawTag(65); + output.WriteFixed64(SingleFixed64); + } + if (SingleSfixed32 != 0) { + output.WriteRawTag(77); + output.WriteSFixed32(SingleSfixed32); + } + if (SingleSfixed64 != 0L) { + output.WriteRawTag(81); + output.WriteSFixed64(SingleSfixed64); + } + if (SingleFloat != 0F) { + output.WriteRawTag(93); + output.WriteFloat(SingleFloat); + } + if (SingleDouble != 0D) { + output.WriteRawTag(97); + output.WriteDouble(SingleDouble); + } + if (SingleBool != false) { + output.WriteRawTag(104); + output.WriteBool(SingleBool); + } + if (SingleString.Length != 0) { + output.WriteRawTag(114); + output.WriteString(SingleString); + } + if (SingleBytes.Length != 0) { + output.WriteRawTag(122); + output.WriteBytes(SingleBytes); + } + if (singleNestedMessage_ != null) { + output.WriteRawTag(146, 1); + output.WriteMessage(SingleNestedMessage); + } + if (singleForeignMessage_ != null) { + output.WriteRawTag(154, 1); + output.WriteMessage(SingleForeignMessage); + } + if (singleImportMessage_ != null) { + output.WriteRawTag(162, 1); + output.WriteMessage(SingleImportMessage); + } + if (SingleNestedEnum != 0) { + output.WriteRawTag(168, 1); + output.WriteEnum((int) SingleNestedEnum); + } + if (SingleForeignEnum != 0) { + output.WriteRawTag(176, 1); + output.WriteEnum((int) SingleForeignEnum); + } + if (SingleImportEnum != 0) { + output.WriteRawTag(184, 1); + output.WriteEnum((int) SingleImportEnum); + } + if (singlePublicImportMessage_ != null) { + output.WriteRawTag(210, 1); + output.WriteMessage(SinglePublicImportMessage); + } + repeatedInt32_.WriteTo(output, _repeated_repeatedInt32_codec); + repeatedInt64_.WriteTo(output, _repeated_repeatedInt64_codec); + repeatedUint32_.WriteTo(output, _repeated_repeatedUint32_codec); + repeatedUint64_.WriteTo(output, _repeated_repeatedUint64_codec); + repeatedSint32_.WriteTo(output, _repeated_repeatedSint32_codec); + repeatedSint64_.WriteTo(output, _repeated_repeatedSint64_codec); + repeatedFixed32_.WriteTo(output, _repeated_repeatedFixed32_codec); + repeatedFixed64_.WriteTo(output, _repeated_repeatedFixed64_codec); + repeatedSfixed32_.WriteTo(output, _repeated_repeatedSfixed32_codec); + repeatedSfixed64_.WriteTo(output, _repeated_repeatedSfixed64_codec); + repeatedFloat_.WriteTo(output, _repeated_repeatedFloat_codec); + repeatedDouble_.WriteTo(output, _repeated_repeatedDouble_codec); + repeatedBool_.WriteTo(output, _repeated_repeatedBool_codec); + repeatedString_.WriteTo(output, _repeated_repeatedString_codec); + repeatedBytes_.WriteTo(output, _repeated_repeatedBytes_codec); + repeatedNestedMessage_.WriteTo(output, _repeated_repeatedNestedMessage_codec); + repeatedForeignMessage_.WriteTo(output, _repeated_repeatedForeignMessage_codec); + repeatedImportMessage_.WriteTo(output, _repeated_repeatedImportMessage_codec); + repeatedNestedEnum_.WriteTo(output, _repeated_repeatedNestedEnum_codec); + repeatedForeignEnum_.WriteTo(output, 
_repeated_repeatedForeignEnum_codec); + repeatedImportEnum_.WriteTo(output, _repeated_repeatedImportEnum_codec); + repeatedPublicImportMessage_.WriteTo(output, _repeated_repeatedPublicImportMessage_codec); + if (oneofFieldCase_ == OneofFieldOneofCase.OneofUint32) { + output.WriteRawTag(248, 6); + output.WriteUInt32(OneofUint32); + } + if (oneofFieldCase_ == OneofFieldOneofCase.OneofNestedMessage) { + output.WriteRawTag(130, 7); + output.WriteMessage(OneofNestedMessage); + } + if (oneofFieldCase_ == OneofFieldOneofCase.OneofString) { + output.WriteRawTag(138, 7); + output.WriteString(OneofString); + } + if (oneofFieldCase_ == OneofFieldOneofCase.OneofBytes) { + output.WriteRawTag(146, 7); + output.WriteBytes(OneofBytes); + } + } + + public int CalculateSize() { + int size = 0; + if (SingleInt32 != 0) { + size += 1 + pb::CodedOutputStream.ComputeInt32Size(SingleInt32); + } + if (SingleInt64 != 0L) { + size += 1 + pb::CodedOutputStream.ComputeInt64Size(SingleInt64); + } + if (SingleUint32 != 0) { + size += 1 + pb::CodedOutputStream.ComputeUInt32Size(SingleUint32); + } + if (SingleUint64 != 0UL) { + size += 1 + pb::CodedOutputStream.ComputeUInt64Size(SingleUint64); + } + if (SingleSint32 != 0) { + size += 1 + pb::CodedOutputStream.ComputeSInt32Size(SingleSint32); + } + if (SingleSint64 != 0L) { + size += 1 + pb::CodedOutputStream.ComputeSInt64Size(SingleSint64); + } + if (SingleFixed32 != 0) { + size += 1 + 4; + } + if (SingleFixed64 != 0UL) { + size += 1 + 8; + } + if (SingleSfixed32 != 0) { + size += 1 + 4; + } + if (SingleSfixed64 != 0L) { + size += 1 + 8; + } + if (SingleFloat != 0F) { + size += 1 + 4; + } + if (SingleDouble != 0D) { + size += 1 + 8; + } + if (SingleBool != false) { + size += 1 + 1; + } + if (SingleString.Length != 0) { + size += 1 + pb::CodedOutputStream.ComputeStringSize(SingleString); + } + if (SingleBytes.Length != 0) { + size += 1 + pb::CodedOutputStream.ComputeBytesSize(SingleBytes); + } + if (singleNestedMessage_ != null) { + size += 2 + pb::CodedOutputStream.ComputeMessageSize(SingleNestedMessage); + } + if (singleForeignMessage_ != null) { + size += 2 + pb::CodedOutputStream.ComputeMessageSize(SingleForeignMessage); + } + if (singleImportMessage_ != null) { + size += 2 + pb::CodedOutputStream.ComputeMessageSize(SingleImportMessage); + } + if (SingleNestedEnum != 0) { + size += 2 + pb::CodedOutputStream.ComputeEnumSize((int) SingleNestedEnum); + } + if (SingleForeignEnum != 0) { + size += 2 + pb::CodedOutputStream.ComputeEnumSize((int) SingleForeignEnum); + } + if (SingleImportEnum != 0) { + size += 2 + pb::CodedOutputStream.ComputeEnumSize((int) SingleImportEnum); + } + if (singlePublicImportMessage_ != null) { + size += 2 + pb::CodedOutputStream.ComputeMessageSize(SinglePublicImportMessage); + } + size += repeatedInt32_.CalculateSize(_repeated_repeatedInt32_codec); + size += repeatedInt64_.CalculateSize(_repeated_repeatedInt64_codec); + size += repeatedUint32_.CalculateSize(_repeated_repeatedUint32_codec); + size += repeatedUint64_.CalculateSize(_repeated_repeatedUint64_codec); + size += repeatedSint32_.CalculateSize(_repeated_repeatedSint32_codec); + size += repeatedSint64_.CalculateSize(_repeated_repeatedSint64_codec); + size += repeatedFixed32_.CalculateSize(_repeated_repeatedFixed32_codec); + size += repeatedFixed64_.CalculateSize(_repeated_repeatedFixed64_codec); + size += repeatedSfixed32_.CalculateSize(_repeated_repeatedSfixed32_codec); + size += repeatedSfixed64_.CalculateSize(_repeated_repeatedSfixed64_codec); + size += 
repeatedFloat_.CalculateSize(_repeated_repeatedFloat_codec); + size += repeatedDouble_.CalculateSize(_repeated_repeatedDouble_codec); + size += repeatedBool_.CalculateSize(_repeated_repeatedBool_codec); + size += repeatedString_.CalculateSize(_repeated_repeatedString_codec); + size += repeatedBytes_.CalculateSize(_repeated_repeatedBytes_codec); + size += repeatedNestedMessage_.CalculateSize(_repeated_repeatedNestedMessage_codec); + size += repeatedForeignMessage_.CalculateSize(_repeated_repeatedForeignMessage_codec); + size += repeatedImportMessage_.CalculateSize(_repeated_repeatedImportMessage_codec); + size += repeatedNestedEnum_.CalculateSize(_repeated_repeatedNestedEnum_codec); + size += repeatedForeignEnum_.CalculateSize(_repeated_repeatedForeignEnum_codec); + size += repeatedImportEnum_.CalculateSize(_repeated_repeatedImportEnum_codec); + size += repeatedPublicImportMessage_.CalculateSize(_repeated_repeatedPublicImportMessage_codec); + if (oneofFieldCase_ == OneofFieldOneofCase.OneofUint32) { + size += 2 + pb::CodedOutputStream.ComputeUInt32Size(OneofUint32); + } + if (oneofFieldCase_ == OneofFieldOneofCase.OneofNestedMessage) { + size += 2 + pb::CodedOutputStream.ComputeMessageSize(OneofNestedMessage); + } + if (oneofFieldCase_ == OneofFieldOneofCase.OneofString) { + size += 2 + pb::CodedOutputStream.ComputeStringSize(OneofString); + } + if (oneofFieldCase_ == OneofFieldOneofCase.OneofBytes) { + size += 2 + pb::CodedOutputStream.ComputeBytesSize(OneofBytes); + } + return size; + } + + public void MergeFrom(TestAllTypes other) { + if (other == null) { + return; + } + if (other.SingleInt32 != 0) { + SingleInt32 = other.SingleInt32; + } + if (other.SingleInt64 != 0L) { + SingleInt64 = other.SingleInt64; + } + if (other.SingleUint32 != 0) { + SingleUint32 = other.SingleUint32; + } + if (other.SingleUint64 != 0UL) { + SingleUint64 = other.SingleUint64; + } + if (other.SingleSint32 != 0) { + SingleSint32 = other.SingleSint32; + } + if (other.SingleSint64 != 0L) { + SingleSint64 = other.SingleSint64; + } + if (other.SingleFixed32 != 0) { + SingleFixed32 = other.SingleFixed32; + } + if (other.SingleFixed64 != 0UL) { + SingleFixed64 = other.SingleFixed64; + } + if (other.SingleSfixed32 != 0) { + SingleSfixed32 = other.SingleSfixed32; + } + if (other.SingleSfixed64 != 0L) { + SingleSfixed64 = other.SingleSfixed64; + } + if (other.SingleFloat != 0F) { + SingleFloat = other.SingleFloat; + } + if (other.SingleDouble != 0D) { + SingleDouble = other.SingleDouble; + } + if (other.SingleBool != false) { + SingleBool = other.SingleBool; + } + if (other.SingleString.Length != 0) { + SingleString = other.SingleString; + } + if (other.SingleBytes.Length != 0) { + SingleBytes = other.SingleBytes; + } + if (other.singleNestedMessage_ != null) { + if (singleNestedMessage_ == null) { + singleNestedMessage_ = new global::Google.Protobuf.TestProtos.TestAllTypes.Types.NestedMessage(); + } + SingleNestedMessage.MergeFrom(other.SingleNestedMessage); + } + if (other.singleForeignMessage_ != null) { + if (singleForeignMessage_ == null) { + singleForeignMessage_ = new global::Google.Protobuf.TestProtos.ForeignMessage(); + } + SingleForeignMessage.MergeFrom(other.SingleForeignMessage); + } + if (other.singleImportMessage_ != null) { + if (singleImportMessage_ == null) { + singleImportMessage_ = new global::Google.Protobuf.TestProtos.ImportMessage(); + } + SingleImportMessage.MergeFrom(other.SingleImportMessage); + } + if (other.SingleNestedEnum != 0) { + SingleNestedEnum = other.SingleNestedEnum; + } + if 
(other.SingleForeignEnum != 0) { + SingleForeignEnum = other.SingleForeignEnum; + } + if (other.SingleImportEnum != 0) { + SingleImportEnum = other.SingleImportEnum; + } + if (other.singlePublicImportMessage_ != null) { + if (singlePublicImportMessage_ == null) { + singlePublicImportMessage_ = new global::Google.Protobuf.TestProtos.PublicImportMessage(); + } + SinglePublicImportMessage.MergeFrom(other.SinglePublicImportMessage); + } + repeatedInt32_.Add(other.repeatedInt32_); + repeatedInt64_.Add(other.repeatedInt64_); + repeatedUint32_.Add(other.repeatedUint32_); + repeatedUint64_.Add(other.repeatedUint64_); + repeatedSint32_.Add(other.repeatedSint32_); + repeatedSint64_.Add(other.repeatedSint64_); + repeatedFixed32_.Add(other.repeatedFixed32_); + repeatedFixed64_.Add(other.repeatedFixed64_); + repeatedSfixed32_.Add(other.repeatedSfixed32_); + repeatedSfixed64_.Add(other.repeatedSfixed64_); + repeatedFloat_.Add(other.repeatedFloat_); + repeatedDouble_.Add(other.repeatedDouble_); + repeatedBool_.Add(other.repeatedBool_); + repeatedString_.Add(other.repeatedString_); + repeatedBytes_.Add(other.repeatedBytes_); + repeatedNestedMessage_.Add(other.repeatedNestedMessage_); + repeatedForeignMessage_.Add(other.repeatedForeignMessage_); + repeatedImportMessage_.Add(other.repeatedImportMessage_); + repeatedNestedEnum_.Add(other.repeatedNestedEnum_); + repeatedForeignEnum_.Add(other.repeatedForeignEnum_); + repeatedImportEnum_.Add(other.repeatedImportEnum_); + repeatedPublicImportMessage_.Add(other.repeatedPublicImportMessage_); + switch (other.OneofFieldCase) { + case OneofFieldOneofCase.OneofUint32: + OneofUint32 = other.OneofUint32; + break; + case OneofFieldOneofCase.OneofNestedMessage: + OneofNestedMessage = other.OneofNestedMessage; + break; + case OneofFieldOneofCase.OneofString: + OneofString = other.OneofString; + break; + case OneofFieldOneofCase.OneofBytes: + OneofBytes = other.OneofBytes; + break; + } + + } + + public void MergeFrom(pb::CodedInputStream input) { + uint tag; + while ((tag = input.ReadTag()) != 0) { + switch(tag) { + default: + input.SkipLastField(); + break; + case 8: { + SingleInt32 = input.ReadInt32(); + break; + } + case 16: { + SingleInt64 = input.ReadInt64(); + break; + } + case 24: { + SingleUint32 = input.ReadUInt32(); + break; + } + case 32: { + SingleUint64 = input.ReadUInt64(); + break; + } + case 40: { + SingleSint32 = input.ReadSInt32(); + break; + } + case 48: { + SingleSint64 = input.ReadSInt64(); + break; + } + case 61: { + SingleFixed32 = input.ReadFixed32(); + break; + } + case 65: { + SingleFixed64 = input.ReadFixed64(); + break; + } + case 77: { + SingleSfixed32 = input.ReadSFixed32(); + break; + } + case 81: { + SingleSfixed64 = input.ReadSFixed64(); + break; + } + case 93: { + SingleFloat = input.ReadFloat(); + break; + } + case 97: { + SingleDouble = input.ReadDouble(); + break; + } + case 104: { + SingleBool = input.ReadBool(); + break; + } + case 114: { + SingleString = input.ReadString(); + break; + } + case 122: { + SingleBytes = input.ReadBytes(); + break; + } + case 146: { + if (singleNestedMessage_ == null) { + singleNestedMessage_ = new global::Google.Protobuf.TestProtos.TestAllTypes.Types.NestedMessage(); + } + input.ReadMessage(singleNestedMessage_); + break; + } + case 154: { + if (singleForeignMessage_ == null) { + singleForeignMessage_ = new global::Google.Protobuf.TestProtos.ForeignMessage(); + } + input.ReadMessage(singleForeignMessage_); + break; + } + case 162: { + if (singleImportMessage_ == null) { + singleImportMessage_ = new 
global::Google.Protobuf.TestProtos.ImportMessage(); + } + input.ReadMessage(singleImportMessage_); + break; + } + case 168: { + singleNestedEnum_ = (global::Google.Protobuf.TestProtos.TestAllTypes.Types.NestedEnum) input.ReadEnum(); + break; + } + case 176: { + singleForeignEnum_ = (global::Google.Protobuf.TestProtos.ForeignEnum) input.ReadEnum(); + break; + } + case 184: { + singleImportEnum_ = (global::Google.Protobuf.TestProtos.ImportEnum) input.ReadEnum(); + break; + } + case 210: { + if (singlePublicImportMessage_ == null) { + singlePublicImportMessage_ = new global::Google.Protobuf.TestProtos.PublicImportMessage(); + } + input.ReadMessage(singlePublicImportMessage_); + break; + } + case 250: + case 248: { + repeatedInt32_.AddEntriesFrom(input, _repeated_repeatedInt32_codec); + break; + } + case 258: + case 256: { + repeatedInt64_.AddEntriesFrom(input, _repeated_repeatedInt64_codec); + break; + } + case 266: + case 264: { + repeatedUint32_.AddEntriesFrom(input, _repeated_repeatedUint32_codec); + break; + } + case 274: + case 272: { + repeatedUint64_.AddEntriesFrom(input, _repeated_repeatedUint64_codec); + break; + } + case 282: + case 280: { + repeatedSint32_.AddEntriesFrom(input, _repeated_repeatedSint32_codec); + break; + } + case 290: + case 288: { + repeatedSint64_.AddEntriesFrom(input, _repeated_repeatedSint64_codec); + break; + } + case 298: + case 301: { + repeatedFixed32_.AddEntriesFrom(input, _repeated_repeatedFixed32_codec); + break; + } + case 306: + case 305: { + repeatedFixed64_.AddEntriesFrom(input, _repeated_repeatedFixed64_codec); + break; + } + case 314: + case 317: { + repeatedSfixed32_.AddEntriesFrom(input, _repeated_repeatedSfixed32_codec); + break; + } + case 322: + case 321: { + repeatedSfixed64_.AddEntriesFrom(input, _repeated_repeatedSfixed64_codec); + break; + } + case 330: + case 333: { + repeatedFloat_.AddEntriesFrom(input, _repeated_repeatedFloat_codec); + break; + } + case 338: + case 337: { + repeatedDouble_.AddEntriesFrom(input, _repeated_repeatedDouble_codec); + break; + } + case 346: + case 344: { + repeatedBool_.AddEntriesFrom(input, _repeated_repeatedBool_codec); + break; + } + case 354: { + repeatedString_.AddEntriesFrom(input, _repeated_repeatedString_codec); + break; + } + case 362: { + repeatedBytes_.AddEntriesFrom(input, _repeated_repeatedBytes_codec); + break; + } + case 386: { + repeatedNestedMessage_.AddEntriesFrom(input, _repeated_repeatedNestedMessage_codec); + break; + } + case 394: { + repeatedForeignMessage_.AddEntriesFrom(input, _repeated_repeatedForeignMessage_codec); + break; + } + case 402: { + repeatedImportMessage_.AddEntriesFrom(input, _repeated_repeatedImportMessage_codec); + break; + } + case 410: + case 408: { + repeatedNestedEnum_.AddEntriesFrom(input, _repeated_repeatedNestedEnum_codec); + break; + } + case 418: + case 416: { + repeatedForeignEnum_.AddEntriesFrom(input, _repeated_repeatedForeignEnum_codec); + break; + } + case 426: + case 424: { + repeatedImportEnum_.AddEntriesFrom(input, _repeated_repeatedImportEnum_codec); + break; + } + case 434: { + repeatedPublicImportMessage_.AddEntriesFrom(input, _repeated_repeatedPublicImportMessage_codec); + break; + } + case 888: { + OneofUint32 = input.ReadUInt32(); + break; + } + case 898: { + global::Google.Protobuf.TestProtos.TestAllTypes.Types.NestedMessage subBuilder = new global::Google.Protobuf.TestProtos.TestAllTypes.Types.NestedMessage(); + if (oneofFieldCase_ == OneofFieldOneofCase.OneofNestedMessage) { + subBuilder.MergeFrom(OneofNestedMessage); + } + 
input.ReadMessage(subBuilder); + OneofNestedMessage = subBuilder; + break; + } + case 906: { + OneofString = input.ReadString(); + break; + } + case 914: { + OneofBytes = input.ReadBytes(); + break; + } + } + } + } + + #region Nested types + /// Container for nested types declared in the TestAllTypes message type. + [global::System.Diagnostics.DebuggerNonUserCodeAttribute()] + public static partial class Types { + public enum NestedEnum { + [pbr::OriginalName("NESTED_ENUM_UNSPECIFIED")] Unspecified = 0, + [pbr::OriginalName("FOO")] Foo = 1, + [pbr::OriginalName("BAR")] Bar = 2, + [pbr::OriginalName("BAZ")] Baz = 3, + /// + /// Intentionally negative. + /// + [pbr::OriginalName("NEG")] Neg = -1, + } + + [global::System.Diagnostics.DebuggerNonUserCodeAttribute()] + public sealed partial class NestedMessage : pb::IMessage { + private static readonly pb::MessageParser _parser = new pb::MessageParser(() => new NestedMessage()); + public static pb::MessageParser Parser { get { return _parser; } } + + public static pbr::MessageDescriptor Descriptor { + get { return global::Google.Protobuf.TestProtos.TestAllTypes.Descriptor.NestedTypes[0]; } + } + + pbr::MessageDescriptor pb::IMessage.Descriptor { + get { return Descriptor; } + } + + public NestedMessage() { + OnConstruction(); + } + + partial void OnConstruction(); + + public NestedMessage(NestedMessage other) : this() { + bb_ = other.bb_; + } + + public NestedMessage Clone() { + return new NestedMessage(this); + } + + /// Field number for the "bb" field. + public const int BbFieldNumber = 1; + private int bb_; + /// + /// The field name "b" fails to compile in proto1 because it conflicts with + /// a local variable named "b" in one of the generated methods. Doh. + /// This file needs to compile in proto1 to test backwards-compatibility. + /// + public int Bb { + get { return bb_; } + set { + bb_ = value; + } + } + + public override bool Equals(object other) { + return Equals(other as NestedMessage); + } + + public bool Equals(NestedMessage other) { + if (ReferenceEquals(other, null)) { + return false; + } + if (ReferenceEquals(other, this)) { + return true; + } + if (Bb != other.Bb) return false; + return true; + } + + public override int GetHashCode() { + int hash = 1; + if (Bb != 0) hash ^= Bb.GetHashCode(); + return hash; + } + + public override string ToString() { + return pb::JsonFormatter.ToDiagnosticString(this); + } + + public void WriteTo(pb::CodedOutputStream output) { + if (Bb != 0) { + output.WriteRawTag(8); + output.WriteInt32(Bb); + } + } + + public int CalculateSize() { + int size = 0; + if (Bb != 0) { + size += 1 + pb::CodedOutputStream.ComputeInt32Size(Bb); + } + return size; + } + + public void MergeFrom(NestedMessage other) { + if (other == null) { + return; + } + if (other.Bb != 0) { + Bb = other.Bb; + } + } + + public void MergeFrom(pb::CodedInputStream input) { + uint tag; + while ((tag = input.ReadTag()) != 0) { + switch(tag) { + default: + input.SkipLastField(); + break; + case 8: { + Bb = input.ReadInt32(); + break; + } + } + } + } + + } + + } + #endregion + + } + + /// + /// This proto includes a recusively nested message. 
+ /// + [global::System.Diagnostics.DebuggerNonUserCodeAttribute()] + public sealed partial class NestedTestAllTypes : pb::IMessage { + private static readonly pb::MessageParser _parser = new pb::MessageParser(() => new NestedTestAllTypes()); + public static pb::MessageParser Parser { get { return _parser; } } + + public static pbr::MessageDescriptor Descriptor { + get { return global::Google.Protobuf.TestProtos.UnittestProto3Reflection.Descriptor.MessageTypes[1]; } + } + + pbr::MessageDescriptor pb::IMessage.Descriptor { + get { return Descriptor; } + } + + public NestedTestAllTypes() { + OnConstruction(); + } + + partial void OnConstruction(); + + public NestedTestAllTypes(NestedTestAllTypes other) : this() { + Child = other.child_ != null ? other.Child.Clone() : null; + Payload = other.payload_ != null ? other.Payload.Clone() : null; + repeatedChild_ = other.repeatedChild_.Clone(); + } + + public NestedTestAllTypes Clone() { + return new NestedTestAllTypes(this); + } + + /// Field number for the "child" field. + public const int ChildFieldNumber = 1; + private global::Google.Protobuf.TestProtos.NestedTestAllTypes child_; + public global::Google.Protobuf.TestProtos.NestedTestAllTypes Child { + get { return child_; } + set { + child_ = value; + } + } + + /// Field number for the "payload" field. + public const int PayloadFieldNumber = 2; + private global::Google.Protobuf.TestProtos.TestAllTypes payload_; + public global::Google.Protobuf.TestProtos.TestAllTypes Payload { + get { return payload_; } + set { + payload_ = value; + } + } + + /// Field number for the "repeated_child" field. + public const int RepeatedChildFieldNumber = 3; + private static readonly pb::FieldCodec _repeated_repeatedChild_codec + = pb::FieldCodec.ForMessage(26, global::Google.Protobuf.TestProtos.NestedTestAllTypes.Parser); + private readonly pbc::RepeatedField repeatedChild_ = new pbc::RepeatedField(); + public pbc::RepeatedField RepeatedChild { + get { return repeatedChild_; } + } + + public override bool Equals(object other) { + return Equals(other as NestedTestAllTypes); + } + + public bool Equals(NestedTestAllTypes other) { + if (ReferenceEquals(other, null)) { + return false; + } + if (ReferenceEquals(other, this)) { + return true; + } + if (!object.Equals(Child, other.Child)) return false; + if (!object.Equals(Payload, other.Payload)) return false; + if(!repeatedChild_.Equals(other.repeatedChild_)) return false; + return true; + } + + public override int GetHashCode() { + int hash = 1; + if (child_ != null) hash ^= Child.GetHashCode(); + if (payload_ != null) hash ^= Payload.GetHashCode(); + hash ^= repeatedChild_.GetHashCode(); + return hash; + } + + public override string ToString() { + return pb::JsonFormatter.ToDiagnosticString(this); + } + + public void WriteTo(pb::CodedOutputStream output) { + if (child_ != null) { + output.WriteRawTag(10); + output.WriteMessage(Child); + } + if (payload_ != null) { + output.WriteRawTag(18); + output.WriteMessage(Payload); + } + repeatedChild_.WriteTo(output, _repeated_repeatedChild_codec); + } + + public int CalculateSize() { + int size = 0; + if (child_ != null) { + size += 1 + pb::CodedOutputStream.ComputeMessageSize(Child); + } + if (payload_ != null) { + size += 1 + pb::CodedOutputStream.ComputeMessageSize(Payload); + } + size += repeatedChild_.CalculateSize(_repeated_repeatedChild_codec); + return size; + } + + public void MergeFrom(NestedTestAllTypes other) { + if (other == null) { + return; + } + if (other.child_ != null) { + if (child_ == null) { + child_ = new 
global::Google.Protobuf.TestProtos.NestedTestAllTypes(); + } + Child.MergeFrom(other.Child); + } + if (other.payload_ != null) { + if (payload_ == null) { + payload_ = new global::Google.Protobuf.TestProtos.TestAllTypes(); + } + Payload.MergeFrom(other.Payload); + } + repeatedChild_.Add(other.repeatedChild_); + } + + public void MergeFrom(pb::CodedInputStream input) { + uint tag; + while ((tag = input.ReadTag()) != 0) { + switch(tag) { + default: + input.SkipLastField(); + break; + case 10: { + if (child_ == null) { + child_ = new global::Google.Protobuf.TestProtos.NestedTestAllTypes(); + } + input.ReadMessage(child_); + break; + } + case 18: { + if (payload_ == null) { + payload_ = new global::Google.Protobuf.TestProtos.TestAllTypes(); + } + input.ReadMessage(payload_); + break; + } + case 26: { + repeatedChild_.AddEntriesFrom(input, _repeated_repeatedChild_codec); + break; + } + } + } + } + + } + + [global::System.Diagnostics.DebuggerNonUserCodeAttribute()] + public sealed partial class TestDeprecatedFields : pb::IMessage { + private static readonly pb::MessageParser _parser = new pb::MessageParser(() => new TestDeprecatedFields()); + public static pb::MessageParser Parser { get { return _parser; } } + + public static pbr::MessageDescriptor Descriptor { + get { return global::Google.Protobuf.TestProtos.UnittestProto3Reflection.Descriptor.MessageTypes[2]; } + } + + pbr::MessageDescriptor pb::IMessage.Descriptor { + get { return Descriptor; } + } + + public TestDeprecatedFields() { + OnConstruction(); + } + + partial void OnConstruction(); + + public TestDeprecatedFields(TestDeprecatedFields other) : this() { + deprecatedInt32_ = other.deprecatedInt32_; + } + + public TestDeprecatedFields Clone() { + return new TestDeprecatedFields(this); + } + + /// Field number for the "deprecated_int32" field. + public const int DeprecatedInt32FieldNumber = 1; + private int deprecatedInt32_; + [global::System.ObsoleteAttribute()] + public int DeprecatedInt32 { + get { return deprecatedInt32_; } + set { + deprecatedInt32_ = value; + } + } + + public override bool Equals(object other) { + return Equals(other as TestDeprecatedFields); + } + + public bool Equals(TestDeprecatedFields other) { + if (ReferenceEquals(other, null)) { + return false; + } + if (ReferenceEquals(other, this)) { + return true; + } + if (DeprecatedInt32 != other.DeprecatedInt32) return false; + return true; + } + + public override int GetHashCode() { + int hash = 1; + if (DeprecatedInt32 != 0) hash ^= DeprecatedInt32.GetHashCode(); + return hash; + } + + public override string ToString() { + return pb::JsonFormatter.ToDiagnosticString(this); + } + + public void WriteTo(pb::CodedOutputStream output) { + if (DeprecatedInt32 != 0) { + output.WriteRawTag(8); + output.WriteInt32(DeprecatedInt32); + } + } + + public int CalculateSize() { + int size = 0; + if (DeprecatedInt32 != 0) { + size += 1 + pb::CodedOutputStream.ComputeInt32Size(DeprecatedInt32); + } + return size; + } + + public void MergeFrom(TestDeprecatedFields other) { + if (other == null) { + return; + } + if (other.DeprecatedInt32 != 0) { + DeprecatedInt32 = other.DeprecatedInt32; + } + } + + public void MergeFrom(pb::CodedInputStream input) { + uint tag; + while ((tag = input.ReadTag()) != 0) { + switch(tag) { + default: + input.SkipLastField(); + break; + case 8: { + DeprecatedInt32 = input.ReadInt32(); + break; + } + } + } + } + + } + + /// + /// Define these after TestAllTypes to make sure the compiler can handle + /// that. 
+ /// + [global::System.Diagnostics.DebuggerNonUserCodeAttribute()] + public sealed partial class ForeignMessage : pb::IMessage { + private static readonly pb::MessageParser _parser = new pb::MessageParser(() => new ForeignMessage()); + public static pb::MessageParser Parser { get { return _parser; } } + + public static pbr::MessageDescriptor Descriptor { + get { return global::Google.Protobuf.TestProtos.UnittestProto3Reflection.Descriptor.MessageTypes[3]; } + } + + pbr::MessageDescriptor pb::IMessage.Descriptor { + get { return Descriptor; } + } + + public ForeignMessage() { + OnConstruction(); + } + + partial void OnConstruction(); + + public ForeignMessage(ForeignMessage other) : this() { + c_ = other.c_; + } + + public ForeignMessage Clone() { + return new ForeignMessage(this); + } + + /// Field number for the "c" field. + public const int CFieldNumber = 1; + private int c_; + public int C { + get { return c_; } + set { + c_ = value; + } + } + + public override bool Equals(object other) { + return Equals(other as ForeignMessage); + } + + public bool Equals(ForeignMessage other) { + if (ReferenceEquals(other, null)) { + return false; + } + if (ReferenceEquals(other, this)) { + return true; + } + if (C != other.C) return false; + return true; + } + + public override int GetHashCode() { + int hash = 1; + if (C != 0) hash ^= C.GetHashCode(); + return hash; + } + + public override string ToString() { + return pb::JsonFormatter.ToDiagnosticString(this); + } + + public void WriteTo(pb::CodedOutputStream output) { + if (C != 0) { + output.WriteRawTag(8); + output.WriteInt32(C); + } + } + + public int CalculateSize() { + int size = 0; + if (C != 0) { + size += 1 + pb::CodedOutputStream.ComputeInt32Size(C); + } + return size; + } + + public void MergeFrom(ForeignMessage other) { + if (other == null) { + return; + } + if (other.C != 0) { + C = other.C; + } + } + + public void MergeFrom(pb::CodedInputStream input) { + uint tag; + while ((tag = input.ReadTag()) != 0) { + switch(tag) { + default: + input.SkipLastField(); + break; + case 8: { + C = input.ReadInt32(); + break; + } + } + } + } + + } + + [global::System.Diagnostics.DebuggerNonUserCodeAttribute()] + public sealed partial class TestReservedFields : pb::IMessage { + private static readonly pb::MessageParser _parser = new pb::MessageParser(() => new TestReservedFields()); + public static pb::MessageParser Parser { get { return _parser; } } + + public static pbr::MessageDescriptor Descriptor { + get { return global::Google.Protobuf.TestProtos.UnittestProto3Reflection.Descriptor.MessageTypes[4]; } + } + + pbr::MessageDescriptor pb::IMessage.Descriptor { + get { return Descriptor; } + } + + public TestReservedFields() { + OnConstruction(); + } + + partial void OnConstruction(); + + public TestReservedFields(TestReservedFields other) : this() { + } + + public TestReservedFields Clone() { + return new TestReservedFields(this); + } + + public override bool Equals(object other) { + return Equals(other as TestReservedFields); + } + + public bool Equals(TestReservedFields other) { + if (ReferenceEquals(other, null)) { + return false; + } + if (ReferenceEquals(other, this)) { + return true; + } + return true; + } + + public override int GetHashCode() { + int hash = 1; + return hash; + } + + public override string ToString() { + return pb::JsonFormatter.ToDiagnosticString(this); + } + + public void WriteTo(pb::CodedOutputStream output) { + } + + public int CalculateSize() { + int size = 0; + return size; + } + + public void 
MergeFrom(TestReservedFields other) { + if (other == null) { + return; + } + } + + public void MergeFrom(pb::CodedInputStream input) { + uint tag; + while ((tag = input.ReadTag()) != 0) { + switch(tag) { + default: + input.SkipLastField(); + break; + } + } + } + + } + + /// + /// Test that we can use NestedMessage from outside TestAllTypes. + /// + [global::System.Diagnostics.DebuggerNonUserCodeAttribute()] + public sealed partial class TestForeignNested : pb::IMessage { + private static readonly pb::MessageParser _parser = new pb::MessageParser(() => new TestForeignNested()); + public static pb::MessageParser Parser { get { return _parser; } } + + public static pbr::MessageDescriptor Descriptor { + get { return global::Google.Protobuf.TestProtos.UnittestProto3Reflection.Descriptor.MessageTypes[5]; } + } + + pbr::MessageDescriptor pb::IMessage.Descriptor { + get { return Descriptor; } + } + + public TestForeignNested() { + OnConstruction(); + } + + partial void OnConstruction(); + + public TestForeignNested(TestForeignNested other) : this() { + ForeignNested = other.foreignNested_ != null ? other.ForeignNested.Clone() : null; + } + + public TestForeignNested Clone() { + return new TestForeignNested(this); + } + + /// Field number for the "foreign_nested" field. + public const int ForeignNestedFieldNumber = 1; + private global::Google.Protobuf.TestProtos.TestAllTypes.Types.NestedMessage foreignNested_; + public global::Google.Protobuf.TestProtos.TestAllTypes.Types.NestedMessage ForeignNested { + get { return foreignNested_; } + set { + foreignNested_ = value; + } + } + + public override bool Equals(object other) { + return Equals(other as TestForeignNested); + } + + public bool Equals(TestForeignNested other) { + if (ReferenceEquals(other, null)) { + return false; + } + if (ReferenceEquals(other, this)) { + return true; + } + if (!object.Equals(ForeignNested, other.ForeignNested)) return false; + return true; + } + + public override int GetHashCode() { + int hash = 1; + if (foreignNested_ != null) hash ^= ForeignNested.GetHashCode(); + return hash; + } + + public override string ToString() { + return pb::JsonFormatter.ToDiagnosticString(this); + } + + public void WriteTo(pb::CodedOutputStream output) { + if (foreignNested_ != null) { + output.WriteRawTag(10); + output.WriteMessage(ForeignNested); + } + } + + public int CalculateSize() { + int size = 0; + if (foreignNested_ != null) { + size += 1 + pb::CodedOutputStream.ComputeMessageSize(ForeignNested); + } + return size; + } + + public void MergeFrom(TestForeignNested other) { + if (other == null) { + return; + } + if (other.foreignNested_ != null) { + if (foreignNested_ == null) { + foreignNested_ = new global::Google.Protobuf.TestProtos.TestAllTypes.Types.NestedMessage(); + } + ForeignNested.MergeFrom(other.ForeignNested); + } + } + + public void MergeFrom(pb::CodedInputStream input) { + uint tag; + while ((tag = input.ReadTag()) != 0) { + switch(tag) { + default: + input.SkipLastField(); + break; + case 10: { + if (foreignNested_ == null) { + foreignNested_ = new global::Google.Protobuf.TestProtos.TestAllTypes.Types.NestedMessage(); + } + input.ReadMessage(foreignNested_); + break; + } + } + } + } + + } + + /// + /// Test that really large tag numbers don't break anything. 
+ /// + [global::System.Diagnostics.DebuggerNonUserCodeAttribute()] + public sealed partial class TestReallyLargeTagNumber : pb::IMessage { + private static readonly pb::MessageParser _parser = new pb::MessageParser(() => new TestReallyLargeTagNumber()); + public static pb::MessageParser Parser { get { return _parser; } } + + public static pbr::MessageDescriptor Descriptor { + get { return global::Google.Protobuf.TestProtos.UnittestProto3Reflection.Descriptor.MessageTypes[6]; } + } + + pbr::MessageDescriptor pb::IMessage.Descriptor { + get { return Descriptor; } + } + + public TestReallyLargeTagNumber() { + OnConstruction(); + } + + partial void OnConstruction(); + + public TestReallyLargeTagNumber(TestReallyLargeTagNumber other) : this() { + a_ = other.a_; + bb_ = other.bb_; + } + + public TestReallyLargeTagNumber Clone() { + return new TestReallyLargeTagNumber(this); + } + + /// Field number for the "a" field. + public const int AFieldNumber = 1; + private int a_; + /// + /// The largest possible tag number is 2^28 - 1, since the wire format uses + /// three bits to communicate wire type. + /// + public int A { + get { return a_; } + set { + a_ = value; + } + } + + /// Field number for the "bb" field. + public const int BbFieldNumber = 268435455; + private int bb_; + public int Bb { + get { return bb_; } + set { + bb_ = value; + } + } + + public override bool Equals(object other) { + return Equals(other as TestReallyLargeTagNumber); + } + + public bool Equals(TestReallyLargeTagNumber other) { + if (ReferenceEquals(other, null)) { + return false; + } + if (ReferenceEquals(other, this)) { + return true; + } + if (A != other.A) return false; + if (Bb != other.Bb) return false; + return true; + } + + public override int GetHashCode() { + int hash = 1; + if (A != 0) hash ^= A.GetHashCode(); + if (Bb != 0) hash ^= Bb.GetHashCode(); + return hash; + } + + public override string ToString() { + return pb::JsonFormatter.ToDiagnosticString(this); + } + + public void WriteTo(pb::CodedOutputStream output) { + if (A != 0) { + output.WriteRawTag(8); + output.WriteInt32(A); + } + if (Bb != 0) { + output.WriteRawTag(248, 255, 255, 255, 7); + output.WriteInt32(Bb); + } + } + + public int CalculateSize() { + int size = 0; + if (A != 0) { + size += 1 + pb::CodedOutputStream.ComputeInt32Size(A); + } + if (Bb != 0) { + size += 5 + pb::CodedOutputStream.ComputeInt32Size(Bb); + } + return size; + } + + public void MergeFrom(TestReallyLargeTagNumber other) { + if (other == null) { + return; + } + if (other.A != 0) { + A = other.A; + } + if (other.Bb != 0) { + Bb = other.Bb; + } + } + + public void MergeFrom(pb::CodedInputStream input) { + uint tag; + while ((tag = input.ReadTag()) != 0) { + switch(tag) { + default: + input.SkipLastField(); + break; + case 8: { + A = input.ReadInt32(); + break; + } + case 2147483640: { + Bb = input.ReadInt32(); + break; + } + } + } + } + + } + + [global::System.Diagnostics.DebuggerNonUserCodeAttribute()] + public sealed partial class TestRecursiveMessage : pb::IMessage { + private static readonly pb::MessageParser _parser = new pb::MessageParser(() => new TestRecursiveMessage()); + public static pb::MessageParser Parser { get { return _parser; } } + + public static pbr::MessageDescriptor Descriptor { + get { return global::Google.Protobuf.TestProtos.UnittestProto3Reflection.Descriptor.MessageTypes[7]; } + } + + pbr::MessageDescriptor pb::IMessage.Descriptor { + get { return Descriptor; } + } + + public TestRecursiveMessage() { + OnConstruction(); + } + + partial void 
OnConstruction(); + + public TestRecursiveMessage(TestRecursiveMessage other) : this() { + A = other.a_ != null ? other.A.Clone() : null; + i_ = other.i_; + } + + public TestRecursiveMessage Clone() { + return new TestRecursiveMessage(this); + } + + /// Field number for the "a" field. + public const int AFieldNumber = 1; + private global::Google.Protobuf.TestProtos.TestRecursiveMessage a_; + public global::Google.Protobuf.TestProtos.TestRecursiveMessage A { + get { return a_; } + set { + a_ = value; + } + } + + /// Field number for the "i" field. + public const int IFieldNumber = 2; + private int i_; + public int I { + get { return i_; } + set { + i_ = value; + } + } + + public override bool Equals(object other) { + return Equals(other as TestRecursiveMessage); + } + + public bool Equals(TestRecursiveMessage other) { + if (ReferenceEquals(other, null)) { + return false; + } + if (ReferenceEquals(other, this)) { + return true; + } + if (!object.Equals(A, other.A)) return false; + if (I != other.I) return false; + return true; + } + + public override int GetHashCode() { + int hash = 1; + if (a_ != null) hash ^= A.GetHashCode(); + if (I != 0) hash ^= I.GetHashCode(); + return hash; + } + + public override string ToString() { + return pb::JsonFormatter.ToDiagnosticString(this); + } + + public void WriteTo(pb::CodedOutputStream output) { + if (a_ != null) { + output.WriteRawTag(10); + output.WriteMessage(A); + } + if (I != 0) { + output.WriteRawTag(16); + output.WriteInt32(I); + } + } + + public int CalculateSize() { + int size = 0; + if (a_ != null) { + size += 1 + pb::CodedOutputStream.ComputeMessageSize(A); + } + if (I != 0) { + size += 1 + pb::CodedOutputStream.ComputeInt32Size(I); + } + return size; + } + + public void MergeFrom(TestRecursiveMessage other) { + if (other == null) { + return; + } + if (other.a_ != null) { + if (a_ == null) { + a_ = new global::Google.Protobuf.TestProtos.TestRecursiveMessage(); + } + A.MergeFrom(other.A); + } + if (other.I != 0) { + I = other.I; + } + } + + public void MergeFrom(pb::CodedInputStream input) { + uint tag; + while ((tag = input.ReadTag()) != 0) { + switch(tag) { + default: + input.SkipLastField(); + break; + case 10: { + if (a_ == null) { + a_ = new global::Google.Protobuf.TestProtos.TestRecursiveMessage(); + } + input.ReadMessage(a_); + break; + } + case 16: { + I = input.ReadInt32(); + break; + } + } + } + } + + } + + /// + /// Test that mutual recursion works. + /// + [global::System.Diagnostics.DebuggerNonUserCodeAttribute()] + public sealed partial class TestMutualRecursionA : pb::IMessage { + private static readonly pb::MessageParser _parser = new pb::MessageParser(() => new TestMutualRecursionA()); + public static pb::MessageParser Parser { get { return _parser; } } + + public static pbr::MessageDescriptor Descriptor { + get { return global::Google.Protobuf.TestProtos.UnittestProto3Reflection.Descriptor.MessageTypes[8]; } + } + + pbr::MessageDescriptor pb::IMessage.Descriptor { + get { return Descriptor; } + } + + public TestMutualRecursionA() { + OnConstruction(); + } + + partial void OnConstruction(); + + public TestMutualRecursionA(TestMutualRecursionA other) : this() { + Bb = other.bb_ != null ? other.Bb.Clone() : null; + } + + public TestMutualRecursionA Clone() { + return new TestMutualRecursionA(this); + } + + /// Field number for the "bb" field. 
+ public const int BbFieldNumber = 1; + private global::Google.Protobuf.TestProtos.TestMutualRecursionB bb_; + public global::Google.Protobuf.TestProtos.TestMutualRecursionB Bb { + get { return bb_; } + set { + bb_ = value; + } + } + + public override bool Equals(object other) { + return Equals(other as TestMutualRecursionA); + } + + public bool Equals(TestMutualRecursionA other) { + if (ReferenceEquals(other, null)) { + return false; + } + if (ReferenceEquals(other, this)) { + return true; + } + if (!object.Equals(Bb, other.Bb)) return false; + return true; + } + + public override int GetHashCode() { + int hash = 1; + if (bb_ != null) hash ^= Bb.GetHashCode(); + return hash; + } + + public override string ToString() { + return pb::JsonFormatter.ToDiagnosticString(this); + } + + public void WriteTo(pb::CodedOutputStream output) { + if (bb_ != null) { + output.WriteRawTag(10); + output.WriteMessage(Bb); + } + } + + public int CalculateSize() { + int size = 0; + if (bb_ != null) { + size += 1 + pb::CodedOutputStream.ComputeMessageSize(Bb); + } + return size; + } + + public void MergeFrom(TestMutualRecursionA other) { + if (other == null) { + return; + } + if (other.bb_ != null) { + if (bb_ == null) { + bb_ = new global::Google.Protobuf.TestProtos.TestMutualRecursionB(); + } + Bb.MergeFrom(other.Bb); + } + } + + public void MergeFrom(pb::CodedInputStream input) { + uint tag; + while ((tag = input.ReadTag()) != 0) { + switch(tag) { + default: + input.SkipLastField(); + break; + case 10: { + if (bb_ == null) { + bb_ = new global::Google.Protobuf.TestProtos.TestMutualRecursionB(); + } + input.ReadMessage(bb_); + break; + } + } + } + } + + } + + [global::System.Diagnostics.DebuggerNonUserCodeAttribute()] + public sealed partial class TestMutualRecursionB : pb::IMessage { + private static readonly pb::MessageParser _parser = new pb::MessageParser(() => new TestMutualRecursionB()); + public static pb::MessageParser Parser { get { return _parser; } } + + public static pbr::MessageDescriptor Descriptor { + get { return global::Google.Protobuf.TestProtos.UnittestProto3Reflection.Descriptor.MessageTypes[9]; } + } + + pbr::MessageDescriptor pb::IMessage.Descriptor { + get { return Descriptor; } + } + + public TestMutualRecursionB() { + OnConstruction(); + } + + partial void OnConstruction(); + + public TestMutualRecursionB(TestMutualRecursionB other) : this() { + A = other.a_ != null ? other.A.Clone() : null; + optionalInt32_ = other.optionalInt32_; + } + + public TestMutualRecursionB Clone() { + return new TestMutualRecursionB(this); + } + + /// Field number for the "a" field. + public const int AFieldNumber = 1; + private global::Google.Protobuf.TestProtos.TestMutualRecursionA a_; + public global::Google.Protobuf.TestProtos.TestMutualRecursionA A { + get { return a_; } + set { + a_ = value; + } + } + + /// Field number for the "optional_int32" field. 
+ public const int OptionalInt32FieldNumber = 2; + private int optionalInt32_; + public int OptionalInt32 { + get { return optionalInt32_; } + set { + optionalInt32_ = value; + } + } + + public override bool Equals(object other) { + return Equals(other as TestMutualRecursionB); + } + + public bool Equals(TestMutualRecursionB other) { + if (ReferenceEquals(other, null)) { + return false; + } + if (ReferenceEquals(other, this)) { + return true; + } + if (!object.Equals(A, other.A)) return false; + if (OptionalInt32 != other.OptionalInt32) return false; + return true; + } + + public override int GetHashCode() { + int hash = 1; + if (a_ != null) hash ^= A.GetHashCode(); + if (OptionalInt32 != 0) hash ^= OptionalInt32.GetHashCode(); + return hash; + } + + public override string ToString() { + return pb::JsonFormatter.ToDiagnosticString(this); + } + + public void WriteTo(pb::CodedOutputStream output) { + if (a_ != null) { + output.WriteRawTag(10); + output.WriteMessage(A); + } + if (OptionalInt32 != 0) { + output.WriteRawTag(16); + output.WriteInt32(OptionalInt32); + } + } + + public int CalculateSize() { + int size = 0; + if (a_ != null) { + size += 1 + pb::CodedOutputStream.ComputeMessageSize(A); + } + if (OptionalInt32 != 0) { + size += 1 + pb::CodedOutputStream.ComputeInt32Size(OptionalInt32); + } + return size; + } + + public void MergeFrom(TestMutualRecursionB other) { + if (other == null) { + return; + } + if (other.a_ != null) { + if (a_ == null) { + a_ = new global::Google.Protobuf.TestProtos.TestMutualRecursionA(); + } + A.MergeFrom(other.A); + } + if (other.OptionalInt32 != 0) { + OptionalInt32 = other.OptionalInt32; + } + } + + public void MergeFrom(pb::CodedInputStream input) { + uint tag; + while ((tag = input.ReadTag()) != 0) { + switch(tag) { + default: + input.SkipLastField(); + break; + case 10: { + if (a_ == null) { + a_ = new global::Google.Protobuf.TestProtos.TestMutualRecursionA(); + } + input.ReadMessage(a_); + break; + } + case 16: { + OptionalInt32 = input.ReadInt32(); + break; + } + } + } + } + + } + + /// + /// Test message with CamelCase field names. This violates Protocol Buffer + /// standard style. + /// + [global::System.Diagnostics.DebuggerNonUserCodeAttribute()] + public sealed partial class TestCamelCaseFieldNames : pb::IMessage { + private static readonly pb::MessageParser _parser = new pb::MessageParser(() => new TestCamelCaseFieldNames()); + public static pb::MessageParser Parser { get { return _parser; } } + + public static pbr::MessageDescriptor Descriptor { + get { return global::Google.Protobuf.TestProtos.UnittestProto3Reflection.Descriptor.MessageTypes[10]; } + } + + pbr::MessageDescriptor pb::IMessage.Descriptor { + get { return Descriptor; } + } + + public TestCamelCaseFieldNames() { + OnConstruction(); + } + + partial void OnConstruction(); + + public TestCamelCaseFieldNames(TestCamelCaseFieldNames other) : this() { + primitiveField_ = other.primitiveField_; + stringField_ = other.stringField_; + enumField_ = other.enumField_; + MessageField = other.messageField_ != null ? other.MessageField.Clone() : null; + repeatedPrimitiveField_ = other.repeatedPrimitiveField_.Clone(); + repeatedStringField_ = other.repeatedStringField_.Clone(); + repeatedEnumField_ = other.repeatedEnumField_.Clone(); + repeatedMessageField_ = other.repeatedMessageField_.Clone(); + } + + public TestCamelCaseFieldNames Clone() { + return new TestCamelCaseFieldNames(this); + } + + /// Field number for the "PrimitiveField" field. 
+ public const int PrimitiveFieldFieldNumber = 1; + private int primitiveField_; + public int PrimitiveField { + get { return primitiveField_; } + set { + primitiveField_ = value; + } + } + + /// Field number for the "StringField" field. + public const int StringFieldFieldNumber = 2; + private string stringField_ = ""; + public string StringField { + get { return stringField_; } + set { + stringField_ = pb::ProtoPreconditions.CheckNotNull(value, "value"); + } + } + + /// Field number for the "EnumField" field. + public const int EnumFieldFieldNumber = 3; + private global::Google.Protobuf.TestProtos.ForeignEnum enumField_ = 0; + public global::Google.Protobuf.TestProtos.ForeignEnum EnumField { + get { return enumField_; } + set { + enumField_ = value; + } + } + + /// Field number for the "MessageField" field. + public const int MessageFieldFieldNumber = 4; + private global::Google.Protobuf.TestProtos.ForeignMessage messageField_; + public global::Google.Protobuf.TestProtos.ForeignMessage MessageField { + get { return messageField_; } + set { + messageField_ = value; + } + } + + /// Field number for the "RepeatedPrimitiveField" field. + public const int RepeatedPrimitiveFieldFieldNumber = 7; + private static readonly pb::FieldCodec _repeated_repeatedPrimitiveField_codec + = pb::FieldCodec.ForInt32(58); + private readonly pbc::RepeatedField repeatedPrimitiveField_ = new pbc::RepeatedField(); + public pbc::RepeatedField RepeatedPrimitiveField { + get { return repeatedPrimitiveField_; } + } + + /// Field number for the "RepeatedStringField" field. + public const int RepeatedStringFieldFieldNumber = 8; + private static readonly pb::FieldCodec _repeated_repeatedStringField_codec + = pb::FieldCodec.ForString(66); + private readonly pbc::RepeatedField repeatedStringField_ = new pbc::RepeatedField(); + public pbc::RepeatedField RepeatedStringField { + get { return repeatedStringField_; } + } + + /// Field number for the "RepeatedEnumField" field. + public const int RepeatedEnumFieldFieldNumber = 9; + private static readonly pb::FieldCodec _repeated_repeatedEnumField_codec + = pb::FieldCodec.ForEnum(74, x => (int) x, x => (global::Google.Protobuf.TestProtos.ForeignEnum) x); + private readonly pbc::RepeatedField repeatedEnumField_ = new pbc::RepeatedField(); + public pbc::RepeatedField RepeatedEnumField { + get { return repeatedEnumField_; } + } + + /// Field number for the "RepeatedMessageField" field. 
+ public const int RepeatedMessageFieldFieldNumber = 10; + private static readonly pb::FieldCodec _repeated_repeatedMessageField_codec + = pb::FieldCodec.ForMessage(82, global::Google.Protobuf.TestProtos.ForeignMessage.Parser); + private readonly pbc::RepeatedField repeatedMessageField_ = new pbc::RepeatedField(); + public pbc::RepeatedField RepeatedMessageField { + get { return repeatedMessageField_; } + } + + public override bool Equals(object other) { + return Equals(other as TestCamelCaseFieldNames); + } + + public bool Equals(TestCamelCaseFieldNames other) { + if (ReferenceEquals(other, null)) { + return false; + } + if (ReferenceEquals(other, this)) { + return true; + } + if (PrimitiveField != other.PrimitiveField) return false; + if (StringField != other.StringField) return false; + if (EnumField != other.EnumField) return false; + if (!object.Equals(MessageField, other.MessageField)) return false; + if(!repeatedPrimitiveField_.Equals(other.repeatedPrimitiveField_)) return false; + if(!repeatedStringField_.Equals(other.repeatedStringField_)) return false; + if(!repeatedEnumField_.Equals(other.repeatedEnumField_)) return false; + if(!repeatedMessageField_.Equals(other.repeatedMessageField_)) return false; + return true; + } + + public override int GetHashCode() { + int hash = 1; + if (PrimitiveField != 0) hash ^= PrimitiveField.GetHashCode(); + if (StringField.Length != 0) hash ^= StringField.GetHashCode(); + if (EnumField != 0) hash ^= EnumField.GetHashCode(); + if (messageField_ != null) hash ^= MessageField.GetHashCode(); + hash ^= repeatedPrimitiveField_.GetHashCode(); + hash ^= repeatedStringField_.GetHashCode(); + hash ^= repeatedEnumField_.GetHashCode(); + hash ^= repeatedMessageField_.GetHashCode(); + return hash; + } + + public override string ToString() { + return pb::JsonFormatter.ToDiagnosticString(this); + } + + public void WriteTo(pb::CodedOutputStream output) { + if (PrimitiveField != 0) { + output.WriteRawTag(8); + output.WriteInt32(PrimitiveField); + } + if (StringField.Length != 0) { + output.WriteRawTag(18); + output.WriteString(StringField); + } + if (EnumField != 0) { + output.WriteRawTag(24); + output.WriteEnum((int) EnumField); + } + if (messageField_ != null) { + output.WriteRawTag(34); + output.WriteMessage(MessageField); + } + repeatedPrimitiveField_.WriteTo(output, _repeated_repeatedPrimitiveField_codec); + repeatedStringField_.WriteTo(output, _repeated_repeatedStringField_codec); + repeatedEnumField_.WriteTo(output, _repeated_repeatedEnumField_codec); + repeatedMessageField_.WriteTo(output, _repeated_repeatedMessageField_codec); + } + + public int CalculateSize() { + int size = 0; + if (PrimitiveField != 0) { + size += 1 + pb::CodedOutputStream.ComputeInt32Size(PrimitiveField); + } + if (StringField.Length != 0) { + size += 1 + pb::CodedOutputStream.ComputeStringSize(StringField); + } + if (EnumField != 0) { + size += 1 + pb::CodedOutputStream.ComputeEnumSize((int) EnumField); + } + if (messageField_ != null) { + size += 1 + pb::CodedOutputStream.ComputeMessageSize(MessageField); + } + size += repeatedPrimitiveField_.CalculateSize(_repeated_repeatedPrimitiveField_codec); + size += repeatedStringField_.CalculateSize(_repeated_repeatedStringField_codec); + size += repeatedEnumField_.CalculateSize(_repeated_repeatedEnumField_codec); + size += repeatedMessageField_.CalculateSize(_repeated_repeatedMessageField_codec); + return size; + } + + public void MergeFrom(TestCamelCaseFieldNames other) { + if (other == null) { + return; + } + if (other.PrimitiveField != 
0) { + PrimitiveField = other.PrimitiveField; + } + if (other.StringField.Length != 0) { + StringField = other.StringField; + } + if (other.EnumField != 0) { + EnumField = other.EnumField; + } + if (other.messageField_ != null) { + if (messageField_ == null) { + messageField_ = new global::Google.Protobuf.TestProtos.ForeignMessage(); + } + MessageField.MergeFrom(other.MessageField); + } + repeatedPrimitiveField_.Add(other.repeatedPrimitiveField_); + repeatedStringField_.Add(other.repeatedStringField_); + repeatedEnumField_.Add(other.repeatedEnumField_); + repeatedMessageField_.Add(other.repeatedMessageField_); + } + + public void MergeFrom(pb::CodedInputStream input) { + uint tag; + while ((tag = input.ReadTag()) != 0) { + switch(tag) { + default: + input.SkipLastField(); + break; + case 8: { + PrimitiveField = input.ReadInt32(); + break; + } + case 18: { + StringField = input.ReadString(); + break; + } + case 24: { + enumField_ = (global::Google.Protobuf.TestProtos.ForeignEnum) input.ReadEnum(); + break; + } + case 34: { + if (messageField_ == null) { + messageField_ = new global::Google.Protobuf.TestProtos.ForeignMessage(); + } + input.ReadMessage(messageField_); + break; + } + case 58: + case 56: { + repeatedPrimitiveField_.AddEntriesFrom(input, _repeated_repeatedPrimitiveField_codec); + break; + } + case 66: { + repeatedStringField_.AddEntriesFrom(input, _repeated_repeatedStringField_codec); + break; + } + case 74: + case 72: { + repeatedEnumField_.AddEntriesFrom(input, _repeated_repeatedEnumField_codec); + break; + } + case 82: { + repeatedMessageField_.AddEntriesFrom(input, _repeated_repeatedMessageField_codec); + break; + } + } + } + } + + } + + /// + /// We list fields out of order, to ensure that we're using field number and not + /// field index to determine serialization order. + /// + [global::System.Diagnostics.DebuggerNonUserCodeAttribute()] + public sealed partial class TestFieldOrderings : pb::IMessage { + private static readonly pb::MessageParser _parser = new pb::MessageParser(() => new TestFieldOrderings()); + public static pb::MessageParser Parser { get { return _parser; } } + + public static pbr::MessageDescriptor Descriptor { + get { return global::Google.Protobuf.TestProtos.UnittestProto3Reflection.Descriptor.MessageTypes[11]; } + } + + pbr::MessageDescriptor pb::IMessage.Descriptor { + get { return Descriptor; } + } + + public TestFieldOrderings() { + OnConstruction(); + } + + partial void OnConstruction(); + + public TestFieldOrderings(TestFieldOrderings other) : this() { + myString_ = other.myString_; + myInt_ = other.myInt_; + myFloat_ = other.myFloat_; + SingleNestedMessage = other.singleNestedMessage_ != null ? other.SingleNestedMessage.Clone() : null; + } + + public TestFieldOrderings Clone() { + return new TestFieldOrderings(this); + } + + /// Field number for the "my_string" field. + public const int MyStringFieldNumber = 11; + private string myString_ = ""; + public string MyString { + get { return myString_; } + set { + myString_ = pb::ProtoPreconditions.CheckNotNull(value, "value"); + } + } + + /// Field number for the "my_int" field. + public const int MyIntFieldNumber = 1; + private long myInt_; + public long MyInt { + get { return myInt_; } + set { + myInt_ = value; + } + } + + /// Field number for the "my_float" field. + public const int MyFloatFieldNumber = 101; + private float myFloat_; + public float MyFloat { + get { return myFloat_; } + set { + myFloat_ = value; + } + } + + /// Field number for the "single_nested_message" field. 
+ public const int SingleNestedMessageFieldNumber = 200; + private global::Google.Protobuf.TestProtos.TestFieldOrderings.Types.NestedMessage singleNestedMessage_; + public global::Google.Protobuf.TestProtos.TestFieldOrderings.Types.NestedMessage SingleNestedMessage { + get { return singleNestedMessage_; } + set { + singleNestedMessage_ = value; + } + } + + public override bool Equals(object other) { + return Equals(other as TestFieldOrderings); + } + + public bool Equals(TestFieldOrderings other) { + if (ReferenceEquals(other, null)) { + return false; + } + if (ReferenceEquals(other, this)) { + return true; + } + if (MyString != other.MyString) return false; + if (MyInt != other.MyInt) return false; + if (MyFloat != other.MyFloat) return false; + if (!object.Equals(SingleNestedMessage, other.SingleNestedMessage)) return false; + return true; + } + + public override int GetHashCode() { + int hash = 1; + if (MyString.Length != 0) hash ^= MyString.GetHashCode(); + if (MyInt != 0L) hash ^= MyInt.GetHashCode(); + if (MyFloat != 0F) hash ^= MyFloat.GetHashCode(); + if (singleNestedMessage_ != null) hash ^= SingleNestedMessage.GetHashCode(); + return hash; + } + + public override string ToString() { + return pb::JsonFormatter.ToDiagnosticString(this); + } + + public void WriteTo(pb::CodedOutputStream output) { + if (MyInt != 0L) { + output.WriteRawTag(8); + output.WriteInt64(MyInt); + } + if (MyString.Length != 0) { + output.WriteRawTag(90); + output.WriteString(MyString); + } + if (MyFloat != 0F) { + output.WriteRawTag(173, 6); + output.WriteFloat(MyFloat); + } + if (singleNestedMessage_ != null) { + output.WriteRawTag(194, 12); + output.WriteMessage(SingleNestedMessage); + } + } + + public int CalculateSize() { + int size = 0; + if (MyString.Length != 0) { + size += 1 + pb::CodedOutputStream.ComputeStringSize(MyString); + } + if (MyInt != 0L) { + size += 1 + pb::CodedOutputStream.ComputeInt64Size(MyInt); + } + if (MyFloat != 0F) { + size += 2 + 4; + } + if (singleNestedMessage_ != null) { + size += 2 + pb::CodedOutputStream.ComputeMessageSize(SingleNestedMessage); + } + return size; + } + + public void MergeFrom(TestFieldOrderings other) { + if (other == null) { + return; + } + if (other.MyString.Length != 0) { + MyString = other.MyString; + } + if (other.MyInt != 0L) { + MyInt = other.MyInt; + } + if (other.MyFloat != 0F) { + MyFloat = other.MyFloat; + } + if (other.singleNestedMessage_ != null) { + if (singleNestedMessage_ == null) { + singleNestedMessage_ = new global::Google.Protobuf.TestProtos.TestFieldOrderings.Types.NestedMessage(); + } + SingleNestedMessage.MergeFrom(other.SingleNestedMessage); + } + } + + public void MergeFrom(pb::CodedInputStream input) { + uint tag; + while ((tag = input.ReadTag()) != 0) { + switch(tag) { + default: + input.SkipLastField(); + break; + case 8: { + MyInt = input.ReadInt64(); + break; + } + case 90: { + MyString = input.ReadString(); + break; + } + case 813: { + MyFloat = input.ReadFloat(); + break; + } + case 1602: { + if (singleNestedMessage_ == null) { + singleNestedMessage_ = new global::Google.Protobuf.TestProtos.TestFieldOrderings.Types.NestedMessage(); + } + input.ReadMessage(singleNestedMessage_); + break; + } + } + } + } + + #region Nested types + /// Container for nested types declared in the TestFieldOrderings message type. 
+ [global::System.Diagnostics.DebuggerNonUserCodeAttribute()] + public static partial class Types { + [global::System.Diagnostics.DebuggerNonUserCodeAttribute()] + public sealed partial class NestedMessage : pb::IMessage { + private static readonly pb::MessageParser _parser = new pb::MessageParser(() => new NestedMessage()); + public static pb::MessageParser Parser { get { return _parser; } } + + public static pbr::MessageDescriptor Descriptor { + get { return global::Google.Protobuf.TestProtos.TestFieldOrderings.Descriptor.NestedTypes[0]; } + } + + pbr::MessageDescriptor pb::IMessage.Descriptor { + get { return Descriptor; } + } + + public NestedMessage() { + OnConstruction(); + } + + partial void OnConstruction(); + + public NestedMessage(NestedMessage other) : this() { + oo_ = other.oo_; + bb_ = other.bb_; + } + + public NestedMessage Clone() { + return new NestedMessage(this); + } + + /// Field number for the "oo" field. + public const int OoFieldNumber = 2; + private long oo_; + public long Oo { + get { return oo_; } + set { + oo_ = value; + } + } + + /// Field number for the "bb" field. + public const int BbFieldNumber = 1; + private int bb_; + /// + /// The field name "b" fails to compile in proto1 because it conflicts with + /// a local variable named "b" in one of the generated methods. Doh. + /// This file needs to compile in proto1 to test backwards-compatibility. + /// + public int Bb { + get { return bb_; } + set { + bb_ = value; + } + } + + public override bool Equals(object other) { + return Equals(other as NestedMessage); + } + + public bool Equals(NestedMessage other) { + if (ReferenceEquals(other, null)) { + return false; + } + if (ReferenceEquals(other, this)) { + return true; + } + if (Oo != other.Oo) return false; + if (Bb != other.Bb) return false; + return true; + } + + public override int GetHashCode() { + int hash = 1; + if (Oo != 0L) hash ^= Oo.GetHashCode(); + if (Bb != 0) hash ^= Bb.GetHashCode(); + return hash; + } + + public override string ToString() { + return pb::JsonFormatter.ToDiagnosticString(this); + } + + public void WriteTo(pb::CodedOutputStream output) { + if (Bb != 0) { + output.WriteRawTag(8); + output.WriteInt32(Bb); + } + if (Oo != 0L) { + output.WriteRawTag(16); + output.WriteInt64(Oo); + } + } + + public int CalculateSize() { + int size = 0; + if (Oo != 0L) { + size += 1 + pb::CodedOutputStream.ComputeInt64Size(Oo); + } + if (Bb != 0) { + size += 1 + pb::CodedOutputStream.ComputeInt32Size(Bb); + } + return size; + } + + public void MergeFrom(NestedMessage other) { + if (other == null) { + return; + } + if (other.Oo != 0L) { + Oo = other.Oo; + } + if (other.Bb != 0) { + Bb = other.Bb; + } + } + + public void MergeFrom(pb::CodedInputStream input) { + uint tag; + while ((tag = input.ReadTag()) != 0) { + switch(tag) { + default: + input.SkipLastField(); + break; + case 8: { + Bb = input.ReadInt32(); + break; + } + case 16: { + Oo = input.ReadInt64(); + break; + } + } + } + } + + } + + } + #endregion + + } + + [global::System.Diagnostics.DebuggerNonUserCodeAttribute()] + public sealed partial class SparseEnumMessage : pb::IMessage { + private static readonly pb::MessageParser _parser = new pb::MessageParser(() => new SparseEnumMessage()); + public static pb::MessageParser Parser { get { return _parser; } } + + public static pbr::MessageDescriptor Descriptor { + get { return global::Google.Protobuf.TestProtos.UnittestProto3Reflection.Descriptor.MessageTypes[12]; } + } + + pbr::MessageDescriptor pb::IMessage.Descriptor { + get { return Descriptor; } + 
} + + public SparseEnumMessage() { + OnConstruction(); + } + + partial void OnConstruction(); + + public SparseEnumMessage(SparseEnumMessage other) : this() { + sparseEnum_ = other.sparseEnum_; + } + + public SparseEnumMessage Clone() { + return new SparseEnumMessage(this); + } + + /// Field number for the "sparse_enum" field. + public const int SparseEnumFieldNumber = 1; + private global::Google.Protobuf.TestProtos.TestSparseEnum sparseEnum_ = 0; + public global::Google.Protobuf.TestProtos.TestSparseEnum SparseEnum { + get { return sparseEnum_; } + set { + sparseEnum_ = value; + } + } + + public override bool Equals(object other) { + return Equals(other as SparseEnumMessage); + } + + public bool Equals(SparseEnumMessage other) { + if (ReferenceEquals(other, null)) { + return false; + } + if (ReferenceEquals(other, this)) { + return true; + } + if (SparseEnum != other.SparseEnum) return false; + return true; + } + + public override int GetHashCode() { + int hash = 1; + if (SparseEnum != 0) hash ^= SparseEnum.GetHashCode(); + return hash; + } + + public override string ToString() { + return pb::JsonFormatter.ToDiagnosticString(this); + } + + public void WriteTo(pb::CodedOutputStream output) { + if (SparseEnum != 0) { + output.WriteRawTag(8); + output.WriteEnum((int) SparseEnum); + } + } + + public int CalculateSize() { + int size = 0; + if (SparseEnum != 0) { + size += 1 + pb::CodedOutputStream.ComputeEnumSize((int) SparseEnum); + } + return size; + } + + public void MergeFrom(SparseEnumMessage other) { + if (other == null) { + return; + } + if (other.SparseEnum != 0) { + SparseEnum = other.SparseEnum; + } + } + + public void MergeFrom(pb::CodedInputStream input) { + uint tag; + while ((tag = input.ReadTag()) != 0) { + switch(tag) { + default: + input.SkipLastField(); + break; + case 8: { + sparseEnum_ = (global::Google.Protobuf.TestProtos.TestSparseEnum) input.ReadEnum(); + break; + } + } + } + } + + } + + /// + /// Test String and Bytes: string is for valid UTF-8 strings + /// + [global::System.Diagnostics.DebuggerNonUserCodeAttribute()] + public sealed partial class OneString : pb::IMessage { + private static readonly pb::MessageParser _parser = new pb::MessageParser(() => new OneString()); + public static pb::MessageParser Parser { get { return _parser; } } + + public static pbr::MessageDescriptor Descriptor { + get { return global::Google.Protobuf.TestProtos.UnittestProto3Reflection.Descriptor.MessageTypes[13]; } + } + + pbr::MessageDescriptor pb::IMessage.Descriptor { + get { return Descriptor; } + } + + public OneString() { + OnConstruction(); + } + + partial void OnConstruction(); + + public OneString(OneString other) : this() { + data_ = other.data_; + } + + public OneString Clone() { + return new OneString(this); + } + + /// Field number for the "data" field. 
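+ /// The "data" field is field 1 with wire type 2 (length-delimited), giving the tag byte 10
+ /// (1 * 8 + 2); that is why WriteTo emits WriteRawTag(10) and MergeFrom matches case 10.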
+ public const int DataFieldNumber = 1; + private string data_ = ""; + public string Data { + get { return data_; } + set { + data_ = pb::ProtoPreconditions.CheckNotNull(value, "value"); + } + } + + public override bool Equals(object other) { + return Equals(other as OneString); + } + + public bool Equals(OneString other) { + if (ReferenceEquals(other, null)) { + return false; + } + if (ReferenceEquals(other, this)) { + return true; + } + if (Data != other.Data) return false; + return true; + } + + public override int GetHashCode() { + int hash = 1; + if (Data.Length != 0) hash ^= Data.GetHashCode(); + return hash; + } + + public override string ToString() { + return pb::JsonFormatter.ToDiagnosticString(this); + } + + public void WriteTo(pb::CodedOutputStream output) { + if (Data.Length != 0) { + output.WriteRawTag(10); + output.WriteString(Data); + } + } + + public int CalculateSize() { + int size = 0; + if (Data.Length != 0) { + size += 1 + pb::CodedOutputStream.ComputeStringSize(Data); + } + return size; + } + + public void MergeFrom(OneString other) { + if (other == null) { + return; + } + if (other.Data.Length != 0) { + Data = other.Data; + } + } + + public void MergeFrom(pb::CodedInputStream input) { + uint tag; + while ((tag = input.ReadTag()) != 0) { + switch(tag) { + default: + input.SkipLastField(); + break; + case 10: { + Data = input.ReadString(); + break; + } + } + } + } + + } + + [global::System.Diagnostics.DebuggerNonUserCodeAttribute()] + public sealed partial class MoreString : pb::IMessage { + private static readonly pb::MessageParser _parser = new pb::MessageParser(() => new MoreString()); + public static pb::MessageParser Parser { get { return _parser; } } + + public static pbr::MessageDescriptor Descriptor { + get { return global::Google.Protobuf.TestProtos.UnittestProto3Reflection.Descriptor.MessageTypes[14]; } + } + + pbr::MessageDescriptor pb::IMessage.Descriptor { + get { return Descriptor; } + } + + public MoreString() { + OnConstruction(); + } + + partial void OnConstruction(); + + public MoreString(MoreString other) : this() { + data_ = other.data_.Clone(); + } + + public MoreString Clone() { + return new MoreString(this); + } + + /// Field number for the "data" field. 
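+ /// Here "data" is a repeated string: every entry is written with the same tag 10 through the
+ /// field codec below, and MergeFrom(other) appends the other message's entries (data_.Add)
+ /// rather than replacing them.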
+ public const int DataFieldNumber = 1; + private static readonly pb::FieldCodec _repeated_data_codec + = pb::FieldCodec.ForString(10); + private readonly pbc::RepeatedField data_ = new pbc::RepeatedField(); + public pbc::RepeatedField Data { + get { return data_; } + } + + public override bool Equals(object other) { + return Equals(other as MoreString); + } + + public bool Equals(MoreString other) { + if (ReferenceEquals(other, null)) { + return false; + } + if (ReferenceEquals(other, this)) { + return true; + } + if(!data_.Equals(other.data_)) return false; + return true; + } + + public override int GetHashCode() { + int hash = 1; + hash ^= data_.GetHashCode(); + return hash; + } + + public override string ToString() { + return pb::JsonFormatter.ToDiagnosticString(this); + } + + public void WriteTo(pb::CodedOutputStream output) { + data_.WriteTo(output, _repeated_data_codec); + } + + public int CalculateSize() { + int size = 0; + size += data_.CalculateSize(_repeated_data_codec); + return size; + } + + public void MergeFrom(MoreString other) { + if (other == null) { + return; + } + data_.Add(other.data_); + } + + public void MergeFrom(pb::CodedInputStream input) { + uint tag; + while ((tag = input.ReadTag()) != 0) { + switch(tag) { + default: + input.SkipLastField(); + break; + case 10: { + data_.AddEntriesFrom(input, _repeated_data_codec); + break; + } + } + } + } + + } + + [global::System.Diagnostics.DebuggerNonUserCodeAttribute()] + public sealed partial class OneBytes : pb::IMessage { + private static readonly pb::MessageParser _parser = new pb::MessageParser(() => new OneBytes()); + public static pb::MessageParser Parser { get { return _parser; } } + + public static pbr::MessageDescriptor Descriptor { + get { return global::Google.Protobuf.TestProtos.UnittestProto3Reflection.Descriptor.MessageTypes[15]; } + } + + pbr::MessageDescriptor pb::IMessage.Descriptor { + get { return Descriptor; } + } + + public OneBytes() { + OnConstruction(); + } + + partial void OnConstruction(); + + public OneBytes(OneBytes other) : this() { + data_ = other.data_; + } + + public OneBytes Clone() { + return new OneBytes(this); + } + + /// Field number for the "data" field. 
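+ /// Bytes fields are surfaced as pb::ByteString, which is immutable; like the string setter
+ /// above, the setter rejects null via ProtoPreconditions.CheckNotNull rather than storing it.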
+ public const int DataFieldNumber = 1; + private pb::ByteString data_ = pb::ByteString.Empty; + public pb::ByteString Data { + get { return data_; } + set { + data_ = pb::ProtoPreconditions.CheckNotNull(value, "value"); + } + } + + public override bool Equals(object other) { + return Equals(other as OneBytes); + } + + public bool Equals(OneBytes other) { + if (ReferenceEquals(other, null)) { + return false; + } + if (ReferenceEquals(other, this)) { + return true; + } + if (Data != other.Data) return false; + return true; + } + + public override int GetHashCode() { + int hash = 1; + if (Data.Length != 0) hash ^= Data.GetHashCode(); + return hash; + } + + public override string ToString() { + return pb::JsonFormatter.ToDiagnosticString(this); + } + + public void WriteTo(pb::CodedOutputStream output) { + if (Data.Length != 0) { + output.WriteRawTag(10); + output.WriteBytes(Data); + } + } + + public int CalculateSize() { + int size = 0; + if (Data.Length != 0) { + size += 1 + pb::CodedOutputStream.ComputeBytesSize(Data); + } + return size; + } + + public void MergeFrom(OneBytes other) { + if (other == null) { + return; + } + if (other.Data.Length != 0) { + Data = other.Data; + } + } + + public void MergeFrom(pb::CodedInputStream input) { + uint tag; + while ((tag = input.ReadTag()) != 0) { + switch(tag) { + default: + input.SkipLastField(); + break; + case 10: { + Data = input.ReadBytes(); + break; + } + } + } + } + + } + + [global::System.Diagnostics.DebuggerNonUserCodeAttribute()] + public sealed partial class MoreBytes : pb::IMessage { + private static readonly pb::MessageParser _parser = new pb::MessageParser(() => new MoreBytes()); + public static pb::MessageParser Parser { get { return _parser; } } + + public static pbr::MessageDescriptor Descriptor { + get { return global::Google.Protobuf.TestProtos.UnittestProto3Reflection.Descriptor.MessageTypes[16]; } + } + + pbr::MessageDescriptor pb::IMessage.Descriptor { + get { return Descriptor; } + } + + public MoreBytes() { + OnConstruction(); + } + + partial void OnConstruction(); + + public MoreBytes(MoreBytes other) : this() { + data_ = other.data_; + } + + public MoreBytes Clone() { + return new MoreBytes(this); + } + + /// Field number for the "data" field. 
+ public const int DataFieldNumber = 1; + private pb::ByteString data_ = pb::ByteString.Empty; + public pb::ByteString Data { + get { return data_; } + set { + data_ = pb::ProtoPreconditions.CheckNotNull(value, "value"); + } + } + + public override bool Equals(object other) { + return Equals(other as MoreBytes); + } + + public bool Equals(MoreBytes other) { + if (ReferenceEquals(other, null)) { + return false; + } + if (ReferenceEquals(other, this)) { + return true; + } + if (Data != other.Data) return false; + return true; + } + + public override int GetHashCode() { + int hash = 1; + if (Data.Length != 0) hash ^= Data.GetHashCode(); + return hash; + } + + public override string ToString() { + return pb::JsonFormatter.ToDiagnosticString(this); + } + + public void WriteTo(pb::CodedOutputStream output) { + if (Data.Length != 0) { + output.WriteRawTag(10); + output.WriteBytes(Data); + } + } + + public int CalculateSize() { + int size = 0; + if (Data.Length != 0) { + size += 1 + pb::CodedOutputStream.ComputeBytesSize(Data); + } + return size; + } + + public void MergeFrom(MoreBytes other) { + if (other == null) { + return; + } + if (other.Data.Length != 0) { + Data = other.Data; + } + } + + public void MergeFrom(pb::CodedInputStream input) { + uint tag; + while ((tag = input.ReadTag()) != 0) { + switch(tag) { + default: + input.SkipLastField(); + break; + case 10: { + Data = input.ReadBytes(); + break; + } + } + } + } + + } + + /// + /// Test int32, uint32, int64, uint64, and bool are all compatible + /// + [global::System.Diagnostics.DebuggerNonUserCodeAttribute()] + public sealed partial class Int32Message : pb::IMessage { + private static readonly pb::MessageParser _parser = new pb::MessageParser(() => new Int32Message()); + public static pb::MessageParser Parser { get { return _parser; } } + + public static pbr::MessageDescriptor Descriptor { + get { return global::Google.Protobuf.TestProtos.UnittestProto3Reflection.Descriptor.MessageTypes[17]; } + } + + pbr::MessageDescriptor pb::IMessage.Descriptor { + get { return Descriptor; } + } + + public Int32Message() { + OnConstruction(); + } + + partial void OnConstruction(); + + public Int32Message(Int32Message other) : this() { + data_ = other.data_; + } + + public Int32Message Clone() { + return new Int32Message(this); + } + + /// Field number for the "data" field. 
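+ /// Int32Message, Uint32Message, Int64Message, Uint64Message and BoolMessage all declare
+ /// "data" as field 1 encoded as a varint (tag byte 8), so one serialized payload can be
+ /// parsed as any of them; that shared layout is what the compatibility tests rely on.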
+ public const int DataFieldNumber = 1; + private int data_; + public int Data { + get { return data_; } + set { + data_ = value; + } + } + + public override bool Equals(object other) { + return Equals(other as Int32Message); + } + + public bool Equals(Int32Message other) { + if (ReferenceEquals(other, null)) { + return false; + } + if (ReferenceEquals(other, this)) { + return true; + } + if (Data != other.Data) return false; + return true; + } + + public override int GetHashCode() { + int hash = 1; + if (Data != 0) hash ^= Data.GetHashCode(); + return hash; + } + + public override string ToString() { + return pb::JsonFormatter.ToDiagnosticString(this); + } + + public void WriteTo(pb::CodedOutputStream output) { + if (Data != 0) { + output.WriteRawTag(8); + output.WriteInt32(Data); + } + } + + public int CalculateSize() { + int size = 0; + if (Data != 0) { + size += 1 + pb::CodedOutputStream.ComputeInt32Size(Data); + } + return size; + } + + public void MergeFrom(Int32Message other) { + if (other == null) { + return; + } + if (other.Data != 0) { + Data = other.Data; + } + } + + public void MergeFrom(pb::CodedInputStream input) { + uint tag; + while ((tag = input.ReadTag()) != 0) { + switch(tag) { + default: + input.SkipLastField(); + break; + case 8: { + Data = input.ReadInt32(); + break; + } + } + } + } + + } + + [global::System.Diagnostics.DebuggerNonUserCodeAttribute()] + public sealed partial class Uint32Message : pb::IMessage { + private static readonly pb::MessageParser _parser = new pb::MessageParser(() => new Uint32Message()); + public static pb::MessageParser Parser { get { return _parser; } } + + public static pbr::MessageDescriptor Descriptor { + get { return global::Google.Protobuf.TestProtos.UnittestProto3Reflection.Descriptor.MessageTypes[18]; } + } + + pbr::MessageDescriptor pb::IMessage.Descriptor { + get { return Descriptor; } + } + + public Uint32Message() { + OnConstruction(); + } + + partial void OnConstruction(); + + public Uint32Message(Uint32Message other) : this() { + data_ = other.data_; + } + + public Uint32Message Clone() { + return new Uint32Message(this); + } + + /// Field number for the "data" field. 
+ public const int DataFieldNumber = 1; + private uint data_; + public uint Data { + get { return data_; } + set { + data_ = value; + } + } + + public override bool Equals(object other) { + return Equals(other as Uint32Message); + } + + public bool Equals(Uint32Message other) { + if (ReferenceEquals(other, null)) { + return false; + } + if (ReferenceEquals(other, this)) { + return true; + } + if (Data != other.Data) return false; + return true; + } + + public override int GetHashCode() { + int hash = 1; + if (Data != 0) hash ^= Data.GetHashCode(); + return hash; + } + + public override string ToString() { + return pb::JsonFormatter.ToDiagnosticString(this); + } + + public void WriteTo(pb::CodedOutputStream output) { + if (Data != 0) { + output.WriteRawTag(8); + output.WriteUInt32(Data); + } + } + + public int CalculateSize() { + int size = 0; + if (Data != 0) { + size += 1 + pb::CodedOutputStream.ComputeUInt32Size(Data); + } + return size; + } + + public void MergeFrom(Uint32Message other) { + if (other == null) { + return; + } + if (other.Data != 0) { + Data = other.Data; + } + } + + public void MergeFrom(pb::CodedInputStream input) { + uint tag; + while ((tag = input.ReadTag()) != 0) { + switch(tag) { + default: + input.SkipLastField(); + break; + case 8: { + Data = input.ReadUInt32(); + break; + } + } + } + } + + } + + [global::System.Diagnostics.DebuggerNonUserCodeAttribute()] + public sealed partial class Int64Message : pb::IMessage { + private static readonly pb::MessageParser _parser = new pb::MessageParser(() => new Int64Message()); + public static pb::MessageParser Parser { get { return _parser; } } + + public static pbr::MessageDescriptor Descriptor { + get { return global::Google.Protobuf.TestProtos.UnittestProto3Reflection.Descriptor.MessageTypes[19]; } + } + + pbr::MessageDescriptor pb::IMessage.Descriptor { + get { return Descriptor; } + } + + public Int64Message() { + OnConstruction(); + } + + partial void OnConstruction(); + + public Int64Message(Int64Message other) : this() { + data_ = other.data_; + } + + public Int64Message Clone() { + return new Int64Message(this); + } + + /// Field number for the "data" field. 
+ public const int DataFieldNumber = 1; + private long data_; + public long Data { + get { return data_; } + set { + data_ = value; + } + } + + public override bool Equals(object other) { + return Equals(other as Int64Message); + } + + public bool Equals(Int64Message other) { + if (ReferenceEquals(other, null)) { + return false; + } + if (ReferenceEquals(other, this)) { + return true; + } + if (Data != other.Data) return false; + return true; + } + + public override int GetHashCode() { + int hash = 1; + if (Data != 0L) hash ^= Data.GetHashCode(); + return hash; + } + + public override string ToString() { + return pb::JsonFormatter.ToDiagnosticString(this); + } + + public void WriteTo(pb::CodedOutputStream output) { + if (Data != 0L) { + output.WriteRawTag(8); + output.WriteInt64(Data); + } + } + + public int CalculateSize() { + int size = 0; + if (Data != 0L) { + size += 1 + pb::CodedOutputStream.ComputeInt64Size(Data); + } + return size; + } + + public void MergeFrom(Int64Message other) { + if (other == null) { + return; + } + if (other.Data != 0L) { + Data = other.Data; + } + } + + public void MergeFrom(pb::CodedInputStream input) { + uint tag; + while ((tag = input.ReadTag()) != 0) { + switch(tag) { + default: + input.SkipLastField(); + break; + case 8: { + Data = input.ReadInt64(); + break; + } + } + } + } + + } + + [global::System.Diagnostics.DebuggerNonUserCodeAttribute()] + public sealed partial class Uint64Message : pb::IMessage { + private static readonly pb::MessageParser _parser = new pb::MessageParser(() => new Uint64Message()); + public static pb::MessageParser Parser { get { return _parser; } } + + public static pbr::MessageDescriptor Descriptor { + get { return global::Google.Protobuf.TestProtos.UnittestProto3Reflection.Descriptor.MessageTypes[20]; } + } + + pbr::MessageDescriptor pb::IMessage.Descriptor { + get { return Descriptor; } + } + + public Uint64Message() { + OnConstruction(); + } + + partial void OnConstruction(); + + public Uint64Message(Uint64Message other) : this() { + data_ = other.data_; + } + + public Uint64Message Clone() { + return new Uint64Message(this); + } + + /// Field number for the "data" field. 
+ public const int DataFieldNumber = 1; + private ulong data_; + public ulong Data { + get { return data_; } + set { + data_ = value; + } + } + + public override bool Equals(object other) { + return Equals(other as Uint64Message); + } + + public bool Equals(Uint64Message other) { + if (ReferenceEquals(other, null)) { + return false; + } + if (ReferenceEquals(other, this)) { + return true; + } + if (Data != other.Data) return false; + return true; + } + + public override int GetHashCode() { + int hash = 1; + if (Data != 0UL) hash ^= Data.GetHashCode(); + return hash; + } + + public override string ToString() { + return pb::JsonFormatter.ToDiagnosticString(this); + } + + public void WriteTo(pb::CodedOutputStream output) { + if (Data != 0UL) { + output.WriteRawTag(8); + output.WriteUInt64(Data); + } + } + + public int CalculateSize() { + int size = 0; + if (Data != 0UL) { + size += 1 + pb::CodedOutputStream.ComputeUInt64Size(Data); + } + return size; + } + + public void MergeFrom(Uint64Message other) { + if (other == null) { + return; + } + if (other.Data != 0UL) { + Data = other.Data; + } + } + + public void MergeFrom(pb::CodedInputStream input) { + uint tag; + while ((tag = input.ReadTag()) != 0) { + switch(tag) { + default: + input.SkipLastField(); + break; + case 8: { + Data = input.ReadUInt64(); + break; + } + } + } + } + + } + + [global::System.Diagnostics.DebuggerNonUserCodeAttribute()] + public sealed partial class BoolMessage : pb::IMessage { + private static readonly pb::MessageParser _parser = new pb::MessageParser(() => new BoolMessage()); + public static pb::MessageParser Parser { get { return _parser; } } + + public static pbr::MessageDescriptor Descriptor { + get { return global::Google.Protobuf.TestProtos.UnittestProto3Reflection.Descriptor.MessageTypes[21]; } + } + + pbr::MessageDescriptor pb::IMessage.Descriptor { + get { return Descriptor; } + } + + public BoolMessage() { + OnConstruction(); + } + + partial void OnConstruction(); + + public BoolMessage(BoolMessage other) : this() { + data_ = other.data_; + } + + public BoolMessage Clone() { + return new BoolMessage(this); + } + + /// Field number for the "data" field. + public const int DataFieldNumber = 1; + private bool data_; + public bool Data { + get { return data_; } + set { + data_ = value; + } + } + + public override bool Equals(object other) { + return Equals(other as BoolMessage); + } + + public bool Equals(BoolMessage other) { + if (ReferenceEquals(other, null)) { + return false; + } + if (ReferenceEquals(other, this)) { + return true; + } + if (Data != other.Data) return false; + return true; + } + + public override int GetHashCode() { + int hash = 1; + if (Data != false) hash ^= Data.GetHashCode(); + return hash; + } + + public override string ToString() { + return pb::JsonFormatter.ToDiagnosticString(this); + } + + public void WriteTo(pb::CodedOutputStream output) { + if (Data != false) { + output.WriteRawTag(8); + output.WriteBool(Data); + } + } + + public int CalculateSize() { + int size = 0; + if (Data != false) { + size += 1 + 1; + } + return size; + } + + public void MergeFrom(BoolMessage other) { + if (other == null) { + return; + } + if (other.Data != false) { + Data = other.Data; + } + } + + public void MergeFrom(pb::CodedInputStream input) { + uint tag; + while ((tag = input.ReadTag()) != 0) { + switch(tag) { + default: + input.SkipLastField(); + break; + case 8: { + Data = input.ReadBool(); + break; + } + } + } + } + + } + + /// + /// Test oneofs. 
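+ /// The generated oneof keeps whichever member is set in the single object field foo_ and
+ /// records the active member in fooCase_; assigning any member overwrites both, and
+ /// ClearFoo() resets the case to None.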
+ /// + [global::System.Diagnostics.DebuggerNonUserCodeAttribute()] + public sealed partial class TestOneof : pb::IMessage { + private static readonly pb::MessageParser _parser = new pb::MessageParser(() => new TestOneof()); + public static pb::MessageParser Parser { get { return _parser; } } + + public static pbr::MessageDescriptor Descriptor { + get { return global::Google.Protobuf.TestProtos.UnittestProto3Reflection.Descriptor.MessageTypes[22]; } + } + + pbr::MessageDescriptor pb::IMessage.Descriptor { + get { return Descriptor; } + } + + public TestOneof() { + OnConstruction(); + } + + partial void OnConstruction(); + + public TestOneof(TestOneof other) : this() { + switch (other.FooCase) { + case FooOneofCase.FooInt: + FooInt = other.FooInt; + break; + case FooOneofCase.FooString: + FooString = other.FooString; + break; + case FooOneofCase.FooMessage: + FooMessage = other.FooMessage.Clone(); + break; + } + + } + + public TestOneof Clone() { + return new TestOneof(this); + } + + /// Field number for the "foo_int" field. + public const int FooIntFieldNumber = 1; + public int FooInt { + get { return fooCase_ == FooOneofCase.FooInt ? (int) foo_ : 0; } + set { + foo_ = value; + fooCase_ = FooOneofCase.FooInt; + } + } + + /// Field number for the "foo_string" field. + public const int FooStringFieldNumber = 2; + public string FooString { + get { return fooCase_ == FooOneofCase.FooString ? (string) foo_ : ""; } + set { + foo_ = pb::ProtoPreconditions.CheckNotNull(value, "value"); + fooCase_ = FooOneofCase.FooString; + } + } + + /// Field number for the "foo_message" field. + public const int FooMessageFieldNumber = 3; + public global::Google.Protobuf.TestProtos.TestAllTypes FooMessage { + get { return fooCase_ == FooOneofCase.FooMessage ? (global::Google.Protobuf.TestProtos.TestAllTypes) foo_ : null; } + set { + foo_ = value; + fooCase_ = value == null ? FooOneofCase.None : FooOneofCase.FooMessage; + } + } + + private object foo_; + /// Enum of possible cases for the "foo" oneof. 
+ public enum FooOneofCase { + None = 0, + FooInt = 1, + FooString = 2, + FooMessage = 3, + } + private FooOneofCase fooCase_ = FooOneofCase.None; + public FooOneofCase FooCase { + get { return fooCase_; } + } + + public void ClearFoo() { + fooCase_ = FooOneofCase.None; + foo_ = null; + } + + public override bool Equals(object other) { + return Equals(other as TestOneof); + } + + public bool Equals(TestOneof other) { + if (ReferenceEquals(other, null)) { + return false; + } + if (ReferenceEquals(other, this)) { + return true; + } + if (FooInt != other.FooInt) return false; + if (FooString != other.FooString) return false; + if (!object.Equals(FooMessage, other.FooMessage)) return false; + if (FooCase != other.FooCase) return false; + return true; + } + + public override int GetHashCode() { + int hash = 1; + if (fooCase_ == FooOneofCase.FooInt) hash ^= FooInt.GetHashCode(); + if (fooCase_ == FooOneofCase.FooString) hash ^= FooString.GetHashCode(); + if (fooCase_ == FooOneofCase.FooMessage) hash ^= FooMessage.GetHashCode(); + hash ^= (int) fooCase_; + return hash; + } + + public override string ToString() { + return pb::JsonFormatter.ToDiagnosticString(this); + } + + public void WriteTo(pb::CodedOutputStream output) { + if (fooCase_ == FooOneofCase.FooInt) { + output.WriteRawTag(8); + output.WriteInt32(FooInt); + } + if (fooCase_ == FooOneofCase.FooString) { + output.WriteRawTag(18); + output.WriteString(FooString); + } + if (fooCase_ == FooOneofCase.FooMessage) { + output.WriteRawTag(26); + output.WriteMessage(FooMessage); + } + } + + public int CalculateSize() { + int size = 0; + if (fooCase_ == FooOneofCase.FooInt) { + size += 1 + pb::CodedOutputStream.ComputeInt32Size(FooInt); + } + if (fooCase_ == FooOneofCase.FooString) { + size += 1 + pb::CodedOutputStream.ComputeStringSize(FooString); + } + if (fooCase_ == FooOneofCase.FooMessage) { + size += 1 + pb::CodedOutputStream.ComputeMessageSize(FooMessage); + } + return size; + } + + public void MergeFrom(TestOneof other) { + if (other == null) { + return; + } + switch (other.FooCase) { + case FooOneofCase.FooInt: + FooInt = other.FooInt; + break; + case FooOneofCase.FooString: + FooString = other.FooString; + break; + case FooOneofCase.FooMessage: + FooMessage = other.FooMessage; + break; + } + + } + + public void MergeFrom(pb::CodedInputStream input) { + uint tag; + while ((tag = input.ReadTag()) != 0) { + switch(tag) { + default: + input.SkipLastField(); + break; + case 8: { + FooInt = input.ReadInt32(); + break; + } + case 18: { + FooString = input.ReadString(); + break; + } + case 26: { + global::Google.Protobuf.TestProtos.TestAllTypes subBuilder = new global::Google.Protobuf.TestProtos.TestAllTypes(); + if (fooCase_ == FooOneofCase.FooMessage) { + subBuilder.MergeFrom(FooMessage); + } + input.ReadMessage(subBuilder); + FooMessage = subBuilder; + break; + } + } + } + } + + } + + [global::System.Diagnostics.DebuggerNonUserCodeAttribute()] + public sealed partial class TestPackedTypes : pb::IMessage { + private static readonly pb::MessageParser _parser = new pb::MessageParser(() => new TestPackedTypes()); + public static pb::MessageParser Parser { get { return _parser; } } + + public static pbr::MessageDescriptor Descriptor { + get { return global::Google.Protobuf.TestProtos.UnittestProto3Reflection.Descriptor.MessageTypes[23]; } + } + + pbr::MessageDescriptor pb::IMessage.Descriptor { + get { return Descriptor; } + } + + public TestPackedTypes() { + OnConstruction(); + } + + partial void OnConstruction(); + + public 
TestPackedTypes(TestPackedTypes other) : this() { + packedInt32_ = other.packedInt32_.Clone(); + packedInt64_ = other.packedInt64_.Clone(); + packedUint32_ = other.packedUint32_.Clone(); + packedUint64_ = other.packedUint64_.Clone(); + packedSint32_ = other.packedSint32_.Clone(); + packedSint64_ = other.packedSint64_.Clone(); + packedFixed32_ = other.packedFixed32_.Clone(); + packedFixed64_ = other.packedFixed64_.Clone(); + packedSfixed32_ = other.packedSfixed32_.Clone(); + packedSfixed64_ = other.packedSfixed64_.Clone(); + packedFloat_ = other.packedFloat_.Clone(); + packedDouble_ = other.packedDouble_.Clone(); + packedBool_ = other.packedBool_.Clone(); + packedEnum_ = other.packedEnum_.Clone(); + } + + public TestPackedTypes Clone() { + return new TestPackedTypes(this); + } + + /// Field number for the "packed_int32" field. + public const int PackedInt32FieldNumber = 90; + private static readonly pb::FieldCodec _repeated_packedInt32_codec + = pb::FieldCodec.ForInt32(722); + private readonly pbc::RepeatedField packedInt32_ = new pbc::RepeatedField(); + public pbc::RepeatedField PackedInt32 { + get { return packedInt32_; } + } + + /// Field number for the "packed_int64" field. + public const int PackedInt64FieldNumber = 91; + private static readonly pb::FieldCodec _repeated_packedInt64_codec + = pb::FieldCodec.ForInt64(730); + private readonly pbc::RepeatedField packedInt64_ = new pbc::RepeatedField(); + public pbc::RepeatedField PackedInt64 { + get { return packedInt64_; } + } + + /// Field number for the "packed_uint32" field. + public const int PackedUint32FieldNumber = 92; + private static readonly pb::FieldCodec _repeated_packedUint32_codec + = pb::FieldCodec.ForUInt32(738); + private readonly pbc::RepeatedField packedUint32_ = new pbc::RepeatedField(); + public pbc::RepeatedField PackedUint32 { + get { return packedUint32_; } + } + + /// Field number for the "packed_uint64" field. + public const int PackedUint64FieldNumber = 93; + private static readonly pb::FieldCodec _repeated_packedUint64_codec + = pb::FieldCodec.ForUInt64(746); + private readonly pbc::RepeatedField packedUint64_ = new pbc::RepeatedField(); + public pbc::RepeatedField PackedUint64 { + get { return packedUint64_; } + } + + /// Field number for the "packed_sint32" field. + public const int PackedSint32FieldNumber = 94; + private static readonly pb::FieldCodec _repeated_packedSint32_codec + = pb::FieldCodec.ForSInt32(754); + private readonly pbc::RepeatedField packedSint32_ = new pbc::RepeatedField(); + public pbc::RepeatedField PackedSint32 { + get { return packedSint32_; } + } + + /// Field number for the "packed_sint64" field. + public const int PackedSint64FieldNumber = 95; + private static readonly pb::FieldCodec _repeated_packedSint64_codec + = pb::FieldCodec.ForSInt64(762); + private readonly pbc::RepeatedField packedSint64_ = new pbc::RepeatedField(); + public pbc::RepeatedField PackedSint64 { + get { return packedSint64_; } + } + + /// Field number for the "packed_fixed32" field. + public const int PackedFixed32FieldNumber = 96; + private static readonly pb::FieldCodec _repeated_packedFixed32_codec + = pb::FieldCodec.ForFixed32(770); + private readonly pbc::RepeatedField packedFixed32_ = new pbc::RepeatedField(); + public pbc::RepeatedField PackedFixed32 { + get { return packedFixed32_; } + } + + /// Field number for the "packed_fixed64" field. 
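+ /// Even fixed-width packed fields are length-delimited on the wire: 778 is field 97 with
+ /// wire type 2 (97 * 8 + 2), the same wire type used by the varint-packed fields above.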
+ public const int PackedFixed64FieldNumber = 97; + private static readonly pb::FieldCodec _repeated_packedFixed64_codec + = pb::FieldCodec.ForFixed64(778); + private readonly pbc::RepeatedField packedFixed64_ = new pbc::RepeatedField(); + public pbc::RepeatedField PackedFixed64 { + get { return packedFixed64_; } + } + + /// Field number for the "packed_sfixed32" field. + public const int PackedSfixed32FieldNumber = 98; + private static readonly pb::FieldCodec _repeated_packedSfixed32_codec + = pb::FieldCodec.ForSFixed32(786); + private readonly pbc::RepeatedField packedSfixed32_ = new pbc::RepeatedField(); + public pbc::RepeatedField PackedSfixed32 { + get { return packedSfixed32_; } + } + + /// Field number for the "packed_sfixed64" field. + public const int PackedSfixed64FieldNumber = 99; + private static readonly pb::FieldCodec _repeated_packedSfixed64_codec + = pb::FieldCodec.ForSFixed64(794); + private readonly pbc::RepeatedField packedSfixed64_ = new pbc::RepeatedField(); + public pbc::RepeatedField PackedSfixed64 { + get { return packedSfixed64_; } + } + + /// Field number for the "packed_float" field. + public const int PackedFloatFieldNumber = 100; + private static readonly pb::FieldCodec _repeated_packedFloat_codec + = pb::FieldCodec.ForFloat(802); + private readonly pbc::RepeatedField packedFloat_ = new pbc::RepeatedField(); + public pbc::RepeatedField PackedFloat { + get { return packedFloat_; } + } + + /// Field number for the "packed_double" field. + public const int PackedDoubleFieldNumber = 101; + private static readonly pb::FieldCodec _repeated_packedDouble_codec + = pb::FieldCodec.ForDouble(810); + private readonly pbc::RepeatedField packedDouble_ = new pbc::RepeatedField(); + public pbc::RepeatedField PackedDouble { + get { return packedDouble_; } + } + + /// Field number for the "packed_bool" field. + public const int PackedBoolFieldNumber = 102; + private static readonly pb::FieldCodec _repeated_packedBool_codec + = pb::FieldCodec.ForBool(818); + private readonly pbc::RepeatedField packedBool_ = new pbc::RepeatedField(); + public pbc::RepeatedField PackedBool { + get { return packedBool_; } + } + + /// Field number for the "packed_enum" field. 
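+ /// The enum codec below takes two conversion delegates: one maps ForeignEnum values to
+ /// their int wire representation, the other maps ints read from the wire back to the enum.
+ /// Its tag 826 is field 103 with wire type 2 (103 * 8 + 2), i.e. the packed form.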
+ public const int PackedEnumFieldNumber = 103; + private static readonly pb::FieldCodec _repeated_packedEnum_codec + = pb::FieldCodec.ForEnum(826, x => (int) x, x => (global::Google.Protobuf.TestProtos.ForeignEnum) x); + private readonly pbc::RepeatedField packedEnum_ = new pbc::RepeatedField(); + public pbc::RepeatedField PackedEnum { + get { return packedEnum_; } + } + + public override bool Equals(object other) { + return Equals(other as TestPackedTypes); + } + + public bool Equals(TestPackedTypes other) { + if (ReferenceEquals(other, null)) { + return false; + } + if (ReferenceEquals(other, this)) { + return true; + } + if(!packedInt32_.Equals(other.packedInt32_)) return false; + if(!packedInt64_.Equals(other.packedInt64_)) return false; + if(!packedUint32_.Equals(other.packedUint32_)) return false; + if(!packedUint64_.Equals(other.packedUint64_)) return false; + if(!packedSint32_.Equals(other.packedSint32_)) return false; + if(!packedSint64_.Equals(other.packedSint64_)) return false; + if(!packedFixed32_.Equals(other.packedFixed32_)) return false; + if(!packedFixed64_.Equals(other.packedFixed64_)) return false; + if(!packedSfixed32_.Equals(other.packedSfixed32_)) return false; + if(!packedSfixed64_.Equals(other.packedSfixed64_)) return false; + if(!packedFloat_.Equals(other.packedFloat_)) return false; + if(!packedDouble_.Equals(other.packedDouble_)) return false; + if(!packedBool_.Equals(other.packedBool_)) return false; + if(!packedEnum_.Equals(other.packedEnum_)) return false; + return true; + } + + public override int GetHashCode() { + int hash = 1; + hash ^= packedInt32_.GetHashCode(); + hash ^= packedInt64_.GetHashCode(); + hash ^= packedUint32_.GetHashCode(); + hash ^= packedUint64_.GetHashCode(); + hash ^= packedSint32_.GetHashCode(); + hash ^= packedSint64_.GetHashCode(); + hash ^= packedFixed32_.GetHashCode(); + hash ^= packedFixed64_.GetHashCode(); + hash ^= packedSfixed32_.GetHashCode(); + hash ^= packedSfixed64_.GetHashCode(); + hash ^= packedFloat_.GetHashCode(); + hash ^= packedDouble_.GetHashCode(); + hash ^= packedBool_.GetHashCode(); + hash ^= packedEnum_.GetHashCode(); + return hash; + } + + public override string ToString() { + return pb::JsonFormatter.ToDiagnosticString(this); + } + + public void WriteTo(pb::CodedOutputStream output) { + packedInt32_.WriteTo(output, _repeated_packedInt32_codec); + packedInt64_.WriteTo(output, _repeated_packedInt64_codec); + packedUint32_.WriteTo(output, _repeated_packedUint32_codec); + packedUint64_.WriteTo(output, _repeated_packedUint64_codec); + packedSint32_.WriteTo(output, _repeated_packedSint32_codec); + packedSint64_.WriteTo(output, _repeated_packedSint64_codec); + packedFixed32_.WriteTo(output, _repeated_packedFixed32_codec); + packedFixed64_.WriteTo(output, _repeated_packedFixed64_codec); + packedSfixed32_.WriteTo(output, _repeated_packedSfixed32_codec); + packedSfixed64_.WriteTo(output, _repeated_packedSfixed64_codec); + packedFloat_.WriteTo(output, _repeated_packedFloat_codec); + packedDouble_.WriteTo(output, _repeated_packedDouble_codec); + packedBool_.WriteTo(output, _repeated_packedBool_codec); + packedEnum_.WriteTo(output, _repeated_packedEnum_codec); + } + + public int CalculateSize() { + int size = 0; + size += packedInt32_.CalculateSize(_repeated_packedInt32_codec); + size += packedInt64_.CalculateSize(_repeated_packedInt64_codec); + size += packedUint32_.CalculateSize(_repeated_packedUint32_codec); + size += packedUint64_.CalculateSize(_repeated_packedUint64_codec); + size += 
packedSint32_.CalculateSize(_repeated_packedSint32_codec); + size += packedSint64_.CalculateSize(_repeated_packedSint64_codec); + size += packedFixed32_.CalculateSize(_repeated_packedFixed32_codec); + size += packedFixed64_.CalculateSize(_repeated_packedFixed64_codec); + size += packedSfixed32_.CalculateSize(_repeated_packedSfixed32_codec); + size += packedSfixed64_.CalculateSize(_repeated_packedSfixed64_codec); + size += packedFloat_.CalculateSize(_repeated_packedFloat_codec); + size += packedDouble_.CalculateSize(_repeated_packedDouble_codec); + size += packedBool_.CalculateSize(_repeated_packedBool_codec); + size += packedEnum_.CalculateSize(_repeated_packedEnum_codec); + return size; + } + + public void MergeFrom(TestPackedTypes other) { + if (other == null) { + return; + } + packedInt32_.Add(other.packedInt32_); + packedInt64_.Add(other.packedInt64_); + packedUint32_.Add(other.packedUint32_); + packedUint64_.Add(other.packedUint64_); + packedSint32_.Add(other.packedSint32_); + packedSint64_.Add(other.packedSint64_); + packedFixed32_.Add(other.packedFixed32_); + packedFixed64_.Add(other.packedFixed64_); + packedSfixed32_.Add(other.packedSfixed32_); + packedSfixed64_.Add(other.packedSfixed64_); + packedFloat_.Add(other.packedFloat_); + packedDouble_.Add(other.packedDouble_); + packedBool_.Add(other.packedBool_); + packedEnum_.Add(other.packedEnum_); + } + + public void MergeFrom(pb::CodedInputStream input) { + uint tag; + while ((tag = input.ReadTag()) != 0) { + switch(tag) { + default: + input.SkipLastField(); + break; + case 722: + case 720: { + packedInt32_.AddEntriesFrom(input, _repeated_packedInt32_codec); + break; + } + case 730: + case 728: { + packedInt64_.AddEntriesFrom(input, _repeated_packedInt64_codec); + break; + } + case 738: + case 736: { + packedUint32_.AddEntriesFrom(input, _repeated_packedUint32_codec); + break; + } + case 746: + case 744: { + packedUint64_.AddEntriesFrom(input, _repeated_packedUint64_codec); + break; + } + case 754: + case 752: { + packedSint32_.AddEntriesFrom(input, _repeated_packedSint32_codec); + break; + } + case 762: + case 760: { + packedSint64_.AddEntriesFrom(input, _repeated_packedSint64_codec); + break; + } + case 770: + case 773: { + packedFixed32_.AddEntriesFrom(input, _repeated_packedFixed32_codec); + break; + } + case 778: + case 777: { + packedFixed64_.AddEntriesFrom(input, _repeated_packedFixed64_codec); + break; + } + case 786: + case 789: { + packedSfixed32_.AddEntriesFrom(input, _repeated_packedSfixed32_codec); + break; + } + case 794: + case 793: { + packedSfixed64_.AddEntriesFrom(input, _repeated_packedSfixed64_codec); + break; + } + case 802: + case 805: { + packedFloat_.AddEntriesFrom(input, _repeated_packedFloat_codec); + break; + } + case 810: + case 809: { + packedDouble_.AddEntriesFrom(input, _repeated_packedDouble_codec); + break; + } + case 818: + case 816: { + packedBool_.AddEntriesFrom(input, _repeated_packedBool_codec); + break; + } + case 826: + case 824: { + packedEnum_.AddEntriesFrom(input, _repeated_packedEnum_codec); + break; + } + } + } + } + + } + + /// + /// A message with the same fields as TestPackedTypes, but without packing. Used + /// to test packed <-> unpacked wire compatibility. 
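+ /// Packed fields are written once as a length-delimited block (wire type 2), while unpacked
+ /// fields repeat the tag before every element; both parsers accept either form, which is why
+ /// each MergeFrom case below lists two tags per field.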
+ /// + [global::System.Diagnostics.DebuggerNonUserCodeAttribute()] + public sealed partial class TestUnpackedTypes : pb::IMessage { + private static readonly pb::MessageParser _parser = new pb::MessageParser(() => new TestUnpackedTypes()); + public static pb::MessageParser Parser { get { return _parser; } } + + public static pbr::MessageDescriptor Descriptor { + get { return global::Google.Protobuf.TestProtos.UnittestProto3Reflection.Descriptor.MessageTypes[24]; } + } + + pbr::MessageDescriptor pb::IMessage.Descriptor { + get { return Descriptor; } + } + + public TestUnpackedTypes() { + OnConstruction(); + } + + partial void OnConstruction(); + + public TestUnpackedTypes(TestUnpackedTypes other) : this() { + unpackedInt32_ = other.unpackedInt32_.Clone(); + unpackedInt64_ = other.unpackedInt64_.Clone(); + unpackedUint32_ = other.unpackedUint32_.Clone(); + unpackedUint64_ = other.unpackedUint64_.Clone(); + unpackedSint32_ = other.unpackedSint32_.Clone(); + unpackedSint64_ = other.unpackedSint64_.Clone(); + unpackedFixed32_ = other.unpackedFixed32_.Clone(); + unpackedFixed64_ = other.unpackedFixed64_.Clone(); + unpackedSfixed32_ = other.unpackedSfixed32_.Clone(); + unpackedSfixed64_ = other.unpackedSfixed64_.Clone(); + unpackedFloat_ = other.unpackedFloat_.Clone(); + unpackedDouble_ = other.unpackedDouble_.Clone(); + unpackedBool_ = other.unpackedBool_.Clone(); + unpackedEnum_ = other.unpackedEnum_.Clone(); + } + + public TestUnpackedTypes Clone() { + return new TestUnpackedTypes(this); + } + + /// Field number for the "unpacked_int32" field. + public const int UnpackedInt32FieldNumber = 90; + private static readonly pb::FieldCodec _repeated_unpackedInt32_codec + = pb::FieldCodec.ForInt32(720); + private readonly pbc::RepeatedField unpackedInt32_ = new pbc::RepeatedField(); + public pbc::RepeatedField UnpackedInt32 { + get { return unpackedInt32_; } + } + + /// Field number for the "unpacked_int64" field. + public const int UnpackedInt64FieldNumber = 91; + private static readonly pb::FieldCodec _repeated_unpackedInt64_codec + = pb::FieldCodec.ForInt64(728); + private readonly pbc::RepeatedField unpackedInt64_ = new pbc::RepeatedField(); + public pbc::RepeatedField UnpackedInt64 { + get { return unpackedInt64_; } + } + + /// Field number for the "unpacked_uint32" field. + public const int UnpackedUint32FieldNumber = 92; + private static readonly pb::FieldCodec _repeated_unpackedUint32_codec + = pb::FieldCodec.ForUInt32(736); + private readonly pbc::RepeatedField unpackedUint32_ = new pbc::RepeatedField(); + public pbc::RepeatedField UnpackedUint32 { + get { return unpackedUint32_; } + } + + /// Field number for the "unpacked_uint64" field. + public const int UnpackedUint64FieldNumber = 93; + private static readonly pb::FieldCodec _repeated_unpackedUint64_codec + = pb::FieldCodec.ForUInt64(744); + private readonly pbc::RepeatedField unpackedUint64_ = new pbc::RepeatedField(); + public pbc::RepeatedField UnpackedUint64 { + get { return unpackedUint64_; } + } + + /// Field number for the "unpacked_sint32" field. + public const int UnpackedSint32FieldNumber = 94; + private static readonly pb::FieldCodec _repeated_unpackedSint32_codec + = pb::FieldCodec.ForSInt32(752); + private readonly pbc::RepeatedField unpackedSint32_ = new pbc::RepeatedField(); + public pbc::RepeatedField UnpackedSint32 { + get { return unpackedSint32_; } + } + + /// Field number for the "unpacked_sint64" field. 
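+ /// The sint32/sint64 fields go through the SInt codecs, which ZigZag-encode values so that
+ /// small negative numbers stay small on the wire.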
+ public const int UnpackedSint64FieldNumber = 95; + private static readonly pb::FieldCodec _repeated_unpackedSint64_codec + = pb::FieldCodec.ForSInt64(760); + private readonly pbc::RepeatedField unpackedSint64_ = new pbc::RepeatedField(); + public pbc::RepeatedField UnpackedSint64 { + get { return unpackedSint64_; } + } + + /// Field number for the "unpacked_fixed32" field. + public const int UnpackedFixed32FieldNumber = 96; + private static readonly pb::FieldCodec _repeated_unpackedFixed32_codec + = pb::FieldCodec.ForFixed32(773); + private readonly pbc::RepeatedField unpackedFixed32_ = new pbc::RepeatedField(); + public pbc::RepeatedField UnpackedFixed32 { + get { return unpackedFixed32_; } + } + + /// Field number for the "unpacked_fixed64" field. + public const int UnpackedFixed64FieldNumber = 97; + private static readonly pb::FieldCodec _repeated_unpackedFixed64_codec + = pb::FieldCodec.ForFixed64(777); + private readonly pbc::RepeatedField unpackedFixed64_ = new pbc::RepeatedField(); + public pbc::RepeatedField UnpackedFixed64 { + get { return unpackedFixed64_; } + } + + /// Field number for the "unpacked_sfixed32" field. + public const int UnpackedSfixed32FieldNumber = 98; + private static readonly pb::FieldCodec _repeated_unpackedSfixed32_codec + = pb::FieldCodec.ForSFixed32(789); + private readonly pbc::RepeatedField unpackedSfixed32_ = new pbc::RepeatedField(); + public pbc::RepeatedField UnpackedSfixed32 { + get { return unpackedSfixed32_; } + } + + /// Field number for the "unpacked_sfixed64" field. + public const int UnpackedSfixed64FieldNumber = 99; + private static readonly pb::FieldCodec _repeated_unpackedSfixed64_codec + = pb::FieldCodec.ForSFixed64(793); + private readonly pbc::RepeatedField unpackedSfixed64_ = new pbc::RepeatedField(); + public pbc::RepeatedField UnpackedSfixed64 { + get { return unpackedSfixed64_; } + } + + /// Field number for the "unpacked_float" field. + public const int UnpackedFloatFieldNumber = 100; + private static readonly pb::FieldCodec _repeated_unpackedFloat_codec + = pb::FieldCodec.ForFloat(805); + private readonly pbc::RepeatedField unpackedFloat_ = new pbc::RepeatedField(); + public pbc::RepeatedField UnpackedFloat { + get { return unpackedFloat_; } + } + + /// Field number for the "unpacked_double" field. + public const int UnpackedDoubleFieldNumber = 101; + private static readonly pb::FieldCodec _repeated_unpackedDouble_codec + = pb::FieldCodec.ForDouble(809); + private readonly pbc::RepeatedField unpackedDouble_ = new pbc::RepeatedField(); + public pbc::RepeatedField UnpackedDouble { + get { return unpackedDouble_; } + } + + /// Field number for the "unpacked_bool" field. + public const int UnpackedBoolFieldNumber = 102; + private static readonly pb::FieldCodec _repeated_unpackedBool_codec + = pb::FieldCodec.ForBool(816); + private readonly pbc::RepeatedField unpackedBool_ = new pbc::RepeatedField(); + public pbc::RepeatedField UnpackedBool { + get { return unpackedBool_; } + } + + /// Field number for the "unpacked_enum" field. 
+ public const int UnpackedEnumFieldNumber = 103; + private static readonly pb::FieldCodec _repeated_unpackedEnum_codec + = pb::FieldCodec.ForEnum(824, x => (int) x, x => (global::Google.Protobuf.TestProtos.ForeignEnum) x); + private readonly pbc::RepeatedField unpackedEnum_ = new pbc::RepeatedField(); + public pbc::RepeatedField UnpackedEnum { + get { return unpackedEnum_; } + } + + public override bool Equals(object other) { + return Equals(other as TestUnpackedTypes); + } + + public bool Equals(TestUnpackedTypes other) { + if (ReferenceEquals(other, null)) { + return false; + } + if (ReferenceEquals(other, this)) { + return true; + } + if(!unpackedInt32_.Equals(other.unpackedInt32_)) return false; + if(!unpackedInt64_.Equals(other.unpackedInt64_)) return false; + if(!unpackedUint32_.Equals(other.unpackedUint32_)) return false; + if(!unpackedUint64_.Equals(other.unpackedUint64_)) return false; + if(!unpackedSint32_.Equals(other.unpackedSint32_)) return false; + if(!unpackedSint64_.Equals(other.unpackedSint64_)) return false; + if(!unpackedFixed32_.Equals(other.unpackedFixed32_)) return false; + if(!unpackedFixed64_.Equals(other.unpackedFixed64_)) return false; + if(!unpackedSfixed32_.Equals(other.unpackedSfixed32_)) return false; + if(!unpackedSfixed64_.Equals(other.unpackedSfixed64_)) return false; + if(!unpackedFloat_.Equals(other.unpackedFloat_)) return false; + if(!unpackedDouble_.Equals(other.unpackedDouble_)) return false; + if(!unpackedBool_.Equals(other.unpackedBool_)) return false; + if(!unpackedEnum_.Equals(other.unpackedEnum_)) return false; + return true; + } + + public override int GetHashCode() { + int hash = 1; + hash ^= unpackedInt32_.GetHashCode(); + hash ^= unpackedInt64_.GetHashCode(); + hash ^= unpackedUint32_.GetHashCode(); + hash ^= unpackedUint64_.GetHashCode(); + hash ^= unpackedSint32_.GetHashCode(); + hash ^= unpackedSint64_.GetHashCode(); + hash ^= unpackedFixed32_.GetHashCode(); + hash ^= unpackedFixed64_.GetHashCode(); + hash ^= unpackedSfixed32_.GetHashCode(); + hash ^= unpackedSfixed64_.GetHashCode(); + hash ^= unpackedFloat_.GetHashCode(); + hash ^= unpackedDouble_.GetHashCode(); + hash ^= unpackedBool_.GetHashCode(); + hash ^= unpackedEnum_.GetHashCode(); + return hash; + } + + public override string ToString() { + return pb::JsonFormatter.ToDiagnosticString(this); + } + + public void WriteTo(pb::CodedOutputStream output) { + unpackedInt32_.WriteTo(output, _repeated_unpackedInt32_codec); + unpackedInt64_.WriteTo(output, _repeated_unpackedInt64_codec); + unpackedUint32_.WriteTo(output, _repeated_unpackedUint32_codec); + unpackedUint64_.WriteTo(output, _repeated_unpackedUint64_codec); + unpackedSint32_.WriteTo(output, _repeated_unpackedSint32_codec); + unpackedSint64_.WriteTo(output, _repeated_unpackedSint64_codec); + unpackedFixed32_.WriteTo(output, _repeated_unpackedFixed32_codec); + unpackedFixed64_.WriteTo(output, _repeated_unpackedFixed64_codec); + unpackedSfixed32_.WriteTo(output, _repeated_unpackedSfixed32_codec); + unpackedSfixed64_.WriteTo(output, _repeated_unpackedSfixed64_codec); + unpackedFloat_.WriteTo(output, _repeated_unpackedFloat_codec); + unpackedDouble_.WriteTo(output, _repeated_unpackedDouble_codec); + unpackedBool_.WriteTo(output, _repeated_unpackedBool_codec); + unpackedEnum_.WriteTo(output, _repeated_unpackedEnum_codec); + } + + public int CalculateSize() { + int size = 0; + size += unpackedInt32_.CalculateSize(_repeated_unpackedInt32_codec); + size += unpackedInt64_.CalculateSize(_repeated_unpackedInt64_codec); + size += 
unpackedUint32_.CalculateSize(_repeated_unpackedUint32_codec); + size += unpackedUint64_.CalculateSize(_repeated_unpackedUint64_codec); + size += unpackedSint32_.CalculateSize(_repeated_unpackedSint32_codec); + size += unpackedSint64_.CalculateSize(_repeated_unpackedSint64_codec); + size += unpackedFixed32_.CalculateSize(_repeated_unpackedFixed32_codec); + size += unpackedFixed64_.CalculateSize(_repeated_unpackedFixed64_codec); + size += unpackedSfixed32_.CalculateSize(_repeated_unpackedSfixed32_codec); + size += unpackedSfixed64_.CalculateSize(_repeated_unpackedSfixed64_codec); + size += unpackedFloat_.CalculateSize(_repeated_unpackedFloat_codec); + size += unpackedDouble_.CalculateSize(_repeated_unpackedDouble_codec); + size += unpackedBool_.CalculateSize(_repeated_unpackedBool_codec); + size += unpackedEnum_.CalculateSize(_repeated_unpackedEnum_codec); + return size; + } + + public void MergeFrom(TestUnpackedTypes other) { + if (other == null) { + return; + } + unpackedInt32_.Add(other.unpackedInt32_); + unpackedInt64_.Add(other.unpackedInt64_); + unpackedUint32_.Add(other.unpackedUint32_); + unpackedUint64_.Add(other.unpackedUint64_); + unpackedSint32_.Add(other.unpackedSint32_); + unpackedSint64_.Add(other.unpackedSint64_); + unpackedFixed32_.Add(other.unpackedFixed32_); + unpackedFixed64_.Add(other.unpackedFixed64_); + unpackedSfixed32_.Add(other.unpackedSfixed32_); + unpackedSfixed64_.Add(other.unpackedSfixed64_); + unpackedFloat_.Add(other.unpackedFloat_); + unpackedDouble_.Add(other.unpackedDouble_); + unpackedBool_.Add(other.unpackedBool_); + unpackedEnum_.Add(other.unpackedEnum_); + } + + public void MergeFrom(pb::CodedInputStream input) { + uint tag; + while ((tag = input.ReadTag()) != 0) { + switch(tag) { + default: + input.SkipLastField(); + break; + case 722: + case 720: { + unpackedInt32_.AddEntriesFrom(input, _repeated_unpackedInt32_codec); + break; + } + case 730: + case 728: { + unpackedInt64_.AddEntriesFrom(input, _repeated_unpackedInt64_codec); + break; + } + case 738: + case 736: { + unpackedUint32_.AddEntriesFrom(input, _repeated_unpackedUint32_codec); + break; + } + case 746: + case 744: { + unpackedUint64_.AddEntriesFrom(input, _repeated_unpackedUint64_codec); + break; + } + case 754: + case 752: { + unpackedSint32_.AddEntriesFrom(input, _repeated_unpackedSint32_codec); + break; + } + case 762: + case 760: { + unpackedSint64_.AddEntriesFrom(input, _repeated_unpackedSint64_codec); + break; + } + case 770: + case 773: { + unpackedFixed32_.AddEntriesFrom(input, _repeated_unpackedFixed32_codec); + break; + } + case 778: + case 777: { + unpackedFixed64_.AddEntriesFrom(input, _repeated_unpackedFixed64_codec); + break; + } + case 786: + case 789: { + unpackedSfixed32_.AddEntriesFrom(input, _repeated_unpackedSfixed32_codec); + break; + } + case 794: + case 793: { + unpackedSfixed64_.AddEntriesFrom(input, _repeated_unpackedSfixed64_codec); + break; + } + case 802: + case 805: { + unpackedFloat_.AddEntriesFrom(input, _repeated_unpackedFloat_codec); + break; + } + case 810: + case 809: { + unpackedDouble_.AddEntriesFrom(input, _repeated_unpackedDouble_codec); + break; + } + case 818: + case 816: { + unpackedBool_.AddEntriesFrom(input, _repeated_unpackedBool_codec); + break; + } + case 826: + case 824: { + unpackedEnum_.AddEntriesFrom(input, _repeated_unpackedEnum_codec); + break; + } + } + } + } + + } + + [global::System.Diagnostics.DebuggerNonUserCodeAttribute()] + public sealed partial class TestRepeatedScalarDifferentTagSizes : pb::IMessage { + private static readonly 
pb::MessageParser _parser = new pb::MessageParser(() => new TestRepeatedScalarDifferentTagSizes()); + public static pb::MessageParser Parser { get { return _parser; } } + + public static pbr::MessageDescriptor Descriptor { + get { return global::Google.Protobuf.TestProtos.UnittestProto3Reflection.Descriptor.MessageTypes[25]; } + } + + pbr::MessageDescriptor pb::IMessage.Descriptor { + get { return Descriptor; } + } + + public TestRepeatedScalarDifferentTagSizes() { + OnConstruction(); + } + + partial void OnConstruction(); + + public TestRepeatedScalarDifferentTagSizes(TestRepeatedScalarDifferentTagSizes other) : this() { + repeatedFixed32_ = other.repeatedFixed32_.Clone(); + repeatedInt32_ = other.repeatedInt32_.Clone(); + repeatedFixed64_ = other.repeatedFixed64_.Clone(); + repeatedInt64_ = other.repeatedInt64_.Clone(); + repeatedFloat_ = other.repeatedFloat_.Clone(); + repeatedUint64_ = other.repeatedUint64_.Clone(); + } + + public TestRepeatedScalarDifferentTagSizes Clone() { + return new TestRepeatedScalarDifferentTagSizes(this); + } + + /// Field number for the "repeated_fixed32" field. + public const int RepeatedFixed32FieldNumber = 12; + private static readonly pb::FieldCodec _repeated_repeatedFixed32_codec + = pb::FieldCodec.ForFixed32(98); + private readonly pbc::RepeatedField repeatedFixed32_ = new pbc::RepeatedField(); + /// + /// Parsing repeated fixed size values used to fail. This message needs to be + /// used in order to get a tag of the right size; all of the repeated fields + /// in TestAllTypes didn't trigger the check. + /// + public pbc::RepeatedField RepeatedFixed32 { + get { return repeatedFixed32_; } + } + + /// Field number for the "repeated_int32" field. + public const int RepeatedInt32FieldNumber = 13; + private static readonly pb::FieldCodec _repeated_repeatedInt32_codec + = pb::FieldCodec.ForInt32(106); + private readonly pbc::RepeatedField repeatedInt32_ = new pbc::RepeatedField(); + /// + /// Check for a varint type, just for good measure. + /// + public pbc::RepeatedField RepeatedInt32 { + get { return repeatedInt32_; } + } + + /// Field number for the "repeated_fixed64" field. + public const int RepeatedFixed64FieldNumber = 2046; + private static readonly pb::FieldCodec _repeated_repeatedFixed64_codec + = pb::FieldCodec.ForFixed64(16370); + private readonly pbc::RepeatedField repeatedFixed64_ = new pbc::RepeatedField(); + /// + /// These have two-byte tags. + /// + public pbc::RepeatedField RepeatedFixed64 { + get { return repeatedFixed64_; } + } + + /// Field number for the "repeated_int64" field. + public const int RepeatedInt64FieldNumber = 2047; + private static readonly pb::FieldCodec _repeated_repeatedInt64_codec + = pb::FieldCodec.ForInt64(16378); + private readonly pbc::RepeatedField repeatedInt64_ = new pbc::RepeatedField(); + public pbc::RepeatedField RepeatedInt64 { + get { return repeatedInt64_; } + } + + /// Field number for the "repeated_float" field. + public const int RepeatedFloatFieldNumber = 262142; + private static readonly pb::FieldCodec _repeated_repeatedFloat_codec + = pb::FieldCodec.ForFloat(2097138); + private readonly pbc::RepeatedField repeatedFloat_ = new pbc::RepeatedField(); + /// + /// Three byte tags. + /// + public pbc::RepeatedField RepeatedFloat { + get { return repeatedFloat_; } + } + + /// Field number for the "repeated_uint64" field. 
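+ /// 262143 is the largest field number whose tag still fits in three varint bytes:
+ /// 262143 * 8 + 2 = 2097146, just under 2^21; field 262144 would need a four-byte tag.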
+ public const int RepeatedUint64FieldNumber = 262143; + private static readonly pb::FieldCodec _repeated_repeatedUint64_codec + = pb::FieldCodec.ForUInt64(2097146); + private readonly pbc::RepeatedField repeatedUint64_ = new pbc::RepeatedField(); + public pbc::RepeatedField RepeatedUint64 { + get { return repeatedUint64_; } + } + + public override bool Equals(object other) { + return Equals(other as TestRepeatedScalarDifferentTagSizes); + } + + public bool Equals(TestRepeatedScalarDifferentTagSizes other) { + if (ReferenceEquals(other, null)) { + return false; + } + if (ReferenceEquals(other, this)) { + return true; + } + if(!repeatedFixed32_.Equals(other.repeatedFixed32_)) return false; + if(!repeatedInt32_.Equals(other.repeatedInt32_)) return false; + if(!repeatedFixed64_.Equals(other.repeatedFixed64_)) return false; + if(!repeatedInt64_.Equals(other.repeatedInt64_)) return false; + if(!repeatedFloat_.Equals(other.repeatedFloat_)) return false; + if(!repeatedUint64_.Equals(other.repeatedUint64_)) return false; + return true; + } + + public override int GetHashCode() { + int hash = 1; + hash ^= repeatedFixed32_.GetHashCode(); + hash ^= repeatedInt32_.GetHashCode(); + hash ^= repeatedFixed64_.GetHashCode(); + hash ^= repeatedInt64_.GetHashCode(); + hash ^= repeatedFloat_.GetHashCode(); + hash ^= repeatedUint64_.GetHashCode(); + return hash; + } + + public override string ToString() { + return pb::JsonFormatter.ToDiagnosticString(this); + } + + public void WriteTo(pb::CodedOutputStream output) { + repeatedFixed32_.WriteTo(output, _repeated_repeatedFixed32_codec); + repeatedInt32_.WriteTo(output, _repeated_repeatedInt32_codec); + repeatedFixed64_.WriteTo(output, _repeated_repeatedFixed64_codec); + repeatedInt64_.WriteTo(output, _repeated_repeatedInt64_codec); + repeatedFloat_.WriteTo(output, _repeated_repeatedFloat_codec); + repeatedUint64_.WriteTo(output, _repeated_repeatedUint64_codec); + } + + public int CalculateSize() { + int size = 0; + size += repeatedFixed32_.CalculateSize(_repeated_repeatedFixed32_codec); + size += repeatedInt32_.CalculateSize(_repeated_repeatedInt32_codec); + size += repeatedFixed64_.CalculateSize(_repeated_repeatedFixed64_codec); + size += repeatedInt64_.CalculateSize(_repeated_repeatedInt64_codec); + size += repeatedFloat_.CalculateSize(_repeated_repeatedFloat_codec); + size += repeatedUint64_.CalculateSize(_repeated_repeatedUint64_codec); + return size; + } + + public void MergeFrom(TestRepeatedScalarDifferentTagSizes other) { + if (other == null) { + return; + } + repeatedFixed32_.Add(other.repeatedFixed32_); + repeatedInt32_.Add(other.repeatedInt32_); + repeatedFixed64_.Add(other.repeatedFixed64_); + repeatedInt64_.Add(other.repeatedInt64_); + repeatedFloat_.Add(other.repeatedFloat_); + repeatedUint64_.Add(other.repeatedUint64_); + } + + public void MergeFrom(pb::CodedInputStream input) { + uint tag; + while ((tag = input.ReadTag()) != 0) { + switch(tag) { + default: + input.SkipLastField(); + break; + case 98: + case 101: { + repeatedFixed32_.AddEntriesFrom(input, _repeated_repeatedFixed32_codec); + break; + } + case 106: + case 104: { + repeatedInt32_.AddEntriesFrom(input, _repeated_repeatedInt32_codec); + break; + } + case 16370: + case 16369: { + repeatedFixed64_.AddEntriesFrom(input, _repeated_repeatedFixed64_codec); + break; + } + case 16378: + case 16376: { + repeatedInt64_.AddEntriesFrom(input, _repeated_repeatedInt64_codec); + break; + } + case 2097138: + case 2097141: { + repeatedFloat_.AddEntriesFrom(input, _repeated_repeatedFloat_codec); + break; 
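+ // Each pair of case labels above covers the packed tag (field * 8 + 2) and the unpacked
+ // tag with the element's own wire type; for repeated_float these are 2097138 and 2097141
+ // (262142 * 8 + 5).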
+ } + case 2097146: + case 2097144: { + repeatedUint64_.AddEntriesFrom(input, _repeated_repeatedUint64_codec); + break; + } + } + } + } + + } + + [global::System.Diagnostics.DebuggerNonUserCodeAttribute()] + public sealed partial class TestCommentInjectionMessage : pb::IMessage { + private static readonly pb::MessageParser _parser = new pb::MessageParser(() => new TestCommentInjectionMessage()); + public static pb::MessageParser Parser { get { return _parser; } } + + public static pbr::MessageDescriptor Descriptor { + get { return global::Google.Protobuf.TestProtos.UnittestProto3Reflection.Descriptor.MessageTypes[26]; } + } + + pbr::MessageDescriptor pb::IMessage.Descriptor { + get { return Descriptor; } + } + + public TestCommentInjectionMessage() { + OnConstruction(); + } + + partial void OnConstruction(); + + public TestCommentInjectionMessage(TestCommentInjectionMessage other) : this() { + a_ = other.a_; + } + + public TestCommentInjectionMessage Clone() { + return new TestCommentInjectionMessage(this); + } + + /// Field number for the "a" field. + public const int AFieldNumber = 1; + private string a_ = ""; + /// + /// */ <- This should not close the generated doc comment + /// + public string A { + get { return a_; } + set { + a_ = pb::ProtoPreconditions.CheckNotNull(value, "value"); + } + } + + public override bool Equals(object other) { + return Equals(other as TestCommentInjectionMessage); + } + + public bool Equals(TestCommentInjectionMessage other) { + if (ReferenceEquals(other, null)) { + return false; + } + if (ReferenceEquals(other, this)) { + return true; + } + if (A != other.A) return false; + return true; + } + + public override int GetHashCode() { + int hash = 1; + if (A.Length != 0) hash ^= A.GetHashCode(); + return hash; + } + + public override string ToString() { + return pb::JsonFormatter.ToDiagnosticString(this); + } + + public void WriteTo(pb::CodedOutputStream output) { + if (A.Length != 0) { + output.WriteRawTag(10); + output.WriteString(A); + } + } + + public int CalculateSize() { + int size = 0; + if (A.Length != 0) { + size += 1 + pb::CodedOutputStream.ComputeStringSize(A); + } + return size; + } + + public void MergeFrom(TestCommentInjectionMessage other) { + if (other == null) { + return; + } + if (other.A.Length != 0) { + A = other.A; + } + } + + public void MergeFrom(pb::CodedInputStream input) { + uint tag; + while ((tag = input.ReadTag()) != 0) { + switch(tag) { + default: + input.SkipLastField(); + break; + case 10: { + A = input.ReadString(); + break; + } + } + } + } + + } + + /// + /// Test that RPC services work. 
+ /// + [global::System.Diagnostics.DebuggerNonUserCodeAttribute()] + public sealed partial class FooRequest : pb::IMessage { + private static readonly pb::MessageParser _parser = new pb::MessageParser(() => new FooRequest()); + public static pb::MessageParser Parser { get { return _parser; } } + + public static pbr::MessageDescriptor Descriptor { + get { return global::Google.Protobuf.TestProtos.UnittestProto3Reflection.Descriptor.MessageTypes[27]; } + } + + pbr::MessageDescriptor pb::IMessage.Descriptor { + get { return Descriptor; } + } + + public FooRequest() { + OnConstruction(); + } + + partial void OnConstruction(); + + public FooRequest(FooRequest other) : this() { + } + + public FooRequest Clone() { + return new FooRequest(this); + } + + public override bool Equals(object other) { + return Equals(other as FooRequest); + } + + public bool Equals(FooRequest other) { + if (ReferenceEquals(other, null)) { + return false; + } + if (ReferenceEquals(other, this)) { + return true; + } + return true; + } + + public override int GetHashCode() { + int hash = 1; + return hash; + } + + public override string ToString() { + return pb::JsonFormatter.ToDiagnosticString(this); + } + + public void WriteTo(pb::CodedOutputStream output) { + } + + public int CalculateSize() { + int size = 0; + return size; + } + + public void MergeFrom(FooRequest other) { + if (other == null) { + return; + } + } + + public void MergeFrom(pb::CodedInputStream input) { + uint tag; + while ((tag = input.ReadTag()) != 0) { + switch(tag) { + default: + input.SkipLastField(); + break; + } + } + } + + } + + [global::System.Diagnostics.DebuggerNonUserCodeAttribute()] + public sealed partial class FooResponse : pb::IMessage { + private static readonly pb::MessageParser _parser = new pb::MessageParser(() => new FooResponse()); + public static pb::MessageParser Parser { get { return _parser; } } + + public static pbr::MessageDescriptor Descriptor { + get { return global::Google.Protobuf.TestProtos.UnittestProto3Reflection.Descriptor.MessageTypes[28]; } + } + + pbr::MessageDescriptor pb::IMessage.Descriptor { + get { return Descriptor; } + } + + public FooResponse() { + OnConstruction(); + } + + partial void OnConstruction(); + + public FooResponse(FooResponse other) : this() { + } + + public FooResponse Clone() { + return new FooResponse(this); + } + + public override bool Equals(object other) { + return Equals(other as FooResponse); + } + + public bool Equals(FooResponse other) { + if (ReferenceEquals(other, null)) { + return false; + } + if (ReferenceEquals(other, this)) { + return true; + } + return true; + } + + public override int GetHashCode() { + int hash = 1; + return hash; + } + + public override string ToString() { + return pb::JsonFormatter.ToDiagnosticString(this); + } + + public void WriteTo(pb::CodedOutputStream output) { + } + + public int CalculateSize() { + int size = 0; + return size; + } + + public void MergeFrom(FooResponse other) { + if (other == null) { + return; + } + } + + public void MergeFrom(pb::CodedInputStream input) { + uint tag; + while ((tag = input.ReadTag()) != 0) { + switch(tag) { + default: + input.SkipLastField(); + break; + } + } + } + + } + + [global::System.Diagnostics.DebuggerNonUserCodeAttribute()] + public sealed partial class FooClientMessage : pb::IMessage { + private static readonly pb::MessageParser _parser = new pb::MessageParser(() => new FooClientMessage()); + public static pb::MessageParser Parser { get { return _parser; } } + + public static pbr::MessageDescriptor Descriptor { 
+ get { return global::Google.Protobuf.TestProtos.UnittestProto3Reflection.Descriptor.MessageTypes[29]; } + } + + pbr::MessageDescriptor pb::IMessage.Descriptor { + get { return Descriptor; } + } + + public FooClientMessage() { + OnConstruction(); + } + + partial void OnConstruction(); + + public FooClientMessage(FooClientMessage other) : this() { + } + + public FooClientMessage Clone() { + return new FooClientMessage(this); + } + + public override bool Equals(object other) { + return Equals(other as FooClientMessage); + } + + public bool Equals(FooClientMessage other) { + if (ReferenceEquals(other, null)) { + return false; + } + if (ReferenceEquals(other, this)) { + return true; + } + return true; + } + + public override int GetHashCode() { + int hash = 1; + return hash; + } + + public override string ToString() { + return pb::JsonFormatter.ToDiagnosticString(this); + } + + public void WriteTo(pb::CodedOutputStream output) { + } + + public int CalculateSize() { + int size = 0; + return size; + } + + public void MergeFrom(FooClientMessage other) { + if (other == null) { + return; + } + } + + public void MergeFrom(pb::CodedInputStream input) { + uint tag; + while ((tag = input.ReadTag()) != 0) { + switch(tag) { + default: + input.SkipLastField(); + break; + } + } + } + + } + + [global::System.Diagnostics.DebuggerNonUserCodeAttribute()] + public sealed partial class FooServerMessage : pb::IMessage { + private static readonly pb::MessageParser _parser = new pb::MessageParser(() => new FooServerMessage()); + public static pb::MessageParser Parser { get { return _parser; } } + + public static pbr::MessageDescriptor Descriptor { + get { return global::Google.Protobuf.TestProtos.UnittestProto3Reflection.Descriptor.MessageTypes[30]; } + } + + pbr::MessageDescriptor pb::IMessage.Descriptor { + get { return Descriptor; } + } + + public FooServerMessage() { + OnConstruction(); + } + + partial void OnConstruction(); + + public FooServerMessage(FooServerMessage other) : this() { + } + + public FooServerMessage Clone() { + return new FooServerMessage(this); + } + + public override bool Equals(object other) { + return Equals(other as FooServerMessage); + } + + public bool Equals(FooServerMessage other) { + if (ReferenceEquals(other, null)) { + return false; + } + if (ReferenceEquals(other, this)) { + return true; + } + return true; + } + + public override int GetHashCode() { + int hash = 1; + return hash; + } + + public override string ToString() { + return pb::JsonFormatter.ToDiagnosticString(this); + } + + public void WriteTo(pb::CodedOutputStream output) { + } + + public int CalculateSize() { + int size = 0; + return size; + } + + public void MergeFrom(FooServerMessage other) { + if (other == null) { + return; + } + } + + public void MergeFrom(pb::CodedInputStream input) { + uint tag; + while ((tag = input.ReadTag()) != 0) { + switch(tag) { + default: + input.SkipLastField(); + break; + } + } + } + + } + + [global::System.Diagnostics.DebuggerNonUserCodeAttribute()] + public sealed partial class BarRequest : pb::IMessage { + private static readonly pb::MessageParser _parser = new pb::MessageParser(() => new BarRequest()); + public static pb::MessageParser Parser { get { return _parser; } } + + public static pbr::MessageDescriptor Descriptor { + get { return global::Google.Protobuf.TestProtos.UnittestProto3Reflection.Descriptor.MessageTypes[31]; } + } + + pbr::MessageDescriptor pb::IMessage.Descriptor { + get { return Descriptor; } + } + + public BarRequest() { + OnConstruction(); + } + + partial void 
OnConstruction(); + + public BarRequest(BarRequest other) : this() { + } + + public BarRequest Clone() { + return new BarRequest(this); + } + + public override bool Equals(object other) { + return Equals(other as BarRequest); + } + + public bool Equals(BarRequest other) { + if (ReferenceEquals(other, null)) { + return false; + } + if (ReferenceEquals(other, this)) { + return true; + } + return true; + } + + public override int GetHashCode() { + int hash = 1; + return hash; + } + + public override string ToString() { + return pb::JsonFormatter.ToDiagnosticString(this); + } + + public void WriteTo(pb::CodedOutputStream output) { + } + + public int CalculateSize() { + int size = 0; + return size; + } + + public void MergeFrom(BarRequest other) { + if (other == null) { + return; + } + } + + public void MergeFrom(pb::CodedInputStream input) { + uint tag; + while ((tag = input.ReadTag()) != 0) { + switch(tag) { + default: + input.SkipLastField(); + break; + } + } + } + + } + + [global::System.Diagnostics.DebuggerNonUserCodeAttribute()] + public sealed partial class BarResponse : pb::IMessage { + private static readonly pb::MessageParser _parser = new pb::MessageParser(() => new BarResponse()); + public static pb::MessageParser Parser { get { return _parser; } } + + public static pbr::MessageDescriptor Descriptor { + get { return global::Google.Protobuf.TestProtos.UnittestProto3Reflection.Descriptor.MessageTypes[32]; } + } + + pbr::MessageDescriptor pb::IMessage.Descriptor { + get { return Descriptor; } + } + + public BarResponse() { + OnConstruction(); + } + + partial void OnConstruction(); + + public BarResponse(BarResponse other) : this() { + } + + public BarResponse Clone() { + return new BarResponse(this); + } + + public override bool Equals(object other) { + return Equals(other as BarResponse); + } + + public bool Equals(BarResponse other) { + if (ReferenceEquals(other, null)) { + return false; + } + if (ReferenceEquals(other, this)) { + return true; + } + return true; + } + + public override int GetHashCode() { + int hash = 1; + return hash; + } + + public override string ToString() { + return pb::JsonFormatter.ToDiagnosticString(this); + } + + public void WriteTo(pb::CodedOutputStream output) { + } + + public int CalculateSize() { + int size = 0; + return size; + } + + public void MergeFrom(BarResponse other) { + if (other == null) { + return; + } + } + + public void MergeFrom(pb::CodedInputStream input) { + uint tag; + while ((tag = input.ReadTag()) != 0) { + switch(tag) { + default: + input.SkipLastField(); + break; + } + } + } + + } + + #endregion + +} + +#endregion Designer generated code diff --git a/packager/third_party/protobuf/csharp/src/Google.Protobuf.Test/TestProtos/UnittestWellKnownTypes.cs b/packager/third_party/protobuf/csharp/src/Google.Protobuf.Test/TestProtos/UnittestWellKnownTypes.cs new file mode 100644 index 0000000000..ae12f4a4b9 --- /dev/null +++ b/packager/third_party/protobuf/csharp/src/Google.Protobuf.Test/TestProtos/UnittestWellKnownTypes.cs @@ -0,0 +1,2413 @@ +// Generated by the protocol buffer compiler. DO NOT EDIT! 
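The TestRepeatedScalarDifferentTagSizes message in the file above deliberately uses field numbers whose encoded tags need one, two and three bytes, which is why its MergeFrom switch matches raw tag values such as 98/101 and 2097144/2097146. Those constants follow directly from the wire-format rule tag = (field_number << 3) | wire_type. The C# sketch below is illustrative only and not part of the generated sources; it recomputes a few of the tag values from the field numbers visible in the generated code:

using System;

// Illustrative helper: recomputes the raw tags used by the generated codecs above.
static class TagMath
{
    // Wire types from the protobuf encoding spec.
    const int Varint = 0;
    const int LengthDelimited = 2;

    static uint MakeTag(int fieldNumber, int wireType)
    {
        return (uint)((fieldNumber << 3) | wireType);
    }

    static void Main()
    {
        // repeated_uint64 is field 262143 (see RepeatedUint64FieldNumber above).
        Console.WriteLine(MakeTag(262143, LengthDelimited)); // 2097146, the packed tag passed to FieldCodec.ForUInt64
        Console.WriteLine(MakeTag(262143, Varint));          // 2097144, the unpacked tag accepted in MergeFrom
        // repeated_fixed32: the generated switch accepts tags 98 and 101, i.e. field 12.
        Console.WriteLine(MakeTag(12, LengthDelimited));     // 98
        Console.WriteLine(MakeTag(12, 5));                   // 101 (wire type 5 = fixed32)
    }
}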
+// source: google/protobuf/unittest_well_known_types.proto +#pragma warning disable 1591, 0612, 3021 +#region Designer generated code + +using pb = global::Google.Protobuf; +using pbc = global::Google.Protobuf.Collections; +using pbr = global::Google.Protobuf.Reflection; +using scg = global::System.Collections.Generic; +namespace Google.Protobuf.TestProtos { + + /// Holder for reflection information generated from google/protobuf/unittest_well_known_types.proto + [global::System.Diagnostics.DebuggerNonUserCodeAttribute()] + public static partial class UnittestWellKnownTypesReflection { + + #region Descriptor + /// File descriptor for google/protobuf/unittest_well_known_types.proto + public static pbr::FileDescriptor Descriptor { + get { return descriptor; } + } + private static pbr::FileDescriptor descriptor; + + static UnittestWellKnownTypesReflection() { + byte[] descriptorData = global::System.Convert.FromBase64String( + string.Concat( + "Ci9nb29nbGUvcHJvdG9idWYvdW5pdHRlc3Rfd2VsbF9rbm93bl90eXBlcy5w", + "cm90bxIRcHJvdG9idWZfdW5pdHRlc3QaGWdvb2dsZS9wcm90b2J1Zi9hbnku", + "cHJvdG8aGWdvb2dsZS9wcm90b2J1Zi9hcGkucHJvdG8aHmdvb2dsZS9wcm90", + "b2J1Zi9kdXJhdGlvbi5wcm90bxobZ29vZ2xlL3Byb3RvYnVmL2VtcHR5LnBy", + "b3RvGiBnb29nbGUvcHJvdG9idWYvZmllbGRfbWFzay5wcm90bxokZ29vZ2xl", + "L3Byb3RvYnVmL3NvdXJjZV9jb250ZXh0LnByb3RvGhxnb29nbGUvcHJvdG9i", + "dWYvc3RydWN0LnByb3RvGh9nb29nbGUvcHJvdG9idWYvdGltZXN0YW1wLnBy", + "b3RvGhpnb29nbGUvcHJvdG9idWYvdHlwZS5wcm90bxoeZ29vZ2xlL3Byb3Rv", + "YnVmL3dyYXBwZXJzLnByb3RvIr4HChJUZXN0V2VsbEtub3duVHlwZXMSJwoJ", + "YW55X2ZpZWxkGAEgASgLMhQuZ29vZ2xlLnByb3RvYnVmLkFueRInCglhcGlf", + "ZmllbGQYAiABKAsyFC5nb29nbGUucHJvdG9idWYuQXBpEjEKDmR1cmF0aW9u", + "X2ZpZWxkGAMgASgLMhkuZ29vZ2xlLnByb3RvYnVmLkR1cmF0aW9uEisKC2Vt", + "cHR5X2ZpZWxkGAQgASgLMhYuZ29vZ2xlLnByb3RvYnVmLkVtcHR5EjQKEGZp", + "ZWxkX21hc2tfZmllbGQYBSABKAsyGi5nb29nbGUucHJvdG9idWYuRmllbGRN", + "YXNrEjwKFHNvdXJjZV9jb250ZXh0X2ZpZWxkGAYgASgLMh4uZ29vZ2xlLnBy", + "b3RvYnVmLlNvdXJjZUNvbnRleHQSLQoMc3RydWN0X2ZpZWxkGAcgASgLMhcu", + "Z29vZ2xlLnByb3RvYnVmLlN0cnVjdBIzCg90aW1lc3RhbXBfZmllbGQYCCAB", + "KAsyGi5nb29nbGUucHJvdG9idWYuVGltZXN0YW1wEikKCnR5cGVfZmllbGQY", + "CSABKAsyFS5nb29nbGUucHJvdG9idWYuVHlwZRIyCgxkb3VibGVfZmllbGQY", + "CiABKAsyHC5nb29nbGUucHJvdG9idWYuRG91YmxlVmFsdWUSMAoLZmxvYXRf", + "ZmllbGQYCyABKAsyGy5nb29nbGUucHJvdG9idWYuRmxvYXRWYWx1ZRIwCgtp", + "bnQ2NF9maWVsZBgMIAEoCzIbLmdvb2dsZS5wcm90b2J1Zi5JbnQ2NFZhbHVl", + "EjIKDHVpbnQ2NF9maWVsZBgNIAEoCzIcLmdvb2dsZS5wcm90b2J1Zi5VSW50", + "NjRWYWx1ZRIwCgtpbnQzMl9maWVsZBgOIAEoCzIbLmdvb2dsZS5wcm90b2J1", + "Zi5JbnQzMlZhbHVlEjIKDHVpbnQzMl9maWVsZBgPIAEoCzIcLmdvb2dsZS5w", + "cm90b2J1Zi5VSW50MzJWYWx1ZRIuCgpib29sX2ZpZWxkGBAgASgLMhouZ29v", + "Z2xlLnByb3RvYnVmLkJvb2xWYWx1ZRIyCgxzdHJpbmdfZmllbGQYESABKAsy", + "HC5nb29nbGUucHJvdG9idWYuU3RyaW5nVmFsdWUSMAoLYnl0ZXNfZmllbGQY", + "EiABKAsyGy5nb29nbGUucHJvdG9idWYuQnl0ZXNWYWx1ZRIrCgt2YWx1ZV9m", + "aWVsZBgTIAEoCzIWLmdvb2dsZS5wcm90b2J1Zi5WYWx1ZSKVBwoWUmVwZWF0", + "ZWRXZWxsS25vd25UeXBlcxInCglhbnlfZmllbGQYASADKAsyFC5nb29nbGUu", + "cHJvdG9idWYuQW55EicKCWFwaV9maWVsZBgCIAMoCzIULmdvb2dsZS5wcm90", + "b2J1Zi5BcGkSMQoOZHVyYXRpb25fZmllbGQYAyADKAsyGS5nb29nbGUucHJv", + "dG9idWYuRHVyYXRpb24SKwoLZW1wdHlfZmllbGQYBCADKAsyFi5nb29nbGUu", + "cHJvdG9idWYuRW1wdHkSNAoQZmllbGRfbWFza19maWVsZBgFIAMoCzIaLmdv", + "b2dsZS5wcm90b2J1Zi5GaWVsZE1hc2sSPAoUc291cmNlX2NvbnRleHRfZmll", + "bGQYBiADKAsyHi5nb29nbGUucHJvdG9idWYuU291cmNlQ29udGV4dBItCgxz", + "dHJ1Y3RfZmllbGQYByADKAsyFy5nb29nbGUucHJvdG9idWYuU3RydWN0EjMK", + "D3RpbWVzdGFtcF9maWVsZBgIIAMoCzIaLmdvb2dsZS5wcm90b2J1Zi5UaW1l", + 
"c3RhbXASKQoKdHlwZV9maWVsZBgJIAMoCzIVLmdvb2dsZS5wcm90b2J1Zi5U", + "eXBlEjIKDGRvdWJsZV9maWVsZBgKIAMoCzIcLmdvb2dsZS5wcm90b2J1Zi5E", + "b3VibGVWYWx1ZRIwCgtmbG9hdF9maWVsZBgLIAMoCzIbLmdvb2dsZS5wcm90", + "b2J1Zi5GbG9hdFZhbHVlEjAKC2ludDY0X2ZpZWxkGAwgAygLMhsuZ29vZ2xl", + "LnByb3RvYnVmLkludDY0VmFsdWUSMgoMdWludDY0X2ZpZWxkGA0gAygLMhwu", + "Z29vZ2xlLnByb3RvYnVmLlVJbnQ2NFZhbHVlEjAKC2ludDMyX2ZpZWxkGA4g", + "AygLMhsuZ29vZ2xlLnByb3RvYnVmLkludDMyVmFsdWUSMgoMdWludDMyX2Zp", + "ZWxkGA8gAygLMhwuZ29vZ2xlLnByb3RvYnVmLlVJbnQzMlZhbHVlEi4KCmJv", + "b2xfZmllbGQYECADKAsyGi5nb29nbGUucHJvdG9idWYuQm9vbFZhbHVlEjIK", + "DHN0cmluZ19maWVsZBgRIAMoCzIcLmdvb2dsZS5wcm90b2J1Zi5TdHJpbmdW", + "YWx1ZRIwCgtieXRlc19maWVsZBgSIAMoCzIbLmdvb2dsZS5wcm90b2J1Zi5C", + "eXRlc1ZhbHVlIsUHChNPbmVvZldlbGxLbm93blR5cGVzEikKCWFueV9maWVs", + "ZBgBIAEoCzIULmdvb2dsZS5wcm90b2J1Zi5BbnlIABIpCglhcGlfZmllbGQY", + "AiABKAsyFC5nb29nbGUucHJvdG9idWYuQXBpSAASMwoOZHVyYXRpb25fZmll", + "bGQYAyABKAsyGS5nb29nbGUucHJvdG9idWYuRHVyYXRpb25IABItCgtlbXB0", + "eV9maWVsZBgEIAEoCzIWLmdvb2dsZS5wcm90b2J1Zi5FbXB0eUgAEjYKEGZp", + "ZWxkX21hc2tfZmllbGQYBSABKAsyGi5nb29nbGUucHJvdG9idWYuRmllbGRN", + "YXNrSAASPgoUc291cmNlX2NvbnRleHRfZmllbGQYBiABKAsyHi5nb29nbGUu", + "cHJvdG9idWYuU291cmNlQ29udGV4dEgAEi8KDHN0cnVjdF9maWVsZBgHIAEo", + "CzIXLmdvb2dsZS5wcm90b2J1Zi5TdHJ1Y3RIABI1Cg90aW1lc3RhbXBfZmll", + "bGQYCCABKAsyGi5nb29nbGUucHJvdG9idWYuVGltZXN0YW1wSAASKwoKdHlw", + "ZV9maWVsZBgJIAEoCzIVLmdvb2dsZS5wcm90b2J1Zi5UeXBlSAASNAoMZG91", + "YmxlX2ZpZWxkGAogASgLMhwuZ29vZ2xlLnByb3RvYnVmLkRvdWJsZVZhbHVl", + "SAASMgoLZmxvYXRfZmllbGQYCyABKAsyGy5nb29nbGUucHJvdG9idWYuRmxv", + "YXRWYWx1ZUgAEjIKC2ludDY0X2ZpZWxkGAwgASgLMhsuZ29vZ2xlLnByb3Rv", + "YnVmLkludDY0VmFsdWVIABI0Cgx1aW50NjRfZmllbGQYDSABKAsyHC5nb29n", + "bGUucHJvdG9idWYuVUludDY0VmFsdWVIABIyCgtpbnQzMl9maWVsZBgOIAEo", + "CzIbLmdvb2dsZS5wcm90b2J1Zi5JbnQzMlZhbHVlSAASNAoMdWludDMyX2Zp", + "ZWxkGA8gASgLMhwuZ29vZ2xlLnByb3RvYnVmLlVJbnQzMlZhbHVlSAASMAoK", + "Ym9vbF9maWVsZBgQIAEoCzIaLmdvb2dsZS5wcm90b2J1Zi5Cb29sVmFsdWVI", + "ABI0CgxzdHJpbmdfZmllbGQYESABKAsyHC5nb29nbGUucHJvdG9idWYuU3Ry", + "aW5nVmFsdWVIABIyCgtieXRlc19maWVsZBgSIAEoCzIbLmdvb2dsZS5wcm90", + "b2J1Zi5CeXRlc1ZhbHVlSABCDQoLb25lb2ZfZmllbGQilhYKEU1hcFdlbGxL", + "bm93blR5cGVzEkUKCWFueV9maWVsZBgBIAMoCzIyLnByb3RvYnVmX3VuaXR0", + "ZXN0Lk1hcFdlbGxLbm93blR5cGVzLkFueUZpZWxkRW50cnkSRQoJYXBpX2Zp", + "ZWxkGAIgAygLMjIucHJvdG9idWZfdW5pdHRlc3QuTWFwV2VsbEtub3duVHlw", + "ZXMuQXBpRmllbGRFbnRyeRJPCg5kdXJhdGlvbl9maWVsZBgDIAMoCzI3LnBy", + "b3RvYnVmX3VuaXR0ZXN0Lk1hcFdlbGxLbm93blR5cGVzLkR1cmF0aW9uRmll", + "bGRFbnRyeRJJCgtlbXB0eV9maWVsZBgEIAMoCzI0LnByb3RvYnVmX3VuaXR0", + "ZXN0Lk1hcFdlbGxLbm93blR5cGVzLkVtcHR5RmllbGRFbnRyeRJSChBmaWVs", + "ZF9tYXNrX2ZpZWxkGAUgAygLMjgucHJvdG9idWZfdW5pdHRlc3QuTWFwV2Vs", + "bEtub3duVHlwZXMuRmllbGRNYXNrRmllbGRFbnRyeRJaChRzb3VyY2VfY29u", + "dGV4dF9maWVsZBgGIAMoCzI8LnByb3RvYnVmX3VuaXR0ZXN0Lk1hcFdlbGxL", + "bm93blR5cGVzLlNvdXJjZUNvbnRleHRGaWVsZEVudHJ5EksKDHN0cnVjdF9m", + "aWVsZBgHIAMoCzI1LnByb3RvYnVmX3VuaXR0ZXN0Lk1hcFdlbGxLbm93blR5", + "cGVzLlN0cnVjdEZpZWxkRW50cnkSUQoPdGltZXN0YW1wX2ZpZWxkGAggAygL", + "MjgucHJvdG9idWZfdW5pdHRlc3QuTWFwV2VsbEtub3duVHlwZXMuVGltZXN0", + "YW1wRmllbGRFbnRyeRJHCgp0eXBlX2ZpZWxkGAkgAygLMjMucHJvdG9idWZf", + "dW5pdHRlc3QuTWFwV2VsbEtub3duVHlwZXMuVHlwZUZpZWxkRW50cnkSSwoM", + "ZG91YmxlX2ZpZWxkGAogAygLMjUucHJvdG9idWZfdW5pdHRlc3QuTWFwV2Vs", + "bEtub3duVHlwZXMuRG91YmxlRmllbGRFbnRyeRJJCgtmbG9hdF9maWVsZBgL", + "IAMoCzI0LnByb3RvYnVmX3VuaXR0ZXN0Lk1hcFdlbGxLbm93blR5cGVzLkZs", + "b2F0RmllbGRFbnRyeRJJCgtpbnQ2NF9maWVsZBgMIAMoCzI0LnByb3RvYnVm", + 
"X3VuaXR0ZXN0Lk1hcFdlbGxLbm93blR5cGVzLkludDY0RmllbGRFbnRyeRJL", + "Cgx1aW50NjRfZmllbGQYDSADKAsyNS5wcm90b2J1Zl91bml0dGVzdC5NYXBX", + "ZWxsS25vd25UeXBlcy5VaW50NjRGaWVsZEVudHJ5EkkKC2ludDMyX2ZpZWxk", + "GA4gAygLMjQucHJvdG9idWZfdW5pdHRlc3QuTWFwV2VsbEtub3duVHlwZXMu", + "SW50MzJGaWVsZEVudHJ5EksKDHVpbnQzMl9maWVsZBgPIAMoCzI1LnByb3Rv", + "YnVmX3VuaXR0ZXN0Lk1hcFdlbGxLbm93blR5cGVzLlVpbnQzMkZpZWxkRW50", + "cnkSRwoKYm9vbF9maWVsZBgQIAMoCzIzLnByb3RvYnVmX3VuaXR0ZXN0Lk1h", + "cFdlbGxLbm93blR5cGVzLkJvb2xGaWVsZEVudHJ5EksKDHN0cmluZ19maWVs", + "ZBgRIAMoCzI1LnByb3RvYnVmX3VuaXR0ZXN0Lk1hcFdlbGxLbm93blR5cGVz", + "LlN0cmluZ0ZpZWxkRW50cnkSSQoLYnl0ZXNfZmllbGQYEiADKAsyNC5wcm90", + "b2J1Zl91bml0dGVzdC5NYXBXZWxsS25vd25UeXBlcy5CeXRlc0ZpZWxkRW50", + "cnkaRQoNQW55RmllbGRFbnRyeRILCgNrZXkYASABKAUSIwoFdmFsdWUYAiAB", + "KAsyFC5nb29nbGUucHJvdG9idWYuQW55OgI4ARpFCg1BcGlGaWVsZEVudHJ5", + "EgsKA2tleRgBIAEoBRIjCgV2YWx1ZRgCIAEoCzIULmdvb2dsZS5wcm90b2J1", + "Zi5BcGk6AjgBGk8KEkR1cmF0aW9uRmllbGRFbnRyeRILCgNrZXkYASABKAUS", + "KAoFdmFsdWUYAiABKAsyGS5nb29nbGUucHJvdG9idWYuRHVyYXRpb246AjgB", + "GkkKD0VtcHR5RmllbGRFbnRyeRILCgNrZXkYASABKAUSJQoFdmFsdWUYAiAB", + "KAsyFi5nb29nbGUucHJvdG9idWYuRW1wdHk6AjgBGlEKE0ZpZWxkTWFza0Zp", + "ZWxkRW50cnkSCwoDa2V5GAEgASgFEikKBXZhbHVlGAIgASgLMhouZ29vZ2xl", + "LnByb3RvYnVmLkZpZWxkTWFzazoCOAEaWQoXU291cmNlQ29udGV4dEZpZWxk", + "RW50cnkSCwoDa2V5GAEgASgFEi0KBXZhbHVlGAIgASgLMh4uZ29vZ2xlLnBy", + "b3RvYnVmLlNvdXJjZUNvbnRleHQ6AjgBGksKEFN0cnVjdEZpZWxkRW50cnkS", + "CwoDa2V5GAEgASgFEiYKBXZhbHVlGAIgASgLMhcuZ29vZ2xlLnByb3RvYnVm", + "LlN0cnVjdDoCOAEaUQoTVGltZXN0YW1wRmllbGRFbnRyeRILCgNrZXkYASAB", + "KAUSKQoFdmFsdWUYAiABKAsyGi5nb29nbGUucHJvdG9idWYuVGltZXN0YW1w", + "OgI4ARpHCg5UeXBlRmllbGRFbnRyeRILCgNrZXkYASABKAUSJAoFdmFsdWUY", + "AiABKAsyFS5nb29nbGUucHJvdG9idWYuVHlwZToCOAEaUAoQRG91YmxlRmll", + "bGRFbnRyeRILCgNrZXkYASABKAUSKwoFdmFsdWUYAiABKAsyHC5nb29nbGUu", + "cHJvdG9idWYuRG91YmxlVmFsdWU6AjgBGk4KD0Zsb2F0RmllbGRFbnRyeRIL", + "CgNrZXkYASABKAUSKgoFdmFsdWUYAiABKAsyGy5nb29nbGUucHJvdG9idWYu", + "RmxvYXRWYWx1ZToCOAEaTgoPSW50NjRGaWVsZEVudHJ5EgsKA2tleRgBIAEo", + "BRIqCgV2YWx1ZRgCIAEoCzIbLmdvb2dsZS5wcm90b2J1Zi5JbnQ2NFZhbHVl", + "OgI4ARpQChBVaW50NjRGaWVsZEVudHJ5EgsKA2tleRgBIAEoBRIrCgV2YWx1", + "ZRgCIAEoCzIcLmdvb2dsZS5wcm90b2J1Zi5VSW50NjRWYWx1ZToCOAEaTgoP", + "SW50MzJGaWVsZEVudHJ5EgsKA2tleRgBIAEoBRIqCgV2YWx1ZRgCIAEoCzIb", + "Lmdvb2dsZS5wcm90b2J1Zi5JbnQzMlZhbHVlOgI4ARpQChBVaW50MzJGaWVs", + "ZEVudHJ5EgsKA2tleRgBIAEoBRIrCgV2YWx1ZRgCIAEoCzIcLmdvb2dsZS5w", + "cm90b2J1Zi5VSW50MzJWYWx1ZToCOAEaTAoOQm9vbEZpZWxkRW50cnkSCwoD", + "a2V5GAEgASgFEikKBXZhbHVlGAIgASgLMhouZ29vZ2xlLnByb3RvYnVmLkJv", + "b2xWYWx1ZToCOAEaUAoQU3RyaW5nRmllbGRFbnRyeRILCgNrZXkYASABKAUS", + "KwoFdmFsdWUYAiABKAsyHC5nb29nbGUucHJvdG9idWYuU3RyaW5nVmFsdWU6", + "AjgBGk4KD0J5dGVzRmllbGRFbnRyeRILCgNrZXkYASABKAUSKgoFdmFsdWUY", + "AiABKAsyGy5nb29nbGUucHJvdG9idWYuQnl0ZXNWYWx1ZToCOAFCOQoYY29t", + "Lmdvb2dsZS5wcm90b2J1Zi50ZXN0UAGqAhpHb29nbGUuUHJvdG9idWYuVGVz", + "dFByb3Rvc2IGcHJvdG8z")); + descriptor = pbr::FileDescriptor.FromGeneratedCode(descriptorData, + new pbr::FileDescriptor[] { global::Google.Protobuf.WellKnownTypes.AnyReflection.Descriptor, global::Google.Protobuf.WellKnownTypes.ApiReflection.Descriptor, global::Google.Protobuf.WellKnownTypes.DurationReflection.Descriptor, global::Google.Protobuf.WellKnownTypes.EmptyReflection.Descriptor, global::Google.Protobuf.WellKnownTypes.FieldMaskReflection.Descriptor, global::Google.Protobuf.WellKnownTypes.SourceContextReflection.Descriptor, global::Google.Protobuf.WellKnownTypes.StructReflection.Descriptor, 
global::Google.Protobuf.WellKnownTypes.TimestampReflection.Descriptor, global::Google.Protobuf.WellKnownTypes.TypeReflection.Descriptor, global::Google.Protobuf.WellKnownTypes.WrappersReflection.Descriptor, }, + new pbr::GeneratedClrTypeInfo(null, new pbr::GeneratedClrTypeInfo[] { + new pbr::GeneratedClrTypeInfo(typeof(global::Google.Protobuf.TestProtos.TestWellKnownTypes), global::Google.Protobuf.TestProtos.TestWellKnownTypes.Parser, new[]{ "AnyField", "ApiField", "DurationField", "EmptyField", "FieldMaskField", "SourceContextField", "StructField", "TimestampField", "TypeField", "DoubleField", "FloatField", "Int64Field", "Uint64Field", "Int32Field", "Uint32Field", "BoolField", "StringField", "BytesField", "ValueField" }, null, null, null), + new pbr::GeneratedClrTypeInfo(typeof(global::Google.Protobuf.TestProtos.RepeatedWellKnownTypes), global::Google.Protobuf.TestProtos.RepeatedWellKnownTypes.Parser, new[]{ "AnyField", "ApiField", "DurationField", "EmptyField", "FieldMaskField", "SourceContextField", "StructField", "TimestampField", "TypeField", "DoubleField", "FloatField", "Int64Field", "Uint64Field", "Int32Field", "Uint32Field", "BoolField", "StringField", "BytesField" }, null, null, null), + new pbr::GeneratedClrTypeInfo(typeof(global::Google.Protobuf.TestProtos.OneofWellKnownTypes), global::Google.Protobuf.TestProtos.OneofWellKnownTypes.Parser, new[]{ "AnyField", "ApiField", "DurationField", "EmptyField", "FieldMaskField", "SourceContextField", "StructField", "TimestampField", "TypeField", "DoubleField", "FloatField", "Int64Field", "Uint64Field", "Int32Field", "Uint32Field", "BoolField", "StringField", "BytesField" }, new[]{ "OneofField" }, null, null), + new pbr::GeneratedClrTypeInfo(typeof(global::Google.Protobuf.TestProtos.MapWellKnownTypes), global::Google.Protobuf.TestProtos.MapWellKnownTypes.Parser, new[]{ "AnyField", "ApiField", "DurationField", "EmptyField", "FieldMaskField", "SourceContextField", "StructField", "TimestampField", "TypeField", "DoubleField", "FloatField", "Int64Field", "Uint64Field", "Int32Field", "Uint32Field", "BoolField", "StringField", "BytesField" }, null, null, new pbr::GeneratedClrTypeInfo[] { null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, }) + })); + } + #endregion + + } + #region Messages + /// + /// Test that we can include all well-known types. + /// Each wrapper type is included separately, as languages + /// map handle different wrappers in different ways. + /// + [global::System.Diagnostics.DebuggerNonUserCodeAttribute()] + public sealed partial class TestWellKnownTypes : pb::IMessage { + private static readonly pb::MessageParser _parser = new pb::MessageParser(() => new TestWellKnownTypes()); + public static pb::MessageParser Parser { get { return _parser; } } + + public static pbr::MessageDescriptor Descriptor { + get { return global::Google.Protobuf.TestProtos.UnittestWellKnownTypesReflection.Descriptor.MessageTypes[0]; } + } + + pbr::MessageDescriptor pb::IMessage.Descriptor { + get { return Descriptor; } + } + + public TestWellKnownTypes() { + OnConstruction(); + } + + partial void OnConstruction(); + + public TestWellKnownTypes(TestWellKnownTypes other) : this() { + AnyField = other.anyField_ != null ? other.AnyField.Clone() : null; + ApiField = other.apiField_ != null ? other.ApiField.Clone() : null; + DurationField = other.durationField_ != null ? other.DurationField.Clone() : null; + EmptyField = other.emptyField_ != null ? 
other.EmptyField.Clone() : null; + FieldMaskField = other.fieldMaskField_ != null ? other.FieldMaskField.Clone() : null; + SourceContextField = other.sourceContextField_ != null ? other.SourceContextField.Clone() : null; + StructField = other.structField_ != null ? other.StructField.Clone() : null; + TimestampField = other.timestampField_ != null ? other.TimestampField.Clone() : null; + TypeField = other.typeField_ != null ? other.TypeField.Clone() : null; + DoubleField = other.DoubleField; + FloatField = other.FloatField; + Int64Field = other.Int64Field; + Uint64Field = other.Uint64Field; + Int32Field = other.Int32Field; + Uint32Field = other.Uint32Field; + BoolField = other.BoolField; + StringField = other.StringField; + BytesField = other.BytesField; + ValueField = other.valueField_ != null ? other.ValueField.Clone() : null; + } + + public TestWellKnownTypes Clone() { + return new TestWellKnownTypes(this); + } + + /// Field number for the "any_field" field. + public const int AnyFieldFieldNumber = 1; + private global::Google.Protobuf.WellKnownTypes.Any anyField_; + public global::Google.Protobuf.WellKnownTypes.Any AnyField { + get { return anyField_; } + set { + anyField_ = value; + } + } + + /// Field number for the "api_field" field. + public const int ApiFieldFieldNumber = 2; + private global::Google.Protobuf.WellKnownTypes.Api apiField_; + public global::Google.Protobuf.WellKnownTypes.Api ApiField { + get { return apiField_; } + set { + apiField_ = value; + } + } + + /// Field number for the "duration_field" field. + public const int DurationFieldFieldNumber = 3; + private global::Google.Protobuf.WellKnownTypes.Duration durationField_; + public global::Google.Protobuf.WellKnownTypes.Duration DurationField { + get { return durationField_; } + set { + durationField_ = value; + } + } + + /// Field number for the "empty_field" field. + public const int EmptyFieldFieldNumber = 4; + private global::Google.Protobuf.WellKnownTypes.Empty emptyField_; + public global::Google.Protobuf.WellKnownTypes.Empty EmptyField { + get { return emptyField_; } + set { + emptyField_ = value; + } + } + + /// Field number for the "field_mask_field" field. + public const int FieldMaskFieldFieldNumber = 5; + private global::Google.Protobuf.WellKnownTypes.FieldMask fieldMaskField_; + public global::Google.Protobuf.WellKnownTypes.FieldMask FieldMaskField { + get { return fieldMaskField_; } + set { + fieldMaskField_ = value; + } + } + + /// Field number for the "source_context_field" field. + public const int SourceContextFieldFieldNumber = 6; + private global::Google.Protobuf.WellKnownTypes.SourceContext sourceContextField_; + public global::Google.Protobuf.WellKnownTypes.SourceContext SourceContextField { + get { return sourceContextField_; } + set { + sourceContextField_ = value; + } + } + + /// Field number for the "struct_field" field. + public const int StructFieldFieldNumber = 7; + private global::Google.Protobuf.WellKnownTypes.Struct structField_; + public global::Google.Protobuf.WellKnownTypes.Struct StructField { + get { return structField_; } + set { + structField_ = value; + } + } + + /// Field number for the "timestamp_field" field. + public const int TimestampFieldFieldNumber = 8; + private global::Google.Protobuf.WellKnownTypes.Timestamp timestampField_; + public global::Google.Protobuf.WellKnownTypes.Timestamp TimestampField { + get { return timestampField_; } + set { + timestampField_ = value; + } + } + + /// Field number for the "type_field" field. 
+ public const int TypeFieldFieldNumber = 9; + private global::Google.Protobuf.WellKnownTypes.Type typeField_; + public global::Google.Protobuf.WellKnownTypes.Type TypeField { + get { return typeField_; } + set { + typeField_ = value; + } + } + + /// Field number for the "double_field" field. + public const int DoubleFieldFieldNumber = 10; + private static readonly pb::FieldCodec _single_doubleField_codec = pb::FieldCodec.ForStructWrapper(82); + private double? doubleField_; + public double? DoubleField { + get { return doubleField_; } + set { + doubleField_ = value; + } + } + + /// Field number for the "float_field" field. + public const int FloatFieldFieldNumber = 11; + private static readonly pb::FieldCodec _single_floatField_codec = pb::FieldCodec.ForStructWrapper(90); + private float? floatField_; + public float? FloatField { + get { return floatField_; } + set { + floatField_ = value; + } + } + + /// Field number for the "int64_field" field. + public const int Int64FieldFieldNumber = 12; + private static readonly pb::FieldCodec _single_int64Field_codec = pb::FieldCodec.ForStructWrapper(98); + private long? int64Field_; + public long? Int64Field { + get { return int64Field_; } + set { + int64Field_ = value; + } + } + + /// Field number for the "uint64_field" field. + public const int Uint64FieldFieldNumber = 13; + private static readonly pb::FieldCodec _single_uint64Field_codec = pb::FieldCodec.ForStructWrapper(106); + private ulong? uint64Field_; + public ulong? Uint64Field { + get { return uint64Field_; } + set { + uint64Field_ = value; + } + } + + /// Field number for the "int32_field" field. + public const int Int32FieldFieldNumber = 14; + private static readonly pb::FieldCodec _single_int32Field_codec = pb::FieldCodec.ForStructWrapper(114); + private int? int32Field_; + public int? Int32Field { + get { return int32Field_; } + set { + int32Field_ = value; + } + } + + /// Field number for the "uint32_field" field. + public const int Uint32FieldFieldNumber = 15; + private static readonly pb::FieldCodec _single_uint32Field_codec = pb::FieldCodec.ForStructWrapper(122); + private uint? uint32Field_; + public uint? Uint32Field { + get { return uint32Field_; } + set { + uint32Field_ = value; + } + } + + /// Field number for the "bool_field" field. + public const int BoolFieldFieldNumber = 16; + private static readonly pb::FieldCodec _single_boolField_codec = pb::FieldCodec.ForStructWrapper(130); + private bool? boolField_; + public bool? BoolField { + get { return boolField_; } + set { + boolField_ = value; + } + } + + /// Field number for the "string_field" field. + public const int StringFieldFieldNumber = 17; + private static readonly pb::FieldCodec _single_stringField_codec = pb::FieldCodec.ForClassWrapper(138); + private string stringField_; + public string StringField { + get { return stringField_; } + set { + stringField_ = value; + } + } + + /// Field number for the "bytes_field" field. + public const int BytesFieldFieldNumber = 18; + private static readonly pb::FieldCodec _single_bytesField_codec = pb::FieldCodec.ForClassWrapper(146); + private pb::ByteString bytesField_; + public pb::ByteString BytesField { + get { return bytesField_; } + set { + bytesField_ = value; + } + } + + /// Field number for the "value_field" field. 
+ public const int ValueFieldFieldNumber = 19; + private global::Google.Protobuf.WellKnownTypes.Value valueField_; + /// + /// Part of struct, but useful to be able to test separately + /// + public global::Google.Protobuf.WellKnownTypes.Value ValueField { + get { return valueField_; } + set { + valueField_ = value; + } + } + + public override bool Equals(object other) { + return Equals(other as TestWellKnownTypes); + } + + public bool Equals(TestWellKnownTypes other) { + if (ReferenceEquals(other, null)) { + return false; + } + if (ReferenceEquals(other, this)) { + return true; + } + if (!object.Equals(AnyField, other.AnyField)) return false; + if (!object.Equals(ApiField, other.ApiField)) return false; + if (!object.Equals(DurationField, other.DurationField)) return false; + if (!object.Equals(EmptyField, other.EmptyField)) return false; + if (!object.Equals(FieldMaskField, other.FieldMaskField)) return false; + if (!object.Equals(SourceContextField, other.SourceContextField)) return false; + if (!object.Equals(StructField, other.StructField)) return false; + if (!object.Equals(TimestampField, other.TimestampField)) return false; + if (!object.Equals(TypeField, other.TypeField)) return false; + if (DoubleField != other.DoubleField) return false; + if (FloatField != other.FloatField) return false; + if (Int64Field != other.Int64Field) return false; + if (Uint64Field != other.Uint64Field) return false; + if (Int32Field != other.Int32Field) return false; + if (Uint32Field != other.Uint32Field) return false; + if (BoolField != other.BoolField) return false; + if (StringField != other.StringField) return false; + if (BytesField != other.BytesField) return false; + if (!object.Equals(ValueField, other.ValueField)) return false; + return true; + } + + public override int GetHashCode() { + int hash = 1; + if (anyField_ != null) hash ^= AnyField.GetHashCode(); + if (apiField_ != null) hash ^= ApiField.GetHashCode(); + if (durationField_ != null) hash ^= DurationField.GetHashCode(); + if (emptyField_ != null) hash ^= EmptyField.GetHashCode(); + if (fieldMaskField_ != null) hash ^= FieldMaskField.GetHashCode(); + if (sourceContextField_ != null) hash ^= SourceContextField.GetHashCode(); + if (structField_ != null) hash ^= StructField.GetHashCode(); + if (timestampField_ != null) hash ^= TimestampField.GetHashCode(); + if (typeField_ != null) hash ^= TypeField.GetHashCode(); + if (doubleField_ != null) hash ^= DoubleField.GetHashCode(); + if (floatField_ != null) hash ^= FloatField.GetHashCode(); + if (int64Field_ != null) hash ^= Int64Field.GetHashCode(); + if (uint64Field_ != null) hash ^= Uint64Field.GetHashCode(); + if (int32Field_ != null) hash ^= Int32Field.GetHashCode(); + if (uint32Field_ != null) hash ^= Uint32Field.GetHashCode(); + if (boolField_ != null) hash ^= BoolField.GetHashCode(); + if (stringField_ != null) hash ^= StringField.GetHashCode(); + if (bytesField_ != null) hash ^= BytesField.GetHashCode(); + if (valueField_ != null) hash ^= ValueField.GetHashCode(); + return hash; + } + + public override string ToString() { + return pb::JsonFormatter.ToDiagnosticString(this); + } + + public void WriteTo(pb::CodedOutputStream output) { + if (anyField_ != null) { + output.WriteRawTag(10); + output.WriteMessage(AnyField); + } + if (apiField_ != null) { + output.WriteRawTag(18); + output.WriteMessage(ApiField); + } + if (durationField_ != null) { + output.WriteRawTag(26); + output.WriteMessage(DurationField); + } + if (emptyField_ != null) { + output.WriteRawTag(34); + 
output.WriteMessage(EmptyField); + } + if (fieldMaskField_ != null) { + output.WriteRawTag(42); + output.WriteMessage(FieldMaskField); + } + if (sourceContextField_ != null) { + output.WriteRawTag(50); + output.WriteMessage(SourceContextField); + } + if (structField_ != null) { + output.WriteRawTag(58); + output.WriteMessage(StructField); + } + if (timestampField_ != null) { + output.WriteRawTag(66); + output.WriteMessage(TimestampField); + } + if (typeField_ != null) { + output.WriteRawTag(74); + output.WriteMessage(TypeField); + } + if (doubleField_ != null) { + _single_doubleField_codec.WriteTagAndValue(output, DoubleField); + } + if (floatField_ != null) { + _single_floatField_codec.WriteTagAndValue(output, FloatField); + } + if (int64Field_ != null) { + _single_int64Field_codec.WriteTagAndValue(output, Int64Field); + } + if (uint64Field_ != null) { + _single_uint64Field_codec.WriteTagAndValue(output, Uint64Field); + } + if (int32Field_ != null) { + _single_int32Field_codec.WriteTagAndValue(output, Int32Field); + } + if (uint32Field_ != null) { + _single_uint32Field_codec.WriteTagAndValue(output, Uint32Field); + } + if (boolField_ != null) { + _single_boolField_codec.WriteTagAndValue(output, BoolField); + } + if (stringField_ != null) { + _single_stringField_codec.WriteTagAndValue(output, StringField); + } + if (bytesField_ != null) { + _single_bytesField_codec.WriteTagAndValue(output, BytesField); + } + if (valueField_ != null) { + output.WriteRawTag(154, 1); + output.WriteMessage(ValueField); + } + } + + public int CalculateSize() { + int size = 0; + if (anyField_ != null) { + size += 1 + pb::CodedOutputStream.ComputeMessageSize(AnyField); + } + if (apiField_ != null) { + size += 1 + pb::CodedOutputStream.ComputeMessageSize(ApiField); + } + if (durationField_ != null) { + size += 1 + pb::CodedOutputStream.ComputeMessageSize(DurationField); + } + if (emptyField_ != null) { + size += 1 + pb::CodedOutputStream.ComputeMessageSize(EmptyField); + } + if (fieldMaskField_ != null) { + size += 1 + pb::CodedOutputStream.ComputeMessageSize(FieldMaskField); + } + if (sourceContextField_ != null) { + size += 1 + pb::CodedOutputStream.ComputeMessageSize(SourceContextField); + } + if (structField_ != null) { + size += 1 + pb::CodedOutputStream.ComputeMessageSize(StructField); + } + if (timestampField_ != null) { + size += 1 + pb::CodedOutputStream.ComputeMessageSize(TimestampField); + } + if (typeField_ != null) { + size += 1 + pb::CodedOutputStream.ComputeMessageSize(TypeField); + } + if (doubleField_ != null) { + size += _single_doubleField_codec.CalculateSizeWithTag(DoubleField); + } + if (floatField_ != null) { + size += _single_floatField_codec.CalculateSizeWithTag(FloatField); + } + if (int64Field_ != null) { + size += _single_int64Field_codec.CalculateSizeWithTag(Int64Field); + } + if (uint64Field_ != null) { + size += _single_uint64Field_codec.CalculateSizeWithTag(Uint64Field); + } + if (int32Field_ != null) { + size += _single_int32Field_codec.CalculateSizeWithTag(Int32Field); + } + if (uint32Field_ != null) { + size += _single_uint32Field_codec.CalculateSizeWithTag(Uint32Field); + } + if (boolField_ != null) { + size += _single_boolField_codec.CalculateSizeWithTag(BoolField); + } + if (stringField_ != null) { + size += _single_stringField_codec.CalculateSizeWithTag(StringField); + } + if (bytesField_ != null) { + size += _single_bytesField_codec.CalculateSizeWithTag(BytesField); + } + if (valueField_ != null) { + size += 2 + pb::CodedOutputStream.ComputeMessageSize(ValueField); + } + 
return size; + } + + public void MergeFrom(TestWellKnownTypes other) { + if (other == null) { + return; + } + if (other.anyField_ != null) { + if (anyField_ == null) { + anyField_ = new global::Google.Protobuf.WellKnownTypes.Any(); + } + AnyField.MergeFrom(other.AnyField); + } + if (other.apiField_ != null) { + if (apiField_ == null) { + apiField_ = new global::Google.Protobuf.WellKnownTypes.Api(); + } + ApiField.MergeFrom(other.ApiField); + } + if (other.durationField_ != null) { + if (durationField_ == null) { + durationField_ = new global::Google.Protobuf.WellKnownTypes.Duration(); + } + DurationField.MergeFrom(other.DurationField); + } + if (other.emptyField_ != null) { + if (emptyField_ == null) { + emptyField_ = new global::Google.Protobuf.WellKnownTypes.Empty(); + } + EmptyField.MergeFrom(other.EmptyField); + } + if (other.fieldMaskField_ != null) { + if (fieldMaskField_ == null) { + fieldMaskField_ = new global::Google.Protobuf.WellKnownTypes.FieldMask(); + } + FieldMaskField.MergeFrom(other.FieldMaskField); + } + if (other.sourceContextField_ != null) { + if (sourceContextField_ == null) { + sourceContextField_ = new global::Google.Protobuf.WellKnownTypes.SourceContext(); + } + SourceContextField.MergeFrom(other.SourceContextField); + } + if (other.structField_ != null) { + if (structField_ == null) { + structField_ = new global::Google.Protobuf.WellKnownTypes.Struct(); + } + StructField.MergeFrom(other.StructField); + } + if (other.timestampField_ != null) { + if (timestampField_ == null) { + timestampField_ = new global::Google.Protobuf.WellKnownTypes.Timestamp(); + } + TimestampField.MergeFrom(other.TimestampField); + } + if (other.typeField_ != null) { + if (typeField_ == null) { + typeField_ = new global::Google.Protobuf.WellKnownTypes.Type(); + } + TypeField.MergeFrom(other.TypeField); + } + if (other.doubleField_ != null) { + if (doubleField_ == null || other.DoubleField != 0D) { + DoubleField = other.DoubleField; + } + } + if (other.floatField_ != null) { + if (floatField_ == null || other.FloatField != 0F) { + FloatField = other.FloatField; + } + } + if (other.int64Field_ != null) { + if (int64Field_ == null || other.Int64Field != 0L) { + Int64Field = other.Int64Field; + } + } + if (other.uint64Field_ != null) { + if (uint64Field_ == null || other.Uint64Field != 0UL) { + Uint64Field = other.Uint64Field; + } + } + if (other.int32Field_ != null) { + if (int32Field_ == null || other.Int32Field != 0) { + Int32Field = other.Int32Field; + } + } + if (other.uint32Field_ != null) { + if (uint32Field_ == null || other.Uint32Field != 0) { + Uint32Field = other.Uint32Field; + } + } + if (other.boolField_ != null) { + if (boolField_ == null || other.BoolField != false) { + BoolField = other.BoolField; + } + } + if (other.stringField_ != null) { + if (stringField_ == null || other.StringField != "") { + StringField = other.StringField; + } + } + if (other.bytesField_ != null) { + if (bytesField_ == null || other.BytesField != pb::ByteString.Empty) { + BytesField = other.BytesField; + } + } + if (other.valueField_ != null) { + if (valueField_ == null) { + valueField_ = new global::Google.Protobuf.WellKnownTypes.Value(); + } + ValueField.MergeFrom(other.ValueField); + } + } + + public void MergeFrom(pb::CodedInputStream input) { + uint tag; + while ((tag = input.ReadTag()) != 0) { + switch(tag) { + default: + input.SkipLastField(); + break; + case 10: { + if (anyField_ == null) { + anyField_ = new global::Google.Protobuf.WellKnownTypes.Any(); + } + input.ReadMessage(anyField_); + break; 
+ } + case 18: { + if (apiField_ == null) { + apiField_ = new global::Google.Protobuf.WellKnownTypes.Api(); + } + input.ReadMessage(apiField_); + break; + } + case 26: { + if (durationField_ == null) { + durationField_ = new global::Google.Protobuf.WellKnownTypes.Duration(); + } + input.ReadMessage(durationField_); + break; + } + case 34: { + if (emptyField_ == null) { + emptyField_ = new global::Google.Protobuf.WellKnownTypes.Empty(); + } + input.ReadMessage(emptyField_); + break; + } + case 42: { + if (fieldMaskField_ == null) { + fieldMaskField_ = new global::Google.Protobuf.WellKnownTypes.FieldMask(); + } + input.ReadMessage(fieldMaskField_); + break; + } + case 50: { + if (sourceContextField_ == null) { + sourceContextField_ = new global::Google.Protobuf.WellKnownTypes.SourceContext(); + } + input.ReadMessage(sourceContextField_); + break; + } + case 58: { + if (structField_ == null) { + structField_ = new global::Google.Protobuf.WellKnownTypes.Struct(); + } + input.ReadMessage(structField_); + break; + } + case 66: { + if (timestampField_ == null) { + timestampField_ = new global::Google.Protobuf.WellKnownTypes.Timestamp(); + } + input.ReadMessage(timestampField_); + break; + } + case 74: { + if (typeField_ == null) { + typeField_ = new global::Google.Protobuf.WellKnownTypes.Type(); + } + input.ReadMessage(typeField_); + break; + } + case 82: { + double? value = _single_doubleField_codec.Read(input); + if (doubleField_ == null || value != 0D) { + DoubleField = value; + } + break; + } + case 90: { + float? value = _single_floatField_codec.Read(input); + if (floatField_ == null || value != 0F) { + FloatField = value; + } + break; + } + case 98: { + long? value = _single_int64Field_codec.Read(input); + if (int64Field_ == null || value != 0L) { + Int64Field = value; + } + break; + } + case 106: { + ulong? value = _single_uint64Field_codec.Read(input); + if (uint64Field_ == null || value != 0UL) { + Uint64Field = value; + } + break; + } + case 114: { + int? value = _single_int32Field_codec.Read(input); + if (int32Field_ == null || value != 0) { + Int32Field = value; + } + break; + } + case 122: { + uint? value = _single_uint32Field_codec.Read(input); + if (uint32Field_ == null || value != 0) { + Uint32Field = value; + } + break; + } + case 130: { + bool? value = _single_boolField_codec.Read(input); + if (boolField_ == null || value != false) { + BoolField = value; + } + break; + } + case 138: { + string value = _single_stringField_codec.Read(input); + if (stringField_ == null || value != "") { + StringField = value; + } + break; + } + case 146: { + pb::ByteString value = _single_bytesField_codec.Read(input); + if (bytesField_ == null || value != pb::ByteString.Empty) { + BytesField = value; + } + break; + } + case 154: { + if (valueField_ == null) { + valueField_ = new global::Google.Protobuf.WellKnownTypes.Value(); + } + input.ReadMessage(valueField_); + break; + } + } + } + } + + } + + /// + /// A repeated field for each well-known type. 
+ /// + [global::System.Diagnostics.DebuggerNonUserCodeAttribute()] + public sealed partial class RepeatedWellKnownTypes : pb::IMessage { + private static readonly pb::MessageParser _parser = new pb::MessageParser(() => new RepeatedWellKnownTypes()); + public static pb::MessageParser Parser { get { return _parser; } } + + public static pbr::MessageDescriptor Descriptor { + get { return global::Google.Protobuf.TestProtos.UnittestWellKnownTypesReflection.Descriptor.MessageTypes[1]; } + } + + pbr::MessageDescriptor pb::IMessage.Descriptor { + get { return Descriptor; } + } + + public RepeatedWellKnownTypes() { + OnConstruction(); + } + + partial void OnConstruction(); + + public RepeatedWellKnownTypes(RepeatedWellKnownTypes other) : this() { + anyField_ = other.anyField_.Clone(); + apiField_ = other.apiField_.Clone(); + durationField_ = other.durationField_.Clone(); + emptyField_ = other.emptyField_.Clone(); + fieldMaskField_ = other.fieldMaskField_.Clone(); + sourceContextField_ = other.sourceContextField_.Clone(); + structField_ = other.structField_.Clone(); + timestampField_ = other.timestampField_.Clone(); + typeField_ = other.typeField_.Clone(); + doubleField_ = other.doubleField_.Clone(); + floatField_ = other.floatField_.Clone(); + int64Field_ = other.int64Field_.Clone(); + uint64Field_ = other.uint64Field_.Clone(); + int32Field_ = other.int32Field_.Clone(); + uint32Field_ = other.uint32Field_.Clone(); + boolField_ = other.boolField_.Clone(); + stringField_ = other.stringField_.Clone(); + bytesField_ = other.bytesField_.Clone(); + } + + public RepeatedWellKnownTypes Clone() { + return new RepeatedWellKnownTypes(this); + } + + /// Field number for the "any_field" field. + public const int AnyFieldFieldNumber = 1; + private static readonly pb::FieldCodec _repeated_anyField_codec + = pb::FieldCodec.ForMessage(10, global::Google.Protobuf.WellKnownTypes.Any.Parser); + private readonly pbc::RepeatedField anyField_ = new pbc::RepeatedField(); + public pbc::RepeatedField AnyField { + get { return anyField_; } + } + + /// Field number for the "api_field" field. + public const int ApiFieldFieldNumber = 2; + private static readonly pb::FieldCodec _repeated_apiField_codec + = pb::FieldCodec.ForMessage(18, global::Google.Protobuf.WellKnownTypes.Api.Parser); + private readonly pbc::RepeatedField apiField_ = new pbc::RepeatedField(); + public pbc::RepeatedField ApiField { + get { return apiField_; } + } + + /// Field number for the "duration_field" field. + public const int DurationFieldFieldNumber = 3; + private static readonly pb::FieldCodec _repeated_durationField_codec + = pb::FieldCodec.ForMessage(26, global::Google.Protobuf.WellKnownTypes.Duration.Parser); + private readonly pbc::RepeatedField durationField_ = new pbc::RepeatedField(); + public pbc::RepeatedField DurationField { + get { return durationField_; } + } + + /// Field number for the "empty_field" field. + public const int EmptyFieldFieldNumber = 4; + private static readonly pb::FieldCodec _repeated_emptyField_codec + = pb::FieldCodec.ForMessage(34, global::Google.Protobuf.WellKnownTypes.Empty.Parser); + private readonly pbc::RepeatedField emptyField_ = new pbc::RepeatedField(); + public pbc::RepeatedField EmptyField { + get { return emptyField_; } + } + + /// Field number for the "field_mask_field" field. 
+ public const int FieldMaskFieldFieldNumber = 5; + private static readonly pb::FieldCodec _repeated_fieldMaskField_codec + = pb::FieldCodec.ForMessage(42, global::Google.Protobuf.WellKnownTypes.FieldMask.Parser); + private readonly pbc::RepeatedField fieldMaskField_ = new pbc::RepeatedField(); + public pbc::RepeatedField FieldMaskField { + get { return fieldMaskField_; } + } + + /// Field number for the "source_context_field" field. + public const int SourceContextFieldFieldNumber = 6; + private static readonly pb::FieldCodec _repeated_sourceContextField_codec + = pb::FieldCodec.ForMessage(50, global::Google.Protobuf.WellKnownTypes.SourceContext.Parser); + private readonly pbc::RepeatedField sourceContextField_ = new pbc::RepeatedField(); + public pbc::RepeatedField SourceContextField { + get { return sourceContextField_; } + } + + /// Field number for the "struct_field" field. + public const int StructFieldFieldNumber = 7; + private static readonly pb::FieldCodec _repeated_structField_codec + = pb::FieldCodec.ForMessage(58, global::Google.Protobuf.WellKnownTypes.Struct.Parser); + private readonly pbc::RepeatedField structField_ = new pbc::RepeatedField(); + public pbc::RepeatedField StructField { + get { return structField_; } + } + + /// Field number for the "timestamp_field" field. + public const int TimestampFieldFieldNumber = 8; + private static readonly pb::FieldCodec _repeated_timestampField_codec + = pb::FieldCodec.ForMessage(66, global::Google.Protobuf.WellKnownTypes.Timestamp.Parser); + private readonly pbc::RepeatedField timestampField_ = new pbc::RepeatedField(); + public pbc::RepeatedField TimestampField { + get { return timestampField_; } + } + + /// Field number for the "type_field" field. + public const int TypeFieldFieldNumber = 9; + private static readonly pb::FieldCodec _repeated_typeField_codec + = pb::FieldCodec.ForMessage(74, global::Google.Protobuf.WellKnownTypes.Type.Parser); + private readonly pbc::RepeatedField typeField_ = new pbc::RepeatedField(); + public pbc::RepeatedField TypeField { + get { return typeField_; } + } + + /// Field number for the "double_field" field. + public const int DoubleFieldFieldNumber = 10; + private static readonly pb::FieldCodec _repeated_doubleField_codec + = pb::FieldCodec.ForStructWrapper(82); + private readonly pbc::RepeatedField doubleField_ = new pbc::RepeatedField(); + /// + /// These don't actually make a lot of sense, but they're not prohibited... + /// + public pbc::RepeatedField DoubleField { + get { return doubleField_; } + } + + /// Field number for the "float_field" field. + public const int FloatFieldFieldNumber = 11; + private static readonly pb::FieldCodec _repeated_floatField_codec + = pb::FieldCodec.ForStructWrapper(90); + private readonly pbc::RepeatedField floatField_ = new pbc::RepeatedField(); + public pbc::RepeatedField FloatField { + get { return floatField_; } + } + + /// Field number for the "int64_field" field. + public const int Int64FieldFieldNumber = 12; + private static readonly pb::FieldCodec _repeated_int64Field_codec + = pb::FieldCodec.ForStructWrapper(98); + private readonly pbc::RepeatedField int64Field_ = new pbc::RepeatedField(); + public pbc::RepeatedField Int64Field { + get { return int64Field_; } + } + + /// Field number for the "uint64_field" field. 
+ public const int Uint64FieldFieldNumber = 13; + private static readonly pb::FieldCodec _repeated_uint64Field_codec + = pb::FieldCodec.ForStructWrapper(106); + private readonly pbc::RepeatedField uint64Field_ = new pbc::RepeatedField(); + public pbc::RepeatedField Uint64Field { + get { return uint64Field_; } + } + + /// Field number for the "int32_field" field. + public const int Int32FieldFieldNumber = 14; + private static readonly pb::FieldCodec _repeated_int32Field_codec + = pb::FieldCodec.ForStructWrapper(114); + private readonly pbc::RepeatedField int32Field_ = new pbc::RepeatedField(); + public pbc::RepeatedField Int32Field { + get { return int32Field_; } + } + + /// Field number for the "uint32_field" field. + public const int Uint32FieldFieldNumber = 15; + private static readonly pb::FieldCodec _repeated_uint32Field_codec + = pb::FieldCodec.ForStructWrapper(122); + private readonly pbc::RepeatedField uint32Field_ = new pbc::RepeatedField(); + public pbc::RepeatedField Uint32Field { + get { return uint32Field_; } + } + + /// Field number for the "bool_field" field. + public const int BoolFieldFieldNumber = 16; + private static readonly pb::FieldCodec _repeated_boolField_codec + = pb::FieldCodec.ForStructWrapper(130); + private readonly pbc::RepeatedField boolField_ = new pbc::RepeatedField(); + public pbc::RepeatedField BoolField { + get { return boolField_; } + } + + /// Field number for the "string_field" field. + public const int StringFieldFieldNumber = 17; + private static readonly pb::FieldCodec _repeated_stringField_codec + = pb::FieldCodec.ForClassWrapper(138); + private readonly pbc::RepeatedField stringField_ = new pbc::RepeatedField(); + public pbc::RepeatedField StringField { + get { return stringField_; } + } + + /// Field number for the "bytes_field" field. 
+ public const int BytesFieldFieldNumber = 18; + private static readonly pb::FieldCodec _repeated_bytesField_codec + = pb::FieldCodec.ForClassWrapper(146); + private readonly pbc::RepeatedField bytesField_ = new pbc::RepeatedField(); + public pbc::RepeatedField BytesField { + get { return bytesField_; } + } + + public override bool Equals(object other) { + return Equals(other as RepeatedWellKnownTypes); + } + + public bool Equals(RepeatedWellKnownTypes other) { + if (ReferenceEquals(other, null)) { + return false; + } + if (ReferenceEquals(other, this)) { + return true; + } + if(!anyField_.Equals(other.anyField_)) return false; + if(!apiField_.Equals(other.apiField_)) return false; + if(!durationField_.Equals(other.durationField_)) return false; + if(!emptyField_.Equals(other.emptyField_)) return false; + if(!fieldMaskField_.Equals(other.fieldMaskField_)) return false; + if(!sourceContextField_.Equals(other.sourceContextField_)) return false; + if(!structField_.Equals(other.structField_)) return false; + if(!timestampField_.Equals(other.timestampField_)) return false; + if(!typeField_.Equals(other.typeField_)) return false; + if(!doubleField_.Equals(other.doubleField_)) return false; + if(!floatField_.Equals(other.floatField_)) return false; + if(!int64Field_.Equals(other.int64Field_)) return false; + if(!uint64Field_.Equals(other.uint64Field_)) return false; + if(!int32Field_.Equals(other.int32Field_)) return false; + if(!uint32Field_.Equals(other.uint32Field_)) return false; + if(!boolField_.Equals(other.boolField_)) return false; + if(!stringField_.Equals(other.stringField_)) return false; + if(!bytesField_.Equals(other.bytesField_)) return false; + return true; + } + + public override int GetHashCode() { + int hash = 1; + hash ^= anyField_.GetHashCode(); + hash ^= apiField_.GetHashCode(); + hash ^= durationField_.GetHashCode(); + hash ^= emptyField_.GetHashCode(); + hash ^= fieldMaskField_.GetHashCode(); + hash ^= sourceContextField_.GetHashCode(); + hash ^= structField_.GetHashCode(); + hash ^= timestampField_.GetHashCode(); + hash ^= typeField_.GetHashCode(); + hash ^= doubleField_.GetHashCode(); + hash ^= floatField_.GetHashCode(); + hash ^= int64Field_.GetHashCode(); + hash ^= uint64Field_.GetHashCode(); + hash ^= int32Field_.GetHashCode(); + hash ^= uint32Field_.GetHashCode(); + hash ^= boolField_.GetHashCode(); + hash ^= stringField_.GetHashCode(); + hash ^= bytesField_.GetHashCode(); + return hash; + } + + public override string ToString() { + return pb::JsonFormatter.ToDiagnosticString(this); + } + + public void WriteTo(pb::CodedOutputStream output) { + anyField_.WriteTo(output, _repeated_anyField_codec); + apiField_.WriteTo(output, _repeated_apiField_codec); + durationField_.WriteTo(output, _repeated_durationField_codec); + emptyField_.WriteTo(output, _repeated_emptyField_codec); + fieldMaskField_.WriteTo(output, _repeated_fieldMaskField_codec); + sourceContextField_.WriteTo(output, _repeated_sourceContextField_codec); + structField_.WriteTo(output, _repeated_structField_codec); + timestampField_.WriteTo(output, _repeated_timestampField_codec); + typeField_.WriteTo(output, _repeated_typeField_codec); + doubleField_.WriteTo(output, _repeated_doubleField_codec); + floatField_.WriteTo(output, _repeated_floatField_codec); + int64Field_.WriteTo(output, _repeated_int64Field_codec); + uint64Field_.WriteTo(output, _repeated_uint64Field_codec); + int32Field_.WriteTo(output, _repeated_int32Field_codec); + uint32Field_.WriteTo(output, _repeated_uint32Field_codec); + 
boolField_.WriteTo(output, _repeated_boolField_codec); + stringField_.WriteTo(output, _repeated_stringField_codec); + bytesField_.WriteTo(output, _repeated_bytesField_codec); + } + + public int CalculateSize() { + int size = 0; + size += anyField_.CalculateSize(_repeated_anyField_codec); + size += apiField_.CalculateSize(_repeated_apiField_codec); + size += durationField_.CalculateSize(_repeated_durationField_codec); + size += emptyField_.CalculateSize(_repeated_emptyField_codec); + size += fieldMaskField_.CalculateSize(_repeated_fieldMaskField_codec); + size += sourceContextField_.CalculateSize(_repeated_sourceContextField_codec); + size += structField_.CalculateSize(_repeated_structField_codec); + size += timestampField_.CalculateSize(_repeated_timestampField_codec); + size += typeField_.CalculateSize(_repeated_typeField_codec); + size += doubleField_.CalculateSize(_repeated_doubleField_codec); + size += floatField_.CalculateSize(_repeated_floatField_codec); + size += int64Field_.CalculateSize(_repeated_int64Field_codec); + size += uint64Field_.CalculateSize(_repeated_uint64Field_codec); + size += int32Field_.CalculateSize(_repeated_int32Field_codec); + size += uint32Field_.CalculateSize(_repeated_uint32Field_codec); + size += boolField_.CalculateSize(_repeated_boolField_codec); + size += stringField_.CalculateSize(_repeated_stringField_codec); + size += bytesField_.CalculateSize(_repeated_bytesField_codec); + return size; + } + + public void MergeFrom(RepeatedWellKnownTypes other) { + if (other == null) { + return; + } + anyField_.Add(other.anyField_); + apiField_.Add(other.apiField_); + durationField_.Add(other.durationField_); + emptyField_.Add(other.emptyField_); + fieldMaskField_.Add(other.fieldMaskField_); + sourceContextField_.Add(other.sourceContextField_); + structField_.Add(other.structField_); + timestampField_.Add(other.timestampField_); + typeField_.Add(other.typeField_); + doubleField_.Add(other.doubleField_); + floatField_.Add(other.floatField_); + int64Field_.Add(other.int64Field_); + uint64Field_.Add(other.uint64Field_); + int32Field_.Add(other.int32Field_); + uint32Field_.Add(other.uint32Field_); + boolField_.Add(other.boolField_); + stringField_.Add(other.stringField_); + bytesField_.Add(other.bytesField_); + } + + public void MergeFrom(pb::CodedInputStream input) { + uint tag; + while ((tag = input.ReadTag()) != 0) { + switch(tag) { + default: + input.SkipLastField(); + break; + case 10: { + anyField_.AddEntriesFrom(input, _repeated_anyField_codec); + break; + } + case 18: { + apiField_.AddEntriesFrom(input, _repeated_apiField_codec); + break; + } + case 26: { + durationField_.AddEntriesFrom(input, _repeated_durationField_codec); + break; + } + case 34: { + emptyField_.AddEntriesFrom(input, _repeated_emptyField_codec); + break; + } + case 42: { + fieldMaskField_.AddEntriesFrom(input, _repeated_fieldMaskField_codec); + break; + } + case 50: { + sourceContextField_.AddEntriesFrom(input, _repeated_sourceContextField_codec); + break; + } + case 58: { + structField_.AddEntriesFrom(input, _repeated_structField_codec); + break; + } + case 66: { + timestampField_.AddEntriesFrom(input, _repeated_timestampField_codec); + break; + } + case 74: { + typeField_.AddEntriesFrom(input, _repeated_typeField_codec); + break; + } + case 82: { + doubleField_.AddEntriesFrom(input, _repeated_doubleField_codec); + break; + } + case 90: { + floatField_.AddEntriesFrom(input, _repeated_floatField_codec); + break; + } + case 98: { + int64Field_.AddEntriesFrom(input, _repeated_int64Field_codec); + 
break; + } + case 106: { + uint64Field_.AddEntriesFrom(input, _repeated_uint64Field_codec); + break; + } + case 114: { + int32Field_.AddEntriesFrom(input, _repeated_int32Field_codec); + break; + } + case 122: { + uint32Field_.AddEntriesFrom(input, _repeated_uint32Field_codec); + break; + } + case 130: { + boolField_.AddEntriesFrom(input, _repeated_boolField_codec); + break; + } + case 138: { + stringField_.AddEntriesFrom(input, _repeated_stringField_codec); + break; + } + case 146: { + bytesField_.AddEntriesFrom(input, _repeated_bytesField_codec); + break; + } + } + } + } + + } + + [global::System.Diagnostics.DebuggerNonUserCodeAttribute()] + public sealed partial class OneofWellKnownTypes : pb::IMessage { + private static readonly pb::MessageParser _parser = new pb::MessageParser(() => new OneofWellKnownTypes()); + public static pb::MessageParser Parser { get { return _parser; } } + + public static pbr::MessageDescriptor Descriptor { + get { return global::Google.Protobuf.TestProtos.UnittestWellKnownTypesReflection.Descriptor.MessageTypes[2]; } + } + + pbr::MessageDescriptor pb::IMessage.Descriptor { + get { return Descriptor; } + } + + public OneofWellKnownTypes() { + OnConstruction(); + } + + partial void OnConstruction(); + + public OneofWellKnownTypes(OneofWellKnownTypes other) : this() { + switch (other.OneofFieldCase) { + case OneofFieldOneofCase.AnyField: + AnyField = other.AnyField.Clone(); + break; + case OneofFieldOneofCase.ApiField: + ApiField = other.ApiField.Clone(); + break; + case OneofFieldOneofCase.DurationField: + DurationField = other.DurationField.Clone(); + break; + case OneofFieldOneofCase.EmptyField: + EmptyField = other.EmptyField.Clone(); + break; + case OneofFieldOneofCase.FieldMaskField: + FieldMaskField = other.FieldMaskField.Clone(); + break; + case OneofFieldOneofCase.SourceContextField: + SourceContextField = other.SourceContextField.Clone(); + break; + case OneofFieldOneofCase.StructField: + StructField = other.StructField.Clone(); + break; + case OneofFieldOneofCase.TimestampField: + TimestampField = other.TimestampField.Clone(); + break; + case OneofFieldOneofCase.TypeField: + TypeField = other.TypeField.Clone(); + break; + case OneofFieldOneofCase.DoubleField: + DoubleField = other.DoubleField; + break; + case OneofFieldOneofCase.FloatField: + FloatField = other.FloatField; + break; + case OneofFieldOneofCase.Int64Field: + Int64Field = other.Int64Field; + break; + case OneofFieldOneofCase.Uint64Field: + Uint64Field = other.Uint64Field; + break; + case OneofFieldOneofCase.Int32Field: + Int32Field = other.Int32Field; + break; + case OneofFieldOneofCase.Uint32Field: + Uint32Field = other.Uint32Field; + break; + case OneofFieldOneofCase.BoolField: + BoolField = other.BoolField; + break; + case OneofFieldOneofCase.StringField: + StringField = other.StringField; + break; + case OneofFieldOneofCase.BytesField: + BytesField = other.BytesField; + break; + } + + } + + public OneofWellKnownTypes Clone() { + return new OneofWellKnownTypes(this); + } + + /// Field number for the "any_field" field. + public const int AnyFieldFieldNumber = 1; + public global::Google.Protobuf.WellKnownTypes.Any AnyField { + get { return oneofFieldCase_ == OneofFieldOneofCase.AnyField ? (global::Google.Protobuf.WellKnownTypes.Any) oneofField_ : null; } + set { + oneofField_ = value; + oneofFieldCase_ = value == null ? OneofFieldOneofCase.None : OneofFieldOneofCase.AnyField; + } + } + + /// Field number for the "api_field" field. 
+ public const int ApiFieldFieldNumber = 2; + public global::Google.Protobuf.WellKnownTypes.Api ApiField { + get { return oneofFieldCase_ == OneofFieldOneofCase.ApiField ? (global::Google.Protobuf.WellKnownTypes.Api) oneofField_ : null; } + set { + oneofField_ = value; + oneofFieldCase_ = value == null ? OneofFieldOneofCase.None : OneofFieldOneofCase.ApiField; + } + } + + /// Field number for the "duration_field" field. + public const int DurationFieldFieldNumber = 3; + public global::Google.Protobuf.WellKnownTypes.Duration DurationField { + get { return oneofFieldCase_ == OneofFieldOneofCase.DurationField ? (global::Google.Protobuf.WellKnownTypes.Duration) oneofField_ : null; } + set { + oneofField_ = value; + oneofFieldCase_ = value == null ? OneofFieldOneofCase.None : OneofFieldOneofCase.DurationField; + } + } + + /// Field number for the "empty_field" field. + public const int EmptyFieldFieldNumber = 4; + public global::Google.Protobuf.WellKnownTypes.Empty EmptyField { + get { return oneofFieldCase_ == OneofFieldOneofCase.EmptyField ? (global::Google.Protobuf.WellKnownTypes.Empty) oneofField_ : null; } + set { + oneofField_ = value; + oneofFieldCase_ = value == null ? OneofFieldOneofCase.None : OneofFieldOneofCase.EmptyField; + } + } + + /// Field number for the "field_mask_field" field. + public const int FieldMaskFieldFieldNumber = 5; + public global::Google.Protobuf.WellKnownTypes.FieldMask FieldMaskField { + get { return oneofFieldCase_ == OneofFieldOneofCase.FieldMaskField ? (global::Google.Protobuf.WellKnownTypes.FieldMask) oneofField_ : null; } + set { + oneofField_ = value; + oneofFieldCase_ = value == null ? OneofFieldOneofCase.None : OneofFieldOneofCase.FieldMaskField; + } + } + + /// Field number for the "source_context_field" field. + public const int SourceContextFieldFieldNumber = 6; + public global::Google.Protobuf.WellKnownTypes.SourceContext SourceContextField { + get { return oneofFieldCase_ == OneofFieldOneofCase.SourceContextField ? (global::Google.Protobuf.WellKnownTypes.SourceContext) oneofField_ : null; } + set { + oneofField_ = value; + oneofFieldCase_ = value == null ? OneofFieldOneofCase.None : OneofFieldOneofCase.SourceContextField; + } + } + + /// Field number for the "struct_field" field. + public const int StructFieldFieldNumber = 7; + public global::Google.Protobuf.WellKnownTypes.Struct StructField { + get { return oneofFieldCase_ == OneofFieldOneofCase.StructField ? (global::Google.Protobuf.WellKnownTypes.Struct) oneofField_ : null; } + set { + oneofField_ = value; + oneofFieldCase_ = value == null ? OneofFieldOneofCase.None : OneofFieldOneofCase.StructField; + } + } + + /// Field number for the "timestamp_field" field. + public const int TimestampFieldFieldNumber = 8; + public global::Google.Protobuf.WellKnownTypes.Timestamp TimestampField { + get { return oneofFieldCase_ == OneofFieldOneofCase.TimestampField ? (global::Google.Protobuf.WellKnownTypes.Timestamp) oneofField_ : null; } + set { + oneofField_ = value; + oneofFieldCase_ = value == null ? OneofFieldOneofCase.None : OneofFieldOneofCase.TimestampField; + } + } + + /// Field number for the "type_field" field. + public const int TypeFieldFieldNumber = 9; + public global::Google.Protobuf.WellKnownTypes.Type TypeField { + get { return oneofFieldCase_ == OneofFieldOneofCase.TypeField ? (global::Google.Protobuf.WellKnownTypes.Type) oneofField_ : null; } + set { + oneofField_ = value; + oneofFieldCase_ = value == null ? 
OneofFieldOneofCase.None : OneofFieldOneofCase.TypeField;
+      }
+    }
+
+    /// <summary>Field number for the "double_field" field.</summary>
+    public const int DoubleFieldFieldNumber = 10;
+    private static readonly pb::FieldCodec<double?> _oneof_doubleField_codec = pb::FieldCodec.ForStructWrapper<double>(82);
+    public double? DoubleField {
+      get { return oneofFieldCase_ == OneofFieldOneofCase.DoubleField ? (double?) oneofField_ : (double?) null; }
+      set {
+        oneofField_ = value;
+        oneofFieldCase_ = value == null ? OneofFieldOneofCase.None : OneofFieldOneofCase.DoubleField;
+      }
+    }
+
+    /// <summary>Field number for the "float_field" field.</summary>
+    public const int FloatFieldFieldNumber = 11;
+    private static readonly pb::FieldCodec<float?> _oneof_floatField_codec = pb::FieldCodec.ForStructWrapper<float>(90);
+    public float? FloatField {
+      get { return oneofFieldCase_ == OneofFieldOneofCase.FloatField ? (float?) oneofField_ : (float?) null; }
+      set {
+        oneofField_ = value;
+        oneofFieldCase_ = value == null ? OneofFieldOneofCase.None : OneofFieldOneofCase.FloatField;
+      }
+    }
+
+    /// <summary>Field number for the "int64_field" field.</summary>
+    public const int Int64FieldFieldNumber = 12;
+    private static readonly pb::FieldCodec<long?> _oneof_int64Field_codec = pb::FieldCodec.ForStructWrapper<long>(98);
+    public long? Int64Field {
+      get { return oneofFieldCase_ == OneofFieldOneofCase.Int64Field ? (long?) oneofField_ : (long?) null; }
+      set {
+        oneofField_ = value;
+        oneofFieldCase_ = value == null ? OneofFieldOneofCase.None : OneofFieldOneofCase.Int64Field;
+      }
+    }
+
+    /// <summary>Field number for the "uint64_field" field.</summary>
+    public const int Uint64FieldFieldNumber = 13;
+    private static readonly pb::FieldCodec<ulong?> _oneof_uint64Field_codec = pb::FieldCodec.ForStructWrapper<ulong>(106);
+    public ulong? Uint64Field {
+      get { return oneofFieldCase_ == OneofFieldOneofCase.Uint64Field ? (ulong?) oneofField_ : (ulong?) null; }
+      set {
+        oneofField_ = value;
+        oneofFieldCase_ = value == null ? OneofFieldOneofCase.None : OneofFieldOneofCase.Uint64Field;
+      }
+    }
+
+    /// <summary>Field number for the "int32_field" field.</summary>
+    public const int Int32FieldFieldNumber = 14;
+    private static readonly pb::FieldCodec<int?> _oneof_int32Field_codec = pb::FieldCodec.ForStructWrapper<int>(114);
+    public int? Int32Field {
+      get { return oneofFieldCase_ == OneofFieldOneofCase.Int32Field ? (int?) oneofField_ : (int?) null; }
+      set {
+        oneofField_ = value;
+        oneofFieldCase_ = value == null ? OneofFieldOneofCase.None : OneofFieldOneofCase.Int32Field;
+      }
+    }
+
+    /// <summary>Field number for the "uint32_field" field.</summary>
+    public const int Uint32FieldFieldNumber = 15;
+    private static readonly pb::FieldCodec<uint?> _oneof_uint32Field_codec = pb::FieldCodec.ForStructWrapper<uint>(122);
+    public uint? Uint32Field {
+      get { return oneofFieldCase_ == OneofFieldOneofCase.Uint32Field ? (uint?) oneofField_ : (uint?) null; }
+      set {
+        oneofField_ = value;
+        oneofFieldCase_ = value == null ? OneofFieldOneofCase.None : OneofFieldOneofCase.Uint32Field;
+      }
+    }
+
+    /// <summary>Field number for the "bool_field" field.</summary>
+    public const int BoolFieldFieldNumber = 16;
+    private static readonly pb::FieldCodec<bool?> _oneof_boolField_codec = pb::FieldCodec.ForStructWrapper<bool>(130);
+    public bool? BoolField {
+      get { return oneofFieldCase_ == OneofFieldOneofCase.BoolField ? (bool?) oneofField_ : (bool?) null; }
+      set {
+        oneofField_ = value;
+        oneofFieldCase_ = value == null ? OneofFieldOneofCase.None : OneofFieldOneofCase.BoolField;
+      }
+    }
+
+    /// <summary>Field number for the "string_field" field.</summary>
+ public const int StringFieldFieldNumber = 17; + private static readonly pb::FieldCodec _oneof_stringField_codec = pb::FieldCodec.ForClassWrapper(138); + public string StringField { + get { return oneofFieldCase_ == OneofFieldOneofCase.StringField ? (string) oneofField_ : (string) null; } + set { + oneofField_ = value; + oneofFieldCase_ = value == null ? OneofFieldOneofCase.None : OneofFieldOneofCase.StringField; + } + } + + /// Field number for the "bytes_field" field. + public const int BytesFieldFieldNumber = 18; + private static readonly pb::FieldCodec _oneof_bytesField_codec = pb::FieldCodec.ForClassWrapper(146); + public pb::ByteString BytesField { + get { return oneofFieldCase_ == OneofFieldOneofCase.BytesField ? (pb::ByteString) oneofField_ : (pb::ByteString) null; } + set { + oneofField_ = value; + oneofFieldCase_ = value == null ? OneofFieldOneofCase.None : OneofFieldOneofCase.BytesField; + } + } + + private object oneofField_; + /// Enum of possible cases for the "oneof_field" oneof. + public enum OneofFieldOneofCase { + None = 0, + AnyField = 1, + ApiField = 2, + DurationField = 3, + EmptyField = 4, + FieldMaskField = 5, + SourceContextField = 6, + StructField = 7, + TimestampField = 8, + TypeField = 9, + DoubleField = 10, + FloatField = 11, + Int64Field = 12, + Uint64Field = 13, + Int32Field = 14, + Uint32Field = 15, + BoolField = 16, + StringField = 17, + BytesField = 18, + } + private OneofFieldOneofCase oneofFieldCase_ = OneofFieldOneofCase.None; + public OneofFieldOneofCase OneofFieldCase { + get { return oneofFieldCase_; } + } + + public void ClearOneofField() { + oneofFieldCase_ = OneofFieldOneofCase.None; + oneofField_ = null; + } + + public override bool Equals(object other) { + return Equals(other as OneofWellKnownTypes); + } + + public bool Equals(OneofWellKnownTypes other) { + if (ReferenceEquals(other, null)) { + return false; + } + if (ReferenceEquals(other, this)) { + return true; + } + if (!object.Equals(AnyField, other.AnyField)) return false; + if (!object.Equals(ApiField, other.ApiField)) return false; + if (!object.Equals(DurationField, other.DurationField)) return false; + if (!object.Equals(EmptyField, other.EmptyField)) return false; + if (!object.Equals(FieldMaskField, other.FieldMaskField)) return false; + if (!object.Equals(SourceContextField, other.SourceContextField)) return false; + if (!object.Equals(StructField, other.StructField)) return false; + if (!object.Equals(TimestampField, other.TimestampField)) return false; + if (!object.Equals(TypeField, other.TypeField)) return false; + if (DoubleField != other.DoubleField) return false; + if (FloatField != other.FloatField) return false; + if (Int64Field != other.Int64Field) return false; + if (Uint64Field != other.Uint64Field) return false; + if (Int32Field != other.Int32Field) return false; + if (Uint32Field != other.Uint32Field) return false; + if (BoolField != other.BoolField) return false; + if (StringField != other.StringField) return false; + if (BytesField != other.BytesField) return false; + if (OneofFieldCase != other.OneofFieldCase) return false; + return true; + } + + public override int GetHashCode() { + int hash = 1; + if (oneofFieldCase_ == OneofFieldOneofCase.AnyField) hash ^= AnyField.GetHashCode(); + if (oneofFieldCase_ == OneofFieldOneofCase.ApiField) hash ^= ApiField.GetHashCode(); + if (oneofFieldCase_ == OneofFieldOneofCase.DurationField) hash ^= DurationField.GetHashCode(); + if (oneofFieldCase_ == OneofFieldOneofCase.EmptyField) hash ^= EmptyField.GetHashCode(); + if 
(oneofFieldCase_ == OneofFieldOneofCase.FieldMaskField) hash ^= FieldMaskField.GetHashCode(); + if (oneofFieldCase_ == OneofFieldOneofCase.SourceContextField) hash ^= SourceContextField.GetHashCode(); + if (oneofFieldCase_ == OneofFieldOneofCase.StructField) hash ^= StructField.GetHashCode(); + if (oneofFieldCase_ == OneofFieldOneofCase.TimestampField) hash ^= TimestampField.GetHashCode(); + if (oneofFieldCase_ == OneofFieldOneofCase.TypeField) hash ^= TypeField.GetHashCode(); + if (oneofFieldCase_ == OneofFieldOneofCase.DoubleField) hash ^= DoubleField.GetHashCode(); + if (oneofFieldCase_ == OneofFieldOneofCase.FloatField) hash ^= FloatField.GetHashCode(); + if (oneofFieldCase_ == OneofFieldOneofCase.Int64Field) hash ^= Int64Field.GetHashCode(); + if (oneofFieldCase_ == OneofFieldOneofCase.Uint64Field) hash ^= Uint64Field.GetHashCode(); + if (oneofFieldCase_ == OneofFieldOneofCase.Int32Field) hash ^= Int32Field.GetHashCode(); + if (oneofFieldCase_ == OneofFieldOneofCase.Uint32Field) hash ^= Uint32Field.GetHashCode(); + if (oneofFieldCase_ == OneofFieldOneofCase.BoolField) hash ^= BoolField.GetHashCode(); + if (oneofFieldCase_ == OneofFieldOneofCase.StringField) hash ^= StringField.GetHashCode(); + if (oneofFieldCase_ == OneofFieldOneofCase.BytesField) hash ^= BytesField.GetHashCode(); + hash ^= (int) oneofFieldCase_; + return hash; + } + + public override string ToString() { + return pb::JsonFormatter.ToDiagnosticString(this); + } + + public void WriteTo(pb::CodedOutputStream output) { + if (oneofFieldCase_ == OneofFieldOneofCase.AnyField) { + output.WriteRawTag(10); + output.WriteMessage(AnyField); + } + if (oneofFieldCase_ == OneofFieldOneofCase.ApiField) { + output.WriteRawTag(18); + output.WriteMessage(ApiField); + } + if (oneofFieldCase_ == OneofFieldOneofCase.DurationField) { + output.WriteRawTag(26); + output.WriteMessage(DurationField); + } + if (oneofFieldCase_ == OneofFieldOneofCase.EmptyField) { + output.WriteRawTag(34); + output.WriteMessage(EmptyField); + } + if (oneofFieldCase_ == OneofFieldOneofCase.FieldMaskField) { + output.WriteRawTag(42); + output.WriteMessage(FieldMaskField); + } + if (oneofFieldCase_ == OneofFieldOneofCase.SourceContextField) { + output.WriteRawTag(50); + output.WriteMessage(SourceContextField); + } + if (oneofFieldCase_ == OneofFieldOneofCase.StructField) { + output.WriteRawTag(58); + output.WriteMessage(StructField); + } + if (oneofFieldCase_ == OneofFieldOneofCase.TimestampField) { + output.WriteRawTag(66); + output.WriteMessage(TimestampField); + } + if (oneofFieldCase_ == OneofFieldOneofCase.TypeField) { + output.WriteRawTag(74); + output.WriteMessage(TypeField); + } + if (oneofFieldCase_ == OneofFieldOneofCase.DoubleField) { + _oneof_doubleField_codec.WriteTagAndValue(output, (double?) oneofField_); + } + if (oneofFieldCase_ == OneofFieldOneofCase.FloatField) { + _oneof_floatField_codec.WriteTagAndValue(output, (float?) oneofField_); + } + if (oneofFieldCase_ == OneofFieldOneofCase.Int64Field) { + _oneof_int64Field_codec.WriteTagAndValue(output, (long?) oneofField_); + } + if (oneofFieldCase_ == OneofFieldOneofCase.Uint64Field) { + _oneof_uint64Field_codec.WriteTagAndValue(output, (ulong?) oneofField_); + } + if (oneofFieldCase_ == OneofFieldOneofCase.Int32Field) { + _oneof_int32Field_codec.WriteTagAndValue(output, (int?) oneofField_); + } + if (oneofFieldCase_ == OneofFieldOneofCase.Uint32Field) { + _oneof_uint32Field_codec.WriteTagAndValue(output, (uint?) 
oneofField_); + } + if (oneofFieldCase_ == OneofFieldOneofCase.BoolField) { + _oneof_boolField_codec.WriteTagAndValue(output, (bool?) oneofField_); + } + if (oneofFieldCase_ == OneofFieldOneofCase.StringField) { + _oneof_stringField_codec.WriteTagAndValue(output, (string) oneofField_); + } + if (oneofFieldCase_ == OneofFieldOneofCase.BytesField) { + _oneof_bytesField_codec.WriteTagAndValue(output, (pb::ByteString) oneofField_); + } + } + + public int CalculateSize() { + int size = 0; + if (oneofFieldCase_ == OneofFieldOneofCase.AnyField) { + size += 1 + pb::CodedOutputStream.ComputeMessageSize(AnyField); + } + if (oneofFieldCase_ == OneofFieldOneofCase.ApiField) { + size += 1 + pb::CodedOutputStream.ComputeMessageSize(ApiField); + } + if (oneofFieldCase_ == OneofFieldOneofCase.DurationField) { + size += 1 + pb::CodedOutputStream.ComputeMessageSize(DurationField); + } + if (oneofFieldCase_ == OneofFieldOneofCase.EmptyField) { + size += 1 + pb::CodedOutputStream.ComputeMessageSize(EmptyField); + } + if (oneofFieldCase_ == OneofFieldOneofCase.FieldMaskField) { + size += 1 + pb::CodedOutputStream.ComputeMessageSize(FieldMaskField); + } + if (oneofFieldCase_ == OneofFieldOneofCase.SourceContextField) { + size += 1 + pb::CodedOutputStream.ComputeMessageSize(SourceContextField); + } + if (oneofFieldCase_ == OneofFieldOneofCase.StructField) { + size += 1 + pb::CodedOutputStream.ComputeMessageSize(StructField); + } + if (oneofFieldCase_ == OneofFieldOneofCase.TimestampField) { + size += 1 + pb::CodedOutputStream.ComputeMessageSize(TimestampField); + } + if (oneofFieldCase_ == OneofFieldOneofCase.TypeField) { + size += 1 + pb::CodedOutputStream.ComputeMessageSize(TypeField); + } + if (oneofFieldCase_ == OneofFieldOneofCase.DoubleField) { + size += _oneof_doubleField_codec.CalculateSizeWithTag(DoubleField); + } + if (oneofFieldCase_ == OneofFieldOneofCase.FloatField) { + size += _oneof_floatField_codec.CalculateSizeWithTag(FloatField); + } + if (oneofFieldCase_ == OneofFieldOneofCase.Int64Field) { + size += _oneof_int64Field_codec.CalculateSizeWithTag(Int64Field); + } + if (oneofFieldCase_ == OneofFieldOneofCase.Uint64Field) { + size += _oneof_uint64Field_codec.CalculateSizeWithTag(Uint64Field); + } + if (oneofFieldCase_ == OneofFieldOneofCase.Int32Field) { + size += _oneof_int32Field_codec.CalculateSizeWithTag(Int32Field); + } + if (oneofFieldCase_ == OneofFieldOneofCase.Uint32Field) { + size += _oneof_uint32Field_codec.CalculateSizeWithTag(Uint32Field); + } + if (oneofFieldCase_ == OneofFieldOneofCase.BoolField) { + size += _oneof_boolField_codec.CalculateSizeWithTag(BoolField); + } + if (oneofFieldCase_ == OneofFieldOneofCase.StringField) { + size += _oneof_stringField_codec.CalculateSizeWithTag(StringField); + } + if (oneofFieldCase_ == OneofFieldOneofCase.BytesField) { + size += _oneof_bytesField_codec.CalculateSizeWithTag(BytesField); + } + return size; + } + + public void MergeFrom(OneofWellKnownTypes other) { + if (other == null) { + return; + } + switch (other.OneofFieldCase) { + case OneofFieldOneofCase.AnyField: + AnyField = other.AnyField; + break; + case OneofFieldOneofCase.ApiField: + ApiField = other.ApiField; + break; + case OneofFieldOneofCase.DurationField: + DurationField = other.DurationField; + break; + case OneofFieldOneofCase.EmptyField: + EmptyField = other.EmptyField; + break; + case OneofFieldOneofCase.FieldMaskField: + FieldMaskField = other.FieldMaskField; + break; + case OneofFieldOneofCase.SourceContextField: + SourceContextField = other.SourceContextField; + break; + 
case OneofFieldOneofCase.StructField: + StructField = other.StructField; + break; + case OneofFieldOneofCase.TimestampField: + TimestampField = other.TimestampField; + break; + case OneofFieldOneofCase.TypeField: + TypeField = other.TypeField; + break; + case OneofFieldOneofCase.DoubleField: + DoubleField = other.DoubleField; + break; + case OneofFieldOneofCase.FloatField: + FloatField = other.FloatField; + break; + case OneofFieldOneofCase.Int64Field: + Int64Field = other.Int64Field; + break; + case OneofFieldOneofCase.Uint64Field: + Uint64Field = other.Uint64Field; + break; + case OneofFieldOneofCase.Int32Field: + Int32Field = other.Int32Field; + break; + case OneofFieldOneofCase.Uint32Field: + Uint32Field = other.Uint32Field; + break; + case OneofFieldOneofCase.BoolField: + BoolField = other.BoolField; + break; + case OneofFieldOneofCase.StringField: + StringField = other.StringField; + break; + case OneofFieldOneofCase.BytesField: + BytesField = other.BytesField; + break; + } + + } + + public void MergeFrom(pb::CodedInputStream input) { + uint tag; + while ((tag = input.ReadTag()) != 0) { + switch(tag) { + default: + input.SkipLastField(); + break; + case 10: { + global::Google.Protobuf.WellKnownTypes.Any subBuilder = new global::Google.Protobuf.WellKnownTypes.Any(); + if (oneofFieldCase_ == OneofFieldOneofCase.AnyField) { + subBuilder.MergeFrom(AnyField); + } + input.ReadMessage(subBuilder); + AnyField = subBuilder; + break; + } + case 18: { + global::Google.Protobuf.WellKnownTypes.Api subBuilder = new global::Google.Protobuf.WellKnownTypes.Api(); + if (oneofFieldCase_ == OneofFieldOneofCase.ApiField) { + subBuilder.MergeFrom(ApiField); + } + input.ReadMessage(subBuilder); + ApiField = subBuilder; + break; + } + case 26: { + global::Google.Protobuf.WellKnownTypes.Duration subBuilder = new global::Google.Protobuf.WellKnownTypes.Duration(); + if (oneofFieldCase_ == OneofFieldOneofCase.DurationField) { + subBuilder.MergeFrom(DurationField); + } + input.ReadMessage(subBuilder); + DurationField = subBuilder; + break; + } + case 34: { + global::Google.Protobuf.WellKnownTypes.Empty subBuilder = new global::Google.Protobuf.WellKnownTypes.Empty(); + if (oneofFieldCase_ == OneofFieldOneofCase.EmptyField) { + subBuilder.MergeFrom(EmptyField); + } + input.ReadMessage(subBuilder); + EmptyField = subBuilder; + break; + } + case 42: { + global::Google.Protobuf.WellKnownTypes.FieldMask subBuilder = new global::Google.Protobuf.WellKnownTypes.FieldMask(); + if (oneofFieldCase_ == OneofFieldOneofCase.FieldMaskField) { + subBuilder.MergeFrom(FieldMaskField); + } + input.ReadMessage(subBuilder); + FieldMaskField = subBuilder; + break; + } + case 50: { + global::Google.Protobuf.WellKnownTypes.SourceContext subBuilder = new global::Google.Protobuf.WellKnownTypes.SourceContext(); + if (oneofFieldCase_ == OneofFieldOneofCase.SourceContextField) { + subBuilder.MergeFrom(SourceContextField); + } + input.ReadMessage(subBuilder); + SourceContextField = subBuilder; + break; + } + case 58: { + global::Google.Protobuf.WellKnownTypes.Struct subBuilder = new global::Google.Protobuf.WellKnownTypes.Struct(); + if (oneofFieldCase_ == OneofFieldOneofCase.StructField) { + subBuilder.MergeFrom(StructField); + } + input.ReadMessage(subBuilder); + StructField = subBuilder; + break; + } + case 66: { + global::Google.Protobuf.WellKnownTypes.Timestamp subBuilder = new global::Google.Protobuf.WellKnownTypes.Timestamp(); + if (oneofFieldCase_ == OneofFieldOneofCase.TimestampField) { + subBuilder.MergeFrom(TimestampField); + } + 
input.ReadMessage(subBuilder); + TimestampField = subBuilder; + break; + } + case 74: { + global::Google.Protobuf.WellKnownTypes.Type subBuilder = new global::Google.Protobuf.WellKnownTypes.Type(); + if (oneofFieldCase_ == OneofFieldOneofCase.TypeField) { + subBuilder.MergeFrom(TypeField); + } + input.ReadMessage(subBuilder); + TypeField = subBuilder; + break; + } + case 82: { + DoubleField = _oneof_doubleField_codec.Read(input); + break; + } + case 90: { + FloatField = _oneof_floatField_codec.Read(input); + break; + } + case 98: { + Int64Field = _oneof_int64Field_codec.Read(input); + break; + } + case 106: { + Uint64Field = _oneof_uint64Field_codec.Read(input); + break; + } + case 114: { + Int32Field = _oneof_int32Field_codec.Read(input); + break; + } + case 122: { + Uint32Field = _oneof_uint32Field_codec.Read(input); + break; + } + case 130: { + BoolField = _oneof_boolField_codec.Read(input); + break; + } + case 138: { + StringField = _oneof_stringField_codec.Read(input); + break; + } + case 146: { + BytesField = _oneof_bytesField_codec.Read(input); + break; + } + } + } + } + + } + + /// + /// A map field for each well-known type. We only + /// need to worry about the value part of the map being the + /// well-known types, as messages can't be map keys. + /// + [global::System.Diagnostics.DebuggerNonUserCodeAttribute()] + public sealed partial class MapWellKnownTypes : pb::IMessage { + private static readonly pb::MessageParser _parser = new pb::MessageParser(() => new MapWellKnownTypes()); + public static pb::MessageParser Parser { get { return _parser; } } + + public static pbr::MessageDescriptor Descriptor { + get { return global::Google.Protobuf.TestProtos.UnittestWellKnownTypesReflection.Descriptor.MessageTypes[3]; } + } + + pbr::MessageDescriptor pb::IMessage.Descriptor { + get { return Descriptor; } + } + + public MapWellKnownTypes() { + OnConstruction(); + } + + partial void OnConstruction(); + + public MapWellKnownTypes(MapWellKnownTypes other) : this() { + anyField_ = other.anyField_.Clone(); + apiField_ = other.apiField_.Clone(); + durationField_ = other.durationField_.Clone(); + emptyField_ = other.emptyField_.Clone(); + fieldMaskField_ = other.fieldMaskField_.Clone(); + sourceContextField_ = other.sourceContextField_.Clone(); + structField_ = other.structField_.Clone(); + timestampField_ = other.timestampField_.Clone(); + typeField_ = other.typeField_.Clone(); + doubleField_ = other.doubleField_.Clone(); + floatField_ = other.floatField_.Clone(); + int64Field_ = other.int64Field_.Clone(); + uint64Field_ = other.uint64Field_.Clone(); + int32Field_ = other.int32Field_.Clone(); + uint32Field_ = other.uint32Field_.Clone(); + boolField_ = other.boolField_.Clone(); + stringField_ = other.stringField_.Clone(); + bytesField_ = other.bytesField_.Clone(); + } + + public MapWellKnownTypes Clone() { + return new MapWellKnownTypes(this); + } + + /// Field number for the "any_field" field. + public const int AnyFieldFieldNumber = 1; + private static readonly pbc::MapField.Codec _map_anyField_codec + = new pbc::MapField.Codec(pb::FieldCodec.ForInt32(8), pb::FieldCodec.ForMessage(18, global::Google.Protobuf.WellKnownTypes.Any.Parser), 10); + private readonly pbc::MapField anyField_ = new pbc::MapField(); + public pbc::MapField AnyField { + get { return anyField_; } + } + + /// Field number for the "api_field" field. 
+    public const int ApiFieldFieldNumber = 2;
+    private static readonly pbc::MapField<int, global::Google.Protobuf.WellKnownTypes.Api>.Codec _map_apiField_codec
+        = new pbc::MapField<int, global::Google.Protobuf.WellKnownTypes.Api>.Codec(pb::FieldCodec.ForInt32(8), pb::FieldCodec.ForMessage(18, global::Google.Protobuf.WellKnownTypes.Api.Parser), 18);
+    private readonly pbc::MapField<int, global::Google.Protobuf.WellKnownTypes.Api> apiField_ = new pbc::MapField<int, global::Google.Protobuf.WellKnownTypes.Api>();
+    public pbc::MapField<int, global::Google.Protobuf.WellKnownTypes.Api> ApiField {
+      get { return apiField_; }
+    }
+
+    /// <summary>Field number for the "duration_field" field.</summary>
+    public const int DurationFieldFieldNumber = 3;
+    private static readonly pbc::MapField<int, global::Google.Protobuf.WellKnownTypes.Duration>.Codec _map_durationField_codec
+        = new pbc::MapField<int, global::Google.Protobuf.WellKnownTypes.Duration>.Codec(pb::FieldCodec.ForInt32(8), pb::FieldCodec.ForMessage(18, global::Google.Protobuf.WellKnownTypes.Duration.Parser), 26);
+    private readonly pbc::MapField<int, global::Google.Protobuf.WellKnownTypes.Duration> durationField_ = new pbc::MapField<int, global::Google.Protobuf.WellKnownTypes.Duration>();
+    public pbc::MapField<int, global::Google.Protobuf.WellKnownTypes.Duration> DurationField {
+      get { return durationField_; }
+    }
+
+    /// <summary>Field number for the "empty_field" field.</summary>
+    public const int EmptyFieldFieldNumber = 4;
+    private static readonly pbc::MapField<int, global::Google.Protobuf.WellKnownTypes.Empty>.Codec _map_emptyField_codec
+        = new pbc::MapField<int, global::Google.Protobuf.WellKnownTypes.Empty>.Codec(pb::FieldCodec.ForInt32(8), pb::FieldCodec.ForMessage(18, global::Google.Protobuf.WellKnownTypes.Empty.Parser), 34);
+    private readonly pbc::MapField<int, global::Google.Protobuf.WellKnownTypes.Empty> emptyField_ = new pbc::MapField<int, global::Google.Protobuf.WellKnownTypes.Empty>();
+    public pbc::MapField<int, global::Google.Protobuf.WellKnownTypes.Empty> EmptyField {
+      get { return emptyField_; }
+    }
+
+    /// <summary>Field number for the "field_mask_field" field.</summary>
+    public const int FieldMaskFieldFieldNumber = 5;
+    private static readonly pbc::MapField<int, global::Google.Protobuf.WellKnownTypes.FieldMask>.Codec _map_fieldMaskField_codec
+        = new pbc::MapField<int, global::Google.Protobuf.WellKnownTypes.FieldMask>.Codec(pb::FieldCodec.ForInt32(8), pb::FieldCodec.ForMessage(18, global::Google.Protobuf.WellKnownTypes.FieldMask.Parser), 42);
+    private readonly pbc::MapField<int, global::Google.Protobuf.WellKnownTypes.FieldMask> fieldMaskField_ = new pbc::MapField<int, global::Google.Protobuf.WellKnownTypes.FieldMask>();
+    public pbc::MapField<int, global::Google.Protobuf.WellKnownTypes.FieldMask> FieldMaskField {
+      get { return fieldMaskField_; }
+    }
+
+    /// <summary>Field number for the "source_context_field" field.</summary>
+    public const int SourceContextFieldFieldNumber = 6;
+    private static readonly pbc::MapField<int, global::Google.Protobuf.WellKnownTypes.SourceContext>.Codec _map_sourceContextField_codec
+        = new pbc::MapField<int, global::Google.Protobuf.WellKnownTypes.SourceContext>.Codec(pb::FieldCodec.ForInt32(8), pb::FieldCodec.ForMessage(18, global::Google.Protobuf.WellKnownTypes.SourceContext.Parser), 50);
+    private readonly pbc::MapField<int, global::Google.Protobuf.WellKnownTypes.SourceContext> sourceContextField_ = new pbc::MapField<int, global::Google.Protobuf.WellKnownTypes.SourceContext>();
+    public pbc::MapField<int, global::Google.Protobuf.WellKnownTypes.SourceContext> SourceContextField {
+      get { return sourceContextField_; }
+    }
+
+    /// <summary>Field number for the "struct_field" field.</summary>
+    public const int StructFieldFieldNumber = 7;
+    private static readonly pbc::MapField<int, global::Google.Protobuf.WellKnownTypes.Struct>.Codec _map_structField_codec
+        = new pbc::MapField<int, global::Google.Protobuf.WellKnownTypes.Struct>.Codec(pb::FieldCodec.ForInt32(8), pb::FieldCodec.ForMessage(18, global::Google.Protobuf.WellKnownTypes.Struct.Parser), 58);
+    private readonly pbc::MapField<int, global::Google.Protobuf.WellKnownTypes.Struct> structField_ = new pbc::MapField<int, global::Google.Protobuf.WellKnownTypes.Struct>();
+    public pbc::MapField<int, global::Google.Protobuf.WellKnownTypes.Struct> StructField {
+      get { return structField_; }
+    }
+
+    /// <summary>Field number for the "timestamp_field" field.</summary>
+    public const int TimestampFieldFieldNumber = 8;
+    private static readonly pbc::MapField<int, global::Google.Protobuf.WellKnownTypes.Timestamp>.Codec _map_timestampField_codec
+        = new pbc::MapField<int, global::Google.Protobuf.WellKnownTypes.Timestamp>.Codec(pb::FieldCodec.ForInt32(8), pb::FieldCodec.ForMessage(18, global::Google.Protobuf.WellKnownTypes.Timestamp.Parser), 66);
+    private readonly pbc::MapField<int, global::Google.Protobuf.WellKnownTypes.Timestamp> timestampField_ = new pbc::MapField<int, global::Google.Protobuf.WellKnownTypes.Timestamp>();
+    public pbc::MapField<int, global::Google.Protobuf.WellKnownTypes.Timestamp> TimestampField {
+      get { return timestampField_; }
+    }
+
+    /// <summary>Field number for the "type_field" field.</summary>
+    public const int TypeFieldFieldNumber = 9;
+    private static readonly pbc::MapField<int, global::Google.Protobuf.WellKnownTypes.Type>.Codec _map_typeField_codec
+        = new pbc::MapField<int, global::Google.Protobuf.WellKnownTypes.Type>.Codec(pb::FieldCodec.ForInt32(8), pb::FieldCodec.ForMessage(18, global::Google.Protobuf.WellKnownTypes.Type.Parser), 74);
+    private readonly pbc::MapField<int, global::Google.Protobuf.WellKnownTypes.Type> typeField_ = new pbc::MapField<int, global::Google.Protobuf.WellKnownTypes.Type>();
+    public pbc::MapField<int, global::Google.Protobuf.WellKnownTypes.Type> TypeField {
+      get { return typeField_; }
+    }
+
+    /// <summary>Field number for the "double_field" field.</summary>
+    public const int DoubleFieldFieldNumber = 10;
+    private static readonly pbc::MapField<int, double?>.Codec _map_doubleField_codec
+        = new pbc::MapField<int, double?>.Codec(pb::FieldCodec.ForInt32(8), pb::FieldCodec.ForStructWrapper<double>(18), 82);
+    private readonly pbc::MapField<int, double?> doubleField_ = new pbc::MapField<int, double?>();
+    public pbc::MapField<int, double?> DoubleField {
+      get { return doubleField_; }
+    }
+
+    /// <summary>Field number for the "float_field" field.</summary>
+    public const int FloatFieldFieldNumber = 11;
+    private static readonly pbc::MapField<int, float?>.Codec _map_floatField_codec
+        = new pbc::MapField<int, float?>.Codec(pb::FieldCodec.ForInt32(8), pb::FieldCodec.ForStructWrapper<float>(18), 90);
+    private readonly pbc::MapField<int, float?> floatField_ = new pbc::MapField<int, float?>();
+    public pbc::MapField<int, float?> FloatField {
+      get { return floatField_; }
+    }
+
+    /// <summary>Field number for the "int64_field" field.</summary>
+    public const int Int64FieldFieldNumber = 12;
+    private static readonly pbc::MapField<int, long?>.Codec _map_int64Field_codec
+        = new pbc::MapField<int, long?>.Codec(pb::FieldCodec.ForInt32(8), pb::FieldCodec.ForStructWrapper<long>(18), 98);
+    private readonly pbc::MapField<int, long?> int64Field_ = new pbc::MapField<int, long?>();
+    public pbc::MapField<int, long?> Int64Field {
+      get { return int64Field_; }
+    }
+
+    /// <summary>Field number for the "uint64_field" field.</summary>
+    public const int Uint64FieldFieldNumber = 13;
+    private static readonly pbc::MapField<int, ulong?>.Codec _map_uint64Field_codec
+        = new pbc::MapField<int, ulong?>.Codec(pb::FieldCodec.ForInt32(8), pb::FieldCodec.ForStructWrapper<ulong>(18), 106);
+    private readonly pbc::MapField<int, ulong?> uint64Field_ = new pbc::MapField<int, ulong?>();
+    public pbc::MapField<int, ulong?> Uint64Field {
+      get { return uint64Field_; }
+    }
+
+    /// <summary>Field number for the "int32_field" field.</summary>
+    public const int Int32FieldFieldNumber = 14;
+    private static readonly pbc::MapField<int, int?>.Codec _map_int32Field_codec
+        = new pbc::MapField<int, int?>.Codec(pb::FieldCodec.ForInt32(8), pb::FieldCodec.ForStructWrapper<int>(18), 114);
+    private readonly pbc::MapField<int, int?> int32Field_ = new pbc::MapField<int, int?>();
+    public pbc::MapField<int, int?> Int32Field {
+      get { return int32Field_; }
+    }
+
+    /// <summary>Field number for the "uint32_field" field.</summary>
+    public const int Uint32FieldFieldNumber = 15;
+    private static readonly pbc::MapField<int, uint?>.Codec _map_uint32Field_codec
+        = new pbc::MapField<int, uint?>.Codec(pb::FieldCodec.ForInt32(8), pb::FieldCodec.ForStructWrapper<uint>(18), 122);
+    private readonly pbc::MapField<int, uint?> uint32Field_ = new pbc::MapField<int, uint?>();
+    public pbc::MapField<int, uint?> Uint32Field {
+      get { return uint32Field_; }
+    }
+
+    /// <summary>Field number for the "bool_field" field.</summary>
+    public const int BoolFieldFieldNumber = 16;
+    private static readonly pbc::MapField<int, bool?>.Codec _map_boolField_codec
+        = new pbc::MapField<int, bool?>.Codec(pb::FieldCodec.ForInt32(8), pb::FieldCodec.ForStructWrapper<bool>(18), 130);
+    private readonly pbc::MapField<int, bool?> boolField_ = new pbc::MapField<int, bool?>();
+    public pbc::MapField<int, bool?> BoolField {
+      get { return boolField_; }
+    }
+
+    /// <summary>Field number for the "string_field" field.</summary>
+ public const int StringFieldFieldNumber = 17; + private static readonly pbc::MapField.Codec _map_stringField_codec + = new pbc::MapField.Codec(pb::FieldCodec.ForInt32(8), pb::FieldCodec.ForClassWrapper(18), 138); + private readonly pbc::MapField stringField_ = new pbc::MapField(); + public pbc::MapField StringField { + get { return stringField_; } + } + + /// Field number for the "bytes_field" field. + public const int BytesFieldFieldNumber = 18; + private static readonly pbc::MapField.Codec _map_bytesField_codec + = new pbc::MapField.Codec(pb::FieldCodec.ForInt32(8), pb::FieldCodec.ForClassWrapper(18), 146); + private readonly pbc::MapField bytesField_ = new pbc::MapField(); + public pbc::MapField BytesField { + get { return bytesField_; } + } + + public override bool Equals(object other) { + return Equals(other as MapWellKnownTypes); + } + + public bool Equals(MapWellKnownTypes other) { + if (ReferenceEquals(other, null)) { + return false; + } + if (ReferenceEquals(other, this)) { + return true; + } + if (!AnyField.Equals(other.AnyField)) return false; + if (!ApiField.Equals(other.ApiField)) return false; + if (!DurationField.Equals(other.DurationField)) return false; + if (!EmptyField.Equals(other.EmptyField)) return false; + if (!FieldMaskField.Equals(other.FieldMaskField)) return false; + if (!SourceContextField.Equals(other.SourceContextField)) return false; + if (!StructField.Equals(other.StructField)) return false; + if (!TimestampField.Equals(other.TimestampField)) return false; + if (!TypeField.Equals(other.TypeField)) return false; + if (!DoubleField.Equals(other.DoubleField)) return false; + if (!FloatField.Equals(other.FloatField)) return false; + if (!Int64Field.Equals(other.Int64Field)) return false; + if (!Uint64Field.Equals(other.Uint64Field)) return false; + if (!Int32Field.Equals(other.Int32Field)) return false; + if (!Uint32Field.Equals(other.Uint32Field)) return false; + if (!BoolField.Equals(other.BoolField)) return false; + if (!StringField.Equals(other.StringField)) return false; + if (!BytesField.Equals(other.BytesField)) return false; + return true; + } + + public override int GetHashCode() { + int hash = 1; + hash ^= AnyField.GetHashCode(); + hash ^= ApiField.GetHashCode(); + hash ^= DurationField.GetHashCode(); + hash ^= EmptyField.GetHashCode(); + hash ^= FieldMaskField.GetHashCode(); + hash ^= SourceContextField.GetHashCode(); + hash ^= StructField.GetHashCode(); + hash ^= TimestampField.GetHashCode(); + hash ^= TypeField.GetHashCode(); + hash ^= DoubleField.GetHashCode(); + hash ^= FloatField.GetHashCode(); + hash ^= Int64Field.GetHashCode(); + hash ^= Uint64Field.GetHashCode(); + hash ^= Int32Field.GetHashCode(); + hash ^= Uint32Field.GetHashCode(); + hash ^= BoolField.GetHashCode(); + hash ^= StringField.GetHashCode(); + hash ^= BytesField.GetHashCode(); + return hash; + } + + public override string ToString() { + return pb::JsonFormatter.ToDiagnosticString(this); + } + + public void WriteTo(pb::CodedOutputStream output) { + anyField_.WriteTo(output, _map_anyField_codec); + apiField_.WriteTo(output, _map_apiField_codec); + durationField_.WriteTo(output, _map_durationField_codec); + emptyField_.WriteTo(output, _map_emptyField_codec); + fieldMaskField_.WriteTo(output, _map_fieldMaskField_codec); + sourceContextField_.WriteTo(output, _map_sourceContextField_codec); + structField_.WriteTo(output, _map_structField_codec); + timestampField_.WriteTo(output, _map_timestampField_codec); + typeField_.WriteTo(output, _map_typeField_codec); + 
doubleField_.WriteTo(output, _map_doubleField_codec); + floatField_.WriteTo(output, _map_floatField_codec); + int64Field_.WriteTo(output, _map_int64Field_codec); + uint64Field_.WriteTo(output, _map_uint64Field_codec); + int32Field_.WriteTo(output, _map_int32Field_codec); + uint32Field_.WriteTo(output, _map_uint32Field_codec); + boolField_.WriteTo(output, _map_boolField_codec); + stringField_.WriteTo(output, _map_stringField_codec); + bytesField_.WriteTo(output, _map_bytesField_codec); + } + + public int CalculateSize() { + int size = 0; + size += anyField_.CalculateSize(_map_anyField_codec); + size += apiField_.CalculateSize(_map_apiField_codec); + size += durationField_.CalculateSize(_map_durationField_codec); + size += emptyField_.CalculateSize(_map_emptyField_codec); + size += fieldMaskField_.CalculateSize(_map_fieldMaskField_codec); + size += sourceContextField_.CalculateSize(_map_sourceContextField_codec); + size += structField_.CalculateSize(_map_structField_codec); + size += timestampField_.CalculateSize(_map_timestampField_codec); + size += typeField_.CalculateSize(_map_typeField_codec); + size += doubleField_.CalculateSize(_map_doubleField_codec); + size += floatField_.CalculateSize(_map_floatField_codec); + size += int64Field_.CalculateSize(_map_int64Field_codec); + size += uint64Field_.CalculateSize(_map_uint64Field_codec); + size += int32Field_.CalculateSize(_map_int32Field_codec); + size += uint32Field_.CalculateSize(_map_uint32Field_codec); + size += boolField_.CalculateSize(_map_boolField_codec); + size += stringField_.CalculateSize(_map_stringField_codec); + size += bytesField_.CalculateSize(_map_bytesField_codec); + return size; + } + + public void MergeFrom(MapWellKnownTypes other) { + if (other == null) { + return; + } + anyField_.Add(other.anyField_); + apiField_.Add(other.apiField_); + durationField_.Add(other.durationField_); + emptyField_.Add(other.emptyField_); + fieldMaskField_.Add(other.fieldMaskField_); + sourceContextField_.Add(other.sourceContextField_); + structField_.Add(other.structField_); + timestampField_.Add(other.timestampField_); + typeField_.Add(other.typeField_); + doubleField_.Add(other.doubleField_); + floatField_.Add(other.floatField_); + int64Field_.Add(other.int64Field_); + uint64Field_.Add(other.uint64Field_); + int32Field_.Add(other.int32Field_); + uint32Field_.Add(other.uint32Field_); + boolField_.Add(other.boolField_); + stringField_.Add(other.stringField_); + bytesField_.Add(other.bytesField_); + } + + public void MergeFrom(pb::CodedInputStream input) { + uint tag; + while ((tag = input.ReadTag()) != 0) { + switch(tag) { + default: + input.SkipLastField(); + break; + case 10: { + anyField_.AddEntriesFrom(input, _map_anyField_codec); + break; + } + case 18: { + apiField_.AddEntriesFrom(input, _map_apiField_codec); + break; + } + case 26: { + durationField_.AddEntriesFrom(input, _map_durationField_codec); + break; + } + case 34: { + emptyField_.AddEntriesFrom(input, _map_emptyField_codec); + break; + } + case 42: { + fieldMaskField_.AddEntriesFrom(input, _map_fieldMaskField_codec); + break; + } + case 50: { + sourceContextField_.AddEntriesFrom(input, _map_sourceContextField_codec); + break; + } + case 58: { + structField_.AddEntriesFrom(input, _map_structField_codec); + break; + } + case 66: { + timestampField_.AddEntriesFrom(input, _map_timestampField_codec); + break; + } + case 74: { + typeField_.AddEntriesFrom(input, _map_typeField_codec); + break; + } + case 82: { + doubleField_.AddEntriesFrom(input, _map_doubleField_codec); + break; + } 
+ case 90: { + floatField_.AddEntriesFrom(input, _map_floatField_codec); + break; + } + case 98: { + int64Field_.AddEntriesFrom(input, _map_int64Field_codec); + break; + } + case 106: { + uint64Field_.AddEntriesFrom(input, _map_uint64Field_codec); + break; + } + case 114: { + int32Field_.AddEntriesFrom(input, _map_int32Field_codec); + break; + } + case 122: { + uint32Field_.AddEntriesFrom(input, _map_uint32Field_codec); + break; + } + case 130: { + boolField_.AddEntriesFrom(input, _map_boolField_codec); + break; + } + case 138: { + stringField_.AddEntriesFrom(input, _map_stringField_codec); + break; + } + case 146: { + bytesField_.AddEntriesFrom(input, _map_bytesField_codec); + break; + } + } + } + } + + } + + #endregion + +} + +#endregion Designer generated code diff --git a/packager/third_party/protobuf/csharp/src/Google.Protobuf.Test/WellKnownTypes/AnyTest.cs b/packager/third_party/protobuf/csharp/src/Google.Protobuf.Test/WellKnownTypes/AnyTest.cs new file mode 100644 index 0000000000..f21be7d9b9 --- /dev/null +++ b/packager/third_party/protobuf/csharp/src/Google.Protobuf.Test/WellKnownTypes/AnyTest.cs @@ -0,0 +1,116 @@ +#region Copyright notice and license +// Protocol Buffers - Google's data interchange format +// Copyright 2015 Google Inc. All rights reserved. +// https://developers.google.com/protocol-buffers/ +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
+#endregion + +using Google.Protobuf.TestProtos; +using NUnit.Framework; + +namespace Google.Protobuf.WellKnownTypes +{ + public class AnyTest + { + [Test] + public void Pack() + { + var message = SampleMessages.CreateFullTestAllTypes(); + var any = Any.Pack(message); + Assert.AreEqual("type.googleapis.com/protobuf_unittest.TestAllTypes", any.TypeUrl); + Assert.AreEqual(message.CalculateSize(), any.Value.Length); + } + + [Test] + public void Pack_WithCustomPrefix() + { + var message = SampleMessages.CreateFullTestAllTypes(); + var any = Any.Pack(message, "foo.bar/baz"); + Assert.AreEqual("foo.bar/baz/protobuf_unittest.TestAllTypes", any.TypeUrl); + Assert.AreEqual(message.CalculateSize(), any.Value.Length); + } + + [Test] + public void Pack_WithCustomPrefixTrailingSlash() + { + var message = SampleMessages.CreateFullTestAllTypes(); + var any = Any.Pack(message, "foo.bar/baz/"); + Assert.AreEqual("foo.bar/baz/protobuf_unittest.TestAllTypes", any.TypeUrl); + Assert.AreEqual(message.CalculateSize(), any.Value.Length); + } + + [Test] + public void Unpack_WrongType() + { + var message = SampleMessages.CreateFullTestAllTypes(); + var any = Any.Pack(message); + Assert.Throws(() => any.Unpack()); + } + + [Test] + public void Unpack_Success() + { + var message = SampleMessages.CreateFullTestAllTypes(); + var any = Any.Pack(message); + var unpacked = any.Unpack(); + Assert.AreEqual(message, unpacked); + } + + [Test] + public void Unpack_CustomPrefix_Success() + { + var message = SampleMessages.CreateFullTestAllTypes(); + var any = Any.Pack(message, "foo.bar/baz"); + var unpacked = any.Unpack(); + Assert.AreEqual(message, unpacked); + } + + [Test] + public void ToString_WithValues() + { + var message = SampleMessages.CreateFullTestAllTypes(); + var any = Any.Pack(message); + var text = any.ToString(); + Assert.That(text, Is.StringContaining("\"@value\": \"" + message.ToByteString().ToBase64() + "\"")); + } + + [Test] + public void ToString_Empty() + { + var any = new Any(); + Assert.AreEqual("{ \"@type\": \"\", \"@value\": \"\" }", any.ToString()); + } + + [Test] + public void ToString_MessageContainingAny() + { + var message = new TestWellKnownTypes { AnyField = new Any() }; + Assert.AreEqual("{ \"anyField\": { \"@type\": \"\", \"@value\": \"\" } }", message.ToString()); + } + } +} diff --git a/packager/third_party/protobuf/csharp/src/Google.Protobuf.Test/WellKnownTypes/DurationTest.cs b/packager/third_party/protobuf/csharp/src/Google.Protobuf.Test/WellKnownTypes/DurationTest.cs new file mode 100644 index 0000000000..141faf80e0 --- /dev/null +++ b/packager/third_party/protobuf/csharp/src/Google.Protobuf.Test/WellKnownTypes/DurationTest.cs @@ -0,0 +1,132 @@ +#region Copyright notice and license +// Protocol Buffers - Google's data interchange format +// Copyright 2015 Google Inc. All rights reserved. +// https://developers.google.com/protocol-buffers/ +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. 
nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +#endregion + +using NUnit.Framework; +using System; + +namespace Google.Protobuf.WellKnownTypes +{ + public class DurationTest + { + [Test] + public void ToTimeSpan() + { + Assert.AreEqual(TimeSpan.FromSeconds(1), new Duration { Seconds = 1 }.ToTimeSpan()); + Assert.AreEqual(TimeSpan.FromSeconds(-1), new Duration { Seconds = -1 }.ToTimeSpan()); + Assert.AreEqual(TimeSpan.FromMilliseconds(1), new Duration { Nanos = 1000000 }.ToTimeSpan()); + Assert.AreEqual(TimeSpan.FromMilliseconds(-1), new Duration { Nanos = -1000000 }.ToTimeSpan()); + Assert.AreEqual(TimeSpan.FromTicks(1), new Duration { Nanos = 100 }.ToTimeSpan()); + Assert.AreEqual(TimeSpan.FromTicks(-1), new Duration { Nanos = -100 }.ToTimeSpan()); + + // Rounding is towards 0 + Assert.AreEqual(TimeSpan.FromTicks(2), new Duration { Nanos = 250 }.ToTimeSpan()); + Assert.AreEqual(TimeSpan.FromTicks(-2), new Duration { Nanos = -250 }.ToTimeSpan()); + } + + [Test] + public void Addition() + { + Assert.AreEqual(new Duration { Seconds = 2, Nanos = 100000000 }, + new Duration { Seconds = 1, Nanos = 600000000 } + new Duration { Nanos = 500000000 }); + Assert.AreEqual(new Duration { Seconds = -2, Nanos = -100000000 }, + new Duration { Seconds = -1, Nanos = -600000000 } + new Duration { Nanos = -500000000 }); + Assert.AreEqual(new Duration { Seconds = 1, Nanos = 100000000 }, + new Duration { Seconds = 1, Nanos = 600000000 } + new Duration { Nanos = -500000000 }); + + // Non-normalized durations, or non-normalized intermediate results + Assert.AreEqual(new Duration { Seconds = 1 }, + new Duration { Seconds = 1, Nanos = -500000000 } + new Duration { Nanos = 500000000 }); + + Assert.AreEqual(new Duration { Nanos = -900000000 }, + new Duration { Seconds = -1, Nanos = -100000000 } + new Duration { Nanos = 200000000 }); + Assert.AreEqual(new Duration { Nanos = 900000000 }, + new Duration { Seconds = 1, Nanos = 100000000 } + new Duration { Nanos = -200000000 }); + } + + [Test] + public void Subtraction() + { + Assert.AreEqual(new Duration { Seconds = 1, Nanos = 100000000 }, + new Duration { Seconds = 1, Nanos = 600000000 } - new Duration { Nanos = 500000000 }); + Assert.AreEqual(new Duration { Seconds = -1, Nanos = -100000000 }, + new Duration { Seconds = -1, Nanos = -600000000 } - new Duration { Nanos = -500000000 }); + Assert.AreEqual(new Duration { Seconds = 2, Nanos = 100000000 }, + new Duration { Seconds = 1, Nanos = 600000000 } - new Duration { Nanos = -500000000 }); + + // Non-normalized durations + Assert.AreEqual(new Duration(), + new Duration { Seconds = 1, Nanos = -500000000 } - new Duration { Nanos 
= 500000000 }); + Assert.AreEqual(new Duration { Seconds = 1 }, + new Duration { Nanos = 2000000000 } - new Duration { Nanos = 1000000000 }); + } + + [Test] + public void FromTimeSpan() + { + Assert.AreEqual(new Duration { Seconds = 1 }, Duration.FromTimeSpan(TimeSpan.FromSeconds(1))); + Assert.AreEqual(new Duration { Nanos = Duration.NanosecondsPerTick }, Duration.FromTimeSpan(TimeSpan.FromTicks(1))); + } + + [Test] + [TestCase(0, Duration.MaxNanoseconds + 1)] + [TestCase(0, Duration.MinNanoseconds - 1)] + [TestCase(Duration.MinSeconds - 1, 0)] + [TestCase(Duration.MaxSeconds + 1, 0)] + [TestCase(1, -1)] + [TestCase(-1, 1)] + public void ToTimeSpan_Invalid(long seconds, int nanoseconds) + { + var duration = new Duration { Seconds = seconds, Nanos = nanoseconds }; + Assert.Throws(() => duration.ToTimeSpan()); + } + + [Test] + [TestCase(0, Duration.MaxNanoseconds)] + [TestCase(0, Duration.MinNanoseconds)] + [TestCase(Duration.MinSeconds, Duration.MinNanoseconds)] + [TestCase(Duration.MaxSeconds, Duration.MaxNanoseconds)] + public void ToTimeSpan_Valid(long seconds, int nanoseconds) + { + // Only testing that these values don't throw, unlike their similar tests in ToTimeSpan_Invalid + var duration = new Duration { Seconds = seconds, Nanos = nanoseconds }; + duration.ToTimeSpan(); + } + + [Test] + public void ToString_NonNormalized() + { + // Just a single example should be sufficient... + var duration = new Duration { Seconds = 1, Nanos = -1 }; + Assert.AreEqual("{ \"@warning\": \"Invalid Duration\", \"seconds\": \"1\", \"nanos\": -1 }", duration.ToString()); + } + } +} diff --git a/packager/third_party/protobuf/csharp/src/Google.Protobuf.Test/WellKnownTypes/FieldMaskTest.cs b/packager/third_party/protobuf/csharp/src/Google.Protobuf.Test/WellKnownTypes/FieldMaskTest.cs new file mode 100644 index 0000000000..89bc82759c --- /dev/null +++ b/packager/third_party/protobuf/csharp/src/Google.Protobuf.Test/WellKnownTypes/FieldMaskTest.cs @@ -0,0 +1,62 @@ +#region Copyright notice and license +// Protocol Buffers - Google's data interchange format +// Copyright 2016 Google Inc. All rights reserved. +// https://developers.google.com/protocol-buffers/ +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +#endregion + + +using NUnit.Framework; + +namespace Google.Protobuf.WellKnownTypes +{ + public class FieldMaskTest + { + [Test] + [TestCase("foo__bar")] + [TestCase("foo_3_ar")] + [TestCase("fooBar")] + public void ToString_Invalid(string input) + { + var mask = new FieldMask { Paths = { input } }; + var text = mask.ToString(); + // More specific test below + Assert.That(text, Is.StringContaining("@warning")); + Assert.That(text, Is.StringContaining(input)); + } + + [Test] + public void ToString_Invalid_Precise() + { + var mask = new FieldMask { Paths = { "x", "foo__bar", @"x\y" } }; + Assert.AreEqual( + "{ \"@warning\": \"Invalid FieldMask\", \"paths\": [ \"x\", \"foo__bar\", \"x\\\\y\" ] }", + mask.ToString()); + } + } +} diff --git a/packager/third_party/protobuf/csharp/src/Google.Protobuf.Test/WellKnownTypes/TimestampTest.cs b/packager/third_party/protobuf/csharp/src/Google.Protobuf.Test/WellKnownTypes/TimestampTest.cs new file mode 100644 index 0000000000..9ecd24c62a --- /dev/null +++ b/packager/third_party/protobuf/csharp/src/Google.Protobuf.Test/WellKnownTypes/TimestampTest.cs @@ -0,0 +1,115 @@ +#region Copyright notice and license +// Protocol Buffers - Google's data interchange format +// Copyright 2015 Google Inc. All rights reserved. +// https://developers.google.com/protocol-buffers/ +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
+#endregion + +using NUnit.Framework; +using System; + +namespace Google.Protobuf.WellKnownTypes +{ + public class TimestampTest + { + [Test] + public void FromAndToDateTime() + { + DateTime utcMin = DateTime.SpecifyKind(DateTime.MinValue, DateTimeKind.Utc); + DateTime utcMax = DateTime.SpecifyKind(DateTime.MaxValue, DateTimeKind.Utc); + AssertRoundtrip(new Timestamp { Seconds = -62135596800 }, utcMin); + AssertRoundtrip(new Timestamp { Seconds = 253402300799, Nanos = 999999900 }, utcMax); + AssertRoundtrip(new Timestamp(), new DateTime(1970, 1, 1, 0, 0, 0, DateTimeKind.Utc)); + AssertRoundtrip(new Timestamp { Nanos = 1000000}, new DateTime(1970, 1, 1, 0, 0, 0, 1, DateTimeKind.Utc)); + AssertRoundtrip(new Timestamp { Seconds = -1, Nanos = 999000000 }, new DateTime(1969, 12, 31, 23, 59, 59, 999, DateTimeKind.Utc)); + AssertRoundtrip(new Timestamp { Seconds = 3600 }, new DateTime(1970, 1, 1, 1, 0, 0, DateTimeKind.Utc)); + AssertRoundtrip(new Timestamp { Seconds = -3600 }, new DateTime(1969, 12, 31, 23, 0, 0, DateTimeKind.Utc)); + } + + [Test] + public void ToDateTimeTruncation() + { + var t1 = new Timestamp { Seconds = 1, Nanos = 1000000 + Duration.NanosecondsPerTick - 1 }; + Assert.AreEqual(new DateTime(1970, 1, 1, 0, 0, 1, DateTimeKind.Utc).AddMilliseconds(1), t1.ToDateTime()); + + var t2 = new Timestamp { Seconds = -1, Nanos = 1000000 + Duration.NanosecondsPerTick - 1 }; + Assert.AreEqual(new DateTime(1969, 12, 31, 23, 59, 59).AddMilliseconds(1), t2.ToDateTime()); + } + + [Test] + [TestCase(Timestamp.UnixSecondsAtBclMinValue - 1, Timestamp.MaxNanos)] + [TestCase(Timestamp.UnixSecondsAtBclMaxValue + 1, 0)] + [TestCase(0, -1)] + [TestCase(0, Timestamp.MaxNanos + 1)] + public void ToDateTime_OutOfRange(long seconds, int nanoseconds) + { + var value = new Timestamp { Seconds = seconds, Nanos = nanoseconds }; + Assert.Throws(() => value.ToDateTime()); + } + + // 1ns larger or smaller than the above values + [Test] + [TestCase(Timestamp.UnixSecondsAtBclMinValue, 0)] + [TestCase(Timestamp.UnixSecondsAtBclMaxValue, Timestamp.MaxNanos)] + [TestCase(0, 0)] + [TestCase(0, Timestamp.MaxNanos)] + public void ToDateTime_ValidBoundaries(long seconds, int nanoseconds) + { + var value = new Timestamp { Seconds = seconds, Nanos = nanoseconds }; + value.ToDateTime(); + } + + private static void AssertRoundtrip(Timestamp timestamp, DateTime dateTime) + { + Assert.AreEqual(timestamp, Timestamp.FromDateTime(dateTime)); + Assert.AreEqual(dateTime, timestamp.ToDateTime()); + Assert.AreEqual(DateTimeKind.Utc, timestamp.ToDateTime().Kind); + } + + [Test] + public void Arithmetic() + { + Timestamp t1 = new Timestamp { Seconds = 10000, Nanos = 5000 }; + Timestamp t2 = new Timestamp { Seconds = 8000, Nanos = 10000 }; + Duration difference = new Duration { Seconds = 1999, Nanos = Duration.NanosecondsPerSecond - 5000 }; + Assert.AreEqual(difference, t1 - t2); + Assert.AreEqual(-difference, t2 - t1); + + Assert.AreEqual(t1, t2 + difference); + Assert.AreEqual(t2, t1 - difference); + } + + [Test] + public void ToString_NonNormalized() + { + // Just a single example should be sufficient... 
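+            // Background sketch: a normalized Timestamp keeps Nanos in the range
+            // [0, 999999999], so a negative Nanos value like the one used here is
+            // deliberately non-normalized, and formatting falls back to the "@warning"
+            // form asserted below instead of throwing. One nanosecond before the Unix
+            // epoch, for instance, would normally be written as
+            //     new Timestamp { Seconds = -1, Nanos = 999999999 }
+            // rather than { Seconds = 0, Nanos = -1 }.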
+ var duration = new Timestamp { Seconds = 1, Nanos = -1 }; + Assert.AreEqual("{ \"@warning\": \"Invalid Timestamp\", \"seconds\": \"1\", \"nanos\": -1 }", duration.ToString()); + } + } +} diff --git a/packager/third_party/protobuf/csharp/src/Google.Protobuf.Test/WellKnownTypes/WrappersTest.cs b/packager/third_party/protobuf/csharp/src/Google.Protobuf.Test/WellKnownTypes/WrappersTest.cs new file mode 100644 index 0000000000..5b7185dcd2 --- /dev/null +++ b/packager/third_party/protobuf/csharp/src/Google.Protobuf.Test/WellKnownTypes/WrappersTest.cs @@ -0,0 +1,421 @@ +#region Copyright notice and license +// Protocol Buffers - Google's data interchange format +// Copyright 2015 Google Inc. All rights reserved. +// https://developers.google.com/protocol-buffers/ +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
+#endregion + +using System; +using Google.Protobuf.TestProtos; +using NUnit.Framework; +using System.Collections; +using System.IO; + +namespace Google.Protobuf.WellKnownTypes +{ + public class WrappersTest + { + [Test] + public void NullIsDefault() + { + var message = new TestWellKnownTypes(); + Assert.IsNull(message.StringField); + Assert.IsNull(message.BytesField); + Assert.IsNull(message.BoolField); + Assert.IsNull(message.FloatField); + Assert.IsNull(message.DoubleField); + Assert.IsNull(message.Int32Field); + Assert.IsNull(message.Int64Field); + Assert.IsNull(message.Uint32Field); + Assert.IsNull(message.Uint64Field); + } + + [Test] + public void NonDefaultSingleValues() + { + var message = new TestWellKnownTypes + { + StringField = "x", + BytesField = ByteString.CopyFrom(1, 2, 3), + BoolField = true, + FloatField = 12.5f, + DoubleField = 12.25d, + Int32Field = 1, + Int64Field = 2, + Uint32Field = 3, + Uint64Field = 4 + }; + + var bytes = message.ToByteArray(); + var parsed = TestWellKnownTypes.Parser.ParseFrom(bytes); + + Assert.AreEqual("x", parsed.StringField); + Assert.AreEqual(ByteString.CopyFrom(1, 2, 3), parsed.BytesField); + Assert.AreEqual(true, parsed.BoolField); + Assert.AreEqual(12.5f, parsed.FloatField); + Assert.AreEqual(12.25d, parsed.DoubleField); + Assert.AreEqual(1, parsed.Int32Field); + Assert.AreEqual(2L, parsed.Int64Field); + Assert.AreEqual(3U, parsed.Uint32Field); + Assert.AreEqual(4UL, parsed.Uint64Field); + } + + [Test] + public void NonNullDefaultIsPreservedThroughSerialization() + { + var message = new TestWellKnownTypes + { + StringField = "", + BytesField = ByteString.Empty, + BoolField = false, + FloatField = 0f, + DoubleField = 0d, + Int32Field = 0, + Int64Field = 0, + Uint32Field = 0, + Uint64Field = 0 + }; + + var bytes = message.ToByteArray(); + var parsed = TestWellKnownTypes.Parser.ParseFrom(bytes); + + Assert.AreEqual("", parsed.StringField); + Assert.AreEqual(ByteString.Empty, parsed.BytesField); + Assert.AreEqual(false, parsed.BoolField); + Assert.AreEqual(0f, parsed.FloatField); + Assert.AreEqual(0d, parsed.DoubleField); + Assert.AreEqual(0, parsed.Int32Field); + Assert.AreEqual(0L, parsed.Int64Field); + Assert.AreEqual(0U, parsed.Uint32Field); + Assert.AreEqual(0UL, parsed.Uint64Field); + } + + [Test] + public void RepeatedWrappersProhibitNullItems() + { + var message = new RepeatedWellKnownTypes(); + Assert.Throws(() => message.BoolField.Add((bool?) null)); + Assert.Throws(() => message.Int32Field.Add((int?) null)); + Assert.Throws(() => message.StringField.Add((string) null)); + Assert.Throws(() => message.BytesField.Add((ByteString) null)); + } + + [Test] + public void RepeatedWrappersSerializeDeserialize() + { + var message = new RepeatedWellKnownTypes + { + BoolField = { true, false }, + BytesField = { ByteString.CopyFrom(1, 2, 3), ByteString.CopyFrom(4, 5, 6), ByteString.Empty }, + DoubleField = { 12.5, -1.5, 0d }, + FloatField = { 123.25f, -20f, 0f }, + Int32Field = { int.MaxValue, int.MinValue, 0 }, + Int64Field = { long.MaxValue, long.MinValue, 0L }, + StringField = { "First", "Second", "" }, + Uint32Field = { uint.MaxValue, uint.MinValue, 0U }, + Uint64Field = { ulong.MaxValue, ulong.MinValue, 0UL }, + }; + var bytes = message.ToByteArray(); + var parsed = RepeatedWellKnownTypes.Parser.ParseFrom(bytes); + + Assert.AreEqual(message, parsed); + // Just to test a single value for sanity... 
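+            // Indexing a repeated wrapper field yields the unwrapped CLR value directly
+            // (string here, int? for Int32Field, and so on - see
+            // RepeatedWrappersProhibitNullItems above), so for example
+            //     int? third = message.Int32Field[2];   // 0, not an Int32Value message
+            // and the assertion below compares plain strings.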
+ Assert.AreEqual("Second", message.StringField[1]); + } + + [Test] + public void RepeatedWrappersBinaryFormat() + { + // At one point we accidentally used a packed format for repeated wrappers, which is wrong (and weird). + // This test is just to prove that we use the right format. + + var rawOutput = new MemoryStream(); + var output = new CodedOutputStream(rawOutput); + // Write a value of 5 + output.WriteTag(RepeatedWellKnownTypes.Int32FieldFieldNumber, WireFormat.WireType.LengthDelimited); + output.WriteLength(2); + output.WriteTag(WrappersReflection.WrapperValueFieldNumber, WireFormat.WireType.Varint); + output.WriteInt32(5); + // Write a value of 0 (empty message) + output.WriteTag(RepeatedWellKnownTypes.Int32FieldFieldNumber, WireFormat.WireType.LengthDelimited); + output.WriteLength(0); + output.Flush(); + var expectedBytes = rawOutput.ToArray(); + + var message = new RepeatedWellKnownTypes { Int32Field = { 5, 0 } }; + var actualBytes = message.ToByteArray(); + Assert.AreEqual(expectedBytes, actualBytes); + } + + [Test] + public void MapWrappersSerializeDeserialize() + { + // Note: no null values here, as they are prohibited in map fields + // (despite being representable). + var message = new MapWellKnownTypes + { + BoolField = { { 10, false }, { 20, true } }, + BytesField = { + { -1, ByteString.CopyFrom(1, 2, 3) }, + { 10, ByteString.CopyFrom(4, 5, 6) }, + { 1000, ByteString.Empty }, + }, + DoubleField = { { 1, 12.5 }, { 10, -1.5 }, { 20, 0d } }, + FloatField = { { 2, 123.25f }, { 3, -20f }, { 4, 0f } }, + Int32Field = { { 5, int.MaxValue }, { 6, int.MinValue }, { 7, 0 } }, + Int64Field = { { 8, long.MaxValue }, { 9, long.MinValue }, { 10, 0L } }, + StringField = { { 11, "First" }, { 12, "Second" }, { 13, "" } }, + Uint32Field = { { 15, uint.MaxValue }, { 16, uint.MinValue }, { 17, 0U } }, + Uint64Field = { { 18, ulong.MaxValue }, { 19, ulong.MinValue }, { 20, 0UL } }, + }; + + var bytes = message.ToByteArray(); + var parsed = MapWellKnownTypes.Parser.ParseFrom(bytes); + + Assert.AreEqual(message, parsed); + // Just to test a single value for sanity... + Assert.AreEqual("Second", message.StringField[12]); + } + + [Test] + public void Reflection_SingleValues() + { + var message = new TestWellKnownTypes + { + StringField = "x", + BytesField = ByteString.CopyFrom(1, 2, 3), + BoolField = true, + FloatField = 12.5f, + DoubleField = 12.25d, + Int32Field = 1, + Int64Field = 2, + Uint32Field = 3, + Uint64Field = 4 + }; + var fields = TestWellKnownTypes.Descriptor.Fields; + + Assert.AreEqual("x", fields[TestWellKnownTypes.StringFieldFieldNumber].Accessor.GetValue(message)); + Assert.AreEqual(ByteString.CopyFrom(1, 2, 3), fields[TestWellKnownTypes.BytesFieldFieldNumber].Accessor.GetValue(message)); + Assert.AreEqual(true, fields[TestWellKnownTypes.BoolFieldFieldNumber].Accessor.GetValue(message)); + Assert.AreEqual(12.5f, fields[TestWellKnownTypes.FloatFieldFieldNumber].Accessor.GetValue(message)); + Assert.AreEqual(12.25d, fields[TestWellKnownTypes.DoubleFieldFieldNumber].Accessor.GetValue(message)); + Assert.AreEqual(1, fields[TestWellKnownTypes.Int32FieldFieldNumber].Accessor.GetValue(message)); + Assert.AreEqual(2L, fields[TestWellKnownTypes.Int64FieldFieldNumber].Accessor.GetValue(message)); + Assert.AreEqual(3U, fields[TestWellKnownTypes.Uint32FieldFieldNumber].Accessor.GetValue(message)); + Assert.AreEqual(4UL, fields[TestWellKnownTypes.Uint64FieldFieldNumber].Accessor.GetValue(message)); + + // And a couple of null fields... 
+ message.StringField = null; + message.FloatField = null; + Assert.IsNull(fields[TestWellKnownTypes.StringFieldFieldNumber].Accessor.GetValue(message)); + Assert.IsNull(fields[TestWellKnownTypes.FloatFieldFieldNumber].Accessor.GetValue(message)); + } + + [Test] + public void Reflection_RepeatedFields() + { + // Just a single example... note that we can't have a null value here + var message = new RepeatedWellKnownTypes { Int32Field = { 1, 2 } }; + var fields = RepeatedWellKnownTypes.Descriptor.Fields; + var list = (IList) fields[RepeatedWellKnownTypes.Int32FieldFieldNumber].Accessor.GetValue(message); + CollectionAssert.AreEqual(new[] { 1, 2 }, list); + } + + [Test] + public void Reflection_MapFields() + { + // Just a single example... note that we can't have a null value here despite the value type being int? + var message = new MapWellKnownTypes { Int32Field = { { 1, 2 } } }; + var fields = MapWellKnownTypes.Descriptor.Fields; + var dictionary = (IDictionary) fields[MapWellKnownTypes.Int32FieldFieldNumber].Accessor.GetValue(message); + Assert.AreEqual(2, dictionary[1]); + } + + [Test] + public void Oneof() + { + var message = new OneofWellKnownTypes { EmptyField = new Empty() }; + // Start off with a non-wrapper + Assert.AreEqual(OneofWellKnownTypes.OneofFieldOneofCase.EmptyField, message.OneofFieldCase); + AssertOneofRoundTrip(message); + + message.StringField = "foo"; + Assert.AreEqual(OneofWellKnownTypes.OneofFieldOneofCase.StringField, message.OneofFieldCase); + AssertOneofRoundTrip(message); + + message.StringField = "foo"; + Assert.AreEqual(OneofWellKnownTypes.OneofFieldOneofCase.StringField, message.OneofFieldCase); + AssertOneofRoundTrip(message); + + message.DoubleField = 0.0f; + Assert.AreEqual(OneofWellKnownTypes.OneofFieldOneofCase.DoubleField, message.OneofFieldCase); + AssertOneofRoundTrip(message); + + message.DoubleField = 1.0f; + Assert.AreEqual(OneofWellKnownTypes.OneofFieldOneofCase.DoubleField, message.OneofFieldCase); + AssertOneofRoundTrip(message); + + message.ClearOneofField(); + Assert.AreEqual(OneofWellKnownTypes.OneofFieldOneofCase.None, message.OneofFieldCase); + AssertOneofRoundTrip(message); + } + + private void AssertOneofRoundTrip(OneofWellKnownTypes message) + { + // Normal roundtrip, but explicitly checking the case... + var bytes = message.ToByteArray(); + var parsed = OneofWellKnownTypes.Parser.ParseFrom(bytes); + Assert.AreEqual(message, parsed); + Assert.AreEqual(message.OneofFieldCase, parsed.OneofFieldCase); + } + + [Test] + [TestCase("x", "y", "y")] + [TestCase("x", "", "x")] + [TestCase("x", null, "x")] + [TestCase("", "y", "y")] + [TestCase("", "", "")] + [TestCase("", null, "")] + [TestCase(null, "y", "y")] + [TestCase(null, "", "")] + [TestCase(null, null, null)] + public void Merging(string original, string merged, string expected) + { + var originalMessage = new TestWellKnownTypes { StringField = original }; + var mergingMessage = new TestWellKnownTypes { StringField = merged }; + originalMessage.MergeFrom(mergingMessage); + Assert.AreEqual(expected, originalMessage.StringField); + + // Try it using MergeFrom(CodedInputStream) too... + originalMessage = new TestWellKnownTypes { StringField = original }; + originalMessage.MergeFrom(mergingMessage.ToByteArray()); + Assert.AreEqual(expected, originalMessage.StringField); + } + + // Merging is odd with wrapper types, due to the way that default values aren't emitted in + // the binary stream. 
In fact we cheat a little bit - a message with an explicitly present default + // value will have that default value ignored. See issue 615. Fixing this would require significant upheaval to + // the FieldCodec side of things. + [Test] + public void MergingStreamExplicitValue() + { + var message = new TestWellKnownTypes { Int32Field = 5 }; + + // Create a byte array which has the data of an Int32Value explicitly containing a value of 0. + // This wouldn't normally happen. + byte[] bytes; + var wrapperTag = WireFormat.MakeTag(TestWellKnownTypes.Int32FieldFieldNumber, WireFormat.WireType.LengthDelimited); + var valueTag = WireFormat.MakeTag(Int32Value.ValueFieldNumber, WireFormat.WireType.Varint); + using (var stream = new MemoryStream()) + { + var coded = new CodedOutputStream(stream); + coded.WriteTag(wrapperTag); + coded.WriteLength(2); // valueTag + a value 0, each one byte + coded.WriteTag(valueTag); + coded.WriteInt32(0); + coded.Flush(); + bytes = stream.ToArray(); + } + + message.MergeFrom(bytes); + // A normal implementation would have 0 now, as the explicit default would have been overwritten the 5. + // With the FieldCodec for Nullable, we can't tell the difference between an implicit 0 and an explicit 0. + Assert.AreEqual(5, message.Int32Field); + } + + [Test] + public void MergingStreamNoValue() + { + var message = new TestWellKnownTypes { Int32Field = 5 }; + + // Create a byte array which an Int32 field, but with no value. + var bytes = new TestWellKnownTypes { Int32Field = 0 }.ToByteArray(); + Assert.AreEqual(2, bytes.Length); // The tag for Int32Field is a single byte, then a byte indicating a 0-length message. + message.MergeFrom(bytes); + + // The "implicit" 0 did *not* overwrite the value. + // (This is the correct behaviour.) + Assert.AreEqual(5, message.Int32Field); + } + + // All permutations of origin/merging value being null, zero (default) or non-default. + // As this is the in-memory version, we don't need to worry about the difference between implicit and explicit 0. + [Test] + [TestCase(null, null, null)] + [TestCase(null, 0, 0)] + [TestCase(null, 5, 5)] + [TestCase(0, null, 0)] + [TestCase(0, 0, 0)] + [TestCase(0, 5, 5)] + [TestCase(5, null, 5)] + [TestCase(5, 0, 5)] + [TestCase(5, 10, 10)] + public void MergingMessageWithZero(int? originValue, int? mergingValue, int? expectedResult) + { + // This differs from the MergingStreamCornerCase because when we merge message *objects*, + // we ignore default values from the "source". 
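+            // Informally, reading the cases above: a null source leaves the target
+            // untouched; a non-null source is merged message-wise, and because message
+            // merging skips default payloads, a source value of 0 only takes effect when
+            // the target was still null, e.g.
+            //     (5, 0)    -> 5    // 0 is the wrapper's default payload, so it is ignored
+            //     (null, 0) -> 0    // but it still materializes a previously-unset field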
+ var message1 = new TestWellKnownTypes { Int32Field = originValue }; + var message2 = new TestWellKnownTypes { Int32Field = mergingValue }; + message1.MergeFrom(message2); + Assert.AreEqual(expectedResult, message1.Int32Field); + } + + [Test] + public void UnknownFieldInWrapper() + { + var stream = new MemoryStream(); + var output = new CodedOutputStream(stream); + var wrapperTag = WireFormat.MakeTag(TestWellKnownTypes.Int32FieldFieldNumber, WireFormat.WireType.LengthDelimited); + var unknownTag = WireFormat.MakeTag(15, WireFormat.WireType.Varint); + var valueTag = WireFormat.MakeTag(Int32Value.ValueFieldNumber, WireFormat.WireType.Varint); + + output.WriteTag(wrapperTag); + output.WriteLength(4); // unknownTag + value 5 + valueType + value 6, each 1 byte + output.WriteTag(unknownTag); + output.WriteInt32((int) valueTag); // Sneakily "pretend" it's a tag when it's really a value + output.WriteTag(valueTag); + output.WriteInt32(6); + + output.Flush(); + stream.Position = 0; + + var message = TestWellKnownTypes.Parser.ParseFrom(stream); + Assert.AreEqual(6, message.Int32Field); + } + + [Test] + public void ClearWithReflection() + { + // String and Bytes are the tricky ones here, as the CLR type of the property + // is the same between the wrapper and non-wrapper types. + var message = new TestWellKnownTypes { StringField = "foo" }; + TestWellKnownTypes.Descriptor.Fields[TestWellKnownTypes.StringFieldFieldNumber].Accessor.Clear(message); + Assert.IsNull(message.StringField); + } + } +} diff --git a/packager/third_party/protobuf/csharp/src/Google.Protobuf.Test/packages.config b/packager/third_party/protobuf/csharp/src/Google.Protobuf.Test/packages.config new file mode 100644 index 0000000000..c76539928c --- /dev/null +++ b/packager/third_party/protobuf/csharp/src/Google.Protobuf.Test/packages.config @@ -0,0 +1,5 @@ + + + + + \ No newline at end of file diff --git a/packager/third_party/protobuf/csharp/src/Google.Protobuf/ByteArray.cs b/packager/third_party/protobuf/csharp/src/Google.Protobuf/ByteArray.cs new file mode 100644 index 0000000000..b19962794b --- /dev/null +++ b/packager/third_party/protobuf/csharp/src/Google.Protobuf/ByteArray.cs @@ -0,0 +1,79 @@ +#region Copyright notice and license +// Protocol Buffers - Google's data interchange format +// Copyright 2008 Google Inc. All rights reserved. +// https://developers.google.com/protocol-buffers/ +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +#endregion + +using System; + +namespace Google.Protobuf +{ + /// + /// Provides a utility routine to copy small arrays much more quickly than Buffer.BlockCopy + /// + internal static class ByteArray + { + /// + /// The threshold above which you should use Buffer.BlockCopy rather than ByteArray.Copy + /// + private const int CopyThreshold = 12; + + /// + /// Determines which copy routine to use based on the number of bytes to be copied. + /// + internal static void Copy(byte[] src, int srcOffset, byte[] dst, int dstOffset, int count) + { + if (count > CopyThreshold) + { + Buffer.BlockCopy(src, srcOffset, dst, dstOffset, count); + } + else + { + int stop = srcOffset + count; + for (int i = srcOffset; i < stop; i++) + { + dst[dstOffset++] = src[i]; + } + } + } + + /// + /// Reverses the order of bytes in the array + /// + internal static void Reverse(byte[] bytes) + { + for (int first = 0, last = bytes.Length - 1; first < last; first++, last--) + { + byte temp = bytes[first]; + bytes[first] = bytes[last]; + bytes[last] = temp; + } + } + } +} \ No newline at end of file diff --git a/packager/third_party/protobuf/csharp/src/Google.Protobuf/ByteString.cs b/packager/third_party/protobuf/csharp/src/Google.Protobuf/ByteString.cs new file mode 100644 index 0000000000..11ad6216dd --- /dev/null +++ b/packager/third_party/protobuf/csharp/src/Google.Protobuf/ByteString.cs @@ -0,0 +1,345 @@ +#region Copyright notice and license +// Protocol Buffers - Google's data interchange format +// Copyright 2008 Google Inc. All rights reserved. +// https://developers.google.com/protocol-buffers/ +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +#endregion + +using System; +using System.Collections; +using System.Collections.Generic; +using System.IO; +using System.Text; + +namespace Google.Protobuf +{ + /// + /// Immutable array of bytes. + /// + public sealed class ByteString : IEnumerable, IEquatable + { + private static readonly ByteString empty = new ByteString(new byte[0]); + + private readonly byte[] bytes; + + /// + /// Unsafe operations that can cause IO Failure and/or other catestrophic side-effects. + /// + internal static class Unsafe + { + /// + /// Constructs a new ByteString from the given byte array. The array is + /// *not* copied, and must not be modified after this constructor is called. + /// + internal static ByteString FromBytes(byte[] bytes) + { + return new ByteString(bytes); + } + + /// + /// Provides direct, unrestricted access to the bytes contained in this instance. + /// You must not modify or resize the byte array returned by this method. + /// + internal static byte[] GetBuffer(ByteString bytes) + { + return bytes.bytes; + } + } + + /// + /// Internal use only. Ensure that the provided array is not mutated and belongs to this instance. + /// + internal static ByteString AttachBytes(byte[] bytes) + { + return new ByteString(bytes); + } + + /// + /// Constructs a new ByteString from the given byte array. The array is + /// *not* copied, and must not be modified after this constructor is called. + /// + private ByteString(byte[] bytes) + { + this.bytes = bytes; + } + + /// + /// Returns an empty ByteString. + /// + public static ByteString Empty + { + get { return empty; } + } + + /// + /// Returns the length of this ByteString in bytes. + /// + public int Length + { + get { return bytes.Length; } + } + + /// + /// Returns true if this byte string is empty, false otherwise. + /// + public bool IsEmpty + { + get { return Length == 0; } + } + + /// + /// Converts this into a byte array. + /// + /// The data is copied - changes to the returned array will not be reflected in this ByteString. + /// A byte array with the same data as this ByteString. + public byte[] ToByteArray() + { + return (byte[]) bytes.Clone(); + } + + /// + /// Converts this into a standard base64 representation. + /// + /// A base64 representation of this ByteString. + public string ToBase64() + { + return Convert.ToBase64String(bytes); + } + + /// + /// Constructs a from the Base64 Encoded String. + /// + public static ByteString FromBase64(string bytes) + { + // By handling the empty string explicitly, we not only optimize but we fix a + // problem on CF 2.0. See issue 61 for details. + return bytes == "" ? Empty : new ByteString(Convert.FromBase64String(bytes)); + } + + /// + /// Constructs a from the given array. The contents + /// are copied, so further modifications to the array will not + /// be reflected in the returned ByteString. + /// This method can also be invoked in ByteString.CopyFrom(0xaa, 0xbb, ...) form + /// which is primarily useful for testing. 
+ /// + public static ByteString CopyFrom(params byte[] bytes) + { + return new ByteString((byte[]) bytes.Clone()); + } + + /// + /// Constructs a from a portion of a byte array. + /// + public static ByteString CopyFrom(byte[] bytes, int offset, int count) + { + byte[] portion = new byte[count]; + ByteArray.Copy(bytes, offset, portion, 0, count); + return new ByteString(portion); + } + + /// + /// Creates a new by encoding the specified text with + /// the given encoding. + /// + public static ByteString CopyFrom(string text, Encoding encoding) + { + return new ByteString(encoding.GetBytes(text)); + } + + /// + /// Creates a new by encoding the specified text in UTF-8. + /// + public static ByteString CopyFromUtf8(string text) + { + return CopyFrom(text, Encoding.UTF8); + } + + /// + /// Retuns the byte at the given index. + /// + public byte this[int index] + { + get { return bytes[index]; } + } + + /// + /// Converts this into a string by applying the given encoding. + /// + /// + /// This method should only be used to convert binary data which was the result of encoding + /// text with the given encoding. + /// + /// The encoding to use to decode the binary data into text. + /// The result of decoding the binary data with the given decoding. + public string ToString(Encoding encoding) + { + return encoding.GetString(bytes, 0, bytes.Length); + } + + /// + /// Converts this into a string by applying the UTF-8 encoding. + /// + /// + /// This method should only be used to convert binary data which was the result of encoding + /// text with UTF-8. + /// + /// The result of decoding the binary data with the given decoding. + public string ToStringUtf8() + { + return ToString(Encoding.UTF8); + } + + /// + /// Returns an iterator over the bytes in this . + /// + /// An iterator over the bytes in this object. + public IEnumerator GetEnumerator() + { + return ((IEnumerable) bytes).GetEnumerator(); + } + + /// + /// Returns an iterator over the bytes in this . + /// + /// An iterator over the bytes in this object. + IEnumerator IEnumerable.GetEnumerator() + { + return GetEnumerator(); + } + + /// + /// Creates a CodedInputStream from this ByteString's data. + /// + public CodedInputStream CreateCodedInput() + { + // We trust CodedInputStream not to reveal the provided byte array or modify it + return new CodedInputStream(bytes); + } + + /// + /// Compares two byte strings for equality. + /// + /// The first byte string to compare. + /// The second byte string to compare. + /// true if the byte strings are equal; false otherwise. + public static bool operator ==(ByteString lhs, ByteString rhs) + { + if (ReferenceEquals(lhs, rhs)) + { + return true; + } + if (ReferenceEquals(lhs, null) || ReferenceEquals(rhs, null)) + { + return false; + } + if (lhs.bytes.Length != rhs.bytes.Length) + { + return false; + } + for (int i = 0; i < lhs.Length; i++) + { + if (rhs.bytes[i] != lhs.bytes[i]) + { + return false; + } + } + return true; + } + + /// + /// Compares two byte strings for inequality. + /// + /// The first byte string to compare. + /// The second byte string to compare. + /// false if the byte strings are equal; true otherwise. + public static bool operator !=(ByteString lhs, ByteString rhs) + { + return !(lhs == rhs); + } + + /// + /// Compares this byte string with another object. + /// + /// The object to compare this with. + /// true if refers to an equal ; false otherwise. 
+ public override bool Equals(object obj) + { + return this == (obj as ByteString); + } + + /// + /// Returns a hash code for this object. Two equal byte strings + /// will return the same hash code. + /// + /// A hash code for this object. + public override int GetHashCode() + { + int ret = 23; + foreach (byte b in bytes) + { + ret = (ret << 8) | b; + } + return ret; + } + + /// + /// Compares this byte string with another. + /// + /// The to compare this with. + /// true if refers to an equal byte string; false otherwise. + public bool Equals(ByteString other) + { + return this == other; + } + + /// + /// Used internally by CodedOutputStream to avoid creating a copy for the write + /// + internal void WriteRawBytesTo(CodedOutputStream outputStream) + { + outputStream.WriteRawBytes(bytes, 0, bytes.Length); + } + + /// + /// Copies the entire byte array to the destination array provided at the offset specified. + /// + public void CopyTo(byte[] array, int position) + { + ByteArray.Copy(bytes, 0, array, position, bytes.Length); + } + + /// + /// Writes the entire byte array to the provided stream + /// + public void WriteTo(Stream outputStream) + { + outputStream.Write(bytes, 0, bytes.Length); + } + } +} \ No newline at end of file diff --git a/packager/third_party/protobuf/csharp/src/Google.Protobuf/CodedInputStream.cs b/packager/third_party/protobuf/csharp/src/Google.Protobuf/CodedInputStream.cs new file mode 100644 index 0000000000..3484457f64 --- /dev/null +++ b/packager/third_party/protobuf/csharp/src/Google.Protobuf/CodedInputStream.cs @@ -0,0 +1,1277 @@ +#region Copyright notice and license +// Protocol Buffers - Google's data interchange format +// Copyright 2008 Google Inc. All rights reserved. +// https://developers.google.com/protocol-buffers/ +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +#endregion + +using Google.Protobuf.Collections; +using System; +using System.Collections.Generic; +using System.IO; + +namespace Google.Protobuf +{ + /// + /// Reads and decodes protocol message fields. 
+ /// + /// + /// + /// This class is generally used by generated code to read appropriate + /// primitives from the stream. It effectively encapsulates the lowest + /// levels of protocol buffer format. + /// + /// + /// Repeated fields and map fields are not handled by this class; use + /// and to serialize such fields. + /// + /// + public sealed class CodedInputStream : IDisposable + { + /// + /// Whether to leave the underlying stream open when disposing of this stream. + /// This is always true when there's no stream. + /// + private readonly bool leaveOpen; + + /// + /// Buffer of data read from the stream or provided at construction time. + /// + private readonly byte[] buffer; + + /// + /// The index of the buffer at which we need to refill from the stream (if there is one). + /// + private int bufferSize; + + private int bufferSizeAfterLimit = 0; + /// + /// The position within the current buffer (i.e. the next byte to read) + /// + private int bufferPos = 0; + + /// + /// The stream to read further input from, or null if the byte array buffer was provided + /// directly on construction, with no further data available. + /// + private readonly Stream input; + + /// + /// The last tag we read. 0 indicates we've read to the end of the stream + /// (or haven't read anything yet). + /// + private uint lastTag = 0; + + /// + /// The next tag, used to store the value read by PeekTag. + /// + private uint nextTag = 0; + private bool hasNextTag = false; + + internal const int DefaultRecursionLimit = 64; + internal const int DefaultSizeLimit = 64 << 20; // 64MB + internal const int BufferSize = 4096; + + /// + /// The total number of bytes read before the current buffer. The + /// total bytes read up to the current position can be computed as + /// totalBytesRetired + bufferPos. + /// + private int totalBytesRetired = 0; + + /// + /// The absolute position of the end of the current message. + /// + private int currentLimit = int.MaxValue; + + private int recursionDepth = 0; + + private readonly int recursionLimit; + private readonly int sizeLimit; + + #region Construction + // Note that the checks are performed such that we don't end up checking obviously-valid things + // like non-null references for arrays we've just created. + + /// + /// Creates a new CodedInputStream reading data from the given byte array. + /// + public CodedInputStream(byte[] buffer) : this(null, ProtoPreconditions.CheckNotNull(buffer, "buffer"), 0, buffer.Length) + { + } + + /// + /// Creates a new that reads from the given byte array slice. + /// + public CodedInputStream(byte[] buffer, int offset, int length) + : this(null, ProtoPreconditions.CheckNotNull(buffer, "buffer"), offset, offset + length) + { + if (offset < 0 || offset > buffer.Length) + { + throw new ArgumentOutOfRangeException("offset", "Offset must be within the buffer"); + } + if (length < 0 || offset + length > buffer.Length) + { + throw new ArgumentOutOfRangeException("length", "Length must be non-negative and within the buffer"); + } + } + + /// + /// Creates a new reading data from the given stream, which will be disposed + /// when the returned object is disposed. + /// + /// The stream to read from. + public CodedInputStream(Stream input) : this(input, false) + { + } + + /// + /// Creates a new reading data from the given stream. + /// + /// The stream to read from. + /// true to leave open when the returned + /// is disposed; false to dispose of the given stream when the + /// returned object is disposed. 
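+ /// A rough usage sketch (networkStream below is just a placeholder for any stream the
+ /// caller wants to keep using afterwards):
+ ///     using (var input = new CodedInputStream(networkStream, leaveOpen: true))
+ ///     {
+ ///         message.MergeFrom(input);
+ ///     }
+ /// Disposing the CodedInputStream then leaves networkStream open.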
+ public CodedInputStream(Stream input, bool leaveOpen) + : this(ProtoPreconditions.CheckNotNull(input, "input"), new byte[BufferSize], 0, 0) + { + this.leaveOpen = leaveOpen; + } + + /// + /// Creates a new CodedInputStream reading data from the given + /// stream and buffer, using the default limits. + /// + internal CodedInputStream(Stream input, byte[] buffer, int bufferPos, int bufferSize) + { + this.input = input; + this.buffer = buffer; + this.bufferPos = bufferPos; + this.bufferSize = bufferSize; + this.sizeLimit = DefaultSizeLimit; + this.recursionLimit = DefaultRecursionLimit; + } + + /// + /// Creates a new CodedInputStream reading data from the given + /// stream and buffer, using the specified limits. + /// + /// + /// This chains to the version with the default limits instead of vice versa to avoid + /// having to check that the default values are valid every time. + /// + internal CodedInputStream(Stream input, byte[] buffer, int bufferPos, int bufferSize, int sizeLimit, int recursionLimit) + : this(input, buffer, bufferPos, bufferSize) + { + if (sizeLimit <= 0) + { + throw new ArgumentOutOfRangeException("sizeLimit", "Size limit must be positive"); + } + if (recursionLimit <= 0) + { + throw new ArgumentOutOfRangeException("recursionLimit!", "Recursion limit must be positive"); + } + this.sizeLimit = sizeLimit; + this.recursionLimit = recursionLimit; + } + #endregion + + /// + /// Creates a with the specified size and recursion limits, reading + /// from an input stream. + /// + /// + /// This method exists separately from the constructor to reduce the number of constructor overloads. + /// It is likely to be used considerably less frequently than the constructors, as the default limits + /// are suitable for most use cases. + /// + /// The input stream to read from + /// The total limit of data to read from the stream. + /// The maximum recursion depth to allow while reading. + /// A CodedInputStream reading from with the specified size + /// and recursion limits. + public static CodedInputStream CreateWithLimits(Stream input, int sizeLimit, int recursionLimit) + { + return new CodedInputStream(input, new byte[BufferSize], 0, 0, sizeLimit, recursionLimit); + } + + /// + /// Returns the current position in the input stream, or the position in the input buffer + /// + public long Position + { + get + { + if (input != null) + { + return input.Position - ((bufferSize + bufferSizeAfterLimit) - bufferPos); + } + return bufferPos; + } + } + + /// + /// Returns the last tag read, or 0 if no tags have been read or we've read beyond + /// the end of the stream. + /// + internal uint LastTag { get { return lastTag; } } + + /// + /// Returns the size limit for this stream. + /// + /// + /// This limit is applied when reading from the underlying stream, as a sanity check. It is + /// not applied when reading from a byte array data source without an underlying stream. + /// The default value is 64MB. + /// + /// + /// The size limit. + /// + public int SizeLimit { get { return sizeLimit; } } + + /// + /// Returns the recursion limit for this stream. This limit is applied whilst reading messages, + /// to avoid maliciously-recursive data. + /// + /// + /// The default limit is 64. + /// + /// + /// The recursion limit for this stream. + /// + public int RecursionLimit { get { return recursionLimit; } } + + /// + /// Disposes of this instance, potentially closing any underlying stream. 
+ /// + /// + /// As there is no flushing to perform here, disposing of a which + /// was constructed with the leaveOpen option parameter set to true (or one which + /// was constructed to read from a byte array) has no effect. + /// + public void Dispose() + { + if (!leaveOpen) + { + input.Dispose(); + } + } + + #region Validation + /// + /// Verifies that the last call to ReadTag() returned tag 0 - in other words, + /// we've reached the end of the stream when we expected to. + /// + /// The + /// tag read was not the one specified + internal void CheckReadEndOfStreamTag() + { + if (lastTag != 0) + { + throw InvalidProtocolBufferException.MoreDataAvailable(); + } + } + #endregion + + #region Reading of tags etc + + /// + /// Peeks at the next field tag. This is like calling , but the + /// tag is not consumed. (So a subsequent call to will return the + /// same value.) + /// + public uint PeekTag() + { + if (hasNextTag) + { + return nextTag; + } + + uint savedLast = lastTag; + nextTag = ReadTag(); + hasNextTag = true; + lastTag = savedLast; // Undo the side effect of ReadTag + return nextTag; + } + + /// + /// Reads a field tag, returning the tag of 0 for "end of stream". + /// + /// + /// If this method returns 0, it doesn't necessarily mean the end of all + /// the data in this CodedInputStream; it may be the end of the logical stream + /// for an embedded message, for example. + /// + /// The next field tag, or 0 for end of stream. (0 is never a valid tag.) + public uint ReadTag() + { + if (hasNextTag) + { + lastTag = nextTag; + hasNextTag = false; + return lastTag; + } + + // Optimize for the incredibly common case of having at least two bytes left in the buffer, + // and those two bytes being enough to get the tag. This will be true for fields up to 4095. + if (bufferPos + 2 <= bufferSize) + { + int tmp = buffer[bufferPos++]; + if (tmp < 128) + { + lastTag = (uint)tmp; + } + else + { + int result = tmp & 0x7f; + if ((tmp = buffer[bufferPos++]) < 128) + { + result |= tmp << 7; + lastTag = (uint) result; + } + else + { + // Nope, rewind and go the potentially slow route. + bufferPos -= 2; + lastTag = ReadRawVarint32(); + } + } + } + else + { + if (IsAtEnd) + { + lastTag = 0; + return 0; // This is the only case in which we return 0. + } + + lastTag = ReadRawVarint32(); + } + if (lastTag == 0) + { + // If we actually read zero, that's not a valid tag. + throw InvalidProtocolBufferException.InvalidTag(); + } + return lastTag; + } + + /// + /// Skips the data for the field with the tag we've just read. + /// This should be called directly after , when + /// the caller wishes to skip an unknown field. + /// + /// + /// This method throws if the last-read tag was an end-group tag. + /// If a caller wishes to skip a group, they should skip the whole group, by calling this method after reading the + /// start-group tag. This behavior allows callers to call this method on any field they don't understand, correctly + /// resulting in an error if an end-group tag has not been paired with an earlier start-group tag. 
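+ /// A rough sketch of the intended call pattern: inside a read loop, anything that is
+ /// not dispatched as a known field gets skipped, e.g.
+ ///     uint tag;
+ ///     while ((tag = input.ReadTag()) != 0)
+ ///     {
+ ///         switch (tag)
+ ///         {
+ ///             // ... cases for known tags ...
+ ///             default:
+ ///                 input.SkipLastField();
+ ///                 break;
+ ///         }
+ ///     }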
+ /// + /// The last tag was an end-group tag + /// The last read operation read to the end of the logical stream + public void SkipLastField() + { + if (lastTag == 0) + { + throw new InvalidOperationException("SkipLastField cannot be called at the end of a stream"); + } + switch (WireFormat.GetTagWireType(lastTag)) + { + case WireFormat.WireType.StartGroup: + SkipGroup(lastTag); + break; + case WireFormat.WireType.EndGroup: + throw new InvalidProtocolBufferException( + "SkipLastField called on an end-group tag, indicating that the corresponding start-group was missing"); + case WireFormat.WireType.Fixed32: + ReadFixed32(); + break; + case WireFormat.WireType.Fixed64: + ReadFixed64(); + break; + case WireFormat.WireType.LengthDelimited: + var length = ReadLength(); + SkipRawBytes(length); + break; + case WireFormat.WireType.Varint: + ReadRawVarint32(); + break; + } + } + + private void SkipGroup(uint startGroupTag) + { + // Note: Currently we expect this to be the way that groups are read. We could put the recursion + // depth changes into the ReadTag method instead, potentially... + recursionDepth++; + if (recursionDepth >= recursionLimit) + { + throw InvalidProtocolBufferException.RecursionLimitExceeded(); + } + uint tag; + while (true) + { + tag = ReadTag(); + if (tag == 0) + { + throw InvalidProtocolBufferException.TruncatedMessage(); + } + // Can't call SkipLastField for this case- that would throw. + if (WireFormat.GetTagWireType(tag) == WireFormat.WireType.EndGroup) + { + break; + } + // This recursion will allow us to handle nested groups. + SkipLastField(); + } + int startField = WireFormat.GetTagFieldNumber(startGroupTag); + int endField = WireFormat.GetTagFieldNumber(tag); + if (startField != endField) + { + throw new InvalidProtocolBufferException( + $"Mismatched end-group tag. Started with field {startField}; ended with field {endField}"); + } + recursionDepth--; + } + + /// + /// Reads a double field from the stream. + /// + public double ReadDouble() + { + return BitConverter.Int64BitsToDouble((long) ReadRawLittleEndian64()); + } + + /// + /// Reads a float field from the stream. + /// + public float ReadFloat() + { + if (BitConverter.IsLittleEndian && 4 <= bufferSize - bufferPos) + { + float ret = BitConverter.ToSingle(buffer, bufferPos); + bufferPos += 4; + return ret; + } + else + { + byte[] rawBytes = ReadRawBytes(4); + if (!BitConverter.IsLittleEndian) + { + ByteArray.Reverse(rawBytes); + } + return BitConverter.ToSingle(rawBytes, 0); + } + } + + /// + /// Reads a uint64 field from the stream. + /// + public ulong ReadUInt64() + { + return ReadRawVarint64(); + } + + /// + /// Reads an int64 field from the stream. + /// + public long ReadInt64() + { + return (long) ReadRawVarint64(); + } + + /// + /// Reads an int32 field from the stream. + /// + public int ReadInt32() + { + return (int) ReadRawVarint32(); + } + + /// + /// Reads a fixed64 field from the stream. + /// + public ulong ReadFixed64() + { + return ReadRawLittleEndian64(); + } + + /// + /// Reads a fixed32 field from the stream. + /// + public uint ReadFixed32() + { + return ReadRawLittleEndian32(); + } + + /// + /// Reads a bool field from the stream. + /// + public bool ReadBool() + { + return ReadRawVarint32() != 0; + } + + /// + /// Reads a string field from the stream. + /// + public string ReadString() + { + int length = ReadLength(); + // No need to read any data for an empty string. 
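+            // Wire-format background: a string field is length-delimited, i.e. a varint
+            // length followed by that many UTF-8 bytes. For field number 1, the value "hi"
+            // is encoded as
+            //     0x0A 0x02 0x68 0x69   (tag, length, 'h', 'i')
+            // so a zero length means the tag/length prefix was the entire field.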
+ if (length == 0) + { + return ""; + } + if (length <= bufferSize - bufferPos) + { + // Fast path: We already have the bytes in a contiguous buffer, so + // just copy directly from it. + String result = CodedOutputStream.Utf8Encoding.GetString(buffer, bufferPos, length); + bufferPos += length; + return result; + } + // Slow path: Build a byte array first then copy it. + return CodedOutputStream.Utf8Encoding.GetString(ReadRawBytes(length), 0, length); + } + + /// + /// Reads an embedded message field value from the stream. + /// + public void ReadMessage(IMessage builder) + { + int length = ReadLength(); + if (recursionDepth >= recursionLimit) + { + throw InvalidProtocolBufferException.RecursionLimitExceeded(); + } + int oldLimit = PushLimit(length); + ++recursionDepth; + builder.MergeFrom(this); + CheckReadEndOfStreamTag(); + // Check that we've read exactly as much data as expected. + if (!ReachedLimit) + { + throw InvalidProtocolBufferException.TruncatedMessage(); + } + --recursionDepth; + PopLimit(oldLimit); + } + + /// + /// Reads a bytes field value from the stream. + /// + public ByteString ReadBytes() + { + int length = ReadLength(); + if (length <= bufferSize - bufferPos && length > 0) + { + // Fast path: We already have the bytes in a contiguous buffer, so + // just copy directly from it. + ByteString result = ByteString.CopyFrom(buffer, bufferPos, length); + bufferPos += length; + return result; + } + else + { + // Slow path: Build a byte array and attach it to a new ByteString. + return ByteString.AttachBytes(ReadRawBytes(length)); + } + } + + /// + /// Reads a uint32 field value from the stream. + /// + public uint ReadUInt32() + { + return ReadRawVarint32(); + } + + /// + /// Reads an enum field value from the stream. If the enum is valid for type T, + /// then the ref value is set and it returns true. Otherwise the unknown output + /// value is set and this method returns false. + /// + public int ReadEnum() + { + // Currently just a pass-through, but it's nice to separate it logically from WriteInt32. + return (int) ReadRawVarint32(); + } + + /// + /// Reads an sfixed32 field value from the stream. + /// + public int ReadSFixed32() + { + return (int) ReadRawLittleEndian32(); + } + + /// + /// Reads an sfixed64 field value from the stream. + /// + public long ReadSFixed64() + { + return (long) ReadRawLittleEndian64(); + } + + /// + /// Reads an sint32 field value from the stream. + /// + public int ReadSInt32() + { + return DecodeZigZag32(ReadRawVarint32()); + } + + /// + /// Reads an sint64 field value from the stream. + /// + public long ReadSInt64() + { + return DecodeZigZag64(ReadRawVarint64()); + } + + /// + /// Reads a length for length-delimited data. + /// + /// + /// This is internally just reading a varint, but this method exists + /// to make the calling code clearer. + /// + public int ReadLength() + { + return (int) ReadRawVarint32(); + } + + /// + /// Peeks at the next tag in the stream. If it matches , + /// the tag is consumed and the method returns true; otherwise, the + /// stream is left in the original position and the method returns false. + /// + public bool MaybeConsumeTag(uint tag) + { + if (PeekTag() == tag) + { + hasNextTag = false; + return true; + } + return false; + } + + #endregion + + #region Underlying reading primitives + + /// + /// Same code as ReadRawVarint32, but read each byte individually, checking for + /// buffer overflow. 
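+ /// As a worked example of the encoding being decoded: each varint byte carries seven
+ /// payload bits plus a continuation bit, least-significant group first, so
+ ///     300 = 0b1_0010_1100  ->  0xAC 0x02
+ /// and decoding stops at the first byte whose high bit is clear.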
+ /// + private uint SlowReadRawVarint32() + { + int tmp = ReadRawByte(); + if (tmp < 128) + { + return (uint) tmp; + } + int result = tmp & 0x7f; + if ((tmp = ReadRawByte()) < 128) + { + result |= tmp << 7; + } + else + { + result |= (tmp & 0x7f) << 7; + if ((tmp = ReadRawByte()) < 128) + { + result |= tmp << 14; + } + else + { + result |= (tmp & 0x7f) << 14; + if ((tmp = ReadRawByte()) < 128) + { + result |= tmp << 21; + } + else + { + result |= (tmp & 0x7f) << 21; + result |= (tmp = ReadRawByte()) << 28; + if (tmp >= 128) + { + // Discard upper 32 bits. + for (int i = 0; i < 5; i++) + { + if (ReadRawByte() < 128) + { + return (uint) result; + } + } + throw InvalidProtocolBufferException.MalformedVarint(); + } + } + } + } + return (uint) result; + } + + /// + /// Reads a raw Varint from the stream. If larger than 32 bits, discard the upper bits. + /// This method is optimised for the case where we've got lots of data in the buffer. + /// That means we can check the size just once, then just read directly from the buffer + /// without constant rechecking of the buffer length. + /// + internal uint ReadRawVarint32() + { + if (bufferPos + 5 > bufferSize) + { + return SlowReadRawVarint32(); + } + + int tmp = buffer[bufferPos++]; + if (tmp < 128) + { + return (uint) tmp; + } + int result = tmp & 0x7f; + if ((tmp = buffer[bufferPos++]) < 128) + { + result |= tmp << 7; + } + else + { + result |= (tmp & 0x7f) << 7; + if ((tmp = buffer[bufferPos++]) < 128) + { + result |= tmp << 14; + } + else + { + result |= (tmp & 0x7f) << 14; + if ((tmp = buffer[bufferPos++]) < 128) + { + result |= tmp << 21; + } + else + { + result |= (tmp & 0x7f) << 21; + result |= (tmp = buffer[bufferPos++]) << 28; + if (tmp >= 128) + { + // Discard upper 32 bits. + // Note that this has to use ReadRawByte() as we only ensure we've + // got at least 5 bytes at the start of the method. This lets us + // use the fast path in more cases, and we rarely hit this section of code. + for (int i = 0; i < 5; i++) + { + if (ReadRawByte() < 128) + { + return (uint) result; + } + } + throw InvalidProtocolBufferException.MalformedVarint(); + } + } + } + } + return (uint) result; + } + + /// + /// Reads a varint from the input one byte at a time, so that it does not + /// read any bytes after the end of the varint. If you simply wrapped the + /// stream in a CodedInputStream and used ReadRawVarint32(Stream) + /// then you would probably end up reading past the end of the varint since + /// CodedInputStream buffers its input. + /// + /// + /// + internal static uint ReadRawVarint32(Stream input) + { + int result = 0; + int offset = 0; + for (; offset < 32; offset += 7) + { + int b = input.ReadByte(); + if (b == -1) + { + throw InvalidProtocolBufferException.TruncatedMessage(); + } + result |= (b & 0x7f) << offset; + if ((b & 0x80) == 0) + { + return (uint) result; + } + } + // Keep reading up to 64 bits. + for (; offset < 64; offset += 7) + { + int b = input.ReadByte(); + if (b == -1) + { + throw InvalidProtocolBufferException.TruncatedMessage(); + } + if ((b & 0x80) == 0) + { + return (uint) result; + } + } + throw InvalidProtocolBufferException.MalformedVarint(); + } + + /// + /// Reads a raw varint from the stream. 
+ /// + internal ulong ReadRawVarint64() + { + int shift = 0; + ulong result = 0; + while (shift < 64) + { + byte b = ReadRawByte(); + result |= (ulong) (b & 0x7F) << shift; + if ((b & 0x80) == 0) + { + return result; + } + shift += 7; + } + throw InvalidProtocolBufferException.MalformedVarint(); + } + + /// + /// Reads a 32-bit little-endian integer from the stream. + /// + internal uint ReadRawLittleEndian32() + { + uint b1 = ReadRawByte(); + uint b2 = ReadRawByte(); + uint b3 = ReadRawByte(); + uint b4 = ReadRawByte(); + return b1 | (b2 << 8) | (b3 << 16) | (b4 << 24); + } + + /// + /// Reads a 64-bit little-endian integer from the stream. + /// + internal ulong ReadRawLittleEndian64() + { + ulong b1 = ReadRawByte(); + ulong b2 = ReadRawByte(); + ulong b3 = ReadRawByte(); + ulong b4 = ReadRawByte(); + ulong b5 = ReadRawByte(); + ulong b6 = ReadRawByte(); + ulong b7 = ReadRawByte(); + ulong b8 = ReadRawByte(); + return b1 | (b2 << 8) | (b3 << 16) | (b4 << 24) + | (b5 << 32) | (b6 << 40) | (b7 << 48) | (b8 << 56); + } + + /// + /// Decode a 32-bit value with ZigZag encoding. + /// + /// + /// ZigZag encodes signed integers into values that can be efficiently + /// encoded with varint. (Otherwise, negative values must be + /// sign-extended to 64 bits to be varint encoded, thus always taking + /// 10 bytes on the wire.) + /// + internal static int DecodeZigZag32(uint n) + { + return (int)(n >> 1) ^ -(int)(n & 1); + } + + /// + /// Decode a 32-bit value with ZigZag encoding. + /// + /// + /// ZigZag encodes signed integers into values that can be efficiently + /// encoded with varint. (Otherwise, negative values must be + /// sign-extended to 64 bits to be varint encoded, thus always taking + /// 10 bytes on the wire.) + /// + internal static long DecodeZigZag64(ulong n) + { + return (long)(n >> 1) ^ -(long)(n & 1); + } + #endregion + + #region Internal reading and buffer management + + /// + /// Sets currentLimit to (current position) + byteLimit. This is called + /// when descending into a length-delimited embedded message. The previous + /// limit is returned. + /// + /// The old limit. + internal int PushLimit(int byteLimit) + { + if (byteLimit < 0) + { + throw InvalidProtocolBufferException.NegativeSize(); + } + byteLimit += totalBytesRetired + bufferPos; + int oldLimit = currentLimit; + if (byteLimit > oldLimit) + { + throw InvalidProtocolBufferException.TruncatedMessage(); + } + currentLimit = byteLimit; + + RecomputeBufferSizeAfterLimit(); + + return oldLimit; + } + + private void RecomputeBufferSizeAfterLimit() + { + bufferSize += bufferSizeAfterLimit; + int bufferEnd = totalBytesRetired + bufferSize; + if (bufferEnd > currentLimit) + { + // Limit is in current buffer. + bufferSizeAfterLimit = bufferEnd - currentLimit; + bufferSize -= bufferSizeAfterLimit; + } + else + { + bufferSizeAfterLimit = 0; + } + } + + /// + /// Discards the current limit, returning the previous limit. + /// + internal void PopLimit(int oldLimit) + { + currentLimit = oldLimit; + RecomputeBufferSizeAfterLimit(); + } + + /// + /// Returns whether or not all the data before the limit has been read. + /// + /// + internal bool ReachedLimit + { + get + { + if (currentLimit == int.MaxValue) + { + return false; + } + int currentAbsolutePosition = totalBytesRetired + bufferPos; + return currentAbsolutePosition >= currentLimit; + } + } + + /// + /// Returns true if the stream has reached the end of the input. 
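DecodeZigZag32 and DecodeZigZag64 above undo the ZigZag mapping that lets sint32/sint64 fields store small negative numbers in short varints: the encoded values 0, 1, 2, 3, 4, ... correspond to 0, -1, 1, -2, 2, ... A few spot checks of the formula (illustrative only, not part of this patch):

using System;

static class ZigZagDecodeDemo
{
    // Same expression as DecodeZigZag32 above.
    static int Decode32(uint n) => (int) (n >> 1) ^ -(int) (n & 1);

    static void Main()
    {
        Console.WriteLine(Decode32(0));           // 0
        Console.WriteLine(Decode32(1));           // -1
        Console.WriteLine(Decode32(2));           // 1
        Console.WriteLine(Decode32(3));           // -2
        Console.WriteLine(Decode32(4294967294));  // 2147483647 (int.MaxValue)
        Console.WriteLine(Decode32(4294967295));  // -2147483648 (int.MinValue)
    }
}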
This is the + /// case if either the end of the underlying input source has been reached or + /// the stream has reached a limit created using PushLimit. + /// + public bool IsAtEnd + { + get { return bufferPos == bufferSize && !RefillBuffer(false); } + } + + /// + /// Called when buffer is empty to read more bytes from the + /// input. If is true, RefillBuffer() gurantees that + /// either there will be at least one byte in the buffer when it returns + /// or it will throw an exception. If is false, + /// RefillBuffer() returns false if no more bytes were available. + /// + /// + /// + private bool RefillBuffer(bool mustSucceed) + { + if (bufferPos < bufferSize) + { + throw new InvalidOperationException("RefillBuffer() called when buffer wasn't empty."); + } + + if (totalBytesRetired + bufferSize == currentLimit) + { + // Oops, we hit a limit. + if (mustSucceed) + { + throw InvalidProtocolBufferException.TruncatedMessage(); + } + else + { + return false; + } + } + + totalBytesRetired += bufferSize; + + bufferPos = 0; + bufferSize = (input == null) ? 0 : input.Read(buffer, 0, buffer.Length); + if (bufferSize < 0) + { + throw new InvalidOperationException("Stream.Read returned a negative count"); + } + if (bufferSize == 0) + { + if (mustSucceed) + { + throw InvalidProtocolBufferException.TruncatedMessage(); + } + else + { + return false; + } + } + else + { + RecomputeBufferSizeAfterLimit(); + int totalBytesRead = + totalBytesRetired + bufferSize + bufferSizeAfterLimit; + if (totalBytesRead > sizeLimit || totalBytesRead < 0) + { + throw InvalidProtocolBufferException.SizeLimitExceeded(); + } + return true; + } + } + + /// + /// Read one byte from the input. + /// + /// + /// the end of the stream or the current limit was reached + /// + internal byte ReadRawByte() + { + if (bufferPos == bufferSize) + { + RefillBuffer(true); + } + return buffer[bufferPos++]; + } + + /// + /// Reads a fixed size of bytes from the input. + /// + /// + /// the end of the stream or the current limit was reached + /// + internal byte[] ReadRawBytes(int size) + { + if (size < 0) + { + throw InvalidProtocolBufferException.NegativeSize(); + } + + if (totalBytesRetired + bufferPos + size > currentLimit) + { + // Read to the end of the stream (up to the current limit) anyway. + SkipRawBytes(currentLimit - totalBytesRetired - bufferPos); + // Then fail. + throw InvalidProtocolBufferException.TruncatedMessage(); + } + + if (size <= bufferSize - bufferPos) + { + // We have all the bytes we need already. + byte[] bytes = new byte[size]; + ByteArray.Copy(buffer, bufferPos, bytes, 0, size); + bufferPos += size; + return bytes; + } + else if (size < buffer.Length) + { + // Reading more bytes than are in the buffer, but not an excessive number + // of bytes. We can safely allocate the resulting array ahead of time. + + // First copy what we have. + byte[] bytes = new byte[size]; + int pos = bufferSize - bufferPos; + ByteArray.Copy(buffer, bufferPos, bytes, 0, pos); + bufferPos = bufferSize; + + // We want to use RefillBuffer() and then copy from the buffer into our + // byte array rather than reading directly into our byte array because + // the input may be unbuffered. + RefillBuffer(true); + + while (size - pos > bufferSize) + { + Buffer.BlockCopy(buffer, 0, bytes, pos, bufferSize); + pos += bufferSize; + bufferPos = bufferSize; + RefillBuffer(true); + } + + ByteArray.Copy(buffer, 0, bytes, pos, size - pos); + bufferPos = size - pos; + + return bytes; + } + else + { + // The size is very large. 
For security reasons, we can't allocate the + // entire byte array yet. The size comes directly from the input, so a + // maliciously-crafted message could provide a bogus very large size in + // order to trick the app into allocating a lot of memory. We avoid this + // by allocating and reading only a small chunk at a time, so that the + // malicious message must actually *be* extremely large to cause + // problems. Meanwhile, we limit the allowed size of a message elsewhere. + + // Remember the buffer markers since we'll have to copy the bytes out of + // it later. + int originalBufferPos = bufferPos; + int originalBufferSize = bufferSize; + + // Mark the current buffer consumed. + totalBytesRetired += bufferSize; + bufferPos = 0; + bufferSize = 0; + + // Read all the rest of the bytes we need. + int sizeLeft = size - (originalBufferSize - originalBufferPos); + List chunks = new List(); + + while (sizeLeft > 0) + { + byte[] chunk = new byte[Math.Min(sizeLeft, buffer.Length)]; + int pos = 0; + while (pos < chunk.Length) + { + int n = (input == null) ? -1 : input.Read(chunk, pos, chunk.Length - pos); + if (n <= 0) + { + throw InvalidProtocolBufferException.TruncatedMessage(); + } + totalBytesRetired += n; + pos += n; + } + sizeLeft -= chunk.Length; + chunks.Add(chunk); + } + + // OK, got everything. Now concatenate it all into one buffer. + byte[] bytes = new byte[size]; + + // Start by copying the leftover bytes from this.buffer. + int newPos = originalBufferSize - originalBufferPos; + ByteArray.Copy(buffer, originalBufferPos, bytes, 0, newPos); + + // And now all the chunks. + foreach (byte[] chunk in chunks) + { + Buffer.BlockCopy(chunk, 0, bytes, newPos, chunk.Length); + newPos += chunk.Length; + } + + // Done. + return bytes; + } + } + + /// + /// Reads and discards bytes. + /// + /// the end of the stream + /// or the current limit was reached + private void SkipRawBytes(int size) + { + if (size < 0) + { + throw InvalidProtocolBufferException.NegativeSize(); + } + + if (totalBytesRetired + bufferPos + size > currentLimit) + { + // Read to the end of the stream anyway. + SkipRawBytes(currentLimit - totalBytesRetired - bufferPos); + // Then fail. + throw InvalidProtocolBufferException.TruncatedMessage(); + } + + if (size <= bufferSize - bufferPos) + { + // We have all the bytes we need already. + bufferPos += size; + } + else + { + // Skipping more bytes than are in the buffer. First skip what we have. + int pos = bufferSize - bufferPos; + + // ROK 5/7/2013 Issue #54: should retire all bytes in buffer (bufferSize) + // totalBytesRetired += pos; + totalBytesRetired += bufferSize; + + bufferPos = 0; + bufferSize = 0; + + // Then skip directly from the InputStream for the rest. + if (pos < size) + { + if (input == null) + { + throw InvalidProtocolBufferException.TruncatedMessage(); + } + SkipImpl(size - pos); + totalBytesRetired += size - pos; + } + } + } + + /// + /// Abstraction of skipping to cope with streams which can't really skip. 
+ /// + private void SkipImpl(int amountToSkip) + { + if (input.CanSeek) + { + long previousPosition = input.Position; + input.Position += amountToSkip; + if (input.Position != previousPosition + amountToSkip) + { + throw InvalidProtocolBufferException.TruncatedMessage(); + } + } + else + { + byte[] skipBuffer = new byte[Math.Min(1024, amountToSkip)]; + while (amountToSkip > 0) + { + int bytesRead = input.Read(skipBuffer, 0, Math.Min(skipBuffer.Length, amountToSkip)); + if (bytesRead <= 0) + { + throw InvalidProtocolBufferException.TruncatedMessage(); + } + amountToSkip -= bytesRead; + } + } + } + + #endregion + } +} \ No newline at end of file diff --git a/packager/third_party/protobuf/csharp/src/Google.Protobuf/CodedOutputStream.ComputeSize.cs b/packager/third_party/protobuf/csharp/src/Google.Protobuf/CodedOutputStream.ComputeSize.cs new file mode 100644 index 0000000000..e22937c13d --- /dev/null +++ b/packager/third_party/protobuf/csharp/src/Google.Protobuf/CodedOutputStream.ComputeSize.cs @@ -0,0 +1,304 @@ +#region Copyright notice and license +// Protocol Buffers - Google's data interchange format +// Copyright 2008 Google Inc. All rights reserved. +// https://developers.google.com/protocol-buffers/ +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +#endregion + +using System; + +namespace Google.Protobuf +{ + // This part of CodedOutputStream provides all the static entry points that are used + // by generated code and internally to compute the size of messages prior to being + // written to an instance of CodedOutputStream. + public sealed partial class CodedOutputStream + { + private const int LittleEndian64Size = 8; + private const int LittleEndian32Size = 4; + + /// + /// Computes the number of bytes that would be needed to encode a + /// double field, including the tag. + /// + public static int ComputeDoubleSize(double value) + { + return LittleEndian64Size; + } + + /// + /// Computes the number of bytes that would be needed to encode a + /// float field, including the tag. 
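These Compute*Size helpers exist so that a message's full encoded length can be known before any bytes are written, which is what the length prefix written by WriteMessage (later in this patch) requires. As the method bodies show, each helper returns the size of the encoded value, and the field's tag is costed separately with ComputeTagSize further down, so the wire cost of one field is the sum of the two. A small, hypothetical worked example using the public statics from this file:

using System;
using Google.Protobuf;

static class FieldSizeDemo
{
    static void Main()
    {
        // Hypothetical message: field 1 is an int32 (42), field 2 is a string ("abc").
        int field1 = CodedOutputStream.ComputeTagSize(1) + CodedOutputStream.ComputeInt32Size(42);
        int field2 = CodedOutputStream.ComputeTagSize(2) + CodedOutputStream.ComputeStringSize("abc");

        Console.WriteLine(field1);            // 2: one tag byte + one varint byte
        Console.WriteLine(field2);            // 5: one tag byte + one length byte + three UTF-8 bytes

        // A message containing only these two fields serializes to their sum.
        Console.WriteLine(field1 + field2);   // 7
    }
}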
+ /// + public static int ComputeFloatSize(float value) + { + return LittleEndian32Size; + } + + /// + /// Computes the number of bytes that would be needed to encode a + /// uint64 field, including the tag. + /// + public static int ComputeUInt64Size(ulong value) + { + return ComputeRawVarint64Size(value); + } + + /// + /// Computes the number of bytes that would be needed to encode an + /// int64 field, including the tag. + /// + public static int ComputeInt64Size(long value) + { + return ComputeRawVarint64Size((ulong) value); + } + + /// + /// Computes the number of bytes that would be needed to encode an + /// int32 field, including the tag. + /// + public static int ComputeInt32Size(int value) + { + if (value >= 0) + { + return ComputeRawVarint32Size((uint) value); + } + else + { + // Must sign-extend. + return 10; + } + } + + /// + /// Computes the number of bytes that would be needed to encode a + /// fixed64 field, including the tag. + /// + public static int ComputeFixed64Size(ulong value) + { + return LittleEndian64Size; + } + + /// + /// Computes the number of bytes that would be needed to encode a + /// fixed32 field, including the tag. + /// + public static int ComputeFixed32Size(uint value) + { + return LittleEndian32Size; + } + + /// + /// Computes the number of bytes that would be needed to encode a + /// bool field, including the tag. + /// + public static int ComputeBoolSize(bool value) + { + return 1; + } + + /// + /// Computes the number of bytes that would be needed to encode a + /// string field, including the tag. + /// + public static int ComputeStringSize(String value) + { + int byteArraySize = Utf8Encoding.GetByteCount(value); + return ComputeLengthSize(byteArraySize) + byteArraySize; + } + + /// + /// Computes the number of bytes that would be needed to encode a + /// group field, including the tag. + /// + public static int ComputeGroupSize(IMessage value) + { + return value.CalculateSize(); + } + + /// + /// Computes the number of bytes that would be needed to encode an + /// embedded message field, including the tag. + /// + public static int ComputeMessageSize(IMessage value) + { + int size = value.CalculateSize(); + return ComputeLengthSize(size) + size; + } + + /// + /// Computes the number of bytes that would be needed to encode a + /// bytes field, including the tag. + /// + public static int ComputeBytesSize(ByteString value) + { + return ComputeLengthSize(value.Length) + value.Length; + } + + /// + /// Computes the number of bytes that would be needed to encode a + /// uint32 field, including the tag. + /// + public static int ComputeUInt32Size(uint value) + { + return ComputeRawVarint32Size(value); + } + + /// + /// Computes the number of bytes that would be needed to encode a + /// enum field, including the tag. The caller is responsible for + /// converting the enum value to its numeric value. + /// + public static int ComputeEnumSize(int value) + { + // Currently just a pass-through, but it's nice to separate it logically. + return ComputeInt32Size(value); + } + + /// + /// Computes the number of bytes that would be needed to encode an + /// sfixed32 field, including the tag. + /// + public static int ComputeSFixed32Size(int value) + { + return LittleEndian32Size; + } + + /// + /// Computes the number of bytes that would be needed to encode an + /// sfixed64 field, including the tag. 
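The varint-backed helpers above (ComputeUInt64Size, ComputeInt64Size, ComputeInt32Size and friends) all reduce to ComputeRawVarint32Size/ComputeRawVarint64Size, which appear just below and count how many 7-bit groups a value occupies. A loop-based equivalent, offered as an illustrative cross-check rather than code from this patch:

using System;

static class VarintSizeDemo
{
    // Counts the 7-bit groups a varint needs; equivalent to the mask-based helpers below.
    public static int VarintSize(ulong value)
    {
        int size = 1;
        while (value >= 0x80)
        {
            size++;
            value >>= 7;
        }
        return size;
    }

    static void Main()
    {
        Console.WriteLine(VarintSize(127));            // 1
        Console.WriteLine(VarintSize(128));            // 2
        Console.WriteLine(VarintSize(300));            // 2
        Console.WriteLine(VarintSize(16384));          // 3
        Console.WriteLine(VarintSize(ulong.MaxValue)); // 10: the sign-extended form a negative
                                                       // int32/int64 takes, hence the fixed
                                                       // 10-byte case in ComputeInt32Size.
    }
}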
+ /// + public static int ComputeSFixed64Size(long value) + { + return LittleEndian64Size; + } + + /// + /// Computes the number of bytes that would be needed to encode an + /// sint32 field, including the tag. + /// + public static int ComputeSInt32Size(int value) + { + return ComputeRawVarint32Size(EncodeZigZag32(value)); + } + + /// + /// Computes the number of bytes that would be needed to encode an + /// sint64 field, including the tag. + /// + public static int ComputeSInt64Size(long value) + { + return ComputeRawVarint64Size(EncodeZigZag64(value)); + } + + /// + /// Computes the number of bytes that would be needed to encode a length, + /// as written by . + /// + public static int ComputeLengthSize(int length) + { + return ComputeRawVarint32Size((uint) length); + } + + /// + /// Computes the number of bytes that would be needed to encode a varint. + /// + public static int ComputeRawVarint32Size(uint value) + { + if ((value & (0xffffffff << 7)) == 0) + { + return 1; + } + if ((value & (0xffffffff << 14)) == 0) + { + return 2; + } + if ((value & (0xffffffff << 21)) == 0) + { + return 3; + } + if ((value & (0xffffffff << 28)) == 0) + { + return 4; + } + return 5; + } + + /// + /// Computes the number of bytes that would be needed to encode a varint. + /// + public static int ComputeRawVarint64Size(ulong value) + { + if ((value & (0xffffffffffffffffL << 7)) == 0) + { + return 1; + } + if ((value & (0xffffffffffffffffL << 14)) == 0) + { + return 2; + } + if ((value & (0xffffffffffffffffL << 21)) == 0) + { + return 3; + } + if ((value & (0xffffffffffffffffL << 28)) == 0) + { + return 4; + } + if ((value & (0xffffffffffffffffL << 35)) == 0) + { + return 5; + } + if ((value & (0xffffffffffffffffL << 42)) == 0) + { + return 6; + } + if ((value & (0xffffffffffffffffL << 49)) == 0) + { + return 7; + } + if ((value & (0xffffffffffffffffL << 56)) == 0) + { + return 8; + } + if ((value & (0xffffffffffffffffL << 63)) == 0) + { + return 9; + } + return 10; + } + + /// + /// Computes the number of bytes that would be needed to encode a tag. + /// + public static int ComputeTagSize(int fieldNumber) + { + return ComputeRawVarint32Size(WireFormat.MakeTag(fieldNumber, 0)); + } + } +} \ No newline at end of file diff --git a/packager/third_party/protobuf/csharp/src/Google.Protobuf/CodedOutputStream.cs b/packager/third_party/protobuf/csharp/src/Google.Protobuf/CodedOutputStream.cs new file mode 100644 index 0000000000..827f0398cf --- /dev/null +++ b/packager/third_party/protobuf/csharp/src/Google.Protobuf/CodedOutputStream.cs @@ -0,0 +1,761 @@ +#region Copyright notice and license +// Protocol Buffers - Google's data interchange format +// Copyright 2008 Google Inc. All rights reserved. +// https://developers.google.com/protocol-buffers/ +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. 
+// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +#endregion + +using Google.Protobuf.Collections; +using System; +using System.IO; +using System.Text; + +namespace Google.Protobuf +{ + /// + /// Encodes and writes protocol message fields. + /// + /// + /// + /// This class is generally used by generated code to write appropriate + /// primitives to the stream. It effectively encapsulates the lowest + /// levels of protocol buffer format. Unlike some other implementations, + /// this does not include combined "write tag and value" methods. Generated + /// code knows the exact byte representations of the tags they're going to write, + /// so there's no need to re-encode them each time. Manually-written code calling + /// this class should just call one of the WriteTag overloads before each value. + /// + /// + /// Repeated fields and map fields are not handled by this class; use RepeatedField<T> + /// and MapField<TKey, TValue> to serialize such fields. + /// + /// + public sealed partial class CodedOutputStream : IDisposable + { + // "Local" copy of Encoding.UTF8, for efficiency. (Yes, it makes a difference.) + internal static readonly Encoding Utf8Encoding = Encoding.UTF8; + + /// + /// The buffer size used by CreateInstance(Stream). + /// + public static readonly int DefaultBufferSize = 4096; + + private readonly bool leaveOpen; + private readonly byte[] buffer; + private readonly int limit; + private int position; + private readonly Stream output; + + #region Construction + /// + /// Creates a new CodedOutputStream that writes directly to the given + /// byte array. If more bytes are written than fit in the array, + /// OutOfSpaceException will be thrown. + /// + public CodedOutputStream(byte[] flatArray) : this(flatArray, 0, flatArray.Length) + { + } + + /// + /// Creates a new CodedOutputStream that writes directly to the given + /// byte array slice. If more bytes are written than fit in the array, + /// OutOfSpaceException will be thrown. + /// + private CodedOutputStream(byte[] buffer, int offset, int length) + { + this.output = null; + this.buffer = buffer; + this.position = offset; + this.limit = offset + length; + leaveOpen = true; // Simple way of avoiding trying to dispose of a null reference + } + + private CodedOutputStream(Stream output, byte[] buffer, bool leaveOpen) + { + this.output = ProtoPreconditions.CheckNotNull(output, nameof(output)); + this.buffer = buffer; + this.position = 0; + this.limit = buffer.Length; + this.leaveOpen = leaveOpen; + } + + /// + /// Creates a new which write to the given stream, and disposes of that + /// stream when the returned CodedOutputStream is disposed. + /// + /// The stream to write to. It will be disposed when the returned CodedOutputStream is disposed. 
+ public CodedOutputStream(Stream output) : this(output, DefaultBufferSize, false) + { + } + + /// + /// Creates a new CodedOutputStream which write to the given stream and uses + /// the specified buffer size. + /// + /// The stream to write to. It will be disposed when the returned CodedOutputStream is disposed. + /// The size of buffer to use internally. + public CodedOutputStream(Stream output, int bufferSize) : this(output, new byte[bufferSize], false) + { + } + + /// + /// Creates a new CodedOutputStream which write to the given stream. + /// + /// The stream to write to. + /// If true, is left open when the returned CodedOutputStream is disposed; + /// if false, the provided stream is disposed as well. + public CodedOutputStream(Stream output, bool leaveOpen) : this(output, DefaultBufferSize, leaveOpen) + { + } + + /// + /// Creates a new CodedOutputStream which write to the given stream and uses + /// the specified buffer size. + /// + /// The stream to write to. + /// The size of buffer to use internally. + /// If true, is left open when the returned CodedOutputStream is disposed; + /// if false, the provided stream is disposed as well. + public CodedOutputStream(Stream output, int bufferSize, bool leaveOpen) : this(output, new byte[bufferSize], leaveOpen) + { + } + #endregion + + /// + /// Returns the current position in the stream, or the position in the output buffer + /// + public long Position + { + get + { + if (output != null) + { + return output.Position + position; + } + return position; + } + } + + #region Writing of values (not including tags) + + /// + /// Writes a double field value, without a tag, to the stream. + /// + /// The value to write + public void WriteDouble(double value) + { + WriteRawLittleEndian64((ulong)BitConverter.DoubleToInt64Bits(value)); + } + + /// + /// Writes a float field value, without a tag, to the stream. + /// + /// The value to write + public void WriteFloat(float value) + { + byte[] rawBytes = BitConverter.GetBytes(value); + if (!BitConverter.IsLittleEndian) + { + ByteArray.Reverse(rawBytes); + } + + if (limit - position >= 4) + { + buffer[position++] = rawBytes[0]; + buffer[position++] = rawBytes[1]; + buffer[position++] = rawBytes[2]; + buffer[position++] = rawBytes[3]; + } + else + { + WriteRawBytes(rawBytes, 0, 4); + } + } + + /// + /// Writes a uint64 field value, without a tag, to the stream. + /// + /// The value to write + public void WriteUInt64(ulong value) + { + WriteRawVarint64(value); + } + + /// + /// Writes an int64 field value, without a tag, to the stream. + /// + /// The value to write + public void WriteInt64(long value) + { + WriteRawVarint64((ulong) value); + } + + /// + /// Writes an int32 field value, without a tag, to the stream. + /// + /// The value to write + public void WriteInt32(int value) + { + if (value >= 0) + { + WriteRawVarint32((uint) value); + } + else + { + // Must sign-extend. + WriteRawVarint64((ulong) value); + } + } + + /// + /// Writes a fixed64 field value, without a tag, to the stream. + /// + /// The value to write + public void WriteFixed64(ulong value) + { + WriteRawLittleEndian64(value); + } + + /// + /// Writes a fixed32 field value, without a tag, to the stream. + /// + /// The value to write + public void WriteFixed32(uint value) + { + WriteRawLittleEndian32(value); + } + + /// + /// Writes a bool field value, without a tag, to the stream. + /// + /// The value to write + public void WriteBool(bool value) + { + WriteRawByte(value ? 
(byte) 1 : (byte) 0); + } + + /// + /// Writes a string field value, without a tag, to the stream. + /// The data is length-prefixed. + /// + /// The value to write + public void WriteString(string value) + { + // Optimise the case where we have enough space to write + // the string directly to the buffer, which should be common. + int length = Utf8Encoding.GetByteCount(value); + WriteLength(length); + if (limit - position >= length) + { + if (length == value.Length) // Must be all ASCII... + { + for (int i = 0; i < length; i++) + { + buffer[position + i] = (byte)value[i]; + } + } + else + { + Utf8Encoding.GetBytes(value, 0, value.Length, buffer, position); + } + position += length; + } + else + { + byte[] bytes = Utf8Encoding.GetBytes(value); + WriteRawBytes(bytes); + } + } + + /// + /// Writes a message, without a tag, to the stream. + /// The data is length-prefixed. + /// + /// The value to write + public void WriteMessage(IMessage value) + { + WriteLength(value.CalculateSize()); + value.WriteTo(this); + } + + /// + /// Write a byte string, without a tag, to the stream. + /// The data is length-prefixed. + /// + /// The value to write + public void WriteBytes(ByteString value) + { + WriteLength(value.Length); + value.WriteRawBytesTo(this); + } + + /// + /// Writes a uint32 value, without a tag, to the stream. + /// + /// The value to write + public void WriteUInt32(uint value) + { + WriteRawVarint32(value); + } + + /// + /// Writes an enum value, without a tag, to the stream. + /// + /// The value to write + public void WriteEnum(int value) + { + WriteInt32(value); + } + + /// + /// Writes an sfixed32 value, without a tag, to the stream. + /// + /// The value to write. + public void WriteSFixed32(int value) + { + WriteRawLittleEndian32((uint) value); + } + + /// + /// Writes an sfixed64 value, without a tag, to the stream. + /// + /// The value to write + public void WriteSFixed64(long value) + { + WriteRawLittleEndian64((ulong) value); + } + + /// + /// Writes an sint32 value, without a tag, to the stream. + /// + /// The value to write + public void WriteSInt32(int value) + { + WriteRawVarint32(EncodeZigZag32(value)); + } + + /// + /// Writes an sint64 value, without a tag, to the stream. + /// + /// The value to write + public void WriteSInt64(long value) + { + WriteRawVarint64(EncodeZigZag64(value)); + } + + /// + /// Writes a length (in bytes) for length-delimited data. + /// + /// + /// This method simply writes a rawint, but exists for clarity in calling code. + /// + /// Length value, in bytes. + public void WriteLength(int length) + { + WriteRawVarint32((uint) length); + } + + #endregion + + #region Raw tag writing + /// + /// Encodes and writes a tag. + /// + /// The number of the field to write the tag for + /// The wire format type of the tag to write + public void WriteTag(int fieldNumber, WireFormat.WireType type) + { + WriteRawVarint32(WireFormat.MakeTag(fieldNumber, type)); + } + + /// + /// Writes an already-encoded tag. + /// + /// The encoded tag + public void WriteTag(uint tag) + { + WriteRawVarint32(tag); + } + + /// + /// Writes the given single-byte tag directly to the stream. + /// + /// The encoded tag + public void WriteRawTag(byte b1) + { + WriteRawByte(b1); + } + + /// + /// Writes the given two-byte tag directly to the stream. 
+ /// + /// The first byte of the encoded tag + /// The second byte of the encoded tag + public void WriteRawTag(byte b1, byte b2) + { + WriteRawByte(b1); + WriteRawByte(b2); + } + + /// + /// Writes the given three-byte tag directly to the stream. + /// + /// The first byte of the encoded tag + /// The second byte of the encoded tag + /// The third byte of the encoded tag + public void WriteRawTag(byte b1, byte b2, byte b3) + { + WriteRawByte(b1); + WriteRawByte(b2); + WriteRawByte(b3); + } + + /// + /// Writes the given four-byte tag directly to the stream. + /// + /// The first byte of the encoded tag + /// The second byte of the encoded tag + /// The third byte of the encoded tag + /// The fourth byte of the encoded tag + public void WriteRawTag(byte b1, byte b2, byte b3, byte b4) + { + WriteRawByte(b1); + WriteRawByte(b2); + WriteRawByte(b3); + WriteRawByte(b4); + } + + /// + /// Writes the given five-byte tag directly to the stream. + /// + /// The first byte of the encoded tag + /// The second byte of the encoded tag + /// The third byte of the encoded tag + /// The fourth byte of the encoded tag + /// The fifth byte of the encoded tag + public void WriteRawTag(byte b1, byte b2, byte b3, byte b4, byte b5) + { + WriteRawByte(b1); + WriteRawByte(b2); + WriteRawByte(b3); + WriteRawByte(b4); + WriteRawByte(b5); + } + #endregion + + #region Underlying writing primitives + /// + /// Writes a 32 bit value as a varint. The fast route is taken when + /// there's enough buffer space left to whizz through without checking + /// for each byte; otherwise, we resort to calling WriteRawByte each time. + /// + internal void WriteRawVarint32(uint value) + { + // Optimize for the common case of a single byte value + if (value < 128 && position < limit) + { + buffer[position++] = (byte)value; + return; + } + + while (value > 127 && position < limit) + { + buffer[position++] = (byte) ((value & 0x7F) | 0x80); + value >>= 7; + } + while (value > 127) + { + WriteRawByte((byte) ((value & 0x7F) | 0x80)); + value >>= 7; + } + if (position < limit) + { + buffer[position++] = (byte) value; + } + else + { + WriteRawByte((byte) value); + } + } + + internal void WriteRawVarint64(ulong value) + { + while (value > 127 && position < limit) + { + buffer[position++] = (byte) ((value & 0x7F) | 0x80); + value >>= 7; + } + while (value > 127) + { + WriteRawByte((byte) ((value & 0x7F) | 0x80)); + value >>= 7; + } + if (position < limit) + { + buffer[position++] = (byte) value; + } + else + { + WriteRawByte((byte) value); + } + } + + internal void WriteRawLittleEndian32(uint value) + { + if (position + 4 > limit) + { + WriteRawByte((byte) value); + WriteRawByte((byte) (value >> 8)); + WriteRawByte((byte) (value >> 16)); + WriteRawByte((byte) (value >> 24)); + } + else + { + buffer[position++] = ((byte) value); + buffer[position++] = ((byte) (value >> 8)); + buffer[position++] = ((byte) (value >> 16)); + buffer[position++] = ((byte) (value >> 24)); + } + } + + internal void WriteRawLittleEndian64(ulong value) + { + if (position + 8 > limit) + { + WriteRawByte((byte) value); + WriteRawByte((byte) (value >> 8)); + WriteRawByte((byte) (value >> 16)); + WriteRawByte((byte) (value >> 24)); + WriteRawByte((byte) (value >> 32)); + WriteRawByte((byte) (value >> 40)); + WriteRawByte((byte) (value >> 48)); + WriteRawByte((byte) (value >> 56)); + } + else + { + buffer[position++] = ((byte) value); + buffer[position++] = ((byte) (value >> 8)); + buffer[position++] = ((byte) (value >> 16)); + buffer[position++] = ((byte) (value >> 24)); 
+ buffer[position++] = ((byte) (value >> 32)); + buffer[position++] = ((byte) (value >> 40)); + buffer[position++] = ((byte) (value >> 48)); + buffer[position++] = ((byte) (value >> 56)); + } + } + + internal void WriteRawByte(byte value) + { + if (position == limit) + { + RefreshBuffer(); + } + + buffer[position++] = value; + } + + internal void WriteRawByte(uint value) + { + WriteRawByte((byte) value); + } + + /// + /// Writes out an array of bytes. + /// + internal void WriteRawBytes(byte[] value) + { + WriteRawBytes(value, 0, value.Length); + } + + /// + /// Writes out part of an array of bytes. + /// + internal void WriteRawBytes(byte[] value, int offset, int length) + { + if (limit - position >= length) + { + ByteArray.Copy(value, offset, buffer, position, length); + // We have room in the current buffer. + position += length; + } + else + { + // Write extends past current buffer. Fill the rest of this buffer and + // flush. + int bytesWritten = limit - position; + ByteArray.Copy(value, offset, buffer, position, bytesWritten); + offset += bytesWritten; + length -= bytesWritten; + position = limit; + RefreshBuffer(); + + // Now deal with the rest. + // Since we have an output stream, this is our buffer + // and buffer offset == 0 + if (length <= limit) + { + // Fits in new buffer. + ByteArray.Copy(value, offset, buffer, 0, length); + position = length; + } + else + { + // Write is very big. Let's do it all at once. + output.Write(value, offset, length); + } + } + } + + #endregion + + /// + /// Encode a 32-bit value with ZigZag encoding. + /// + /// + /// ZigZag encodes signed integers into values that can be efficiently + /// encoded with varint. (Otherwise, negative values must be + /// sign-extended to 64 bits to be varint encoded, thus always taking + /// 10 bytes on the wire.) + /// + internal static uint EncodeZigZag32(int n) + { + // Note: the right-shift must be arithmetic + return (uint) ((n << 1) ^ (n >> 31)); + } + + /// + /// Encode a 64-bit value with ZigZag encoding. + /// + /// + /// ZigZag encodes signed integers into values that can be efficiently + /// encoded with varint. (Otherwise, negative values must be + /// sign-extended to 64 bits to be varint encoded, thus always taking + /// 10 bytes on the wire.) + /// + internal static ulong EncodeZigZag64(long n) + { + return (ulong) ((n << 1) ^ (n >> 63)); + } + + private void RefreshBuffer() + { + if (output == null) + { + // We're writing to a single buffer. + throw new OutOfSpaceException(); + } + + // Since we have an output stream, this is our buffer + // and buffer offset == 0 + output.Write(buffer, 0, position); + position = 0; + } + + /// + /// Indicates that a CodedOutputStream wrapping a flat byte array + /// ran out of space. + /// + public sealed class OutOfSpaceException : IOException + { + internal OutOfSpaceException() + : base("CodedOutputStream was writing to a flat byte array and ran out of space.") + { + } + } + + /// + /// Flushes any buffered data and optionally closes the underlying stream, if any. + /// + /// + /// + /// By default, any underlying stream is closed by this method. To configure this behaviour, + /// use a constructor overload with a leaveOpen parameter. If this instance does not + /// have an underlying stream, this method does nothing. + /// + /// + /// For the sake of efficiency, calling this method does not prevent future write calls - but + /// if a later write ends up writing to a stream which has been disposed, that is likely to + /// fail. 
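Together with the Compute*Size helpers from the companion file, the flat-array constructor and OutOfSpaceException above support the pattern of allocating a buffer that is exactly the right size, writing into it, and then asserting that every byte was used via CheckNoSpaceLeft (just below). The following usage sketch is illustrative only; it assumes the WireFormat.WireType enum exposes a LengthDelimited member and that CodedInputStream offers a matching public byte-array constructor, ReadTag and ReadString, as used elsewhere in this patch.

using System;
using Google.Protobuf;

static class ExactSizeWriteDemo
{
    static void Main()
    {
        const int fieldNumber = 1;
        const string value = "hello";

        // Size the buffer exactly: tag + length-prefixed UTF-8 payload.
        int size = CodedOutputStream.ComputeTagSize(fieldNumber)
                 + CodedOutputStream.ComputeStringSize(value);
        byte[] buffer = new byte[size];

        var output = new CodedOutputStream(buffer);
        output.WriteTag(fieldNumber, WireFormat.WireType.LengthDelimited);
        output.WriteString(value);
        output.CheckNoSpaceLeft();                // throws if the estimate and the writes disagree

        // Read it back with the matching input stream.
        var input = new CodedInputStream(buffer);
        Console.WriteLine(input.ReadTag());       // 10, i.e. (1 << 3) | 2: field 1, length-delimited
        Console.WriteLine(input.ReadString());    // hello
    }
}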
It is recommend that you not call any other methods after this. + /// + /// + public void Dispose() + { + Flush(); + if (!leaveOpen) + { + output.Dispose(); + } + } + + /// + /// Flushes any buffered data to the underlying stream (if there is one). + /// + public void Flush() + { + if (output != null) + { + RefreshBuffer(); + } + } + + /// + /// Verifies that SpaceLeft returns zero. It's common to create a byte array + /// that is exactly big enough to hold a message, then write to it with + /// a CodedOutputStream. Calling CheckNoSpaceLeft after writing verifies that + /// the message was actually as big as expected, which can help bugs. + /// + public void CheckNoSpaceLeft() + { + if (SpaceLeft != 0) + { + throw new InvalidOperationException("Did not write as much data as expected."); + } + } + + /// + /// If writing to a flat array, returns the space left in the array. Otherwise, + /// throws an InvalidOperationException. + /// + public int SpaceLeft + { + get + { + if (output == null) + { + return limit - position; + } + else + { + throw new InvalidOperationException( + "SpaceLeft can only be called on CodedOutputStreams that are " + + "writing to a flat array."); + } + } + } + } +} diff --git a/packager/third_party/protobuf/csharp/src/Google.Protobuf/Collections/MapField.cs b/packager/third_party/protobuf/csharp/src/Google.Protobuf/Collections/MapField.cs new file mode 100644 index 0000000000..993a89d776 --- /dev/null +++ b/packager/third_party/protobuf/csharp/src/Google.Protobuf/Collections/MapField.cs @@ -0,0 +1,760 @@ +#region Copyright notice and license +// Protocol Buffers - Google's data interchange format +// Copyright 2015 Google Inc. All rights reserved. +// https://developers.google.com/protocol-buffers/ +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
+#endregion + +using Google.Protobuf.Reflection; +using System; +using System.Collections; +using System.Collections.Generic; +using System.IO; +using System.Linq; +using System.Text; +using Google.Protobuf.Compatibility; + +namespace Google.Protobuf.Collections +{ + /// + /// Representation of a map field in a Protocol Buffer message. + /// + /// Key type in the map. Must be a type supported by Protocol Buffer map keys. + /// Value type in the map. Must be a type supported by Protocol Buffers. + /// + /// + /// This implementation preserves insertion order for simplicity of testing + /// code using maps fields. Overwriting an existing entry does not change the + /// position of that entry within the map. Equality is not order-sensitive. + /// For string keys, the equality comparison is provided by . + /// + /// + /// Null values are not permitted in the map, either for wrapper types or regular messages. + /// If a map is deserialized from a data stream and the value is missing from an entry, a default value + /// is created instead. For primitive types, that is the regular default value (0, the empty string and so + /// on); for message types, an empty instance of the message is created, as if the map entry contained a 0-length + /// encoded value for the field. + /// + /// + /// This implementation does not generally prohibit the use of key/value types which are not + /// supported by Protocol Buffers (e.g. using a key type of byte) but nor does it guarantee + /// that all operations will work in such cases. + /// + /// + public sealed class MapField : IDeepCloneable>, IDictionary, IEquatable>, IDictionary + { + // TODO: Don't create the map/list until we have an entry. (Assume many maps will be empty.) + private readonly Dictionary>> map = + new Dictionary>>(); + private readonly LinkedList> list = new LinkedList>(); + + /// + /// Creates a deep clone of this object. + /// + /// + /// A deep clone of this object. + /// + public MapField Clone() + { + var clone = new MapField(); + // Keys are never cloneable. Values might be. + if (typeof(IDeepCloneable).IsAssignableFrom(typeof(TValue))) + { + foreach (var pair in list) + { + clone.Add(pair.Key, ((IDeepCloneable)pair.Value).Clone()); + } + } + else + { + // Nothing is cloneable, so we don't need to worry. + clone.Add(this); + } + return clone; + } + + /// + /// Adds the specified key/value pair to the map. + /// + /// + /// This operation fails if the key already exists in the map. To replace an existing entry, use the indexer. + /// + /// The key to add + /// The value to add. + /// The given key already exists in map. + public void Add(TKey key, TValue value) + { + // Validation of arguments happens in ContainsKey and the indexer + if (ContainsKey(key)) + { + throw new ArgumentException("Key already exists in map", "key"); + } + this[key] = value; + } + + /// + /// Determines whether the specified key is present in the map. + /// + /// The key to check. + /// true if the map contains the given key; false otherwise. + public bool ContainsKey(TKey key) + { + ProtoPreconditions.CheckNotNullUnconstrained(key, "key"); + return map.ContainsKey(key); + } + + private bool ContainsValue(TValue value) + { + var comparer = EqualityComparer.Default; + return list.Any(pair => comparer.Equals(pair.Value, value)); + } + + /// + /// Removes the entry identified by the given key from the map. + /// + /// The key indicating the entry to remove from the map. + /// true if the map contained the given key before the entry was removed; false otherwise. 
+ public bool Remove(TKey key) + { + ProtoPreconditions.CheckNotNullUnconstrained(key, "key"); + LinkedListNode> node; + if (map.TryGetValue(key, out node)) + { + map.Remove(key); + node.List.Remove(node); + return true; + } + else + { + return false; + } + } + + /// + /// Gets the value associated with the specified key. + /// + /// The key whose value to get. + /// When this method returns, the value associated with the specified key, if the key is found; + /// otherwise, the default value for the type of the parameter. + /// This parameter is passed uninitialized. + /// true if the map contains an element with the specified key; otherwise, false. + public bool TryGetValue(TKey key, out TValue value) + { + LinkedListNode> node; + if (map.TryGetValue(key, out node)) + { + value = node.Value.Value; + return true; + } + else + { + value = default(TValue); + return false; + } + } + + /// + /// Gets or sets the value associated with the specified key. + /// + /// The key of the value to get or set. + /// The property is retrieved and key does not exist in the collection. + /// The value associated with the specified key. If the specified key is not found, + /// a get operation throws a , and a set operation creates a new element with the specified key. + public TValue this[TKey key] + { + get + { + ProtoPreconditions.CheckNotNullUnconstrained(key, "key"); + TValue value; + if (TryGetValue(key, out value)) + { + return value; + } + throw new KeyNotFoundException(); + } + set + { + ProtoPreconditions.CheckNotNullUnconstrained(key, "key"); + // value == null check here is redundant, but avoids boxing. + if (value == null) + { + ProtoPreconditions.CheckNotNullUnconstrained(value, "value"); + } + LinkedListNode> node; + var pair = new KeyValuePair(key, value); + if (map.TryGetValue(key, out node)) + { + node.Value = pair; + } + else + { + node = list.AddLast(pair); + map[key] = node; + } + } + } + + /// + /// Gets a collection containing the keys in the map. + /// + public ICollection Keys { get { return new MapView(this, pair => pair.Key, ContainsKey); } } + + /// + /// Gets a collection containing the values in the map. + /// + public ICollection Values { get { return new MapView(this, pair => pair.Value, ContainsValue); } } + + /// + /// Adds the specified entries to the map. The keys and values are not automatically cloned. + /// + /// The entries to add to the map. + public void Add(IDictionary entries) + { + ProtoPreconditions.CheckNotNull(entries, "entries"); + foreach (var pair in entries) + { + Add(pair.Key, pair.Value); + } + } + + /// + /// Returns an enumerator that iterates through the collection. + /// + /// + /// An enumerator that can be used to iterate through the collection. + /// + public IEnumerator> GetEnumerator() + { + return list.GetEnumerator(); + } + + /// + /// Returns an enumerator that iterates through a collection. + /// + /// + /// An object that can be used to iterate through the collection. + /// + IEnumerator IEnumerable.GetEnumerator() + { + return GetEnumerator(); + } + + /// + /// Adds the specified item to the map. + /// + /// The item to add to the map. + void ICollection>.Add(KeyValuePair item) + { + Add(item.Key, item.Value); + } + + /// + /// Removes all items from the map. + /// + public void Clear() + { + list.Clear(); + map.Clear(); + } + + /// + /// Determines whether map contains an entry equivalent to the given key/value pair. + /// + /// The key/value pair to find. 
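The dictionary surface above behaves like a standard IDictionary with two protobuf-specific twists called out in the class remarks: insertion order is preserved (and overwriting a key keeps its position), and null keys or values are rejected by the precondition checks. A brief usage sketch, illustrative only and using string keys and values for simplicity:

using System;
using Google.Protobuf.Collections;

static class MapFieldDemo
{
    static void Main()
    {
        var map = new MapField<string, string>();
        map.Add("b", "2");
        map["a"] = "1";          // the indexer adds when the key is absent
        map["b"] = "20";         // overwrite: the value changes, the iteration position does not

        foreach (var pair in map)
        {
            Console.WriteLine(pair.Key + "=" + pair.Value);   // b=20 then a=1 (insertion order)
        }

        string value;
        Console.WriteLine(map.TryGetValue("a", out value));   // True
        Console.WriteLine(map.ContainsKey("c"));              // False

        try
        {
            map.Add("c", null);                               // null values are not permitted
        }
        catch (ArgumentException)
        {
            Console.WriteLine("null rejected");               // raised by the indexer's null check above
        }
    }
}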
+ /// + bool ICollection>.Contains(KeyValuePair item) + { + TValue value; + return TryGetValue(item.Key, out value) + && EqualityComparer.Default.Equals(item.Value, value); + } + + /// + /// Copies the key/value pairs in this map to an array. + /// + /// The array to copy the entries into. + /// The index of the array at which to start copying values. + void ICollection>.CopyTo(KeyValuePair[] array, int arrayIndex) + { + list.CopyTo(array, arrayIndex); + } + + /// + /// Removes the specified key/value pair from the map. + /// + /// Both the key and the value must be found for the entry to be removed. + /// The key/value pair to remove. + /// true if the key/value pair was found and removed; false otherwise. + bool ICollection>.Remove(KeyValuePair item) + { + if (item.Key == null) + { + throw new ArgumentException("Key is null", "item"); + } + LinkedListNode> node; + if (map.TryGetValue(item.Key, out node) && + EqualityComparer.Default.Equals(item.Value, node.Value.Value)) + { + map.Remove(item.Key); + node.List.Remove(node); + return true; + } + else + { + return false; + } + } + + /// + /// Gets the number of elements contained in the map. + /// + public int Count { get { return list.Count; } } + + /// + /// Gets a value indicating whether the map is read-only. + /// + public bool IsReadOnly { get { return false; } } + + /// + /// Determines whether the specified , is equal to this instance. + /// + /// The to compare with this instance. + /// + /// true if the specified is equal to this instance; otherwise, false. + /// + public override bool Equals(object other) + { + return Equals(other as MapField); + } + + /// + /// Returns a hash code for this instance. + /// + /// + /// A hash code for this instance, suitable for use in hashing algorithms and data structures like a hash table. + /// + public override int GetHashCode() + { + var valueComparer = EqualityComparer.Default; + int hash = 0; + foreach (var pair in list) + { + hash ^= pair.Key.GetHashCode() * 31 + valueComparer.GetHashCode(pair.Value); + } + return hash; + } + + /// + /// Compares this map with another for equality. + /// + /// + /// The order of the key/value pairs in the maps is not deemed significant in this comparison. + /// + /// The map to compare this with. + /// true if refers to an equal map; false otherwise. + public bool Equals(MapField other) + { + if (other == null) + { + return false; + } + if (other == this) + { + return true; + } + if (other.Count != this.Count) + { + return false; + } + var valueComparer = EqualityComparer.Default; + foreach (var pair in this) + { + TValue value; + if (!other.TryGetValue(pair.Key, out value)) + { + return false; + } + if (!valueComparer.Equals(value, pair.Value)) + { + return false; + } + } + return true; + } + + /// + /// Adds entries to the map from the given stream. + /// + /// + /// It is assumed that the stream is initially positioned after the tag specified by the codec. + /// This method will continue reading entries from the stream until the end is reached, or + /// a different tag is encountered. 
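Equals and GetHashCode above are deliberately order-insensitive: the hash XORs a per-pair value, and Equals only requires that every pair in one map appear in the other with an equal value. Two maps built in different orders therefore compare equal, as this illustrative sketch shows:

using System;
using Google.Protobuf.Collections;

static class MapEqualityDemo
{
    static void Main()
    {
        var first = new MapField<string, int> { { "a", 1 }, { "b", 2 } };
        var second = new MapField<string, int> { { "b", 2 }, { "a", 1 } };

        Console.WriteLine(first.Equals(second));                         // True: same pairs, different insertion order
        Console.WriteLine(first.GetHashCode() == second.GetHashCode());  // True: the XOR-based hash ignores order
    }
}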
+ /// + /// Stream to read from + /// Codec describing how the key/value pairs are encoded + public void AddEntriesFrom(CodedInputStream input, Codec codec) + { + var adapter = new Codec.MessageAdapter(codec); + do + { + adapter.Reset(); + input.ReadMessage(adapter); + this[adapter.Key] = adapter.Value; + } while (input.MaybeConsumeTag(codec.MapTag)); + } + + /// + /// Writes the contents of this map to the given coded output stream, using the specified codec + /// to encode each entry. + /// + /// The output stream to write to. + /// The codec to use for each entry. + public void WriteTo(CodedOutputStream output, Codec codec) + { + var message = new Codec.MessageAdapter(codec); + foreach (var entry in list) + { + message.Key = entry.Key; + message.Value = entry.Value; + output.WriteTag(codec.MapTag); + output.WriteMessage(message); + } + } + + /// + /// Calculates the size of this map based on the given entry codec. + /// + /// The codec to use to encode each entry. + /// + public int CalculateSize(Codec codec) + { + if (Count == 0) + { + return 0; + } + var message = new Codec.MessageAdapter(codec); + int size = 0; + foreach (var entry in list) + { + message.Key = entry.Key; + message.Value = entry.Value; + size += CodedOutputStream.ComputeRawVarint32Size(codec.MapTag); + size += CodedOutputStream.ComputeMessageSize(message); + } + return size; + } + + /// + /// Returns a string representation of this repeated field, in the same + /// way as it would be represented by the default JSON formatter. + /// + public override string ToString() + { + var writer = new StringWriter(); + JsonFormatter.Default.WriteDictionary(writer, this); + return writer.ToString(); + } + + #region IDictionary explicit interface implementation + void IDictionary.Add(object key, object value) + { + Add((TKey)key, (TValue)value); + } + + bool IDictionary.Contains(object key) + { + if (!(key is TKey)) + { + return false; + } + return ContainsKey((TKey)key); + } + + IDictionaryEnumerator IDictionary.GetEnumerator() + { + return new DictionaryEnumerator(GetEnumerator()); + } + + void IDictionary.Remove(object key) + { + ProtoPreconditions.CheckNotNull(key, "key"); + if (!(key is TKey)) + { + return; + } + Remove((TKey)key); + } + + void ICollection.CopyTo(Array array, int index) + { + // This is ugly and slow as heck, but with any luck it will never be used anyway. 
+ ICollection temp = this.Select(pair => new DictionaryEntry(pair.Key, pair.Value)).ToList(); + temp.CopyTo(array, index); + } + + bool IDictionary.IsFixedSize { get { return false; } } + + ICollection IDictionary.Keys { get { return (ICollection)Keys; } } + + ICollection IDictionary.Values { get { return (ICollection)Values; } } + + bool ICollection.IsSynchronized { get { return false; } } + + object ICollection.SyncRoot { get { return this; } } + + object IDictionary.this[object key] + { + get + { + ProtoPreconditions.CheckNotNull(key, "key"); + if (!(key is TKey)) + { + return null; + } + TValue value; + TryGetValue((TKey)key, out value); + return value; + } + + set + { + this[(TKey)key] = (TValue)value; + } + } + #endregion + + private class DictionaryEnumerator : IDictionaryEnumerator + { + private readonly IEnumerator> enumerator; + + internal DictionaryEnumerator(IEnumerator> enumerator) + { + this.enumerator = enumerator; + } + + public bool MoveNext() + { + return enumerator.MoveNext(); + } + + public void Reset() + { + enumerator.Reset(); + } + + public object Current { get { return Entry; } } + public DictionaryEntry Entry { get { return new DictionaryEntry(Key, Value); } } + public object Key { get { return enumerator.Current.Key; } } + public object Value { get { return enumerator.Current.Value; } } + } + + /// + /// A codec for a specific map field. This contains all the information required to encode and + /// decode the nested messages. + /// + public sealed class Codec + { + private readonly FieldCodec keyCodec; + private readonly FieldCodec valueCodec; + private readonly uint mapTag; + + /// + /// Creates a new entry codec based on a separate key codec and value codec, + /// and the tag to use for each map entry. + /// + /// The key codec. + /// The value codec. + /// The map tag to use to introduce each map entry. + public Codec(FieldCodec keyCodec, FieldCodec valueCodec, uint mapTag) + { + this.keyCodec = keyCodec; + this.valueCodec = valueCodec; + this.mapTag = mapTag; + } + + /// + /// The tag used in the enclosing message to indicate map entries. + /// + internal uint MapTag { get { return mapTag; } } + + /// + /// A mutable message class, used for parsing and serializing. This + /// delegates the work to a codec, but implements the interface + /// for interop with and . + /// This is nested inside Codec as it's tightly coupled to the associated codec, + /// and it's simpler if it has direct access to all its fields. + /// + internal class MessageAdapter : IMessage + { + private static readonly byte[] ZeroLengthMessageStreamData = new byte[] { 0 }; + + private readonly Codec codec; + internal TKey Key { get; set; } + internal TValue Value { get; set; } + + internal MessageAdapter(Codec codec) + { + this.codec = codec; + } + + internal void Reset() + { + Key = codec.keyCodec.DefaultValue; + Value = codec.valueCodec.DefaultValue; + } + + public void MergeFrom(CodedInputStream input) + { + uint tag; + while ((tag = input.ReadTag()) != 0) + { + if (tag == codec.keyCodec.Tag) + { + Key = codec.keyCodec.Read(input); + } + else if (tag == codec.valueCodec.Tag) + { + Value = codec.valueCodec.Read(input); + } + else + { + input.SkipLastField(); + } + } + + // Corner case: a map entry with a key but no value, where the value type is a message. + // Read it as if we'd seen an input stream with no data (i.e. create a "default" message). 
+ if (Value == null) + { + Value = codec.valueCodec.Read(new CodedInputStream(ZeroLengthMessageStreamData)); + } + } + + public void WriteTo(CodedOutputStream output) + { + codec.keyCodec.WriteTagAndValue(output, Key); + codec.valueCodec.WriteTagAndValue(output, Value); + } + + public int CalculateSize() + { + return codec.keyCodec.CalculateSizeWithTag(Key) + codec.valueCodec.CalculateSizeWithTag(Value); + } + + MessageDescriptor IMessage.Descriptor { get { return null; } } + } + } + + private class MapView : ICollection, ICollection + { + private readonly MapField parent; + private readonly Func, T> projection; + private readonly Func containsCheck; + + internal MapView( + MapField parent, + Func, T> projection, + Func containsCheck) + { + this.parent = parent; + this.projection = projection; + this.containsCheck = containsCheck; + } + + public int Count { get { return parent.Count; } } + + public bool IsReadOnly { get { return true; } } + + public bool IsSynchronized { get { return false; } } + + public object SyncRoot { get { return parent; } } + + public void Add(T item) + { + throw new NotSupportedException(); + } + + public void Clear() + { + throw new NotSupportedException(); + } + + public bool Contains(T item) + { + return containsCheck(item); + } + + public void CopyTo(T[] array, int arrayIndex) + { + if (arrayIndex < 0) + { + throw new ArgumentOutOfRangeException("arrayIndex"); + } + if (arrayIndex + Count >= array.Length) + { + throw new ArgumentException("Not enough space in the array", "array"); + } + foreach (var item in this) + { + array[arrayIndex++] = item; + } + } + + public IEnumerator GetEnumerator() + { + return parent.list.Select(projection).GetEnumerator(); + } + + public bool Remove(T item) + { + throw new NotSupportedException(); + } + + IEnumerator IEnumerable.GetEnumerator() + { + return GetEnumerator(); + } + + public void CopyTo(Array array, int index) + { + if (index < 0) + { + throw new ArgumentOutOfRangeException("index"); + } + if (index + Count >= array.Length) + { + throw new ArgumentException("Not enough space in the array", "array"); + } + foreach (var item in this) + { + array.SetValue(item, index++); + } + } + } + } +} diff --git a/packager/third_party/protobuf/csharp/src/Google.Protobuf/Collections/ReadOnlyDictionary.cs b/packager/third_party/protobuf/csharp/src/Google.Protobuf/Collections/ReadOnlyDictionary.cs new file mode 100644 index 0000000000..28530a29a8 --- /dev/null +++ b/packager/third_party/protobuf/csharp/src/Google.Protobuf/Collections/ReadOnlyDictionary.cs @@ -0,0 +1,147 @@ +#region Copyright notice and license +// Protocol Buffers - Google's data interchange format +// Copyright 2008 Google Inc. All rights reserved. +// https://developers.google.com/protocol-buffers/ +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. 
+// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +#endregion + +using System; +using System.Collections; +using System.Collections.Generic; + +namespace Google.Protobuf.Collections +{ + /// + /// Read-only wrapper around another dictionary. + /// + internal sealed class ReadOnlyDictionary : IDictionary + { + private readonly IDictionary wrapped; + + public ReadOnlyDictionary(IDictionary wrapped) + { + this.wrapped = wrapped; + } + + public void Add(TKey key, TValue value) + { + throw new InvalidOperationException(); + } + + public bool ContainsKey(TKey key) + { + return wrapped.ContainsKey(key); + } + + public ICollection Keys + { + get { return wrapped.Keys; } + } + + public bool Remove(TKey key) + { + throw new InvalidOperationException(); + } + + public bool TryGetValue(TKey key, out TValue value) + { + return wrapped.TryGetValue(key, out value); + } + + public ICollection Values + { + get { return wrapped.Values; } + } + + public TValue this[TKey key] + { + get { return wrapped[key]; } + set { throw new InvalidOperationException(); } + } + + public void Add(KeyValuePair item) + { + throw new InvalidOperationException(); + } + + public void Clear() + { + throw new InvalidOperationException(); + } + + public bool Contains(KeyValuePair item) + { + return wrapped.Contains(item); + } + + public void CopyTo(KeyValuePair[] array, int arrayIndex) + { + wrapped.CopyTo(array, arrayIndex); + } + + public int Count + { + get { return wrapped.Count; } + } + + public bool IsReadOnly + { + get { return true; } + } + + public bool Remove(KeyValuePair item) + { + throw new InvalidOperationException(); + } + + public IEnumerator> GetEnumerator() + { + return wrapped.GetEnumerator(); + } + + IEnumerator IEnumerable.GetEnumerator() + { + return ((IEnumerable) wrapped).GetEnumerator(); + } + + public override bool Equals(object obj) + { + return wrapped.Equals(obj); + } + + public override int GetHashCode() + { + return wrapped.GetHashCode(); + } + + public override string ToString() + { + return wrapped.ToString(); + } + } +} \ No newline at end of file diff --git a/packager/third_party/protobuf/csharp/src/Google.Protobuf/Collections/RepeatedField.cs b/packager/third_party/protobuf/csharp/src/Google.Protobuf/Collections/RepeatedField.cs new file mode 100644 index 0000000000..d1db856c9c --- /dev/null +++ b/packager/third_party/protobuf/csharp/src/Google.Protobuf/Collections/RepeatedField.cs @@ -0,0 +1,568 @@ +#region Copyright notice and license +// Protocol Buffers - Google's data interchange format +// Copyright 2015 Google Inc. All rights reserved. 
+// https://developers.google.com/protocol-buffers/ +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +#endregion + +using System; +using System.Collections; +using System.Collections.Generic; +using System.IO; +using System.Text; + +namespace Google.Protobuf.Collections +{ + /// + /// The contents of a repeated field: essentially, a collection with some extra + /// restrictions (no null values) and capabilities (deep cloning). + /// + /// + /// This implementation does not generally prohibit the use of types which are not + /// supported by Protocol Buffers but nor does it guarantee that all operations will work in such cases. + /// + /// The element type of the repeated field. + public sealed class RepeatedField : IList, IList, IDeepCloneable>, IEquatable> + { + private static readonly T[] EmptyArray = new T[0]; + private const int MinArraySize = 8; + + private T[] array = EmptyArray; + private int count = 0; + + /// + /// Creates a deep clone of this repeated field. + /// + /// + /// If the field type is + /// a message type, each element is also cloned; otherwise, it is + /// assumed that the field type is primitive (including string and + /// bytes, both of which are immutable) and so a simple copy is + /// equivalent to a deep clone. + /// + /// A deep clone of this repeated field. + public RepeatedField Clone() + { + RepeatedField clone = new RepeatedField(); + if (array != EmptyArray) + { + clone.array = (T[])array.Clone(); + IDeepCloneable[] cloneableArray = clone.array as IDeepCloneable[]; + if (cloneableArray != null) + { + for (int i = 0; i < count; i++) + { + clone.array[i] = cloneableArray[i].Clone(); + } + } + } + clone.count = count; + return clone; + } + + /// + /// Adds the entries from the given input stream, decoding them with the specified codec. + /// + /// The input stream to read from. + /// The codec to use in order to read each entry. + public void AddEntriesFrom(CodedInputStream input, FieldCodec codec) + { + // TODO: Inline some of the Add code, so we can avoid checking the size on every + // iteration. 
+ uint tag = input.LastTag; + var reader = codec.ValueReader; + // Non-nullable value types can be packed or not. + if (FieldCodec.IsPackedRepeatedField(tag)) + { + int length = input.ReadLength(); + if (length > 0) + { + int oldLimit = input.PushLimit(length); + while (!input.ReachedLimit) + { + Add(reader(input)); + } + input.PopLimit(oldLimit); + } + // Empty packed field. Odd, but valid - just ignore. + } + else + { + // Not packed... (possibly not packable) + do + { + Add(reader(input)); + } while (input.MaybeConsumeTag(tag)); + } + } + + /// + /// Calculates the size of this collection based on the given codec. + /// + /// The codec to use when encoding each field. + /// The number of bytes that would be written to a by , + /// using the same codec. + public int CalculateSize(FieldCodec codec) + { + if (count == 0) + { + return 0; + } + uint tag = codec.Tag; + if (codec.PackedRepeatedField) + { + int dataSize = CalculatePackedDataSize(codec); + return CodedOutputStream.ComputeRawVarint32Size(tag) + + CodedOutputStream.ComputeLengthSize(dataSize) + + dataSize; + } + else + { + var sizeCalculator = codec.ValueSizeCalculator; + int size = count * CodedOutputStream.ComputeRawVarint32Size(tag); + for (int i = 0; i < count; i++) + { + size += sizeCalculator(array[i]); + } + return size; + } + } + + private int CalculatePackedDataSize(FieldCodec codec) + { + int fixedSize = codec.FixedSize; + if (fixedSize == 0) + { + var calculator = codec.ValueSizeCalculator; + int tmp = 0; + for (int i = 0; i < count; i++) + { + tmp += calculator(array[i]); + } + return tmp; + } + else + { + return fixedSize * Count; + } + } + + /// + /// Writes the contents of this collection to the given , + /// encoding each value using the specified codec. + /// + /// The output stream to write to. + /// The codec to use when encoding each value. + public void WriteTo(CodedOutputStream output, FieldCodec codec) + { + if (count == 0) + { + return; + } + var writer = codec.ValueWriter; + var tag = codec.Tag; + if (codec.PackedRepeatedField) + { + // Packed primitive type + uint size = (uint)CalculatePackedDataSize(codec); + output.WriteTag(tag); + output.WriteRawVarint32(size); + for (int i = 0; i < count; i++) + { + writer(output, array[i]); + } + } + else + { + // Not packed: a simple tag/value pair for each value. + // Can't use codec.WriteTagAndValue, as that omits default values. + for (int i = 0; i < count; i++) + { + output.WriteTag(tag); + writer(output, array[i]); + } + } + } + + private void EnsureSize(int size) + { + if (array.Length < size) + { + size = Math.Max(size, MinArraySize); + int newSize = Math.Max(array.Length * 2, size); + var tmp = new T[newSize]; + Array.Copy(array, 0, tmp, 0, array.Length); + array = tmp; + } + } + + /// + /// Adds the specified item to the collection. + /// + /// The item to add. + public void Add(T item) + { + if (item == null) + { + throw new ArgumentNullException("item"); + } + EnsureSize(count + 1); + array[count++] = item; + } + + /// + /// Removes all items from the collection. + /// + public void Clear() + { + array = EmptyArray; + count = 0; + } + + /// + /// Determines whether this collection contains the given item. + /// + /// The item to find. + /// true if this collection contains the given item; false otherwise. + public bool Contains(T item) + { + return IndexOf(item) != -1; + } + + /// + /// Copies this collection to the given array. + /// + /// The array to copy to. + /// The first index of the array to copy to. 
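// A short sketch of the packed/unpacked distinction handled above: identical values, with
// only the wire type encoded in the tag differing (field number 1 is an arbitrary choice
// for this illustration).
// Requires: using Google.Protobuf; using Google.Protobuf.Collections;
var values = new RepeatedField<int> { 1, 2, 300 };
var packedCodec = FieldCodec.ForInt32(10);   // field 1, wire type 2 (length-delimited) -> packed
var unpackedCodec = FieldCodec.ForInt32(8);  // field 1, wire type 0 (varint) -> tag per element
int packedSize = values.CalculateSize(packedCodec);     // 6: tag + length prefix + 4 varint bytes
int unpackedSize = values.CalculateSize(unpackedCodec); // 7: a tag byte before each varint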
+ public void CopyTo(T[] array, int arrayIndex) + { + Array.Copy(this.array, 0, array, arrayIndex, count); + } + + /// + /// Removes the specified item from the collection + /// + /// The item to remove. + /// true if the item was found and removed; false otherwise. + public bool Remove(T item) + { + int index = IndexOf(item); + if (index == -1) + { + return false; + } + Array.Copy(array, index + 1, array, index, count - index - 1); + count--; + array[count] = default(T); + return true; + } + + /// + /// Gets the number of elements contained in the collection. + /// + public int Count { get { return count; } } + + /// + /// Gets a value indicating whether the collection is read-only. + /// + public bool IsReadOnly { get { return false; } } + + // TODO: Remove this overload and just handle it in the one below, at execution time? + + /// + /// Adds all of the specified values into this collection. + /// + /// The values to add to this collection. + public void Add(RepeatedField values) + { + if (values == null) + { + throw new ArgumentNullException("values"); + } + EnsureSize(count + values.count); + // We know that all the values will be valid, because it's a RepeatedField. + Array.Copy(values.array, 0, array, count, values.count); + count += values.count; + } + + /// + /// Adds all of the specified values into this collection. + /// + /// The values to add to this collection. + public void Add(IEnumerable values) + { + if (values == null) + { + throw new ArgumentNullException("values"); + } + // TODO: Check for ICollection and get the Count, to optimize? + foreach (T item in values) + { + Add(item); + } + } + + /// + /// Returns an enumerator that iterates through the collection. + /// + /// + /// An enumerator that can be used to iterate through the collection. + /// + public IEnumerator GetEnumerator() + { + for (int i = 0; i < count; i++) + { + yield return array[i]; + } + } + + /// + /// Determines whether the specified , is equal to this instance. + /// + /// The to compare with this instance. + /// + /// true if the specified is equal to this instance; otherwise, false. + /// + public override bool Equals(object obj) + { + return Equals(obj as RepeatedField); + } + + /// + /// Returns an enumerator that iterates through a collection. + /// + /// + /// An object that can be used to iterate through the collection. + /// + IEnumerator IEnumerable.GetEnumerator() + { + return GetEnumerator(); + } + + /// + /// Returns a hash code for this instance. + /// + /// + /// A hash code for this instance, suitable for use in hashing algorithms and data structures like a hash table. + /// + public override int GetHashCode() + { + int hash = 0; + for (int i = 0; i < count; i++) + { + hash = hash * 31 + array[i].GetHashCode(); + } + return hash; + } + + /// + /// Compares this repeated field with another for equality. + /// + /// The repeated field to compare this with. + /// true if refers to an equal repeated field; false otherwise. + public bool Equals(RepeatedField other) + { + if (ReferenceEquals(other, null)) + { + return false; + } + if (ReferenceEquals(other, this)) + { + return true; + } + if (other.Count != this.Count) + { + return false; + } + EqualityComparer comparer = EqualityComparer.Default; + for (int i = 0; i < count; i++) + { + if (!comparer.Equals(array[i], other.array[i])) + { + return false; + } + } + return true; + } + + /// + /// Returns the index of the given item within the collection, or -1 if the item is not + /// present. + /// + /// The item to find in the collection. 
+ /// The zero-based index of the item, or -1 if it is not found. + public int IndexOf(T item) + { + if (item == null) + { + throw new ArgumentNullException("item"); + } + EqualityComparer comparer = EqualityComparer.Default; + for (int i = 0; i < count; i++) + { + if (comparer.Equals(array[i], item)) + { + return i; + } + } + return -1; + } + + /// + /// Inserts the given item at the specified index. + /// + /// The index at which to insert the item. + /// The item to insert. + public void Insert(int index, T item) + { + if (item == null) + { + throw new ArgumentNullException("item"); + } + if (index < 0 || index > count) + { + throw new ArgumentOutOfRangeException("index"); + } + EnsureSize(count + 1); + Array.Copy(array, index, array, index + 1, count - index); + array[index] = item; + count++; + } + + /// + /// Removes the item at the given index. + /// + /// The zero-based index of the item to remove. + public void RemoveAt(int index) + { + if (index < 0 || index >= count) + { + throw new ArgumentOutOfRangeException("index"); + } + Array.Copy(array, index + 1, array, index, count - index - 1); + count--; + array[count] = default(T); + } + + /// + /// Returns a string representation of this repeated field, in the same + /// way as it would be represented by the default JSON formatter. + /// + public override string ToString() + { + var writer = new StringWriter(); + JsonFormatter.Default.WriteList(writer, this); + return writer.ToString(); + } + + /// + /// Gets or sets the item at the specified index. + /// + /// + /// The element at the specified index. + /// + /// The zero-based index of the element to get or set. + /// The item at the specified index. + public T this[int index] + { + get + { + if (index < 0 || index >= count) + { + throw new ArgumentOutOfRangeException("index"); + } + return array[index]; + } + set + { + if (index < 0 || index >= count) + { + throw new ArgumentOutOfRangeException("index"); + } + if (value == null) + { + throw new ArgumentNullException("value"); + } + array[index] = value; + } + } + + #region Explicit interface implementation for IList and ICollection. + bool IList.IsFixedSize { get { return false; } } + + void ICollection.CopyTo(Array array, int index) + { + Array.Copy(this.array, 0, array, index, count); + } + + bool ICollection.IsSynchronized { get { return false; } } + + object ICollection.SyncRoot { get { return this; } } + + object IList.this[int index] + { + get { return this[index]; } + set { this[index] = (T)value; } + } + + int IList.Add(object value) + { + Add((T) value); + return count - 1; + } + + bool IList.Contains(object value) + { + return (value is T && Contains((T)value)); + } + + int IList.IndexOf(object value) + { + if (!(value is T)) + { + return -1; + } + return IndexOf((T)value); + } + + void IList.Insert(int index, object value) + { + Insert(index, (T) value); + } + + void IList.Remove(object value) + { + if (!(value is T)) + { + return; + } + Remove((T)value); + } + #endregion + } +} diff --git a/packager/third_party/protobuf/csharp/src/Google.Protobuf/Compatibility/PropertyInfoExtensions.cs b/packager/third_party/protobuf/csharp/src/Google.Protobuf/Compatibility/PropertyInfoExtensions.cs new file mode 100644 index 0000000000..8a6fefa74e --- /dev/null +++ b/packager/third_party/protobuf/csharp/src/Google.Protobuf/Compatibility/PropertyInfoExtensions.cs @@ -0,0 +1,64 @@ +#region Copyright notice and license +// Protocol Buffers - Google's data interchange format +// Copyright 2015 Google Inc. All rights reserved. 
+// https://developers.google.com/protocol-buffers/ +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +#endregion + +using System.Reflection; + +namespace Google.Protobuf.Compatibility +{ + /// + /// Extension methods for , effectively providing + /// the familiar members from previous desktop framework versions while + /// targeting the newer releases, .NET Core etc. + /// + internal static class PropertyInfoExtensions + { + /// + /// Returns the public getter of a property, or null if there is no such getter + /// (either because it's read-only, or the getter isn't public). + /// + internal static MethodInfo GetGetMethod(this PropertyInfo target) + { + var method = target.GetMethod; + return method != null && method.IsPublic ? method : null; + } + + /// + /// Returns the public setter of a property, or null if there is no such setter + /// (either because it's write-only, or the setter isn't public). + /// + internal static MethodInfo GetSetMethod(this PropertyInfo target) + { + var method = target.SetMethod; + return method != null && method.IsPublic ? method : null; + } + } +} diff --git a/packager/third_party/protobuf/csharp/src/Google.Protobuf/Compatibility/TypeExtensions.cs b/packager/third_party/protobuf/csharp/src/Google.Protobuf/Compatibility/TypeExtensions.cs new file mode 100644 index 0000000000..762a29eb0a --- /dev/null +++ b/packager/third_party/protobuf/csharp/src/Google.Protobuf/Compatibility/TypeExtensions.cs @@ -0,0 +1,113 @@ +#region Copyright notice and license +// Protocol Buffers - Google's data interchange format +// Copyright 2015 Google Inc. All rights reserved. +// https://developers.google.com/protocol-buffers/ +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. 
+// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +#endregion + +using System; +using System.Reflection; + +namespace Google.Protobuf.Compatibility +{ + /// + /// Provides extension methods on Type that just proxy to TypeInfo. + /// These are used to support the new type system from .NET 4.5, without + /// having calls to GetTypeInfo all over the place. While the methods here are meant to be + /// broadly compatible with the desktop framework, there are some subtle differences in behaviour - but + /// they're not expected to affect our use cases. While the class is internal, that should be fine: we can + /// evaluate each new use appropriately. + /// + internal static class TypeExtensions + { + /// + /// Returns true if the target type is a value type, including a nullable value type or an enum, or false + /// if it's a reference type (class, delegate, interface - including System.ValueType and System.Enum). + /// + internal static bool IsValueType(this Type target) + { + return target.GetTypeInfo().IsValueType; + } + + /// + /// See https://msdn.microsoft.com/en-us/library/system.type.isassignablefrom + /// + internal static bool IsAssignableFrom(this Type target, Type c) + { + return target.GetTypeInfo().IsAssignableFrom(c.GetTypeInfo()); + } + + /// + /// Returns a representation of the public property associated with the given name in the given type, + /// including inherited properties or null if there is no such public property. + /// Here, "public property" means a property where either the getter, or the setter, or both, is public. + /// + internal static PropertyInfo GetProperty(this Type target, string name) + { + // GetDeclaredProperty only returns properties declared in the given type, so we need to recurse. + while (target != null) + { + var typeInfo = target.GetTypeInfo(); + var ret = typeInfo.GetDeclaredProperty(name); + if (ret != null && ((ret.CanRead && ret.GetMethod.IsPublic) || (ret.CanWrite && ret.SetMethod.IsPublic))) + { + return ret; + } + target = typeInfo.BaseType; + } + return null; + } + + /// + /// Returns a representation of the public method associated with the given name in the given type, + /// including inherited methods. + /// + /// + /// This has a few differences compared with Type.GetMethod in the desktop framework. 
It will throw + /// if there is an ambiguous match even between a private method and a public one, but it *won't* throw + /// if there are two overloads at different levels in the type hierarchy (e.g. class Base declares public void Foo(int) and + /// class Child : Base declares public void Foo(long)). + /// + /// One type in the hierarchy declared more than one method with the same name + internal static MethodInfo GetMethod(this Type target, string name) + { + // GetDeclaredMethod only returns methods declared in the given type, so we need to recurse. + while (target != null) + { + var typeInfo = target.GetTypeInfo(); + var ret = typeInfo.GetDeclaredMethod(name); + if (ret != null && ret.IsPublic) + { + return ret; + } + target = typeInfo.BaseType; + } + return null; + } + } +} diff --git a/packager/third_party/protobuf/csharp/src/Google.Protobuf/FieldCodec.cs b/packager/third_party/protobuf/csharp/src/Google.Protobuf/FieldCodec.cs new file mode 100644 index 0000000000..9831308882 --- /dev/null +++ b/packager/third_party/protobuf/csharp/src/Google.Protobuf/FieldCodec.cs @@ -0,0 +1,473 @@ +#region Copyright notice and license +// Protocol Buffers - Google's data interchange format +// Copyright 2015 Google Inc. All rights reserved. +// https://developers.google.com/protocol-buffers/ +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +#endregion + +using Google.Protobuf.Compatibility; +using Google.Protobuf.WellKnownTypes; +using System; +using System.Collections.Generic; + +namespace Google.Protobuf +{ + /// + /// Factory methods for . + /// + public static class FieldCodec + { + // TODO: Avoid the "dual hit" of lambda expressions: create open delegates instead. (At least test...) + + /// + /// Retrieves a codec suitable for a string field with the given tag. + /// + /// The tag. + /// A codec for the given tag. 
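// The factory methods that follow all take a pre-computed tag rather than a raw field
// number. As a sketch (field number 5 is an arbitrary choice for the illustration), a tag
// combines the field number and wire type as (fieldNumber << 3) | wireType:
uint stringTag = (5 << 3) | 2;                 // 42: field 5, wire type 2 (length-delimited)
var stringCodec = FieldCodec.ForString(stringTag);
// Generated code passes such values as literal constants, e.g. FieldCodec.ForString(42).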
+ public static FieldCodec ForString(uint tag) + { + return new FieldCodec(input => input.ReadString(), (output, value) => output.WriteString(value), CodedOutputStream.ComputeStringSize, tag); + } + + /// + /// Retrieves a codec suitable for a bytes field with the given tag. + /// + /// The tag. + /// A codec for the given tag. + public static FieldCodec ForBytes(uint tag) + { + return new FieldCodec(input => input.ReadBytes(), (output, value) => output.WriteBytes(value), CodedOutputStream.ComputeBytesSize, tag); + } + + /// + /// Retrieves a codec suitable for a bool field with the given tag. + /// + /// The tag. + /// A codec for the given tag. + public static FieldCodec ForBool(uint tag) + { + return new FieldCodec(input => input.ReadBool(), (output, value) => output.WriteBool(value), CodedOutputStream.ComputeBoolSize, tag); + } + + /// + /// Retrieves a codec suitable for an int32 field with the given tag. + /// + /// The tag. + /// A codec for the given tag. + public static FieldCodec ForInt32(uint tag) + { + return new FieldCodec(input => input.ReadInt32(), (output, value) => output.WriteInt32(value), CodedOutputStream.ComputeInt32Size, tag); + } + + /// + /// Retrieves a codec suitable for an sint32 field with the given tag. + /// + /// The tag. + /// A codec for the given tag. + public static FieldCodec ForSInt32(uint tag) + { + return new FieldCodec(input => input.ReadSInt32(), (output, value) => output.WriteSInt32(value), CodedOutputStream.ComputeSInt32Size, tag); + } + + /// + /// Retrieves a codec suitable for a fixed32 field with the given tag. + /// + /// The tag. + /// A codec for the given tag. + public static FieldCodec ForFixed32(uint tag) + { + return new FieldCodec(input => input.ReadFixed32(), (output, value) => output.WriteFixed32(value), 4, tag); + } + + /// + /// Retrieves a codec suitable for an sfixed32 field with the given tag. + /// + /// The tag. + /// A codec for the given tag. + public static FieldCodec ForSFixed32(uint tag) + { + return new FieldCodec(input => input.ReadSFixed32(), (output, value) => output.WriteSFixed32(value), 4, tag); + } + + /// + /// Retrieves a codec suitable for a uint32 field with the given tag. + /// + /// The tag. + /// A codec for the given tag. + public static FieldCodec ForUInt32(uint tag) + { + return new FieldCodec(input => input.ReadUInt32(), (output, value) => output.WriteUInt32(value), CodedOutputStream.ComputeUInt32Size, tag); + } + + /// + /// Retrieves a codec suitable for an int64 field with the given tag. + /// + /// The tag. + /// A codec for the given tag. + public static FieldCodec ForInt64(uint tag) + { + return new FieldCodec(input => input.ReadInt64(), (output, value) => output.WriteInt64(value), CodedOutputStream.ComputeInt64Size, tag); + } + + /// + /// Retrieves a codec suitable for an sint64 field with the given tag. + /// + /// The tag. + /// A codec for the given tag. + public static FieldCodec ForSInt64(uint tag) + { + return new FieldCodec(input => input.ReadSInt64(), (output, value) => output.WriteSInt64(value), CodedOutputStream.ComputeSInt64Size, tag); + } + + /// + /// Retrieves a codec suitable for a fixed64 field with the given tag. + /// + /// The tag. + /// A codec for the given tag. + public static FieldCodec ForFixed64(uint tag) + { + return new FieldCodec(input => input.ReadFixed64(), (output, value) => output.WriteFixed64(value), 8, tag); + } + + /// + /// Retrieves a codec suitable for an sfixed64 field with the given tag. + /// + /// The tag. + /// A codec for the given tag. 
+ public static FieldCodec ForSFixed64(uint tag) + { + return new FieldCodec(input => input.ReadSFixed64(), (output, value) => output.WriteSFixed64(value), 8, tag); + } + + /// + /// Retrieves a codec suitable for a uint64 field with the given tag. + /// + /// The tag. + /// A codec for the given tag. + public static FieldCodec ForUInt64(uint tag) + { + return new FieldCodec(input => input.ReadUInt64(), (output, value) => output.WriteUInt64(value), CodedOutputStream.ComputeUInt64Size, tag); + } + + /// + /// Retrieves a codec suitable for a float field with the given tag. + /// + /// The tag. + /// A codec for the given tag. + public static FieldCodec ForFloat(uint tag) + { + return new FieldCodec(input => input.ReadFloat(), (output, value) => output.WriteFloat(value), CodedOutputStream.ComputeFloatSize, tag); + } + + /// + /// Retrieves a codec suitable for a double field with the given tag. + /// + /// The tag. + /// A codec for the given tag. + public static FieldCodec ForDouble(uint tag) + { + return new FieldCodec(input => input.ReadDouble(), (output, value) => output.WriteDouble(value), CodedOutputStream.ComputeDoubleSize, tag); + } + + // Enums are tricky. We can probably use expression trees to build these delegates automatically, + // but it's easy to generate the code for it. + + /// + /// Retrieves a codec suitable for an enum field with the given tag. + /// + /// The tag. + /// A conversion function from to the enum type. + /// A conversion function from the enum type to . + /// A codec for the given tag. + public static FieldCodec ForEnum(uint tag, Func toInt32, Func fromInt32) + { + return new FieldCodec(input => fromInt32( + input.ReadEnum()), + (output, value) => output.WriteEnum(toInt32(value)), + value => CodedOutputStream.ComputeEnumSize(toInt32(value)), tag); + } + + /// + /// Retrieves a codec suitable for a message field with the given tag. + /// + /// The tag. + /// A parser to use for the message type. + /// A codec for the given tag. + public static FieldCodec ForMessage(uint tag, MessageParser parser) where T : IMessage + { + return new FieldCodec(input => { T message = parser.CreateTemplate(); input.ReadMessage(message); return message; }, + (output, value) => output.WriteMessage(value), message => CodedOutputStream.ComputeMessageSize(message), tag); + } + + /// + /// Creates a codec for a wrapper type of a class - which must be string or ByteString. + /// + public static FieldCodec ForClassWrapper(uint tag) where T : class + { + var nestedCodec = WrapperCodecs.GetCodec(); + return new FieldCodec( + input => WrapperCodecs.Read(input, nestedCodec), + (output, value) => WrapperCodecs.Write(output, value, nestedCodec), + value => WrapperCodecs.CalculateSize(value, nestedCodec), + tag, + null); // Default value for the wrapper + } + + /// + /// Creates a codec for a wrapper type of a struct - which must be Int32, Int64, UInt32, UInt64, + /// Bool, Single or Double. + /// + public static FieldCodec ForStructWrapper(uint tag) where T : struct + { + var nestedCodec = WrapperCodecs.GetCodec(); + return new FieldCodec( + input => WrapperCodecs.Read(input, nestedCodec), + (output, value) => WrapperCodecs.Write(output, value.Value, nestedCodec), + value => value == null ? 0 : WrapperCodecs.CalculateSize(value.Value, nestedCodec), + tag, + null); // Default value for the wrapper + } + + /// + /// Helper code to create codecs for wrapper types. 
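// A brief sketch of the wrapper semantics described above: the codec's element type is the
// nullable form, its default is null, and so an explicit zero is still written (unlike a
// plain int32 field). The tag value 10 is an arbitrary choice for the illustration.
// Requires: using Google.Protobuf;
FieldCodec<int?> wrapped = FieldCodec.ForStructWrapper<int>(10);
int nullSize = wrapped.CalculateSizeWithTag(null); // 0: null is the default, nothing written
int zeroSize = wrapped.CalculateSizeWithTag(0);    // 2: the tag plus an empty wrapper message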
+ /// + /// + /// Somewhat ugly with all the static methods, but the conversions involved to/from nullable types make it + /// slightly tricky to improve. So long as we keep the public API (ForClassWrapper, ForStructWrapper) in place, + /// we can refactor later if we come up with something cleaner. + /// + private static class WrapperCodecs + { + private static readonly Dictionary Codecs = new Dictionary + { + { typeof(bool), ForBool(WireFormat.MakeTag(WrappersReflection.WrapperValueFieldNumber, WireFormat.WireType.Varint)) }, + { typeof(int), ForInt32(WireFormat.MakeTag(WrappersReflection.WrapperValueFieldNumber, WireFormat.WireType.Varint)) }, + { typeof(long), ForInt64(WireFormat.MakeTag(WrappersReflection.WrapperValueFieldNumber, WireFormat.WireType.Varint)) }, + { typeof(uint), ForUInt32(WireFormat.MakeTag(WrappersReflection.WrapperValueFieldNumber, WireFormat.WireType.Varint)) }, + { typeof(ulong), ForUInt64(WireFormat.MakeTag(WrappersReflection.WrapperValueFieldNumber, WireFormat.WireType.Varint)) }, + { typeof(float), ForFloat(WireFormat.MakeTag(WrappersReflection.WrapperValueFieldNumber, WireFormat.WireType.Fixed32)) }, + { typeof(double), ForDouble(WireFormat.MakeTag(WrappersReflection.WrapperValueFieldNumber, WireFormat.WireType.Fixed64)) }, + { typeof(string), ForString(WireFormat.MakeTag(WrappersReflection.WrapperValueFieldNumber, WireFormat.WireType.LengthDelimited)) }, + { typeof(ByteString), ForBytes(WireFormat.MakeTag(WrappersReflection.WrapperValueFieldNumber, WireFormat.WireType.LengthDelimited)) } + }; + + /// + /// Returns a field codec which effectively wraps a value of type T in a message. + /// + /// + internal static FieldCodec GetCodec() + { + object value; + if (!Codecs.TryGetValue(typeof(T), out value)) + { + throw new InvalidOperationException("Invalid type argument requested for wrapper codec: " + typeof(T)); + } + return (FieldCodec) value; + } + + internal static T Read(CodedInputStream input, FieldCodec codec) + { + int length = input.ReadLength(); + int oldLimit = input.PushLimit(length); + + uint tag; + T value = codec.DefaultValue; + while ((tag = input.ReadTag()) != 0) + { + if (tag == codec.Tag) + { + value = codec.Read(input); + } + else + { + input.SkipLastField(); + } + + } + input.CheckReadEndOfStreamTag(); + input.PopLimit(oldLimit); + + return value; + } + + internal static void Write(CodedOutputStream output, T value, FieldCodec codec) + { + output.WriteLength(codec.CalculateSizeWithTag(value)); + codec.WriteTagAndValue(output, value); + } + + internal static int CalculateSize(T value, FieldCodec codec) + { + int fieldLength = codec.CalculateSizeWithTag(value); + return CodedOutputStream.ComputeLengthSize(fieldLength) + fieldLength; + } + } + } + + /// + /// + /// An encode/decode pair for a single field. This effectively encapsulates + /// all the information needed to read or write the field value from/to a coded + /// stream. + /// + /// + /// This class is public and has to be as it is used by generated code, but its public + /// API is very limited - just what the generated code needs to call directly. + /// + /// + /// + /// This never writes default values to the stream, and does not address "packedness" + /// in repeated fields itself, other than to know whether or not the field *should* be packed. 
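// A short sketch of the "never writes default values" behaviour just described, using a
// string codec (tag 10, i.e. field 1 with length-delimited wire type, is an arbitrary
// choice for the illustration).
// Requires: using System.IO; using Google.Protobuf;
var codec = FieldCodec.ForString(10);
int skipped = codec.CalculateSizeWithTag("");    // 0: "" is this codec's default value
int written = codec.CalculateSizeWithTag("hi");  // 4: 1-byte tag + 1-byte length + 2 bytes UTF-8
var stream = new MemoryStream();
var output = new CodedOutputStream(stream);
codec.WriteTagAndValue(output, "");              // no-op: default values are never written
codec.WriteTagAndValue(output, "hi");            // writes the 4 bytes counted above
output.Flush();                                  // the stream now holds exactly 4 bytes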
+ /// + public sealed class FieldCodec + { + private static readonly T DefaultDefault; + private static readonly bool TypeSupportsPacking = typeof(T).IsValueType() && Nullable.GetUnderlyingType(typeof(T)) == null; + + static FieldCodec() + { + if (typeof(T) == typeof(string)) + { + DefaultDefault = (T)(object)""; + } + else if (typeof(T) == typeof(ByteString)) + { + DefaultDefault = (T)(object)ByteString.Empty; + } + // Otherwise it's the default value of the CLR type + } + + internal static bool IsPackedRepeatedField(uint tag) => + TypeSupportsPacking && WireFormat.GetTagWireType(tag) == WireFormat.WireType.LengthDelimited; + + internal bool PackedRepeatedField { get; } + + /// + /// Returns a delegate to write a value (unconditionally) to a coded output stream. + /// + internal Action ValueWriter { get; } + + /// + /// Returns the size calculator for just a value. + /// + internal Func ValueSizeCalculator { get; } + + /// + /// Returns a delegate to read a value from a coded input stream. It is assumed that + /// the stream is already positioned on the appropriate tag. + /// + internal Func ValueReader { get; } + + /// + /// Returns the fixed size for an entry, or 0 if sizes vary. + /// + internal int FixedSize { get; } + + /// + /// Gets the tag of the codec. + /// + /// + /// The tag of the codec. + /// + internal uint Tag { get; } + + /// + /// Default value for this codec. Usually the same for every instance of the same type, but + /// for string/ByteString wrapper fields the codec's default value is null, whereas for + /// other string/ByteString fields it's "" or ByteString.Empty. + /// + /// + /// The default value of the codec's type. + /// + internal T DefaultValue { get; } + + private readonly int tagSize; + + internal FieldCodec( + Func reader, + Action writer, + int fixedSize, + uint tag) : this(reader, writer, _ => fixedSize, tag) + { + FixedSize = fixedSize; + } + + internal FieldCodec( + Func reader, + Action writer, + Func sizeCalculator, + uint tag) : this(reader, writer, sizeCalculator, tag, DefaultDefault) + { + } + + internal FieldCodec( + Func reader, + Action writer, + Func sizeCalculator, + uint tag, + T defaultValue) + { + ValueReader = reader; + ValueWriter = writer; + ValueSizeCalculator = sizeCalculator; + FixedSize = 0; + Tag = tag; + DefaultValue = defaultValue; + tagSize = CodedOutputStream.ComputeRawVarint32Size(tag); + // Detect packed-ness once, so we can check for it within RepeatedField. + PackedRepeatedField = IsPackedRepeatedField(tag); + } + + /// + /// Write a tag and the given value, *if* the value is not the default. + /// + public void WriteTagAndValue(CodedOutputStream output, T value) + { + if (!IsDefault(value)) + { + output.WriteTag(Tag); + ValueWriter(output, value); + } + } + + /// + /// Reads a value of the codec type from the given . + /// + /// The input stream to read from. + /// The value read from the stream. + public T Read(CodedInputStream input) => ValueReader(input); + + /// + /// Calculates the size required to write the given value, with a tag, + /// if the value is not the default. + /// + public int CalculateSizeWithTag(T value) => IsDefault(value) ? 
0 : ValueSizeCalculator(value) + tagSize; + + private bool IsDefault(T value) => EqualityComparer.Default.Equals(value, DefaultValue); + } +} diff --git a/packager/third_party/protobuf/csharp/src/Google.Protobuf/FrameworkPortability.cs b/packager/third_party/protobuf/csharp/src/Google.Protobuf/FrameworkPortability.cs new file mode 100644 index 0000000000..c8060f6ca8 --- /dev/null +++ b/packager/third_party/protobuf/csharp/src/Google.Protobuf/FrameworkPortability.cs @@ -0,0 +1,49 @@ +#region Copyright notice and license +// Protocol Buffers - Google's data interchange format +// Copyright 2008 Google Inc. All rights reserved. +// https://developers.google.com/protocol-buffers/ +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +#endregion + +using System; +using System.Text.RegularExpressions; + +namespace Google.Protobuf +{ + /// + /// Class containing helpful workarounds for various platform compatibility + /// + internal static class FrameworkPortability + { + // The value of RegexOptions.Compiled is 8. We can test for the presence at + // execution time using Enum.IsDefined, so a single build will do the right thing + // on each platform. (RegexOptions.Compiled isn't supported by PCLs.) + internal static readonly RegexOptions CompiledRegexWhereAvailable = + Enum.IsDefined(typeof(RegexOptions), 8) ? 
(RegexOptions)8 : RegexOptions.None; + } +} \ No newline at end of file diff --git a/packager/third_party/protobuf/csharp/src/Google.Protobuf/Google.Protobuf.csproj b/packager/third_party/protobuf/csharp/src/Google.Protobuf/Google.Protobuf.csproj new file mode 100644 index 0000000000..fdd97ddf74 --- /dev/null +++ b/packager/third_party/protobuf/csharp/src/Google.Protobuf/Google.Protobuf.csproj @@ -0,0 +1,168 @@ + + + + Debug + AnyCPU + 9.0.30729 + 2.0 + {6908BDCE-D925-43F3-94AC-A531E6DF2591} + Library + Properties + Google.Protobuf + Google.Protobuf + {786C830F-07A1-408B-BD7F-6EE04809D6DB};{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC} + Profile259 + v4.5 + 512 + 3.5 + 10.0 + + + + + true + full + false + bin\Debug + obj\Debug\ + bin\Debug\Google.Protobuf.xml + + + DEBUG;TRACE + prompt + 4 + true + Off + true + + + pdbonly + true + bin\Release + obj\Release\ + $(OutputPath)\$(AssemblyName).xml + + + TRACE + prompt + 4 + true + Off + true + + + pdbonly + true + bin\ReleaseSigned + obj\ReleaseSigned\ + $(OutputPath)\$(AssemblyName).xml + + + TRACE;SIGNED + prompt + 4 + true + Off + True + ..\..\keys\Google.Protobuf.snk + true + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + This project references NuGet package(s) that are missing on this computer. Use NuGet Package Restore to download them. For more information, see http://go.microsoft.com/fwlink/?LinkID=322105. The missing file is {0}. + + + + + \ No newline at end of file diff --git a/packager/third_party/protobuf/csharp/src/Google.Protobuf/Google.Protobuf.nuspec b/packager/third_party/protobuf/csharp/src/Google.Protobuf/Google.Protobuf.nuspec new file mode 100644 index 0000000000..90e3da3d0e --- /dev/null +++ b/packager/third_party/protobuf/csharp/src/Google.Protobuf/Google.Protobuf.nuspec @@ -0,0 +1,54 @@ + + + + Google.Protobuf + Google Protocol Buffers C# + C# runtime library for Protocol Buffers - Google's data interchange format. + See project site for more info. + 3.0.0-beta3 + Google Inc. + protobuf-packages + https://github.com/google/protobuf/blob/master/LICENSE + https://github.com/google/protobuf + false + C# proto3 support + Copyright 2015, Google Inc. + Protocol Buffers Binary Serialization Format Google proto proto3 + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/packager/third_party/protobuf/csharp/src/Google.Protobuf/ICustomDiagnosticMessage.cs b/packager/third_party/protobuf/csharp/src/Google.Protobuf/ICustomDiagnosticMessage.cs new file mode 100644 index 0000000000..a0090569f1 --- /dev/null +++ b/packager/third_party/protobuf/csharp/src/Google.Protobuf/ICustomDiagnosticMessage.cs @@ -0,0 +1,69 @@ +#region Copyright notice and license +// Protocol Buffers - Google's data interchange format +// Copyright 2016 Google Inc. All rights reserved. +// https://developers.google.com/protocol-buffers/ +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. 
nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +#endregion + +namespace Google.Protobuf +{ + /// + /// A message type that has a custom string format for diagnostic purposes. + /// + /// + /// + /// Calling on a generated message type normally + /// returns the JSON representation. If a message type implements this interface, + /// then the method will be called instead of the regular + /// JSON formatting code, but only when ToString() is called either on the message itself + /// or on another message which contains it. This does not affect the normal JSON formatting of + /// the message. + /// + /// + /// For example, if you create a proto message representing a GUID, the internal + /// representation may be a bytes field or four fixed32 fields. However, when debugging + /// it may be more convenient to see a result in the same format as provides. + /// + /// This interface extends to avoid it accidentally being implemented + /// on types other than messages, where it would not be used by anything in the framework. + /// + public interface ICustomDiagnosticMessage : IMessage + { + /// + /// Returns a string representation of this object, for diagnostic purposes. + /// + /// + /// This method is called when a message is formatted as part of a + /// call. It does not affect the JSON representation used by other than + /// in calls to . While it is recommended + /// that the result is valid JSON, this is never assumed by the Protobuf library. + /// + /// A string representation of this object, for diagnostic purposes. + string ToDiagnosticString(); + } +} diff --git a/packager/third_party/protobuf/csharp/src/Google.Protobuf/IDeepCloneable.cs b/packager/third_party/protobuf/csharp/src/Google.Protobuf/IDeepCloneable.cs new file mode 100644 index 0000000000..c9c71bbe2c --- /dev/null +++ b/packager/third_party/protobuf/csharp/src/Google.Protobuf/IDeepCloneable.cs @@ -0,0 +1,54 @@ +#region Copyright notice and license +// Protocol Buffers - Google's data interchange format +// Copyright 2015 Google Inc. All rights reserved. +// https://developers.google.com/protocol-buffers/ +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. 
+// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +#endregion + +namespace Google.Protobuf +{ + /// + /// Generic interface for a deeply cloneable type. + /// + /// + /// + /// All generated messages implement this interface, but so do some non-message types. + /// Additionally, due to the type constraint on T in , + /// it is simpler to keep this as a separate interface. + /// + /// + /// The type itself, returned by the method. + public interface IDeepCloneable + { + /// + /// Creates a deep clone of this object. + /// + /// A deep clone of this object. + T Clone(); + } +} diff --git a/packager/third_party/protobuf/csharp/src/Google.Protobuf/IMessage.cs b/packager/third_party/protobuf/csharp/src/Google.Protobuf/IMessage.cs new file mode 100644 index 0000000000..3e644c1773 --- /dev/null +++ b/packager/third_party/protobuf/csharp/src/Google.Protobuf/IMessage.cs @@ -0,0 +1,87 @@ +#region Copyright notice and license +// Protocol Buffers - Google's data interchange format +// Copyright 2008 Google Inc. All rights reserved. +// https://developers.google.com/protocol-buffers/ +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
+#endregion + +using System; +using Google.Protobuf.Reflection; + +namespace Google.Protobuf +{ + /// + /// Interface for a Protocol Buffers message, supporting + /// basic operations required for serialization. + /// + public interface IMessage + { + /// + /// Merges the data from the specified coded input stream with the current message. + /// + /// See the user guide for precise merge semantics. + /// + void MergeFrom(CodedInputStream input); + + /// + /// Writes the data to the given coded output stream. + /// + /// Coded output stream to write the data to. Must not be null. + void WriteTo(CodedOutputStream output); + + /// + /// Calculates the size of this message in Protocol Buffer wire format, in bytes. + /// + /// The number of bytes required to write this message + /// to a coded output stream. + int CalculateSize(); + + /// + /// Descriptor for this message. All instances are expected to return the same descriptor, + /// and for generated types this will be an explicitly-implemented member, returning the + /// same value as the static property declared on the type. + /// + MessageDescriptor Descriptor { get; } + } + + /// + /// Generic interface for a Protocol Buffers message, + /// where the type parameter is expected to be the same type as + /// the implementation class. + /// + /// The message type. + public interface IMessage : IMessage, IEquatable, IDeepCloneable where T : IMessage + { + /// + /// Merges the given message into this one. + /// + /// See the user guide for precise merge semantics. + /// The message to merge with this one. Must not be null. + void MergeFrom(T message); + } +} diff --git a/packager/third_party/protobuf/csharp/src/Google.Protobuf/InvalidJsonException.cs b/packager/third_party/protobuf/csharp/src/Google.Protobuf/InvalidJsonException.cs new file mode 100644 index 0000000000..b543420142 --- /dev/null +++ b/packager/third_party/protobuf/csharp/src/Google.Protobuf/InvalidJsonException.cs @@ -0,0 +1,53 @@ +#region Copyright notice and license +// Protocol Buffers - Google's data interchange format +// Copyright 2015 Google Inc. All rights reserved. +// https://developers.google.com/protocol-buffers/ +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +#endregion + +using System.IO; + +namespace Google.Protobuf +{ + /// + /// Thrown when an attempt is made to parse invalid JSON, e.g. using + /// a non-string property key, or including a redundant comma. Parsing a protocol buffer + /// message represented in JSON using can throw both this + /// exception and depending on the situation. This + /// exception is only thrown for "pure JSON" errors, whereas InvalidProtocolBufferException + /// is thrown when the JSON may be valid in and of itself, but cannot be parsed as a protocol buffer + /// message. + /// + public sealed class InvalidJsonException : IOException + { + internal InvalidJsonException(string message) + : base(message) + { + } + } +} \ No newline at end of file diff --git a/packager/third_party/protobuf/csharp/src/Google.Protobuf/InvalidProtocolBufferException.cs b/packager/third_party/protobuf/csharp/src/Google.Protobuf/InvalidProtocolBufferException.cs new file mode 100644 index 0000000000..a6aefb6fa4 --- /dev/null +++ b/packager/third_party/protobuf/csharp/src/Google.Protobuf/InvalidProtocolBufferException.cs @@ -0,0 +1,129 @@ +#region Copyright notice and license +// Protocol Buffers - Google's data interchange format +// Copyright 2008 Google Inc. All rights reserved. +// https://developers.google.com/protocol-buffers/ +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +#endregion + +using System; +using System.IO; + +namespace Google.Protobuf +{ + /// + /// Thrown when a protocol message being parsed is invalid in some way, + /// e.g. 
it contains a malformed varint or a negative byte length. + /// + public sealed class InvalidProtocolBufferException : IOException + { + internal InvalidProtocolBufferException(string message) + : base(message) + { + } + + internal InvalidProtocolBufferException(string message, Exception innerException) + : base(message, innerException) + { + } + + internal static InvalidProtocolBufferException MoreDataAvailable() + { + return new InvalidProtocolBufferException( + "Completed reading a message while more data was available in the stream."); + } + + internal static InvalidProtocolBufferException TruncatedMessage() + { + return new InvalidProtocolBufferException( + "While parsing a protocol message, the input ended unexpectedly " + + "in the middle of a field. This could mean either than the " + + "input has been truncated or that an embedded message " + + "misreported its own length."); + } + + internal static InvalidProtocolBufferException NegativeSize() + { + return new InvalidProtocolBufferException( + "CodedInputStream encountered an embedded string or message " + + "which claimed to have negative size."); + } + + internal static InvalidProtocolBufferException MalformedVarint() + { + return new InvalidProtocolBufferException( + "CodedInputStream encountered a malformed varint."); + } + + /// + /// Creates an exception for an error condition of an invalid tag being encountered. + /// + internal static InvalidProtocolBufferException InvalidTag() + { + return new InvalidProtocolBufferException( + "Protocol message contained an invalid tag (zero)."); + } + + internal static InvalidProtocolBufferException InvalidBase64(Exception innerException) + { + return new InvalidProtocolBufferException("Invalid base64 data", innerException); + } + + internal static InvalidProtocolBufferException InvalidEndTag() + { + return new InvalidProtocolBufferException( + "Protocol message end-group tag did not match expected tag."); + } + + internal static InvalidProtocolBufferException RecursionLimitExceeded() + { + return new InvalidProtocolBufferException( + "Protocol message had too many levels of nesting. May be malicious. " + + "Use CodedInputStream.SetRecursionLimit() to increase the depth limit."); + } + + internal static InvalidProtocolBufferException JsonRecursionLimitExceeded() + { + return new InvalidProtocolBufferException( + "Protocol message had too many levels of nesting. May be malicious. " + + "Use JsonParser.Settings to increase the depth limit."); + } + + internal static InvalidProtocolBufferException SizeLimitExceeded() + { + return new InvalidProtocolBufferException( + "Protocol message was too large. May be malicious. " + + "Use CodedInputStream.SetSizeLimit() to increase the size limit."); + } + + internal static InvalidProtocolBufferException InvalidMessageStreamTag() + { + return new InvalidProtocolBufferException( + "Stream of protocol messages had invalid tag. Expected tag is length-delimited field 1."); + } + } +} \ No newline at end of file diff --git a/packager/third_party/protobuf/csharp/src/Google.Protobuf/JsonFormatter.cs b/packager/third_party/protobuf/csharp/src/Google.Protobuf/JsonFormatter.cs new file mode 100644 index 0000000000..8377247326 --- /dev/null +++ b/packager/third_party/protobuf/csharp/src/Google.Protobuf/JsonFormatter.cs @@ -0,0 +1,912 @@ +#region Copyright notice and license +// Protocol Buffers - Google's data interchange format +// Copyright 2015 Google Inc. All rights reserved. 
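A brief sketch of where InvalidProtocolBufferException surfaces in practice; Person is again a hypothetical generated message type used only for illustration:

    using System;
    using Google.Protobuf;

    byte[] truncated = { 0xFF };   // an incomplete varint tag
    try
    {
        Person ignored = Person.Parser.ParseFrom(truncated);
    }
    catch (InvalidProtocolBufferException e)
    {
        // Truncated input, malformed varint, invalid tag, excessive nesting, ...
        Console.WriteLine(e.Message);
    }
    // JSON parsing can additionally throw InvalidJsonException (see above) when the
    // input is not valid JSON at all.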
+// https://developers.google.com/protocol-buffers/ +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +#endregion + +using System; +using System.Collections; +using System.Globalization; +using System.Text; +using Google.Protobuf.Reflection; +using Google.Protobuf.WellKnownTypes; +using System.IO; +using System.Linq; +using System.Collections.Generic; +using System.Reflection; + +namespace Google.Protobuf +{ + /// + /// Reflection-based converter from messages to JSON. + /// + /// + /// + /// Instances of this class are thread-safe, with no mutable state. + /// + /// + /// This is a simple start to get JSON formatting working. As it's reflection-based, + /// it's not as quick as baking calls into generated messages - but is a simpler implementation. + /// (This code is generally not heavily optimized.) + /// + /// + public sealed class JsonFormatter + { + internal const string AnyTypeUrlField = "@type"; + internal const string AnyDiagnosticValueField = "@value"; + internal const string AnyWellKnownTypeValueField = "value"; + private const string TypeUrlPrefix = "type.googleapis.com"; + private const string NameValueSeparator = ": "; + private const string PropertySeparator = ", "; + + /// + /// Returns a formatter using the default settings. + /// + public static JsonFormatter Default { get; } = new JsonFormatter(Settings.Default); + + // A JSON formatter which *only* exists + private static readonly JsonFormatter diagnosticFormatter = new JsonFormatter(Settings.Default); + + /// + /// The JSON representation of the first 160 characters of Unicode. + /// Empty strings are replaced by the static constructor. 
+ /// + private static readonly string[] CommonRepresentations = { + // C0 (ASCII and derivatives) control characters + "\\u0000", "\\u0001", "\\u0002", "\\u0003", // 0x00 + "\\u0004", "\\u0005", "\\u0006", "\\u0007", + "\\b", "\\t", "\\n", "\\u000b", + "\\f", "\\r", "\\u000e", "\\u000f", + "\\u0010", "\\u0011", "\\u0012", "\\u0013", // 0x10 + "\\u0014", "\\u0015", "\\u0016", "\\u0017", + "\\u0018", "\\u0019", "\\u001a", "\\u001b", + "\\u001c", "\\u001d", "\\u001e", "\\u001f", + // Escaping of " and \ are required by www.json.org string definition. + // Escaping of < and > are required for HTML security. + "", "", "\\\"", "", "", "", "", "", // 0x20 + "", "", "", "", "", "", "", "", + "", "", "", "", "", "", "", "", // 0x30 + "", "", "", "", "\\u003c", "", "\\u003e", "", + "", "", "", "", "", "", "", "", // 0x40 + "", "", "", "", "", "", "", "", + "", "", "", "", "", "", "", "", // 0x50 + "", "", "", "", "\\\\", "", "", "", + "", "", "", "", "", "", "", "", // 0x60 + "", "", "", "", "", "", "", "", + "", "", "", "", "", "", "", "", // 0x70 + "", "", "", "", "", "", "", "\\u007f", + // C1 (ISO 8859 and Unicode) extended control characters + "\\u0080", "\\u0081", "\\u0082", "\\u0083", // 0x80 + "\\u0084", "\\u0085", "\\u0086", "\\u0087", + "\\u0088", "\\u0089", "\\u008a", "\\u008b", + "\\u008c", "\\u008d", "\\u008e", "\\u008f", + "\\u0090", "\\u0091", "\\u0092", "\\u0093", // 0x90 + "\\u0094", "\\u0095", "\\u0096", "\\u0097", + "\\u0098", "\\u0099", "\\u009a", "\\u009b", + "\\u009c", "\\u009d", "\\u009e", "\\u009f" + }; + + static JsonFormatter() + { + for (int i = 0; i < CommonRepresentations.Length; i++) + { + if (CommonRepresentations[i] == "") + { + CommonRepresentations[i] = ((char) i).ToString(); + } + } + } + + private readonly Settings settings; + + private bool DiagnosticOnly => ReferenceEquals(this, diagnosticFormatter); + + /// + /// Creates a new formatted with the given settings. + /// + /// The settings. + public JsonFormatter(Settings settings) + { + this.settings = settings; + } + + /// + /// Formats the specified message as JSON. + /// + /// The message to format. + /// The formatted message. + public string Format(IMessage message) + { + var writer = new StringWriter(); + Format(message, writer); + return writer.ToString(); + } + + /// + /// Formats the specified message as JSON. + /// + /// The message to format. + /// The TextWriter to write the formatted message to. + /// The formatted message. + public void Format(IMessage message, TextWriter writer) + { + ProtoPreconditions.CheckNotNull(message, nameof(message)); + ProtoPreconditions.CheckNotNull(writer, nameof(writer)); + + if (message.Descriptor.IsWellKnownType) + { + WriteWellKnownTypeValue(writer, message.Descriptor, message); + } + else + { + WriteMessage(writer, message); + } + } + + /// + /// Converts a message to JSON for diagnostic purposes with no extra context. + /// + /// + /// + /// This differs from calling on the default JSON + /// formatter in its handling of . As no type registry is available + /// in calls, the normal way of resolving the type of + /// an Any message cannot be applied. Instead, a JSON property named @value + /// is included with the base64 data from the property of the message. + /// + /// The value returned by this method is only designed to be used for diagnostic + /// purposes. It may not be parsable by , and may not be parsable + /// by other Protocol Buffer implementations. + /// + /// The message to format for diagnostic purposes. 
+ /// The diagnostic-only JSON representation of the message + public static string ToDiagnosticString(IMessage message) + { + ProtoPreconditions.CheckNotNull(message, nameof(message)); + return diagnosticFormatter.Format(message); + } + + private void WriteMessage(TextWriter writer, IMessage message) + { + if (message == null) + { + WriteNull(writer); + return; + } + if (DiagnosticOnly) + { + ICustomDiagnosticMessage customDiagnosticMessage = message as ICustomDiagnosticMessage; + if (customDiagnosticMessage != null) + { + writer.Write(customDiagnosticMessage.ToDiagnosticString()); + return; + } + } + writer.Write("{ "); + bool writtenFields = WriteMessageFields(writer, message, false); + writer.Write(writtenFields ? " }" : "}"); + } + + private bool WriteMessageFields(TextWriter writer, IMessage message, bool assumeFirstFieldWritten) + { + var fields = message.Descriptor.Fields; + bool first = !assumeFirstFieldWritten; + // First non-oneof fields + foreach (var field in fields.InFieldNumberOrder()) + { + var accessor = field.Accessor; + if (field.ContainingOneof != null && field.ContainingOneof.Accessor.GetCaseFieldDescriptor(message) != field) + { + continue; + } + // Omit default values unless we're asked to format them, or they're oneofs (where the default + // value is still formatted regardless, because that's how we preserve the oneof case). + object value = accessor.GetValue(message); + if (field.ContainingOneof == null && !settings.FormatDefaultValues && IsDefaultValue(accessor, value)) + { + continue; + } + + // Okay, all tests complete: let's write the field value... + if (!first) + { + writer.Write(PropertySeparator); + } + + WriteString(writer, accessor.Descriptor.JsonName); + writer.Write(NameValueSeparator); + WriteValue(writer, value); + + first = false; + } + return !first; + } + + /// + /// Camel-case converter with added strictness for field mask formatting. + /// + /// The field mask is invalid for JSON representation + private static string ToCamelCaseForFieldMask(string input) + { + for (int i = 0; i < input.Length; i++) + { + char c = input[i]; + if (c >= 'A' && c <= 'Z') + { + throw new InvalidOperationException($"Invalid field mask to be converted to JSON: {input}"); + } + if (c == '_' && i < input.Length - 1) + { + char next = input[i + 1]; + if (next < 'a' || next > 'z') + { + throw new InvalidOperationException($"Invalid field mask to be converted to JSON: {input}"); + } + } + } + return ToCamelCase(input); + } + + // Converted from src/google/protobuf/util/internal/utility.cc ToCamelCase + // TODO: Use the new field in FieldDescriptor. + internal static string ToCamelCase(string input) + { + bool capitalizeNext = false; + bool wasCap = true; + bool isCap = false; + bool firstWord = true; + StringBuilder result = new StringBuilder(input.Length); + + for (int i = 0; i < input.Length; i++, wasCap = isCap) + { + isCap = char.IsUpper(input[i]); + if (input[i] == '_') + { + capitalizeNext = true; + if (result.Length != 0) + { + firstWord = false; + } + continue; + } + else if (firstWord) + { + // Consider when the current character B is capitalized, + // first word ends when: + // 1) following a lowercase: "...aB..." + // 2) followed by a lowercase: "...ABc..." 
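                // Illustrative mappings, hand-traced against this implementation (not part of
                // the upstream comment): "foo_bar" -> "fooBar", "foo_bar_baz" -> "fooBarBaz",
                // "FooBar" -> "fooBar".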
+ if (result.Length != 0 && isCap && + (!wasCap || (i + 1 < input.Length && char.IsLower(input[i + 1])))) + { + firstWord = false; + } + else + { + result.Append(char.ToLowerInvariant(input[i])); + continue; + } + } + else if (capitalizeNext) + { + capitalizeNext = false; + if (char.IsLower(input[i])) + { + result.Append(char.ToUpperInvariant(input[i])); + continue; + } + } + result.Append(input[i]); + } + return result.ToString(); + } + + private static void WriteNull(TextWriter writer) + { + writer.Write("null"); + } + + private static bool IsDefaultValue(IFieldAccessor accessor, object value) + { + if (accessor.Descriptor.IsMap) + { + IDictionary dictionary = (IDictionary) value; + return dictionary.Count == 0; + } + if (accessor.Descriptor.IsRepeated) + { + IList list = (IList) value; + return list.Count == 0; + } + switch (accessor.Descriptor.FieldType) + { + case FieldType.Bool: + return (bool) value == false; + case FieldType.Bytes: + return (ByteString) value == ByteString.Empty; + case FieldType.String: + return (string) value == ""; + case FieldType.Double: + return (double) value == 0.0; + case FieldType.SInt32: + case FieldType.Int32: + case FieldType.SFixed32: + case FieldType.Enum: + return (int) value == 0; + case FieldType.Fixed32: + case FieldType.UInt32: + return (uint) value == 0; + case FieldType.Fixed64: + case FieldType.UInt64: + return (ulong) value == 0; + case FieldType.SFixed64: + case FieldType.Int64: + case FieldType.SInt64: + return (long) value == 0; + case FieldType.Float: + return (float) value == 0f; + case FieldType.Message: + case FieldType.Group: // Never expect to get this, but... + return value == null; + default: + throw new ArgumentException("Invalid field type"); + } + } + + private void WriteValue(TextWriter writer, object value) + { + if (value == null) + { + WriteNull(writer); + } + else if (value is bool) + { + writer.Write((bool)value ? 
"true" : "false"); + } + else if (value is ByteString) + { + // Nothing in Base64 needs escaping + writer.Write('"'); + writer.Write(((ByteString)value).ToBase64()); + writer.Write('"'); + } + else if (value is string) + { + WriteString(writer, (string)value); + } + else if (value is IDictionary) + { + WriteDictionary(writer, (IDictionary)value); + } + else if (value is IList) + { + WriteList(writer, (IList)value); + } + else if (value is int || value is uint) + { + IFormattable formattable = (IFormattable) value; + writer.Write(formattable.ToString("d", CultureInfo.InvariantCulture)); + } + else if (value is long || value is ulong) + { + writer.Write('"'); + IFormattable formattable = (IFormattable) value; + writer.Write(formattable.ToString("d", CultureInfo.InvariantCulture)); + writer.Write('"'); + } + else if (value is System.Enum) + { + string name = OriginalEnumValueHelper.GetOriginalName(value); + if (name != null) + { + WriteString(writer, name); + } + else + { + WriteValue(writer, (int)value); + } + } + else if (value is float || value is double) + { + string text = ((IFormattable) value).ToString("r", CultureInfo.InvariantCulture); + if (text == "NaN" || text == "Infinity" || text == "-Infinity") + { + writer.Write('"'); + writer.Write(text); + writer.Write('"'); + } + else + { + writer.Write(text); + } + } + else if (value is IMessage) + { + IMessage message = (IMessage) value; + if (message.Descriptor.IsWellKnownType) + { + WriteWellKnownTypeValue(writer, message.Descriptor, value); + } + else + { + WriteMessage(writer, (IMessage)value); + } + } + else + { + throw new ArgumentException("Unable to format value of type " + value.GetType()); + } + } + + /// + /// Central interception point for well-known type formatting. Any well-known types which + /// don't need special handling can fall back to WriteMessage. We avoid assuming that the + /// values are using the embedded well-known types, in order to allow for dynamic messages + /// in the future. + /// + private void WriteWellKnownTypeValue(TextWriter writer, MessageDescriptor descriptor, object value) + { + // Currently, we can never actually get here, because null values are always handled by the caller. But if we *could*, + // this would do the right thing. + if (value == null) + { + WriteNull(writer); + return; + } + // For wrapper types, the value will either be the (possibly boxed) "native" value, + // or the message itself if we're formatting it at the top level (e.g. just calling ToString on the object itself). + // If it's the message form, we can extract the value first, which *will* be the (possibly boxed) native value, + // and then proceed, writing it as if we were definitely in a field. (We never need to wrap it in an extra string... + // WriteValue will do the right thing.) 
+ if (descriptor.IsWrapperType) + { + if (value is IMessage) + { + var message = (IMessage) value; + value = message.Descriptor.Fields[WrappersReflection.WrapperValueFieldNumber].Accessor.GetValue(message); + } + WriteValue(writer, value); + return; + } + if (descriptor.FullName == Timestamp.Descriptor.FullName) + { + WriteTimestamp(writer, (IMessage)value); + return; + } + if (descriptor.FullName == Duration.Descriptor.FullName) + { + WriteDuration(writer, (IMessage)value); + return; + } + if (descriptor.FullName == FieldMask.Descriptor.FullName) + { + WriteFieldMask(writer, (IMessage)value); + return; + } + if (descriptor.FullName == Struct.Descriptor.FullName) + { + WriteStruct(writer, (IMessage)value); + return; + } + if (descriptor.FullName == ListValue.Descriptor.FullName) + { + var fieldAccessor = descriptor.Fields[ListValue.ValuesFieldNumber].Accessor; + WriteList(writer, (IList)fieldAccessor.GetValue((IMessage)value)); + return; + } + if (descriptor.FullName == Value.Descriptor.FullName) + { + WriteStructFieldValue(writer, (IMessage)value); + return; + } + if (descriptor.FullName == Any.Descriptor.FullName) + { + WriteAny(writer, (IMessage)value); + return; + } + WriteMessage(writer, (IMessage)value); + } + + private void WriteTimestamp(TextWriter writer, IMessage value) + { + // TODO: In the common case where this *is* using the built-in Timestamp type, we could + // avoid all the reflection at this point, by casting to Timestamp. In the interests of + // avoiding subtle bugs, don't do that until we've implemented DynamicMessage so that we can prove + // it still works in that case. + int nanos = (int) value.Descriptor.Fields[Timestamp.NanosFieldNumber].Accessor.GetValue(value); + long seconds = (long) value.Descriptor.Fields[Timestamp.SecondsFieldNumber].Accessor.GetValue(value); + writer.Write(Timestamp.ToJson(seconds, nanos, DiagnosticOnly)); + } + + private void WriteDuration(TextWriter writer, IMessage value) + { + // TODO: Same as for WriteTimestamp + int nanos = (int) value.Descriptor.Fields[Duration.NanosFieldNumber].Accessor.GetValue(value); + long seconds = (long) value.Descriptor.Fields[Duration.SecondsFieldNumber].Accessor.GetValue(value); + writer.Write(Duration.ToJson(seconds, nanos, DiagnosticOnly)); + } + + private void WriteFieldMask(TextWriter writer, IMessage value) + { + var paths = (IList) value.Descriptor.Fields[FieldMask.PathsFieldNumber].Accessor.GetValue(value); + writer.Write(FieldMask.ToJson(paths, DiagnosticOnly)); + } + + private void WriteAny(TextWriter writer, IMessage value) + { + if (DiagnosticOnly) + { + WriteDiagnosticOnlyAny(writer, value); + return; + } + + string typeUrl = (string) value.Descriptor.Fields[Any.TypeUrlFieldNumber].Accessor.GetValue(value); + ByteString data = (ByteString) value.Descriptor.Fields[Any.ValueFieldNumber].Accessor.GetValue(value); + string typeName = Any.GetTypeName(typeUrl); + MessageDescriptor descriptor = settings.TypeRegistry.Find(typeName); + if (descriptor == null) + { + throw new InvalidOperationException($"Type registry has no descriptor for type name '{typeName}'"); + } + IMessage message = descriptor.Parser.ParseFrom(data); + writer.Write("{ "); + WriteString(writer, AnyTypeUrlField); + writer.Write(NameValueSeparator); + WriteString(writer, typeUrl); + + if (descriptor.IsWellKnownType) + { + writer.Write(PropertySeparator); + WriteString(writer, AnyWellKnownTypeValueField); + writer.Write(NameValueSeparator); + WriteWellKnownTypeValue(writer, descriptor, message); + } + else + { + 
WriteMessageFields(writer, message, true); + } + writer.Write(" }"); + } + + private void WriteDiagnosticOnlyAny(TextWriter writer, IMessage value) + { + string typeUrl = (string) value.Descriptor.Fields[Any.TypeUrlFieldNumber].Accessor.GetValue(value); + ByteString data = (ByteString) value.Descriptor.Fields[Any.ValueFieldNumber].Accessor.GetValue(value); + writer.Write("{ "); + WriteString(writer, AnyTypeUrlField); + writer.Write(NameValueSeparator); + WriteString(writer, typeUrl); + writer.Write(PropertySeparator); + WriteString(writer, AnyDiagnosticValueField); + writer.Write(NameValueSeparator); + writer.Write('"'); + writer.Write(data.ToBase64()); + writer.Write('"'); + writer.Write(" }"); + } + + private void WriteStruct(TextWriter writer, IMessage message) + { + writer.Write("{ "); + IDictionary fields = (IDictionary) message.Descriptor.Fields[Struct.FieldsFieldNumber].Accessor.GetValue(message); + bool first = true; + foreach (DictionaryEntry entry in fields) + { + string key = (string) entry.Key; + IMessage value = (IMessage) entry.Value; + if (string.IsNullOrEmpty(key) || value == null) + { + throw new InvalidOperationException("Struct fields cannot have an empty key or a null value."); + } + + if (!first) + { + writer.Write(PropertySeparator); + } + WriteString(writer, key); + writer.Write(NameValueSeparator); + WriteStructFieldValue(writer, value); + first = false; + } + writer.Write(first ? "}" : " }"); + } + + private void WriteStructFieldValue(TextWriter writer, IMessage message) + { + var specifiedField = message.Descriptor.Oneofs[0].Accessor.GetCaseFieldDescriptor(message); + if (specifiedField == null) + { + throw new InvalidOperationException("Value message must contain a value for the oneof."); + } + + object value = specifiedField.Accessor.GetValue(message); + + switch (specifiedField.FieldNumber) + { + case Value.BoolValueFieldNumber: + case Value.StringValueFieldNumber: + case Value.NumberValueFieldNumber: + WriteValue(writer, value); + return; + case Value.StructValueFieldNumber: + case Value.ListValueFieldNumber: + // Structs and ListValues are nested messages, and already well-known types. + var nestedMessage = (IMessage) specifiedField.Accessor.GetValue(message); + WriteWellKnownTypeValue(writer, nestedMessage.Descriptor, nestedMessage); + return; + case Value.NullValueFieldNumber: + WriteNull(writer); + return; + default: + throw new InvalidOperationException("Unexpected case in struct field: " + specifiedField.FieldNumber); + } + } + + internal void WriteList(TextWriter writer, IList list) + { + writer.Write("[ "); + bool first = true; + foreach (var value in list) + { + if (!first) + { + writer.Write(PropertySeparator); + } + WriteValue(writer, value); + first = false; + } + writer.Write(first ? "]" : " ]"); + } + + internal void WriteDictionary(TextWriter writer, IDictionary dictionary) + { + writer.Write("{ "); + bool first = true; + // This will box each pair. Could use IDictionaryEnumerator, but that's ugly in terms of disposal. + foreach (DictionaryEntry pair in dictionary) + { + if (!first) + { + writer.Write(PropertySeparator); + } + string keyText; + if (pair.Key is string) + { + keyText = (string) pair.Key; + } + else if (pair.Key is bool) + { + keyText = (bool) pair.Key ? 
"true" : "false"; + } + else if (pair.Key is int || pair.Key is uint | pair.Key is long || pair.Key is ulong) + { + keyText = ((IFormattable) pair.Key).ToString("d", CultureInfo.InvariantCulture); + } + else + { + if (pair.Key == null) + { + throw new ArgumentException("Dictionary has entry with null key"); + } + throw new ArgumentException("Unhandled dictionary key type: " + pair.Key.GetType()); + } + WriteString(writer, keyText); + writer.Write(NameValueSeparator); + WriteValue(writer, pair.Value); + first = false; + } + writer.Write(first ? "}" : " }"); + } + + /// + /// Returns whether or not a singular value can be represented in JSON. + /// Currently only relevant for enums, where unknown values can't be represented. + /// For repeated/map fields, this always returns true. + /// + private bool CanWriteSingleValue(object value) + { + if (value is System.Enum) + { + return System.Enum.IsDefined(value.GetType(), value); + } + return true; + } + + /// + /// Writes a string (including leading and trailing double quotes) to a builder, escaping as required. + /// + /// + /// Other than surrogate pair handling, this code is mostly taken from src/google/protobuf/util/internal/json_escaping.cc. + /// + internal static void WriteString(TextWriter writer, string text) + { + writer.Write('"'); + for (int i = 0; i < text.Length; i++) + { + char c = text[i]; + if (c < 0xa0) + { + writer.Write(CommonRepresentations[c]); + continue; + } + if (char.IsHighSurrogate(c)) + { + // Encountered first part of a surrogate pair. + // Check that we have the whole pair, and encode both parts as hex. + i++; + if (i == text.Length || !char.IsLowSurrogate(text[i])) + { + throw new ArgumentException("String contains low surrogate not followed by high surrogate"); + } + HexEncodeUtf16CodeUnit(writer, c); + HexEncodeUtf16CodeUnit(writer, text[i]); + continue; + } + else if (char.IsLowSurrogate(c)) + { + throw new ArgumentException("String contains high surrogate not preceded by low surrogate"); + } + switch ((uint) c) + { + // These are not required by json spec + // but used to prevent security bugs in javascript. + case 0xfeff: // Zero width no-break space + case 0xfff9: // Interlinear annotation anchor + case 0xfffa: // Interlinear annotation separator + case 0xfffb: // Interlinear annotation terminator + + case 0x00ad: // Soft-hyphen + case 0x06dd: // Arabic end of ayah + case 0x070f: // Syriac abbreviation mark + case 0x17b4: // Khmer vowel inherent Aq + case 0x17b5: // Khmer vowel inherent Aa + HexEncodeUtf16CodeUnit(writer, c); + break; + + default: + if ((c >= 0x0600 && c <= 0x0603) || // Arabic signs + (c >= 0x200b && c <= 0x200f) || // Zero width etc. + (c >= 0x2028 && c <= 0x202e) || // Separators etc. + (c >= 0x2060 && c <= 0x2064) || // Invisible etc. + (c >= 0x206a && c <= 0x206f)) + { + HexEncodeUtf16CodeUnit(writer, c); + } + else + { + // No handling of surrogates here - that's done earlier + writer.Write(c); + } + break; + } + } + writer.Write('"'); + } + + private const string Hex = "0123456789abcdef"; + private static void HexEncodeUtf16CodeUnit(TextWriter writer, char c) + { + writer.Write("\\u"); + writer.Write(Hex[(c >> 12) & 0xf]); + writer.Write(Hex[(c >> 8) & 0xf]); + writer.Write(Hex[(c >> 4) & 0xf]); + writer.Write(Hex[(c >> 0) & 0xf]); + } + + /// + /// Settings controlling JSON formatting. 
+ /// + public sealed class Settings + { + /// + /// Default settings, as used by + /// + public static Settings Default { get; } + + // Workaround for the Mono compiler complaining about XML comments not being on + // valid language elements. + static Settings() + { + Default = new Settings(false); + } + + /// + /// Whether fields whose values are the default for the field type (e.g. 0 for integers) + /// should be formatted (true) or omitted (false). + /// + public bool FormatDefaultValues { get; } + + /// + /// The type registry used to format messages. + /// + public TypeRegistry TypeRegistry { get; } + + // TODO: Work out how we're going to scale this to multiple settings. "WithXyz" methods? + + /// + /// Creates a new object with the specified formatting of default values + /// and an empty type registry. + /// + /// true if default values (0, empty strings etc) should be formatted; false otherwise. + public Settings(bool formatDefaultValues) : this(formatDefaultValues, TypeRegistry.Empty) + { + } + + /// + /// Creates a new object with the specified formatting of default values + /// and type registry. + /// + /// true if default values (0, empty strings etc) should be formatted; false otherwise. + /// The to use when formatting messages. + public Settings(bool formatDefaultValues, TypeRegistry typeRegistry) + { + FormatDefaultValues = formatDefaultValues; + TypeRegistry = ProtoPreconditions.CheckNotNull(typeRegistry, nameof(typeRegistry)); + } + } + + // Effectively a cache of mapping from enum values to the original name as specified in the proto file, + // fetched by reflection. + // The need for this is unfortunate, as is its unbounded size, but realistically it shouldn't cause issues. + private static class OriginalEnumValueHelper + { + // TODO: In the future we might want to use ConcurrentDictionary, at the point where all + // the platforms we target have it. + private static readonly Dictionary> dictionaries + = new Dictionary>(); + + internal static string GetOriginalName(object value) + { + var enumType = value.GetType(); + Dictionary nameMapping; + lock (dictionaries) + { + if (!dictionaries.TryGetValue(enumType, out nameMapping)) + { + nameMapping = GetNameMapping(enumType); + dictionaries[enumType] = nameMapping; + } + } + + string originalName; + // If this returns false, originalName will be null, which is what we want. + nameMapping.TryGetValue(value, out originalName); + return originalName; + } + + private static Dictionary GetNameMapping(System.Type enumType) => + enumType.GetTypeInfo().DeclaredFields + .Where(f => f.IsStatic) + .ToDictionary(f => f.GetValue(null), + f => f.GetCustomAttributes() + .FirstOrDefault() + // If the attribute hasn't been applied, fall back to the name of the field. + ?.Name ?? f.Name); + } + } +} diff --git a/packager/third_party/protobuf/csharp/src/Google.Protobuf/JsonParser.cs b/packager/third_party/protobuf/csharp/src/Google.Protobuf/JsonParser.cs new file mode 100644 index 0000000000..d738ebb04b --- /dev/null +++ b/packager/third_party/protobuf/csharp/src/Google.Protobuf/JsonParser.cs @@ -0,0 +1,1018 @@ +#region Copyright notice and license +// Protocol Buffers - Google's data interchange format +// Copyright 2015 Google Inc. All rights reserved. 
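A usage sketch for the Settings type defined above; Person is a hypothetical generated message type, registered here only so that Any fields naming it can be expanded:

    using Google.Protobuf;
    using Google.Protobuf.Reflection;

    var settings = new JsonFormatter.Settings(formatDefaultValues: true,
                                              typeRegistry: TypeRegistry.FromMessages(Person.Descriptor));
    var formatter = new JsonFormatter(settings);
    string json = formatter.Format(new Person());   // e.g. { "name": "" } rather than { }

With either the default or custom settings, well-known types follow the proto3 JSON mapping, so for example an Int32Value field is written as a bare number and a Timestamp as an RFC 3339 string such as "2016-04-01T12:00:00Z".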
+// https://developers.google.com/protocol-buffers/ +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +#endregion + +using Google.Protobuf.Reflection; +using Google.Protobuf.WellKnownTypes; +using System; +using System.Collections; +using System.Collections.Generic; +using System.Globalization; +using System.IO; +using System.Text; +using System.Text.RegularExpressions; + +namespace Google.Protobuf +{ + /// + /// Reflection-based converter from JSON to messages. + /// + /// + /// + /// Instances of this class are thread-safe, with no mutable state. + /// + /// + /// This is a simple start to get JSON parsing working. As it's reflection-based, + /// it's not as quick as baking calls into generated messages - but is a simpler implementation. + /// (This code is generally not heavily optimized.) + /// + /// + public sealed class JsonParser + { + // Note: using 0-9 instead of \d to ensure no non-ASCII digits. + // This regex isn't a complete validator, but will remove *most* invalid input. We rely on parsing to do the rest. + private static readonly Regex TimestampRegex = new Regex(@"^(?[0-9]{4}-[01][0-9]-[0-3][0-9]T[012][0-9]:[0-5][0-9]:[0-5][0-9])(?\.[0-9]{1,9})?(?(Z|[+-][0-1][0-9]:[0-5][0-9]))$", FrameworkPortability.CompiledRegexWhereAvailable); + private static readonly Regex DurationRegex = new Regex(@"^(?-)?(?[0-9]{1,12})(?\.[0-9]{1,9})?s$", FrameworkPortability.CompiledRegexWhereAvailable); + private static readonly int[] SubsecondScalingFactors = { 0, 100000000, 100000000, 10000000, 1000000, 100000, 10000, 1000, 100, 10, 1 }; + private static readonly char[] FieldMaskPathSeparators = new[] { ',' }; + + private static readonly JsonParser defaultInstance = new JsonParser(Settings.Default); + + // TODO: Consider introducing a class containing parse state of the parser, tokenizer and depth. That would simplify these handlers + // and the signatures of various methods. 
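A minimal parsing sketch; Person is a hypothetical generated message type, and the proto3 JSON property name is the camelCase form of the proto field name:

    using Google.Protobuf;

    Person person = JsonParser.Default.Parse<Person>("{ \"name\": \"Jan\" }");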
+ private static readonly Dictionary> + WellKnownTypeHandlers = new Dictionary> + { + { Timestamp.Descriptor.FullName, (parser, message, tokenizer) => MergeTimestamp(message, tokenizer.Next()) }, + { Duration.Descriptor.FullName, (parser, message, tokenizer) => MergeDuration(message, tokenizer.Next()) }, + { Value.Descriptor.FullName, (parser, message, tokenizer) => parser.MergeStructValue(message, tokenizer) }, + { ListValue.Descriptor.FullName, (parser, message, tokenizer) => + parser.MergeRepeatedField(message, message.Descriptor.Fields[ListValue.ValuesFieldNumber], tokenizer) }, + { Struct.Descriptor.FullName, (parser, message, tokenizer) => parser.MergeStruct(message, tokenizer) }, + { Any.Descriptor.FullName, (parser, message, tokenizer) => parser.MergeAny(message, tokenizer) }, + { FieldMask.Descriptor.FullName, (parser, message, tokenizer) => MergeFieldMask(message, tokenizer.Next()) }, + { Int32Value.Descriptor.FullName, MergeWrapperField }, + { Int64Value.Descriptor.FullName, MergeWrapperField }, + { UInt32Value.Descriptor.FullName, MergeWrapperField }, + { UInt64Value.Descriptor.FullName, MergeWrapperField }, + { FloatValue.Descriptor.FullName, MergeWrapperField }, + { DoubleValue.Descriptor.FullName, MergeWrapperField }, + { BytesValue.Descriptor.FullName, MergeWrapperField }, + { StringValue.Descriptor.FullName, MergeWrapperField } + }; + + // Convenience method to avoid having to repeat the same code multiple times in the above + // dictionary initialization. + private static void MergeWrapperField(JsonParser parser, IMessage message, JsonTokenizer tokenizer) + { + parser.MergeField(message, message.Descriptor.Fields[WrappersReflection.WrapperValueFieldNumber], tokenizer); + } + + /// + /// Returns a formatter using the default settings. + /// + public static JsonParser Default { get { return defaultInstance; } } + + private readonly Settings settings; + + /// + /// Creates a new formatted with the given settings. + /// + /// The settings. + public JsonParser(Settings settings) + { + this.settings = settings; + } + + /// + /// Parses and merges the information into the given message. + /// + /// The message to merge the JSON information into. + /// The JSON to parse. + internal void Merge(IMessage message, string json) + { + Merge(message, new StringReader(json)); + } + + /// + /// Parses JSON read from and merges the information into the given message. + /// + /// The message to merge the JSON information into. + /// Reader providing the JSON to parse. + internal void Merge(IMessage message, TextReader jsonReader) + { + var tokenizer = JsonTokenizer.FromTextReader(jsonReader); + Merge(message, tokenizer); + var lastToken = tokenizer.Next(); + if (lastToken != JsonToken.EndDocument) + { + throw new InvalidProtocolBufferException("Expected end of JSON after object"); + } + } + + /// + /// Merges the given message using data from the given tokenizer. In most cases, the next + /// token should be a "start object" token, but wrapper types and nullity can invalidate + /// that assumption. This is implemented as an LL(1) recursive descent parser over the stream + /// of tokens provided by the tokenizer. This token stream is assumed to be valid JSON, with the + /// tokenizer performing that validation - but not every token stream is valid "protobuf JSON". 
+ /// + private void Merge(IMessage message, JsonTokenizer tokenizer) + { + if (tokenizer.ObjectDepth > settings.RecursionLimit) + { + throw InvalidProtocolBufferException.JsonRecursionLimitExceeded(); + } + if (message.Descriptor.IsWellKnownType) + { + Action handler; + if (WellKnownTypeHandlers.TryGetValue(message.Descriptor.FullName, out handler)) + { + handler(this, message, tokenizer); + return; + } + // Well-known types with no special handling continue in the normal way. + } + var token = tokenizer.Next(); + if (token.Type != JsonToken.TokenType.StartObject) + { + throw new InvalidProtocolBufferException("Expected an object"); + } + var descriptor = message.Descriptor; + var jsonFieldMap = descriptor.Fields.ByJsonName(); + // All the oneof fields we've already accounted for - we can only see each of them once. + // The set is created lazily to avoid the overhead of creating a set for every message + // we parsed, when oneofs are relatively rare. + HashSet seenOneofs = null; + while (true) + { + token = tokenizer.Next(); + if (token.Type == JsonToken.TokenType.EndObject) + { + return; + } + if (token.Type != JsonToken.TokenType.Name) + { + throw new InvalidOperationException("Unexpected token type " + token.Type); + } + string name = token.StringValue; + FieldDescriptor field; + if (jsonFieldMap.TryGetValue(name, out field)) + { + if (field.ContainingOneof != null) + { + if (seenOneofs == null) + { + seenOneofs = new HashSet(); + } + if (!seenOneofs.Add(field.ContainingOneof)) + { + throw new InvalidProtocolBufferException($"Multiple values specified for oneof {field.ContainingOneof.Name}"); + } + } + MergeField(message, field, tokenizer); + } + else + { + // TODO: Is this what we want to do? If not, we'll need to skip the value, + // which may be an object or array. (We might want to put code in the tokenizer + // to do that.) + throw new InvalidProtocolBufferException("Unknown field: " + name); + } + } + } + + private void MergeField(IMessage message, FieldDescriptor field, JsonTokenizer tokenizer) + { + var token = tokenizer.Next(); + if (token.Type == JsonToken.TokenType.Null) + { + // Clear the field if we see a null token, unless it's for a singular field of type + // google.protobuf.Value. + // Note: different from Java API, which just ignores it. + // TODO: Bring it more in line? Discuss... + if (field.IsMap || field.IsRepeated || !IsGoogleProtobufValueField(field)) + { + field.Accessor.Clear(message); + return; + } + } + tokenizer.PushBack(token); + + if (field.IsMap) + { + MergeMapField(message, field, tokenizer); + } + else if (field.IsRepeated) + { + MergeRepeatedField(message, field, tokenizer); + } + else + { + var value = ParseSingleValue(field, tokenizer); + field.Accessor.SetValue(message, value); + } + } + + private void MergeRepeatedField(IMessage message, FieldDescriptor field, JsonTokenizer tokenizer) + { + var token = tokenizer.Next(); + if (token.Type != JsonToken.TokenType.StartArray) + { + throw new InvalidProtocolBufferException("Repeated field value was not an array. 
Token type: " + token.Type); + } + + IList list = (IList) field.Accessor.GetValue(message); + while (true) + { + token = tokenizer.Next(); + if (token.Type == JsonToken.TokenType.EndArray) + { + return; + } + tokenizer.PushBack(token); + if (token.Type == JsonToken.TokenType.Null) + { + throw new InvalidProtocolBufferException("Repeated field elements cannot be null"); + } + list.Add(ParseSingleValue(field, tokenizer)); + } + } + + private void MergeMapField(IMessage message, FieldDescriptor field, JsonTokenizer tokenizer) + { + // Map fields are always objects, even if the values are well-known types: ParseSingleValue handles those. + var token = tokenizer.Next(); + if (token.Type != JsonToken.TokenType.StartObject) + { + throw new InvalidProtocolBufferException("Expected an object to populate a map"); + } + + var type = field.MessageType; + var keyField = type.FindFieldByNumber(1); + var valueField = type.FindFieldByNumber(2); + if (keyField == null || valueField == null) + { + throw new InvalidProtocolBufferException("Invalid map field: " + field.FullName); + } + IDictionary dictionary = (IDictionary) field.Accessor.GetValue(message); + + while (true) + { + token = tokenizer.Next(); + if (token.Type == JsonToken.TokenType.EndObject) + { + return; + } + object key = ParseMapKey(keyField, token.StringValue); + object value = ParseSingleValue(valueField, tokenizer); + if (value == null) + { + throw new InvalidProtocolBufferException("Map values must not be null"); + } + dictionary[key] = value; + } + } + + private static bool IsGoogleProtobufValueField(FieldDescriptor field) + { + return field.FieldType == FieldType.Message && + field.MessageType.FullName == Value.Descriptor.FullName; + } + + private object ParseSingleValue(FieldDescriptor field, JsonTokenizer tokenizer) + { + var token = tokenizer.Next(); + if (token.Type == JsonToken.TokenType.Null) + { + // TODO: In order to support dynamic messages, we should really build this up + // dynamically. + if (IsGoogleProtobufValueField(field)) + { + return Value.ForNull(); + } + return null; + } + + var fieldType = field.FieldType; + if (fieldType == FieldType.Message) + { + // Parse wrapper types as their constituent types. + // TODO: What does this mean for null? + if (field.MessageType.IsWrapperType) + { + field = field.MessageType.Fields[WrappersReflection.WrapperValueFieldNumber]; + fieldType = field.FieldType; + } + else + { + // TODO: Merge the current value in message? (Public API currently doesn't make this relevant as we don't expose merging.) + tokenizer.PushBack(token); + IMessage subMessage = NewMessageForField(field); + Merge(subMessage, tokenizer); + return subMessage; + } + } + + switch (token.Type) + { + case JsonToken.TokenType.True: + case JsonToken.TokenType.False: + if (fieldType == FieldType.Bool) + { + return token.Type == JsonToken.TokenType.True; + } + // Fall through to "we don't support this type for this case"; could duplicate the behaviour of the default + // case instead, but this way we'd only need to change one place. + goto default; + case JsonToken.TokenType.StringValue: + return ParseSingleStringValue(field, token.StringValue); + // Note: not passing the number value itself here, as we may end up storing the string value in the token too. 
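                    // Illustration (not in the upstream comment): for an int32 field the JSON
                    // number 3.0 is accepted and stored as 3, while 3.5 (not integral) or 1e10
                    // (out of range) is rejected by ParseSingleNumberValue below with an
                    // InvalidProtocolBufferException.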
+ case JsonToken.TokenType.Number: + return ParseSingleNumberValue(field, token); + case JsonToken.TokenType.Null: + throw new NotImplementedException("Haven't worked out what to do for null yet"); + default: + throw new InvalidProtocolBufferException("Unsupported JSON token type " + token.Type + " for field type " + fieldType); + } + } + + /// + /// Parses into a new message. + /// + /// The type of message to create. + /// The JSON to parse. + /// The JSON does not comply with RFC 7159 + /// The JSON does not represent a Protocol Buffers message correctly + public T Parse(string json) where T : IMessage, new() + { + ProtoPreconditions.CheckNotNull(json, nameof(json)); + return Parse(new StringReader(json)); + } + + /// + /// Parses JSON read from into a new message. + /// + /// The type of message to create. + /// Reader providing the JSON to parse. + /// The JSON does not comply with RFC 7159 + /// The JSON does not represent a Protocol Buffers message correctly + public T Parse(TextReader jsonReader) where T : IMessage, new() + { + ProtoPreconditions.CheckNotNull(jsonReader, nameof(jsonReader)); + T message = new T(); + Merge(message, jsonReader); + return message; + } + + /// + /// Parses into a new message. + /// + /// The JSON to parse. + /// Descriptor of message type to parse. + /// The JSON does not comply with RFC 7159 + /// The JSON does not represent a Protocol Buffers message correctly + public IMessage Parse(string json, MessageDescriptor descriptor) + { + ProtoPreconditions.CheckNotNull(json, nameof(json)); + ProtoPreconditions.CheckNotNull(descriptor, nameof(descriptor)); + return Parse(new StringReader(json), descriptor); + } + + /// + /// Parses JSON read from into a new message. + /// + /// Reader providing the JSON to parse. + /// Descriptor of message type to parse. 
+ /// The JSON does not comply with RFC 7159 + /// The JSON does not represent a Protocol Buffers message correctly + public IMessage Parse(TextReader jsonReader, MessageDescriptor descriptor) + { + ProtoPreconditions.CheckNotNull(jsonReader, nameof(jsonReader)); + ProtoPreconditions.CheckNotNull(descriptor, nameof(descriptor)); + IMessage message = descriptor.Parser.CreateTemplate(); + Merge(message, jsonReader); + return message; + } + + private void MergeStructValue(IMessage message, JsonTokenizer tokenizer) + { + var firstToken = tokenizer.Next(); + var fields = message.Descriptor.Fields; + switch (firstToken.Type) + { + case JsonToken.TokenType.Null: + fields[Value.NullValueFieldNumber].Accessor.SetValue(message, 0); + return; + case JsonToken.TokenType.StringValue: + fields[Value.StringValueFieldNumber].Accessor.SetValue(message, firstToken.StringValue); + return; + case JsonToken.TokenType.Number: + fields[Value.NumberValueFieldNumber].Accessor.SetValue(message, firstToken.NumberValue); + return; + case JsonToken.TokenType.False: + case JsonToken.TokenType.True: + fields[Value.BoolValueFieldNumber].Accessor.SetValue(message, firstToken.Type == JsonToken.TokenType.True); + return; + case JsonToken.TokenType.StartObject: + { + var field = fields[Value.StructValueFieldNumber]; + var structMessage = NewMessageForField(field); + tokenizer.PushBack(firstToken); + Merge(structMessage, tokenizer); + field.Accessor.SetValue(message, structMessage); + return; + } + case JsonToken.TokenType.StartArray: + { + var field = fields[Value.ListValueFieldNumber]; + var list = NewMessageForField(field); + tokenizer.PushBack(firstToken); + Merge(list, tokenizer); + field.Accessor.SetValue(message, list); + return; + } + default: + throw new InvalidOperationException("Unexpected token type: " + firstToken.Type); + } + } + + private void MergeStruct(IMessage message, JsonTokenizer tokenizer) + { + var token = tokenizer.Next(); + if (token.Type != JsonToken.TokenType.StartObject) + { + throw new InvalidProtocolBufferException("Expected object value for Struct"); + } + tokenizer.PushBack(token); + + var field = message.Descriptor.Fields[Struct.FieldsFieldNumber]; + MergeMapField(message, field, tokenizer); + } + + private void MergeAny(IMessage message, JsonTokenizer tokenizer) + { + // Record the token stream until we see the @type property. At that point, we can take the value, consult + // the type registry for the relevant message, and replay the stream, omitting the @type property. + var tokens = new List(); + + var token = tokenizer.Next(); + if (token.Type != JsonToken.TokenType.StartObject) + { + throw new InvalidProtocolBufferException("Expected object value for Any"); + } + int typeUrlObjectDepth = tokenizer.ObjectDepth; + + // The check for the property depth protects us from nested Any values which occur before the type URL + // for *this* Any. 
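            // Illustrative inputs (not part of the upstream comment):
            //   ordinary message:  { "@type": "type.googleapis.com/pkg.Foo", "someField": 10 }
            //   well-known type:   { "@type": "type.googleapis.com/google.protobuf.Duration", "value": "1.5s" }
            // Everything except the @type name/value pair ends up in the recorded token list.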
+ while (token.Type != JsonToken.TokenType.Name || + token.StringValue != JsonFormatter.AnyTypeUrlField || + tokenizer.ObjectDepth != typeUrlObjectDepth) + { + tokens.Add(token); + token = tokenizer.Next(); + + if (tokenizer.ObjectDepth < typeUrlObjectDepth) + { + throw new InvalidProtocolBufferException("Any message with no @type"); + } + } + + // Don't add the @type property or its value to the recorded token list + token = tokenizer.Next(); + if (token.Type != JsonToken.TokenType.StringValue) + { + throw new InvalidProtocolBufferException("Expected string value for Any.@type"); + } + string typeUrl = token.StringValue; + string typeName = Any.GetTypeName(typeUrl); + + MessageDescriptor descriptor = settings.TypeRegistry.Find(typeName); + if (descriptor == null) + { + throw new InvalidOperationException($"Type registry has no descriptor for type name '{typeName}'"); + } + + // Now replay the token stream we've already read and anything that remains of the object, just parsing it + // as normal. Our original tokenizer should end up at the end of the object. + var replay = JsonTokenizer.FromReplayedTokens(tokens, tokenizer); + var body = descriptor.Parser.CreateTemplate(); + if (descriptor.IsWellKnownType) + { + MergeWellKnownTypeAnyBody(body, replay); + } + else + { + Merge(body, replay); + } + var data = body.ToByteString(); + + // Now that we have the message data, we can pack it into an Any (the message received as a parameter). + message.Descriptor.Fields[Any.TypeUrlFieldNumber].Accessor.SetValue(message, typeUrl); + message.Descriptor.Fields[Any.ValueFieldNumber].Accessor.SetValue(message, data); + } + + // Well-known types end up in a property called "value" in the JSON. As there's no longer a @type property + // in the given JSON token stream, we should *only* have tokens of start-object, name("value"), the value + // itself, and then end-object. + private void MergeWellKnownTypeAnyBody(IMessage body, JsonTokenizer tokenizer) + { + var token = tokenizer.Next(); // Definitely start-object; checked in previous method + token = tokenizer.Next(); + // TODO: What about an absent Int32Value, for example? + if (token.Type != JsonToken.TokenType.Name || token.StringValue != JsonFormatter.AnyWellKnownTypeValueField) + { + throw new InvalidProtocolBufferException($"Expected '{JsonFormatter.AnyWellKnownTypeValueField}' property for well-known type Any body"); + } + Merge(body, tokenizer); + token = tokenizer.Next(); + if (token.Type != JsonToken.TokenType.EndObject) + { + throw new InvalidProtocolBufferException($"Expected end-object token after @type/value for well-known type"); + } + } + + #region Utility methods which don't depend on the state (or settings) of the parser. 
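        // Example (not in the upstream source): a map<int32, string> field arrives in JSON as
        // { "1": "one", "2": "two" }; ParseMapKey turns the string keys back into int32 values,
        // while bool map keys must be exactly "true" or "false".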
+ private static object ParseMapKey(FieldDescriptor field, string keyText) + { + switch (field.FieldType) + { + case FieldType.Bool: + if (keyText == "true") + { + return true; + } + if (keyText == "false") + { + return false; + } + throw new InvalidProtocolBufferException("Invalid string for bool map key: " + keyText); + case FieldType.String: + return keyText; + case FieldType.Int32: + case FieldType.SInt32: + case FieldType.SFixed32: + return ParseNumericString(keyText, int.Parse); + case FieldType.UInt32: + case FieldType.Fixed32: + return ParseNumericString(keyText, uint.Parse); + case FieldType.Int64: + case FieldType.SInt64: + case FieldType.SFixed64: + return ParseNumericString(keyText, long.Parse); + case FieldType.UInt64: + case FieldType.Fixed64: + return ParseNumericString(keyText, ulong.Parse); + default: + throw new InvalidProtocolBufferException("Invalid field type for map: " + field.FieldType); + } + } + + private static object ParseSingleNumberValue(FieldDescriptor field, JsonToken token) + { + double value = token.NumberValue; + checked + { + try + { + switch (field.FieldType) + { + case FieldType.Int32: + case FieldType.SInt32: + case FieldType.SFixed32: + CheckInteger(value); + return (int) value; + case FieldType.UInt32: + case FieldType.Fixed32: + CheckInteger(value); + return (uint) value; + case FieldType.Int64: + case FieldType.SInt64: + case FieldType.SFixed64: + CheckInteger(value); + return (long) value; + case FieldType.UInt64: + case FieldType.Fixed64: + CheckInteger(value); + return (ulong) value; + case FieldType.Double: + return value; + case FieldType.Float: + if (double.IsNaN(value)) + { + return float.NaN; + } + if (value > float.MaxValue || value < float.MinValue) + { + if (double.IsPositiveInfinity(value)) + { + return float.PositiveInfinity; + } + if (double.IsNegativeInfinity(value)) + { + return float.NegativeInfinity; + } + throw new InvalidProtocolBufferException($"Value out of range: {value}"); + } + return (float) value; + case FieldType.Enum: + CheckInteger(value); + // Just return it as an int, and let the CLR convert it. + // Note that we deliberately don't check that it's a known value. 
+ return (int) value; + default: + throw new InvalidProtocolBufferException($"Unsupported conversion from JSON number for field type {field.FieldType}"); + } + } + catch (OverflowException) + { + throw new InvalidProtocolBufferException($"Value out of range: {value}"); + } + } + } + + private static void CheckInteger(double value) + { + if (double.IsInfinity(value) || double.IsNaN(value)) + { + throw new InvalidProtocolBufferException($"Value not an integer: {value}"); + } + if (value != Math.Floor(value)) + { + throw new InvalidProtocolBufferException($"Value not an integer: {value}"); + } + } + + private static object ParseSingleStringValue(FieldDescriptor field, string text) + { + switch (field.FieldType) + { + case FieldType.String: + return text; + case FieldType.Bytes: + try + { + return ByteString.FromBase64(text); + } + catch (FormatException e) + { + throw InvalidProtocolBufferException.InvalidBase64(e); + } + case FieldType.Int32: + case FieldType.SInt32: + case FieldType.SFixed32: + return ParseNumericString(text, int.Parse); + case FieldType.UInt32: + case FieldType.Fixed32: + return ParseNumericString(text, uint.Parse); + case FieldType.Int64: + case FieldType.SInt64: + case FieldType.SFixed64: + return ParseNumericString(text, long.Parse); + case FieldType.UInt64: + case FieldType.Fixed64: + return ParseNumericString(text, ulong.Parse); + case FieldType.Double: + double d = ParseNumericString(text, double.Parse); + ValidateInfinityAndNan(text, double.IsPositiveInfinity(d), double.IsNegativeInfinity(d), double.IsNaN(d)); + return d; + case FieldType.Float: + float f = ParseNumericString(text, float.Parse); + ValidateInfinityAndNan(text, float.IsPositiveInfinity(f), float.IsNegativeInfinity(f), float.IsNaN(f)); + return f; + case FieldType.Enum: + var enumValue = field.EnumType.FindValueByName(text); + if (enumValue == null) + { + throw new InvalidProtocolBufferException($"Invalid enum value: {text} for enum type: {field.EnumType.FullName}"); + } + // Just return it as an int, and let the CLR convert it. + return enumValue.Number; + default: + throw new InvalidProtocolBufferException($"Unsupported conversion from JSON string for field type {field.FieldType}"); + } + } + + /// + /// Creates a new instance of the message type for the given field. + /// + private static IMessage NewMessageForField(FieldDescriptor field) + { + return field.MessageType.Parser.CreateTemplate(); + } + + private static T ParseNumericString(string text, Func parser) + { + // Can't prohibit this with NumberStyles. + if (text.StartsWith("+")) + { + throw new InvalidProtocolBufferException($"Invalid numeric value: {text}"); + } + if (text.StartsWith("0") && text.Length > 1) + { + if (text[1] >= '0' && text[1] <= '9') + { + throw new InvalidProtocolBufferException($"Invalid numeric value: {text}"); + } + } + else if (text.StartsWith("-0") && text.Length > 2) + { + if (text[2] >= '0' && text[2] <= '9') + { + throw new InvalidProtocolBufferException($"Invalid numeric value: {text}"); + } + } + try + { + return parser(text, NumberStyles.AllowLeadingSign | NumberStyles.AllowDecimalPoint | NumberStyles.AllowExponent, CultureInfo.InvariantCulture); + } + catch (FormatException) + { + throw new InvalidProtocolBufferException($"Invalid numeric value for type: {text}"); + } + catch (OverflowException) + { + throw new InvalidProtocolBufferException($"Value out of range: {text}"); + } + } + + /// + /// Checks that any infinite/NaN values originated from the correct text. 
+ /// This corrects the lenient whitespace handling of double.Parse/float.Parse, as well as the + /// way that Mono parses out-of-range values as infinity. + /// + private static void ValidateInfinityAndNan(string text, bool isPositiveInfinity, bool isNegativeInfinity, bool isNaN) + { + if ((isPositiveInfinity && text != "Infinity") || + (isNegativeInfinity && text != "-Infinity") || + (isNaN && text != "NaN")) + { + throw new InvalidProtocolBufferException($"Invalid numeric value: {text}"); + } + } + + private static void MergeTimestamp(IMessage message, JsonToken token) + { + if (token.Type != JsonToken.TokenType.StringValue) + { + throw new InvalidProtocolBufferException("Expected string value for Timestamp"); + } + var match = TimestampRegex.Match(token.StringValue); + if (!match.Success) + { + throw new InvalidProtocolBufferException($"Invalid Timestamp value: {token.StringValue}"); + } + var dateTime = match.Groups["datetime"].Value; + var subseconds = match.Groups["subseconds"].Value; + var offset = match.Groups["offset"].Value; + + try + { + DateTime parsed = DateTime.ParseExact( + dateTime, + "yyyy-MM-dd'T'HH:mm:ss", + CultureInfo.InvariantCulture, + DateTimeStyles.AssumeUniversal | DateTimeStyles.AdjustToUniversal); + // TODO: It would be nice not to have to create all these objects... easy to optimize later though. + Timestamp timestamp = Timestamp.FromDateTime(parsed); + int nanosToAdd = 0; + if (subseconds != "") + { + // This should always work, as we've got 1-9 digits. + int parsedFraction = int.Parse(subseconds.Substring(1), CultureInfo.InvariantCulture); + nanosToAdd = parsedFraction * SubsecondScalingFactors[subseconds.Length]; + } + int secondsToAdd = 0; + if (offset != "Z") + { + // This is the amount we need to *subtract* from the local time to get to UTC - hence - => +1 and vice versa. + int sign = offset[0] == '-' ? 1 : -1; + int hours = int.Parse(offset.Substring(1, 2), CultureInfo.InvariantCulture); + int minutes = int.Parse(offset.Substring(4, 2)); + int totalMinutes = hours * 60 + minutes; + if (totalMinutes > 18 * 60) + { + throw new InvalidProtocolBufferException("Invalid Timestamp value: " + token.StringValue); + } + if (totalMinutes == 0 && sign == 1) + { + // This is an offset of -00:00, which means "unknown local offset". It makes no sense for a timestamp. + throw new InvalidProtocolBufferException("Invalid Timestamp value: " + token.StringValue); + } + // We need to *subtract* the offset from local time to get UTC. + secondsToAdd = sign * totalMinutes * 60; + } + // Ensure we've got the right signs. Currently unnecessary, but easy to do. + if (secondsToAdd < 0 && nanosToAdd > 0) + { + secondsToAdd++; + nanosToAdd = nanosToAdd - Duration.NanosecondsPerSecond; + } + if (secondsToAdd != 0 || nanosToAdd != 0) + { + timestamp += new Duration { Nanos = nanosToAdd, Seconds = secondsToAdd }; + // The resulting timestamp after offset change would be out of our expected range. Currently the Timestamp message doesn't validate this + // anywhere, but we shouldn't parse it. 
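+ // For example, "0001-01-01T00:00:00+01:00" describes an instant one hour before the minimum
+ // representable timestamp, so the offset-adjusted value must be range-checked here.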
+ if (timestamp.Seconds < Timestamp.UnixSecondsAtBclMinValue || timestamp.Seconds > Timestamp.UnixSecondsAtBclMaxValue) + { + throw new InvalidProtocolBufferException("Invalid Timestamp value: " + token.StringValue); + } + } + message.Descriptor.Fields[Timestamp.SecondsFieldNumber].Accessor.SetValue(message, timestamp.Seconds); + message.Descriptor.Fields[Timestamp.NanosFieldNumber].Accessor.SetValue(message, timestamp.Nanos); + } + catch (FormatException) + { + throw new InvalidProtocolBufferException("Invalid Timestamp value: " + token.StringValue); + } + } + + private static void MergeDuration(IMessage message, JsonToken token) + { + if (token.Type != JsonToken.TokenType.StringValue) + { + throw new InvalidProtocolBufferException("Expected string value for Duration"); + } + var match = DurationRegex.Match(token.StringValue); + if (!match.Success) + { + throw new InvalidProtocolBufferException("Invalid Duration value: " + token.StringValue); + } + var sign = match.Groups["sign"].Value; + var secondsText = match.Groups["int"].Value; + // Prohibit leading insignficant zeroes + if (secondsText[0] == '0' && secondsText.Length > 1) + { + throw new InvalidProtocolBufferException("Invalid Duration value: " + token.StringValue); + } + var subseconds = match.Groups["subseconds"].Value; + var multiplier = sign == "-" ? -1 : 1; + + try + { + long seconds = long.Parse(secondsText, CultureInfo.InvariantCulture) * multiplier; + int nanos = 0; + if (subseconds != "") + { + // This should always work, as we've got 1-9 digits. + int parsedFraction = int.Parse(subseconds.Substring(1)); + nanos = parsedFraction * SubsecondScalingFactors[subseconds.Length] * multiplier; + } + if (!Duration.IsNormalized(seconds, nanos)) + { + throw new InvalidProtocolBufferException($"Invalid Duration value: {token.StringValue}"); + } + message.Descriptor.Fields[Duration.SecondsFieldNumber].Accessor.SetValue(message, seconds); + message.Descriptor.Fields[Duration.NanosFieldNumber].Accessor.SetValue(message, nanos); + } + catch (FormatException) + { + throw new InvalidProtocolBufferException($"Invalid Duration value: {token.StringValue}"); + } + } + + private static void MergeFieldMask(IMessage message, JsonToken token) + { + if (token.Type != JsonToken.TokenType.StringValue) + { + throw new InvalidProtocolBufferException("Expected string value for FieldMask"); + } + // TODO: Do we *want* to remove empty entries? Probably okay to treat "" as "no paths", but "foo,,bar"? + string[] jsonPaths = token.StringValue.Split(FieldMaskPathSeparators, StringSplitOptions.RemoveEmptyEntries); + IList messagePaths = (IList) message.Descriptor.Fields[FieldMask.PathsFieldNumber].Accessor.GetValue(message); + foreach (var path in jsonPaths) + { + messagePaths.Add(ToSnakeCase(path)); + } + } + + // Ported from src/google/protobuf/util/internal/utility.cc + private static string ToSnakeCase(string text) + { + var builder = new StringBuilder(text.Length * 2); + // Note: this is probably unnecessary now, but currently retained to be as close as possible to the + // C++, whilst still throwing an exception on underscores. + bool wasNotUnderscore = false; // Initialize to false for case 1 (below) + bool wasNotCap = false; + + for (int i = 0; i < text.Length; i++) + { + char c = text[i]; + if (c >= 'A' && c <= 'Z') // ascii_isupper + { + // Consider when the current character B is capitalized: + // 1) At beginning of input: "B..." => "b..." + // (e.g. "Biscuit" => "biscuit") + // 2) Following a lowercase: "...aB..." => "...a_b..." + // (e.g. 
"gBike" => "g_bike") + // 3) At the end of input: "...AB" => "...ab" + // (e.g. "GoogleLAB" => "google_lab") + // 4) Followed by a lowercase: "...ABc..." => "...a_bc..." + // (e.g. "GBike" => "g_bike") + if (wasNotUnderscore && // case 1 out + (wasNotCap || // case 2 in, case 3 out + (i + 1 < text.Length && // case 3 out + (text[i + 1] >= 'a' && text[i + 1] <= 'z')))) // ascii_islower(text[i + 1]) + { // case 4 in + // We add an underscore for case 2 and case 4. + builder.Append('_'); + } + // ascii_tolower, but we already know that c *is* an upper case ASCII character... + builder.Append((char) (c + 'a' - 'A')); + wasNotUnderscore = true; + wasNotCap = false; + } + else + { + builder.Append(c); + if (c == '_') + { + throw new InvalidProtocolBufferException($"Invalid field mask: {text}"); + } + wasNotUnderscore = true; + wasNotCap = true; + } + } + return builder.ToString(); + } + #endregion + + /// + /// Settings controlling JSON parsing. + /// + public sealed class Settings + { + /// + /// Default settings, as used by . This has the same default + /// recursion limit as , and an empty type registry. + /// + public static Settings Default { get; } + + // Workaround for the Mono compiler complaining about XML comments not being on + // valid language elements. + static Settings() + { + Default = new Settings(CodedInputStream.DefaultRecursionLimit); + } + + /// + /// The maximum depth of messages to parse. Note that this limit only applies to parsing + /// messages, not collections - so a message within a collection within a message only counts as + /// depth 2, not 3. + /// + public int RecursionLimit { get; } + + /// + /// The type registry used to parse messages. + /// + public TypeRegistry TypeRegistry { get; } + + /// + /// Creates a new object with the specified recursion limit. + /// + /// The maximum depth of messages to parse + public Settings(int recursionLimit) : this(recursionLimit, TypeRegistry.Empty) + { + } + + /// + /// Creates a new object with the specified recursion limit and type registry. + /// + /// The maximum depth of messages to parse + /// The type registry used to parse messages + public Settings(int recursionLimit, TypeRegistry typeRegistry) + { + RecursionLimit = recursionLimit; + TypeRegistry = ProtoPreconditions.CheckNotNull(typeRegistry, nameof(typeRegistry)); + } + } + } +} diff --git a/packager/third_party/protobuf/csharp/src/Google.Protobuf/JsonToken.cs b/packager/third_party/protobuf/csharp/src/Google.Protobuf/JsonToken.cs new file mode 100644 index 0000000000..6c0138ccb6 --- /dev/null +++ b/packager/third_party/protobuf/csharp/src/Google.Protobuf/JsonToken.cs @@ -0,0 +1,166 @@ +#region Copyright notice and license +// Protocol Buffers - Google's data interchange format +// Copyright 2008 Google Inc. All rights reserved. +// https://developers.google.com/protocol-buffers/ +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. 
nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +#endregion + +using System; + +namespace Google.Protobuf +{ + internal sealed class JsonToken : IEquatable + { + // Tokens with no value can be reused. + private static readonly JsonToken _true = new JsonToken(TokenType.True); + private static readonly JsonToken _false = new JsonToken(TokenType.False); + private static readonly JsonToken _null = new JsonToken(TokenType.Null); + private static readonly JsonToken startObject = new JsonToken(TokenType.StartObject); + private static readonly JsonToken endObject = new JsonToken(TokenType.EndObject); + private static readonly JsonToken startArray = new JsonToken(TokenType.StartArray); + private static readonly JsonToken endArray = new JsonToken(TokenType.EndArray); + private static readonly JsonToken endDocument = new JsonToken(TokenType.EndDocument); + + internal static JsonToken Null { get { return _null; } } + internal static JsonToken False { get { return _false; } } + internal static JsonToken True { get { return _true; } } + internal static JsonToken StartObject{ get { return startObject; } } + internal static JsonToken EndObject { get { return endObject; } } + internal static JsonToken StartArray { get { return startArray; } } + internal static JsonToken EndArray { get { return endArray; } } + internal static JsonToken EndDocument { get { return endDocument; } } + + internal static JsonToken Name(string name) + { + return new JsonToken(TokenType.Name, stringValue: name); + } + + internal static JsonToken Value(string value) + { + return new JsonToken(TokenType.StringValue, stringValue: value); + } + + internal static JsonToken Value(double value) + { + return new JsonToken(TokenType.Number, numberValue: value); + } + + internal enum TokenType + { + Null, + False, + True, + StringValue, + Number, + Name, + StartObject, + EndObject, + StartArray, + EndArray, + EndDocument + } + + // A value is a string, number, array, object, null, true or false + // Arrays and objects have start/end + // A document consists of a value + // Objects are name/value sequences. 
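+ //
+ // Illustrative example (an editorial addition, not part of the upstream file): for the input
+ //   {"name": "foo", "tags": [1, 2]}
+ // the tokenizer (see JsonTokenizer.cs) produces, in order:
+ //   StartObject, Name("name"), Value("foo"), Name("tags"), StartArray,
+ //   Value(1), Value(2), EndArray, EndObject, EndDocument
+ // No tokens are produced for the colons or commas between them.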
+ + private readonly TokenType type; + private readonly string stringValue; + private readonly double numberValue; + + internal TokenType Type { get { return type; } } + internal string StringValue { get { return stringValue; } } + internal double NumberValue { get { return numberValue; } } + + private JsonToken(TokenType type, string stringValue = null, double numberValue = 0) + { + this.type = type; + this.stringValue = stringValue; + this.numberValue = numberValue; + } + + public override bool Equals(object obj) + { + return Equals(obj as JsonToken); + } + + public override int GetHashCode() + { + unchecked + { + int hash = 17; + hash = hash * 31 + (int) type; + hash = hash * 31 + stringValue == null ? 0 : stringValue.GetHashCode(); + hash = hash * 31 + numberValue.GetHashCode(); + return hash; + } + } + + public override string ToString() + { + switch (type) + { + case TokenType.Null: + return "null"; + case TokenType.True: + return "true"; + case TokenType.False: + return "false"; + case TokenType.Name: + return "name (" + stringValue + ")"; + case TokenType.StringValue: + return "value (" + stringValue + ")"; + case TokenType.Number: + return "number (" + numberValue + ")"; + case TokenType.StartObject: + return "start-object"; + case TokenType.EndObject: + return "end-object"; + case TokenType.StartArray: + return "start-array"; + case TokenType.EndArray: + return "end-array"; + case TokenType.EndDocument: + return "end-document"; + default: + throw new InvalidOperationException("Token is of unknown type " + type); + } + } + + public bool Equals(JsonToken other) + { + if (ReferenceEquals(other, null)) + { + return false; + } + // Note use of other.numberValue.Equals rather than ==, so that NaN compares appropriately. + return other.type == type && other.stringValue == stringValue && other.numberValue.Equals(numberValue); + } + } +} diff --git a/packager/third_party/protobuf/csharp/src/Google.Protobuf/JsonTokenizer.cs b/packager/third_party/protobuf/csharp/src/Google.Protobuf/JsonTokenizer.cs new file mode 100644 index 0000000000..09a6d43b7b --- /dev/null +++ b/packager/third_party/protobuf/csharp/src/Google.Protobuf/JsonTokenizer.cs @@ -0,0 +1,738 @@ +#region Copyright notice and license +// Protocol Buffers - Google's data interchange format +// Copyright 2008 Google Inc. All rights reserved. +// https://developers.google.com/protocol-buffers/ +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +#endregion +using System; +using System.Collections.Generic; +using System.Globalization; +using System.IO; +using System.Text; + +namespace Google.Protobuf +{ + /// + /// Simple but strict JSON tokenizer, rigidly following RFC 7159. + /// + /// + /// + /// This tokenizer is stateful, and only returns "useful" tokens - names, values etc. + /// It does not create tokens for the separator between names and values, or for the comma + /// between values. It validates the token stream as it goes - so callers can assume that the + /// tokens it produces are appropriate. For example, it would never produce "start object, end array." + /// + /// Implementation details: the base class handles single token push-back and + /// Not thread-safe. + /// + internal abstract class JsonTokenizer + { + private JsonToken bufferedToken; + + /// + /// Creates a tokenizer that reads from the given text reader. + /// + internal static JsonTokenizer FromTextReader(TextReader reader) + { + return new JsonTextTokenizer(reader); + } + + /// + /// Creates a tokenizer that first replays the given list of tokens, then continues reading + /// from another tokenizer. Note that if the returned tokenizer is "pushed back", that does not push back + /// on the continuation tokenizer, or vice versa. Care should be taken when using this method - it was + /// created for the sake of Any parsing. + /// + internal static JsonTokenizer FromReplayedTokens(IList tokens, JsonTokenizer continuation) + { + return new JsonReplayTokenizer(tokens, continuation); + } + + /// + /// Returns the depth of the stack, purely in objects (not collections). + /// Informally, this is the number of remaining unclosed '{' characters we have. + /// + internal int ObjectDepth { get; private set; } + + // TODO: Why do we allow a different token to be pushed back? It might be better to always remember the previous + // token returned, and allow a parameterless Rewind() method (which could only be called once, just like the current PushBack). + internal void PushBack(JsonToken token) + { + if (bufferedToken != null) + { + throw new InvalidOperationException("Can't push back twice"); + } + bufferedToken = token; + if (token.Type == JsonToken.TokenType.StartObject) + { + ObjectDepth--; + } + else if (token.Type == JsonToken.TokenType.EndObject) + { + ObjectDepth++; + } + } + + /// + /// Returns the next JSON token in the stream. An EndDocument token is returned to indicate the end of the stream, + /// after which point Next() should not be called again. + /// + /// This implementation provides single-token buffering, and calls if there is no buffered token. + /// The next token in the stream. This is never null. 
+ /// This method is called after an EndDocument token has been returned + /// The input text does not comply with RFC 7159 + internal JsonToken Next() + { + JsonToken tokenToReturn; + if (bufferedToken != null) + { + tokenToReturn = bufferedToken; + bufferedToken = null; + } + else + { + tokenToReturn = NextImpl(); + } + if (tokenToReturn.Type == JsonToken.TokenType.StartObject) + { + ObjectDepth++; + } + else if (tokenToReturn.Type == JsonToken.TokenType.EndObject) + { + ObjectDepth--; + } + return tokenToReturn; + } + + /// + /// Returns the next JSON token in the stream, when requested by the base class. (The method delegates + /// to this if it doesn't have a buffered token.) + /// + /// This method is called after an EndDocument token has been returned + /// The input text does not comply with RFC 7159 + protected abstract JsonToken NextImpl(); + + /// + /// Tokenizer which first exhausts a list of tokens, then consults another tokenizer. + /// + private class JsonReplayTokenizer : JsonTokenizer + { + private readonly IList tokens; + private readonly JsonTokenizer nextTokenizer; + private int nextTokenIndex; + + internal JsonReplayTokenizer(IList tokens, JsonTokenizer nextTokenizer) + { + this.tokens = tokens; + this.nextTokenizer = nextTokenizer; + } + + // FIXME: Object depth not maintained... + protected override JsonToken NextImpl() + { + if (nextTokenIndex >= tokens.Count) + { + return nextTokenizer.Next(); + } + return tokens[nextTokenIndex++]; + } + } + + /// + /// Tokenizer which does all the *real* work of parsing JSON. + /// + private sealed class JsonTextTokenizer : JsonTokenizer + { + // The set of states in which a value is valid next token. + private static readonly State ValueStates = State.ArrayStart | State.ArrayAfterComma | State.ObjectAfterColon | State.StartOfDocument; + + private readonly Stack containerStack = new Stack(); + private readonly PushBackReader reader; + private State state; + + internal JsonTextTokenizer(TextReader reader) + { + this.reader = new PushBackReader(reader); + state = State.StartOfDocument; + containerStack.Push(ContainerType.Document); + } + + /// + /// This method essentially just loops through characters skipping whitespace, validating and + /// changing state (e.g. from ObjectBeforeColon to ObjectAfterColon) + /// until it reaches something which will be a genuine token (e.g. a start object, or a value) at which point + /// it returns the token. Although the method is large, it would be relatively hard to break down further... most + /// of it is the large switch statement, which sometimes returns and sometimes doesn't. + /// + protected override JsonToken NextImpl() + { + if (state == State.ReaderExhausted) + { + throw new InvalidOperationException("Next() called after end of document"); + } + while (true) + { + var next = reader.Read(); + if (next == null) + { + ValidateState(State.ExpectedEndOfDocument, "Unexpected end of document in state: "); + state = State.ReaderExhausted; + return JsonToken.EndDocument; + } + switch (next.Value) + { + // Skip whitespace between tokens + case ' ': + case '\t': + case '\r': + case '\n': + break; + case ':': + ValidateState(State.ObjectBeforeColon, "Invalid state to read a colon: "); + state = State.ObjectAfterColon; + break; + case ',': + ValidateState(State.ObjectAfterProperty | State.ArrayAfterValue, "Invalid state to read a colon: "); + state = state == State.ObjectAfterProperty ? 
State.ObjectAfterComma : State.ArrayAfterComma; + break; + case '"': + string stringValue = ReadString(); + if ((state & (State.ObjectStart | State.ObjectAfterComma)) != 0) + { + state = State.ObjectBeforeColon; + return JsonToken.Name(stringValue); + } + else + { + ValidateAndModifyStateForValue("Invalid state to read a double quote: "); + return JsonToken.Value(stringValue); + } + case '{': + ValidateState(ValueStates, "Invalid state to read an open brace: "); + state = State.ObjectStart; + containerStack.Push(ContainerType.Object); + return JsonToken.StartObject; + case '}': + ValidateState(State.ObjectAfterProperty | State.ObjectStart, "Invalid state to read a close brace: "); + PopContainer(); + return JsonToken.EndObject; + case '[': + ValidateState(ValueStates, "Invalid state to read an open square bracket: "); + state = State.ArrayStart; + containerStack.Push(ContainerType.Array); + return JsonToken.StartArray; + case ']': + ValidateState(State.ArrayAfterValue | State.ArrayStart, "Invalid state to read a close square bracket: "); + PopContainer(); + return JsonToken.EndArray; + case 'n': // Start of null + ConsumeLiteral("null"); + ValidateAndModifyStateForValue("Invalid state to read a null literal: "); + return JsonToken.Null; + case 't': // Start of true + ConsumeLiteral("true"); + ValidateAndModifyStateForValue("Invalid state to read a true literal: "); + return JsonToken.True; + case 'f': // Start of false + ConsumeLiteral("false"); + ValidateAndModifyStateForValue("Invalid state to read a false literal: "); + return JsonToken.False; + case '-': // Start of a number + case '0': + case '1': + case '2': + case '3': + case '4': + case '5': + case '6': + case '7': + case '8': + case '9': + double number = ReadNumber(next.Value); + ValidateAndModifyStateForValue("Invalid state to read a number token: "); + return JsonToken.Value(number); + default: + throw new InvalidJsonException("Invalid first character of token: " + next.Value); + } + } + } + + private void ValidateState(State validStates, string errorPrefix) + { + if ((validStates & state) == 0) + { + throw reader.CreateException(errorPrefix + state); + } + } + + /// + /// Reads a string token. It is assumed that the opening " has already been read. + /// + private string ReadString() + { + var value = new StringBuilder(); + bool haveHighSurrogate = false; + while (true) + { + char c = reader.ReadOrFail("Unexpected end of text while reading string"); + if (c < ' ') + { + throw reader.CreateException(string.Format(CultureInfo.InvariantCulture, "Invalid character in string literal: U+{0:x4}", (int) c)); + } + if (c == '"') + { + if (haveHighSurrogate) + { + throw reader.CreateException("Invalid use of surrogate pair code units"); + } + return value.ToString(); + } + if (c == '\\') + { + c = ReadEscapedCharacter(); + } + // TODO: Consider only allowing surrogate pairs that are either both escaped, + // or both not escaped. It would be a very odd text stream that contained a "lone" high surrogate + // followed by an escaped low surrogate or vice versa... and that couldn't even be represented in UTF-8. + if (haveHighSurrogate != char.IsLowSurrogate(c)) + { + throw reader.CreateException("Invalid use of surrogate pair code units"); + } + haveHighSurrogate = char.IsHighSurrogate(c); + value.Append(c); + } + } + + /// + /// Reads an escaped character. It is assumed that the leading backslash has already been read. 
+ /// + private char ReadEscapedCharacter() + { + char c = reader.ReadOrFail("Unexpected end of text while reading character escape sequence"); + switch (c) + { + case 'n': + return '\n'; + case '\\': + return '\\'; + case 'b': + return '\b'; + case 'f': + return '\f'; + case 'r': + return '\r'; + case 't': + return '\t'; + case '"': + return '"'; + case '/': + return '/'; + case 'u': + return ReadUnicodeEscape(); + default: + throw reader.CreateException(string.Format(CultureInfo.InvariantCulture, "Invalid character in character escape sequence: U+{0:x4}", (int) c)); + } + } + + /// + /// Reads an escaped Unicode 4-nybble hex sequence. It is assumed that the leading \u has already been read. + /// + private char ReadUnicodeEscape() + { + int result = 0; + for (int i = 0; i < 4; i++) + { + char c = reader.ReadOrFail("Unexpected end of text while reading Unicode escape sequence"); + int nybble; + if (c >= '0' && c <= '9') + { + nybble = c - '0'; + } + else if (c >= 'a' && c <= 'f') + { + nybble = c - 'a' + 10; + } + else if (c >= 'A' && c <= 'F') + { + nybble = c - 'A' + 10; + } + else + { + throw reader.CreateException(string.Format(CultureInfo.InvariantCulture, "Invalid character in character escape sequence: U+{0:x4}", (int) c)); + } + result = (result << 4) + nybble; + } + return (char) result; + } + + /// + /// Consumes a text-only literal, throwing an exception if the read text doesn't match it. + /// It is assumed that the first letter of the literal has already been read. + /// + private void ConsumeLiteral(string text) + { + for (int i = 1; i < text.Length; i++) + { + char? next = reader.Read(); + if (next == null) + { + throw reader.CreateException("Unexpected end of text while reading literal token " + text); + } + if (next.Value != text[i]) + { + throw reader.CreateException("Unexpected character while reading literal token " + text); + } + } + } + + private double ReadNumber(char initialCharacter) + { + StringBuilder builder = new StringBuilder(); + if (initialCharacter == '-') + { + builder.Append("-"); + } + else + { + reader.PushBack(initialCharacter); + } + // Each method returns the character it read that doesn't belong in that part, + // so we know what to do next, including pushing the character back at the end. + // null is returned for "end of text". + char? next = ReadInt(builder); + if (next == '.') + { + next = ReadFrac(builder); + } + if (next == 'e' || next == 'E') + { + next = ReadExp(builder); + } + // If we read a character which wasn't part of the number, push it back so we can read it again + // to parse the next token. + if (next != null) + { + reader.PushBack(next.Value); + } + + // TODO: What exception should we throw if the value can't be represented as a double? + try + { + return double.Parse(builder.ToString(), + NumberStyles.AllowLeadingSign | NumberStyles.AllowDecimalPoint | NumberStyles.AllowExponent, + CultureInfo.InvariantCulture); + } + catch (OverflowException) + { + throw reader.CreateException("Numeric value out of range: " + builder); + } + } + + private char? ReadInt(StringBuilder builder) + { + char first = reader.ReadOrFail("Invalid numeric literal"); + if (first < '0' || first > '9') + { + throw reader.CreateException("Invalid numeric literal"); + } + builder.Append(first); + int digitCount; + char? next = ConsumeDigits(builder, out digitCount); + if (first == '0' && digitCount != 0) + { + throw reader.CreateException("Invalid numeric literal: leading 0 for non-zero value."); + } + return next; + } + + private char? 
ReadFrac(StringBuilder builder) + { + builder.Append('.'); // Already consumed this + int digitCount; + char? next = ConsumeDigits(builder, out digitCount); + if (digitCount == 0) + { + throw reader.CreateException("Invalid numeric literal: fraction with no trailing digits"); + } + return next; + } + + private char? ReadExp(StringBuilder builder) + { + builder.Append('E'); // Already consumed this (or 'e') + char? next = reader.Read(); + if (next == null) + { + throw reader.CreateException("Invalid numeric literal: exponent with no trailing digits"); + } + if (next == '-' || next == '+') + { + builder.Append(next.Value); + } + else + { + reader.PushBack(next.Value); + } + int digitCount; + next = ConsumeDigits(builder, out digitCount); + if (digitCount == 0) + { + throw reader.CreateException("Invalid numeric literal: exponent without value"); + } + return next; + } + + private char? ConsumeDigits(StringBuilder builder, out int count) + { + count = 0; + while (true) + { + char? next = reader.Read(); + if (next == null || next.Value < '0' || next.Value > '9') + { + return next; + } + count++; + builder.Append(next.Value); + } + } + + /// + /// Validates that we're in a valid state to read a value (using the given error prefix if necessary) + /// and changes the state to the appropriate one, e.g. ObjectAfterColon to ObjectAfterProperty. + /// + private void ValidateAndModifyStateForValue(string errorPrefix) + { + ValidateState(ValueStates, errorPrefix); + switch (state) + { + case State.StartOfDocument: + state = State.ExpectedEndOfDocument; + return; + case State.ObjectAfterColon: + state = State.ObjectAfterProperty; + return; + case State.ArrayStart: + case State.ArrayAfterComma: + state = State.ArrayAfterValue; + return; + default: + throw new InvalidOperationException("ValidateAndModifyStateForValue does not handle all value states (and should)"); + } + } + + /// + /// Pops the top-most container, and sets the state to the appropriate one for the end of a value + /// in the parent container. + /// + private void PopContainer() + { + containerStack.Pop(); + var parent = containerStack.Peek(); + switch (parent) + { + case ContainerType.Object: + state = State.ObjectAfterProperty; + break; + case ContainerType.Array: + state = State.ArrayAfterValue; + break; + case ContainerType.Document: + state = State.ExpectedEndOfDocument; + break; + default: + throw new InvalidOperationException("Unexpected container type: " + parent); + } + } + + private enum ContainerType + { + Document, Object, Array + } + + /// + /// Possible states of the tokenizer. + /// + /// + /// This is a flags enum purely so we can simply and efficiently represent a set of valid states + /// for checking. + /// + /// Each is documented with an example, + /// where ^ represents the current position within the text stream. The examples all use string values, + /// but could be any value, including nested objects/arrays. + /// The complete state of the tokenizer also includes a stack to indicate the contexts (arrays/objects). + /// Any additional notional state of "AfterValue" indicates that a value has been completed, at which + /// point there's an immediate transition to ExpectedEndOfDocument, ObjectAfterProperty or ArrayAfterValue. + /// + /// + /// These states were derived manually by reading RFC 7159 carefully. + /// + /// + [Flags] + private enum State + { + /// + /// ^ { "foo": "bar" } + /// Before the value in a document. 
Next states: ObjectStart, ArrayStart, "AfterValue" + /// + StartOfDocument = 1 << 0, + /// + /// { "foo": "bar" } ^ + /// After the value in a document. Next states: ReaderExhausted + /// + ExpectedEndOfDocument = 1 << 1, + /// + /// { "foo": "bar" } ^ (and already read to the end of the reader) + /// Terminal state. + /// + ReaderExhausted = 1 << 2, + /// + /// { ^ "foo": "bar" } + /// Before the *first* property in an object. + /// Next states: + /// "AfterValue" (empty object) + /// ObjectBeforeColon (read a name) + /// + ObjectStart = 1 << 3, + /// + /// { "foo" ^ : "bar", "x": "y" } + /// Next state: ObjectAfterColon + /// + ObjectBeforeColon = 1 << 4, + /// + /// { "foo" : ^ "bar", "x": "y" } + /// Before any property other than the first in an object. + /// (Equivalently: after any property in an object) + /// Next states: + /// "AfterValue" (value is simple) + /// ObjectStart (value is object) + /// ArrayStart (value is array) + /// + ObjectAfterColon = 1 << 5, + /// + /// { "foo" : "bar" ^ , "x" : "y" } + /// At the end of a property, so expecting either a comma or end-of-object + /// Next states: ObjectAfterComma or "AfterValue" + /// + ObjectAfterProperty = 1 << 6, + /// + /// { "foo":"bar", ^ "x":"y" } + /// Read the comma after the previous property, so expecting another property. + /// This is like ObjectStart, but closing brace isn't valid here + /// Next state: ObjectBeforeColon. + /// + ObjectAfterComma = 1 << 7, + /// + /// [ ^ "foo", "bar" ] + /// Before the *first* value in an array. + /// Next states: + /// "AfterValue" (read a value) + /// "AfterValue" (end of array; will pop stack) + /// + ArrayStart = 1 << 8, + /// + /// [ "foo" ^ , "bar" ] + /// After any value in an array, so expecting either a comma or end-of-array + /// Next states: ArrayAfterComma or "AfterValue" + /// + ArrayAfterValue = 1 << 9, + /// + /// [ "foo", ^ "bar" ] + /// After a comma in an array, so there *must* be another value (simple or complex). + /// Next states: "AfterValue" (simple value), StartObject, StartArray + /// + ArrayAfterComma = 1 << 10 + } + + /// + /// Wrapper around a text reader allowing small amounts of buffering and location handling. + /// + private class PushBackReader + { + // TODO: Add locations for errors etc. + + private readonly TextReader reader; + + internal PushBackReader(TextReader reader) + { + // TODO: Wrap the reader in a BufferedReader? + this.reader = reader; + } + + /// + /// The buffered next character, if we have one. + /// + private char? nextChar; + + /// + /// Returns the next character in the stream, or null if we have reached the end. + /// + /// + internal char? Read() + { + if (nextChar != null) + { + char? tmp = nextChar; + nextChar = null; + return tmp; + } + int next = reader.Read(); + return next == -1 ? null : (char?) next; + } + + internal char ReadOrFail(string messageOnFailure) + { + char? next = Read(); + if (next == null) + { + throw CreateException(messageOnFailure); + } + return next.Value; + } + + internal void PushBack(char c) + { + if (nextChar != null) + { + throw new InvalidOperationException("Cannot push back when already buffering a character"); + } + nextChar = c; + } + + /// + /// Creates a new exception appropriate for the current state of the reader. + /// + internal InvalidJsonException CreateException(string message) + { + // TODO: Keep track of and use the location. 
+ return new InvalidJsonException(message); + } + } + } + } +} diff --git a/packager/third_party/protobuf/csharp/src/Google.Protobuf/LimitedInputStream.cs b/packager/third_party/protobuf/csharp/src/Google.Protobuf/LimitedInputStream.cs new file mode 100644 index 0000000000..f11d19d944 --- /dev/null +++ b/packager/third_party/protobuf/csharp/src/Google.Protobuf/LimitedInputStream.cs @@ -0,0 +1,110 @@ +#region Copyright notice and license +// Protocol Buffers - Google's data interchange format +// Copyright 2015 Google Inc. All rights reserved. +// https://developers.google.com/protocol-buffers/ +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +#endregion + +using System; +using System.IO; + +namespace Google.Protobuf +{ + /// + /// Stream implementation which proxies another stream, only allowing a certain amount + /// of data to be read. Note that this is only used to read delimited streams, so it + /// doesn't attempt to implement everything. 
+ /// + internal sealed class LimitedInputStream : Stream + { + private readonly Stream proxied; + private int bytesLeft; + + internal LimitedInputStream(Stream proxied, int size) + { + this.proxied = proxied; + bytesLeft = size; + } + + public override bool CanRead + { + get { return true; } + } + + public override bool CanSeek + { + get { return false; } + } + + public override bool CanWrite + { + get { return false; } + } + + public override void Flush() + { + } + + public override long Length + { + get { throw new NotSupportedException(); } + } + + public override long Position + { + get { throw new NotSupportedException(); } + set { throw new NotSupportedException(); } + } + + public override int Read(byte[] buffer, int offset, int count) + { + if (bytesLeft > 0) + { + int bytesRead = proxied.Read(buffer, offset, Math.Min(bytesLeft, count)); + bytesLeft -= bytesRead; + return bytesRead; + } + return 0; + } + + public override long Seek(long offset, SeekOrigin origin) + { + throw new NotSupportedException(); + } + + public override void SetLength(long value) + { + throw new NotSupportedException(); + } + + public override void Write(byte[] buffer, int offset, int count) + { + throw new NotSupportedException(); + } + } +} diff --git a/packager/third_party/protobuf/csharp/src/Google.Protobuf/MessageExtensions.cs b/packager/third_party/protobuf/csharp/src/Google.Protobuf/MessageExtensions.cs new file mode 100644 index 0000000000..047156c3ee --- /dev/null +++ b/packager/third_party/protobuf/csharp/src/Google.Protobuf/MessageExtensions.cs @@ -0,0 +1,157 @@ +#region Copyright notice and license +// Protocol Buffers - Google's data interchange format +// Copyright 2015 Google Inc. All rights reserved. +// https://developers.google.com/protocol-buffers/ +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +#endregion + +using System.IO; + +namespace Google.Protobuf +{ + /// + /// Extension methods on and . + /// + public static class MessageExtensions + { + /// + /// Merges data from the given byte array into an existing message. 
+ /// + /// The message to merge the data into. + /// The data to merge, which must be protobuf-encoded binary data. + public static void MergeFrom(this IMessage message, byte[] data) + { + ProtoPreconditions.CheckNotNull(message, "message"); + ProtoPreconditions.CheckNotNull(data, "data"); + CodedInputStream input = new CodedInputStream(data); + message.MergeFrom(input); + input.CheckReadEndOfStreamTag(); + } + + /// + /// Merges data from the given byte string into an existing message. + /// + /// The message to merge the data into. + /// The data to merge, which must be protobuf-encoded binary data. + public static void MergeFrom(this IMessage message, ByteString data) + { + ProtoPreconditions.CheckNotNull(message, "message"); + ProtoPreconditions.CheckNotNull(data, "data"); + CodedInputStream input = data.CreateCodedInput(); + message.MergeFrom(input); + input.CheckReadEndOfStreamTag(); + } + + /// + /// Merges data from the given stream into an existing message. + /// + /// The message to merge the data into. + /// Stream containing the data to merge, which must be protobuf-encoded binary data. + public static void MergeFrom(this IMessage message, Stream input) + { + ProtoPreconditions.CheckNotNull(message, "message"); + ProtoPreconditions.CheckNotNull(input, "input"); + CodedInputStream codedInput = new CodedInputStream(input); + message.MergeFrom(codedInput); + codedInput.CheckReadEndOfStreamTag(); + } + + /// + /// Merges length-delimited data from the given stream into an existing message. + /// + /// + /// The stream is expected to contain a length and then the data. Only the amount of data + /// specified by the length will be consumed. + /// + /// The message to merge the data into. + /// Stream containing the data to merge, which must be protobuf-encoded binary data. + public static void MergeDelimitedFrom(this IMessage message, Stream input) + { + ProtoPreconditions.CheckNotNull(message, "message"); + ProtoPreconditions.CheckNotNull(input, "input"); + int size = (int) CodedInputStream.ReadRawVarint32(input); + Stream limitedStream = new LimitedInputStream(input, size); + message.MergeFrom(limitedStream); + } + + /// + /// Converts the given message into a byte array in protobuf encoding. + /// + /// The message to convert. + /// The message data as a byte array. + public static byte[] ToByteArray(this IMessage message) + { + ProtoPreconditions.CheckNotNull(message, "message"); + byte[] result = new byte[message.CalculateSize()]; + CodedOutputStream output = new CodedOutputStream(result); + message.WriteTo(output); + output.CheckNoSpaceLeft(); + return result; + } + + /// + /// Writes the given message data to the given stream in protobuf encoding. + /// + /// The message to write to the stream. + /// The stream to write to. + public static void WriteTo(this IMessage message, Stream output) + { + ProtoPreconditions.CheckNotNull(message, "message"); + ProtoPreconditions.CheckNotNull(output, "output"); + CodedOutputStream codedOutput = new CodedOutputStream(output); + message.WriteTo(codedOutput); + codedOutput.Flush(); + } + + /// + /// Writes the length and then data of the given message to a stream. + /// + /// The message to write. + /// The output stream to write to. 
+ public static void WriteDelimitedTo(this IMessage message, Stream output) + { + ProtoPreconditions.CheckNotNull(message, "message"); + ProtoPreconditions.CheckNotNull(output, "output"); + CodedOutputStream codedOutput = new CodedOutputStream(output); + codedOutput.WriteRawVarint32((uint)message.CalculateSize()); + message.WriteTo(codedOutput); + codedOutput.Flush(); + } + + /// + /// Converts the given message into a byte string in protobuf encoding. + /// + /// The message to convert. + /// The message data as a byte string. + public static ByteString ToByteString(this IMessage message) + { + ProtoPreconditions.CheckNotNull(message, "message"); + return ByteString.AttachBytes(message.ToByteArray()); + } + } +} diff --git a/packager/third_party/protobuf/csharp/src/Google.Protobuf/MessageParser.cs b/packager/third_party/protobuf/csharp/src/Google.Protobuf/MessageParser.cs new file mode 100644 index 0000000000..8889638b20 --- /dev/null +++ b/packager/third_party/protobuf/csharp/src/Google.Protobuf/MessageParser.cs @@ -0,0 +1,267 @@ +#region Copyright notice and license +// Protocol Buffers - Google's data interchange format +// Copyright 2015 Google Inc. All rights reserved. +// https://developers.google.com/protocol-buffers/ +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +#endregion + +using System; +using System.IO; + +namespace Google.Protobuf +{ + /// + /// A general message parser, typically used by reflection-based code as all the methods + /// return simple . + /// + public class MessageParser + { + private Func factory; + + internal MessageParser(Func factory) + { + this.factory = factory; + } + + /// + /// Creates a template instance ready for population. + /// + /// An empty message. + internal IMessage CreateTemplate() + { + return factory(); + } + + /// + /// Parses a message from a byte array. + /// + /// The byte array containing the message. Must not be null. + /// The newly parsed message. 
+ public IMessage ParseFrom(byte[] data) + { + ProtoPreconditions.CheckNotNull(data, "data"); + IMessage message = factory(); + message.MergeFrom(data); + return message; + } + + /// + /// Parses a message from the given byte string. + /// + /// The data to parse. + /// The parsed message. + public IMessage ParseFrom(ByteString data) + { + ProtoPreconditions.CheckNotNull(data, "data"); + IMessage message = factory(); + message.MergeFrom(data); + return message; + } + + /// + /// Parses a message from the given stream. + /// + /// The stream to parse. + /// The parsed message. + public IMessage ParseFrom(Stream input) + { + IMessage message = factory(); + message.MergeFrom(input); + return message; + } + + /// + /// Parses a length-delimited message from the given stream. + /// + /// + /// The stream is expected to contain a length and then the data. Only the amount of data + /// specified by the length will be consumed. + /// + /// The stream to parse. + /// The parsed message. + public IMessage ParseDelimitedFrom(Stream input) + { + IMessage message = factory(); + message.MergeDelimitedFrom(input); + return message; + } + + /// + /// Parses a message from the given coded input stream. + /// + /// The stream to parse. + /// The parsed message. + public IMessage ParseFrom(CodedInputStream input) + { + IMessage message = factory(); + message.MergeFrom(input); + return message; + } + + /// + /// Parses a message from the given JSON. + /// + /// The JSON to parse. + /// The parsed message. + /// The JSON does not comply with RFC 7159 + /// The JSON does not represent a Protocol Buffers message correctly + public IMessage ParseJson(string json) + { + IMessage message = factory(); + JsonParser.Default.Merge(message, json); + return message; + } + } + + /// + /// A parser for a specific message type. + /// + /// + ///

+ /// This delegates most behavior to the IMessage.MergeFrom implementation within the
+ /// original type, but provides convenient overloads to parse from a variety of sources.
+ ///
+ /// Most applications will never need to create their own instances of this type;
+ /// instead, use the static Parser property of a generated message type to obtain a
+ /// parser for that type.
+ ///
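+ /// <example>
+ /// A minimal usage sketch; <c>MyMessage</c> stands for any generated message type and is
+ /// not part of this library:
+ /// <code>
+ /// // Generated types expose a static Parser property returning a MessageParser&lt;T&gt;.
+ /// MyMessage fromBytes = MyMessage.Parser.ParseFrom(data);
+ /// MyMessage fromJson = MyMessage.Parser.ParseJson("{ \"someField\": 42 }");
+ /// </code>
+ /// </example>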
+ /// The type of message to be parsed. + public sealed class MessageParser : MessageParser where T : IMessage + { + // Implementation note: all the methods here *could* just delegate up to the base class and cast the result. + // The current implementation avoids a virtual method call and a cast, which *may* be significant in some cases. + // Benchmarking work is required to measure the significance - but it's only a few lines of code in any case. + // The API wouldn't change anyway - just the implementation - so this work can be deferred. + private readonly Func factory; + + /// + /// Creates a new parser. + /// + /// + /// The factory method is effectively an optimization over using a generic constraint + /// to require a parameterless constructor: delegates are significantly faster to execute. + /// + /// Function to invoke when a new, empty message is required. + public MessageParser(Func factory) : base(() => factory()) + { + this.factory = factory; + } + + /// + /// Creates a template instance ready for population. + /// + /// An empty message. + internal new T CreateTemplate() + { + return factory(); + } + + /// + /// Parses a message from a byte array. + /// + /// The byte array containing the message. Must not be null. + /// The newly parsed message. + public new T ParseFrom(byte[] data) + { + ProtoPreconditions.CheckNotNull(data, "data"); + T message = factory(); + message.MergeFrom(data); + return message; + } + + /// + /// Parses a message from the given byte string. + /// + /// The data to parse. + /// The parsed message. + public new T ParseFrom(ByteString data) + { + ProtoPreconditions.CheckNotNull(data, "data"); + T message = factory(); + message.MergeFrom(data); + return message; + } + + /// + /// Parses a message from the given stream. + /// + /// The stream to parse. + /// The parsed message. + public new T ParseFrom(Stream input) + { + T message = factory(); + message.MergeFrom(input); + return message; + } + + /// + /// Parses a length-delimited message from the given stream. + /// + /// + /// The stream is expected to contain a length and then the data. Only the amount of data + /// specified by the length will be consumed. + /// + /// The stream to parse. + /// The parsed message. + public new T ParseDelimitedFrom(Stream input) + { + T message = factory(); + message.MergeDelimitedFrom(input); + return message; + } + + /// + /// Parses a message from the given coded input stream. + /// + /// The stream to parse. + /// The parsed message. + public new T ParseFrom(CodedInputStream input) + { + T message = factory(); + message.MergeFrom(input); + return message; + } + + /// + /// Parses a message from the given JSON. + /// + /// The JSON to parse. + /// The parsed message. + /// The JSON does not comply with RFC 7159 + /// The JSON does not represent a Protocol Buffers message correctly + public new T ParseJson(string json) + { + T message = factory(); + JsonParser.Default.Merge(message, json); + return message; + } + } +} diff --git a/packager/third_party/protobuf/csharp/src/Google.Protobuf/Properties/AssemblyInfo.cs b/packager/third_party/protobuf/csharp/src/Google.Protobuf/Properties/AssemblyInfo.cs new file mode 100644 index 0000000000..ad852eba5d --- /dev/null +++ b/packager/third_party/protobuf/csharp/src/Google.Protobuf/Properties/AssemblyInfo.cs @@ -0,0 +1,67 @@ +#region Copyright notice and license +// Protocol Buffers - Google's data interchange format +// Copyright 2008 Google Inc. All rights reserved. 
+// https://developers.google.com/protocol-buffers/ +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +#endregion + +using System.Reflection; +using System.Runtime.CompilerServices; +using System.Security; + +// General Information about an assembly is controlled through the following +// set of attributes. Change these attribute values to modify the information +// associated with an assembly. + +[assembly: AssemblyTitle("Google.Protobuf")] +[assembly: AssemblyDescription("")] +[assembly: AssemblyConfiguration("")] +[assembly: AssemblyCompany("")] +[assembly: AssemblyProduct("Google.Protobuf")] +[assembly: AssemblyCopyright("Copyright © 2015")] +[assembly: AssemblyTrademark("")] +[assembly: AssemblyCulture("")] + +#if !NCRUNCH +[assembly: AllowPartiallyTrustedCallers] +#endif + +#if SIGNED +[assembly: InternalsVisibleTo("Google.Protobuf.Test, PublicKey=" + + "002400000480000094000000060200000024000052534131000400000100010025800fbcfc63a1" + + "7c66b303aae80b03a6beaa176bb6bef883be436f2a1579edd80ce23edf151a1f4ced97af83abcd" + + "981207041fd5b2da3b498346fcfcd94910d52f25537c4a43ce3fbe17dc7d43e6cbdb4d8f1242dc" + + "b6bd9b5906be74da8daa7d7280f97130f318a16c07baf118839b156299a48522f9fae2371c9665" + + "c5ae9cb6")] +#else +[assembly: InternalsVisibleTo("Google.Protobuf.Test")] +#endif + +[assembly: AssemblyVersion("3.0.0.0")] +[assembly: AssemblyFileVersion("3.0.0.0")] +[assembly: AssemblyInformationalVersion("3.0.0-beta3")] diff --git a/packager/third_party/protobuf/csharp/src/Google.Protobuf/ProtoPreconditions.cs b/packager/third_party/protobuf/csharp/src/Google.Protobuf/ProtoPreconditions.cs new file mode 100644 index 0000000000..abaeb9b481 --- /dev/null +++ b/packager/third_party/protobuf/csharp/src/Google.Protobuf/ProtoPreconditions.cs @@ -0,0 +1,79 @@ +#region Copyright notice and license +// Protocol Buffers - Google's data interchange format +// Copyright 2008 Google Inc. All rights reserved. 
+// https://developers.google.com/protocol-buffers/ +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +#endregion + +using System; + +namespace Google.Protobuf +{ + /// + /// Helper methods for throwing exceptions when preconditions are not met. + /// + /// + /// This class is used internally and by generated code; it is not particularly + /// expected to be used from application code, although nothing prevents it + /// from being used that way. + /// + public static class ProtoPreconditions + { + /// + /// Throws an ArgumentNullException if the given value is null, otherwise + /// return the value to the caller. + /// + public static T CheckNotNull(T value, string name) where T : class + { + if (value == null) + { + throw new ArgumentNullException(name); + } + return value; + } + + /// + /// Throws an ArgumentNullException if the given value is null, otherwise + /// return the value to the caller. + /// + /// + /// This is equivalent to but without the type parameter + /// constraint. In most cases, the constraint is useful to prevent you from calling CheckNotNull + /// with a value type - but it gets in the way if either you want to use it with a nullable + /// value type, or you want to use it with an unconstrained type parameter. + /// + internal static T CheckNotNullUnconstrained(T value, string name) + { + if (value == null) + { + throw new ArgumentNullException(name); + } + return value; + } + } +} \ No newline at end of file diff --git a/packager/third_party/protobuf/csharp/src/Google.Protobuf/Reflection/Descriptor.cs b/packager/third_party/protobuf/csharp/src/Google.Protobuf/Reflection/Descriptor.cs new file mode 100644 index 0000000000..fa138dfe91 --- /dev/null +++ b/packager/third_party/protobuf/csharp/src/Google.Protobuf/Reflection/Descriptor.cs @@ -0,0 +1,5421 @@ +// Generated by the protocol buffer compiler. DO NOT EDIT! 
+// source: google/protobuf/descriptor.proto +#pragma warning disable 1591, 0612, 3021 +#region Designer generated code + +using pb = global::Google.Protobuf; +using pbc = global::Google.Protobuf.Collections; +using pbr = global::Google.Protobuf.Reflection; +using scg = global::System.Collections.Generic; +namespace Google.Protobuf.Reflection { + + /// Holder for reflection information generated from google/protobuf/descriptor.proto + [global::System.Diagnostics.DebuggerNonUserCodeAttribute()] + internal static partial class DescriptorReflection { + + #region Descriptor + /// File descriptor for google/protobuf/descriptor.proto + public static pbr::FileDescriptor Descriptor { + get { return descriptor; } + } + private static pbr::FileDescriptor descriptor; + + static DescriptorReflection() { + byte[] descriptorData = global::System.Convert.FromBase64String( + string.Concat( + "CiBnb29nbGUvcHJvdG9idWYvZGVzY3JpcHRvci5wcm90bxIPZ29vZ2xlLnBy", + "b3RvYnVmIkcKEUZpbGVEZXNjcmlwdG9yU2V0EjIKBGZpbGUYASADKAsyJC5n", + "b29nbGUucHJvdG9idWYuRmlsZURlc2NyaXB0b3JQcm90byLbAwoTRmlsZURl", + "c2NyaXB0b3JQcm90bxIMCgRuYW1lGAEgASgJEg8KB3BhY2thZ2UYAiABKAkS", + "EgoKZGVwZW5kZW5jeRgDIAMoCRIZChFwdWJsaWNfZGVwZW5kZW5jeRgKIAMo", + "BRIXCg93ZWFrX2RlcGVuZGVuY3kYCyADKAUSNgoMbWVzc2FnZV90eXBlGAQg", + "AygLMiAuZ29vZ2xlLnByb3RvYnVmLkRlc2NyaXB0b3JQcm90bxI3CgllbnVt", + "X3R5cGUYBSADKAsyJC5nb29nbGUucHJvdG9idWYuRW51bURlc2NyaXB0b3JQ", + "cm90bxI4CgdzZXJ2aWNlGAYgAygLMicuZ29vZ2xlLnByb3RvYnVmLlNlcnZp", + "Y2VEZXNjcmlwdG9yUHJvdG8SOAoJZXh0ZW5zaW9uGAcgAygLMiUuZ29vZ2xl", + "LnByb3RvYnVmLkZpZWxkRGVzY3JpcHRvclByb3RvEi0KB29wdGlvbnMYCCAB", + "KAsyHC5nb29nbGUucHJvdG9idWYuRmlsZU9wdGlvbnMSOQoQc291cmNlX2Nv", + "ZGVfaW5mbxgJIAEoCzIfLmdvb2dsZS5wcm90b2J1Zi5Tb3VyY2VDb2RlSW5m", + "bxIOCgZzeW50YXgYDCABKAki8AQKD0Rlc2NyaXB0b3JQcm90bxIMCgRuYW1l", + "GAEgASgJEjQKBWZpZWxkGAIgAygLMiUuZ29vZ2xlLnByb3RvYnVmLkZpZWxk", + "RGVzY3JpcHRvclByb3RvEjgKCWV4dGVuc2lvbhgGIAMoCzIlLmdvb2dsZS5w", + "cm90b2J1Zi5GaWVsZERlc2NyaXB0b3JQcm90bxI1CgtuZXN0ZWRfdHlwZRgD", + "IAMoCzIgLmdvb2dsZS5wcm90b2J1Zi5EZXNjcmlwdG9yUHJvdG8SNwoJZW51", + "bV90eXBlGAQgAygLMiQuZ29vZ2xlLnByb3RvYnVmLkVudW1EZXNjcmlwdG9y", + "UHJvdG8SSAoPZXh0ZW5zaW9uX3JhbmdlGAUgAygLMi8uZ29vZ2xlLnByb3Rv", + "YnVmLkRlc2NyaXB0b3JQcm90by5FeHRlbnNpb25SYW5nZRI5CgpvbmVvZl9k", + "ZWNsGAggAygLMiUuZ29vZ2xlLnByb3RvYnVmLk9uZW9mRGVzY3JpcHRvclBy", + "b3RvEjAKB29wdGlvbnMYByABKAsyHy5nb29nbGUucHJvdG9idWYuTWVzc2Fn", + "ZU9wdGlvbnMSRgoOcmVzZXJ2ZWRfcmFuZ2UYCSADKAsyLi5nb29nbGUucHJv", + "dG9idWYuRGVzY3JpcHRvclByb3RvLlJlc2VydmVkUmFuZ2USFQoNcmVzZXJ2", + "ZWRfbmFtZRgKIAMoCRosCg5FeHRlbnNpb25SYW5nZRINCgVzdGFydBgBIAEo", + "BRILCgNlbmQYAiABKAUaKwoNUmVzZXJ2ZWRSYW5nZRINCgVzdGFydBgBIAEo", + "BRILCgNlbmQYAiABKAUivAUKFEZpZWxkRGVzY3JpcHRvclByb3RvEgwKBG5h", + "bWUYASABKAkSDgoGbnVtYmVyGAMgASgFEjoKBWxhYmVsGAQgASgOMisuZ29v", + "Z2xlLnByb3RvYnVmLkZpZWxkRGVzY3JpcHRvclByb3RvLkxhYmVsEjgKBHR5", + "cGUYBSABKA4yKi5nb29nbGUucHJvdG9idWYuRmllbGREZXNjcmlwdG9yUHJv", + "dG8uVHlwZRIRCgl0eXBlX25hbWUYBiABKAkSEAoIZXh0ZW5kZWUYAiABKAkS", + "FQoNZGVmYXVsdF92YWx1ZRgHIAEoCRITCgtvbmVvZl9pbmRleBgJIAEoBRIR", + "Cglqc29uX25hbWUYCiABKAkSLgoHb3B0aW9ucxgIIAEoCzIdLmdvb2dsZS5w", + "cm90b2J1Zi5GaWVsZE9wdGlvbnMitgIKBFR5cGUSDwoLVFlQRV9ET1VCTEUQ", + "ARIOCgpUWVBFX0ZMT0FUEAISDgoKVFlQRV9JTlQ2NBADEg8KC1RZUEVfVUlO", + "VDY0EAQSDgoKVFlQRV9JTlQzMhAFEhAKDFRZUEVfRklYRUQ2NBAGEhAKDFRZ", + "UEVfRklYRUQzMhAHEg0KCVRZUEVfQk9PTBAIEg8KC1RZUEVfU1RSSU5HEAkS", + "DgoKVFlQRV9HUk9VUBAKEhAKDFRZUEVfTUVTU0FHRRALEg4KClRZUEVfQllU", + "RVMQDBIPCgtUWVBFX1VJTlQzMhANEg0KCVRZUEVfRU5VTRAOEhEKDVRZUEVf", + 
"U0ZJWEVEMzIQDxIRCg1UWVBFX1NGSVhFRDY0EBASDwoLVFlQRV9TSU5UMzIQ", + "ERIPCgtUWVBFX1NJTlQ2NBASIkMKBUxhYmVsEhIKDkxBQkVMX09QVElPTkFM", + "EAESEgoOTEFCRUxfUkVRVUlSRUQQAhISCg5MQUJFTF9SRVBFQVRFRBADIiQK", + "FE9uZW9mRGVzY3JpcHRvclByb3RvEgwKBG5hbWUYASABKAkijAEKE0VudW1E", + "ZXNjcmlwdG9yUHJvdG8SDAoEbmFtZRgBIAEoCRI4CgV2YWx1ZRgCIAMoCzIp", + "Lmdvb2dsZS5wcm90b2J1Zi5FbnVtVmFsdWVEZXNjcmlwdG9yUHJvdG8SLQoH", + "b3B0aW9ucxgDIAEoCzIcLmdvb2dsZS5wcm90b2J1Zi5FbnVtT3B0aW9ucyJs", + "ChhFbnVtVmFsdWVEZXNjcmlwdG9yUHJvdG8SDAoEbmFtZRgBIAEoCRIOCgZu", + "dW1iZXIYAiABKAUSMgoHb3B0aW9ucxgDIAEoCzIhLmdvb2dsZS5wcm90b2J1", + "Zi5FbnVtVmFsdWVPcHRpb25zIpABChZTZXJ2aWNlRGVzY3JpcHRvclByb3Rv", + "EgwKBG5hbWUYASABKAkSNgoGbWV0aG9kGAIgAygLMiYuZ29vZ2xlLnByb3Rv", + "YnVmLk1ldGhvZERlc2NyaXB0b3JQcm90bxIwCgdvcHRpb25zGAMgASgLMh8u", + "Z29vZ2xlLnByb3RvYnVmLlNlcnZpY2VPcHRpb25zIsEBChVNZXRob2REZXNj", + "cmlwdG9yUHJvdG8SDAoEbmFtZRgBIAEoCRISCgppbnB1dF90eXBlGAIgASgJ", + "EhMKC291dHB1dF90eXBlGAMgASgJEi8KB29wdGlvbnMYBCABKAsyHi5nb29n", + "bGUucHJvdG9idWYuTWV0aG9kT3B0aW9ucxIfChBjbGllbnRfc3RyZWFtaW5n", + "GAUgASgIOgVmYWxzZRIfChBzZXJ2ZXJfc3RyZWFtaW5nGAYgASgIOgVmYWxz", + "ZSKHBQoLRmlsZU9wdGlvbnMSFAoMamF2YV9wYWNrYWdlGAEgASgJEhwKFGph", + "dmFfb3V0ZXJfY2xhc3NuYW1lGAggASgJEiIKE2phdmFfbXVsdGlwbGVfZmls", + "ZXMYCiABKAg6BWZhbHNlEiwKHWphdmFfZ2VuZXJhdGVfZXF1YWxzX2FuZF9o", + "YXNoGBQgASgIOgVmYWxzZRIlChZqYXZhX3N0cmluZ19jaGVja191dGY4GBsg", + "ASgIOgVmYWxzZRJGCgxvcHRpbWl6ZV9mb3IYCSABKA4yKS5nb29nbGUucHJv", + "dG9idWYuRmlsZU9wdGlvbnMuT3B0aW1pemVNb2RlOgVTUEVFRBISCgpnb19w", + "YWNrYWdlGAsgASgJEiIKE2NjX2dlbmVyaWNfc2VydmljZXMYECABKAg6BWZh", + "bHNlEiQKFWphdmFfZ2VuZXJpY19zZXJ2aWNlcxgRIAEoCDoFZmFsc2USIgoT", + "cHlfZ2VuZXJpY19zZXJ2aWNlcxgSIAEoCDoFZmFsc2USGQoKZGVwcmVjYXRl", + "ZBgXIAEoCDoFZmFsc2USHwoQY2NfZW5hYmxlX2FyZW5hcxgfIAEoCDoFZmFs", + "c2USGQoRb2JqY19jbGFzc19wcmVmaXgYJCABKAkSGAoQY3NoYXJwX25hbWVz", + "cGFjZRglIAEoCRJDChR1bmludGVycHJldGVkX29wdGlvbhjnByADKAsyJC5n", + "b29nbGUucHJvdG9idWYuVW5pbnRlcnByZXRlZE9wdGlvbiI6CgxPcHRpbWl6", + "ZU1vZGUSCQoFU1BFRUQQARINCglDT0RFX1NJWkUQAhIQCgxMSVRFX1JVTlRJ", + "TUUQAyoJCOgHEICAgIACSgQIJhAnIuYBCg5NZXNzYWdlT3B0aW9ucxImChdt", + "ZXNzYWdlX3NldF93aXJlX2Zvcm1hdBgBIAEoCDoFZmFsc2USLgofbm9fc3Rh", + "bmRhcmRfZGVzY3JpcHRvcl9hY2Nlc3NvchgCIAEoCDoFZmFsc2USGQoKZGVw", + "cmVjYXRlZBgDIAEoCDoFZmFsc2USEQoJbWFwX2VudHJ5GAcgASgIEkMKFHVu", + "aW50ZXJwcmV0ZWRfb3B0aW9uGOcHIAMoCzIkLmdvb2dsZS5wcm90b2J1Zi5V", + "bmludGVycHJldGVkT3B0aW9uKgkI6AcQgICAgAIimAMKDEZpZWxkT3B0aW9u", + "cxI6CgVjdHlwZRgBIAEoDjIjLmdvb2dsZS5wcm90b2J1Zi5GaWVsZE9wdGlv", + "bnMuQ1R5cGU6BlNUUklORxIOCgZwYWNrZWQYAiABKAgSPwoGanN0eXBlGAYg", + "ASgOMiQuZ29vZ2xlLnByb3RvYnVmLkZpZWxkT3B0aW9ucy5KU1R5cGU6CUpT", + "X05PUk1BTBITCgRsYXp5GAUgASgIOgVmYWxzZRIZCgpkZXByZWNhdGVkGAMg", + "ASgIOgVmYWxzZRITCgR3ZWFrGAogASgIOgVmYWxzZRJDChR1bmludGVycHJl", + "dGVkX29wdGlvbhjnByADKAsyJC5nb29nbGUucHJvdG9idWYuVW5pbnRlcnBy", + "ZXRlZE9wdGlvbiIvCgVDVHlwZRIKCgZTVFJJTkcQABIICgRDT1JEEAESEAoM", + "U1RSSU5HX1BJRUNFEAIiNQoGSlNUeXBlEg0KCUpTX05PUk1BTBAAEg0KCUpT", + "X1NUUklORxABEg0KCUpTX05VTUJFUhACKgkI6AcQgICAgAIijQEKC0VudW1P", + "cHRpb25zEhMKC2FsbG93X2FsaWFzGAIgASgIEhkKCmRlcHJlY2F0ZWQYAyAB", + "KAg6BWZhbHNlEkMKFHVuaW50ZXJwcmV0ZWRfb3B0aW9uGOcHIAMoCzIkLmdv", + "b2dsZS5wcm90b2J1Zi5VbmludGVycHJldGVkT3B0aW9uKgkI6AcQgICAgAIi", + "fQoQRW51bVZhbHVlT3B0aW9ucxIZCgpkZXByZWNhdGVkGAEgASgIOgVmYWxz", + "ZRJDChR1bmludGVycHJldGVkX29wdGlvbhjnByADKAsyJC5nb29nbGUucHJv", + "dG9idWYuVW5pbnRlcnByZXRlZE9wdGlvbioJCOgHEICAgIACInsKDlNlcnZp", + "Y2VPcHRpb25zEhkKCmRlcHJlY2F0ZWQYISABKAg6BWZhbHNlEkMKFHVuaW50", + 
"ZXJwcmV0ZWRfb3B0aW9uGOcHIAMoCzIkLmdvb2dsZS5wcm90b2J1Zi5Vbmlu", + "dGVycHJldGVkT3B0aW9uKgkI6AcQgICAgAIiegoNTWV0aG9kT3B0aW9ucxIZ", + "CgpkZXByZWNhdGVkGCEgASgIOgVmYWxzZRJDChR1bmludGVycHJldGVkX29w", + "dGlvbhjnByADKAsyJC5nb29nbGUucHJvdG9idWYuVW5pbnRlcnByZXRlZE9w", + "dGlvbioJCOgHEICAgIACIp4CChNVbmludGVycHJldGVkT3B0aW9uEjsKBG5h", + "bWUYAiADKAsyLS5nb29nbGUucHJvdG9idWYuVW5pbnRlcnByZXRlZE9wdGlv", + "bi5OYW1lUGFydBIYChBpZGVudGlmaWVyX3ZhbHVlGAMgASgJEhoKEnBvc2l0", + "aXZlX2ludF92YWx1ZRgEIAEoBBIaChJuZWdhdGl2ZV9pbnRfdmFsdWUYBSAB", + "KAMSFAoMZG91YmxlX3ZhbHVlGAYgASgBEhQKDHN0cmluZ192YWx1ZRgHIAEo", + "DBIXCg9hZ2dyZWdhdGVfdmFsdWUYCCABKAkaMwoITmFtZVBhcnQSEQoJbmFt", + "ZV9wYXJ0GAEgAigJEhQKDGlzX2V4dGVuc2lvbhgCIAIoCCLVAQoOU291cmNl", + "Q29kZUluZm8SOgoIbG9jYXRpb24YASADKAsyKC5nb29nbGUucHJvdG9idWYu", + "U291cmNlQ29kZUluZm8uTG9jYXRpb24ahgEKCExvY2F0aW9uEhAKBHBhdGgY", + "ASADKAVCAhABEhAKBHNwYW4YAiADKAVCAhABEhgKEGxlYWRpbmdfY29tbWVu", + "dHMYAyABKAkSGQoRdHJhaWxpbmdfY29tbWVudHMYBCABKAkSIQoZbGVhZGlu", + "Z19kZXRhY2hlZF9jb21tZW50cxgGIAMoCSKnAQoRR2VuZXJhdGVkQ29kZUlu", + "Zm8SQQoKYW5ub3RhdGlvbhgBIAMoCzItLmdvb2dsZS5wcm90b2J1Zi5HZW5l", + "cmF0ZWRDb2RlSW5mby5Bbm5vdGF0aW9uGk8KCkFubm90YXRpb24SEAoEcGF0", + "aBgBIAMoBUICEAESEwoLc291cmNlX2ZpbGUYAiABKAkSDQoFYmVnaW4YAyAB", + "KAUSCwoDZW5kGAQgASgFQlgKE2NvbS5nb29nbGUucHJvdG9idWZCEERlc2Ny", + "aXB0b3JQcm90b3NIAVoKZGVzY3JpcHRvcqICA0dQQqoCGkdvb2dsZS5Qcm90", + "b2J1Zi5SZWZsZWN0aW9u")); + descriptor = pbr::FileDescriptor.FromGeneratedCode(descriptorData, + new pbr::FileDescriptor[] { }, + new pbr::GeneratedClrTypeInfo(null, new pbr::GeneratedClrTypeInfo[] { + new pbr::GeneratedClrTypeInfo(typeof(global::Google.Protobuf.Reflection.FileDescriptorSet), global::Google.Protobuf.Reflection.FileDescriptorSet.Parser, new[]{ "File" }, null, null, null), + new pbr::GeneratedClrTypeInfo(typeof(global::Google.Protobuf.Reflection.FileDescriptorProto), global::Google.Protobuf.Reflection.FileDescriptorProto.Parser, new[]{ "Name", "Package", "Dependency", "PublicDependency", "WeakDependency", "MessageType", "EnumType", "Service", "Extension", "Options", "SourceCodeInfo", "Syntax" }, null, null, null), + new pbr::GeneratedClrTypeInfo(typeof(global::Google.Protobuf.Reflection.DescriptorProto), global::Google.Protobuf.Reflection.DescriptorProto.Parser, new[]{ "Name", "Field", "Extension", "NestedType", "EnumType", "ExtensionRange", "OneofDecl", "Options", "ReservedRange", "ReservedName" }, null, null, new pbr::GeneratedClrTypeInfo[] { new pbr::GeneratedClrTypeInfo(typeof(global::Google.Protobuf.Reflection.DescriptorProto.Types.ExtensionRange), global::Google.Protobuf.Reflection.DescriptorProto.Types.ExtensionRange.Parser, new[]{ "Start", "End" }, null, null, null), + new pbr::GeneratedClrTypeInfo(typeof(global::Google.Protobuf.Reflection.DescriptorProto.Types.ReservedRange), global::Google.Protobuf.Reflection.DescriptorProto.Types.ReservedRange.Parser, new[]{ "Start", "End" }, null, null, null)}), + new pbr::GeneratedClrTypeInfo(typeof(global::Google.Protobuf.Reflection.FieldDescriptorProto), global::Google.Protobuf.Reflection.FieldDescriptorProto.Parser, new[]{ "Name", "Number", "Label", "Type", "TypeName", "Extendee", "DefaultValue", "OneofIndex", "JsonName", "Options" }, null, new[]{ typeof(global::Google.Protobuf.Reflection.FieldDescriptorProto.Types.Type), typeof(global::Google.Protobuf.Reflection.FieldDescriptorProto.Types.Label) }, null), + new pbr::GeneratedClrTypeInfo(typeof(global::Google.Protobuf.Reflection.OneofDescriptorProto), global::Google.Protobuf.Reflection.OneofDescriptorProto.Parser, 
new[]{ "Name" }, null, null, null), + new pbr::GeneratedClrTypeInfo(typeof(global::Google.Protobuf.Reflection.EnumDescriptorProto), global::Google.Protobuf.Reflection.EnumDescriptorProto.Parser, new[]{ "Name", "Value", "Options" }, null, null, null), + new pbr::GeneratedClrTypeInfo(typeof(global::Google.Protobuf.Reflection.EnumValueDescriptorProto), global::Google.Protobuf.Reflection.EnumValueDescriptorProto.Parser, new[]{ "Name", "Number", "Options" }, null, null, null), + new pbr::GeneratedClrTypeInfo(typeof(global::Google.Protobuf.Reflection.ServiceDescriptorProto), global::Google.Protobuf.Reflection.ServiceDescriptorProto.Parser, new[]{ "Name", "Method", "Options" }, null, null, null), + new pbr::GeneratedClrTypeInfo(typeof(global::Google.Protobuf.Reflection.MethodDescriptorProto), global::Google.Protobuf.Reflection.MethodDescriptorProto.Parser, new[]{ "Name", "InputType", "OutputType", "Options", "ClientStreaming", "ServerStreaming" }, null, null, null), + new pbr::GeneratedClrTypeInfo(typeof(global::Google.Protobuf.Reflection.FileOptions), global::Google.Protobuf.Reflection.FileOptions.Parser, new[]{ "JavaPackage", "JavaOuterClassname", "JavaMultipleFiles", "JavaGenerateEqualsAndHash", "JavaStringCheckUtf8", "OptimizeFor", "GoPackage", "CcGenericServices", "JavaGenericServices", "PyGenericServices", "Deprecated", "CcEnableArenas", "ObjcClassPrefix", "CsharpNamespace", "UninterpretedOption" }, null, new[]{ typeof(global::Google.Protobuf.Reflection.FileOptions.Types.OptimizeMode) }, null), + new pbr::GeneratedClrTypeInfo(typeof(global::Google.Protobuf.Reflection.MessageOptions), global::Google.Protobuf.Reflection.MessageOptions.Parser, new[]{ "MessageSetWireFormat", "NoStandardDescriptorAccessor", "Deprecated", "MapEntry", "UninterpretedOption" }, null, null, null), + new pbr::GeneratedClrTypeInfo(typeof(global::Google.Protobuf.Reflection.FieldOptions), global::Google.Protobuf.Reflection.FieldOptions.Parser, new[]{ "Ctype", "Packed", "Jstype", "Lazy", "Deprecated", "Weak", "UninterpretedOption" }, null, new[]{ typeof(global::Google.Protobuf.Reflection.FieldOptions.Types.CType), typeof(global::Google.Protobuf.Reflection.FieldOptions.Types.JSType) }, null), + new pbr::GeneratedClrTypeInfo(typeof(global::Google.Protobuf.Reflection.EnumOptions), global::Google.Protobuf.Reflection.EnumOptions.Parser, new[]{ "AllowAlias", "Deprecated", "UninterpretedOption" }, null, null, null), + new pbr::GeneratedClrTypeInfo(typeof(global::Google.Protobuf.Reflection.EnumValueOptions), global::Google.Protobuf.Reflection.EnumValueOptions.Parser, new[]{ "Deprecated", "UninterpretedOption" }, null, null, null), + new pbr::GeneratedClrTypeInfo(typeof(global::Google.Protobuf.Reflection.ServiceOptions), global::Google.Protobuf.Reflection.ServiceOptions.Parser, new[]{ "Deprecated", "UninterpretedOption" }, null, null, null), + new pbr::GeneratedClrTypeInfo(typeof(global::Google.Protobuf.Reflection.MethodOptions), global::Google.Protobuf.Reflection.MethodOptions.Parser, new[]{ "Deprecated", "UninterpretedOption" }, null, null, null), + new pbr::GeneratedClrTypeInfo(typeof(global::Google.Protobuf.Reflection.UninterpretedOption), global::Google.Protobuf.Reflection.UninterpretedOption.Parser, new[]{ "Name", "IdentifierValue", "PositiveIntValue", "NegativeIntValue", "DoubleValue", "StringValue", "AggregateValue" }, null, null, new pbr::GeneratedClrTypeInfo[] { new pbr::GeneratedClrTypeInfo(typeof(global::Google.Protobuf.Reflection.UninterpretedOption.Types.NamePart), 
global::Google.Protobuf.Reflection.UninterpretedOption.Types.NamePart.Parser, new[]{ "NamePart_", "IsExtension" }, null, null, null)}), + new pbr::GeneratedClrTypeInfo(typeof(global::Google.Protobuf.Reflection.SourceCodeInfo), global::Google.Protobuf.Reflection.SourceCodeInfo.Parser, new[]{ "Location" }, null, null, new pbr::GeneratedClrTypeInfo[] { new pbr::GeneratedClrTypeInfo(typeof(global::Google.Protobuf.Reflection.SourceCodeInfo.Types.Location), global::Google.Protobuf.Reflection.SourceCodeInfo.Types.Location.Parser, new[]{ "Path", "Span", "LeadingComments", "TrailingComments", "LeadingDetachedComments" }, null, null, null)}), + new pbr::GeneratedClrTypeInfo(typeof(global::Google.Protobuf.Reflection.GeneratedCodeInfo), global::Google.Protobuf.Reflection.GeneratedCodeInfo.Parser, new[]{ "Annotation" }, null, null, new pbr::GeneratedClrTypeInfo[] { new pbr::GeneratedClrTypeInfo(typeof(global::Google.Protobuf.Reflection.GeneratedCodeInfo.Types.Annotation), global::Google.Protobuf.Reflection.GeneratedCodeInfo.Types.Annotation.Parser, new[]{ "Path", "SourceFile", "Begin", "End" }, null, null, null)}) + })); + } + #endregion + + } + #region Messages + /// + /// The protocol compiler can output a FileDescriptorSet containing the .proto + /// files it parses. + /// + [global::System.Diagnostics.DebuggerNonUserCodeAttribute()] + internal sealed partial class FileDescriptorSet : pb::IMessage { + private static readonly pb::MessageParser _parser = new pb::MessageParser(() => new FileDescriptorSet()); + public static pb::MessageParser Parser { get { return _parser; } } + + public static pbr::MessageDescriptor Descriptor { + get { return global::Google.Protobuf.Reflection.DescriptorReflection.Descriptor.MessageTypes[0]; } + } + + pbr::MessageDescriptor pb::IMessage.Descriptor { + get { return Descriptor; } + } + + public FileDescriptorSet() { + OnConstruction(); + } + + partial void OnConstruction(); + + public FileDescriptorSet(FileDescriptorSet other) : this() { + file_ = other.file_.Clone(); + } + + public FileDescriptorSet Clone() { + return new FileDescriptorSet(this); + } + + /// Field number for the "file" field. 
+ public const int FileFieldNumber = 1; + private static readonly pb::FieldCodec _repeated_file_codec + = pb::FieldCodec.ForMessage(10, global::Google.Protobuf.Reflection.FileDescriptorProto.Parser); + private readonly pbc::RepeatedField file_ = new pbc::RepeatedField(); + public pbc::RepeatedField File { + get { return file_; } + } + + public override bool Equals(object other) { + return Equals(other as FileDescriptorSet); + } + + public bool Equals(FileDescriptorSet other) { + if (ReferenceEquals(other, null)) { + return false; + } + if (ReferenceEquals(other, this)) { + return true; + } + if(!file_.Equals(other.file_)) return false; + return true; + } + + public override int GetHashCode() { + int hash = 1; + hash ^= file_.GetHashCode(); + return hash; + } + + public override string ToString() { + return pb::JsonFormatter.ToDiagnosticString(this); + } + + public void WriteTo(pb::CodedOutputStream output) { + file_.WriteTo(output, _repeated_file_codec); + } + + public int CalculateSize() { + int size = 0; + size += file_.CalculateSize(_repeated_file_codec); + return size; + } + + public void MergeFrom(FileDescriptorSet other) { + if (other == null) { + return; + } + file_.Add(other.file_); + } + + public void MergeFrom(pb::CodedInputStream input) { + uint tag; + while ((tag = input.ReadTag()) != 0) { + switch(tag) { + default: + input.SkipLastField(); + break; + case 10: { + file_.AddEntriesFrom(input, _repeated_file_codec); + break; + } + } + } + } + + } + + /// + /// Describes a complete .proto file. + /// + [global::System.Diagnostics.DebuggerNonUserCodeAttribute()] + internal sealed partial class FileDescriptorProto : pb::IMessage { + private static readonly pb::MessageParser _parser = new pb::MessageParser(() => new FileDescriptorProto()); + public static pb::MessageParser Parser { get { return _parser; } } + + public static pbr::MessageDescriptor Descriptor { + get { return global::Google.Protobuf.Reflection.DescriptorReflection.Descriptor.MessageTypes[1]; } + } + + pbr::MessageDescriptor pb::IMessage.Descriptor { + get { return Descriptor; } + } + + public FileDescriptorProto() { + OnConstruction(); + } + + partial void OnConstruction(); + + public FileDescriptorProto(FileDescriptorProto other) : this() { + name_ = other.name_; + package_ = other.package_; + dependency_ = other.dependency_.Clone(); + publicDependency_ = other.publicDependency_.Clone(); + weakDependency_ = other.weakDependency_.Clone(); + messageType_ = other.messageType_.Clone(); + enumType_ = other.enumType_.Clone(); + service_ = other.service_.Clone(); + extension_ = other.extension_.Clone(); + Options = other.options_ != null ? other.Options.Clone() : null; + SourceCodeInfo = other.sourceCodeInfo_ != null ? other.SourceCodeInfo.Clone() : null; + syntax_ = other.syntax_; + } + + public FileDescriptorProto Clone() { + return new FileDescriptorProto(this); + } + + /// Field number for the "name" field. + public const int NameFieldNumber = 1; + private string name_ = ""; + /// + /// file name, relative to root of source tree + /// + public string Name { + get { return name_; } + set { + name_ = pb::ProtoPreconditions.CheckNotNull(value, "value"); + } + } + + /// Field number for the "package" field. + public const int PackageFieldNumber = 2; + private string package_ = ""; + /// + /// e.g. "foo", "foo.bar", etc. + /// + public string Package { + get { return package_; } + set { + package_ = pb::ProtoPreconditions.CheckNotNull(value, "value"); + } + } + + /// Field number for the "dependency" field. 
+ public const int DependencyFieldNumber = 3; + private static readonly pb::FieldCodec _repeated_dependency_codec + = pb::FieldCodec.ForString(26); + private readonly pbc::RepeatedField dependency_ = new pbc::RepeatedField(); + /// + /// Names of files imported by this file. + /// + public pbc::RepeatedField Dependency { + get { return dependency_; } + } + + /// Field number for the "public_dependency" field. + public const int PublicDependencyFieldNumber = 10; + private static readonly pb::FieldCodec _repeated_publicDependency_codec + = pb::FieldCodec.ForInt32(80); + private readonly pbc::RepeatedField publicDependency_ = new pbc::RepeatedField(); + /// + /// Indexes of the public imported files in the dependency list above. + /// + public pbc::RepeatedField PublicDependency { + get { return publicDependency_; } + } + + /// Field number for the "weak_dependency" field. + public const int WeakDependencyFieldNumber = 11; + private static readonly pb::FieldCodec _repeated_weakDependency_codec + = pb::FieldCodec.ForInt32(88); + private readonly pbc::RepeatedField weakDependency_ = new pbc::RepeatedField(); + /// + /// Indexes of the weak imported files in the dependency list. + /// For Google-internal migration only. Do not use. + /// + public pbc::RepeatedField WeakDependency { + get { return weakDependency_; } + } + + /// Field number for the "message_type" field. + public const int MessageTypeFieldNumber = 4; + private static readonly pb::FieldCodec _repeated_messageType_codec + = pb::FieldCodec.ForMessage(34, global::Google.Protobuf.Reflection.DescriptorProto.Parser); + private readonly pbc::RepeatedField messageType_ = new pbc::RepeatedField(); + /// + /// All top-level definitions in this file. + /// + public pbc::RepeatedField MessageType { + get { return messageType_; } + } + + /// Field number for the "enum_type" field. + public const int EnumTypeFieldNumber = 5; + private static readonly pb::FieldCodec _repeated_enumType_codec + = pb::FieldCodec.ForMessage(42, global::Google.Protobuf.Reflection.EnumDescriptorProto.Parser); + private readonly pbc::RepeatedField enumType_ = new pbc::RepeatedField(); + public pbc::RepeatedField EnumType { + get { return enumType_; } + } + + /// Field number for the "service" field. + public const int ServiceFieldNumber = 6; + private static readonly pb::FieldCodec _repeated_service_codec + = pb::FieldCodec.ForMessage(50, global::Google.Protobuf.Reflection.ServiceDescriptorProto.Parser); + private readonly pbc::RepeatedField service_ = new pbc::RepeatedField(); + public pbc::RepeatedField Service { + get { return service_; } + } + + /// Field number for the "extension" field. + public const int ExtensionFieldNumber = 7; + private static readonly pb::FieldCodec _repeated_extension_codec + = pb::FieldCodec.ForMessage(58, global::Google.Protobuf.Reflection.FieldDescriptorProto.Parser); + private readonly pbc::RepeatedField extension_ = new pbc::RepeatedField(); + public pbc::RepeatedField Extension { + get { return extension_; } + } + + /// Field number for the "options" field. + public const int OptionsFieldNumber = 8; + private global::Google.Protobuf.Reflection.FileOptions options_; + public global::Google.Protobuf.Reflection.FileOptions Options { + get { return options_; } + set { + options_ = value; + } + } + + /// Field number for the "source_code_info" field. 
+ public const int SourceCodeInfoFieldNumber = 9; + private global::Google.Protobuf.Reflection.SourceCodeInfo sourceCodeInfo_; + /// + /// This field contains optional information about the original source code. + /// You may safely remove this entire field without harming runtime + /// functionality of the descriptors -- the information is needed only by + /// development tools. + /// + public global::Google.Protobuf.Reflection.SourceCodeInfo SourceCodeInfo { + get { return sourceCodeInfo_; } + set { + sourceCodeInfo_ = value; + } + } + + /// Field number for the "syntax" field. + public const int SyntaxFieldNumber = 12; + private string syntax_ = ""; + /// + /// The syntax of the proto file. + /// The supported values are "proto2" and "proto3". + /// + public string Syntax { + get { return syntax_; } + set { + syntax_ = pb::ProtoPreconditions.CheckNotNull(value, "value"); + } + } + + public override bool Equals(object other) { + return Equals(other as FileDescriptorProto); + } + + public bool Equals(FileDescriptorProto other) { + if (ReferenceEquals(other, null)) { + return false; + } + if (ReferenceEquals(other, this)) { + return true; + } + if (Name != other.Name) return false; + if (Package != other.Package) return false; + if(!dependency_.Equals(other.dependency_)) return false; + if(!publicDependency_.Equals(other.publicDependency_)) return false; + if(!weakDependency_.Equals(other.weakDependency_)) return false; + if(!messageType_.Equals(other.messageType_)) return false; + if(!enumType_.Equals(other.enumType_)) return false; + if(!service_.Equals(other.service_)) return false; + if(!extension_.Equals(other.extension_)) return false; + if (!object.Equals(Options, other.Options)) return false; + if (!object.Equals(SourceCodeInfo, other.SourceCodeInfo)) return false; + if (Syntax != other.Syntax) return false; + return true; + } + + public override int GetHashCode() { + int hash = 1; + if (Name.Length != 0) hash ^= Name.GetHashCode(); + if (Package.Length != 0) hash ^= Package.GetHashCode(); + hash ^= dependency_.GetHashCode(); + hash ^= publicDependency_.GetHashCode(); + hash ^= weakDependency_.GetHashCode(); + hash ^= messageType_.GetHashCode(); + hash ^= enumType_.GetHashCode(); + hash ^= service_.GetHashCode(); + hash ^= extension_.GetHashCode(); + if (options_ != null) hash ^= Options.GetHashCode(); + if (sourceCodeInfo_ != null) hash ^= SourceCodeInfo.GetHashCode(); + if (Syntax.Length != 0) hash ^= Syntax.GetHashCode(); + return hash; + } + + public override string ToString() { + return pb::JsonFormatter.ToDiagnosticString(this); + } + + public void WriteTo(pb::CodedOutputStream output) { + if (Name.Length != 0) { + output.WriteRawTag(10); + output.WriteString(Name); + } + if (Package.Length != 0) { + output.WriteRawTag(18); + output.WriteString(Package); + } + dependency_.WriteTo(output, _repeated_dependency_codec); + messageType_.WriteTo(output, _repeated_messageType_codec); + enumType_.WriteTo(output, _repeated_enumType_codec); + service_.WriteTo(output, _repeated_service_codec); + extension_.WriteTo(output, _repeated_extension_codec); + if (options_ != null) { + output.WriteRawTag(66); + output.WriteMessage(Options); + } + if (sourceCodeInfo_ != null) { + output.WriteRawTag(74); + output.WriteMessage(SourceCodeInfo); + } + publicDependency_.WriteTo(output, _repeated_publicDependency_codec); + weakDependency_.WriteTo(output, _repeated_weakDependency_codec); + if (Syntax.Length != 0) { + output.WriteRawTag(98); + output.WriteString(Syntax); + } + } + + public int 
CalculateSize() { + int size = 0; + if (Name.Length != 0) { + size += 1 + pb::CodedOutputStream.ComputeStringSize(Name); + } + if (Package.Length != 0) { + size += 1 + pb::CodedOutputStream.ComputeStringSize(Package); + } + size += dependency_.CalculateSize(_repeated_dependency_codec); + size += publicDependency_.CalculateSize(_repeated_publicDependency_codec); + size += weakDependency_.CalculateSize(_repeated_weakDependency_codec); + size += messageType_.CalculateSize(_repeated_messageType_codec); + size += enumType_.CalculateSize(_repeated_enumType_codec); + size += service_.CalculateSize(_repeated_service_codec); + size += extension_.CalculateSize(_repeated_extension_codec); + if (options_ != null) { + size += 1 + pb::CodedOutputStream.ComputeMessageSize(Options); + } + if (sourceCodeInfo_ != null) { + size += 1 + pb::CodedOutputStream.ComputeMessageSize(SourceCodeInfo); + } + if (Syntax.Length != 0) { + size += 1 + pb::CodedOutputStream.ComputeStringSize(Syntax); + } + return size; + } + + public void MergeFrom(FileDescriptorProto other) { + if (other == null) { + return; + } + if (other.Name.Length != 0) { + Name = other.Name; + } + if (other.Package.Length != 0) { + Package = other.Package; + } + dependency_.Add(other.dependency_); + publicDependency_.Add(other.publicDependency_); + weakDependency_.Add(other.weakDependency_); + messageType_.Add(other.messageType_); + enumType_.Add(other.enumType_); + service_.Add(other.service_); + extension_.Add(other.extension_); + if (other.options_ != null) { + if (options_ == null) { + options_ = new global::Google.Protobuf.Reflection.FileOptions(); + } + Options.MergeFrom(other.Options); + } + if (other.sourceCodeInfo_ != null) { + if (sourceCodeInfo_ == null) { + sourceCodeInfo_ = new global::Google.Protobuf.Reflection.SourceCodeInfo(); + } + SourceCodeInfo.MergeFrom(other.SourceCodeInfo); + } + if (other.Syntax.Length != 0) { + Syntax = other.Syntax; + } + } + + public void MergeFrom(pb::CodedInputStream input) { + uint tag; + while ((tag = input.ReadTag()) != 0) { + switch(tag) { + default: + input.SkipLastField(); + break; + case 10: { + Name = input.ReadString(); + break; + } + case 18: { + Package = input.ReadString(); + break; + } + case 26: { + dependency_.AddEntriesFrom(input, _repeated_dependency_codec); + break; + } + case 34: { + messageType_.AddEntriesFrom(input, _repeated_messageType_codec); + break; + } + case 42: { + enumType_.AddEntriesFrom(input, _repeated_enumType_codec); + break; + } + case 50: { + service_.AddEntriesFrom(input, _repeated_service_codec); + break; + } + case 58: { + extension_.AddEntriesFrom(input, _repeated_extension_codec); + break; + } + case 66: { + if (options_ == null) { + options_ = new global::Google.Protobuf.Reflection.FileOptions(); + } + input.ReadMessage(options_); + break; + } + case 74: { + if (sourceCodeInfo_ == null) { + sourceCodeInfo_ = new global::Google.Protobuf.Reflection.SourceCodeInfo(); + } + input.ReadMessage(sourceCodeInfo_); + break; + } + case 82: + case 80: { + publicDependency_.AddEntriesFrom(input, _repeated_publicDependency_codec); + break; + } + case 90: + case 88: { + weakDependency_.AddEntriesFrom(input, _repeated_weakDependency_codec); + break; + } + case 98: { + Syntax = input.ReadString(); + break; + } + } + } + } + + } + + /// + /// Describes a message type. 
+ /// + [global::System.Diagnostics.DebuggerNonUserCodeAttribute()] + internal sealed partial class DescriptorProto : pb::IMessage { + private static readonly pb::MessageParser _parser = new pb::MessageParser(() => new DescriptorProto()); + public static pb::MessageParser Parser { get { return _parser; } } + + public static pbr::MessageDescriptor Descriptor { + get { return global::Google.Protobuf.Reflection.DescriptorReflection.Descriptor.MessageTypes[2]; } + } + + pbr::MessageDescriptor pb::IMessage.Descriptor { + get { return Descriptor; } + } + + public DescriptorProto() { + OnConstruction(); + } + + partial void OnConstruction(); + + public DescriptorProto(DescriptorProto other) : this() { + name_ = other.name_; + field_ = other.field_.Clone(); + extension_ = other.extension_.Clone(); + nestedType_ = other.nestedType_.Clone(); + enumType_ = other.enumType_.Clone(); + extensionRange_ = other.extensionRange_.Clone(); + oneofDecl_ = other.oneofDecl_.Clone(); + Options = other.options_ != null ? other.Options.Clone() : null; + reservedRange_ = other.reservedRange_.Clone(); + reservedName_ = other.reservedName_.Clone(); + } + + public DescriptorProto Clone() { + return new DescriptorProto(this); + } + + /// Field number for the "name" field. + public const int NameFieldNumber = 1; + private string name_ = ""; + public string Name { + get { return name_; } + set { + name_ = pb::ProtoPreconditions.CheckNotNull(value, "value"); + } + } + + /// Field number for the "field" field. + public const int FieldFieldNumber = 2; + private static readonly pb::FieldCodec _repeated_field_codec + = pb::FieldCodec.ForMessage(18, global::Google.Protobuf.Reflection.FieldDescriptorProto.Parser); + private readonly pbc::RepeatedField field_ = new pbc::RepeatedField(); + public pbc::RepeatedField Field { + get { return field_; } + } + + /// Field number for the "extension" field. + public const int ExtensionFieldNumber = 6; + private static readonly pb::FieldCodec _repeated_extension_codec + = pb::FieldCodec.ForMessage(50, global::Google.Protobuf.Reflection.FieldDescriptorProto.Parser); + private readonly pbc::RepeatedField extension_ = new pbc::RepeatedField(); + public pbc::RepeatedField Extension { + get { return extension_; } + } + + /// Field number for the "nested_type" field. + public const int NestedTypeFieldNumber = 3; + private static readonly pb::FieldCodec _repeated_nestedType_codec + = pb::FieldCodec.ForMessage(26, global::Google.Protobuf.Reflection.DescriptorProto.Parser); + private readonly pbc::RepeatedField nestedType_ = new pbc::RepeatedField(); + public pbc::RepeatedField NestedType { + get { return nestedType_; } + } + + /// Field number for the "enum_type" field. + public const int EnumTypeFieldNumber = 4; + private static readonly pb::FieldCodec _repeated_enumType_codec + = pb::FieldCodec.ForMessage(34, global::Google.Protobuf.Reflection.EnumDescriptorProto.Parser); + private readonly pbc::RepeatedField enumType_ = new pbc::RepeatedField(); + public pbc::RepeatedField EnumType { + get { return enumType_; } + } + + /// Field number for the "extension_range" field. 
+ public const int ExtensionRangeFieldNumber = 5; + private static readonly pb::FieldCodec _repeated_extensionRange_codec + = pb::FieldCodec.ForMessage(42, global::Google.Protobuf.Reflection.DescriptorProto.Types.ExtensionRange.Parser); + private readonly pbc::RepeatedField extensionRange_ = new pbc::RepeatedField(); + public pbc::RepeatedField ExtensionRange { + get { return extensionRange_; } + } + + /// Field number for the "oneof_decl" field. + public const int OneofDeclFieldNumber = 8; + private static readonly pb::FieldCodec _repeated_oneofDecl_codec + = pb::FieldCodec.ForMessage(66, global::Google.Protobuf.Reflection.OneofDescriptorProto.Parser); + private readonly pbc::RepeatedField oneofDecl_ = new pbc::RepeatedField(); + public pbc::RepeatedField OneofDecl { + get { return oneofDecl_; } + } + + /// Field number for the "options" field. + public const int OptionsFieldNumber = 7; + private global::Google.Protobuf.Reflection.MessageOptions options_; + public global::Google.Protobuf.Reflection.MessageOptions Options { + get { return options_; } + set { + options_ = value; + } + } + + /// Field number for the "reserved_range" field. + public const int ReservedRangeFieldNumber = 9; + private static readonly pb::FieldCodec _repeated_reservedRange_codec + = pb::FieldCodec.ForMessage(74, global::Google.Protobuf.Reflection.DescriptorProto.Types.ReservedRange.Parser); + private readonly pbc::RepeatedField reservedRange_ = new pbc::RepeatedField(); + public pbc::RepeatedField ReservedRange { + get { return reservedRange_; } + } + + /// Field number for the "reserved_name" field. + public const int ReservedNameFieldNumber = 10; + private static readonly pb::FieldCodec _repeated_reservedName_codec + = pb::FieldCodec.ForString(82); + private readonly pbc::RepeatedField reservedName_ = new pbc::RepeatedField(); + /// + /// Reserved field names, which may not be used by fields in the same message. + /// A given name may only be reserved once. 
+ /// + public pbc::RepeatedField ReservedName { + get { return reservedName_; } + } + + public override bool Equals(object other) { + return Equals(other as DescriptorProto); + } + + public bool Equals(DescriptorProto other) { + if (ReferenceEquals(other, null)) { + return false; + } + if (ReferenceEquals(other, this)) { + return true; + } + if (Name != other.Name) return false; + if(!field_.Equals(other.field_)) return false; + if(!extension_.Equals(other.extension_)) return false; + if(!nestedType_.Equals(other.nestedType_)) return false; + if(!enumType_.Equals(other.enumType_)) return false; + if(!extensionRange_.Equals(other.extensionRange_)) return false; + if(!oneofDecl_.Equals(other.oneofDecl_)) return false; + if (!object.Equals(Options, other.Options)) return false; + if(!reservedRange_.Equals(other.reservedRange_)) return false; + if(!reservedName_.Equals(other.reservedName_)) return false; + return true; + } + + public override int GetHashCode() { + int hash = 1; + if (Name.Length != 0) hash ^= Name.GetHashCode(); + hash ^= field_.GetHashCode(); + hash ^= extension_.GetHashCode(); + hash ^= nestedType_.GetHashCode(); + hash ^= enumType_.GetHashCode(); + hash ^= extensionRange_.GetHashCode(); + hash ^= oneofDecl_.GetHashCode(); + if (options_ != null) hash ^= Options.GetHashCode(); + hash ^= reservedRange_.GetHashCode(); + hash ^= reservedName_.GetHashCode(); + return hash; + } + + public override string ToString() { + return pb::JsonFormatter.ToDiagnosticString(this); + } + + public void WriteTo(pb::CodedOutputStream output) { + if (Name.Length != 0) { + output.WriteRawTag(10); + output.WriteString(Name); + } + field_.WriteTo(output, _repeated_field_codec); + nestedType_.WriteTo(output, _repeated_nestedType_codec); + enumType_.WriteTo(output, _repeated_enumType_codec); + extensionRange_.WriteTo(output, _repeated_extensionRange_codec); + extension_.WriteTo(output, _repeated_extension_codec); + if (options_ != null) { + output.WriteRawTag(58); + output.WriteMessage(Options); + } + oneofDecl_.WriteTo(output, _repeated_oneofDecl_codec); + reservedRange_.WriteTo(output, _repeated_reservedRange_codec); + reservedName_.WriteTo(output, _repeated_reservedName_codec); + } + + public int CalculateSize() { + int size = 0; + if (Name.Length != 0) { + size += 1 + pb::CodedOutputStream.ComputeStringSize(Name); + } + size += field_.CalculateSize(_repeated_field_codec); + size += extension_.CalculateSize(_repeated_extension_codec); + size += nestedType_.CalculateSize(_repeated_nestedType_codec); + size += enumType_.CalculateSize(_repeated_enumType_codec); + size += extensionRange_.CalculateSize(_repeated_extensionRange_codec); + size += oneofDecl_.CalculateSize(_repeated_oneofDecl_codec); + if (options_ != null) { + size += 1 + pb::CodedOutputStream.ComputeMessageSize(Options); + } + size += reservedRange_.CalculateSize(_repeated_reservedRange_codec); + size += reservedName_.CalculateSize(_repeated_reservedName_codec); + return size; + } + + public void MergeFrom(DescriptorProto other) { + if (other == null) { + return; + } + if (other.Name.Length != 0) { + Name = other.Name; + } + field_.Add(other.field_); + extension_.Add(other.extension_); + nestedType_.Add(other.nestedType_); + enumType_.Add(other.enumType_); + extensionRange_.Add(other.extensionRange_); + oneofDecl_.Add(other.oneofDecl_); + if (other.options_ != null) { + if (options_ == null) { + options_ = new global::Google.Protobuf.Reflection.MessageOptions(); + } + Options.MergeFrom(other.Options); + } + 
reservedRange_.Add(other.reservedRange_); + reservedName_.Add(other.reservedName_); + } + + public void MergeFrom(pb::CodedInputStream input) { + uint tag; + while ((tag = input.ReadTag()) != 0) { + switch(tag) { + default: + input.SkipLastField(); + break; + case 10: { + Name = input.ReadString(); + break; + } + case 18: { + field_.AddEntriesFrom(input, _repeated_field_codec); + break; + } + case 26: { + nestedType_.AddEntriesFrom(input, _repeated_nestedType_codec); + break; + } + case 34: { + enumType_.AddEntriesFrom(input, _repeated_enumType_codec); + break; + } + case 42: { + extensionRange_.AddEntriesFrom(input, _repeated_extensionRange_codec); + break; + } + case 50: { + extension_.AddEntriesFrom(input, _repeated_extension_codec); + break; + } + case 58: { + if (options_ == null) { + options_ = new global::Google.Protobuf.Reflection.MessageOptions(); + } + input.ReadMessage(options_); + break; + } + case 66: { + oneofDecl_.AddEntriesFrom(input, _repeated_oneofDecl_codec); + break; + } + case 74: { + reservedRange_.AddEntriesFrom(input, _repeated_reservedRange_codec); + break; + } + case 82: { + reservedName_.AddEntriesFrom(input, _repeated_reservedName_codec); + break; + } + } + } + } + + #region Nested types + /// Container for nested types declared in the DescriptorProto message type. + [global::System.Diagnostics.DebuggerNonUserCodeAttribute()] + public static partial class Types { + [global::System.Diagnostics.DebuggerNonUserCodeAttribute()] + internal sealed partial class ExtensionRange : pb::IMessage { + private static readonly pb::MessageParser _parser = new pb::MessageParser(() => new ExtensionRange()); + public static pb::MessageParser Parser { get { return _parser; } } + + public static pbr::MessageDescriptor Descriptor { + get { return global::Google.Protobuf.Reflection.DescriptorProto.Descriptor.NestedTypes[0]; } + } + + pbr::MessageDescriptor pb::IMessage.Descriptor { + get { return Descriptor; } + } + + public ExtensionRange() { + OnConstruction(); + } + + partial void OnConstruction(); + + public ExtensionRange(ExtensionRange other) : this() { + start_ = other.start_; + end_ = other.end_; + } + + public ExtensionRange Clone() { + return new ExtensionRange(this); + } + + /// Field number for the "start" field. + public const int StartFieldNumber = 1; + private int start_; + public int Start { + get { return start_; } + set { + start_ = value; + } + } + + /// Field number for the "end" field. 
+ public const int EndFieldNumber = 2; + private int end_; + public int End { + get { return end_; } + set { + end_ = value; + } + } + + public override bool Equals(object other) { + return Equals(other as ExtensionRange); + } + + public bool Equals(ExtensionRange other) { + if (ReferenceEquals(other, null)) { + return false; + } + if (ReferenceEquals(other, this)) { + return true; + } + if (Start != other.Start) return false; + if (End != other.End) return false; + return true; + } + + public override int GetHashCode() { + int hash = 1; + if (Start != 0) hash ^= Start.GetHashCode(); + if (End != 0) hash ^= End.GetHashCode(); + return hash; + } + + public override string ToString() { + return pb::JsonFormatter.ToDiagnosticString(this); + } + + public void WriteTo(pb::CodedOutputStream output) { + if (Start != 0) { + output.WriteRawTag(8); + output.WriteInt32(Start); + } + if (End != 0) { + output.WriteRawTag(16); + output.WriteInt32(End); + } + } + + public int CalculateSize() { + int size = 0; + if (Start != 0) { + size += 1 + pb::CodedOutputStream.ComputeInt32Size(Start); + } + if (End != 0) { + size += 1 + pb::CodedOutputStream.ComputeInt32Size(End); + } + return size; + } + + public void MergeFrom(ExtensionRange other) { + if (other == null) { + return; + } + if (other.Start != 0) { + Start = other.Start; + } + if (other.End != 0) { + End = other.End; + } + } + + public void MergeFrom(pb::CodedInputStream input) { + uint tag; + while ((tag = input.ReadTag()) != 0) { + switch(tag) { + default: + input.SkipLastField(); + break; + case 8: { + Start = input.ReadInt32(); + break; + } + case 16: { + End = input.ReadInt32(); + break; + } + } + } + } + + } + + /// + /// Range of reserved tag numbers. Reserved tag numbers may not be used by + /// fields or extension ranges in the same message. Reserved ranges may + /// not overlap. + /// + [global::System.Diagnostics.DebuggerNonUserCodeAttribute()] + internal sealed partial class ReservedRange : pb::IMessage { + private static readonly pb::MessageParser _parser = new pb::MessageParser(() => new ReservedRange()); + public static pb::MessageParser Parser { get { return _parser; } } + + public static pbr::MessageDescriptor Descriptor { + get { return global::Google.Protobuf.Reflection.DescriptorProto.Descriptor.NestedTypes[1]; } + } + + pbr::MessageDescriptor pb::IMessage.Descriptor { + get { return Descriptor; } + } + + public ReservedRange() { + OnConstruction(); + } + + partial void OnConstruction(); + + public ReservedRange(ReservedRange other) : this() { + start_ = other.start_; + end_ = other.end_; + } + + public ReservedRange Clone() { + return new ReservedRange(this); + } + + /// Field number for the "start" field. + public const int StartFieldNumber = 1; + private int start_; + /// + /// Inclusive. + /// + public int Start { + get { return start_; } + set { + start_ = value; + } + } + + /// Field number for the "end" field. + public const int EndFieldNumber = 2; + private int end_; + /// + /// Exclusive. 
+ /// + public int End { + get { return end_; } + set { + end_ = value; + } + } + + public override bool Equals(object other) { + return Equals(other as ReservedRange); + } + + public bool Equals(ReservedRange other) { + if (ReferenceEquals(other, null)) { + return false; + } + if (ReferenceEquals(other, this)) { + return true; + } + if (Start != other.Start) return false; + if (End != other.End) return false; + return true; + } + + public override int GetHashCode() { + int hash = 1; + if (Start != 0) hash ^= Start.GetHashCode(); + if (End != 0) hash ^= End.GetHashCode(); + return hash; + } + + public override string ToString() { + return pb::JsonFormatter.ToDiagnosticString(this); + } + + public void WriteTo(pb::CodedOutputStream output) { + if (Start != 0) { + output.WriteRawTag(8); + output.WriteInt32(Start); + } + if (End != 0) { + output.WriteRawTag(16); + output.WriteInt32(End); + } + } + + public int CalculateSize() { + int size = 0; + if (Start != 0) { + size += 1 + pb::CodedOutputStream.ComputeInt32Size(Start); + } + if (End != 0) { + size += 1 + pb::CodedOutputStream.ComputeInt32Size(End); + } + return size; + } + + public void MergeFrom(ReservedRange other) { + if (other == null) { + return; + } + if (other.Start != 0) { + Start = other.Start; + } + if (other.End != 0) { + End = other.End; + } + } + + public void MergeFrom(pb::CodedInputStream input) { + uint tag; + while ((tag = input.ReadTag()) != 0) { + switch(tag) { + default: + input.SkipLastField(); + break; + case 8: { + Start = input.ReadInt32(); + break; + } + case 16: { + End = input.ReadInt32(); + break; + } + } + } + } + + } + + } + #endregion + + } + + /// + /// Describes a field within a message. + /// + [global::System.Diagnostics.DebuggerNonUserCodeAttribute()] + internal sealed partial class FieldDescriptorProto : pb::IMessage { + private static readonly pb::MessageParser _parser = new pb::MessageParser(() => new FieldDescriptorProto()); + public static pb::MessageParser Parser { get { return _parser; } } + + public static pbr::MessageDescriptor Descriptor { + get { return global::Google.Protobuf.Reflection.DescriptorReflection.Descriptor.MessageTypes[3]; } + } + + pbr::MessageDescriptor pb::IMessage.Descriptor { + get { return Descriptor; } + } + + public FieldDescriptorProto() { + OnConstruction(); + } + + partial void OnConstruction(); + + public FieldDescriptorProto(FieldDescriptorProto other) : this() { + name_ = other.name_; + number_ = other.number_; + label_ = other.label_; + type_ = other.type_; + typeName_ = other.typeName_; + extendee_ = other.extendee_; + defaultValue_ = other.defaultValue_; + oneofIndex_ = other.oneofIndex_; + jsonName_ = other.jsonName_; + Options = other.options_ != null ? other.Options.Clone() : null; + } + + public FieldDescriptorProto Clone() { + return new FieldDescriptorProto(this); + } + + /// Field number for the "name" field. + public const int NameFieldNumber = 1; + private string name_ = ""; + public string Name { + get { return name_; } + set { + name_ = pb::ProtoPreconditions.CheckNotNull(value, "value"); + } + } + + /// Field number for the "number" field. + public const int NumberFieldNumber = 3; + private int number_; + public int Number { + get { return number_; } + set { + number_ = value; + } + } + + /// Field number for the "label" field. 
+ public const int LabelFieldNumber = 4; + private global::Google.Protobuf.Reflection.FieldDescriptorProto.Types.Label label_ = 0; + public global::Google.Protobuf.Reflection.FieldDescriptorProto.Types.Label Label { + get { return label_; } + set { + label_ = value; + } + } + + /// Field number for the "type" field. + public const int TypeFieldNumber = 5; + private global::Google.Protobuf.Reflection.FieldDescriptorProto.Types.Type type_ = 0; + /// + /// If type_name is set, this need not be set. If both this and type_name + /// are set, this must be one of TYPE_ENUM, TYPE_MESSAGE or TYPE_GROUP. + /// + public global::Google.Protobuf.Reflection.FieldDescriptorProto.Types.Type Type { + get { return type_; } + set { + type_ = value; + } + } + + /// Field number for the "type_name" field. + public const int TypeNameFieldNumber = 6; + private string typeName_ = ""; + /// + /// For message and enum types, this is the name of the type. If the name + /// starts with a '.', it is fully-qualified. Otherwise, C++-like scoping + /// rules are used to find the type (i.e. first the nested types within this + /// message are searched, then within the parent, on up to the root + /// namespace). + /// + public string TypeName { + get { return typeName_; } + set { + typeName_ = pb::ProtoPreconditions.CheckNotNull(value, "value"); + } + } + + /// Field number for the "extendee" field. + public const int ExtendeeFieldNumber = 2; + private string extendee_ = ""; + /// + /// For extensions, this is the name of the type being extended. It is + /// resolved in the same manner as type_name. + /// + public string Extendee { + get { return extendee_; } + set { + extendee_ = pb::ProtoPreconditions.CheckNotNull(value, "value"); + } + } + + /// Field number for the "default_value" field. + public const int DefaultValueFieldNumber = 7; + private string defaultValue_ = ""; + /// + /// For numeric types, contains the original text representation of the value. + /// For booleans, "true" or "false". + /// For strings, contains the default text contents (not escaped in any way). + /// For bytes, contains the C escaped value. All bytes >= 128 are escaped. + /// TODO(kenton): Base-64 encode? + /// + public string DefaultValue { + get { return defaultValue_; } + set { + defaultValue_ = pb::ProtoPreconditions.CheckNotNull(value, "value"); + } + } + + /// Field number for the "oneof_index" field. + public const int OneofIndexFieldNumber = 9; + private int oneofIndex_; + /// + /// If set, gives the index of a oneof in the containing type's oneof_decl + /// list. This field is a member of that oneof. + /// + public int OneofIndex { + get { return oneofIndex_; } + set { + oneofIndex_ = value; + } + } + + /// Field number for the "json_name" field. + public const int JsonNameFieldNumber = 10; + private string jsonName_ = ""; + /// + /// JSON name of this field. The value is set by protocol compiler. If the + /// user has set a "json_name" option on this field, that option's value + /// will be used. Otherwise, it's deduced from the field's name by converting + /// it to camelCase. + /// + public string JsonName { + get { return jsonName_; } + set { + jsonName_ = pb::ProtoPreconditions.CheckNotNull(value, "value"); + } + } + + /// Field number for the "options" field. 
+ public const int OptionsFieldNumber = 8; + private global::Google.Protobuf.Reflection.FieldOptions options_; + public global::Google.Protobuf.Reflection.FieldOptions Options { + get { return options_; } + set { + options_ = value; + } + } + + public override bool Equals(object other) { + return Equals(other as FieldDescriptorProto); + } + + public bool Equals(FieldDescriptorProto other) { + if (ReferenceEquals(other, null)) { + return false; + } + if (ReferenceEquals(other, this)) { + return true; + } + if (Name != other.Name) return false; + if (Number != other.Number) return false; + if (Label != other.Label) return false; + if (Type != other.Type) return false; + if (TypeName != other.TypeName) return false; + if (Extendee != other.Extendee) return false; + if (DefaultValue != other.DefaultValue) return false; + if (OneofIndex != other.OneofIndex) return false; + if (JsonName != other.JsonName) return false; + if (!object.Equals(Options, other.Options)) return false; + return true; + } + + public override int GetHashCode() { + int hash = 1; + if (Name.Length != 0) hash ^= Name.GetHashCode(); + if (Number != 0) hash ^= Number.GetHashCode(); + if (Label != 0) hash ^= Label.GetHashCode(); + if (Type != 0) hash ^= Type.GetHashCode(); + if (TypeName.Length != 0) hash ^= TypeName.GetHashCode(); + if (Extendee.Length != 0) hash ^= Extendee.GetHashCode(); + if (DefaultValue.Length != 0) hash ^= DefaultValue.GetHashCode(); + if (OneofIndex != 0) hash ^= OneofIndex.GetHashCode(); + if (JsonName.Length != 0) hash ^= JsonName.GetHashCode(); + if (options_ != null) hash ^= Options.GetHashCode(); + return hash; + } + + public override string ToString() { + return pb::JsonFormatter.ToDiagnosticString(this); + } + + public void WriteTo(pb::CodedOutputStream output) { + if (Name.Length != 0) { + output.WriteRawTag(10); + output.WriteString(Name); + } + if (Extendee.Length != 0) { + output.WriteRawTag(18); + output.WriteString(Extendee); + } + if (Number != 0) { + output.WriteRawTag(24); + output.WriteInt32(Number); + } + if (Label != 0) { + output.WriteRawTag(32); + output.WriteEnum((int) Label); + } + if (Type != 0) { + output.WriteRawTag(40); + output.WriteEnum((int) Type); + } + if (TypeName.Length != 0) { + output.WriteRawTag(50); + output.WriteString(TypeName); + } + if (DefaultValue.Length != 0) { + output.WriteRawTag(58); + output.WriteString(DefaultValue); + } + if (options_ != null) { + output.WriteRawTag(66); + output.WriteMessage(Options); + } + if (OneofIndex != 0) { + output.WriteRawTag(72); + output.WriteInt32(OneofIndex); + } + if (JsonName.Length != 0) { + output.WriteRawTag(82); + output.WriteString(JsonName); + } + } + + public int CalculateSize() { + int size = 0; + if (Name.Length != 0) { + size += 1 + pb::CodedOutputStream.ComputeStringSize(Name); + } + if (Number != 0) { + size += 1 + pb::CodedOutputStream.ComputeInt32Size(Number); + } + if (Label != 0) { + size += 1 + pb::CodedOutputStream.ComputeEnumSize((int) Label); + } + if (Type != 0) { + size += 1 + pb::CodedOutputStream.ComputeEnumSize((int) Type); + } + if (TypeName.Length != 0) { + size += 1 + pb::CodedOutputStream.ComputeStringSize(TypeName); + } + if (Extendee.Length != 0) { + size += 1 + pb::CodedOutputStream.ComputeStringSize(Extendee); + } + if (DefaultValue.Length != 0) { + size += 1 + pb::CodedOutputStream.ComputeStringSize(DefaultValue); + } + if (OneofIndex != 0) { + size += 1 + pb::CodedOutputStream.ComputeInt32Size(OneofIndex); + } + if (JsonName.Length != 0) { + size += 1 + 
pb::CodedOutputStream.ComputeStringSize(JsonName); + } + if (options_ != null) { + size += 1 + pb::CodedOutputStream.ComputeMessageSize(Options); + } + return size; + } + + public void MergeFrom(FieldDescriptorProto other) { + if (other == null) { + return; + } + if (other.Name.Length != 0) { + Name = other.Name; + } + if (other.Number != 0) { + Number = other.Number; + } + if (other.Label != 0) { + Label = other.Label; + } + if (other.Type != 0) { + Type = other.Type; + } + if (other.TypeName.Length != 0) { + TypeName = other.TypeName; + } + if (other.Extendee.Length != 0) { + Extendee = other.Extendee; + } + if (other.DefaultValue.Length != 0) { + DefaultValue = other.DefaultValue; + } + if (other.OneofIndex != 0) { + OneofIndex = other.OneofIndex; + } + if (other.JsonName.Length != 0) { + JsonName = other.JsonName; + } + if (other.options_ != null) { + if (options_ == null) { + options_ = new global::Google.Protobuf.Reflection.FieldOptions(); + } + Options.MergeFrom(other.Options); + } + } + + public void MergeFrom(pb::CodedInputStream input) { + uint tag; + while ((tag = input.ReadTag()) != 0) { + switch(tag) { + default: + input.SkipLastField(); + break; + case 10: { + Name = input.ReadString(); + break; + } + case 18: { + Extendee = input.ReadString(); + break; + } + case 24: { + Number = input.ReadInt32(); + break; + } + case 32: { + label_ = (global::Google.Protobuf.Reflection.FieldDescriptorProto.Types.Label) input.ReadEnum(); + break; + } + case 40: { + type_ = (global::Google.Protobuf.Reflection.FieldDescriptorProto.Types.Type) input.ReadEnum(); + break; + } + case 50: { + TypeName = input.ReadString(); + break; + } + case 58: { + DefaultValue = input.ReadString(); + break; + } + case 66: { + if (options_ == null) { + options_ = new global::Google.Protobuf.Reflection.FieldOptions(); + } + input.ReadMessage(options_); + break; + } + case 72: { + OneofIndex = input.ReadInt32(); + break; + } + case 82: { + JsonName = input.ReadString(); + break; + } + } + } + } + + #region Nested types + /// Container for nested types declared in the FieldDescriptorProto message type. + [global::System.Diagnostics.DebuggerNonUserCodeAttribute()] + public static partial class Types { + internal enum Type { + /// + /// 0 is reserved for errors. + /// Order is weird for historical reasons. + /// + [pbr::OriginalName("TYPE_DOUBLE")] Double = 1, + [pbr::OriginalName("TYPE_FLOAT")] Float = 2, + /// + /// Not ZigZag encoded. Negative numbers take 10 bytes. Use TYPE_SINT64 if + /// negative values are likely. + /// + [pbr::OriginalName("TYPE_INT64")] Int64 = 3, + [pbr::OriginalName("TYPE_UINT64")] Uint64 = 4, + /// + /// Not ZigZag encoded. Negative numbers take 10 bytes. Use TYPE_SINT32 if + /// negative values are likely. + /// + [pbr::OriginalName("TYPE_INT32")] Int32 = 5, + [pbr::OriginalName("TYPE_FIXED64")] Fixed64 = 6, + [pbr::OriginalName("TYPE_FIXED32")] Fixed32 = 7, + [pbr::OriginalName("TYPE_BOOL")] Bool = 8, + [pbr::OriginalName("TYPE_STRING")] String = 9, + /// + /// Tag-delimited aggregate. + /// + [pbr::OriginalName("TYPE_GROUP")] Group = 10, + /// + /// Length-delimited aggregate. + /// + [pbr::OriginalName("TYPE_MESSAGE")] Message = 11, + /// + /// New in version 2. + /// + [pbr::OriginalName("TYPE_BYTES")] Bytes = 12, + [pbr::OriginalName("TYPE_UINT32")] Uint32 = 13, + [pbr::OriginalName("TYPE_ENUM")] Enum = 14, + [pbr::OriginalName("TYPE_SFIXED32")] Sfixed32 = 15, + [pbr::OriginalName("TYPE_SFIXED64")] Sfixed64 = 16, + /// + /// Uses ZigZag encoding. 
+ /// + [pbr::OriginalName("TYPE_SINT32")] Sint32 = 17, + /// + /// Uses ZigZag encoding. + /// + [pbr::OriginalName("TYPE_SINT64")] Sint64 = 18, + } + + internal enum Label { + /// + /// 0 is reserved for errors + /// + [pbr::OriginalName("LABEL_OPTIONAL")] Optional = 1, + [pbr::OriginalName("LABEL_REQUIRED")] Required = 2, + /// + /// TODO(sanjay): Should we add LABEL_MAP? + /// + [pbr::OriginalName("LABEL_REPEATED")] Repeated = 3, + } + + } + #endregion + + } + + /// + /// Describes a oneof. + /// + [global::System.Diagnostics.DebuggerNonUserCodeAttribute()] + internal sealed partial class OneofDescriptorProto : pb::IMessage { + private static readonly pb::MessageParser _parser = new pb::MessageParser(() => new OneofDescriptorProto()); + public static pb::MessageParser Parser { get { return _parser; } } + + public static pbr::MessageDescriptor Descriptor { + get { return global::Google.Protobuf.Reflection.DescriptorReflection.Descriptor.MessageTypes[4]; } + } + + pbr::MessageDescriptor pb::IMessage.Descriptor { + get { return Descriptor; } + } + + public OneofDescriptorProto() { + OnConstruction(); + } + + partial void OnConstruction(); + + public OneofDescriptorProto(OneofDescriptorProto other) : this() { + name_ = other.name_; + } + + public OneofDescriptorProto Clone() { + return new OneofDescriptorProto(this); + } + + /// Field number for the "name" field. + public const int NameFieldNumber = 1; + private string name_ = ""; + public string Name { + get { return name_; } + set { + name_ = pb::ProtoPreconditions.CheckNotNull(value, "value"); + } + } + + public override bool Equals(object other) { + return Equals(other as OneofDescriptorProto); + } + + public bool Equals(OneofDescriptorProto other) { + if (ReferenceEquals(other, null)) { + return false; + } + if (ReferenceEquals(other, this)) { + return true; + } + if (Name != other.Name) return false; + return true; + } + + public override int GetHashCode() { + int hash = 1; + if (Name.Length != 0) hash ^= Name.GetHashCode(); + return hash; + } + + public override string ToString() { + return pb::JsonFormatter.ToDiagnosticString(this); + } + + public void WriteTo(pb::CodedOutputStream output) { + if (Name.Length != 0) { + output.WriteRawTag(10); + output.WriteString(Name); + } + } + + public int CalculateSize() { + int size = 0; + if (Name.Length != 0) { + size += 1 + pb::CodedOutputStream.ComputeStringSize(Name); + } + return size; + } + + public void MergeFrom(OneofDescriptorProto other) { + if (other == null) { + return; + } + if (other.Name.Length != 0) { + Name = other.Name; + } + } + + public void MergeFrom(pb::CodedInputStream input) { + uint tag; + while ((tag = input.ReadTag()) != 0) { + switch(tag) { + default: + input.SkipLastField(); + break; + case 10: { + Name = input.ReadString(); + break; + } + } + } + } + + } + + /// + /// Describes an enum type. 
+ /// + [global::System.Diagnostics.DebuggerNonUserCodeAttribute()] + internal sealed partial class EnumDescriptorProto : pb::IMessage { + private static readonly pb::MessageParser _parser = new pb::MessageParser(() => new EnumDescriptorProto()); + public static pb::MessageParser Parser { get { return _parser; } } + + public static pbr::MessageDescriptor Descriptor { + get { return global::Google.Protobuf.Reflection.DescriptorReflection.Descriptor.MessageTypes[5]; } + } + + pbr::MessageDescriptor pb::IMessage.Descriptor { + get { return Descriptor; } + } + + public EnumDescriptorProto() { + OnConstruction(); + } + + partial void OnConstruction(); + + public EnumDescriptorProto(EnumDescriptorProto other) : this() { + name_ = other.name_; + value_ = other.value_.Clone(); + Options = other.options_ != null ? other.Options.Clone() : null; + } + + public EnumDescriptorProto Clone() { + return new EnumDescriptorProto(this); + } + + /// Field number for the "name" field. + public const int NameFieldNumber = 1; + private string name_ = ""; + public string Name { + get { return name_; } + set { + name_ = pb::ProtoPreconditions.CheckNotNull(value, "value"); + } + } + + /// Field number for the "value" field. + public const int ValueFieldNumber = 2; + private static readonly pb::FieldCodec _repeated_value_codec + = pb::FieldCodec.ForMessage(18, global::Google.Protobuf.Reflection.EnumValueDescriptorProto.Parser); + private readonly pbc::RepeatedField value_ = new pbc::RepeatedField(); + public pbc::RepeatedField Value { + get { return value_; } + } + + /// Field number for the "options" field. + public const int OptionsFieldNumber = 3; + private global::Google.Protobuf.Reflection.EnumOptions options_; + public global::Google.Protobuf.Reflection.EnumOptions Options { + get { return options_; } + set { + options_ = value; + } + } + + public override bool Equals(object other) { + return Equals(other as EnumDescriptorProto); + } + + public bool Equals(EnumDescriptorProto other) { + if (ReferenceEquals(other, null)) { + return false; + } + if (ReferenceEquals(other, this)) { + return true; + } + if (Name != other.Name) return false; + if(!value_.Equals(other.value_)) return false; + if (!object.Equals(Options, other.Options)) return false; + return true; + } + + public override int GetHashCode() { + int hash = 1; + if (Name.Length != 0) hash ^= Name.GetHashCode(); + hash ^= value_.GetHashCode(); + if (options_ != null) hash ^= Options.GetHashCode(); + return hash; + } + + public override string ToString() { + return pb::JsonFormatter.ToDiagnosticString(this); + } + + public void WriteTo(pb::CodedOutputStream output) { + if (Name.Length != 0) { + output.WriteRawTag(10); + output.WriteString(Name); + } + value_.WriteTo(output, _repeated_value_codec); + if (options_ != null) { + output.WriteRawTag(26); + output.WriteMessage(Options); + } + } + + public int CalculateSize() { + int size = 0; + if (Name.Length != 0) { + size += 1 + pb::CodedOutputStream.ComputeStringSize(Name); + } + size += value_.CalculateSize(_repeated_value_codec); + if (options_ != null) { + size += 1 + pb::CodedOutputStream.ComputeMessageSize(Options); + } + return size; + } + + public void MergeFrom(EnumDescriptorProto other) { + if (other == null) { + return; + } + if (other.Name.Length != 0) { + Name = other.Name; + } + value_.Add(other.value_); + if (other.options_ != null) { + if (options_ == null) { + options_ = new global::Google.Protobuf.Reflection.EnumOptions(); + } + Options.MergeFrom(other.Options); + } + } + + public void 
MergeFrom(pb::CodedInputStream input) { + uint tag; + while ((tag = input.ReadTag()) != 0) { + switch(tag) { + default: + input.SkipLastField(); + break; + case 10: { + Name = input.ReadString(); + break; + } + case 18: { + value_.AddEntriesFrom(input, _repeated_value_codec); + break; + } + case 26: { + if (options_ == null) { + options_ = new global::Google.Protobuf.Reflection.EnumOptions(); + } + input.ReadMessage(options_); + break; + } + } + } + } + + } + + /// + /// Describes a value within an enum. + /// + [global::System.Diagnostics.DebuggerNonUserCodeAttribute()] + internal sealed partial class EnumValueDescriptorProto : pb::IMessage { + private static readonly pb::MessageParser _parser = new pb::MessageParser(() => new EnumValueDescriptorProto()); + public static pb::MessageParser Parser { get { return _parser; } } + + public static pbr::MessageDescriptor Descriptor { + get { return global::Google.Protobuf.Reflection.DescriptorReflection.Descriptor.MessageTypes[6]; } + } + + pbr::MessageDescriptor pb::IMessage.Descriptor { + get { return Descriptor; } + } + + public EnumValueDescriptorProto() { + OnConstruction(); + } + + partial void OnConstruction(); + + public EnumValueDescriptorProto(EnumValueDescriptorProto other) : this() { + name_ = other.name_; + number_ = other.number_; + Options = other.options_ != null ? other.Options.Clone() : null; + } + + public EnumValueDescriptorProto Clone() { + return new EnumValueDescriptorProto(this); + } + + /// Field number for the "name" field. + public const int NameFieldNumber = 1; + private string name_ = ""; + public string Name { + get { return name_; } + set { + name_ = pb::ProtoPreconditions.CheckNotNull(value, "value"); + } + } + + /// Field number for the "number" field. + public const int NumberFieldNumber = 2; + private int number_; + public int Number { + get { return number_; } + set { + number_ = value; + } + } + + /// Field number for the "options" field. 
+ public const int OptionsFieldNumber = 3; + private global::Google.Protobuf.Reflection.EnumValueOptions options_; + public global::Google.Protobuf.Reflection.EnumValueOptions Options { + get { return options_; } + set { + options_ = value; + } + } + + public override bool Equals(object other) { + return Equals(other as EnumValueDescriptorProto); + } + + public bool Equals(EnumValueDescriptorProto other) { + if (ReferenceEquals(other, null)) { + return false; + } + if (ReferenceEquals(other, this)) { + return true; + } + if (Name != other.Name) return false; + if (Number != other.Number) return false; + if (!object.Equals(Options, other.Options)) return false; + return true; + } + + public override int GetHashCode() { + int hash = 1; + if (Name.Length != 0) hash ^= Name.GetHashCode(); + if (Number != 0) hash ^= Number.GetHashCode(); + if (options_ != null) hash ^= Options.GetHashCode(); + return hash; + } + + public override string ToString() { + return pb::JsonFormatter.ToDiagnosticString(this); + } + + public void WriteTo(pb::CodedOutputStream output) { + if (Name.Length != 0) { + output.WriteRawTag(10); + output.WriteString(Name); + } + if (Number != 0) { + output.WriteRawTag(16); + output.WriteInt32(Number); + } + if (options_ != null) { + output.WriteRawTag(26); + output.WriteMessage(Options); + } + } + + public int CalculateSize() { + int size = 0; + if (Name.Length != 0) { + size += 1 + pb::CodedOutputStream.ComputeStringSize(Name); + } + if (Number != 0) { + size += 1 + pb::CodedOutputStream.ComputeInt32Size(Number); + } + if (options_ != null) { + size += 1 + pb::CodedOutputStream.ComputeMessageSize(Options); + } + return size; + } + + public void MergeFrom(EnumValueDescriptorProto other) { + if (other == null) { + return; + } + if (other.Name.Length != 0) { + Name = other.Name; + } + if (other.Number != 0) { + Number = other.Number; + } + if (other.options_ != null) { + if (options_ == null) { + options_ = new global::Google.Protobuf.Reflection.EnumValueOptions(); + } + Options.MergeFrom(other.Options); + } + } + + public void MergeFrom(pb::CodedInputStream input) { + uint tag; + while ((tag = input.ReadTag()) != 0) { + switch(tag) { + default: + input.SkipLastField(); + break; + case 10: { + Name = input.ReadString(); + break; + } + case 16: { + Number = input.ReadInt32(); + break; + } + case 26: { + if (options_ == null) { + options_ = new global::Google.Protobuf.Reflection.EnumValueOptions(); + } + input.ReadMessage(options_); + break; + } + } + } + } + + } + + /// + /// Describes a service. + /// + [global::System.Diagnostics.DebuggerNonUserCodeAttribute()] + internal sealed partial class ServiceDescriptorProto : pb::IMessage { + private static readonly pb::MessageParser _parser = new pb::MessageParser(() => new ServiceDescriptorProto()); + public static pb::MessageParser Parser { get { return _parser; } } + + public static pbr::MessageDescriptor Descriptor { + get { return global::Google.Protobuf.Reflection.DescriptorReflection.Descriptor.MessageTypes[7]; } + } + + pbr::MessageDescriptor pb::IMessage.Descriptor { + get { return Descriptor; } + } + + public ServiceDescriptorProto() { + OnConstruction(); + } + + partial void OnConstruction(); + + public ServiceDescriptorProto(ServiceDescriptorProto other) : this() { + name_ = other.name_; + method_ = other.method_.Clone(); + Options = other.options_ != null ? other.Options.Clone() : null; + } + + public ServiceDescriptorProto Clone() { + return new ServiceDescriptorProto(this); + } + + /// Field number for the "name" field. 
+ public const int NameFieldNumber = 1; + private string name_ = ""; + public string Name { + get { return name_; } + set { + name_ = pb::ProtoPreconditions.CheckNotNull(value, "value"); + } + } + + /// Field number for the "method" field. + public const int MethodFieldNumber = 2; + private static readonly pb::FieldCodec _repeated_method_codec + = pb::FieldCodec.ForMessage(18, global::Google.Protobuf.Reflection.MethodDescriptorProto.Parser); + private readonly pbc::RepeatedField method_ = new pbc::RepeatedField(); + public pbc::RepeatedField Method { + get { return method_; } + } + + /// Field number for the "options" field. + public const int OptionsFieldNumber = 3; + private global::Google.Protobuf.Reflection.ServiceOptions options_; + public global::Google.Protobuf.Reflection.ServiceOptions Options { + get { return options_; } + set { + options_ = value; + } + } + + public override bool Equals(object other) { + return Equals(other as ServiceDescriptorProto); + } + + public bool Equals(ServiceDescriptorProto other) { + if (ReferenceEquals(other, null)) { + return false; + } + if (ReferenceEquals(other, this)) { + return true; + } + if (Name != other.Name) return false; + if(!method_.Equals(other.method_)) return false; + if (!object.Equals(Options, other.Options)) return false; + return true; + } + + public override int GetHashCode() { + int hash = 1; + if (Name.Length != 0) hash ^= Name.GetHashCode(); + hash ^= method_.GetHashCode(); + if (options_ != null) hash ^= Options.GetHashCode(); + return hash; + } + + public override string ToString() { + return pb::JsonFormatter.ToDiagnosticString(this); + } + + public void WriteTo(pb::CodedOutputStream output) { + if (Name.Length != 0) { + output.WriteRawTag(10); + output.WriteString(Name); + } + method_.WriteTo(output, _repeated_method_codec); + if (options_ != null) { + output.WriteRawTag(26); + output.WriteMessage(Options); + } + } + + public int CalculateSize() { + int size = 0; + if (Name.Length != 0) { + size += 1 + pb::CodedOutputStream.ComputeStringSize(Name); + } + size += method_.CalculateSize(_repeated_method_codec); + if (options_ != null) { + size += 1 + pb::CodedOutputStream.ComputeMessageSize(Options); + } + return size; + } + + public void MergeFrom(ServiceDescriptorProto other) { + if (other == null) { + return; + } + if (other.Name.Length != 0) { + Name = other.Name; + } + method_.Add(other.method_); + if (other.options_ != null) { + if (options_ == null) { + options_ = new global::Google.Protobuf.Reflection.ServiceOptions(); + } + Options.MergeFrom(other.Options); + } + } + + public void MergeFrom(pb::CodedInputStream input) { + uint tag; + while ((tag = input.ReadTag()) != 0) { + switch(tag) { + default: + input.SkipLastField(); + break; + case 10: { + Name = input.ReadString(); + break; + } + case 18: { + method_.AddEntriesFrom(input, _repeated_method_codec); + break; + } + case 26: { + if (options_ == null) { + options_ = new global::Google.Protobuf.Reflection.ServiceOptions(); + } + input.ReadMessage(options_); + break; + } + } + } + } + + } + + /// + /// Describes a method of a service. 
+ /// + [global::System.Diagnostics.DebuggerNonUserCodeAttribute()] + internal sealed partial class MethodDescriptorProto : pb::IMessage { + private static readonly pb::MessageParser _parser = new pb::MessageParser(() => new MethodDescriptorProto()); + public static pb::MessageParser Parser { get { return _parser; } } + + public static pbr::MessageDescriptor Descriptor { + get { return global::Google.Protobuf.Reflection.DescriptorReflection.Descriptor.MessageTypes[8]; } + } + + pbr::MessageDescriptor pb::IMessage.Descriptor { + get { return Descriptor; } + } + + public MethodDescriptorProto() { + OnConstruction(); + } + + partial void OnConstruction(); + + public MethodDescriptorProto(MethodDescriptorProto other) : this() { + name_ = other.name_; + inputType_ = other.inputType_; + outputType_ = other.outputType_; + Options = other.options_ != null ? other.Options.Clone() : null; + clientStreaming_ = other.clientStreaming_; + serverStreaming_ = other.serverStreaming_; + } + + public MethodDescriptorProto Clone() { + return new MethodDescriptorProto(this); + } + + /// Field number for the "name" field. + public const int NameFieldNumber = 1; + private string name_ = ""; + public string Name { + get { return name_; } + set { + name_ = pb::ProtoPreconditions.CheckNotNull(value, "value"); + } + } + + /// Field number for the "input_type" field. + public const int InputTypeFieldNumber = 2; + private string inputType_ = ""; + /// + /// Input and output type names. These are resolved in the same way as + /// FieldDescriptorProto.type_name, but must refer to a message type. + /// + public string InputType { + get { return inputType_; } + set { + inputType_ = pb::ProtoPreconditions.CheckNotNull(value, "value"); + } + } + + /// Field number for the "output_type" field. + public const int OutputTypeFieldNumber = 3; + private string outputType_ = ""; + public string OutputType { + get { return outputType_; } + set { + outputType_ = pb::ProtoPreconditions.CheckNotNull(value, "value"); + } + } + + /// Field number for the "options" field. + public const int OptionsFieldNumber = 4; + private global::Google.Protobuf.Reflection.MethodOptions options_; + public global::Google.Protobuf.Reflection.MethodOptions Options { + get { return options_; } + set { + options_ = value; + } + } + + /// Field number for the "client_streaming" field. + public const int ClientStreamingFieldNumber = 5; + private bool clientStreaming_; + /// + /// Identifies if client streams multiple client messages + /// + public bool ClientStreaming { + get { return clientStreaming_; } + set { + clientStreaming_ = value; + } + } + + /// Field number for the "server_streaming" field. 
+ public const int ServerStreamingFieldNumber = 6; + private bool serverStreaming_; + /// + /// Identifies if server streams multiple server messages + /// + public bool ServerStreaming { + get { return serverStreaming_; } + set { + serverStreaming_ = value; + } + } + + public override bool Equals(object other) { + return Equals(other as MethodDescriptorProto); + } + + public bool Equals(MethodDescriptorProto other) { + if (ReferenceEquals(other, null)) { + return false; + } + if (ReferenceEquals(other, this)) { + return true; + } + if (Name != other.Name) return false; + if (InputType != other.InputType) return false; + if (OutputType != other.OutputType) return false; + if (!object.Equals(Options, other.Options)) return false; + if (ClientStreaming != other.ClientStreaming) return false; + if (ServerStreaming != other.ServerStreaming) return false; + return true; + } + + public override int GetHashCode() { + int hash = 1; + if (Name.Length != 0) hash ^= Name.GetHashCode(); + if (InputType.Length != 0) hash ^= InputType.GetHashCode(); + if (OutputType.Length != 0) hash ^= OutputType.GetHashCode(); + if (options_ != null) hash ^= Options.GetHashCode(); + if (ClientStreaming != false) hash ^= ClientStreaming.GetHashCode(); + if (ServerStreaming != false) hash ^= ServerStreaming.GetHashCode(); + return hash; + } + + public override string ToString() { + return pb::JsonFormatter.ToDiagnosticString(this); + } + + public void WriteTo(pb::CodedOutputStream output) { + if (Name.Length != 0) { + output.WriteRawTag(10); + output.WriteString(Name); + } + if (InputType.Length != 0) { + output.WriteRawTag(18); + output.WriteString(InputType); + } + if (OutputType.Length != 0) { + output.WriteRawTag(26); + output.WriteString(OutputType); + } + if (options_ != null) { + output.WriteRawTag(34); + output.WriteMessage(Options); + } + if (ClientStreaming != false) { + output.WriteRawTag(40); + output.WriteBool(ClientStreaming); + } + if (ServerStreaming != false) { + output.WriteRawTag(48); + output.WriteBool(ServerStreaming); + } + } + + public int CalculateSize() { + int size = 0; + if (Name.Length != 0) { + size += 1 + pb::CodedOutputStream.ComputeStringSize(Name); + } + if (InputType.Length != 0) { + size += 1 + pb::CodedOutputStream.ComputeStringSize(InputType); + } + if (OutputType.Length != 0) { + size += 1 + pb::CodedOutputStream.ComputeStringSize(OutputType); + } + if (options_ != null) { + size += 1 + pb::CodedOutputStream.ComputeMessageSize(Options); + } + if (ClientStreaming != false) { + size += 1 + 1; + } + if (ServerStreaming != false) { + size += 1 + 1; + } + return size; + } + + public void MergeFrom(MethodDescriptorProto other) { + if (other == null) { + return; + } + if (other.Name.Length != 0) { + Name = other.Name; + } + if (other.InputType.Length != 0) { + InputType = other.InputType; + } + if (other.OutputType.Length != 0) { + OutputType = other.OutputType; + } + if (other.options_ != null) { + if (options_ == null) { + options_ = new global::Google.Protobuf.Reflection.MethodOptions(); + } + Options.MergeFrom(other.Options); + } + if (other.ClientStreaming != false) { + ClientStreaming = other.ClientStreaming; + } + if (other.ServerStreaming != false) { + ServerStreaming = other.ServerStreaming; + } + } + + public void MergeFrom(pb::CodedInputStream input) { + uint tag; + while ((tag = input.ReadTag()) != 0) { + switch(tag) { + default: + input.SkipLastField(); + break; + case 10: { + Name = input.ReadString(); + break; + } + case 18: { + InputType = input.ReadString(); + break; + } + 
case 26: { + OutputType = input.ReadString(); + break; + } + case 34: { + if (options_ == null) { + options_ = new global::Google.Protobuf.Reflection.MethodOptions(); + } + input.ReadMessage(options_); + break; + } + case 40: { + ClientStreaming = input.ReadBool(); + break; + } + case 48: { + ServerStreaming = input.ReadBool(); + break; + } + } + } + } + + } + + [global::System.Diagnostics.DebuggerNonUserCodeAttribute()] + internal sealed partial class FileOptions : pb::IMessage { + private static readonly pb::MessageParser _parser = new pb::MessageParser(() => new FileOptions()); + public static pb::MessageParser Parser { get { return _parser; } } + + public static pbr::MessageDescriptor Descriptor { + get { return global::Google.Protobuf.Reflection.DescriptorReflection.Descriptor.MessageTypes[9]; } + } + + pbr::MessageDescriptor pb::IMessage.Descriptor { + get { return Descriptor; } + } + + public FileOptions() { + OnConstruction(); + } + + partial void OnConstruction(); + + public FileOptions(FileOptions other) : this() { + javaPackage_ = other.javaPackage_; + javaOuterClassname_ = other.javaOuterClassname_; + javaMultipleFiles_ = other.javaMultipleFiles_; + javaGenerateEqualsAndHash_ = other.javaGenerateEqualsAndHash_; + javaStringCheckUtf8_ = other.javaStringCheckUtf8_; + optimizeFor_ = other.optimizeFor_; + goPackage_ = other.goPackage_; + ccGenericServices_ = other.ccGenericServices_; + javaGenericServices_ = other.javaGenericServices_; + pyGenericServices_ = other.pyGenericServices_; + deprecated_ = other.deprecated_; + ccEnableArenas_ = other.ccEnableArenas_; + objcClassPrefix_ = other.objcClassPrefix_; + csharpNamespace_ = other.csharpNamespace_; + uninterpretedOption_ = other.uninterpretedOption_.Clone(); + } + + public FileOptions Clone() { + return new FileOptions(this); + } + + /// Field number for the "java_package" field. + public const int JavaPackageFieldNumber = 1; + private string javaPackage_ = ""; + /// + /// Sets the Java package where classes generated from this .proto will be + /// placed. By default, the proto package is used, but this is often + /// inappropriate because proto packages do not normally start with backwards + /// domain names. + /// + public string JavaPackage { + get { return javaPackage_; } + set { + javaPackage_ = pb::ProtoPreconditions.CheckNotNull(value, "value"); + } + } + + /// Field number for the "java_outer_classname" field. + public const int JavaOuterClassnameFieldNumber = 8; + private string javaOuterClassname_ = ""; + /// + /// If set, all the classes from the .proto file are wrapped in a single + /// outer class with the given name. This applies to both Proto1 + /// (equivalent to the old "--one_java_file" option) and Proto2 (where + /// a .proto always translates to a single class, but you may want to + /// explicitly choose the class name). + /// + public string JavaOuterClassname { + get { return javaOuterClassname_; } + set { + javaOuterClassname_ = pb::ProtoPreconditions.CheckNotNull(value, "value"); + } + } + + /// Field number for the "java_multiple_files" field. + public const int JavaMultipleFilesFieldNumber = 10; + private bool javaMultipleFiles_; + /// + /// If set true, then the Java code generator will generate a separate .java + /// file for each top-level message, enum, and service defined in the .proto + /// file. Thus, these types will *not* be nested inside the outer class + /// named by java_outer_classname. 
However, the outer class will still be + /// generated to contain the file's getDescriptor() method as well as any + /// top-level extensions defined in the file. + /// + public bool JavaMultipleFiles { + get { return javaMultipleFiles_; } + set { + javaMultipleFiles_ = value; + } + } + + /// Field number for the "java_generate_equals_and_hash" field. + public const int JavaGenerateEqualsAndHashFieldNumber = 20; + private bool javaGenerateEqualsAndHash_; + /// + /// If set true, then the Java code generator will generate equals() and + /// hashCode() methods for all messages defined in the .proto file. + /// This increases generated code size, potentially substantially for large + /// protos, which may harm a memory-constrained application. + /// - In the full runtime this is a speed optimization, as the + /// AbstractMessage base class includes reflection-based implementations of + /// these methods. + /// - In the lite runtime, setting this option changes the semantics of + /// equals() and hashCode() to more closely match those of the full runtime; + /// the generated methods compute their results based on field values rather + /// than object identity. (Implementations should not assume that hashcodes + /// will be consistent across runtimes or versions of the protocol compiler.) + /// + public bool JavaGenerateEqualsAndHash { + get { return javaGenerateEqualsAndHash_; } + set { + javaGenerateEqualsAndHash_ = value; + } + } + + /// Field number for the "java_string_check_utf8" field. + public const int JavaStringCheckUtf8FieldNumber = 27; + private bool javaStringCheckUtf8_; + /// + /// If set true, then the Java2 code generator will generate code that + /// throws an exception whenever an attempt is made to assign a non-UTF-8 + /// byte sequence to a string field. + /// Message reflection will do the same. + /// However, an extension field still accepts non-UTF-8 byte sequences. + /// This option has no effect on when used with the lite runtime. + /// + public bool JavaStringCheckUtf8 { + get { return javaStringCheckUtf8_; } + set { + javaStringCheckUtf8_ = value; + } + } + + /// Field number for the "optimize_for" field. + public const int OptimizeForFieldNumber = 9; + private global::Google.Protobuf.Reflection.FileOptions.Types.OptimizeMode optimizeFor_ = 0; + public global::Google.Protobuf.Reflection.FileOptions.Types.OptimizeMode OptimizeFor { + get { return optimizeFor_; } + set { + optimizeFor_ = value; + } + } + + /// Field number for the "go_package" field. + public const int GoPackageFieldNumber = 11; + private string goPackage_ = ""; + /// + /// Sets the Go package where structs generated from this .proto will be + /// placed. If omitted, the Go package will be derived from the following: + /// - The basename of the package import path, if provided. + /// - Otherwise, the package statement in the .proto file, if present. + /// - Otherwise, the basename of the .proto file, without extension. + /// + public string GoPackage { + get { return goPackage_; } + set { + goPackage_ = pb::ProtoPreconditions.CheckNotNull(value, "value"); + } + } + + /// Field number for the "cc_generic_services" field. + public const int CcGenericServicesFieldNumber = 16; + private bool ccGenericServices_; + /// + /// Should generic services be generated in each language? "Generic" services + /// are not specific to any particular RPC system. They are generated by the + /// main code generators in each language (without additional plugins). 
+ /// Generic services were the only kind of service generation supported by + /// early versions of google.protobuf. + /// + /// Generic services are now considered deprecated in favor of using plugins + /// that generate code specific to your particular RPC system. Therefore, + /// these default to false. Old code which depends on generic services should + /// explicitly set them to true. + /// + public bool CcGenericServices { + get { return ccGenericServices_; } + set { + ccGenericServices_ = value; + } + } + + /// Field number for the "java_generic_services" field. + public const int JavaGenericServicesFieldNumber = 17; + private bool javaGenericServices_; + public bool JavaGenericServices { + get { return javaGenericServices_; } + set { + javaGenericServices_ = value; + } + } + + /// Field number for the "py_generic_services" field. + public const int PyGenericServicesFieldNumber = 18; + private bool pyGenericServices_; + public bool PyGenericServices { + get { return pyGenericServices_; } + set { + pyGenericServices_ = value; + } + } + + /// Field number for the "deprecated" field. + public const int DeprecatedFieldNumber = 23; + private bool deprecated_; + /// + /// Is this file deprecated? + /// Depending on the target platform, this can emit Deprecated annotations + /// for everything in the file, or it will be completely ignored; in the very + /// least, this is a formalization for deprecating files. + /// + public bool Deprecated { + get { return deprecated_; } + set { + deprecated_ = value; + } + } + + /// Field number for the "cc_enable_arenas" field. + public const int CcEnableArenasFieldNumber = 31; + private bool ccEnableArenas_; + /// + /// Enables the use of arenas for the proto messages in this file. This applies + /// only to generated classes for C++. + /// + public bool CcEnableArenas { + get { return ccEnableArenas_; } + set { + ccEnableArenas_ = value; + } + } + + /// Field number for the "objc_class_prefix" field. + public const int ObjcClassPrefixFieldNumber = 36; + private string objcClassPrefix_ = ""; + /// + /// Sets the objective c class prefix which is prepended to all objective c + /// generated classes from this .proto. There is no default. + /// + public string ObjcClassPrefix { + get { return objcClassPrefix_; } + set { + objcClassPrefix_ = pb::ProtoPreconditions.CheckNotNull(value, "value"); + } + } + + /// Field number for the "csharp_namespace" field. + public const int CsharpNamespaceFieldNumber = 37; + private string csharpNamespace_ = ""; + /// + /// Namespace for generated classes; defaults to the package. + /// + public string CsharpNamespace { + get { return csharpNamespace_; } + set { + csharpNamespace_ = pb::ProtoPreconditions.CheckNotNull(value, "value"); + } + } + + /// Field number for the "uninterpreted_option" field. + public const int UninterpretedOptionFieldNumber = 999; + private static readonly pb::FieldCodec _repeated_uninterpretedOption_codec + = pb::FieldCodec.ForMessage(7994, global::Google.Protobuf.Reflection.UninterpretedOption.Parser); + private readonly pbc::RepeatedField uninterpretedOption_ = new pbc::RepeatedField(); + /// + /// The parser stores options it doesn't recognize here. See above. 
+ /// + public pbc::RepeatedField UninterpretedOption { + get { return uninterpretedOption_; } + } + + public override bool Equals(object other) { + return Equals(other as FileOptions); + } + + public bool Equals(FileOptions other) { + if (ReferenceEquals(other, null)) { + return false; + } + if (ReferenceEquals(other, this)) { + return true; + } + if (JavaPackage != other.JavaPackage) return false; + if (JavaOuterClassname != other.JavaOuterClassname) return false; + if (JavaMultipleFiles != other.JavaMultipleFiles) return false; + if (JavaGenerateEqualsAndHash != other.JavaGenerateEqualsAndHash) return false; + if (JavaStringCheckUtf8 != other.JavaStringCheckUtf8) return false; + if (OptimizeFor != other.OptimizeFor) return false; + if (GoPackage != other.GoPackage) return false; + if (CcGenericServices != other.CcGenericServices) return false; + if (JavaGenericServices != other.JavaGenericServices) return false; + if (PyGenericServices != other.PyGenericServices) return false; + if (Deprecated != other.Deprecated) return false; + if (CcEnableArenas != other.CcEnableArenas) return false; + if (ObjcClassPrefix != other.ObjcClassPrefix) return false; + if (CsharpNamespace != other.CsharpNamespace) return false; + if(!uninterpretedOption_.Equals(other.uninterpretedOption_)) return false; + return true; + } + + public override int GetHashCode() { + int hash = 1; + if (JavaPackage.Length != 0) hash ^= JavaPackage.GetHashCode(); + if (JavaOuterClassname.Length != 0) hash ^= JavaOuterClassname.GetHashCode(); + if (JavaMultipleFiles != false) hash ^= JavaMultipleFiles.GetHashCode(); + if (JavaGenerateEqualsAndHash != false) hash ^= JavaGenerateEqualsAndHash.GetHashCode(); + if (JavaStringCheckUtf8 != false) hash ^= JavaStringCheckUtf8.GetHashCode(); + if (OptimizeFor != 0) hash ^= OptimizeFor.GetHashCode(); + if (GoPackage.Length != 0) hash ^= GoPackage.GetHashCode(); + if (CcGenericServices != false) hash ^= CcGenericServices.GetHashCode(); + if (JavaGenericServices != false) hash ^= JavaGenericServices.GetHashCode(); + if (PyGenericServices != false) hash ^= PyGenericServices.GetHashCode(); + if (Deprecated != false) hash ^= Deprecated.GetHashCode(); + if (CcEnableArenas != false) hash ^= CcEnableArenas.GetHashCode(); + if (ObjcClassPrefix.Length != 0) hash ^= ObjcClassPrefix.GetHashCode(); + if (CsharpNamespace.Length != 0) hash ^= CsharpNamespace.GetHashCode(); + hash ^= uninterpretedOption_.GetHashCode(); + return hash; + } + + public override string ToString() { + return pb::JsonFormatter.ToDiagnosticString(this); + } + + public void WriteTo(pb::CodedOutputStream output) { + if (JavaPackage.Length != 0) { + output.WriteRawTag(10); + output.WriteString(JavaPackage); + } + if (JavaOuterClassname.Length != 0) { + output.WriteRawTag(66); + output.WriteString(JavaOuterClassname); + } + if (OptimizeFor != 0) { + output.WriteRawTag(72); + output.WriteEnum((int) OptimizeFor); + } + if (JavaMultipleFiles != false) { + output.WriteRawTag(80); + output.WriteBool(JavaMultipleFiles); + } + if (GoPackage.Length != 0) { + output.WriteRawTag(90); + output.WriteString(GoPackage); + } + if (CcGenericServices != false) { + output.WriteRawTag(128, 1); + output.WriteBool(CcGenericServices); + } + if (JavaGenericServices != false) { + output.WriteRawTag(136, 1); + output.WriteBool(JavaGenericServices); + } + if (PyGenericServices != false) { + output.WriteRawTag(144, 1); + output.WriteBool(PyGenericServices); + } + if (JavaGenerateEqualsAndHash != false) { + output.WriteRawTag(160, 1); + 
output.WriteBool(JavaGenerateEqualsAndHash); + } + if (Deprecated != false) { + output.WriteRawTag(184, 1); + output.WriteBool(Deprecated); + } + if (JavaStringCheckUtf8 != false) { + output.WriteRawTag(216, 1); + output.WriteBool(JavaStringCheckUtf8); + } + if (CcEnableArenas != false) { + output.WriteRawTag(248, 1); + output.WriteBool(CcEnableArenas); + } + if (ObjcClassPrefix.Length != 0) { + output.WriteRawTag(162, 2); + output.WriteString(ObjcClassPrefix); + } + if (CsharpNamespace.Length != 0) { + output.WriteRawTag(170, 2); + output.WriteString(CsharpNamespace); + } + uninterpretedOption_.WriteTo(output, _repeated_uninterpretedOption_codec); + } + + public int CalculateSize() { + int size = 0; + if (JavaPackage.Length != 0) { + size += 1 + pb::CodedOutputStream.ComputeStringSize(JavaPackage); + } + if (JavaOuterClassname.Length != 0) { + size += 1 + pb::CodedOutputStream.ComputeStringSize(JavaOuterClassname); + } + if (JavaMultipleFiles != false) { + size += 1 + 1; + } + if (JavaGenerateEqualsAndHash != false) { + size += 2 + 1; + } + if (JavaStringCheckUtf8 != false) { + size += 2 + 1; + } + if (OptimizeFor != 0) { + size += 1 + pb::CodedOutputStream.ComputeEnumSize((int) OptimizeFor); + } + if (GoPackage.Length != 0) { + size += 1 + pb::CodedOutputStream.ComputeStringSize(GoPackage); + } + if (CcGenericServices != false) { + size += 2 + 1; + } + if (JavaGenericServices != false) { + size += 2 + 1; + } + if (PyGenericServices != false) { + size += 2 + 1; + } + if (Deprecated != false) { + size += 2 + 1; + } + if (CcEnableArenas != false) { + size += 2 + 1; + } + if (ObjcClassPrefix.Length != 0) { + size += 2 + pb::CodedOutputStream.ComputeStringSize(ObjcClassPrefix); + } + if (CsharpNamespace.Length != 0) { + size += 2 + pb::CodedOutputStream.ComputeStringSize(CsharpNamespace); + } + size += uninterpretedOption_.CalculateSize(_repeated_uninterpretedOption_codec); + return size; + } + + public void MergeFrom(FileOptions other) { + if (other == null) { + return; + } + if (other.JavaPackage.Length != 0) { + JavaPackage = other.JavaPackage; + } + if (other.JavaOuterClassname.Length != 0) { + JavaOuterClassname = other.JavaOuterClassname; + } + if (other.JavaMultipleFiles != false) { + JavaMultipleFiles = other.JavaMultipleFiles; + } + if (other.JavaGenerateEqualsAndHash != false) { + JavaGenerateEqualsAndHash = other.JavaGenerateEqualsAndHash; + } + if (other.JavaStringCheckUtf8 != false) { + JavaStringCheckUtf8 = other.JavaStringCheckUtf8; + } + if (other.OptimizeFor != 0) { + OptimizeFor = other.OptimizeFor; + } + if (other.GoPackage.Length != 0) { + GoPackage = other.GoPackage; + } + if (other.CcGenericServices != false) { + CcGenericServices = other.CcGenericServices; + } + if (other.JavaGenericServices != false) { + JavaGenericServices = other.JavaGenericServices; + } + if (other.PyGenericServices != false) { + PyGenericServices = other.PyGenericServices; + } + if (other.Deprecated != false) { + Deprecated = other.Deprecated; + } + if (other.CcEnableArenas != false) { + CcEnableArenas = other.CcEnableArenas; + } + if (other.ObjcClassPrefix.Length != 0) { + ObjcClassPrefix = other.ObjcClassPrefix; + } + if (other.CsharpNamespace.Length != 0) { + CsharpNamespace = other.CsharpNamespace; + } + uninterpretedOption_.Add(other.uninterpretedOption_); + } + + public void MergeFrom(pb::CodedInputStream input) { + uint tag; + while ((tag = input.ReadTag()) != 0) { + switch(tag) { + default: + input.SkipLastField(); + break; + case 10: { + JavaPackage = input.ReadString(); + break; + } + 
case 66: { + JavaOuterClassname = input.ReadString(); + break; + } + case 72: { + optimizeFor_ = (global::Google.Protobuf.Reflection.FileOptions.Types.OptimizeMode) input.ReadEnum(); + break; + } + case 80: { + JavaMultipleFiles = input.ReadBool(); + break; + } + case 90: { + GoPackage = input.ReadString(); + break; + } + case 128: { + CcGenericServices = input.ReadBool(); + break; + } + case 136: { + JavaGenericServices = input.ReadBool(); + break; + } + case 144: { + PyGenericServices = input.ReadBool(); + break; + } + case 160: { + JavaGenerateEqualsAndHash = input.ReadBool(); + break; + } + case 184: { + Deprecated = input.ReadBool(); + break; + } + case 216: { + JavaStringCheckUtf8 = input.ReadBool(); + break; + } + case 248: { + CcEnableArenas = input.ReadBool(); + break; + } + case 290: { + ObjcClassPrefix = input.ReadString(); + break; + } + case 298: { + CsharpNamespace = input.ReadString(); + break; + } + case 7994: { + uninterpretedOption_.AddEntriesFrom(input, _repeated_uninterpretedOption_codec); + break; + } + } + } + } + + #region Nested types + /// Container for nested types declared in the FileOptions message type. + [global::System.Diagnostics.DebuggerNonUserCodeAttribute()] + public static partial class Types { + /// + /// Generated classes can be optimized for speed or code size. + /// + internal enum OptimizeMode { + /// + /// Generate complete code for parsing, serialization, + /// + [pbr::OriginalName("SPEED")] Speed = 1, + /// + /// etc. + /// + [pbr::OriginalName("CODE_SIZE")] CodeSize = 2, + /// + /// Generate code using MessageLite and the lite runtime. + /// + [pbr::OriginalName("LITE_RUNTIME")] LiteRuntime = 3, + } + + } + #endregion + + } + + [global::System.Diagnostics.DebuggerNonUserCodeAttribute()] + internal sealed partial class MessageOptions : pb::IMessage { + private static readonly pb::MessageParser _parser = new pb::MessageParser(() => new MessageOptions()); + public static pb::MessageParser Parser { get { return _parser; } } + + public static pbr::MessageDescriptor Descriptor { + get { return global::Google.Protobuf.Reflection.DescriptorReflection.Descriptor.MessageTypes[10]; } + } + + pbr::MessageDescriptor pb::IMessage.Descriptor { + get { return Descriptor; } + } + + public MessageOptions() { + OnConstruction(); + } + + partial void OnConstruction(); + + public MessageOptions(MessageOptions other) : this() { + messageSetWireFormat_ = other.messageSetWireFormat_; + noStandardDescriptorAccessor_ = other.noStandardDescriptorAccessor_; + deprecated_ = other.deprecated_; + mapEntry_ = other.mapEntry_; + uninterpretedOption_ = other.uninterpretedOption_.Clone(); + } + + public MessageOptions Clone() { + return new MessageOptions(this); + } + + /// Field number for the "message_set_wire_format" field. + public const int MessageSetWireFormatFieldNumber = 1; + private bool messageSetWireFormat_; + /// + /// Set true to use the old proto1 MessageSet wire format for extensions. + /// This is provided for backwards-compatibility with the MessageSet wire + /// format. You should not use this for any other reason: It's less + /// efficient, has fewer features, and is more complicated. + /// + /// The message must be defined exactly as follows: + /// message Foo { + /// option message_set_wire_format = true; + /// extensions 4 to max; + /// } + /// Note that the message cannot have any defined fields; MessageSets only + /// have extensions. + /// + /// All extensions of your type must be singular messages; e.g. 
they cannot + /// be int32s, enums, or repeated messages. + /// + /// Because this is an option, the above two restrictions are not enforced by + /// the protocol compiler. + /// + public bool MessageSetWireFormat { + get { return messageSetWireFormat_; } + set { + messageSetWireFormat_ = value; + } + } + + /// Field number for the "no_standard_descriptor_accessor" field. + public const int NoStandardDescriptorAccessorFieldNumber = 2; + private bool noStandardDescriptorAccessor_; + /// + /// Disables the generation of the standard "descriptor()" accessor, which can + /// conflict with a field of the same name. This is meant to make migration + /// from proto1 easier; new code should avoid fields named "descriptor". + /// + public bool NoStandardDescriptorAccessor { + get { return noStandardDescriptorAccessor_; } + set { + noStandardDescriptorAccessor_ = value; + } + } + + /// Field number for the "deprecated" field. + public const int DeprecatedFieldNumber = 3; + private bool deprecated_; + /// + /// Is this message deprecated? + /// Depending on the target platform, this can emit Deprecated annotations + /// for the message, or it will be completely ignored; in the very least, + /// this is a formalization for deprecating messages. + /// + public bool Deprecated { + get { return deprecated_; } + set { + deprecated_ = value; + } + } + + /// Field number for the "map_entry" field. + public const int MapEntryFieldNumber = 7; + private bool mapEntry_; + /// + /// Whether the message is an automatically generated map entry type for the + /// maps field. + /// + /// For maps fields: + /// map<KeyType, ValueType> map_field = 1; + /// The parsed descriptor looks like: + /// message MapFieldEntry { + /// option map_entry = true; + /// optional KeyType key = 1; + /// optional ValueType value = 2; + /// } + /// repeated MapFieldEntry map_field = 1; + /// + /// Implementations may choose not to generate the map_entry=true message, but + /// use a native map in the target language to hold the keys and values. + /// The reflection APIs in such implementions still need to work as + /// if the field is a repeated message field. + /// + /// NOTE: Do not set the option in .proto files. Always use the maps syntax + /// instead. The option should only be implicitly set by the proto compiler + /// parser. + /// + public bool MapEntry { + get { return mapEntry_; } + set { + mapEntry_ = value; + } + } + + /// Field number for the "uninterpreted_option" field. + public const int UninterpretedOptionFieldNumber = 999; + private static readonly pb::FieldCodec _repeated_uninterpretedOption_codec + = pb::FieldCodec.ForMessage(7994, global::Google.Protobuf.Reflection.UninterpretedOption.Parser); + private readonly pbc::RepeatedField uninterpretedOption_ = new pbc::RepeatedField(); + /// + /// The parser stores options it doesn't recognize here. See above. 
+ /// + public pbc::RepeatedField UninterpretedOption { + get { return uninterpretedOption_; } + } + + public override bool Equals(object other) { + return Equals(other as MessageOptions); + } + + public bool Equals(MessageOptions other) { + if (ReferenceEquals(other, null)) { + return false; + } + if (ReferenceEquals(other, this)) { + return true; + } + if (MessageSetWireFormat != other.MessageSetWireFormat) return false; + if (NoStandardDescriptorAccessor != other.NoStandardDescriptorAccessor) return false; + if (Deprecated != other.Deprecated) return false; + if (MapEntry != other.MapEntry) return false; + if(!uninterpretedOption_.Equals(other.uninterpretedOption_)) return false; + return true; + } + + public override int GetHashCode() { + int hash = 1; + if (MessageSetWireFormat != false) hash ^= MessageSetWireFormat.GetHashCode(); + if (NoStandardDescriptorAccessor != false) hash ^= NoStandardDescriptorAccessor.GetHashCode(); + if (Deprecated != false) hash ^= Deprecated.GetHashCode(); + if (MapEntry != false) hash ^= MapEntry.GetHashCode(); + hash ^= uninterpretedOption_.GetHashCode(); + return hash; + } + + public override string ToString() { + return pb::JsonFormatter.ToDiagnosticString(this); + } + + public void WriteTo(pb::CodedOutputStream output) { + if (MessageSetWireFormat != false) { + output.WriteRawTag(8); + output.WriteBool(MessageSetWireFormat); + } + if (NoStandardDescriptorAccessor != false) { + output.WriteRawTag(16); + output.WriteBool(NoStandardDescriptorAccessor); + } + if (Deprecated != false) { + output.WriteRawTag(24); + output.WriteBool(Deprecated); + } + if (MapEntry != false) { + output.WriteRawTag(56); + output.WriteBool(MapEntry); + } + uninterpretedOption_.WriteTo(output, _repeated_uninterpretedOption_codec); + } + + public int CalculateSize() { + int size = 0; + if (MessageSetWireFormat != false) { + size += 1 + 1; + } + if (NoStandardDescriptorAccessor != false) { + size += 1 + 1; + } + if (Deprecated != false) { + size += 1 + 1; + } + if (MapEntry != false) { + size += 1 + 1; + } + size += uninterpretedOption_.CalculateSize(_repeated_uninterpretedOption_codec); + return size; + } + + public void MergeFrom(MessageOptions other) { + if (other == null) { + return; + } + if (other.MessageSetWireFormat != false) { + MessageSetWireFormat = other.MessageSetWireFormat; + } + if (other.NoStandardDescriptorAccessor != false) { + NoStandardDescriptorAccessor = other.NoStandardDescriptorAccessor; + } + if (other.Deprecated != false) { + Deprecated = other.Deprecated; + } + if (other.MapEntry != false) { + MapEntry = other.MapEntry; + } + uninterpretedOption_.Add(other.uninterpretedOption_); + } + + public void MergeFrom(pb::CodedInputStream input) { + uint tag; + while ((tag = input.ReadTag()) != 0) { + switch(tag) { + default: + input.SkipLastField(); + break; + case 8: { + MessageSetWireFormat = input.ReadBool(); + break; + } + case 16: { + NoStandardDescriptorAccessor = input.ReadBool(); + break; + } + case 24: { + Deprecated = input.ReadBool(); + break; + } + case 56: { + MapEntry = input.ReadBool(); + break; + } + case 7994: { + uninterpretedOption_.AddEntriesFrom(input, _repeated_uninterpretedOption_codec); + break; + } + } + } + } + + } + + [global::System.Diagnostics.DebuggerNonUserCodeAttribute()] + internal sealed partial class FieldOptions : pb::IMessage { + private static readonly pb::MessageParser _parser = new pb::MessageParser(() => new FieldOptions()); + public static pb::MessageParser Parser { get { return _parser; } } + + public static 
pbr::MessageDescriptor Descriptor {
+      get { return global::Google.Protobuf.Reflection.DescriptorReflection.Descriptor.MessageTypes[11]; }
+    }
+
+    pbr::MessageDescriptor pb::IMessage.Descriptor {
+      get { return Descriptor; }
+    }
+
+    public FieldOptions() {
+      OnConstruction();
+    }
+
+    partial void OnConstruction();
+
+    public FieldOptions(FieldOptions other) : this() {
+      ctype_ = other.ctype_;
+      packed_ = other.packed_;
+      jstype_ = other.jstype_;
+      lazy_ = other.lazy_;
+      deprecated_ = other.deprecated_;
+      weak_ = other.weak_;
+      uninterpretedOption_ = other.uninterpretedOption_.Clone();
+    }
+
+    public FieldOptions Clone() {
+      return new FieldOptions(this);
+    }
+
+    /// Field number for the "ctype" field.
+    public const int CtypeFieldNumber = 1;
+    private global::Google.Protobuf.Reflection.FieldOptions.Types.CType ctype_ = 0;
+    ///
+    /// The ctype option instructs the C++ code generator to use a different
+    /// representation of the field than it normally would. See the specific
+    /// options below. This option is not yet implemented in the open source
+    /// release -- sorry, we'll try to include it in a future version!
+    ///
+    public global::Google.Protobuf.Reflection.FieldOptions.Types.CType Ctype {
+      get { return ctype_; }
+      set {
+        ctype_ = value;
+      }
+    }
+
+    /// Field number for the "packed" field.
+    public const int PackedFieldNumber = 2;
+    private bool packed_;
+    ///
+    /// The packed option can be enabled for repeated primitive fields to enable
+    /// a more efficient representation on the wire. Rather than repeatedly
+    /// writing the tag and type for each element, the entire array is encoded as
+    /// a single length-delimited blob. In proto3, only explicitly setting it to
+    /// false will avoid using packed encoding.
+    ///
+    public bool Packed {
+      get { return packed_; }
+      set {
+        packed_ = value;
+      }
+    }
+
+    /// Field number for the "jstype" field.
+    public const int JstypeFieldNumber = 6;
+    private global::Google.Protobuf.Reflection.FieldOptions.Types.JSType jstype_ = 0;
+    ///
+    /// The jstype option determines the JavaScript type used for values of the
+    /// field. The option is permitted only for 64 bit integral and fixed types
+    /// (int64, uint64, sint64, fixed64, sfixed64). By default these types are
+    /// represented as JavaScript strings. This avoids loss of precision that can
+    /// happen when a large value is converted to a floating point JavaScript
+    /// number. Specifying JS_NUMBER for the jstype causes the generated
+    /// JavaScript code to use the JavaScript "number" type instead of strings.
+    /// This option is an enum to permit additional types to be added,
+    /// e.g. goog.math.Integer.
+    ///
+    public global::Google.Protobuf.Reflection.FieldOptions.Types.JSType Jstype {
+      get { return jstype_; }
+      set {
+        jstype_ = value;
+      }
+    }
+
+    /// Field number for the "lazy" field.
+    public const int LazyFieldNumber = 5;
+    private bool lazy_;
+    ///
+    /// Should this field be parsed lazily? Lazy applies only to message-type
+    /// fields. It means that when the outer message is initially parsed, the
+    /// inner message's contents will not be parsed but instead stored in encoded
+    /// form. The inner message will actually be parsed when it is first accessed.
+    ///
+    /// This is only a hint. Implementations are free to choose whether to use
+    /// eager or lazy parsing regardless of the value of this option. However,
+    /// setting this option true suggests that the protocol author believes that
+    /// using lazy parsing on this field is worth the additional bookkeeping
+    /// overhead typically needed to implement it.
+    ///
+    /// This option does not affect the public interface of any generated code;
+    /// all method signatures remain the same. Furthermore, thread-safety of the
+    /// interface is not affected by this option; const methods remain safe to
+    /// call from multiple threads concurrently, while non-const methods continue
+    /// to require exclusive access.
+    ///
+    /// Note that implementations may choose not to check required fields within
+    /// a lazy sub-message. That is, calling IsInitialized() on the outer message
+    /// may return true even if the inner message has missing required fields.
+    /// This is necessary because otherwise the inner message would have to be
+    /// parsed in order to perform the check, defeating the purpose of lazy
+    /// parsing. An implementation which chooses not to check required fields
+    /// must be consistent about it. That is, for any particular sub-message, the
+    /// implementation must either *always* check its required fields, or *never*
+    /// check its required fields, regardless of whether or not the message has
+    /// been parsed.
+    ///
+    public bool Lazy {
+      get { return lazy_; }
+      set {
+        lazy_ = value;
+      }
+    }
+
+    /// Field number for the "deprecated" field.
+    public const int DeprecatedFieldNumber = 3;
+    private bool deprecated_;
+    ///
+    /// Is this field deprecated?
+    /// Depending on the target platform, this can emit Deprecated annotations
+    /// for accessors, or it will be completely ignored; in the very least, this
+    /// is a formalization for deprecating fields.
+    ///
+    public bool Deprecated {
+      get { return deprecated_; }
+      set {
+        deprecated_ = value;
+      }
+    }
+
+    /// Field number for the "weak" field.
+    public const int WeakFieldNumber = 10;
+    private bool weak_;
+    ///
+    /// For Google-internal migration only. Do not use.
+    ///
+    public bool Weak {
+      get { return weak_; }
+      set {
+        weak_ = value;
+      }
+    }
+
+    /// Field number for the "uninterpreted_option" field.
+    public const int UninterpretedOptionFieldNumber = 999;
+    private static readonly pb::FieldCodec<global::Google.Protobuf.Reflection.UninterpretedOption> _repeated_uninterpretedOption_codec
+        = pb::FieldCodec.ForMessage(7994, global::Google.Protobuf.Reflection.UninterpretedOption.Parser);
+    private readonly pbc::RepeatedField<global::Google.Protobuf.Reflection.UninterpretedOption> uninterpretedOption_ = new pbc::RepeatedField<global::Google.Protobuf.Reflection.UninterpretedOption>();
+    ///
+    /// The parser stores options it doesn't recognize here. See above.
+ /// + public pbc::RepeatedField UninterpretedOption { + get { return uninterpretedOption_; } + } + + public override bool Equals(object other) { + return Equals(other as FieldOptions); + } + + public bool Equals(FieldOptions other) { + if (ReferenceEquals(other, null)) { + return false; + } + if (ReferenceEquals(other, this)) { + return true; + } + if (Ctype != other.Ctype) return false; + if (Packed != other.Packed) return false; + if (Jstype != other.Jstype) return false; + if (Lazy != other.Lazy) return false; + if (Deprecated != other.Deprecated) return false; + if (Weak != other.Weak) return false; + if(!uninterpretedOption_.Equals(other.uninterpretedOption_)) return false; + return true; + } + + public override int GetHashCode() { + int hash = 1; + if (Ctype != 0) hash ^= Ctype.GetHashCode(); + if (Packed != false) hash ^= Packed.GetHashCode(); + if (Jstype != 0) hash ^= Jstype.GetHashCode(); + if (Lazy != false) hash ^= Lazy.GetHashCode(); + if (Deprecated != false) hash ^= Deprecated.GetHashCode(); + if (Weak != false) hash ^= Weak.GetHashCode(); + hash ^= uninterpretedOption_.GetHashCode(); + return hash; + } + + public override string ToString() { + return pb::JsonFormatter.ToDiagnosticString(this); + } + + public void WriteTo(pb::CodedOutputStream output) { + if (Ctype != 0) { + output.WriteRawTag(8); + output.WriteEnum((int) Ctype); + } + if (Packed != false) { + output.WriteRawTag(16); + output.WriteBool(Packed); + } + if (Deprecated != false) { + output.WriteRawTag(24); + output.WriteBool(Deprecated); + } + if (Lazy != false) { + output.WriteRawTag(40); + output.WriteBool(Lazy); + } + if (Jstype != 0) { + output.WriteRawTag(48); + output.WriteEnum((int) Jstype); + } + if (Weak != false) { + output.WriteRawTag(80); + output.WriteBool(Weak); + } + uninterpretedOption_.WriteTo(output, _repeated_uninterpretedOption_codec); + } + + public int CalculateSize() { + int size = 0; + if (Ctype != 0) { + size += 1 + pb::CodedOutputStream.ComputeEnumSize((int) Ctype); + } + if (Packed != false) { + size += 1 + 1; + } + if (Jstype != 0) { + size += 1 + pb::CodedOutputStream.ComputeEnumSize((int) Jstype); + } + if (Lazy != false) { + size += 1 + 1; + } + if (Deprecated != false) { + size += 1 + 1; + } + if (Weak != false) { + size += 1 + 1; + } + size += uninterpretedOption_.CalculateSize(_repeated_uninterpretedOption_codec); + return size; + } + + public void MergeFrom(FieldOptions other) { + if (other == null) { + return; + } + if (other.Ctype != 0) { + Ctype = other.Ctype; + } + if (other.Packed != false) { + Packed = other.Packed; + } + if (other.Jstype != 0) { + Jstype = other.Jstype; + } + if (other.Lazy != false) { + Lazy = other.Lazy; + } + if (other.Deprecated != false) { + Deprecated = other.Deprecated; + } + if (other.Weak != false) { + Weak = other.Weak; + } + uninterpretedOption_.Add(other.uninterpretedOption_); + } + + public void MergeFrom(pb::CodedInputStream input) { + uint tag; + while ((tag = input.ReadTag()) != 0) { + switch(tag) { + default: + input.SkipLastField(); + break; + case 8: { + ctype_ = (global::Google.Protobuf.Reflection.FieldOptions.Types.CType) input.ReadEnum(); + break; + } + case 16: { + Packed = input.ReadBool(); + break; + } + case 24: { + Deprecated = input.ReadBool(); + break; + } + case 40: { + Lazy = input.ReadBool(); + break; + } + case 48: { + jstype_ = (global::Google.Protobuf.Reflection.FieldOptions.Types.JSType) input.ReadEnum(); + break; + } + case 80: { + Weak = input.ReadBool(); + break; + } + case 7994: { + 
uninterpretedOption_.AddEntriesFrom(input, _repeated_uninterpretedOption_codec); + break; + } + } + } + } + + #region Nested types + /// Container for nested types declared in the FieldOptions message type. + [global::System.Diagnostics.DebuggerNonUserCodeAttribute()] + public static partial class Types { + internal enum CType { + /// + /// Default mode. + /// + [pbr::OriginalName("STRING")] String = 0, + [pbr::OriginalName("CORD")] Cord = 1, + [pbr::OriginalName("STRING_PIECE")] StringPiece = 2, + } + + internal enum JSType { + /// + /// Use the default type. + /// + [pbr::OriginalName("JS_NORMAL")] JsNormal = 0, + /// + /// Use JavaScript strings. + /// + [pbr::OriginalName("JS_STRING")] JsString = 1, + /// + /// Use JavaScript numbers. + /// + [pbr::OriginalName("JS_NUMBER")] JsNumber = 2, + } + + } + #endregion + + } + + [global::System.Diagnostics.DebuggerNonUserCodeAttribute()] + internal sealed partial class EnumOptions : pb::IMessage { + private static readonly pb::MessageParser _parser = new pb::MessageParser(() => new EnumOptions()); + public static pb::MessageParser Parser { get { return _parser; } } + + public static pbr::MessageDescriptor Descriptor { + get { return global::Google.Protobuf.Reflection.DescriptorReflection.Descriptor.MessageTypes[12]; } + } + + pbr::MessageDescriptor pb::IMessage.Descriptor { + get { return Descriptor; } + } + + public EnumOptions() { + OnConstruction(); + } + + partial void OnConstruction(); + + public EnumOptions(EnumOptions other) : this() { + allowAlias_ = other.allowAlias_; + deprecated_ = other.deprecated_; + uninterpretedOption_ = other.uninterpretedOption_.Clone(); + } + + public EnumOptions Clone() { + return new EnumOptions(this); + } + + /// Field number for the "allow_alias" field. + public const int AllowAliasFieldNumber = 2; + private bool allowAlias_; + /// + /// Set this option to true to allow mapping different tag names to the same + /// value. + /// + public bool AllowAlias { + get { return allowAlias_; } + set { + allowAlias_ = value; + } + } + + /// Field number for the "deprecated" field. + public const int DeprecatedFieldNumber = 3; + private bool deprecated_; + /// + /// Is this enum deprecated? + /// Depending on the target platform, this can emit Deprecated annotations + /// for the enum, or it will be completely ignored; in the very least, this + /// is a formalization for deprecating enums. + /// + public bool Deprecated { + get { return deprecated_; } + set { + deprecated_ = value; + } + } + + /// Field number for the "uninterpreted_option" field. + public const int UninterpretedOptionFieldNumber = 999; + private static readonly pb::FieldCodec _repeated_uninterpretedOption_codec + = pb::FieldCodec.ForMessage(7994, global::Google.Protobuf.Reflection.UninterpretedOption.Parser); + private readonly pbc::RepeatedField uninterpretedOption_ = new pbc::RepeatedField(); + /// + /// The parser stores options it doesn't recognize here. See above. 
+ /// + public pbc::RepeatedField UninterpretedOption { + get { return uninterpretedOption_; } + } + + public override bool Equals(object other) { + return Equals(other as EnumOptions); + } + + public bool Equals(EnumOptions other) { + if (ReferenceEquals(other, null)) { + return false; + } + if (ReferenceEquals(other, this)) { + return true; + } + if (AllowAlias != other.AllowAlias) return false; + if (Deprecated != other.Deprecated) return false; + if(!uninterpretedOption_.Equals(other.uninterpretedOption_)) return false; + return true; + } + + public override int GetHashCode() { + int hash = 1; + if (AllowAlias != false) hash ^= AllowAlias.GetHashCode(); + if (Deprecated != false) hash ^= Deprecated.GetHashCode(); + hash ^= uninterpretedOption_.GetHashCode(); + return hash; + } + + public override string ToString() { + return pb::JsonFormatter.ToDiagnosticString(this); + } + + public void WriteTo(pb::CodedOutputStream output) { + if (AllowAlias != false) { + output.WriteRawTag(16); + output.WriteBool(AllowAlias); + } + if (Deprecated != false) { + output.WriteRawTag(24); + output.WriteBool(Deprecated); + } + uninterpretedOption_.WriteTo(output, _repeated_uninterpretedOption_codec); + } + + public int CalculateSize() { + int size = 0; + if (AllowAlias != false) { + size += 1 + 1; + } + if (Deprecated != false) { + size += 1 + 1; + } + size += uninterpretedOption_.CalculateSize(_repeated_uninterpretedOption_codec); + return size; + } + + public void MergeFrom(EnumOptions other) { + if (other == null) { + return; + } + if (other.AllowAlias != false) { + AllowAlias = other.AllowAlias; + } + if (other.Deprecated != false) { + Deprecated = other.Deprecated; + } + uninterpretedOption_.Add(other.uninterpretedOption_); + } + + public void MergeFrom(pb::CodedInputStream input) { + uint tag; + while ((tag = input.ReadTag()) != 0) { + switch(tag) { + default: + input.SkipLastField(); + break; + case 16: { + AllowAlias = input.ReadBool(); + break; + } + case 24: { + Deprecated = input.ReadBool(); + break; + } + case 7994: { + uninterpretedOption_.AddEntriesFrom(input, _repeated_uninterpretedOption_codec); + break; + } + } + } + } + + } + + [global::System.Diagnostics.DebuggerNonUserCodeAttribute()] + internal sealed partial class EnumValueOptions : pb::IMessage { + private static readonly pb::MessageParser _parser = new pb::MessageParser(() => new EnumValueOptions()); + public static pb::MessageParser Parser { get { return _parser; } } + + public static pbr::MessageDescriptor Descriptor { + get { return global::Google.Protobuf.Reflection.DescriptorReflection.Descriptor.MessageTypes[13]; } + } + + pbr::MessageDescriptor pb::IMessage.Descriptor { + get { return Descriptor; } + } + + public EnumValueOptions() { + OnConstruction(); + } + + partial void OnConstruction(); + + public EnumValueOptions(EnumValueOptions other) : this() { + deprecated_ = other.deprecated_; + uninterpretedOption_ = other.uninterpretedOption_.Clone(); + } + + public EnumValueOptions Clone() { + return new EnumValueOptions(this); + } + + /// Field number for the "deprecated" field. + public const int DeprecatedFieldNumber = 1; + private bool deprecated_; + /// + /// Is this enum value deprecated? + /// Depending on the target platform, this can emit Deprecated annotations + /// for the enum value, or it will be completely ignored; in the very least, + /// this is a formalization for deprecating enum values. 
+ /// + public bool Deprecated { + get { return deprecated_; } + set { + deprecated_ = value; + } + } + + /// Field number for the "uninterpreted_option" field. + public const int UninterpretedOptionFieldNumber = 999; + private static readonly pb::FieldCodec _repeated_uninterpretedOption_codec + = pb::FieldCodec.ForMessage(7994, global::Google.Protobuf.Reflection.UninterpretedOption.Parser); + private readonly pbc::RepeatedField uninterpretedOption_ = new pbc::RepeatedField(); + /// + /// The parser stores options it doesn't recognize here. See above. + /// + public pbc::RepeatedField UninterpretedOption { + get { return uninterpretedOption_; } + } + + public override bool Equals(object other) { + return Equals(other as EnumValueOptions); + } + + public bool Equals(EnumValueOptions other) { + if (ReferenceEquals(other, null)) { + return false; + } + if (ReferenceEquals(other, this)) { + return true; + } + if (Deprecated != other.Deprecated) return false; + if(!uninterpretedOption_.Equals(other.uninterpretedOption_)) return false; + return true; + } + + public override int GetHashCode() { + int hash = 1; + if (Deprecated != false) hash ^= Deprecated.GetHashCode(); + hash ^= uninterpretedOption_.GetHashCode(); + return hash; + } + + public override string ToString() { + return pb::JsonFormatter.ToDiagnosticString(this); + } + + public void WriteTo(pb::CodedOutputStream output) { + if (Deprecated != false) { + output.WriteRawTag(8); + output.WriteBool(Deprecated); + } + uninterpretedOption_.WriteTo(output, _repeated_uninterpretedOption_codec); + } + + public int CalculateSize() { + int size = 0; + if (Deprecated != false) { + size += 1 + 1; + } + size += uninterpretedOption_.CalculateSize(_repeated_uninterpretedOption_codec); + return size; + } + + public void MergeFrom(EnumValueOptions other) { + if (other == null) { + return; + } + if (other.Deprecated != false) { + Deprecated = other.Deprecated; + } + uninterpretedOption_.Add(other.uninterpretedOption_); + } + + public void MergeFrom(pb::CodedInputStream input) { + uint tag; + while ((tag = input.ReadTag()) != 0) { + switch(tag) { + default: + input.SkipLastField(); + break; + case 8: { + Deprecated = input.ReadBool(); + break; + } + case 7994: { + uninterpretedOption_.AddEntriesFrom(input, _repeated_uninterpretedOption_codec); + break; + } + } + } + } + + } + + [global::System.Diagnostics.DebuggerNonUserCodeAttribute()] + internal sealed partial class ServiceOptions : pb::IMessage { + private static readonly pb::MessageParser _parser = new pb::MessageParser(() => new ServiceOptions()); + public static pb::MessageParser Parser { get { return _parser; } } + + public static pbr::MessageDescriptor Descriptor { + get { return global::Google.Protobuf.Reflection.DescriptorReflection.Descriptor.MessageTypes[14]; } + } + + pbr::MessageDescriptor pb::IMessage.Descriptor { + get { return Descriptor; } + } + + public ServiceOptions() { + OnConstruction(); + } + + partial void OnConstruction(); + + public ServiceOptions(ServiceOptions other) : this() { + deprecated_ = other.deprecated_; + uninterpretedOption_ = other.uninterpretedOption_.Clone(); + } + + public ServiceOptions Clone() { + return new ServiceOptions(this); + } + + /// Field number for the "deprecated" field. + public const int DeprecatedFieldNumber = 33; + private bool deprecated_; + /// + /// Is this service deprecated? 
+ /// Depending on the target platform, this can emit Deprecated annotations + /// for the service, or it will be completely ignored; in the very least, + /// this is a formalization for deprecating services. + /// + public bool Deprecated { + get { return deprecated_; } + set { + deprecated_ = value; + } + } + + /// Field number for the "uninterpreted_option" field. + public const int UninterpretedOptionFieldNumber = 999; + private static readonly pb::FieldCodec _repeated_uninterpretedOption_codec + = pb::FieldCodec.ForMessage(7994, global::Google.Protobuf.Reflection.UninterpretedOption.Parser); + private readonly pbc::RepeatedField uninterpretedOption_ = new pbc::RepeatedField(); + /// + /// The parser stores options it doesn't recognize here. See above. + /// + public pbc::RepeatedField UninterpretedOption { + get { return uninterpretedOption_; } + } + + public override bool Equals(object other) { + return Equals(other as ServiceOptions); + } + + public bool Equals(ServiceOptions other) { + if (ReferenceEquals(other, null)) { + return false; + } + if (ReferenceEquals(other, this)) { + return true; + } + if (Deprecated != other.Deprecated) return false; + if(!uninterpretedOption_.Equals(other.uninterpretedOption_)) return false; + return true; + } + + public override int GetHashCode() { + int hash = 1; + if (Deprecated != false) hash ^= Deprecated.GetHashCode(); + hash ^= uninterpretedOption_.GetHashCode(); + return hash; + } + + public override string ToString() { + return pb::JsonFormatter.ToDiagnosticString(this); + } + + public void WriteTo(pb::CodedOutputStream output) { + if (Deprecated != false) { + output.WriteRawTag(136, 2); + output.WriteBool(Deprecated); + } + uninterpretedOption_.WriteTo(output, _repeated_uninterpretedOption_codec); + } + + public int CalculateSize() { + int size = 0; + if (Deprecated != false) { + size += 2 + 1; + } + size += uninterpretedOption_.CalculateSize(_repeated_uninterpretedOption_codec); + return size; + } + + public void MergeFrom(ServiceOptions other) { + if (other == null) { + return; + } + if (other.Deprecated != false) { + Deprecated = other.Deprecated; + } + uninterpretedOption_.Add(other.uninterpretedOption_); + } + + public void MergeFrom(pb::CodedInputStream input) { + uint tag; + while ((tag = input.ReadTag()) != 0) { + switch(tag) { + default: + input.SkipLastField(); + break; + case 264: { + Deprecated = input.ReadBool(); + break; + } + case 7994: { + uninterpretedOption_.AddEntriesFrom(input, _repeated_uninterpretedOption_codec); + break; + } + } + } + } + + } + + [global::System.Diagnostics.DebuggerNonUserCodeAttribute()] + internal sealed partial class MethodOptions : pb::IMessage { + private static readonly pb::MessageParser _parser = new pb::MessageParser(() => new MethodOptions()); + public static pb::MessageParser Parser { get { return _parser; } } + + public static pbr::MessageDescriptor Descriptor { + get { return global::Google.Protobuf.Reflection.DescriptorReflection.Descriptor.MessageTypes[15]; } + } + + pbr::MessageDescriptor pb::IMessage.Descriptor { + get { return Descriptor; } + } + + public MethodOptions() { + OnConstruction(); + } + + partial void OnConstruction(); + + public MethodOptions(MethodOptions other) : this() { + deprecated_ = other.deprecated_; + uninterpretedOption_ = other.uninterpretedOption_.Clone(); + } + + public MethodOptions Clone() { + return new MethodOptions(this); + } + + /// Field number for the "deprecated" field. 
+ public const int DeprecatedFieldNumber = 33; + private bool deprecated_; + /// + /// Is this method deprecated? + /// Depending on the target platform, this can emit Deprecated annotations + /// for the method, or it will be completely ignored; in the very least, + /// this is a formalization for deprecating methods. + /// + public bool Deprecated { + get { return deprecated_; } + set { + deprecated_ = value; + } + } + + /// Field number for the "uninterpreted_option" field. + public const int UninterpretedOptionFieldNumber = 999; + private static readonly pb::FieldCodec _repeated_uninterpretedOption_codec + = pb::FieldCodec.ForMessage(7994, global::Google.Protobuf.Reflection.UninterpretedOption.Parser); + private readonly pbc::RepeatedField uninterpretedOption_ = new pbc::RepeatedField(); + /// + /// The parser stores options it doesn't recognize here. See above. + /// + public pbc::RepeatedField UninterpretedOption { + get { return uninterpretedOption_; } + } + + public override bool Equals(object other) { + return Equals(other as MethodOptions); + } + + public bool Equals(MethodOptions other) { + if (ReferenceEquals(other, null)) { + return false; + } + if (ReferenceEquals(other, this)) { + return true; + } + if (Deprecated != other.Deprecated) return false; + if(!uninterpretedOption_.Equals(other.uninterpretedOption_)) return false; + return true; + } + + public override int GetHashCode() { + int hash = 1; + if (Deprecated != false) hash ^= Deprecated.GetHashCode(); + hash ^= uninterpretedOption_.GetHashCode(); + return hash; + } + + public override string ToString() { + return pb::JsonFormatter.ToDiagnosticString(this); + } + + public void WriteTo(pb::CodedOutputStream output) { + if (Deprecated != false) { + output.WriteRawTag(136, 2); + output.WriteBool(Deprecated); + } + uninterpretedOption_.WriteTo(output, _repeated_uninterpretedOption_codec); + } + + public int CalculateSize() { + int size = 0; + if (Deprecated != false) { + size += 2 + 1; + } + size += uninterpretedOption_.CalculateSize(_repeated_uninterpretedOption_codec); + return size; + } + + public void MergeFrom(MethodOptions other) { + if (other == null) { + return; + } + if (other.Deprecated != false) { + Deprecated = other.Deprecated; + } + uninterpretedOption_.Add(other.uninterpretedOption_); + } + + public void MergeFrom(pb::CodedInputStream input) { + uint tag; + while ((tag = input.ReadTag()) != 0) { + switch(tag) { + default: + input.SkipLastField(); + break; + case 264: { + Deprecated = input.ReadBool(); + break; + } + case 7994: { + uninterpretedOption_.AddEntriesFrom(input, _repeated_uninterpretedOption_codec); + break; + } + } + } + } + + } + + /// + /// A message representing a option the parser does not recognize. This only + /// appears in options protos created by the compiler::Parser class. + /// DescriptorPool resolves these when building Descriptor objects. Therefore, + /// options protos in descriptor objects (e.g. returned by Descriptor::options(), + /// or produced by Descriptor::CopyTo()) will never have UninterpretedOptions + /// in them. 
+ /// + [global::System.Diagnostics.DebuggerNonUserCodeAttribute()] + internal sealed partial class UninterpretedOption : pb::IMessage { + private static readonly pb::MessageParser _parser = new pb::MessageParser(() => new UninterpretedOption()); + public static pb::MessageParser Parser { get { return _parser; } } + + public static pbr::MessageDescriptor Descriptor { + get { return global::Google.Protobuf.Reflection.DescriptorReflection.Descriptor.MessageTypes[16]; } + } + + pbr::MessageDescriptor pb::IMessage.Descriptor { + get { return Descriptor; } + } + + public UninterpretedOption() { + OnConstruction(); + } + + partial void OnConstruction(); + + public UninterpretedOption(UninterpretedOption other) : this() { + name_ = other.name_.Clone(); + identifierValue_ = other.identifierValue_; + positiveIntValue_ = other.positiveIntValue_; + negativeIntValue_ = other.negativeIntValue_; + doubleValue_ = other.doubleValue_; + stringValue_ = other.stringValue_; + aggregateValue_ = other.aggregateValue_; + } + + public UninterpretedOption Clone() { + return new UninterpretedOption(this); + } + + /// Field number for the "name" field. + public const int NameFieldNumber = 2; + private static readonly pb::FieldCodec _repeated_name_codec + = pb::FieldCodec.ForMessage(18, global::Google.Protobuf.Reflection.UninterpretedOption.Types.NamePart.Parser); + private readonly pbc::RepeatedField name_ = new pbc::RepeatedField(); + public pbc::RepeatedField Name { + get { return name_; } + } + + /// Field number for the "identifier_value" field. + public const int IdentifierValueFieldNumber = 3; + private string identifierValue_ = ""; + /// + /// The value of the uninterpreted option, in whatever type the tokenizer + /// identified it as during parsing. Exactly one of these should be set. + /// + public string IdentifierValue { + get { return identifierValue_; } + set { + identifierValue_ = pb::ProtoPreconditions.CheckNotNull(value, "value"); + } + } + + /// Field number for the "positive_int_value" field. + public const int PositiveIntValueFieldNumber = 4; + private ulong positiveIntValue_; + public ulong PositiveIntValue { + get { return positiveIntValue_; } + set { + positiveIntValue_ = value; + } + } + + /// Field number for the "negative_int_value" field. + public const int NegativeIntValueFieldNumber = 5; + private long negativeIntValue_; + public long NegativeIntValue { + get { return negativeIntValue_; } + set { + negativeIntValue_ = value; + } + } + + /// Field number for the "double_value" field. + public const int DoubleValueFieldNumber = 6; + private double doubleValue_; + public double DoubleValue { + get { return doubleValue_; } + set { + doubleValue_ = value; + } + } + + /// Field number for the "string_value" field. + public const int StringValueFieldNumber = 7; + private pb::ByteString stringValue_ = pb::ByteString.Empty; + public pb::ByteString StringValue { + get { return stringValue_; } + set { + stringValue_ = pb::ProtoPreconditions.CheckNotNull(value, "value"); + } + } + + /// Field number for the "aggregate_value" field. 
+ public const int AggregateValueFieldNumber = 8; + private string aggregateValue_ = ""; + public string AggregateValue { + get { return aggregateValue_; } + set { + aggregateValue_ = pb::ProtoPreconditions.CheckNotNull(value, "value"); + } + } + + public override bool Equals(object other) { + return Equals(other as UninterpretedOption); + } + + public bool Equals(UninterpretedOption other) { + if (ReferenceEquals(other, null)) { + return false; + } + if (ReferenceEquals(other, this)) { + return true; + } + if(!name_.Equals(other.name_)) return false; + if (IdentifierValue != other.IdentifierValue) return false; + if (PositiveIntValue != other.PositiveIntValue) return false; + if (NegativeIntValue != other.NegativeIntValue) return false; + if (DoubleValue != other.DoubleValue) return false; + if (StringValue != other.StringValue) return false; + if (AggregateValue != other.AggregateValue) return false; + return true; + } + + public override int GetHashCode() { + int hash = 1; + hash ^= name_.GetHashCode(); + if (IdentifierValue.Length != 0) hash ^= IdentifierValue.GetHashCode(); + if (PositiveIntValue != 0UL) hash ^= PositiveIntValue.GetHashCode(); + if (NegativeIntValue != 0L) hash ^= NegativeIntValue.GetHashCode(); + if (DoubleValue != 0D) hash ^= DoubleValue.GetHashCode(); + if (StringValue.Length != 0) hash ^= StringValue.GetHashCode(); + if (AggregateValue.Length != 0) hash ^= AggregateValue.GetHashCode(); + return hash; + } + + public override string ToString() { + return pb::JsonFormatter.ToDiagnosticString(this); + } + + public void WriteTo(pb::CodedOutputStream output) { + name_.WriteTo(output, _repeated_name_codec); + if (IdentifierValue.Length != 0) { + output.WriteRawTag(26); + output.WriteString(IdentifierValue); + } + if (PositiveIntValue != 0UL) { + output.WriteRawTag(32); + output.WriteUInt64(PositiveIntValue); + } + if (NegativeIntValue != 0L) { + output.WriteRawTag(40); + output.WriteInt64(NegativeIntValue); + } + if (DoubleValue != 0D) { + output.WriteRawTag(49); + output.WriteDouble(DoubleValue); + } + if (StringValue.Length != 0) { + output.WriteRawTag(58); + output.WriteBytes(StringValue); + } + if (AggregateValue.Length != 0) { + output.WriteRawTag(66); + output.WriteString(AggregateValue); + } + } + + public int CalculateSize() { + int size = 0; + size += name_.CalculateSize(_repeated_name_codec); + if (IdentifierValue.Length != 0) { + size += 1 + pb::CodedOutputStream.ComputeStringSize(IdentifierValue); + } + if (PositiveIntValue != 0UL) { + size += 1 + pb::CodedOutputStream.ComputeUInt64Size(PositiveIntValue); + } + if (NegativeIntValue != 0L) { + size += 1 + pb::CodedOutputStream.ComputeInt64Size(NegativeIntValue); + } + if (DoubleValue != 0D) { + size += 1 + 8; + } + if (StringValue.Length != 0) { + size += 1 + pb::CodedOutputStream.ComputeBytesSize(StringValue); + } + if (AggregateValue.Length != 0) { + size += 1 + pb::CodedOutputStream.ComputeStringSize(AggregateValue); + } + return size; + } + + public void MergeFrom(UninterpretedOption other) { + if (other == null) { + return; + } + name_.Add(other.name_); + if (other.IdentifierValue.Length != 0) { + IdentifierValue = other.IdentifierValue; + } + if (other.PositiveIntValue != 0UL) { + PositiveIntValue = other.PositiveIntValue; + } + if (other.NegativeIntValue != 0L) { + NegativeIntValue = other.NegativeIntValue; + } + if (other.DoubleValue != 0D) { + DoubleValue = other.DoubleValue; + } + if (other.StringValue.Length != 0) { + StringValue = other.StringValue; + } + if (other.AggregateValue.Length != 0) { + 
AggregateValue = other.AggregateValue; + } + } + + public void MergeFrom(pb::CodedInputStream input) { + uint tag; + while ((tag = input.ReadTag()) != 0) { + switch(tag) { + default: + input.SkipLastField(); + break; + case 18: { + name_.AddEntriesFrom(input, _repeated_name_codec); + break; + } + case 26: { + IdentifierValue = input.ReadString(); + break; + } + case 32: { + PositiveIntValue = input.ReadUInt64(); + break; + } + case 40: { + NegativeIntValue = input.ReadInt64(); + break; + } + case 49: { + DoubleValue = input.ReadDouble(); + break; + } + case 58: { + StringValue = input.ReadBytes(); + break; + } + case 66: { + AggregateValue = input.ReadString(); + break; + } + } + } + } + + #region Nested types + /// Container for nested types declared in the UninterpretedOption message type. + [global::System.Diagnostics.DebuggerNonUserCodeAttribute()] + public static partial class Types { + /// + /// The name of the uninterpreted option. Each string represents a segment in + /// a dot-separated name. is_extension is true iff a segment represents an + /// extension (denoted with parentheses in options specs in .proto files). + /// E.g.,{ ["foo", false], ["bar.baz", true], ["qux", false] } represents + /// "foo.(bar.baz).qux". + /// + [global::System.Diagnostics.DebuggerNonUserCodeAttribute()] + internal sealed partial class NamePart : pb::IMessage { + private static readonly pb::MessageParser _parser = new pb::MessageParser(() => new NamePart()); + public static pb::MessageParser Parser { get { return _parser; } } + + public static pbr::MessageDescriptor Descriptor { + get { return global::Google.Protobuf.Reflection.UninterpretedOption.Descriptor.NestedTypes[0]; } + } + + pbr::MessageDescriptor pb::IMessage.Descriptor { + get { return Descriptor; } + } + + public NamePart() { + OnConstruction(); + } + + partial void OnConstruction(); + + public NamePart(NamePart other) : this() { + namePart_ = other.namePart_; + isExtension_ = other.isExtension_; + } + + public NamePart Clone() { + return new NamePart(this); + } + + /// Field number for the "name_part" field. + public const int NamePart_FieldNumber = 1; + private string namePart_ = ""; + public string NamePart_ { + get { return namePart_; } + set { + namePart_ = pb::ProtoPreconditions.CheckNotNull(value, "value"); + } + } + + /// Field number for the "is_extension" field. 
+ public const int IsExtensionFieldNumber = 2; + private bool isExtension_; + public bool IsExtension { + get { return isExtension_; } + set { + isExtension_ = value; + } + } + + public override bool Equals(object other) { + return Equals(other as NamePart); + } + + public bool Equals(NamePart other) { + if (ReferenceEquals(other, null)) { + return false; + } + if (ReferenceEquals(other, this)) { + return true; + } + if (NamePart_ != other.NamePart_) return false; + if (IsExtension != other.IsExtension) return false; + return true; + } + + public override int GetHashCode() { + int hash = 1; + if (NamePart_.Length != 0) hash ^= NamePart_.GetHashCode(); + if (IsExtension != false) hash ^= IsExtension.GetHashCode(); + return hash; + } + + public override string ToString() { + return pb::JsonFormatter.ToDiagnosticString(this); + } + + public void WriteTo(pb::CodedOutputStream output) { + if (NamePart_.Length != 0) { + output.WriteRawTag(10); + output.WriteString(NamePart_); + } + if (IsExtension != false) { + output.WriteRawTag(16); + output.WriteBool(IsExtension); + } + } + + public int CalculateSize() { + int size = 0; + if (NamePart_.Length != 0) { + size += 1 + pb::CodedOutputStream.ComputeStringSize(NamePart_); + } + if (IsExtension != false) { + size += 1 + 1; + } + return size; + } + + public void MergeFrom(NamePart other) { + if (other == null) { + return; + } + if (other.NamePart_.Length != 0) { + NamePart_ = other.NamePart_; + } + if (other.IsExtension != false) { + IsExtension = other.IsExtension; + } + } + + public void MergeFrom(pb::CodedInputStream input) { + uint tag; + while ((tag = input.ReadTag()) != 0) { + switch(tag) { + default: + input.SkipLastField(); + break; + case 10: { + NamePart_ = input.ReadString(); + break; + } + case 16: { + IsExtension = input.ReadBool(); + break; + } + } + } + } + + } + + } + #endregion + + } + + /// + /// Encapsulates information about the original source file from which a + /// FileDescriptorProto was generated. + /// + [global::System.Diagnostics.DebuggerNonUserCodeAttribute()] + internal sealed partial class SourceCodeInfo : pb::IMessage { + private static readonly pb::MessageParser _parser = new pb::MessageParser(() => new SourceCodeInfo()); + public static pb::MessageParser Parser { get { return _parser; } } + + public static pbr::MessageDescriptor Descriptor { + get { return global::Google.Protobuf.Reflection.DescriptorReflection.Descriptor.MessageTypes[17]; } + } + + pbr::MessageDescriptor pb::IMessage.Descriptor { + get { return Descriptor; } + } + + public SourceCodeInfo() { + OnConstruction(); + } + + partial void OnConstruction(); + + public SourceCodeInfo(SourceCodeInfo other) : this() { + location_ = other.location_.Clone(); + } + + public SourceCodeInfo Clone() { + return new SourceCodeInfo(this); + } + + /// Field number for the "location" field. + public const int LocationFieldNumber = 1; + private static readonly pb::FieldCodec _repeated_location_codec + = pb::FieldCodec.ForMessage(10, global::Google.Protobuf.Reflection.SourceCodeInfo.Types.Location.Parser); + private readonly pbc::RepeatedField location_ = new pbc::RepeatedField(); + /// + /// A Location identifies a piece of source code in a .proto file which + /// corresponds to a particular definition. This information is intended + /// to be useful to IDEs, code indexers, documentation generators, and similar + /// tools. 
+ /// + /// For example, say we have a file like: + /// message Foo { + /// optional string foo = 1; + /// } + /// Let's look at just the field definition: + /// optional string foo = 1; + /// ^ ^^ ^^ ^ ^^^ + /// a bc de f ghi + /// We have the following locations: + /// span path represents + /// [a,i) [ 4, 0, 2, 0 ] The whole field definition. + /// [a,b) [ 4, 0, 2, 0, 4 ] The label (optional). + /// [c,d) [ 4, 0, 2, 0, 5 ] The type (string). + /// [e,f) [ 4, 0, 2, 0, 1 ] The name (foo). + /// [g,h) [ 4, 0, 2, 0, 3 ] The number (1). + /// + /// Notes: + /// - A location may refer to a repeated field itself (i.e. not to any + /// particular index within it). This is used whenever a set of elements are + /// logically enclosed in a single code segment. For example, an entire + /// extend block (possibly containing multiple extension definitions) will + /// have an outer location whose path refers to the "extensions" repeated + /// field without an index. + /// - Multiple locations may have the same path. This happens when a single + /// logical declaration is spread out across multiple places. The most + /// obvious example is the "extend" block again -- there may be multiple + /// extend blocks in the same scope, each of which will have the same path. + /// - A location's span is not always a subset of its parent's span. For + /// example, the "extendee" of an extension declaration appears at the + /// beginning of the "extend" block and is shared by all extensions within + /// the block. + /// - Just because a location's span is a subset of some other location's span + /// does not mean that it is a descendent. For example, a "group" defines + /// both a type and a field in a single declaration. Thus, the locations + /// corresponding to the type and field and their components will overlap. + /// - Code which tries to interpret locations should probably be designed to + /// ignore those that it doesn't understand, as more types of locations could + /// be recorded in the future. + /// + public pbc::RepeatedField Location { + get { return location_; } + } + + public override bool Equals(object other) { + return Equals(other as SourceCodeInfo); + } + + public bool Equals(SourceCodeInfo other) { + if (ReferenceEquals(other, null)) { + return false; + } + if (ReferenceEquals(other, this)) { + return true; + } + if(!location_.Equals(other.location_)) return false; + return true; + } + + public override int GetHashCode() { + int hash = 1; + hash ^= location_.GetHashCode(); + return hash; + } + + public override string ToString() { + return pb::JsonFormatter.ToDiagnosticString(this); + } + + public void WriteTo(pb::CodedOutputStream output) { + location_.WriteTo(output, _repeated_location_codec); + } + + public int CalculateSize() { + int size = 0; + size += location_.CalculateSize(_repeated_location_codec); + return size; + } + + public void MergeFrom(SourceCodeInfo other) { + if (other == null) { + return; + } + location_.Add(other.location_); + } + + public void MergeFrom(pb::CodedInputStream input) { + uint tag; + while ((tag = input.ReadTag()) != 0) { + switch(tag) { + default: + input.SkipLastField(); + break; + case 10: { + location_.AddEntriesFrom(input, _repeated_location_codec); + break; + } + } + } + } + + #region Nested types + /// Container for nested types declared in the SourceCodeInfo message type. 
+    [global::System.Diagnostics.DebuggerNonUserCodeAttribute()]
+    public static partial class Types {
+      [global::System.Diagnostics.DebuggerNonUserCodeAttribute()]
+      internal sealed partial class Location : pb::IMessage<Location> {
+        private static readonly pb::MessageParser<Location> _parser = new pb::MessageParser<Location>(() => new Location());
+        public static pb::MessageParser<Location> Parser { get { return _parser; } }
+
+        public static pbr::MessageDescriptor Descriptor {
+          get { return global::Google.Protobuf.Reflection.SourceCodeInfo.Descriptor.NestedTypes[0]; }
+        }
+
+        pbr::MessageDescriptor pb::IMessage.Descriptor {
+          get { return Descriptor; }
+        }
+
+        public Location() {
+          OnConstruction();
+        }
+
+        partial void OnConstruction();
+
+        public Location(Location other) : this() {
+          path_ = other.path_.Clone();
+          span_ = other.span_.Clone();
+          leadingComments_ = other.leadingComments_;
+          trailingComments_ = other.trailingComments_;
+          leadingDetachedComments_ = other.leadingDetachedComments_.Clone();
+        }
+
+        public Location Clone() {
+          return new Location(this);
+        }
+
+        /// Field number for the "path" field.
+        public const int PathFieldNumber = 1;
+        private static readonly pb::FieldCodec<int> _repeated_path_codec
+            = pb::FieldCodec.ForInt32(10);
+        private readonly pbc::RepeatedField<int> path_ = new pbc::RepeatedField<int>();
+        ///
+        /// Identifies which part of the FileDescriptorProto was defined at this
+        /// location.
+        ///
+        /// Each element is a field number or an index. They form a path from
+        /// the root FileDescriptorProto to the place where the definition occurs. For
+        /// example, this path:
+        ///   [ 4, 3, 2, 7, 1 ]
+        /// refers to:
+        ///   file.message_type(3)  // 4, 3
+        ///       .field(7)         // 2, 7
+        ///       .name()           // 1
+        /// This is because FileDescriptorProto.message_type has field number 4:
+        ///   repeated DescriptorProto message_type = 4;
+        /// and DescriptorProto.field has field number 2:
+        ///   repeated FieldDescriptorProto field = 2;
+        /// and FieldDescriptorProto.name has field number 1:
+        ///   optional string name = 1;
+        ///
+        /// Thus, the above path gives the location of a field name. If we removed
+        /// the last element:
+        ///   [ 4, 3, 2, 7 ]
+        /// this path refers to the whole field declaration (from the beginning
+        /// of the label to the terminating semicolon).
+        ///
+        public pbc::RepeatedField<int> Path {
+          get { return path_; }
+        }
+
+        /// Field number for the "span" field.
+        public const int SpanFieldNumber = 2;
+        private static readonly pb::FieldCodec<int> _repeated_span_codec
+            = pb::FieldCodec.ForInt32(18);
+        private readonly pbc::RepeatedField<int> span_ = new pbc::RepeatedField<int>();
+        ///
+        /// Always has exactly three or four elements: start line, start column,
+        /// end line (optional, otherwise assumed same as start line), end column.
+        /// These are packed into a single field for efficiency. Note that line
+        /// and column numbers are zero-based -- typically you will want to add
+        /// 1 to each before displaying to a user.
+        ///
+        public pbc::RepeatedField<int> Span {
+          get { return span_; }
+        }
+
+        /// Field number for the "leading_comments" field.
+        public const int LeadingCommentsFieldNumber = 3;
+        private string leadingComments_ = "";
+        ///
+        /// If this SourceCodeInfo represents a complete declaration, these are any
+        /// comments appearing before and after the declaration which appear to be
+        /// attached to the declaration.
+        ///
+        /// A series of line comments appearing on consecutive lines, with no other
+        /// tokens appearing on those lines, will be treated as a single comment.
+ /// + /// leading_detached_comments will keep paragraphs of comments that appear + /// before (but not connected to) the current element. Each paragraph, + /// separated by empty lines, will be one comment element in the repeated + /// field. + /// + /// Only the comment content is provided; comment markers (e.g. //) are + /// stripped out. For block comments, leading whitespace and an asterisk + /// will be stripped from the beginning of each line other than the first. + /// Newlines are included in the output. + /// + /// Examples: + /// + /// optional int32 foo = 1; // Comment attached to foo. + /// // Comment attached to bar. + /// optional int32 bar = 2; + /// + /// optional string baz = 3; + /// // Comment attached to baz. + /// // Another line attached to baz. + /// + /// // Comment attached to qux. + /// // + /// // Another line attached to qux. + /// optional double qux = 4; + /// + /// // Detached comment for corge. This is not leading or trailing comments + /// // to qux or corge because there are blank lines separating it from + /// // both. + /// + /// // Detached comment for corge paragraph 2. + /// + /// optional string corge = 5; + /// /* Block comment attached + /// * to corge. Leading asterisks + /// * will be removed. */ + /// /* Block comment attached to + /// * grault. */ + /// optional int32 grault = 6; + /// + /// // ignored detached comments. + /// + public string LeadingComments { + get { return leadingComments_; } + set { + leadingComments_ = pb::ProtoPreconditions.CheckNotNull(value, "value"); + } + } + + /// Field number for the "trailing_comments" field. + public const int TrailingCommentsFieldNumber = 4; + private string trailingComments_ = ""; + public string TrailingComments { + get { return trailingComments_; } + set { + trailingComments_ = pb::ProtoPreconditions.CheckNotNull(value, "value"); + } + } + + /// Field number for the "leading_detached_comments" field. 
+ public const int LeadingDetachedCommentsFieldNumber = 6; + private static readonly pb::FieldCodec _repeated_leadingDetachedComments_codec + = pb::FieldCodec.ForString(50); + private readonly pbc::RepeatedField leadingDetachedComments_ = new pbc::RepeatedField(); + public pbc::RepeatedField LeadingDetachedComments { + get { return leadingDetachedComments_; } + } + + public override bool Equals(object other) { + return Equals(other as Location); + } + + public bool Equals(Location other) { + if (ReferenceEquals(other, null)) { + return false; + } + if (ReferenceEquals(other, this)) { + return true; + } + if(!path_.Equals(other.path_)) return false; + if(!span_.Equals(other.span_)) return false; + if (LeadingComments != other.LeadingComments) return false; + if (TrailingComments != other.TrailingComments) return false; + if(!leadingDetachedComments_.Equals(other.leadingDetachedComments_)) return false; + return true; + } + + public override int GetHashCode() { + int hash = 1; + hash ^= path_.GetHashCode(); + hash ^= span_.GetHashCode(); + if (LeadingComments.Length != 0) hash ^= LeadingComments.GetHashCode(); + if (TrailingComments.Length != 0) hash ^= TrailingComments.GetHashCode(); + hash ^= leadingDetachedComments_.GetHashCode(); + return hash; + } + + public override string ToString() { + return pb::JsonFormatter.ToDiagnosticString(this); + } + + public void WriteTo(pb::CodedOutputStream output) { + path_.WriteTo(output, _repeated_path_codec); + span_.WriteTo(output, _repeated_span_codec); + if (LeadingComments.Length != 0) { + output.WriteRawTag(26); + output.WriteString(LeadingComments); + } + if (TrailingComments.Length != 0) { + output.WriteRawTag(34); + output.WriteString(TrailingComments); + } + leadingDetachedComments_.WriteTo(output, _repeated_leadingDetachedComments_codec); + } + + public int CalculateSize() { + int size = 0; + size += path_.CalculateSize(_repeated_path_codec); + size += span_.CalculateSize(_repeated_span_codec); + if (LeadingComments.Length != 0) { + size += 1 + pb::CodedOutputStream.ComputeStringSize(LeadingComments); + } + if (TrailingComments.Length != 0) { + size += 1 + pb::CodedOutputStream.ComputeStringSize(TrailingComments); + } + size += leadingDetachedComments_.CalculateSize(_repeated_leadingDetachedComments_codec); + return size; + } + + public void MergeFrom(Location other) { + if (other == null) { + return; + } + path_.Add(other.path_); + span_.Add(other.span_); + if (other.LeadingComments.Length != 0) { + LeadingComments = other.LeadingComments; + } + if (other.TrailingComments.Length != 0) { + TrailingComments = other.TrailingComments; + } + leadingDetachedComments_.Add(other.leadingDetachedComments_); + } + + public void MergeFrom(pb::CodedInputStream input) { + uint tag; + while ((tag = input.ReadTag()) != 0) { + switch(tag) { + default: + input.SkipLastField(); + break; + case 10: + case 8: { + path_.AddEntriesFrom(input, _repeated_path_codec); + break; + } + case 18: + case 16: { + span_.AddEntriesFrom(input, _repeated_span_codec); + break; + } + case 26: { + LeadingComments = input.ReadString(); + break; + } + case 34: { + TrailingComments = input.ReadString(); + break; + } + case 50: { + leadingDetachedComments_.AddEntriesFrom(input, _repeated_leadingDetachedComments_codec); + break; + } + } + } + } + + } + + } + #endregion + + } + + /// + /// Describes the relationship between generated code and its original source + /// file. 
A GeneratedCodeInfo message is associated with only one generated + /// source file, but may contain references to different source .proto files. + /// + [global::System.Diagnostics.DebuggerNonUserCodeAttribute()] + internal sealed partial class GeneratedCodeInfo : pb::IMessage { + private static readonly pb::MessageParser _parser = new pb::MessageParser(() => new GeneratedCodeInfo()); + public static pb::MessageParser Parser { get { return _parser; } } + + public static pbr::MessageDescriptor Descriptor { + get { return global::Google.Protobuf.Reflection.DescriptorReflection.Descriptor.MessageTypes[18]; } + } + + pbr::MessageDescriptor pb::IMessage.Descriptor { + get { return Descriptor; } + } + + public GeneratedCodeInfo() { + OnConstruction(); + } + + partial void OnConstruction(); + + public GeneratedCodeInfo(GeneratedCodeInfo other) : this() { + annotation_ = other.annotation_.Clone(); + } + + public GeneratedCodeInfo Clone() { + return new GeneratedCodeInfo(this); + } + + /// Field number for the "annotation" field. + public const int AnnotationFieldNumber = 1; + private static readonly pb::FieldCodec _repeated_annotation_codec + = pb::FieldCodec.ForMessage(10, global::Google.Protobuf.Reflection.GeneratedCodeInfo.Types.Annotation.Parser); + private readonly pbc::RepeatedField annotation_ = new pbc::RepeatedField(); + /// + /// An Annotation connects some span of text in generated code to an element + /// of its generating .proto file. + /// + public pbc::RepeatedField Annotation { + get { return annotation_; } + } + + public override bool Equals(object other) { + return Equals(other as GeneratedCodeInfo); + } + + public bool Equals(GeneratedCodeInfo other) { + if (ReferenceEquals(other, null)) { + return false; + } + if (ReferenceEquals(other, this)) { + return true; + } + if(!annotation_.Equals(other.annotation_)) return false; + return true; + } + + public override int GetHashCode() { + int hash = 1; + hash ^= annotation_.GetHashCode(); + return hash; + } + + public override string ToString() { + return pb::JsonFormatter.ToDiagnosticString(this); + } + + public void WriteTo(pb::CodedOutputStream output) { + annotation_.WriteTo(output, _repeated_annotation_codec); + } + + public int CalculateSize() { + int size = 0; + size += annotation_.CalculateSize(_repeated_annotation_codec); + return size; + } + + public void MergeFrom(GeneratedCodeInfo other) { + if (other == null) { + return; + } + annotation_.Add(other.annotation_); + } + + public void MergeFrom(pb::CodedInputStream input) { + uint tag; + while ((tag = input.ReadTag()) != 0) { + switch(tag) { + default: + input.SkipLastField(); + break; + case 10: { + annotation_.AddEntriesFrom(input, _repeated_annotation_codec); + break; + } + } + } + } + + #region Nested types + /// Container for nested types declared in the GeneratedCodeInfo message type. 
+ [global::System.Diagnostics.DebuggerNonUserCodeAttribute()] + public static partial class Types { + [global::System.Diagnostics.DebuggerNonUserCodeAttribute()] + internal sealed partial class Annotation : pb::IMessage { + private static readonly pb::MessageParser _parser = new pb::MessageParser(() => new Annotation()); + public static pb::MessageParser Parser { get { return _parser; } } + + public static pbr::MessageDescriptor Descriptor { + get { return global::Google.Protobuf.Reflection.GeneratedCodeInfo.Descriptor.NestedTypes[0]; } + } + + pbr::MessageDescriptor pb::IMessage.Descriptor { + get { return Descriptor; } + } + + public Annotation() { + OnConstruction(); + } + + partial void OnConstruction(); + + public Annotation(Annotation other) : this() { + path_ = other.path_.Clone(); + sourceFile_ = other.sourceFile_; + begin_ = other.begin_; + end_ = other.end_; + } + + public Annotation Clone() { + return new Annotation(this); + } + + /// Field number for the "path" field. + public const int PathFieldNumber = 1; + private static readonly pb::FieldCodec _repeated_path_codec + = pb::FieldCodec.ForInt32(10); + private readonly pbc::RepeatedField path_ = new pbc::RepeatedField(); + /// + /// Identifies the element in the original source .proto file. This field + /// is formatted the same as SourceCodeInfo.Location.path. + /// + public pbc::RepeatedField Path { + get { return path_; } + } + + /// Field number for the "source_file" field. + public const int SourceFileFieldNumber = 2; + private string sourceFile_ = ""; + /// + /// Identifies the filesystem path to the original source .proto. + /// + public string SourceFile { + get { return sourceFile_; } + set { + sourceFile_ = pb::ProtoPreconditions.CheckNotNull(value, "value"); + } + } + + /// Field number for the "begin" field. + public const int BeginFieldNumber = 3; + private int begin_; + /// + /// Identifies the starting offset in bytes in the generated code + /// that relates to the identified object. + /// + public int Begin { + get { return begin_; } + set { + begin_ = value; + } + } + + /// Field number for the "end" field. + public const int EndFieldNumber = 4; + private int end_; + /// + /// Identifies the ending offset in bytes in the generated code that + /// relates to the identified offset. The end offset should be one past + /// the last relevant byte (so the length of the text = end - begin). 
+ /// + public int End { + get { return end_; } + set { + end_ = value; + } + } + + public override bool Equals(object other) { + return Equals(other as Annotation); + } + + public bool Equals(Annotation other) { + if (ReferenceEquals(other, null)) { + return false; + } + if (ReferenceEquals(other, this)) { + return true; + } + if(!path_.Equals(other.path_)) return false; + if (SourceFile != other.SourceFile) return false; + if (Begin != other.Begin) return false; + if (End != other.End) return false; + return true; + } + + public override int GetHashCode() { + int hash = 1; + hash ^= path_.GetHashCode(); + if (SourceFile.Length != 0) hash ^= SourceFile.GetHashCode(); + if (Begin != 0) hash ^= Begin.GetHashCode(); + if (End != 0) hash ^= End.GetHashCode(); + return hash; + } + + public override string ToString() { + return pb::JsonFormatter.ToDiagnosticString(this); + } + + public void WriteTo(pb::CodedOutputStream output) { + path_.WriteTo(output, _repeated_path_codec); + if (SourceFile.Length != 0) { + output.WriteRawTag(18); + output.WriteString(SourceFile); + } + if (Begin != 0) { + output.WriteRawTag(24); + output.WriteInt32(Begin); + } + if (End != 0) { + output.WriteRawTag(32); + output.WriteInt32(End); + } + } + + public int CalculateSize() { + int size = 0; + size += path_.CalculateSize(_repeated_path_codec); + if (SourceFile.Length != 0) { + size += 1 + pb::CodedOutputStream.ComputeStringSize(SourceFile); + } + if (Begin != 0) { + size += 1 + pb::CodedOutputStream.ComputeInt32Size(Begin); + } + if (End != 0) { + size += 1 + pb::CodedOutputStream.ComputeInt32Size(End); + } + return size; + } + + public void MergeFrom(Annotation other) { + if (other == null) { + return; + } + path_.Add(other.path_); + if (other.SourceFile.Length != 0) { + SourceFile = other.SourceFile; + } + if (other.Begin != 0) { + Begin = other.Begin; + } + if (other.End != 0) { + End = other.End; + } + } + + public void MergeFrom(pb::CodedInputStream input) { + uint tag; + while ((tag = input.ReadTag()) != 0) { + switch(tag) { + default: + input.SkipLastField(); + break; + case 10: + case 8: { + path_.AddEntriesFrom(input, _repeated_path_codec); + break; + } + case 18: { + SourceFile = input.ReadString(); + break; + } + case 24: { + Begin = input.ReadInt32(); + break; + } + case 32: { + End = input.ReadInt32(); + break; + } + } + } + } + + } + + } + #endregion + + } + + #endregion + +} + +#endregion Designer generated code diff --git a/packager/third_party/protobuf/csharp/src/Google.Protobuf/Reflection/DescriptorBase.cs b/packager/third_party/protobuf/csharp/src/Google.Protobuf/Reflection/DescriptorBase.cs new file mode 100644 index 0000000000..194041a889 --- /dev/null +++ b/packager/third_party/protobuf/csharp/src/Google.Protobuf/Reflection/DescriptorBase.cs @@ -0,0 +1,85 @@ +#region Copyright notice and license +// Protocol Buffers - Google's data interchange format +// Copyright 2008 Google Inc. All rights reserved. +// https://developers.google.com/protocol-buffers/ +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. 
+// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +#endregion + +namespace Google.Protobuf.Reflection +{ + /// + /// Base class for nearly all descriptors, providing common functionality. + /// + public abstract class DescriptorBase : IDescriptor + { + private readonly FileDescriptor file; + private readonly string fullName; + private readonly int index; + + internal DescriptorBase(FileDescriptor file, string fullName, int index) + { + this.file = file; + this.fullName = fullName; + this.index = index; + } + + /// + /// The index of this descriptor within its parent descriptor. + /// + /// + /// This returns the index of this descriptor within its parent, for + /// this descriptor's type. (There can be duplicate values for different + /// types, e.g. one enum type with index 0 and one message type with index 0.) + /// + public int Index + { + get { return index; } + } + + /// + /// Returns the name of the entity (field, message etc) being described. + /// + public abstract string Name { get; } + + /// + /// The fully qualified name of the descriptor's target. + /// + public string FullName + { + get { return fullName; } + } + + /// + /// The file this descriptor was declared in. + /// + public FileDescriptor File + { + get { return file; } + } + } +} \ No newline at end of file diff --git a/packager/third_party/protobuf/csharp/src/Google.Protobuf/Reflection/DescriptorPool.cs b/packager/third_party/protobuf/csharp/src/Google.Protobuf/Reflection/DescriptorPool.cs new file mode 100644 index 0000000000..99ca4bf34f --- /dev/null +++ b/packager/third_party/protobuf/csharp/src/Google.Protobuf/Reflection/DescriptorPool.cs @@ -0,0 +1,368 @@ +#region Copyright notice and license +// Protocol Buffers - Google's data interchange format +// Copyright 2008 Google Inc. All rights reserved. +// https://developers.google.com/protocol-buffers/ +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. 
+// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +#endregion + +using System; +using System.Collections.Generic; +using System.Text; +using System.Text.RegularExpressions; + +namespace Google.Protobuf.Reflection +{ + /// + /// Contains lookup tables containing all the descriptors defined in a particular file. + /// + internal sealed class DescriptorPool + { + private readonly IDictionary descriptorsByName = + new Dictionary(); + + private readonly IDictionary fieldsByNumber = + new Dictionary(); + + private readonly IDictionary enumValuesByNumber = + new Dictionary(); + + private readonly HashSet dependencies; + + internal DescriptorPool(FileDescriptor[] dependencyFiles) + { + dependencies = new HashSet(); + for (int i = 0; i < dependencyFiles.Length; i++) + { + dependencies.Add(dependencyFiles[i]); + ImportPublicDependencies(dependencyFiles[i]); + } + + foreach (FileDescriptor dependency in dependencyFiles) + { + AddPackage(dependency.Package, dependency); + } + } + + private void ImportPublicDependencies(FileDescriptor file) + { + foreach (FileDescriptor dependency in file.PublicDependencies) + { + if (dependencies.Add(dependency)) + { + ImportPublicDependencies(dependency); + } + } + } + + /// + /// Finds a symbol of the given name within the pool. + /// + /// The type of symbol to look for + /// Fully-qualified name to look up + /// The symbol with the given name and type, + /// or null if the symbol doesn't exist or has the wrong type + internal T FindSymbol(string fullName) where T : class + { + IDescriptor result; + descriptorsByName.TryGetValue(fullName, out result); + T descriptor = result as T; + if (descriptor != null) + { + return descriptor; + } + + // dependencies contains direct dependencies and any *public* dependencies + // of those dependencies (transitively)... so we don't need to recurse here. + foreach (FileDescriptor dependency in dependencies) + { + dependency.DescriptorPool.descriptorsByName.TryGetValue(fullName, out result); + descriptor = result as T; + if (descriptor != null) + { + return descriptor; + } + } + + return null; + } + + /// + /// Adds a package to the symbol tables. If a package by the same name + /// already exists, that is fine, but if some other kind of symbol + /// exists under the same name, an exception is thrown. If the package + /// has multiple components, this also adds the parent package(s). 
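// Illustrative, self-contained sketch of the "public dependency closure" built by the pool
// constructor and ImportPublicDependencies above: direct imports are always visible, and
// "import public" re-exports are followed transitively up front, so FindSymbol never has to
// recurse at lookup time. The toy file graph below is hypothetical.
using System;
using System.Collections.Generic;

static class PublicDependencyClosureSketch
{
    // Toy model: file name -> files it declares with "import public".
    static readonly Dictionary<string, string[]> PublicImports = new Dictionary<string, string[]>
    {
        { "a.proto", new[] { "b.proto" } },
        { "b.proto", new[] { "c.proto" } },
        { "c.proto", new string[0] },
    };

    static void AddWithPublicClosure(string file, HashSet<string> visible)
    {
        if (!visible.Add(file))
        {
            return; // already imported; avoids cycles and duplicates
        }
        foreach (var dep in PublicImports[file])
        {
            AddWithPublicClosure(dep, visible);
        }
    }

    static void Main()
    {
        var visible = new HashSet<string>();
        AddWithPublicClosure("a.proto", visible);        // a.proto is a direct dependency
        Console.WriteLine(visible.Contains("c.proto"));  // True: re-exported transitively via b.proto
    }
}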
+ /// + internal void AddPackage(string fullName, FileDescriptor file) + { + int dotpos = fullName.LastIndexOf('.'); + String name; + if (dotpos != -1) + { + AddPackage(fullName.Substring(0, dotpos), file); + name = fullName.Substring(dotpos + 1); + } + else + { + name = fullName; + } + + IDescriptor old; + if (descriptorsByName.TryGetValue(fullName, out old)) + { + if (!(old is PackageDescriptor)) + { + throw new DescriptorValidationException(file, + "\"" + name + + "\" is already defined (as something other than a " + + "package) in file \"" + old.File.Name + "\"."); + } + } + descriptorsByName[fullName] = new PackageDescriptor(name, fullName, file); + } + + /// + /// Adds a symbol to the symbol table. + /// + /// The symbol already existed + /// in the symbol table. + internal void AddSymbol(IDescriptor descriptor) + { + ValidateSymbolName(descriptor); + String fullName = descriptor.FullName; + + IDescriptor old; + if (descriptorsByName.TryGetValue(fullName, out old)) + { + int dotPos = fullName.LastIndexOf('.'); + string message; + if (descriptor.File == old.File) + { + if (dotPos == -1) + { + message = "\"" + fullName + "\" is already defined."; + } + else + { + message = "\"" + fullName.Substring(dotPos + 1) + "\" is already defined in \"" + + fullName.Substring(0, dotPos) + "\"."; + } + } + else + { + message = "\"" + fullName + "\" is already defined in file \"" + old.File.Name + "\"."; + } + throw new DescriptorValidationException(descriptor, message); + } + descriptorsByName[fullName] = descriptor; + } + + private static readonly Regex ValidationRegex = new Regex("^[_A-Za-z][_A-Za-z0-9]*$", + FrameworkPortability.CompiledRegexWhereAvailable); + + /// + /// Verifies that the descriptor's name is valid (i.e. it contains + /// only letters, digits and underscores, and does not start with a digit). + /// + /// + private static void ValidateSymbolName(IDescriptor descriptor) + { + if (descriptor.Name == "") + { + throw new DescriptorValidationException(descriptor, "Missing name."); + } + if (!ValidationRegex.IsMatch(descriptor.Name)) + { + throw new DescriptorValidationException(descriptor, + "\"" + descriptor.Name + "\" is not a valid identifier."); + } + } + + /// + /// Returns the field with the given number in the given descriptor, + /// or null if it can't be found. + /// + internal FieldDescriptor FindFieldByNumber(MessageDescriptor messageDescriptor, int number) + { + FieldDescriptor ret; + fieldsByNumber.TryGetValue(new DescriptorIntPair(messageDescriptor, number), out ret); + return ret; + } + + internal EnumValueDescriptor FindEnumValueByNumber(EnumDescriptor enumDescriptor, int number) + { + EnumValueDescriptor ret; + enumValuesByNumber.TryGetValue(new DescriptorIntPair(enumDescriptor, number), out ret); + return ret; + } + + /// + /// Adds a field to the fieldsByNumber table. + /// + /// A field with the same + /// containing type and number already exists. + internal void AddFieldByNumber(FieldDescriptor field) + { + DescriptorIntPair key = new DescriptorIntPair(field.ContainingType, field.FieldNumber); + FieldDescriptor old; + if (fieldsByNumber.TryGetValue(key, out old)) + { + throw new DescriptorValidationException(field, "Field number " + field.FieldNumber + + "has already been used in \"" + + field.ContainingType.FullName + + "\" by field \"" + old.Name + "\"."); + } + fieldsByNumber[key] = field; + } + + /// + /// Adds an enum value to the enumValuesByNumber table. If an enum value + /// with the same type and number already exists, this method does nothing. 
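// Illustrative, self-contained sketch of the identifier rule that ValidateSymbolName above
// enforces with the same pattern: a symbol must start with a letter or underscore and contain
// only letters, digits and underscores.
using System;
using System.Text.RegularExpressions;

static class SymbolNameSketch
{
    static readonly Regex Validation = new Regex("^[_A-Za-z][_A-Za-z0-9]*$");

    static void Main()
    {
        Console.WriteLine(Validation.IsMatch("my_field"));   // True
        Console.WriteLine(Validation.IsMatch("_Reserved1")); // True
        Console.WriteLine(Validation.IsMatch("1st_field"));  // False: starts with a digit
        Console.WriteLine(Validation.IsMatch("bad-name"));   // False: '-' is not allowed
    }
}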
+ /// (This is allowed; the first value defined with the number takes precedence.) + /// + internal void AddEnumValueByNumber(EnumValueDescriptor enumValue) + { + DescriptorIntPair key = new DescriptorIntPair(enumValue.EnumDescriptor, enumValue.Number); + if (!enumValuesByNumber.ContainsKey(key)) + { + enumValuesByNumber[key] = enumValue; + } + } + + /// + /// Looks up a descriptor by name, relative to some other descriptor. + /// The name may be fully-qualified (with a leading '.'), partially-qualified, + /// or unqualified. C++-like name lookup semantics are used to search for the + /// matching descriptor. + /// + /// + /// This isn't heavily optimized, but it's only used during cross linking anyway. + /// If it starts being used more widely, we should look at performance more carefully. + /// + internal IDescriptor LookupSymbol(string name, IDescriptor relativeTo) + { + IDescriptor result; + if (name.StartsWith(".")) + { + // Fully-qualified name. + result = FindSymbol(name.Substring(1)); + } + else + { + // If "name" is a compound identifier, we want to search for the + // first component of it, then search within it for the rest. + int firstPartLength = name.IndexOf('.'); + string firstPart = firstPartLength == -1 ? name : name.Substring(0, firstPartLength); + + // We will search each parent scope of "relativeTo" looking for the + // symbol. + StringBuilder scopeToTry = new StringBuilder(relativeTo.FullName); + + while (true) + { + // Chop off the last component of the scope. + + int dotpos = scopeToTry.ToString().LastIndexOf("."); + if (dotpos == -1) + { + result = FindSymbol(name); + break; + } + else + { + scopeToTry.Length = dotpos + 1; + + // Append firstPart and try to find. + scopeToTry.Append(firstPart); + result = FindSymbol(scopeToTry.ToString()); + + if (result != null) + { + if (firstPartLength != -1) + { + // We only found the first part of the symbol. Now look for + // the whole thing. If this fails, we *don't* want to keep + // searching parent scopes. + scopeToTry.Length = dotpos + 1; + scopeToTry.Append(name); + result = FindSymbol(scopeToTry.ToString()); + } + break; + } + + // Not found. Remove the name so we can try again. + scopeToTry.Length = dotpos; + } + } + } + + if (result == null) + { + throw new DescriptorValidationException(relativeTo, "\"" + name + "\" is not defined."); + } + else + { + return result; + } + } + + /// + /// Struct used to hold the keys for the fieldByNumber table. 
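// Illustrative, self-contained sketch of the candidate order produced by the C++-style scope
// walk in LookupSymbol above: the first component of a partially-qualified name is tried in
// each scope enclosing "relativeTo", innermost first, and at the outermost scope the full name
// is tried as-is. Once the first component resolves, the rest of the name is looked up inside
// that scope only (no further walking). The names used in Main are hypothetical.
using System;
using System.Collections.Generic;

static class LookupOrderSketch
{
    static IEnumerable<string> Candidates(string name, string relativeTo)
    {
        string firstPart = name.Split('.')[0];
        string scope = relativeTo;
        while (true)
        {
            int dot = scope.LastIndexOf('.');
            if (dot == -1)
            {
                yield return name; // outermost scope: the name itself, unqualified
                yield break;
            }
            scope = scope.Substring(0, dot);
            yield return scope + "." + firstPart;
        }
    }

    static void Main()
    {
        // Resolving "Baz.Qux" from inside "foo.bar.Message":
        Console.WriteLine(string.Join(", ", Candidates("Baz.Qux", "foo.bar.Message")));
        // foo.bar.Baz, foo.Baz, Baz.Qux
    }
}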
+ /// + private struct DescriptorIntPair : IEquatable + { + private readonly int number; + private readonly IDescriptor descriptor; + + internal DescriptorIntPair(IDescriptor descriptor, int number) + { + this.number = number; + this.descriptor = descriptor; + } + + public bool Equals(DescriptorIntPair other) + { + return descriptor == other.descriptor + && number == other.number; + } + + public override bool Equals(object obj) + { + if (obj is DescriptorIntPair) + { + return Equals((DescriptorIntPair) obj); + } + return false; + } + + public override int GetHashCode() + { + return descriptor.GetHashCode()*((1 << 16) - 1) + number; + } + } + } +} \ No newline at end of file diff --git a/packager/third_party/protobuf/csharp/src/Google.Protobuf/Reflection/DescriptorUtil.cs b/packager/third_party/protobuf/csharp/src/Google.Protobuf/Reflection/DescriptorUtil.cs new file mode 100644 index 0000000000..f5570fc40a --- /dev/null +++ b/packager/third_party/protobuf/csharp/src/Google.Protobuf/Reflection/DescriptorUtil.cs @@ -0,0 +1,64 @@ +#region Copyright notice and license +// Protocol Buffers - Google's data interchange format +// Copyright 2008 Google Inc. All rights reserved. +// https://developers.google.com/protocol-buffers/ +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +#endregion + +using System.Collections.Generic; +using System.Collections.ObjectModel; + +namespace Google.Protobuf.Reflection +{ + /// + /// Internal class containing utility methods when working with descriptors. + /// + internal static class DescriptorUtil + { + /// + /// Equivalent to Func[TInput, int, TOutput] but usable in .NET 2.0. Only used to convert + /// arrays. + /// + internal delegate TOutput IndexedConverter(TInput element, int index); + + /// + /// Converts the given array into a read-only list, applying the specified conversion to + /// each input element. 
+ /// + internal static IList ConvertAndMakeReadOnly + (IList input, IndexedConverter converter) + { + TOutput[] array = new TOutput[input.Count]; + for (int i = 0; i < array.Length; i++) + { + array[i] = converter(input[i], i); + } + return new ReadOnlyCollection(array); + } + } +} \ No newline at end of file diff --git a/packager/third_party/protobuf/csharp/src/Google.Protobuf/Reflection/DescriptorValidationException.cs b/packager/third_party/protobuf/csharp/src/Google.Protobuf/Reflection/DescriptorValidationException.cs new file mode 100644 index 0000000000..143671dbd4 --- /dev/null +++ b/packager/third_party/protobuf/csharp/src/Google.Protobuf/Reflection/DescriptorValidationException.cs @@ -0,0 +1,80 @@ +#region Copyright notice and license +// Protocol Buffers - Google's data interchange format +// Copyright 2008 Google Inc. All rights reserved. +// https://developers.google.com/protocol-buffers/ +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +#endregion + +using System; + +namespace Google.Protobuf.Reflection +{ + /// + /// Thrown when building descriptors fails because the source DescriptorProtos + /// are not valid. + /// + public sealed class DescriptorValidationException : Exception + { + private readonly String name; + private readonly string description; + + /// + /// The full name of the descriptor where the error occurred. + /// + public String ProblemSymbolName + { + get { return name; } + } + + /// + /// A human-readable description of the error. (The Message property + /// is made up of the descriptor's name and this description.) + /// + public string Description + { + get { return description; } + } + + internal DescriptorValidationException(IDescriptor problemDescriptor, string description) : + base(problemDescriptor.FullName + ": " + description) + { + // Note that problemDescriptor may be partially uninitialized, so we + // don't want to expose it directly to the user. So, we only provide + // the name and the original proto. 
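// Illustrative, self-contained sketch of the indexed-converter pattern used by
// DescriptorUtil.ConvertAndMakeReadOnly above: each element is converted together with its
// index, and the result is exposed through a read-only wrapper. (This re-states the utility
// with explicit generic parameters purely for illustration.)
using System;
using System.Collections.Generic;
using System.Collections.ObjectModel;

static class IndexedConverterSketch
{
    delegate TOutput IndexedConverter<TInput, TOutput>(TInput element, int index);

    static IList<TOutput> ConvertAndMakeReadOnly<TInput, TOutput>(
        IList<TInput> input, IndexedConverter<TInput, TOutput> converter)
    {
        var array = new TOutput[input.Count];
        for (int i = 0; i < array.Length; i++)
        {
            array[i] = converter(input[i], i); // the index becomes, e.g., a descriptor's Index
        }
        return new ReadOnlyCollection<TOutput>(array);
    }

    static void Main()
    {
        IList<string> names = new[] { "path", "source_file", "begin", "end" };
        var labelled = ConvertAndMakeReadOnly<string, string>(names, (name, index) => index + ": " + name);
        Console.WriteLine(string.Join(", ", labelled)); // 0: path, 1: source_file, 2: begin, 3: end
    }
}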
+ name = problemDescriptor.FullName; + this.description = description; + } + + internal DescriptorValidationException(IDescriptor problemDescriptor, string description, Exception cause) : + base(problemDescriptor.FullName + ": " + description, cause) + { + name = problemDescriptor.FullName; + this.description = description; + } + } +} \ No newline at end of file diff --git a/packager/third_party/protobuf/csharp/src/Google.Protobuf/Reflection/EnumDescriptor.cs b/packager/third_party/protobuf/csharp/src/Google.Protobuf/Reflection/EnumDescriptor.cs new file mode 100644 index 0000000000..c732c93a06 --- /dev/null +++ b/packager/third_party/protobuf/csharp/src/Google.Protobuf/Reflection/EnumDescriptor.cs @@ -0,0 +1,116 @@ +#region Copyright notice and license +// Protocol Buffers - Google's data interchange format +// Copyright 2008 Google Inc. All rights reserved. +// https://developers.google.com/protocol-buffers/ +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +#endregion + +using System; +using System.Collections.Generic; + +namespace Google.Protobuf.Reflection +{ + /// + /// Descriptor for an enum type in a .proto file. + /// + public sealed class EnumDescriptor : DescriptorBase + { + private readonly EnumDescriptorProto proto; + private readonly MessageDescriptor containingType; + private readonly IList values; + private readonly Type clrType; + + internal EnumDescriptor(EnumDescriptorProto proto, FileDescriptor file, MessageDescriptor parent, int index, Type clrType) + : base(file, file.ComputeFullName(parent, proto.Name), index) + { + this.proto = proto; + this.clrType = clrType; + containingType = parent; + + if (proto.Value.Count == 0) + { + // We cannot allow enums with no values because this would mean there + // would be no valid default value for fields of this type. 
+ throw new DescriptorValidationException(this, "Enums must contain at least one value."); + } + + values = DescriptorUtil.ConvertAndMakeReadOnly(proto.Value, + (value, i) => new EnumValueDescriptor(value, file, this, i)); + + File.DescriptorPool.AddSymbol(this); + } + + internal EnumDescriptorProto Proto { get { return proto; } } + + /// + /// The brief name of the descriptor's target. + /// + public override string Name { get { return proto.Name; } } + + /// + /// The CLR type for this enum. For generated code, this will be a CLR enum type. + /// + public Type ClrType { get { return clrType; } } + + /// + /// If this is a nested type, get the outer descriptor, otherwise null. + /// + public MessageDescriptor ContainingType + { + get { return containingType; } + } + + /// + /// An unmodifiable list of defined value descriptors for this enum. + /// + public IList Values + { + get { return values; } + } + + /// + /// Finds an enum value by number. If multiple enum values have the + /// same number, this returns the first defined value with that number. + /// If there is no value for the given number, this returns null. + /// + public EnumValueDescriptor FindValueByNumber(int number) + { + return File.DescriptorPool.FindEnumValueByNumber(this, number); + } + + /// + /// Finds an enum value by name. + /// + /// The unqualified name of the value (e.g. "FOO"). + /// The value's descriptor, or null if not found. + public EnumValueDescriptor FindValueByName(string name) + { + return File.DescriptorPool.FindSymbol(FullName + "." + name); + } + } +} \ No newline at end of file diff --git a/packager/third_party/protobuf/csharp/src/Google.Protobuf/Reflection/EnumValueDescriptor.cs b/packager/third_party/protobuf/csharp/src/Google.Protobuf/Reflection/EnumValueDescriptor.cs new file mode 100644 index 0000000000..b212ce9618 --- /dev/null +++ b/packager/third_party/protobuf/csharp/src/Google.Protobuf/Reflection/EnumValueDescriptor.cs @@ -0,0 +1,70 @@ +#region Copyright notice and license +// Protocol Buffers - Google's data interchange format +// Copyright 2008 Google Inc. All rights reserved. +// https://developers.google.com/protocol-buffers/ +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. 
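// A hedged usage sketch of the enum lookups declared above, driven off descriptor.proto's own
// reflection (publicly reachable via FileDescriptor.DescriptorProtoFileDescriptor, defined in
// FileDescriptor.cs later in this change). It assumes MessageDescriptor exposes its nested
// enums via EnumTypes, as in the upstream runtime, and that FieldDescriptorProto declares its
// Type enum first.
using System;
using Google.Protobuf.Reflection;

static class EnumLookupSketch
{
    static void Main()
    {
        FileDescriptor file = FileDescriptor.DescriptorProtoFileDescriptor;
        MessageDescriptor fieldProto = file.FindTypeByName<MessageDescriptor>("FieldDescriptorProto");
        EnumDescriptor type = fieldProto.EnumTypes[0];   // assumed: FieldDescriptorProto.Type

        Console.WriteLine(type.FindValueByName("TYPE_INT32").Number); // 5
        Console.WriteLine(type.FindValueByNumber(1).Name);            // TYPE_DOUBLE
    }
}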
IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +#endregion + +namespace Google.Protobuf.Reflection +{ + /// + /// Descriptor for a single enum value within an enum in a .proto file. + /// + public sealed class EnumValueDescriptor : DescriptorBase + { + private readonly EnumDescriptor enumDescriptor; + private readonly EnumValueDescriptorProto proto; + + internal EnumValueDescriptor(EnumValueDescriptorProto proto, FileDescriptor file, + EnumDescriptor parent, int index) + : base(file, parent.FullName + "." + proto.Name, index) + { + this.proto = proto; + enumDescriptor = parent; + file.DescriptorPool.AddSymbol(this); + file.DescriptorPool.AddEnumValueByNumber(this); + } + + internal EnumValueDescriptorProto Proto { get { return proto; } } + + /// + /// Returns the name of the enum value described by this object. + /// + public override string Name { get { return proto.Name; } } + + /// + /// Returns the number associated with this enum value. + /// + public int Number { get { return Proto.Number; } } + + /// + /// Returns the enum descriptor that this value is part of. + /// + public EnumDescriptor EnumDescriptor { get { return enumDescriptor; } } + } +} \ No newline at end of file diff --git a/packager/third_party/protobuf/csharp/src/Google.Protobuf/Reflection/FieldAccessorBase.cs b/packager/third_party/protobuf/csharp/src/Google.Protobuf/Reflection/FieldAccessorBase.cs new file mode 100644 index 0000000000..82ce50518d --- /dev/null +++ b/packager/third_party/protobuf/csharp/src/Google.Protobuf/Reflection/FieldAccessorBase.cs @@ -0,0 +1,63 @@ +#region Copyright notice and license +// Protocol Buffers - Google's data interchange format +// Copyright 2015 Google Inc. All rights reserved. +// https://developers.google.com/protocol-buffers/ +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +#endregion + +using System; +using System.Reflection; +using Google.Protobuf.Compatibility; + +namespace Google.Protobuf.Reflection +{ + /// + /// Base class for field accessors. + /// + internal abstract class FieldAccessorBase : IFieldAccessor + { + private readonly Func getValueDelegate; + private readonly FieldDescriptor descriptor; + + internal FieldAccessorBase(PropertyInfo property, FieldDescriptor descriptor) + { + this.descriptor = descriptor; + getValueDelegate = ReflectionUtil.CreateFuncIMessageObject(property.GetGetMethod()); + } + + public FieldDescriptor Descriptor { get { return descriptor; } } + + public object GetValue(IMessage message) + { + return getValueDelegate(message); + } + + public abstract void Clear(IMessage message); + public abstract void SetValue(IMessage message, object value); + } +} diff --git a/packager/third_party/protobuf/csharp/src/Google.Protobuf/Reflection/FieldDescriptor.cs b/packager/third_party/protobuf/csharp/src/Google.Protobuf/Reflection/FieldDescriptor.cs new file mode 100644 index 0000000000..6c6f6ee05f --- /dev/null +++ b/packager/third_party/protobuf/csharp/src/Google.Protobuf/Reflection/FieldDescriptor.cs @@ -0,0 +1,343 @@ +#region Copyright notice and license +// Protocol Buffers - Google's data interchange format +// Copyright 2008 Google Inc. All rights reserved. +// https://developers.google.com/protocol-buffers/ +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
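// A hedged usage sketch of the field-accessor machinery above (FieldAccessorBase and the
// IFieldAccessor contract it implements), using the Duration well-known type that ships with
// the Google.Protobuf runtime. It assumes MessageDescriptor.Fields offers a by-name indexer,
// as in the upstream runtime.
using System;
using Google.Protobuf.Reflection;
using Google.Protobuf.WellKnownTypes;

static class FieldAccessorSketch
{
    static void Main()
    {
        var duration = new Duration();
        FieldDescriptor seconds = Duration.Descriptor.Fields["seconds"];
        IFieldAccessor accessor = seconds.Accessor;

        accessor.SetValue(duration, 90L);                // write through reflection (int64 -> boxed long)
        Console.WriteLine(accessor.GetValue(duration));  // 90
        accessor.Clear(duration);                        // reset to the proto3 default
        Console.WriteLine(duration.Seconds);             // 0
    }
}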
+#endregion + +using Google.Protobuf.Compatibility; +using System; + +namespace Google.Protobuf.Reflection +{ + /// + /// Descriptor for a field or extension within a message in a .proto file. + /// + public sealed class FieldDescriptor : DescriptorBase, IComparable + { + private EnumDescriptor enumType; + private MessageDescriptor messageType; + private FieldType fieldType; + private readonly string propertyName; // Annoyingly, needed in Crosslink. + private IFieldAccessor accessor; + + /// + /// Get the field's containing message type. + /// + public MessageDescriptor ContainingType { get; } + + /// + /// Returns the oneof containing this field, or null if it is not part of a oneof. + /// + public OneofDescriptor ContainingOneof { get; } + + /// + /// The effective JSON name for this field. This is usually the lower-camel-cased form of the field name, + /// but can be overridden using the json_name option in the .proto file. + /// + public string JsonName { get; } + + internal FieldDescriptorProto Proto { get; } + + internal FieldDescriptor(FieldDescriptorProto proto, FileDescriptor file, + MessageDescriptor parent, int index, string propertyName) + : base(file, file.ComputeFullName(parent, proto.Name), index) + { + Proto = proto; + if (proto.Type != 0) + { + fieldType = GetFieldTypeFromProtoType(proto.Type); + } + + if (FieldNumber <= 0) + { + throw new DescriptorValidationException(this, "Field numbers must be positive integers."); + } + ContainingType = parent; + // OneofIndex "defaults" to -1 due to a hack in FieldDescriptor.OnConstruction. + if (proto.OneofIndex != -1) + { + if (proto.OneofIndex < 0 || proto.OneofIndex >= parent.Proto.OneofDecl.Count) + { + throw new DescriptorValidationException(this, + $"FieldDescriptorProto.oneof_index is out of range for type {parent.Name}"); + } + ContainingOneof = parent.Oneofs[proto.OneofIndex]; + } + + file.DescriptorPool.AddSymbol(this); + // We can't create the accessor until we've cross-linked, unfortunately, as we + // may not know whether the type of the field is a map or not. Remember the property name + // for later. + // We could trust the generated code and check whether the type of the property is + // a MapField, but that feels a tad nasty. + this.propertyName = propertyName; + JsonName = Proto.JsonName == "" ? JsonFormatter.ToCamelCase(Proto.Name) : Proto.JsonName; + } + + + /// + /// The brief name of the descriptor's target. + /// + public override string Name => Proto.Name; + + /// + /// Returns the accessor for this field. + /// + /// + /// + /// While a describes the field, it does not provide + /// any way of obtaining or changing the value of the field within a specific message; + /// that is the responsibility of the accessor. + /// + /// + /// The value returned by this property will be non-null for all regular fields. However, + /// if a message containing a map field is introspected, the list of nested messages will include + /// an auto-generated nested key/value pair message for the field. This is not represented in any + /// generated type, and the value of the map field itself is represented by a dictionary in the + /// reflection API. There are never instances of those "hidden" messages, so no accessor is provided + /// and this property will return null. + /// + /// + public IFieldAccessor Accessor => accessor; + + /// + /// Maps a field type as included in the .proto file to a FieldType. 
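// Illustrative, self-contained sketch of the JSON-name defaulting applied in the FieldDescriptor
// constructor below: unless json_name is set explicitly in the .proto file, the field name is
// lower-camel-cased (underscores dropped, following character upper-cased). This is a
// simplified re-statement for illustration, not the runtime's JsonFormatter.ToCamelCase.
using System;
using System.Text;

static class JsonNameSketch
{
    static string ToCamelCase(string fieldName)
    {
        var result = new StringBuilder(fieldName.Length);
        bool upperNext = false;
        foreach (char c in fieldName)
        {
            if (c == '_') { upperNext = true; continue; }
            result.Append(upperNext ? char.ToUpperInvariant(c) : c);
            upperNext = false;
        }
        return result.ToString();
    }

    static void Main()
    {
        Console.WriteLine(ToCamelCase("source_file")); // sourceFile
        Console.WriteLine(ToCamelCase("begin"));       // begin
    }
}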
+ /// + private static FieldType GetFieldTypeFromProtoType(FieldDescriptorProto.Types.Type type) + { + switch (type) + { + case FieldDescriptorProto.Types.Type.Double: + return FieldType.Double; + case FieldDescriptorProto.Types.Type.Float: + return FieldType.Float; + case FieldDescriptorProto.Types.Type.Int64: + return FieldType.Int64; + case FieldDescriptorProto.Types.Type.Uint64: + return FieldType.UInt64; + case FieldDescriptorProto.Types.Type.Int32: + return FieldType.Int32; + case FieldDescriptorProto.Types.Type.Fixed64: + return FieldType.Fixed64; + case FieldDescriptorProto.Types.Type.Fixed32: + return FieldType.Fixed32; + case FieldDescriptorProto.Types.Type.Bool: + return FieldType.Bool; + case FieldDescriptorProto.Types.Type.String: + return FieldType.String; + case FieldDescriptorProto.Types.Type.Group: + return FieldType.Group; + case FieldDescriptorProto.Types.Type.Message: + return FieldType.Message; + case FieldDescriptorProto.Types.Type.Bytes: + return FieldType.Bytes; + case FieldDescriptorProto.Types.Type.Uint32: + return FieldType.UInt32; + case FieldDescriptorProto.Types.Type.Enum: + return FieldType.Enum; + case FieldDescriptorProto.Types.Type.Sfixed32: + return FieldType.SFixed32; + case FieldDescriptorProto.Types.Type.Sfixed64: + return FieldType.SFixed64; + case FieldDescriptorProto.Types.Type.Sint32: + return FieldType.SInt32; + case FieldDescriptorProto.Types.Type.Sint64: + return FieldType.SInt64; + default: + throw new ArgumentException("Invalid type specified"); + } + } + + /// + /// Returns true if this field is a repeated field; false otherwise. + /// + public bool IsRepeated => Proto.Label == FieldDescriptorProto.Types.Label.Repeated; + + /// + /// Returns true if this field is a map field; false otherwise. + /// + public bool IsMap => fieldType == FieldType.Message && messageType.Proto.Options != null && messageType.Proto.Options.MapEntry; + + /// + /// Returns true if this field is a packed, repeated field; false otherwise. + /// + public bool IsPacked => + // Note the || rather than && here - we're effectively defaulting to packed, because that *is* + // the default in proto3, which is all we support. We may give the wrong result for the protos + // within descriptor.proto, but that's okay, as they're never exposed and we don't use IsPacked + // within the runtime. + Proto.Options == null || Proto.Options.Packed; + + /// + /// Returns the type of the field. + /// + public FieldType FieldType => fieldType; + + /// + /// Returns the field number declared in the proto file. + /// + public int FieldNumber => Proto.Number; + + /// + /// Compares this descriptor with another one, ordering in "canonical" order + /// which simply means ascending order by field number. + /// must be a field of the same type, i.e. the of + /// both fields must be the same. + /// + public int CompareTo(FieldDescriptor other) + { + if (other.ContainingType != ContainingType) + { + throw new ArgumentException("FieldDescriptors can only be compared to other FieldDescriptors " + + "for fields of the same message type."); + } + return FieldNumber - other.FieldNumber; + } + + /// + /// For enum fields, returns the field's type. + /// + public EnumDescriptor EnumType + { + get + { + if (fieldType != FieldType.Enum) + { + throw new InvalidOperationException("EnumType is only valid for enum fields."); + } + return enumType; + } + } + + /// + /// For embedded message and group fields, returns the field's type. 
+ /// + public MessageDescriptor MessageType + { + get + { + if (fieldType != FieldType.Message) + { + throw new InvalidOperationException("MessageType is only valid for message fields."); + } + return messageType; + } + } + + /// + /// Look up and cross-link all field types etc. + /// + internal void CrossLink() + { + if (Proto.TypeName != "") + { + IDescriptor typeDescriptor = + File.DescriptorPool.LookupSymbol(Proto.TypeName, this); + + if (Proto.Type != 0) + { + // Choose field type based on symbol. + if (typeDescriptor is MessageDescriptor) + { + fieldType = FieldType.Message; + } + else if (typeDescriptor is EnumDescriptor) + { + fieldType = FieldType.Enum; + } + else + { + throw new DescriptorValidationException(this, $"\"{Proto.TypeName}\" is not a type."); + } + } + + if (fieldType == FieldType.Message) + { + if (!(typeDescriptor is MessageDescriptor)) + { + throw new DescriptorValidationException(this, $"\"{Proto.TypeName}\" is not a message type."); + } + messageType = (MessageDescriptor) typeDescriptor; + + if (Proto.DefaultValue != "") + { + throw new DescriptorValidationException(this, "Messages can't have default values."); + } + } + else if (fieldType == FieldType.Enum) + { + if (!(typeDescriptor is EnumDescriptor)) + { + throw new DescriptorValidationException(this, $"\"{Proto.TypeName}\" is not an enum type."); + } + enumType = (EnumDescriptor) typeDescriptor; + } + else + { + throw new DescriptorValidationException(this, "Field with primitive type has type_name."); + } + } + else + { + if (fieldType == FieldType.Message || fieldType == FieldType.Enum) + { + throw new DescriptorValidationException(this, "Field with message or enum type missing type_name."); + } + } + + // Note: no attempt to perform any default value parsing + + File.DescriptorPool.AddFieldByNumber(this); + + if (ContainingType != null && ContainingType.Proto.Options != null && ContainingType.Proto.Options.MessageSetWireFormat) + { + throw new DescriptorValidationException(this, "MessageSet format is not supported."); + } + accessor = CreateAccessor(); + } + + private IFieldAccessor CreateAccessor() + { + // If we're given no property name, that's because we really don't want an accessor. + // (At the moment, that means it's a map entry message...) + if (propertyName == null) + { + return null; + } + var property = ContainingType.ClrType.GetProperty(propertyName); + if (property == null) + { + throw new DescriptorValidationException(this, $"Property {propertyName} not found in {ContainingType.ClrType}"); + } + return IsMap ? new MapFieldAccessor(property, this) + : IsRepeated ? new RepeatedFieldAccessor(property, this) + : (IFieldAccessor) new SingleFieldAccessor(property, this); + } + } +} \ No newline at end of file diff --git a/packager/third_party/protobuf/csharp/src/Google.Protobuf/Reflection/FieldType.cs b/packager/third_party/protobuf/csharp/src/Google.Protobuf/Reflection/FieldType.cs new file mode 100644 index 0000000000..1658e34cd1 --- /dev/null +++ b/packager/third_party/protobuf/csharp/src/Google.Protobuf/Reflection/FieldType.cs @@ -0,0 +1,113 @@ +#region Copyright notice and license +// Protocol Buffers - Google's data interchange format +// Copyright 2008 Google Inc. All rights reserved. 
+// https://developers.google.com/protocol-buffers/ +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +#endregion + +namespace Google.Protobuf.Reflection +{ + /// + /// Enumeration of all the possible field types. + /// + public enum FieldType + { + /// + /// The double field type. + /// + Double, + /// + /// The float field type. + /// + Float, + /// + /// The int64 field type. + /// + Int64, + /// + /// The uint64 field type. + /// + UInt64, + /// + /// The int32 field type. + /// + Int32, + /// + /// The fixed64 field type. + /// + Fixed64, + /// + /// The fixed32 field type. + /// + Fixed32, + /// + /// The bool field type. + /// + Bool, + /// + /// The string field type. + /// + String, + /// + /// The field type used for groups (not supported in this implementation). + /// + Group, + /// + /// The field type used for message fields. + /// + Message, + /// + /// The bytes field type. + /// + Bytes, + /// + /// The uint32 field type. + /// + UInt32, + /// + /// The sfixed32 field type. + /// + SFixed32, + /// + /// The sfixed64 field type. + /// + SFixed64, + /// + /// The sint32 field type. + /// + SInt32, + /// + /// The sint64 field type. + /// + SInt64, + /// + /// The field type used for enum fields. + /// + Enum + } +} \ No newline at end of file diff --git a/packager/third_party/protobuf/csharp/src/Google.Protobuf/Reflection/FileDescriptor.cs b/packager/third_party/protobuf/csharp/src/Google.Protobuf/Reflection/FileDescriptor.cs new file mode 100644 index 0000000000..ab7cd92216 --- /dev/null +++ b/packager/third_party/protobuf/csharp/src/Google.Protobuf/Reflection/FileDescriptor.cs @@ -0,0 +1,344 @@ +#region Copyright notice and license +// Protocol Buffers - Google's data interchange format +// Copyright 2008 Google Inc. All rights reserved. 
+// https://developers.google.com/protocol-buffers/ +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +#endregion + +using System; +using System.Collections.Generic; +using System.Collections.ObjectModel; + +namespace Google.Protobuf.Reflection +{ + /// + /// Describes a .proto file, including everything defined within. + /// IDescriptor is implemented such that the File property returns this descriptor, + /// and the FullName is the same as the Name. + /// + public sealed class FileDescriptor : IDescriptor + { + private FileDescriptor(ByteString descriptorData, FileDescriptorProto proto, FileDescriptor[] dependencies, DescriptorPool pool, bool allowUnknownDependencies, GeneratedClrTypeInfo generatedCodeInfo) + { + SerializedData = descriptorData; + DescriptorPool = pool; + Proto = proto; + Dependencies = new ReadOnlyCollection((FileDescriptor[]) dependencies.Clone()); + + PublicDependencies = DeterminePublicDependencies(this, proto, dependencies, allowUnknownDependencies); + + pool.AddPackage(Package, this); + + MessageTypes = DescriptorUtil.ConvertAndMakeReadOnly(proto.MessageType, + (message, index) => + new MessageDescriptor(message, this, null, index, generatedCodeInfo.NestedTypes[index])); + + EnumTypes = DescriptorUtil.ConvertAndMakeReadOnly(proto.EnumType, + (enumType, index) => + new EnumDescriptor(enumType, this, null, index, generatedCodeInfo.NestedEnums[index])); + + Services = DescriptorUtil.ConvertAndMakeReadOnly(proto.Service, + (service, index) => + new ServiceDescriptor(service, this, index)); + } + + /// + /// Computes the full name of a descriptor within this file, with an optional parent message. + /// + internal string ComputeFullName(MessageDescriptor parent, string name) + { + if (parent != null) + { + return parent.FullName + "." + name; + } + if (Package.Length > 0) + { + return Package + "." + name; + } + return name; + } + + /// + /// Extracts public dependencies from direct dependencies. This is a static method despite its + /// first parameter, as the value we're in the middle of constructing is only used for exceptions. 
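// Illustrative, self-contained sketch of the naming rule in ComputeFullName below: a nested
// declaration is qualified by its parent message's full name, otherwise by the file's package
// (if any). The sample names are hypothetical.
using System;

static class FullNameSketch
{
    static string ComputeFullName(string parentFullName, string package, string name)
    {
        if (parentFullName != null)
        {
            return parentFullName + "." + name;
        }
        return package.Length > 0 ? package + "." + name : name;
    }

    static void Main()
    {
        Console.WriteLine(ComputeFullName(null, "foo.bar", "Message"));           // foo.bar.Message
        Console.WriteLine(ComputeFullName("foo.bar.Message", "foo.bar", "Kind")); // foo.bar.Message.Kind
        Console.WriteLine(ComputeFullName(null, "", "TopLevel"));                 // TopLevel
    }
}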
+ /// + private static IList DeterminePublicDependencies(FileDescriptor @this, FileDescriptorProto proto, FileDescriptor[] dependencies, bool allowUnknownDependencies) + { + var nameToFileMap = new Dictionary(); + foreach (var file in dependencies) + { + nameToFileMap[file.Name] = file; + } + var publicDependencies = new List(); + for (int i = 0; i < proto.PublicDependency.Count; i++) + { + int index = proto.PublicDependency[i]; + if (index < 0 || index >= proto.Dependency.Count) + { + throw new DescriptorValidationException(@this, "Invalid public dependency index."); + } + string name = proto.Dependency[index]; + FileDescriptor file = nameToFileMap[name]; + if (file == null) + { + if (!allowUnknownDependencies) + { + throw new DescriptorValidationException(@this, "Invalid public dependency: " + name); + } + // Ignore unknown dependencies. + } + else + { + publicDependencies.Add(file); + } + } + return new ReadOnlyCollection(publicDependencies); + } + + /// + /// The descriptor in its protocol message representation. + /// + internal FileDescriptorProto Proto { get; } + + /// + /// The file name. + /// + public string Name => Proto.Name; + + /// + /// The package as declared in the .proto file. This may or may not + /// be equivalent to the .NET namespace of the generated classes. + /// + public string Package => Proto.Package; + + /// + /// Unmodifiable list of top-level message types declared in this file. + /// + public IList MessageTypes { get; } + + /// + /// Unmodifiable list of top-level enum types declared in this file. + /// + public IList EnumTypes { get; } + + /// + /// Unmodifiable list of top-level services declared in this file. + /// + public IList Services { get; } + + /// + /// Unmodifiable list of this file's dependencies (imports). + /// + public IList Dependencies { get; } + + /// + /// Unmodifiable list of this file's public dependencies (public imports). + /// + public IList PublicDependencies { get; } + + /// + /// The original serialized binary form of this descriptor. + /// + public ByteString SerializedData { get; } + + /// + /// Implementation of IDescriptor.FullName - just returns the same as Name. + /// + string IDescriptor.FullName => Name; + + /// + /// Implementation of IDescriptor.File - just returns this descriptor. + /// + FileDescriptor IDescriptor.File => this; + + /// + /// Pool containing symbol descriptors. + /// + internal DescriptorPool DescriptorPool { get; } + + /// + /// Finds a type (message, enum, service or extension) in the file by name. Does not find nested types. + /// + /// The unqualified type name to look for. + /// The type of descriptor to look for + /// The type's descriptor, or null if not found. + public T FindTypeByName(String name) + where T : class, IDescriptor + { + // Don't allow looking up nested types. This will make optimization + // easier later. + if (name.IndexOf('.') != -1) + { + return null; + } + if (Package.Length > 0) + { + name = Package + "." + name; + } + T result = DescriptorPool.FindSymbol(name); + if (result != null && result.File == this) + { + return result; + } + return null; + } + + /// + /// Builds a FileDescriptor from its protocol buffer representation. + /// + /// The original serialized descriptor data. + /// We have only limited proto2 support, so serializing FileDescriptorProto + /// would not necessarily give us this. + /// The protocol message form of the FileDescriptor. + /// FileDescriptors corresponding to all of the + /// file's dependencies, in the exact order listed in the .proto file. 
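// A hedged usage sketch of FindTypeByName above, run against descriptor.proto's own file
// descriptor (the only descriptor guaranteed to exist without user-generated code; it is
// exposed a little further down as DescriptorProtoFileDescriptor). Only unqualified,
// top-level names are found; anything containing '.' returns null by design.
using System;
using Google.Protobuf.Reflection;

static class FindTypeByNameSketch
{
    static void Main()
    {
        FileDescriptor file = FileDescriptor.DescriptorProtoFileDescriptor;

        var message = file.FindTypeByName<MessageDescriptor>("DescriptorProto");
        Console.WriteLine(message.FullName); // google.protobuf.DescriptorProto (package-qualified)

        // Nested or qualified names are rejected up front.
        Console.WriteLine(file.FindTypeByName<MessageDescriptor>("DescriptorProto.ExtensionRange") == null); // True
        Console.WriteLine(file.FindTypeByName<EnumDescriptor>("NoSuchEnum") == null);                        // True
    }
}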
May be null, + /// in which case it is treated as an empty array. + /// Whether unknown dependencies are ignored (true) or cause an exception to be thrown (false). + /// Details about generated code, for the purposes of reflection. + /// If is not + /// a valid descriptor. This can occur for a number of reasons, such as a field + /// having an undefined type or because two messages were defined with the same name. + private static FileDescriptor BuildFrom(ByteString descriptorData, FileDescriptorProto proto, FileDescriptor[] dependencies, bool allowUnknownDependencies, GeneratedClrTypeInfo generatedCodeInfo) + { + // Building descriptors involves two steps: translating and linking. + // In the translation step (implemented by FileDescriptor's + // constructor), we build an object tree mirroring the + // FileDescriptorProto's tree and put all of the descriptors into the + // DescriptorPool's lookup tables. In the linking step, we look up all + // type references in the DescriptorPool, so that, for example, a + // FieldDescriptor for an embedded message contains a pointer directly + // to the Descriptor for that message's type. We also detect undefined + // types in the linking step. + if (dependencies == null) + { + dependencies = new FileDescriptor[0]; + } + + DescriptorPool pool = new DescriptorPool(dependencies); + FileDescriptor result = new FileDescriptor(descriptorData, proto, dependencies, pool, allowUnknownDependencies, generatedCodeInfo); + + // Validate that the dependencies we've been passed (as FileDescriptors) are actually the ones we + // need. + if (dependencies.Length != proto.Dependency.Count) + { + throw new DescriptorValidationException( + result, + "Dependencies passed to FileDescriptor.BuildFrom() don't match " + + "those listed in the FileDescriptorProto."); + } + for (int i = 0; i < proto.Dependency.Count; i++) + { + if (dependencies[i].Name != proto.Dependency[i]) + { + throw new DescriptorValidationException( + result, + "Dependencies passed to FileDescriptor.BuildFrom() don't match " + + "those listed in the FileDescriptorProto. Expected: " + + proto.Dependency[i] + " but was: " + dependencies[i].Name); + } + } + + result.CrossLink(); + return result; + } + + private void CrossLink() + { + foreach (MessageDescriptor message in MessageTypes) + { + message.CrossLink(); + } + + foreach (ServiceDescriptor service in Services) + { + service.CrossLink(); + } + } + + /// + /// Creates a descriptor for generated code. + /// + /// + /// This method is only designed to be used by the results of generating code with protoc, + /// which creates the appropriate dependencies etc. It has to be public because the generated + /// code is "external", but should not be called directly by end users. + /// + public static FileDescriptor FromGeneratedCode( + byte[] descriptorData, + FileDescriptor[] dependencies, + GeneratedClrTypeInfo generatedCodeInfo) + { + FileDescriptorProto proto; + try + { + proto = FileDescriptorProto.Parser.ParseFrom(descriptorData); + } + catch (InvalidProtocolBufferException e) + { + throw new ArgumentException("Failed to parse protocol buffer descriptor for generated code.", e); + } + + try + { + // When building descriptors for generated code, we allow unknown + // dependencies by default. 
+ return BuildFrom(ByteString.CopyFrom(descriptorData), proto, dependencies, true, generatedCodeInfo); + } + catch (DescriptorValidationException e) + { + throw new ArgumentException($"Invalid embedded descriptor for \"{proto.Name}\".", e); + } + } + + /// + /// Returns a that represents this instance. + /// + /// + /// A that represents this instance. + /// + public override string ToString() + { + return $"FileDescriptor for {Name}"; + } + + /// + /// Returns the file descriptor for descriptor.proto. + /// + /// + /// This is used for protos which take a direct dependency on descriptor.proto, typically for + /// annotations. While descriptor.proto is a proto2 file, it is built into the Google.Protobuf + /// runtime for reflection purposes. The messages are internal to the runtime as they would require + /// proto2 semantics for full support, but the file descriptor is available via this property. The + /// C# codegen in protoc automatically uses this property when it detects a dependency on descriptor.proto. + /// + /// + /// The file descriptor for descriptor.proto. + /// + public static FileDescriptor DescriptorProtoFileDescriptor { get { return DescriptorReflection.Descriptor; } } + } +} \ No newline at end of file diff --git a/packager/third_party/protobuf/csharp/src/Google.Protobuf/Reflection/GeneratedClrTypeInfo.cs b/packager/third_party/protobuf/csharp/src/Google.Protobuf/Reflection/GeneratedClrTypeInfo.cs new file mode 100644 index 0000000000..fe5db65656 --- /dev/null +++ b/packager/third_party/protobuf/csharp/src/Google.Protobuf/Reflection/GeneratedClrTypeInfo.cs @@ -0,0 +1,103 @@ +#region Copyright notice and license +// Protocol Buffers - Google's data interchange format +// Copyright 2015 Google Inc. All rights reserved. +// https://developers.google.com/protocol-buffers/ +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +#endregion +using System; + +namespace Google.Protobuf.Reflection +{ + /// + /// Extra information provided by generated code when initializing a message or file descriptor. 
+ /// These are constructed as required, and are not long-lived. Hand-written code should + /// never need to use this type. + /// + public sealed class GeneratedClrTypeInfo + { + private static readonly string[] EmptyNames = new string[0]; + private static readonly GeneratedClrTypeInfo[] EmptyCodeInfo = new GeneratedClrTypeInfo[0]; + + /// + /// Irrelevant for file descriptors; the CLR type for the message for message descriptors. + /// + public Type ClrType { get; private set; } + + /// + /// Irrelevant for file descriptors; the parser for message descriptors. + /// + public MessageParser Parser { get; } + + /// + /// Irrelevant for file descriptors; the CLR property names (in message descriptor field order) + /// for fields in the message for message descriptors. + /// + public string[] PropertyNames { get; } + + /// + /// Irrelevant for file descriptors; the CLR property "base" names (in message descriptor oneof order) + /// for oneofs in the message for message descriptors. It is expected that for a oneof name of "Foo", + /// there will be a "FooCase" property and a "ClearFoo" method. + /// + public string[] OneofNames { get; } + + /// + /// The reflection information for types within this file/message descriptor. Elements may be null + /// if there is no corresponding generated type, e.g. for map entry types. + /// + public GeneratedClrTypeInfo[] NestedTypes { get; } + + /// + /// The CLR types for enums within this file/message descriptor. + /// + public Type[] NestedEnums { get; } + + /// + /// Creates a GeneratedClrTypeInfo for a message descriptor, with nested types, nested enums, the CLR type, property names and oneof names. + /// Each array parameter may be null, to indicate a lack of values. + /// The parameter order is designed to make it feasible to format the generated code readably. + /// + public GeneratedClrTypeInfo(Type clrType, MessageParser parser, string[] propertyNames, string[] oneofNames, Type[] nestedEnums, GeneratedClrTypeInfo[] nestedTypes) + { + NestedTypes = nestedTypes ?? EmptyCodeInfo; + NestedEnums = nestedEnums ?? ReflectionUtil.EmptyTypes; + ClrType = clrType; + Parser = parser; + PropertyNames = propertyNames ?? EmptyNames; + OneofNames = oneofNames ?? EmptyNames; + } + + /// + /// Creates a GeneratedClrTypeInfo for a file descriptor, with only types and enums. + /// + public GeneratedClrTypeInfo(Type[] nestedEnums, GeneratedClrTypeInfo[] nestedTypes) + : this(null, null, null, null, nestedEnums, nestedTypes) + { + } + } +} \ No newline at end of file diff --git a/packager/third_party/protobuf/csharp/src/Google.Protobuf/Reflection/IDescriptor.cs b/packager/third_party/protobuf/csharp/src/Google.Protobuf/Reflection/IDescriptor.cs new file mode 100644 index 0000000000..318d58c968 --- /dev/null +++ b/packager/third_party/protobuf/csharp/src/Google.Protobuf/Reflection/IDescriptor.cs @@ -0,0 +1,55 @@ +#region Copyright notice and license +// Protocol Buffers - Google's data interchange format +// Copyright 2008 Google Inc. All rights reserved. +// https://developers.google.com/protocol-buffers/ +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. 
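// A minimal sketch (not part of this file): the parameter order of the two GeneratedClrTypeInfo
// constructors above, using the well-known Duration message as a stand-in for "some generated type".
// Real protoc-generated code builds these objects itself; hand-written code never needs to.
using Google.Protobuf.Reflection;
using Google.Protobuf.WellKnownTypes;

internal static class GeneratedClrTypeInfoExample
{
    internal static GeneratedClrTypeInfo BuildFileInfo()
    {
        // Message-level info: CLR type, parser, property names (field declaration order),
        // oneof names, nested enums, nested message infos. Null arrays are treated as empty.
        var durationInfo = new GeneratedClrTypeInfo(
            typeof(Duration), Duration.Parser,
            new[] { "Seconds", "Nanos" },
            null, null, null);

        // File-level info: only nested enums and nested message infos are relevant.
        return new GeneratedClrTypeInfo(null, new[] { durationInfo });
    }
}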
+// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +#endregion + +namespace Google.Protobuf.Reflection +{ + /// + /// Interface implemented by all descriptor types. + /// + public interface IDescriptor + { + /// + /// Returns the name of the entity (message, field etc) being described. + /// + string Name { get; } + + /// + /// Returns the fully-qualified name of the entity being described. + /// + string FullName { get; } + + /// + /// Returns the descriptor for the .proto file that this entity is part of. + /// + FileDescriptor File { get; } + } +} \ No newline at end of file diff --git a/packager/third_party/protobuf/csharp/src/Google.Protobuf/Reflection/IFieldAccessor.cs b/packager/third_party/protobuf/csharp/src/Google.Protobuf/Reflection/IFieldAccessor.cs new file mode 100644 index 0000000000..cfe56fde67 --- /dev/null +++ b/packager/third_party/protobuf/csharp/src/Google.Protobuf/Reflection/IFieldAccessor.cs @@ -0,0 +1,71 @@ +#region Copyright notice and license +// Protocol Buffers - Google's data interchange format +// Copyright 2008 Google Inc. All rights reserved. +// https://developers.google.com/protocol-buffers/ +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +#endregion + +using System; +using System.Collections; + +namespace Google.Protobuf.Reflection +{ + /// + /// Allows fields to be reflectively accessed. + /// + public interface IFieldAccessor + { + /// + /// Returns the descriptor associated with this field. + /// + FieldDescriptor Descriptor { get; } + + /// + /// Clears the field in the specified message. (For repeated fields, + /// this clears the list.) + /// + void Clear(IMessage message); + + /// + /// Fetches the field value. For repeated values, this will be an + /// implementation. For map values, this will be an + /// implementation. + /// + object GetValue(IMessage message); + + /// + /// Mutator for single "simple" fields only. + /// + /// + /// Repeated fields are mutated by fetching the value and manipulating it as a list. + /// Map fields are mutated by fetching the value and manipulating it as a dictionary. + /// + /// The field is not a "simple" field. + void SetValue(IMessage message, object value); + } +} \ No newline at end of file diff --git a/packager/third_party/protobuf/csharp/src/Google.Protobuf/Reflection/MapFieldAccessor.cs b/packager/third_party/protobuf/csharp/src/Google.Protobuf/Reflection/MapFieldAccessor.cs new file mode 100644 index 0000000000..9ed7f8c4df --- /dev/null +++ b/packager/third_party/protobuf/csharp/src/Google.Protobuf/Reflection/MapFieldAccessor.cs @@ -0,0 +1,59 @@ +#region Copyright notice and license +// Protocol Buffers - Google's data interchange format +// Copyright 2015 Google Inc. All rights reserved. +// https://developers.google.com/protocol-buffers/ +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. 
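// A minimal sketch (not part of this file): the IFieldAccessor contract above, driven through the
// Accessor property that FieldDescriptor exposes elsewhere in this library. Duration is used only
// as a convenient generated message; any IMessage behaves the same way.
using Google.Protobuf.Reflection;
using Google.Protobuf.WellKnownTypes;

public static class FieldAccessorExample
{
    public static void Run()
    {
        var message = new Duration { Seconds = 30, Nanos = 500 };
        FieldDescriptor seconds = Duration.Descriptor.FindFieldByName("seconds");
        IFieldAccessor accessor = seconds.Accessor;

        object value = accessor.GetValue(message);   // boxed long: 30
        accessor.SetValue(message, 60L);             // allowed: "seconds" is a simple singular field
        accessor.Clear(message);                     // resets the field to its default (0)
        System.Console.WriteLine(value);
    }
}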
IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +#endregion + +using System; +using System.Collections; +using System.Reflection; + +namespace Google.Protobuf.Reflection +{ + /// + /// Accessor for map fields. + /// + internal sealed class MapFieldAccessor : FieldAccessorBase + { + internal MapFieldAccessor(PropertyInfo property, FieldDescriptor descriptor) : base(property, descriptor) + { + } + + public override void Clear(IMessage message) + { + IDictionary list = (IDictionary) GetValue(message); + list.Clear(); + } + + public override void SetValue(IMessage message, object value) + { + throw new InvalidOperationException("SetValue is not implemented for map fields"); + } + } +} diff --git a/packager/third_party/protobuf/csharp/src/Google.Protobuf/Reflection/MessageDescriptor.cs b/packager/third_party/protobuf/csharp/src/Google.Protobuf/Reflection/MessageDescriptor.cs new file mode 100644 index 0000000000..f5a835e5cc --- /dev/null +++ b/packager/third_party/protobuf/csharp/src/Google.Protobuf/Reflection/MessageDescriptor.cs @@ -0,0 +1,317 @@ +#region Copyright notice and license +// Protocol Buffers - Google's data interchange format +// Copyright 2008 Google Inc. All rights reserved. +// https://developers.google.com/protocol-buffers/ +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +#endregion + +using System; +using System.Collections.Generic; +using System.Collections.ObjectModel; +using System.Linq; + +namespace Google.Protobuf.Reflection +{ + /// + /// Describes a message type. 
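// A minimal sketch (not part of this file): the map accessor above from the caller's side.
// Struct, a well-known type with a map<string, Value> field named "fields", is used purely for
// illustration; the Accessor property on FieldDescriptor comes from elsewhere in this library.
using System.Collections;
using Google.Protobuf.Reflection;
using Google.Protobuf.WellKnownTypes;

public static class MapAccessorExample
{
    public static void Run()
    {
        var message = new Struct();
        message.Fields["answer"] = new Value { NumberValue = 42 };

        IFieldAccessor accessor = Struct.Descriptor.FindFieldByName("fields").Accessor;

        // Map fields are read as IDictionary and mutated through that view...
        IDictionary map = (IDictionary) accessor.GetValue(message);
        System.Console.WriteLine(map.Count);               // 1

        // ...or cleared wholesale; SetValue on a map field throws InvalidOperationException.
        accessor.Clear(message);
        System.Console.WriteLine(message.Fields.Count);    // 0
    }
}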
+ /// + public sealed class MessageDescriptor : DescriptorBase + { + private static readonly HashSet WellKnownTypeNames = new HashSet + { + "google/protobuf/any.proto", + "google/protobuf/api.proto", + "google/protobuf/duration.proto", + "google/protobuf/empty.proto", + "google/protobuf/wrappers.proto", + "google/protobuf/timestamp.proto", + "google/protobuf/field_mask.proto", + "google/protobuf/source_context.proto", + "google/protobuf/struct.proto", + "google/protobuf/type.proto", + }; + + private readonly IList fieldsInDeclarationOrder; + private readonly IList fieldsInNumberOrder; + private readonly IDictionary jsonFieldMap; + + internal MessageDescriptor(DescriptorProto proto, FileDescriptor file, MessageDescriptor parent, int typeIndex, GeneratedClrTypeInfo generatedCodeInfo) + : base(file, file.ComputeFullName(parent, proto.Name), typeIndex) + { + Proto = proto; + Parser = generatedCodeInfo?.Parser; + ClrType = generatedCodeInfo?.ClrType; + ContainingType = parent; + + // Note use of generatedCodeInfo. rather than generatedCodeInfo?. here... we don't expect + // to see any nested oneofs, types or enums in "not actually generated" code... we do + // expect fields though (for map entry messages). + Oneofs = DescriptorUtil.ConvertAndMakeReadOnly( + proto.OneofDecl, + (oneof, index) => + new OneofDescriptor(oneof, file, this, index, generatedCodeInfo.OneofNames[index])); + + NestedTypes = DescriptorUtil.ConvertAndMakeReadOnly( + proto.NestedType, + (type, index) => + new MessageDescriptor(type, file, this, index, generatedCodeInfo.NestedTypes[index])); + + EnumTypes = DescriptorUtil.ConvertAndMakeReadOnly( + proto.EnumType, + (type, index) => + new EnumDescriptor(type, file, this, index, generatedCodeInfo.NestedEnums[index])); + + fieldsInDeclarationOrder = DescriptorUtil.ConvertAndMakeReadOnly( + proto.Field, + (field, index) => + new FieldDescriptor(field, file, this, index, generatedCodeInfo?.PropertyNames[index])); + fieldsInNumberOrder = new ReadOnlyCollection(fieldsInDeclarationOrder.OrderBy(field => field.FieldNumber).ToArray()); + // TODO: Use field => field.Proto.JsonName when we're confident it's appropriate. (And then use it in the formatter, too.) + jsonFieldMap = CreateJsonFieldMap(fieldsInNumberOrder); + file.DescriptorPool.AddSymbol(this); + Fields = new FieldCollection(this); + } + + private static ReadOnlyDictionary CreateJsonFieldMap(IList fields) + { + var map = new Dictionary(); + foreach (var field in fields) + { + map[field.Name] = field; + map[field.JsonName] = field; + } + return new ReadOnlyDictionary(map); + } + + /// + /// The brief name of the descriptor's target. + /// + public override string Name => Proto.Name; + + internal DescriptorProto Proto { get; } + + /// + /// The CLR type used to represent message instances from this descriptor. + /// + /// + /// + /// The value returned by this property will be non-null for all regular fields. However, + /// if a message containing a map field is introspected, the list of nested messages will include + /// an auto-generated nested key/value pair message for the field. This is not represented in any + /// generated type, so this property will return null in such cases. + /// + /// + /// For wrapper types ( and the like), the type returned here + /// will be the generated message type, not the native type used by reflection for fields of those types. Code + /// using reflection should call to determine whether a message descriptor represents + /// a wrapper type, and handle the result appropriately. 
+ /// + /// + public Type ClrType { get; } + + /// + /// A parser for this message type. + /// + /// + /// + /// As is not generic, this cannot be statically + /// typed to the relevant type, but it should produce objects of a type compatible with . + /// + /// + /// The value returned by this property will be non-null for all regular fields. However, + /// if a message containing a map field is introspected, the list of nested messages will include + /// an auto-generated nested key/value pair message for the field. No message parser object is created for + /// such messages, so this property will return null in such cases. + /// + /// + /// For wrapper types ( and the like), the parser returned here + /// will be the generated message type, not the native type used by reflection for fields of those types. Code + /// using reflection should call to determine whether a message descriptor represents + /// a wrapper type, and handle the result appropriately. + /// + /// + public MessageParser Parser { get; } + + /// + /// Returns whether this message is one of the "well known types" which may have runtime/protoc support. + /// + internal bool IsWellKnownType => File.Package == "google.protobuf" && WellKnownTypeNames.Contains(File.Name); + + /// + /// Returns whether this message is one of the "wrapper types" used for fields which represent primitive values + /// with the addition of presence. + /// + internal bool IsWrapperType => File.Package == "google.protobuf" && File.Name == "google/protobuf/wrappers.proto"; + + /// + /// If this is a nested type, get the outer descriptor, otherwise null. + /// + public MessageDescriptor ContainingType { get; } + + /// + /// A collection of fields, which can be retrieved by name or field number. + /// + public FieldCollection Fields { get; } + + /// + /// An unmodifiable list of this message type's nested types. + /// + public IList NestedTypes { get; } + + /// + /// An unmodifiable list of this message type's enum types. + /// + public IList EnumTypes { get; } + + /// + /// An unmodifiable list of the "oneof" field collections in this message type. + /// + public IList Oneofs { get; } + + /// + /// Finds a field by field name. + /// + /// The unqualified name of the field (e.g. "foo"). + /// The field's descriptor, or null if not found. + public FieldDescriptor FindFieldByName(String name) => File.DescriptorPool.FindSymbol(FullName + "." + name); + + /// + /// Finds a field by field number. + /// + /// The field number within this message type. + /// The field's descriptor, or null if not found. + public FieldDescriptor FindFieldByNumber(int number) => File.DescriptorPool.FindFieldByNumber(this, number); + + /// + /// Finds a nested descriptor by name. The is valid for fields, nested + /// message types, oneofs and enums. + /// + /// The unqualified name of the descriptor, e.g. "Foo" + /// The descriptor, or null if not found. + public T FindDescriptor(string name) where T : class, IDescriptor => + File.DescriptorPool.FindSymbol(FullName + "." + name); + + /// + /// Looks up and cross-links all fields and nested types. + /// + internal void CrossLink() + { + foreach (MessageDescriptor message in NestedTypes) + { + message.CrossLink(); + } + + foreach (FieldDescriptor field in fieldsInDeclarationOrder) + { + field.CrossLink(); + } + + foreach (OneofDescriptor oneof in Oneofs) + { + oneof.CrossLink(); + } + } + + /// + /// A collection to simplify retrieving the field accessor for a particular field. 
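// A minimal sketch (not part of this file): the lookup members above (FindFieldByName,
// FindFieldByNumber, FindDescriptor<T>), exercised against the well-known Timestamp message,
// which is chosen only because it ships with the runtime.
using Google.Protobuf.Reflection;
using Google.Protobuf.WellKnownTypes;

public static class MessageDescriptorExample
{
    public static void Run()
    {
        MessageDescriptor descriptor = Timestamp.Descriptor;

        FieldDescriptor byName = descriptor.FindFieldByName("seconds");
        FieldDescriptor byNumber = descriptor.FindFieldByNumber(2);            // "nanos"
        System.Console.WriteLine($"{byName.Name}, {byNumber.Name}");

        // FindDescriptor<T> resolves any nested element (field, nested type, enum, oneof) by name.
        FieldDescriptor viaGeneric = descriptor.FindDescriptor<FieldDescriptor>("seconds");
        System.Console.WriteLine(viaGeneric.FieldNumber);                      // 1
    }
}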
+ /// + public sealed class FieldCollection + { + private readonly MessageDescriptor messageDescriptor; + + internal FieldCollection(MessageDescriptor messageDescriptor) + { + this.messageDescriptor = messageDescriptor; + } + + /// + /// Returns the fields in the message as an immutable list, in the order in which they + /// are declared in the source .proto file. + /// + public IList InDeclarationOrder() => messageDescriptor.fieldsInDeclarationOrder; + + /// + /// Returns the fields in the message as an immutable list, in ascending field number + /// order. Field numbers need not be contiguous, so there is no direct mapping from the + /// index in the list to the field number; to retrieve a field by field number, it is better + /// to use the indexer. + /// + public IList InFieldNumberOrder() => messageDescriptor.fieldsInNumberOrder; + + // TODO: consider making this public in the future. (Being conservative for now...) + + /// + /// Returns a read-only dictionary mapping the field names in this message as they're available + /// in the JSON representation to the field descriptors. For example, a field foo_bar + /// in the message would result two entries, one with a key fooBar and one with a key + /// foo_bar, both referring to the same field. + /// + internal IDictionary ByJsonName() => messageDescriptor.jsonFieldMap; + + /// + /// Retrieves the descriptor for the field with the given number. + /// + /// Number of the field to retrieve the descriptor for + /// The accessor for the given field + /// The message descriptor does not contain a field + /// with the given number + public FieldDescriptor this[int number] + { + get + { + var fieldDescriptor = messageDescriptor.FindFieldByNumber(number); + if (fieldDescriptor == null) + { + throw new KeyNotFoundException("No such field number"); + } + return fieldDescriptor; + } + } + + /// + /// Retrieves the descriptor for the field with the given name. + /// + /// Name of the field to retrieve the descriptor for + /// The descriptor for the given field + /// The message descriptor does not contain a field + /// with the given name + public FieldDescriptor this[string name] + { + get + { + var fieldDescriptor = messageDescriptor.FindFieldByName(name); + if (fieldDescriptor == null) + { + throw new KeyNotFoundException("No such field name"); + } + return fieldDescriptor; + } + } + } + } +} diff --git a/packager/third_party/protobuf/csharp/src/Google.Protobuf/Reflection/MethodDescriptor.cs b/packager/third_party/protobuf/csharp/src/Google.Protobuf/Reflection/MethodDescriptor.cs new file mode 100644 index 0000000000..f9539f6cbd --- /dev/null +++ b/packager/third_party/protobuf/csharp/src/Google.Protobuf/Reflection/MethodDescriptor.cs @@ -0,0 +1,103 @@ +#region Copyright notice and license +// Protocol Buffers - Google's data interchange format +// Copyright 2008 Google Inc. All rights reserved. +// https://developers.google.com/protocol-buffers/ +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. 
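// A minimal sketch (not part of this file): the FieldCollection above from the caller's side,
// again using the well-known Timestamp message purely as a convenient generated type.
using Google.Protobuf.Reflection;
using Google.Protobuf.WellKnownTypes;

public static class FieldCollectionExample
{
    public static void Run()
    {
        var fields = Timestamp.Descriptor.Fields;

        // Declaration order and field-number order are exposed separately; they may differ in general.
        foreach (FieldDescriptor field in fields.InFieldNumberOrder())
        {
            System.Console.WriteLine($"{field.FieldNumber}: {field.Name}");
        }

        // The indexers look up by field number or by name, throwing KeyNotFoundException if absent.
        FieldDescriptor seconds = fields[1];
        FieldDescriptor nanos = fields["nanos"];
        System.Console.WriteLine($"{seconds.Name}, {nanos.Name}");
    }
}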
nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +#endregion + +namespace Google.Protobuf.Reflection +{ + /// + /// Describes a single method in a service. + /// + public sealed class MethodDescriptor : DescriptorBase + { + private readonly MethodDescriptorProto proto; + private readonly ServiceDescriptor service; + private MessageDescriptor inputType; + private MessageDescriptor outputType; + + /// + /// The service this method belongs to. + /// + public ServiceDescriptor Service { get { return service; } } + + /// + /// The method's input type. + /// + public MessageDescriptor InputType { get { return inputType; } } + + /// + /// The method's input type. + /// + public MessageDescriptor OutputType { get { return outputType; } } + + /// + /// Indicates if client streams multiple requests. + /// + public bool IsClientStreaming { get { return proto.ClientStreaming; } } + + /// + /// Indicates if server streams multiple responses. + /// + public bool IsServerStreaming { get { return proto.ServerStreaming; } } + + internal MethodDescriptor(MethodDescriptorProto proto, FileDescriptor file, + ServiceDescriptor parent, int index) + : base(file, parent.FullName + "." + proto.Name, index) + { + this.proto = proto; + service = parent; + file.DescriptorPool.AddSymbol(this); + } + + internal MethodDescriptorProto Proto { get { return proto; } } + + /// + /// The brief name of the descriptor's target. + /// + public override string Name { get { return proto.Name; } } + + internal void CrossLink() + { + IDescriptor lookup = File.DescriptorPool.LookupSymbol(Proto.InputType, this); + if (!(lookup is MessageDescriptor)) + { + throw new DescriptorValidationException(this, "\"" + Proto.InputType + "\" is not a message type."); + } + inputType = (MessageDescriptor) lookup; + + lookup = File.DescriptorPool.LookupSymbol(Proto.OutputType, this); + if (!(lookup is MessageDescriptor)) + { + throw new DescriptorValidationException(this, "\"" + Proto.OutputType + "\" is not a message type."); + } + outputType = (MessageDescriptor) lookup; + } + } +} \ No newline at end of file diff --git a/packager/third_party/protobuf/csharp/src/Google.Protobuf/Reflection/OneofAccessor.cs b/packager/third_party/protobuf/csharp/src/Google.Protobuf/Reflection/OneofAccessor.cs new file mode 100644 index 0000000000..8714ab18ef --- /dev/null +++ b/packager/third_party/protobuf/csharp/src/Google.Protobuf/Reflection/OneofAccessor.cs @@ -0,0 +1,90 @@ +#region Copyright notice and license +// Protocol Buffers - Google's data interchange format +// Copyright 2015 Google Inc. All rights reserved. 
+// https://developers.google.com/protocol-buffers/ +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +#endregion + +using System; +using System.Reflection; +using Google.Protobuf.Compatibility; + +namespace Google.Protobuf.Reflection +{ + /// + /// Reflection access for a oneof, allowing clear and "get case" actions. + /// + public sealed class OneofAccessor + { + private readonly Func caseDelegate; + private readonly Action clearDelegate; + private OneofDescriptor descriptor; + + internal OneofAccessor(PropertyInfo caseProperty, MethodInfo clearMethod, OneofDescriptor descriptor) + { + if (!caseProperty.CanRead) + { + throw new ArgumentException("Cannot read from property"); + } + this.descriptor = descriptor; + caseDelegate = ReflectionUtil.CreateFuncIMessageT(caseProperty.GetGetMethod()); + + this.descriptor = descriptor; + clearDelegate = ReflectionUtil.CreateActionIMessage(clearMethod); + } + + /// + /// Gets the descriptor for this oneof. + /// + /// + /// The descriptor of the oneof. + /// + public OneofDescriptor Descriptor { get { return descriptor; } } + + /// + /// Clears the oneof in the specified message. + /// + public void Clear(IMessage message) + { + clearDelegate(message); + } + + /// + /// Indicates which field in the oneof is set for specified message + /// + public FieldDescriptor GetCaseFieldDescriptor(IMessage message) + { + int fieldNumber = caseDelegate(message); + if (fieldNumber > 0) + { + return descriptor.ContainingType.FindFieldByNumber(fieldNumber); + } + return null; + } + } +} diff --git a/packager/third_party/protobuf/csharp/src/Google.Protobuf/Reflection/OneofDescriptor.cs b/packager/third_party/protobuf/csharp/src/Google.Protobuf/Reflection/OneofDescriptor.cs new file mode 100644 index 0000000000..22020acf06 --- /dev/null +++ b/packager/third_party/protobuf/csharp/src/Google.Protobuf/Reflection/OneofDescriptor.cs @@ -0,0 +1,122 @@ +#region Copyright notice and license +// Protocol Buffers - Google's data interchange format +// Copyright 2015 Google Inc. All rights reserved. 
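// A minimal sketch (not part of this file): the OneofAccessor above in use. Value is the
// well-known Struct helper message whose single oneof ("kind") holds its possible payloads;
// the Accessor property on OneofDescriptor is defined in the next file of this change.
using Google.Protobuf.Reflection;
using Google.Protobuf.WellKnownTypes;

public static class OneofAccessorExample
{
    public static void Run()
    {
        var value = new Value { StringValue = "hello" };

        OneofDescriptor kind = Value.Descriptor.Oneofs[0];
        OneofAccessor accessor = kind.Accessor;

        FieldDescriptor activeField = accessor.GetCaseFieldDescriptor(value);
        System.Console.WriteLine(activeField.Name);                               // "string_value"

        accessor.Clear(value);
        System.Console.WriteLine(accessor.GetCaseFieldDescriptor(value) == null); // True: nothing is set
    }
}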
+// https://developers.google.com/protocol-buffers/ +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +#endregion + +using System.Collections.Generic; +using System.Collections.ObjectModel; +using Google.Protobuf.Compatibility; + +namespace Google.Protobuf.Reflection +{ + /// + /// Describes a "oneof" field collection in a message type: a set of + /// fields of which at most one can be set in any particular message. + /// + public sealed class OneofDescriptor : DescriptorBase + { + private readonly OneofDescriptorProto proto; + private MessageDescriptor containingType; + private IList fields; + private readonly OneofAccessor accessor; + + internal OneofDescriptor(OneofDescriptorProto proto, FileDescriptor file, MessageDescriptor parent, int index, string clrName) + : base(file, file.ComputeFullName(parent, proto.Name), index) + { + this.proto = proto; + containingType = parent; + + file.DescriptorPool.AddSymbol(this); + accessor = CreateAccessor(clrName); + } + + /// + /// The brief name of the descriptor's target. + /// + public override string Name { get { return proto.Name; } } + + /// + /// Gets the message type containing this oneof. + /// + /// + /// The message type containing this oneof. + /// + public MessageDescriptor ContainingType + { + get { return containingType; } + } + + /// + /// Gets the fields within this oneof, in declaration order. + /// + /// + /// The fields within this oneof, in declaration order. + /// + public IList Fields { get { return fields; } } + + /// + /// Gets an accessor for reflective access to the values associated with the oneof + /// in a particular message. + /// + /// + /// The accessor used for reflective access. 
+ /// + public OneofAccessor Accessor { get { return accessor; } } + + internal void CrossLink() + { + List fieldCollection = new List(); + foreach (var field in ContainingType.Fields.InDeclarationOrder()) + { + if (field.ContainingOneof == this) + { + fieldCollection.Add(field); + } + } + fields = new ReadOnlyCollection(fieldCollection); + } + + private OneofAccessor CreateAccessor(string clrName) + { + var caseProperty = containingType.ClrType.GetProperty(clrName + "Case"); + if (caseProperty == null) + { + throw new DescriptorValidationException(this, $"Property {clrName}Case not found in {containingType.ClrType}"); + } + var clearMethod = containingType.ClrType.GetMethod("Clear" + clrName); + if (clearMethod == null) + { + throw new DescriptorValidationException(this, $"Method Clear{clrName} not found in {containingType.ClrType}"); + } + + return new OneofAccessor(caseProperty, clearMethod, this); + } + } +} diff --git a/packager/third_party/protobuf/csharp/src/Google.Protobuf/Reflection/OriginalNameAttribute.cs b/packager/third_party/protobuf/csharp/src/Google.Protobuf/Reflection/OriginalNameAttribute.cs new file mode 100644 index 0000000000..27f9ab98c3 --- /dev/null +++ b/packager/third_party/protobuf/csharp/src/Google.Protobuf/Reflection/OriginalNameAttribute.cs @@ -0,0 +1,58 @@ +#region Copyright notice and license +// Protocol Buffers - Google's data interchange format +// Copyright 2008 Google Inc. All rights reserved. +// https://developers.google.com/protocol-buffers/ +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +#endregion + +using System; + +namespace Google.Protobuf.Reflection +{ + /// + /// Specifies the original name (in the .proto file) of a named element, + /// such as an enum value. + /// + [AttributeUsage(AttributeTargets.Field)] + public class OriginalNameAttribute : Attribute + { + /// + /// The name of the element in the .proto file. + /// + public string Name { get; set; } + + /// + /// Constructs a new attribute instance for the given name. + /// + /// The name of the element in the .proto file. 
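// A minimal sketch (not part of this file): the naming convention CreateAccessor above relies on.
// For a oneof whose CLR name is "Result", the containing message must expose a "ResultCase"
// property returning the active field number (0 when unset) and a "ClearResult" method.
// SearchResponse below is hypothetical and heavily trimmed; it is not a complete IMessage.
public sealed partial class SearchResponse
{
    public enum ResultOneofCase
    {
        None = 0,
        Text = 1,
        Error = 2,
    }

    private object result_;
    private ResultOneofCase resultCase_ = ResultOneofCase.None;

    // Found by CreateAccessor as clrName + "Case"; read through the compiled case delegate.
    public ResultOneofCase ResultCase { get { return resultCase_; } }

    // Found by CreateAccessor as "Clear" + clrName; invoked by OneofAccessor.Clear.
    public void ClearResult()
    {
        result_ = null;
        resultCase_ = ResultOneofCase.None;
    }
}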
+ public OriginalNameAttribute(string name) + { + Name = ProtoPreconditions.CheckNotNull(name, nameof(name)); + } + } +} \ No newline at end of file diff --git a/packager/third_party/protobuf/csharp/src/Google.Protobuf/Reflection/PackageDescriptor.cs b/packager/third_party/protobuf/csharp/src/Google.Protobuf/Reflection/PackageDescriptor.cs new file mode 100644 index 0000000000..e547d83498 --- /dev/null +++ b/packager/third_party/protobuf/csharp/src/Google.Protobuf/Reflection/PackageDescriptor.cs @@ -0,0 +1,68 @@ +#region Copyright notice and license +// Protocol Buffers - Google's data interchange format +// Copyright 2008 Google Inc. All rights reserved. +// https://developers.google.com/protocol-buffers/ +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +#endregion + +namespace Google.Protobuf.Reflection +{ + /// + /// Represents a package in the symbol table. We use PackageDescriptors + /// just as placeholders so that someone cannot define, say, a message type + /// that has the same name as an existing package. + /// + internal sealed class PackageDescriptor : IDescriptor + { + private readonly string name; + private readonly string fullName; + private readonly FileDescriptor file; + + internal PackageDescriptor(string name, string fullName, FileDescriptor file) + { + this.file = file; + this.fullName = fullName; + this.name = name; + } + + public string Name + { + get { return name; } + } + + public string FullName + { + get { return fullName; } + } + + public FileDescriptor File + { + get { return file; } + } + } +} \ No newline at end of file diff --git a/packager/third_party/protobuf/csharp/src/Google.Protobuf/Reflection/PartialClasses.cs b/packager/third_party/protobuf/csharp/src/Google.Protobuf/Reflection/PartialClasses.cs new file mode 100644 index 0000000000..6c285410d4 --- /dev/null +++ b/packager/third_party/protobuf/csharp/src/Google.Protobuf/Reflection/PartialClasses.cs @@ -0,0 +1,59 @@ +#region Copyright notice and license +// Protocol Buffers - Google's data interchange format +// Copyright 2008 Google Inc. All rights reserved. 
+// https://developers.google.com/protocol-buffers/ +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +#endregion + +// This file just contains partial classes for any autogenerated classes that need additional support. +namespace Google.Protobuf.Reflection +{ + internal partial class FieldDescriptorProto + { + // We can't tell the difference between "explicitly set to 0" and "not set" + // in proto3, but we need to tell the difference for OneofIndex. descriptor.proto + // is really a proto2 file, but the runtime doesn't know about proto2 semantics... + // We fake it by defaulting to -1. + partial void OnConstruction() + { + OneofIndex = -1; + } + } + + internal partial class FieldOptions + { + // We can't tell the difference between "explicitly set to false" and "not set" + // in proto3, but we need to tell the difference for FieldDescriptor.IsPacked. + // This won't work if we ever need to support proto2, but at that point we'll be + // able to remove this hack and use field presence instead. + partial void OnConstruction() + { + Packed = true; + } + } +} \ No newline at end of file diff --git a/packager/third_party/protobuf/csharp/src/Google.Protobuf/Reflection/ReflectionUtil.cs b/packager/third_party/protobuf/csharp/src/Google.Protobuf/Reflection/ReflectionUtil.cs new file mode 100644 index 0000000000..df820ca36b --- /dev/null +++ b/packager/third_party/protobuf/csharp/src/Google.Protobuf/Reflection/ReflectionUtil.cs @@ -0,0 +1,107 @@ +#region Copyright notice and license +// Protocol Buffers - Google's data interchange format +// Copyright 2008 Google Inc. All rights reserved. +// https://developers.google.com/protocol-buffers/ +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. 
+// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +#endregion + +using System; +using System.Collections.Generic; +using System.Linq.Expressions; +using System.Reflection; + +namespace Google.Protobuf.Reflection +{ + /// + /// The methods in this class are somewhat evil, and should not be tampered with lightly. + /// Basically they allow the creation of relatively weakly typed delegates from MethodInfos + /// which are more strongly typed. They do this by creating an appropriate strongly typed + /// delegate from the MethodInfo, and then calling that within an anonymous method. + /// Mind-bending stuff (at least to your humble narrator) but the resulting delegates are + /// very fast compared with calling Invoke later on. + /// + internal static class ReflectionUtil + { + /// + /// Empty Type[] used when calling GetProperty to force property instead of indexer fetching. + /// + internal static readonly Type[] EmptyTypes = new Type[0]; + + /// + /// Creates a delegate which will cast the argument to the appropriate method target type, + /// call the method on it, then convert the result to object. + /// + internal static Func CreateFuncIMessageObject(MethodInfo method) + { + ParameterExpression parameter = Expression.Parameter(typeof(IMessage), "p"); + Expression downcast = Expression.Convert(parameter, method.DeclaringType); + Expression call = Expression.Call(downcast, method); + Expression upcast = Expression.Convert(call, typeof(object)); + return Expression.Lambda>(upcast, parameter).Compile(); + } + + /// + /// Creates a delegate which will cast the argument to the appropriate method target type, + /// call the method on it, then convert the result to the specified type. + /// + internal static Func CreateFuncIMessageT(MethodInfo method) + { + ParameterExpression parameter = Expression.Parameter(typeof(IMessage), "p"); + Expression downcast = Expression.Convert(parameter, method.DeclaringType); + Expression call = Expression.Call(downcast, method); + Expression upcast = Expression.Convert(call, typeof(T)); + return Expression.Lambda>(upcast, parameter).Compile(); + } + + /// + /// Creates a delegate which will execute the given method after casting the first argument to + /// the target type of the method, and the second argument to the first parameter type of the method. 
+ /// + internal static Action CreateActionIMessageObject(MethodInfo method) + { + ParameterExpression targetParameter = Expression.Parameter(typeof(IMessage), "target"); + ParameterExpression argParameter = Expression.Parameter(typeof(object), "arg"); + Expression castTarget = Expression.Convert(targetParameter, method.DeclaringType); + Expression castArgument = Expression.Convert(argParameter, method.GetParameters()[0].ParameterType); + Expression call = Expression.Call(castTarget, method, castArgument); + return Expression.Lambda>(call, targetParameter, argParameter).Compile(); + } + + /// + /// Creates a delegate which will execute the given method after casting the first argument to + /// the target type of the method. + /// + internal static Action CreateActionIMessage(MethodInfo method) + { + ParameterExpression targetParameter = Expression.Parameter(typeof(IMessage), "target"); + Expression castTarget = Expression.Convert(targetParameter, method.DeclaringType); + Expression call = Expression.Call(castTarget, method); + return Expression.Lambda>(call, targetParameter).Compile(); + } + } +} \ No newline at end of file diff --git a/packager/third_party/protobuf/csharp/src/Google.Protobuf/Reflection/RepeatedFieldAccessor.cs b/packager/third_party/protobuf/csharp/src/Google.Protobuf/Reflection/RepeatedFieldAccessor.cs new file mode 100644 index 0000000000..bd40847092 --- /dev/null +++ b/packager/third_party/protobuf/csharp/src/Google.Protobuf/Reflection/RepeatedFieldAccessor.cs @@ -0,0 +1,60 @@ +#region Copyright notice and license +// Protocol Buffers - Google's data interchange format +// Copyright 2015 Google Inc. All rights reserved. +// https://developers.google.com/protocol-buffers/ +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +#endregion + +using System; +using System.Collections; +using System.Reflection; + +namespace Google.Protobuf.Reflection +{ + /// + /// Accessor for repeated fields. 
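// A minimal sketch (not part of this file): the expression-tree pattern ReflectionUtil uses,
// reproduced standalone so the shape of the compiled delegate is visible. This mirrors
// CreateFuncIMessageObject: cast the IMessage argument to the getter's declaring type, call the
// getter, and box the result; the compiled delegate avoids MethodInfo.Invoke on every access.
using System;
using System.Linq.Expressions;
using System.Reflection;
using Google.Protobuf;
using Google.Protobuf.WellKnownTypes;

public static class ReflectionUtilExample
{
    public static void Run()
    {
        MethodInfo getter = typeof(Duration).GetProperty("Seconds").GetGetMethod();

        ParameterExpression parameter = Expression.Parameter(typeof(IMessage), "p");
        Expression downcast = Expression.Convert(parameter, getter.DeclaringType);
        Expression call = Expression.Call(downcast, getter);
        Expression upcast = Expression.Convert(call, typeof(object));
        Func<IMessage, object> compiled =
            Expression.Lambda<Func<IMessage, object>>(upcast, parameter).Compile();

        IMessage message = new Duration { Seconds = 90 };
        System.Console.WriteLine(compiled(message));   // 90
    }
}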
+ /// + internal sealed class RepeatedFieldAccessor : FieldAccessorBase + { + internal RepeatedFieldAccessor(PropertyInfo property, FieldDescriptor descriptor) : base(property, descriptor) + { + } + + public override void Clear(IMessage message) + { + IList list = (IList) GetValue(message); + list.Clear(); + } + + public override void SetValue(IMessage message, object value) + { + throw new InvalidOperationException("SetValue is not implemented for repeated fields"); + } + + } +} diff --git a/packager/third_party/protobuf/csharp/src/Google.Protobuf/Reflection/ServiceDescriptor.cs b/packager/third_party/protobuf/csharp/src/Google.Protobuf/Reflection/ServiceDescriptor.cs new file mode 100644 index 0000000000..cc0a501042 --- /dev/null +++ b/packager/third_party/protobuf/csharp/src/Google.Protobuf/Reflection/ServiceDescriptor.cs @@ -0,0 +1,89 @@ +#region Copyright notice and license +// Protocol Buffers - Google's data interchange format +// Copyright 2008 Google Inc. All rights reserved. +// https://developers.google.com/protocol-buffers/ +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +#endregion + +using System; +using System.Collections.Generic; + +namespace Google.Protobuf.Reflection +{ + /// + /// Describes a service type. + /// + public sealed class ServiceDescriptor : DescriptorBase + { + private readonly ServiceDescriptorProto proto; + private readonly IList methods; + + internal ServiceDescriptor(ServiceDescriptorProto proto, FileDescriptor file, int index) + : base(file, file.ComputeFullName(null, proto.Name), index) + { + this.proto = proto; + methods = DescriptorUtil.ConvertAndMakeReadOnly(proto.Method, + (method, i) => new MethodDescriptor(method, file, this, i)); + + file.DescriptorPool.AddSymbol(this); + } + + /// + /// The brief name of the descriptor's target. + /// + public override string Name { get { return proto.Name; } } + + internal ServiceDescriptorProto Proto { get { return proto; } } + + /// + /// An unmodifiable list of methods in this service. 
+ /// + public IList Methods + { + get { return methods; } + } + + /// + /// Finds a method by name. + /// + /// The unqualified name of the method (e.g. "Foo"). + /// The method's decsriptor, or null if not found. + public MethodDescriptor FindMethodByName(String name) + { + return File.DescriptorPool.FindSymbol(FullName + "." + name); + } + + internal void CrossLink() + { + foreach (MethodDescriptor method in methods) + { + method.CrossLink(); + } + } + } +} \ No newline at end of file diff --git a/packager/third_party/protobuf/csharp/src/Google.Protobuf/Reflection/SingleFieldAccessor.cs b/packager/third_party/protobuf/csharp/src/Google.Protobuf/Reflection/SingleFieldAccessor.cs new file mode 100644 index 0000000000..bbac2173d4 --- /dev/null +++ b/packager/third_party/protobuf/csharp/src/Google.Protobuf/Reflection/SingleFieldAccessor.cs @@ -0,0 +1,81 @@ +#region Copyright notice and license +// Protocol Buffers - Google's data interchange format +// Copyright 2015 Google Inc. All rights reserved. +// https://developers.google.com/protocol-buffers/ +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +#endregion + +using System; +using System.Reflection; +using Google.Protobuf.Compatibility; + +namespace Google.Protobuf.Reflection +{ + /// + /// Accessor for single fields. + /// + internal sealed class SingleFieldAccessor : FieldAccessorBase + { + // All the work here is actually done in the constructor - it creates the appropriate delegates. + // There are various cases to consider, based on the property type (message, string/bytes, or "genuine" primitive) + // and proto2 vs proto3 for non-message types, as proto3 doesn't support "full" presence detection or default + // values. 
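// A minimal sketch (not part of this file): navigating the ServiceDescriptor defined a few files
// above. The runtime's bundled .proto files declare no services, so "searchService" here is a
// hypothetical descriptor taken from some generated FileDescriptor's Services list.
using Google.Protobuf.Reflection;

public static class ServiceDescriptorExample
{
    public static void Describe(ServiceDescriptor searchService)
    {
        foreach (MethodDescriptor method in searchService.Methods)
        {
            System.Console.WriteLine(
                $"{method.Name}: {method.InputType.FullName} -> {method.OutputType.FullName}" +
                (method.IsServerStreaming ? " (server streaming)" : ""));
        }

        MethodDescriptor search = searchService.FindMethodByName("Search");   // null if not found
        System.Console.WriteLine(search != null);
    }
}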
+ + private readonly Action setValueDelegate; + private readonly Action clearDelegate; + + internal SingleFieldAccessor(PropertyInfo property, FieldDescriptor descriptor) : base(property, descriptor) + { + if (!property.CanWrite) + { + throw new ArgumentException("Not all required properties/methods available"); + } + setValueDelegate = ReflectionUtil.CreateActionIMessageObject(property.GetSetMethod()); + + var clrType = property.PropertyType; + + // TODO: Validate that this is a reasonable single field? (Should be a value type, a message type, or string/ByteString.) + object defaultValue = + descriptor.FieldType == FieldType.Message ? null + : clrType == typeof(string) ? "" + : clrType == typeof(ByteString) ? ByteString.Empty + : Activator.CreateInstance(clrType); + clearDelegate = message => SetValue(message, defaultValue); + } + + public override void Clear(IMessage message) + { + clearDelegate(message); + } + + public override void SetValue(IMessage message, object value) + { + setValueDelegate(message, value); + } + } +} diff --git a/packager/third_party/protobuf/csharp/src/Google.Protobuf/Reflection/TypeRegistry.cs b/packager/third_party/protobuf/csharp/src/Google.Protobuf/Reflection/TypeRegistry.cs new file mode 100644 index 0000000000..e94e3e6c60 --- /dev/null +++ b/packager/third_party/protobuf/csharp/src/Google.Protobuf/Reflection/TypeRegistry.cs @@ -0,0 +1,183 @@ +#region Copyright notice and license +// Protocol Buffers - Google's data interchange format +// Copyright 2015 Google Inc. All rights reserved. +// https://developers.google.com/protocol-buffers/ +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +#endregion +using System.Collections.Generic; +using System.Linq; + +namespace Google.Protobuf.Reflection +{ + /// + /// An immutable registry of types which can be looked up by their full name. + /// + public sealed class TypeRegistry + { + /// + /// An empty type registry, containing no types. 
+ /// + public static TypeRegistry Empty { get; } = new TypeRegistry(new Dictionary()); + + private readonly Dictionary fullNameToMessageMap; + + private TypeRegistry(Dictionary fullNameToMessageMap) + { + this.fullNameToMessageMap = fullNameToMessageMap; + } + + /// + /// Attempts to find a message descriptor by its full name. + /// + /// The full name of the message, which is the dot-separated + /// combination of package, containing messages and message name + /// The message descriptor corresponding to or null + /// if there is no such message descriptor. + public MessageDescriptor Find(string fullName) + { + MessageDescriptor ret; + // Ignore the return value as ret will end up with the right value either way. + fullNameToMessageMap.TryGetValue(fullName, out ret); + return ret; + } + + /// + /// Creates a type registry from the specified set of file descriptors. + /// + /// + /// This is a convenience overload for + /// to allow calls such as TypeRegistry.FromFiles(descriptor1, descriptor2). + /// + /// The set of files to include in the registry. Must not contain null values. + /// A type registry for the given files. + public static TypeRegistry FromFiles(params FileDescriptor[] fileDescriptors) + { + return FromFiles((IEnumerable) fileDescriptors); + } + + /// + /// Creates a type registry from the specified set of file descriptors. + /// + /// + /// All message types within all the specified files are added to the registry, and + /// the dependencies of the specified files are also added, recursively. + /// + /// The set of files to include in the registry. Must not contain null values. + /// A type registry for the given files. + public static TypeRegistry FromFiles(IEnumerable fileDescriptors) + { + ProtoPreconditions.CheckNotNull(fileDescriptors, nameof(fileDescriptors)); + var builder = new Builder(); + foreach (var file in fileDescriptors) + { + builder.AddFile(file); + } + return builder.Build(); + } + + /// + /// Creates a type registry from the file descriptor parents of the specified set of message descriptors. + /// + /// + /// This is a convenience overload for + /// to allow calls such as TypeRegistry.FromFiles(descriptor1, descriptor2). + /// + /// The set of message descriptors to use to identify file descriptors to include in the registry. + /// Must not contain null values. + /// A type registry for the given files. + public static TypeRegistry FromMessages(params MessageDescriptor[] messageDescriptors) + { + return FromMessages((IEnumerable) messageDescriptors); + } + + /// + /// Creates a type registry from the file descriptor parents of the specified set of message descriptors. + /// + /// + /// The specified message descriptors are only used to identify their file descriptors; the returned registry + /// contains all the types within the file descriptors which contain the specified message descriptors (and + /// the dependencies of those files), not just the specified messages. + /// + /// The set of message descriptors to use to identify file descriptors to include in the registry. + /// Must not contain null values. + /// A type registry for the given files. + public static TypeRegistry FromMessages(IEnumerable messageDescriptors) + { + ProtoPreconditions.CheckNotNull(messageDescriptors, nameof(messageDescriptors)); + return FromFiles(messageDescriptors.Select(md => md.File)); + } + + /// + /// Builder class which isn't exposed, but acts as a convenient alternative to passing round two dictionaries in recursive calls. 
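// A hedged usage sketch for the factory methods above (`Person` and `Address` are
// hypothetical generated messages, and "my.package.Person" a hypothetical full name).
// FromMessages only uses the descriptors to locate their .proto files; every message
// declared in those files and in their dependencies ends up in the registry, which is
// typically handed to the JSON formatter/parser so Any payloads can be resolved by type URL.
TypeRegistry registry = TypeRegistry.FromMessages(Person.Descriptor, Address.Descriptor);
MessageDescriptor match = registry.Find("my.package.Person");   // null when the name is unknown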
+ /// + private class Builder + { + private readonly Dictionary types; + private readonly HashSet fileDescriptorNames; + + internal Builder() + { + types = new Dictionary(); + fileDescriptorNames = new HashSet(); + } + + internal void AddFile(FileDescriptor fileDescriptor) + { + if (!fileDescriptorNames.Add(fileDescriptor.Name)) + { + return; + } + foreach (var dependency in fileDescriptor.Dependencies) + { + AddFile(dependency); + } + foreach (var message in fileDescriptor.MessageTypes) + { + AddMessage(message); + } + } + + private void AddMessage(MessageDescriptor messageDescriptor) + { + foreach (var nestedType in messageDescriptor.NestedTypes) + { + AddMessage(nestedType); + } + // This will overwrite any previous entry. Given that each file should + // only be added once, this could be a problem such as package A.B with type C, + // and package A with type B.C... it's unclear what we should do in that case. + types[messageDescriptor.FullName] = messageDescriptor; + } + + internal TypeRegistry Build() + { + return new TypeRegistry(types); + } + } + } +} diff --git a/packager/third_party/protobuf/csharp/src/Google.Protobuf/WellKnownTypes/Any.cs b/packager/third_party/protobuf/csharp/src/Google.Protobuf/WellKnownTypes/Any.cs new file mode 100644 index 0000000000..871a383f0d --- /dev/null +++ b/packager/third_party/protobuf/csharp/src/Google.Protobuf/WellKnownTypes/Any.cs @@ -0,0 +1,261 @@ +// Generated by the protocol buffer compiler. DO NOT EDIT! +// source: google/protobuf/any.proto +#pragma warning disable 1591, 0612, 3021 +#region Designer generated code + +using pb = global::Google.Protobuf; +using pbc = global::Google.Protobuf.Collections; +using pbr = global::Google.Protobuf.Reflection; +using scg = global::System.Collections.Generic; +namespace Google.Protobuf.WellKnownTypes { + + /// Holder for reflection information generated from google/protobuf/any.proto + [global::System.Diagnostics.DebuggerNonUserCodeAttribute()] + public static partial class AnyReflection { + + #region Descriptor + /// File descriptor for google/protobuf/any.proto + public static pbr::FileDescriptor Descriptor { + get { return descriptor; } + } + private static pbr::FileDescriptor descriptor; + + static AnyReflection() { + byte[] descriptorData = global::System.Convert.FromBase64String( + string.Concat( + "Chlnb29nbGUvcHJvdG9idWYvYW55LnByb3RvEg9nb29nbGUucHJvdG9idWYi", + "JgoDQW55EhAKCHR5cGVfdXJsGAEgASgJEg0KBXZhbHVlGAIgASgMQnIKE2Nv", + "bS5nb29nbGUucHJvdG9idWZCCEFueVByb3RvUAFaJWdpdGh1Yi5jb20vZ29s", + "YW5nL3Byb3RvYnVmL3B0eXBlcy9hbnmgAQGiAgNHUEKqAh5Hb29nbGUuUHJv", + "dG9idWYuV2VsbEtub3duVHlwZXNiBnByb3RvMw==")); + descriptor = pbr::FileDescriptor.FromGeneratedCode(descriptorData, + new pbr::FileDescriptor[] { }, + new pbr::GeneratedClrTypeInfo(null, new pbr::GeneratedClrTypeInfo[] { + new pbr::GeneratedClrTypeInfo(typeof(global::Google.Protobuf.WellKnownTypes.Any), global::Google.Protobuf.WellKnownTypes.Any.Parser, new[]{ "TypeUrl", "Value" }, null, null, null) + })); + } + #endregion + + } + #region Messages + /// + /// `Any` contains an arbitrary serialized protocol buffer message along with a + /// URL that describes the type of the serialized message. + /// + /// Protobuf library provides support to pack/unpack Any values in the form + /// of utility functions or additional generated methods of the Any type. + /// + /// Example 1: Pack and unpack a message in C++. + /// + /// Foo foo = ...; + /// Any any; + /// any.PackFrom(foo); + /// ... + /// if (any.UnpackTo(&foo)) { + /// ... 
+ /// } + /// + /// Example 2: Pack and unpack a message in Java. + /// + /// Foo foo = ...; + /// Any any = Any.pack(foo); + /// ... + /// if (any.is(Foo.class)) { + /// foo = any.unpack(Foo.class); + /// } + /// + /// The pack methods provided by protobuf library will by default use + /// 'type.googleapis.com/full.type.name' as the type URL and the unpack + /// methods only use the fully qualified type name after the last '/' + /// in the type URL, for example "foo.bar.com/x/y.z" will yield type + /// name "y.z". + /// + /// JSON + /// ==== + /// The JSON representation of an `Any` value uses the regular + /// representation of the deserialized, embedded message, with an + /// additional field `@type` which contains the type URL. Example: + /// + /// package google.profile; + /// message Person { + /// string first_name = 1; + /// string last_name = 2; + /// } + /// + /// { + /// "@type": "type.googleapis.com/google.profile.Person", + /// "firstName": <string>, + /// "lastName": <string> + /// } + /// + /// If the embedded message type is well-known and has a custom JSON + /// representation, that representation will be embedded adding a field + /// `value` which holds the custom JSON in addition to the `@type` + /// field. Example (for message [google.protobuf.Duration][]): + /// + /// { + /// "@type": "type.googleapis.com/google.protobuf.Duration", + /// "value": "1.212s" + /// } + /// + [global::System.Diagnostics.DebuggerNonUserCodeAttribute()] + public sealed partial class Any : pb::IMessage { + private static readonly pb::MessageParser _parser = new pb::MessageParser(() => new Any()); + public static pb::MessageParser Parser { get { return _parser; } } + + public static pbr::MessageDescriptor Descriptor { + get { return global::Google.Protobuf.WellKnownTypes.AnyReflection.Descriptor.MessageTypes[0]; } + } + + pbr::MessageDescriptor pb::IMessage.Descriptor { + get { return Descriptor; } + } + + public Any() { + OnConstruction(); + } + + partial void OnConstruction(); + + public Any(Any other) : this() { + typeUrl_ = other.typeUrl_; + value_ = other.value_; + } + + public Any Clone() { + return new Any(this); + } + + /// Field number for the "type_url" field. + public const int TypeUrlFieldNumber = 1; + private string typeUrl_ = ""; + /// + /// A URL/resource name whose content describes the type of the + /// serialized protocol buffer message. + /// + /// For URLs which use the schema `http`, `https`, or no schema, the + /// following restrictions and interpretations apply: + /// + /// * If no schema is provided, `https` is assumed. + /// * The last segment of the URL's path must represent the fully + /// qualified name of the type (as in `path/google.protobuf.Duration`). + /// The name should be in a canonical form (e.g., leading "." is + /// not accepted). + /// * An HTTP GET on the URL must yield a [google.protobuf.Type][] + /// value in binary format, or produce an error. + /// * Applications are allowed to cache lookup results based on the + /// URL, or have them precompiled into a binary to avoid any + /// lookup. Therefore, binary compatibility needs to be preserved + /// on changes to types. (Use versioned type names to manage + /// breaking changes.) + /// + /// Schemas other than `http`, `https` (or the empty schema) might be + /// used with implementation specific semantics. + /// + public string TypeUrl { + get { return typeUrl_; } + set { + typeUrl_ = pb::ProtoPreconditions.CheckNotNull(value, "value"); + } + } + + /// Field number for the "value" field. 
+ public const int ValueFieldNumber = 2; + private pb::ByteString value_ = pb::ByteString.Empty; + /// + /// Must be a valid serialized protocol buffer of the above specified type. + /// + public pb::ByteString Value { + get { return value_; } + set { + value_ = pb::ProtoPreconditions.CheckNotNull(value, "value"); + } + } + + public override bool Equals(object other) { + return Equals(other as Any); + } + + public bool Equals(Any other) { + if (ReferenceEquals(other, null)) { + return false; + } + if (ReferenceEquals(other, this)) { + return true; + } + if (TypeUrl != other.TypeUrl) return false; + if (Value != other.Value) return false; + return true; + } + + public override int GetHashCode() { + int hash = 1; + if (TypeUrl.Length != 0) hash ^= TypeUrl.GetHashCode(); + if (Value.Length != 0) hash ^= Value.GetHashCode(); + return hash; + } + + public override string ToString() { + return pb::JsonFormatter.ToDiagnosticString(this); + } + + public void WriteTo(pb::CodedOutputStream output) { + if (TypeUrl.Length != 0) { + output.WriteRawTag(10); + output.WriteString(TypeUrl); + } + if (Value.Length != 0) { + output.WriteRawTag(18); + output.WriteBytes(Value); + } + } + + public int CalculateSize() { + int size = 0; + if (TypeUrl.Length != 0) { + size += 1 + pb::CodedOutputStream.ComputeStringSize(TypeUrl); + } + if (Value.Length != 0) { + size += 1 + pb::CodedOutputStream.ComputeBytesSize(Value); + } + return size; + } + + public void MergeFrom(Any other) { + if (other == null) { + return; + } + if (other.TypeUrl.Length != 0) { + TypeUrl = other.TypeUrl; + } + if (other.Value.Length != 0) { + Value = other.Value; + } + } + + public void MergeFrom(pb::CodedInputStream input) { + uint tag; + while ((tag = input.ReadTag()) != 0) { + switch(tag) { + default: + input.SkipLastField(); + break; + case 10: { + TypeUrl = input.ReadString(); + break; + } + case 18: { + Value = input.ReadBytes(); + break; + } + } + } + } + + } + + #endregion + +} + +#endregion Designer generated code diff --git a/packager/third_party/protobuf/csharp/src/Google.Protobuf/WellKnownTypes/AnyPartial.cs b/packager/third_party/protobuf/csharp/src/Google.Protobuf/WellKnownTypes/AnyPartial.cs new file mode 100644 index 0000000000..f4fac73865 --- /dev/null +++ b/packager/third_party/protobuf/csharp/src/Google.Protobuf/WellKnownTypes/AnyPartial.cs @@ -0,0 +1,107 @@ +#region Copyright notice and license +// Protocol Buffers - Google's data interchange format +// Copyright 2015 Google Inc. All rights reserved. +// https://developers.google.com/protocol-buffers/ +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +#endregion + +using Google.Protobuf.Reflection; + +namespace Google.Protobuf.WellKnownTypes +{ + public partial class Any + { + private const string DefaultPrefix = "type.googleapis.com"; + + // This could be moved to MessageDescriptor if we wanted to, but keeping it here means + // all the Any-specific code is in the same place. + private static string GetTypeUrl(MessageDescriptor descriptor, string prefix) => + prefix.EndsWith("/") ? prefix + descriptor.FullName : prefix + "/" + descriptor.FullName; + + /// + /// Retrieves the type name for a type URL. This is always just the last part of the URL, + /// after the trailing slash. No validation of anything before the trailing slash is performed. + /// If the type URL does not include a slash, an empty string is returned rather than an exception + /// being thrown; this won't match any types, and the calling code is probably in a better position + /// to give a meaningful error. + /// There is no handling of fragments or queries at the moment. + /// + /// The URL to extract the type name from + /// The type name + internal static string GetTypeName(string typeUrl) + { + int lastSlash = typeUrl.LastIndexOf('/'); + return lastSlash == -1 ? "" : typeUrl.Substring(lastSlash + 1); + } + + /// + /// Unpacks the content of this Any message into the target message type, + /// which must match the type URL within this Any message. + /// + /// The type of message to unpack the content into. + /// The unpacked message. + /// The target message type doesn't match the type URL in this message + public T Unpack() where T : IMessage, new() + { + // Note: this doesn't perform as well is it might. We could take a MessageParser in an alternative overload, + // which would be expected to perform slightly better... although the difference is likely to be negligible. + T target = new T(); + if (GetTypeName(TypeUrl) != target.Descriptor.FullName) + { + throw new InvalidProtocolBufferException( + $"Full type name for {target.Descriptor.Name} is {target.Descriptor.FullName}; Any message's type url is {TypeUrl}"); + } + target.MergeFrom(Value); + return target; + } + + /// + /// Packs the specified message into an Any message using a type URL prefix of "type.googleapis.com". + /// + /// The message to pack. + /// An Any message with the content and type URL of . + public static Any Pack(IMessage message) => Pack(message, DefaultPrefix); + + /// + /// Packs the specified message into an Any message using the specified type URL prefix. + /// + /// The message to pack. + /// The prefix for the type URL. + /// An Any message with the content and type URL of . 
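// A C# counterpart to the C++/Java pack/unpack examples in the Any documentation
// (hedged: `Foo` is a hypothetical generated message). Pack stores the serialized
// payload plus a "type.googleapis.com/<full name>" type URL; Unpack<T> checks that
// URL against T's descriptor and re-parses the bytes, throwing
// InvalidProtocolBufferException on a mismatch.
Foo foo = new Foo();
Any any = Any.Pack(foo);
Foo roundTripped = any.Unpack<Foo>();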
+ public static Any Pack(IMessage message, string typeUrlPrefix) + { + ProtoPreconditions.CheckNotNull(message, nameof(message)); + ProtoPreconditions.CheckNotNull(typeUrlPrefix, nameof(typeUrlPrefix)); + return new Any + { + TypeUrl = GetTypeUrl(message.Descriptor, typeUrlPrefix), + Value = message.ToByteString() + }; + } + } +} diff --git a/packager/third_party/protobuf/csharp/src/Google.Protobuf/WellKnownTypes/Api.cs b/packager/third_party/protobuf/csharp/src/Google.Protobuf/WellKnownTypes/Api.cs new file mode 100644 index 0000000000..e568a2c981 --- /dev/null +++ b/packager/third_party/protobuf/csharp/src/Google.Protobuf/WellKnownTypes/Api.cs @@ -0,0 +1,847 @@ +// Generated by the protocol buffer compiler. DO NOT EDIT! +// source: google/protobuf/api.proto +#pragma warning disable 1591, 0612, 3021 +#region Designer generated code + +using pb = global::Google.Protobuf; +using pbc = global::Google.Protobuf.Collections; +using pbr = global::Google.Protobuf.Reflection; +using scg = global::System.Collections.Generic; +namespace Google.Protobuf.WellKnownTypes { + + /// Holder for reflection information generated from google/protobuf/api.proto + [global::System.Diagnostics.DebuggerNonUserCodeAttribute()] + public static partial class ApiReflection { + + #region Descriptor + /// File descriptor for google/protobuf/api.proto + public static pbr::FileDescriptor Descriptor { + get { return descriptor; } + } + private static pbr::FileDescriptor descriptor; + + static ApiReflection() { + byte[] descriptorData = global::System.Convert.FromBase64String( + string.Concat( + "Chlnb29nbGUvcHJvdG9idWYvYXBpLnByb3RvEg9nb29nbGUucHJvdG9idWYa", + "JGdvb2dsZS9wcm90b2J1Zi9zb3VyY2VfY29udGV4dC5wcm90bxoaZ29vZ2xl", + "L3Byb3RvYnVmL3R5cGUucHJvdG8igQIKA0FwaRIMCgRuYW1lGAEgASgJEigK", + "B21ldGhvZHMYAiADKAsyFy5nb29nbGUucHJvdG9idWYuTWV0aG9kEigKB29w", + "dGlvbnMYAyADKAsyFy5nb29nbGUucHJvdG9idWYuT3B0aW9uEg8KB3ZlcnNp", + "b24YBCABKAkSNgoOc291cmNlX2NvbnRleHQYBSABKAsyHi5nb29nbGUucHJv", + "dG9idWYuU291cmNlQ29udGV4dBImCgZtaXhpbnMYBiADKAsyFi5nb29nbGUu", + "cHJvdG9idWYuTWl4aW4SJwoGc3ludGF4GAcgASgOMhcuZ29vZ2xlLnByb3Rv", + "YnVmLlN5bnRheCLVAQoGTWV0aG9kEgwKBG5hbWUYASABKAkSGAoQcmVxdWVz", + "dF90eXBlX3VybBgCIAEoCRIZChFyZXF1ZXN0X3N0cmVhbWluZxgDIAEoCBIZ", + "ChFyZXNwb25zZV90eXBlX3VybBgEIAEoCRIaChJyZXNwb25zZV9zdHJlYW1p", + "bmcYBSABKAgSKAoHb3B0aW9ucxgGIAMoCzIXLmdvb2dsZS5wcm90b2J1Zi5P", + "cHRpb24SJwoGc3ludGF4GAcgASgOMhcuZ29vZ2xlLnByb3RvYnVmLlN5bnRh", + "eCIjCgVNaXhpbhIMCgRuYW1lGAEgASgJEgwKBHJvb3QYAiABKAlCSwoTY29t", + "Lmdvb2dsZS5wcm90b2J1ZkIIQXBpUHJvdG9QAaABAaICA0dQQqoCHkdvb2ds", + "ZS5Qcm90b2J1Zi5XZWxsS25vd25UeXBlc2IGcHJvdG8z")); + descriptor = pbr::FileDescriptor.FromGeneratedCode(descriptorData, + new pbr::FileDescriptor[] { global::Google.Protobuf.WellKnownTypes.SourceContextReflection.Descriptor, global::Google.Protobuf.WellKnownTypes.TypeReflection.Descriptor, }, + new pbr::GeneratedClrTypeInfo(null, new pbr::GeneratedClrTypeInfo[] { + new pbr::GeneratedClrTypeInfo(typeof(global::Google.Protobuf.WellKnownTypes.Api), global::Google.Protobuf.WellKnownTypes.Api.Parser, new[]{ "Name", "Methods", "Options", "Version", "SourceContext", "Mixins", "Syntax" }, null, null, null), + new pbr::GeneratedClrTypeInfo(typeof(global::Google.Protobuf.WellKnownTypes.Method), global::Google.Protobuf.WellKnownTypes.Method.Parser, new[]{ "Name", "RequestTypeUrl", "RequestStreaming", "ResponseTypeUrl", "ResponseStreaming", "Options", "Syntax" }, null, null, null), + new pbr::GeneratedClrTypeInfo(typeof(global::Google.Protobuf.WellKnownTypes.Mixin), 
global::Google.Protobuf.WellKnownTypes.Mixin.Parser, new[]{ "Name", "Root" }, null, null, null) + })); + } + #endregion + + } + #region Messages + /// + /// Api is a light-weight descriptor for a protocol buffer service. + /// + [global::System.Diagnostics.DebuggerNonUserCodeAttribute()] + public sealed partial class Api : pb::IMessage { + private static readonly pb::MessageParser _parser = new pb::MessageParser(() => new Api()); + public static pb::MessageParser Parser { get { return _parser; } } + + public static pbr::MessageDescriptor Descriptor { + get { return global::Google.Protobuf.WellKnownTypes.ApiReflection.Descriptor.MessageTypes[0]; } + } + + pbr::MessageDescriptor pb::IMessage.Descriptor { + get { return Descriptor; } + } + + public Api() { + OnConstruction(); + } + + partial void OnConstruction(); + + public Api(Api other) : this() { + name_ = other.name_; + methods_ = other.methods_.Clone(); + options_ = other.options_.Clone(); + version_ = other.version_; + SourceContext = other.sourceContext_ != null ? other.SourceContext.Clone() : null; + mixins_ = other.mixins_.Clone(); + syntax_ = other.syntax_; + } + + public Api Clone() { + return new Api(this); + } + + /// Field number for the "name" field. + public const int NameFieldNumber = 1; + private string name_ = ""; + /// + /// The fully qualified name of this api, including package name + /// followed by the api's simple name. + /// + public string Name { + get { return name_; } + set { + name_ = pb::ProtoPreconditions.CheckNotNull(value, "value"); + } + } + + /// Field number for the "methods" field. + public const int MethodsFieldNumber = 2; + private static readonly pb::FieldCodec _repeated_methods_codec + = pb::FieldCodec.ForMessage(18, global::Google.Protobuf.WellKnownTypes.Method.Parser); + private readonly pbc::RepeatedField methods_ = new pbc::RepeatedField(); + /// + /// The methods of this api, in unspecified order. + /// + public pbc::RepeatedField Methods { + get { return methods_; } + } + + /// Field number for the "options" field. + public const int OptionsFieldNumber = 3; + private static readonly pb::FieldCodec _repeated_options_codec + = pb::FieldCodec.ForMessage(26, global::Google.Protobuf.WellKnownTypes.Option.Parser); + private readonly pbc::RepeatedField options_ = new pbc::RepeatedField(); + /// + /// Any metadata attached to the API. + /// + public pbc::RepeatedField Options { + get { return options_; } + } + + /// Field number for the "version" field. + public const int VersionFieldNumber = 4; + private string version_ = ""; + /// + /// A version string for this api. If specified, must have the form + /// `major-version.minor-version`, as in `1.10`. If the minor version + /// is omitted, it defaults to zero. If the entire version field is + /// empty, the major version is derived from the package name, as + /// outlined below. If the field is not empty, the version in the + /// package name will be verified to be consistent with what is + /// provided here. + /// + /// The versioning schema uses [semantic + /// versioning](http://semver.org) where the major version number + /// indicates a breaking change and the minor version an additive, + /// non-breaking change. Both version numbers are signals to users + /// what to expect from different versions, and should be carefully + /// chosen based on the product plan. + /// + /// The major version is also reflected in the package name of the + /// API, which must end in `v<major-version>`, as in + /// `google.feature.v1`. 
For major versions 0 and 1, the suffix can + /// be omitted. Zero major versions must only be used for + /// experimental, none-GA apis. + /// + public string Version { + get { return version_; } + set { + version_ = pb::ProtoPreconditions.CheckNotNull(value, "value"); + } + } + + /// Field number for the "source_context" field. + public const int SourceContextFieldNumber = 5; + private global::Google.Protobuf.WellKnownTypes.SourceContext sourceContext_; + /// + /// Source context for the protocol buffer service represented by this + /// message. + /// + public global::Google.Protobuf.WellKnownTypes.SourceContext SourceContext { + get { return sourceContext_; } + set { + sourceContext_ = value; + } + } + + /// Field number for the "mixins" field. + public const int MixinsFieldNumber = 6; + private static readonly pb::FieldCodec _repeated_mixins_codec + = pb::FieldCodec.ForMessage(50, global::Google.Protobuf.WellKnownTypes.Mixin.Parser); + private readonly pbc::RepeatedField mixins_ = new pbc::RepeatedField(); + /// + /// Included APIs. See [Mixin][]. + /// + public pbc::RepeatedField Mixins { + get { return mixins_; } + } + + /// Field number for the "syntax" field. + public const int SyntaxFieldNumber = 7; + private global::Google.Protobuf.WellKnownTypes.Syntax syntax_ = 0; + /// + /// The source syntax of the service. + /// + public global::Google.Protobuf.WellKnownTypes.Syntax Syntax { + get { return syntax_; } + set { + syntax_ = value; + } + } + + public override bool Equals(object other) { + return Equals(other as Api); + } + + public bool Equals(Api other) { + if (ReferenceEquals(other, null)) { + return false; + } + if (ReferenceEquals(other, this)) { + return true; + } + if (Name != other.Name) return false; + if(!methods_.Equals(other.methods_)) return false; + if(!options_.Equals(other.options_)) return false; + if (Version != other.Version) return false; + if (!object.Equals(SourceContext, other.SourceContext)) return false; + if(!mixins_.Equals(other.mixins_)) return false; + if (Syntax != other.Syntax) return false; + return true; + } + + public override int GetHashCode() { + int hash = 1; + if (Name.Length != 0) hash ^= Name.GetHashCode(); + hash ^= methods_.GetHashCode(); + hash ^= options_.GetHashCode(); + if (Version.Length != 0) hash ^= Version.GetHashCode(); + if (sourceContext_ != null) hash ^= SourceContext.GetHashCode(); + hash ^= mixins_.GetHashCode(); + if (Syntax != 0) hash ^= Syntax.GetHashCode(); + return hash; + } + + public override string ToString() { + return pb::JsonFormatter.ToDiagnosticString(this); + } + + public void WriteTo(pb::CodedOutputStream output) { + if (Name.Length != 0) { + output.WriteRawTag(10); + output.WriteString(Name); + } + methods_.WriteTo(output, _repeated_methods_codec); + options_.WriteTo(output, _repeated_options_codec); + if (Version.Length != 0) { + output.WriteRawTag(34); + output.WriteString(Version); + } + if (sourceContext_ != null) { + output.WriteRawTag(42); + output.WriteMessage(SourceContext); + } + mixins_.WriteTo(output, _repeated_mixins_codec); + if (Syntax != 0) { + output.WriteRawTag(56); + output.WriteEnum((int) Syntax); + } + } + + public int CalculateSize() { + int size = 0; + if (Name.Length != 0) { + size += 1 + pb::CodedOutputStream.ComputeStringSize(Name); + } + size += methods_.CalculateSize(_repeated_methods_codec); + size += options_.CalculateSize(_repeated_options_codec); + if (Version.Length != 0) { + size += 1 + pb::CodedOutputStream.ComputeStringSize(Version); + } + if (sourceContext_ != null) { + size 
+= 1 + pb::CodedOutputStream.ComputeMessageSize(SourceContext); + } + size += mixins_.CalculateSize(_repeated_mixins_codec); + if (Syntax != 0) { + size += 1 + pb::CodedOutputStream.ComputeEnumSize((int) Syntax); + } + return size; + } + + public void MergeFrom(Api other) { + if (other == null) { + return; + } + if (other.Name.Length != 0) { + Name = other.Name; + } + methods_.Add(other.methods_); + options_.Add(other.options_); + if (other.Version.Length != 0) { + Version = other.Version; + } + if (other.sourceContext_ != null) { + if (sourceContext_ == null) { + sourceContext_ = new global::Google.Protobuf.WellKnownTypes.SourceContext(); + } + SourceContext.MergeFrom(other.SourceContext); + } + mixins_.Add(other.mixins_); + if (other.Syntax != 0) { + Syntax = other.Syntax; + } + } + + public void MergeFrom(pb::CodedInputStream input) { + uint tag; + while ((tag = input.ReadTag()) != 0) { + switch(tag) { + default: + input.SkipLastField(); + break; + case 10: { + Name = input.ReadString(); + break; + } + case 18: { + methods_.AddEntriesFrom(input, _repeated_methods_codec); + break; + } + case 26: { + options_.AddEntriesFrom(input, _repeated_options_codec); + break; + } + case 34: { + Version = input.ReadString(); + break; + } + case 42: { + if (sourceContext_ == null) { + sourceContext_ = new global::Google.Protobuf.WellKnownTypes.SourceContext(); + } + input.ReadMessage(sourceContext_); + break; + } + case 50: { + mixins_.AddEntriesFrom(input, _repeated_mixins_codec); + break; + } + case 56: { + syntax_ = (global::Google.Protobuf.WellKnownTypes.Syntax) input.ReadEnum(); + break; + } + } + } + } + + } + + /// + /// Method represents a method of an api. + /// + [global::System.Diagnostics.DebuggerNonUserCodeAttribute()] + public sealed partial class Method : pb::IMessage { + private static readonly pb::MessageParser _parser = new pb::MessageParser(() => new Method()); + public static pb::MessageParser Parser { get { return _parser; } } + + public static pbr::MessageDescriptor Descriptor { + get { return global::Google.Protobuf.WellKnownTypes.ApiReflection.Descriptor.MessageTypes[1]; } + } + + pbr::MessageDescriptor pb::IMessage.Descriptor { + get { return Descriptor; } + } + + public Method() { + OnConstruction(); + } + + partial void OnConstruction(); + + public Method(Method other) : this() { + name_ = other.name_; + requestTypeUrl_ = other.requestTypeUrl_; + requestStreaming_ = other.requestStreaming_; + responseTypeUrl_ = other.responseTypeUrl_; + responseStreaming_ = other.responseStreaming_; + options_ = other.options_.Clone(); + syntax_ = other.syntax_; + } + + public Method Clone() { + return new Method(this); + } + + /// Field number for the "name" field. + public const int NameFieldNumber = 1; + private string name_ = ""; + /// + /// The simple name of this method. + /// + public string Name { + get { return name_; } + set { + name_ = pb::ProtoPreconditions.CheckNotNull(value, "value"); + } + } + + /// Field number for the "request_type_url" field. + public const int RequestTypeUrlFieldNumber = 2; + private string requestTypeUrl_ = ""; + /// + /// A URL of the input message type. + /// + public string RequestTypeUrl { + get { return requestTypeUrl_; } + set { + requestTypeUrl_ = pb::ProtoPreconditions.CheckNotNull(value, "value"); + } + } + + /// Field number for the "request_streaming" field. + public const int RequestStreamingFieldNumber = 3; + private bool requestStreaming_; + /// + /// If true, the request is streamed. 
+ /// + public bool RequestStreaming { + get { return requestStreaming_; } + set { + requestStreaming_ = value; + } + } + + /// Field number for the "response_type_url" field. + public const int ResponseTypeUrlFieldNumber = 4; + private string responseTypeUrl_ = ""; + /// + /// The URL of the output message type. + /// + public string ResponseTypeUrl { + get { return responseTypeUrl_; } + set { + responseTypeUrl_ = pb::ProtoPreconditions.CheckNotNull(value, "value"); + } + } + + /// Field number for the "response_streaming" field. + public const int ResponseStreamingFieldNumber = 5; + private bool responseStreaming_; + /// + /// If true, the response is streamed. + /// + public bool ResponseStreaming { + get { return responseStreaming_; } + set { + responseStreaming_ = value; + } + } + + /// Field number for the "options" field. + public const int OptionsFieldNumber = 6; + private static readonly pb::FieldCodec _repeated_options_codec + = pb::FieldCodec.ForMessage(50, global::Google.Protobuf.WellKnownTypes.Option.Parser); + private readonly pbc::RepeatedField options_ = new pbc::RepeatedField(); + /// + /// Any metadata attached to the method. + /// + public pbc::RepeatedField Options { + get { return options_; } + } + + /// Field number for the "syntax" field. + public const int SyntaxFieldNumber = 7; + private global::Google.Protobuf.WellKnownTypes.Syntax syntax_ = 0; + /// + /// The source syntax of this method. + /// + public global::Google.Protobuf.WellKnownTypes.Syntax Syntax { + get { return syntax_; } + set { + syntax_ = value; + } + } + + public override bool Equals(object other) { + return Equals(other as Method); + } + + public bool Equals(Method other) { + if (ReferenceEquals(other, null)) { + return false; + } + if (ReferenceEquals(other, this)) { + return true; + } + if (Name != other.Name) return false; + if (RequestTypeUrl != other.RequestTypeUrl) return false; + if (RequestStreaming != other.RequestStreaming) return false; + if (ResponseTypeUrl != other.ResponseTypeUrl) return false; + if (ResponseStreaming != other.ResponseStreaming) return false; + if(!options_.Equals(other.options_)) return false; + if (Syntax != other.Syntax) return false; + return true; + } + + public override int GetHashCode() { + int hash = 1; + if (Name.Length != 0) hash ^= Name.GetHashCode(); + if (RequestTypeUrl.Length != 0) hash ^= RequestTypeUrl.GetHashCode(); + if (RequestStreaming != false) hash ^= RequestStreaming.GetHashCode(); + if (ResponseTypeUrl.Length != 0) hash ^= ResponseTypeUrl.GetHashCode(); + if (ResponseStreaming != false) hash ^= ResponseStreaming.GetHashCode(); + hash ^= options_.GetHashCode(); + if (Syntax != 0) hash ^= Syntax.GetHashCode(); + return hash; + } + + public override string ToString() { + return pb::JsonFormatter.ToDiagnosticString(this); + } + + public void WriteTo(pb::CodedOutputStream output) { + if (Name.Length != 0) { + output.WriteRawTag(10); + output.WriteString(Name); + } + if (RequestTypeUrl.Length != 0) { + output.WriteRawTag(18); + output.WriteString(RequestTypeUrl); + } + if (RequestStreaming != false) { + output.WriteRawTag(24); + output.WriteBool(RequestStreaming); + } + if (ResponseTypeUrl.Length != 0) { + output.WriteRawTag(34); + output.WriteString(ResponseTypeUrl); + } + if (ResponseStreaming != false) { + output.WriteRawTag(40); + output.WriteBool(ResponseStreaming); + } + options_.WriteTo(output, _repeated_options_codec); + if (Syntax != 0) { + output.WriteRawTag(56); + output.WriteEnum((int) Syntax); + } + } + + public int CalculateSize() { + 
int size = 0; + if (Name.Length != 0) { + size += 1 + pb::CodedOutputStream.ComputeStringSize(Name); + } + if (RequestTypeUrl.Length != 0) { + size += 1 + pb::CodedOutputStream.ComputeStringSize(RequestTypeUrl); + } + if (RequestStreaming != false) { + size += 1 + 1; + } + if (ResponseTypeUrl.Length != 0) { + size += 1 + pb::CodedOutputStream.ComputeStringSize(ResponseTypeUrl); + } + if (ResponseStreaming != false) { + size += 1 + 1; + } + size += options_.CalculateSize(_repeated_options_codec); + if (Syntax != 0) { + size += 1 + pb::CodedOutputStream.ComputeEnumSize((int) Syntax); + } + return size; + } + + public void MergeFrom(Method other) { + if (other == null) { + return; + } + if (other.Name.Length != 0) { + Name = other.Name; + } + if (other.RequestTypeUrl.Length != 0) { + RequestTypeUrl = other.RequestTypeUrl; + } + if (other.RequestStreaming != false) { + RequestStreaming = other.RequestStreaming; + } + if (other.ResponseTypeUrl.Length != 0) { + ResponseTypeUrl = other.ResponseTypeUrl; + } + if (other.ResponseStreaming != false) { + ResponseStreaming = other.ResponseStreaming; + } + options_.Add(other.options_); + if (other.Syntax != 0) { + Syntax = other.Syntax; + } + } + + public void MergeFrom(pb::CodedInputStream input) { + uint tag; + while ((tag = input.ReadTag()) != 0) { + switch(tag) { + default: + input.SkipLastField(); + break; + case 10: { + Name = input.ReadString(); + break; + } + case 18: { + RequestTypeUrl = input.ReadString(); + break; + } + case 24: { + RequestStreaming = input.ReadBool(); + break; + } + case 34: { + ResponseTypeUrl = input.ReadString(); + break; + } + case 40: { + ResponseStreaming = input.ReadBool(); + break; + } + case 50: { + options_.AddEntriesFrom(input, _repeated_options_codec); + break; + } + case 56: { + syntax_ = (global::Google.Protobuf.WellKnownTypes.Syntax) input.ReadEnum(); + break; + } + } + } + } + + } + + /// + /// Declares an API to be included in this API. The including API must + /// redeclare all the methods from the included API, but documentation + /// and options are inherited as follows: + /// + /// - If after comment and whitespace stripping, the documentation + /// string of the redeclared method is empty, it will be inherited + /// from the original method. + /// + /// - Each annotation belonging to the service config (http, + /// visibility) which is not set in the redeclared method will be + /// inherited. + /// + /// - If an http annotation is inherited, the path pattern will be + /// modified as follows. Any version prefix will be replaced by the + /// version of the including API plus the [root][] path if specified. + /// + /// Example of a simple mixin: + /// + /// package google.acl.v1; + /// service AccessControl { + /// // Get the underlying ACL object. + /// rpc GetAcl(GetAclRequest) returns (Acl) { + /// option (google.api.http).get = "/v1/{resource=**}:getAcl"; + /// } + /// } + /// + /// package google.storage.v2; + /// service Storage { + /// rpc GetAcl(GetAclRequest) returns (Acl); + /// + /// // Get a data record. + /// rpc GetData(GetDataRequest) returns (Data) { + /// option (google.api.http).get = "/v2/{resource=**}"; + /// } + /// } + /// + /// Example of a mixin configuration: + /// + /// apis: + /// - name: google.storage.v2.Storage + /// mixins: + /// - name: google.acl.v1.AccessControl + /// + /// The mixin construct implies that all methods in `AccessControl` are + /// also declared with same name and request/response types in + /// `Storage`. 
A documentation generator or annotation processor will + /// see the effective `Storage.GetAcl` method after inherting + /// documentation and annotations as follows: + /// + /// service Storage { + /// // Get the underlying ACL object. + /// rpc GetAcl(GetAclRequest) returns (Acl) { + /// option (google.api.http).get = "/v2/{resource=**}:getAcl"; + /// } + /// ... + /// } + /// + /// Note how the version in the path pattern changed from `v1` to `v2`. + /// + /// If the `root` field in the mixin is specified, it should be a + /// relative path under which inherited HTTP paths are placed. Example: + /// + /// apis: + /// - name: google.storage.v2.Storage + /// mixins: + /// - name: google.acl.v1.AccessControl + /// root: acls + /// + /// This implies the following inherited HTTP annotation: + /// + /// service Storage { + /// // Get the underlying ACL object. + /// rpc GetAcl(GetAclRequest) returns (Acl) { + /// option (google.api.http).get = "/v2/acls/{resource=**}:getAcl"; + /// } + /// ... + /// } + /// + [global::System.Diagnostics.DebuggerNonUserCodeAttribute()] + public sealed partial class Mixin : pb::IMessage { + private static readonly pb::MessageParser _parser = new pb::MessageParser(() => new Mixin()); + public static pb::MessageParser Parser { get { return _parser; } } + + public static pbr::MessageDescriptor Descriptor { + get { return global::Google.Protobuf.WellKnownTypes.ApiReflection.Descriptor.MessageTypes[2]; } + } + + pbr::MessageDescriptor pb::IMessage.Descriptor { + get { return Descriptor; } + } + + public Mixin() { + OnConstruction(); + } + + partial void OnConstruction(); + + public Mixin(Mixin other) : this() { + name_ = other.name_; + root_ = other.root_; + } + + public Mixin Clone() { + return new Mixin(this); + } + + /// Field number for the "name" field. + public const int NameFieldNumber = 1; + private string name_ = ""; + /// + /// The fully qualified name of the API which is included. + /// + public string Name { + get { return name_; } + set { + name_ = pb::ProtoPreconditions.CheckNotNull(value, "value"); + } + } + + /// Field number for the "root" field. + public const int RootFieldNumber = 2; + private string root_ = ""; + /// + /// If non-empty specifies a path under which inherited HTTP paths + /// are rooted. 
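// A hedged sketch expressing the mixin configuration from the comment above in terms
// of the generated C# messages in this file (the values are illustrative only):
var api = new Api {
    Name = "google.storage.v2.Storage",
    Mixins = { new Mixin { Name = "google.acl.v1.AccessControl", Root = "acls" } }
};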
+ /// + public string Root { + get { return root_; } + set { + root_ = pb::ProtoPreconditions.CheckNotNull(value, "value"); + } + } + + public override bool Equals(object other) { + return Equals(other as Mixin); + } + + public bool Equals(Mixin other) { + if (ReferenceEquals(other, null)) { + return false; + } + if (ReferenceEquals(other, this)) { + return true; + } + if (Name != other.Name) return false; + if (Root != other.Root) return false; + return true; + } + + public override int GetHashCode() { + int hash = 1; + if (Name.Length != 0) hash ^= Name.GetHashCode(); + if (Root.Length != 0) hash ^= Root.GetHashCode(); + return hash; + } + + public override string ToString() { + return pb::JsonFormatter.ToDiagnosticString(this); + } + + public void WriteTo(pb::CodedOutputStream output) { + if (Name.Length != 0) { + output.WriteRawTag(10); + output.WriteString(Name); + } + if (Root.Length != 0) { + output.WriteRawTag(18); + output.WriteString(Root); + } + } + + public int CalculateSize() { + int size = 0; + if (Name.Length != 0) { + size += 1 + pb::CodedOutputStream.ComputeStringSize(Name); + } + if (Root.Length != 0) { + size += 1 + pb::CodedOutputStream.ComputeStringSize(Root); + } + return size; + } + + public void MergeFrom(Mixin other) { + if (other == null) { + return; + } + if (other.Name.Length != 0) { + Name = other.Name; + } + if (other.Root.Length != 0) { + Root = other.Root; + } + } + + public void MergeFrom(pb::CodedInputStream input) { + uint tag; + while ((tag = input.ReadTag()) != 0) { + switch(tag) { + default: + input.SkipLastField(); + break; + case 10: { + Name = input.ReadString(); + break; + } + case 18: { + Root = input.ReadString(); + break; + } + } + } + } + + } + + #endregion + +} + +#endregion Designer generated code diff --git a/packager/third_party/protobuf/csharp/src/Google.Protobuf/WellKnownTypes/Duration.cs b/packager/third_party/protobuf/csharp/src/Google.Protobuf/WellKnownTypes/Duration.cs new file mode 100644 index 0000000000..f17358f4b3 --- /dev/null +++ b/packager/third_party/protobuf/csharp/src/Google.Protobuf/WellKnownTypes/Duration.cs @@ -0,0 +1,230 @@ +// Generated by the protocol buffer compiler. DO NOT EDIT! 
+// source: google/protobuf/duration.proto +#pragma warning disable 1591, 0612, 3021 +#region Designer generated code + +using pb = global::Google.Protobuf; +using pbc = global::Google.Protobuf.Collections; +using pbr = global::Google.Protobuf.Reflection; +using scg = global::System.Collections.Generic; +namespace Google.Protobuf.WellKnownTypes { + + /// Holder for reflection information generated from google/protobuf/duration.proto + [global::System.Diagnostics.DebuggerNonUserCodeAttribute()] + public static partial class DurationReflection { + + #region Descriptor + /// File descriptor for google/protobuf/duration.proto + public static pbr::FileDescriptor Descriptor { + get { return descriptor; } + } + private static pbr::FileDescriptor descriptor; + + static DurationReflection() { + byte[] descriptorData = global::System.Convert.FromBase64String( + string.Concat( + "Ch5nb29nbGUvcHJvdG9idWYvZHVyYXRpb24ucHJvdG8SD2dvb2dsZS5wcm90", + "b2J1ZiIqCghEdXJhdGlvbhIPCgdzZWNvbmRzGAEgASgDEg0KBW5hbm9zGAIg", + "ASgFQnwKE2NvbS5nb29nbGUucHJvdG9idWZCDUR1cmF0aW9uUHJvdG9QAVoq", + "Z2l0aHViLmNvbS9nb2xhbmcvcHJvdG9idWYvcHR5cGVzL2R1cmF0aW9uoAEB", + "ogIDR1BCqgIeR29vZ2xlLlByb3RvYnVmLldlbGxLbm93blR5cGVzYgZwcm90", + "bzM=")); + descriptor = pbr::FileDescriptor.FromGeneratedCode(descriptorData, + new pbr::FileDescriptor[] { }, + new pbr::GeneratedClrTypeInfo(null, new pbr::GeneratedClrTypeInfo[] { + new pbr::GeneratedClrTypeInfo(typeof(global::Google.Protobuf.WellKnownTypes.Duration), global::Google.Protobuf.WellKnownTypes.Duration.Parser, new[]{ "Seconds", "Nanos" }, null, null, null) + })); + } + #endregion + + } + #region Messages + /// + /// A Duration represents a signed, fixed-length span of time represented + /// as a count of seconds and fractions of seconds at nanosecond + /// resolution. It is independent of any calendar and concepts like "day" + /// or "month". It is related to Timestamp in that the difference between + /// two Timestamp values is a Duration and it can be added or subtracted + /// from a Timestamp. Range is approximately +-10,000 years. + /// + /// Example 1: Compute Duration from two Timestamps in pseudo code. + /// + /// Timestamp start = ...; + /// Timestamp end = ...; + /// Duration duration = ...; + /// + /// duration.seconds = end.seconds - start.seconds; + /// duration.nanos = end.nanos - start.nanos; + /// + /// if (duration.seconds < 0 && duration.nanos > 0) { + /// duration.seconds += 1; + /// duration.nanos -= 1000000000; + /// } else if (durations.seconds > 0 && duration.nanos < 0) { + /// duration.seconds -= 1; + /// duration.nanos += 1000000000; + /// } + /// + /// Example 2: Compute Timestamp from Timestamp + Duration in pseudo code. 
+ /// + /// Timestamp start = ...; + /// Duration duration = ...; + /// Timestamp end = ...; + /// + /// end.seconds = start.seconds + duration.seconds; + /// end.nanos = start.nanos + duration.nanos; + /// + /// if (end.nanos < 0) { + /// end.seconds -= 1; + /// end.nanos += 1000000000; + /// } else if (end.nanos >= 1000000000) { + /// end.seconds += 1; + /// end.nanos -= 1000000000; + /// } + /// + [global::System.Diagnostics.DebuggerNonUserCodeAttribute()] + public sealed partial class Duration : pb::IMessage { + private static readonly pb::MessageParser _parser = new pb::MessageParser(() => new Duration()); + public static pb::MessageParser Parser { get { return _parser; } } + + public static pbr::MessageDescriptor Descriptor { + get { return global::Google.Protobuf.WellKnownTypes.DurationReflection.Descriptor.MessageTypes[0]; } + } + + pbr::MessageDescriptor pb::IMessage.Descriptor { + get { return Descriptor; } + } + + public Duration() { + OnConstruction(); + } + + partial void OnConstruction(); + + public Duration(Duration other) : this() { + seconds_ = other.seconds_; + nanos_ = other.nanos_; + } + + public Duration Clone() { + return new Duration(this); + } + + /// Field number for the "seconds" field. + public const int SecondsFieldNumber = 1; + private long seconds_; + /// + /// Signed seconds of the span of time. Must be from -315,576,000,000 + /// to +315,576,000,000 inclusive. + /// + public long Seconds { + get { return seconds_; } + set { + seconds_ = value; + } + } + + /// Field number for the "nanos" field. + public const int NanosFieldNumber = 2; + private int nanos_; + /// + /// Signed fractions of a second at nanosecond resolution of the span + /// of time. Durations less than one second are represented with a 0 + /// `seconds` field and a positive or negative `nanos` field. For durations + /// of one second or more, a non-zero value for the `nanos` field must be + /// of the same sign as the `seconds` field. Must be from -999,999,999 + /// to +999,999,999 inclusive. 
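// The C# library expresses the pseudo-code examples above through operators in the
// hand-written partial classes (Duration's are in DurationPartial.cs later in this
// patch; Timestamp's live in TimestampPartial.cs, which is not part of this file).
// Hedged sketch, assuming `start` and `end` are Timestamp values:
Duration elapsed = end - start;       // Example 1: Timestamp - Timestamp -> Duration
Timestamp finish = start + elapsed;   // Example 2: Timestamp + Duration -> Timestamp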
+ /// + public int Nanos { + get { return nanos_; } + set { + nanos_ = value; + } + } + + public override bool Equals(object other) { + return Equals(other as Duration); + } + + public bool Equals(Duration other) { + if (ReferenceEquals(other, null)) { + return false; + } + if (ReferenceEquals(other, this)) { + return true; + } + if (Seconds != other.Seconds) return false; + if (Nanos != other.Nanos) return false; + return true; + } + + public override int GetHashCode() { + int hash = 1; + if (Seconds != 0L) hash ^= Seconds.GetHashCode(); + if (Nanos != 0) hash ^= Nanos.GetHashCode(); + return hash; + } + + public override string ToString() { + return pb::JsonFormatter.ToDiagnosticString(this); + } + + public void WriteTo(pb::CodedOutputStream output) { + if (Seconds != 0L) { + output.WriteRawTag(8); + output.WriteInt64(Seconds); + } + if (Nanos != 0) { + output.WriteRawTag(16); + output.WriteInt32(Nanos); + } + } + + public int CalculateSize() { + int size = 0; + if (Seconds != 0L) { + size += 1 + pb::CodedOutputStream.ComputeInt64Size(Seconds); + } + if (Nanos != 0) { + size += 1 + pb::CodedOutputStream.ComputeInt32Size(Nanos); + } + return size; + } + + public void MergeFrom(Duration other) { + if (other == null) { + return; + } + if (other.Seconds != 0L) { + Seconds = other.Seconds; + } + if (other.Nanos != 0) { + Nanos = other.Nanos; + } + } + + public void MergeFrom(pb::CodedInputStream input) { + uint tag; + while ((tag = input.ReadTag()) != 0) { + switch(tag) { + default: + input.SkipLastField(); + break; + case 8: { + Seconds = input.ReadInt64(); + break; + } + case 16: { + Nanos = input.ReadInt32(); + break; + } + } + } + } + + } + + #endregion + +} + +#endregion Designer generated code diff --git a/packager/third_party/protobuf/csharp/src/Google.Protobuf/WellKnownTypes/DurationPartial.cs b/packager/third_party/protobuf/csharp/src/Google.Protobuf/WellKnownTypes/DurationPartial.cs new file mode 100644 index 0000000000..f164bfd19d --- /dev/null +++ b/packager/third_party/protobuf/csharp/src/Google.Protobuf/WellKnownTypes/DurationPartial.cs @@ -0,0 +1,270 @@ +#region Copyright notice and license +// Protocol Buffers - Google's data interchange format +// Copyright 2015 Google Inc. All rights reserved. +// https://developers.google.com/protocol-buffers/ +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +#endregion + +using System; +using System.Globalization; +using System.Text; + +namespace Google.Protobuf.WellKnownTypes +{ + // Manually-written partial class for the Duration well-known type, + // providing a conversion to TimeSpan and convenience operators. + public partial class Duration : ICustomDiagnosticMessage + { + /// + /// The number of nanoseconds in a second. + /// + public const int NanosecondsPerSecond = 1000000000; + /// + /// The number of nanoseconds in a BCL tick (as used by and ). + /// + public const int NanosecondsPerTick = 100; + + /// + /// The maximum permitted number of seconds. + /// + public const long MaxSeconds = 315576000000L; + + /// + /// The minimum permitted number of seconds. + /// + public const long MinSeconds = -315576000000L; + + internal const int MaxNanoseconds = NanosecondsPerSecond - 1; + internal const int MinNanoseconds = -NanosecondsPerSecond + 1; + + internal static bool IsNormalized(long seconds, int nanoseconds) + { + // Simple boundaries + if (seconds < MinSeconds || seconds > MaxSeconds || + nanoseconds < MinNanoseconds || nanoseconds > MaxNanoseconds) + { + return false; + } + // We only have a problem is one is strictly negative and the other is + // strictly positive. + return Math.Sign(seconds) * Math.Sign(nanoseconds) != -1; + } + + /// + /// Converts this to a . + /// + /// If the duration is not a precise number of ticks, it is truncated towards 0. + /// The value of this duration, as a TimeSpan. + /// This value isn't a valid normalized duration, as + /// described in the documentation. + public TimeSpan ToTimeSpan() + { + checked + { + if (!IsNormalized(Seconds, Nanos)) + { + throw new InvalidOperationException("Duration was not a valid normalized duration"); + } + long ticks = Seconds * TimeSpan.TicksPerSecond + Nanos / NanosecondsPerTick; + return TimeSpan.FromTicks(ticks); + } + } + + /// + /// Converts the given to a . + /// + /// The TimeSpan to convert. + /// The value of the given TimeSpan, as a Duration. + public static Duration FromTimeSpan(TimeSpan timeSpan) + { + checked + { + long ticks = timeSpan.Ticks; + long seconds = ticks / TimeSpan.TicksPerSecond; + int nanos = (int) (ticks % TimeSpan.TicksPerSecond) * NanosecondsPerTick; + return new Duration { Seconds = seconds, Nanos = nanos }; + } + } + + /// + /// Returns the result of negating the duration. For example, the negation of 5 minutes is -5 minutes. + /// + /// The duration to negate. Must not be null. + /// The negated value of this duration. + public static Duration operator -(Duration value) + { + ProtoPreconditions.CheckNotNull(value, "value"); + checked + { + return Normalize(-value.Seconds, -value.Nanos); + } + } + + /// + /// Adds the two specified values together. + /// + /// The first value to add. Must not be null. + /// The second value to add. Must not be null. 
+ /// + public static Duration operator +(Duration lhs, Duration rhs) + { + ProtoPreconditions.CheckNotNull(lhs, "lhs"); + ProtoPreconditions.CheckNotNull(rhs, "rhs"); + checked + { + return Normalize(lhs.Seconds + rhs.Seconds, lhs.Nanos + rhs.Nanos); + } + } + + /// + /// Subtracts one from another. + /// + /// The duration to subtract from. Must not be null. + /// The duration to subtract. Must not be null. + /// The difference between the two specified durations. + public static Duration operator -(Duration lhs, Duration rhs) + { + ProtoPreconditions.CheckNotNull(lhs, "lhs"); + ProtoPreconditions.CheckNotNull(rhs, "rhs"); + checked + { + return Normalize(lhs.Seconds - rhs.Seconds, lhs.Nanos - rhs.Nanos); + } + } + + /// + /// Creates a duration with the normalized values from the given number of seconds and + /// nanoseconds, conforming with the description in the proto file. + /// + internal static Duration Normalize(long seconds, int nanoseconds) + { + // Ensure that nanoseconds is in the range (-1,000,000,000, +1,000,000,000) + int extraSeconds = nanoseconds / NanosecondsPerSecond; + seconds += extraSeconds; + nanoseconds -= extraSeconds * NanosecondsPerSecond; + + // Now make sure that Sign(seconds) == Sign(nanoseconds) if Sign(seconds) != 0. + if (seconds < 0 && nanoseconds > 0) + { + seconds += 1; + nanoseconds -= NanosecondsPerSecond; + } + else if (seconds > 0 && nanoseconds < 0) + { + seconds -= 1; + nanoseconds += NanosecondsPerSecond; + } + return new Duration { Seconds = seconds, Nanos = nanoseconds }; + } + + /// + /// Converts a duration specified in seconds/nanoseconds to a string. + /// + /// + /// If the value is a normalized duration in the range described in duration.proto, + /// is ignored. Otherwise, if the parameter is true, + /// a JSON object with a warning is returned; if it is false, an is thrown. + /// + /// Seconds portion of the duration. + /// Nanoseconds portion of the duration. + /// Determines the handling of non-normalized values + /// The represented duration is invalid, and is false. + internal static string ToJson(long seconds, int nanoseconds, bool diagnosticOnly) + { + if (IsNormalized(seconds, nanoseconds)) + { + var builder = new StringBuilder(); + builder.Append('"'); + // The seconds part will normally provide the minus sign if we need it, but not if it's 0... + if (seconds == 0 && nanoseconds < 0) + { + builder.Append('-'); + } + + builder.Append(seconds.ToString("d", CultureInfo.InvariantCulture)); + AppendNanoseconds(builder, Math.Abs(nanoseconds)); + builder.Append("s\""); + return builder.ToString(); + } + if (diagnosticOnly) + { + // Note: the double braces here are escaping for braces in format strings. + return string.Format(CultureInfo.InvariantCulture, + "{{ \"@warning\": \"Invalid Duration\", \"seconds\": \"{0}\", \"nanos\": {1} }}", + seconds, + nanoseconds); + } + else + { + throw new InvalidOperationException("Non-normalized duration value"); + } + } + + /// + /// Returns a string representation of this for diagnostic purposes. + /// + /// + /// Normally the returned value will be a JSON string value (including leading and trailing quotes) but + /// when the value is non-normalized or out of range, a JSON object representation will be returned + /// instead, including a warning. This is to avoid exceptions being thrown when trying to + /// diagnose problems - the regular JSON formatter will still throw an exception for non-normalized + /// values. + /// + /// A string representation of this value. 
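For orientation, here is a minimal usage sketch of the TimeSpan conversions, checked operators and diagnostic formatting that this hand-written partial class adds on top of the generated Duration message. It is illustrative only, not part of the vendored file, and relies solely on members declared above; the commented values follow from the arithmetic shown in those members.

using System;
using Google.Protobuf.WellKnownTypes;

class DurationExample
{
    static void Main()
    {
        // Convert a BCL TimeSpan to a protobuf Duration and back.
        Duration d = Duration.FromTimeSpan(TimeSpan.FromSeconds(1.5));
        // d.Seconds == 1, d.Nanos == 500000000

        // The operators normalize their results, so carries between the
        // seconds and nanos fields are handled for us.
        Duration sum = d + Duration.FromTimeSpan(TimeSpan.FromMilliseconds(750));
        TimeSpan roundTrip = sum.ToTimeSpan();   // 2.25 seconds

        Console.WriteLine(sum);        // JSON diagnostic form, e.g. "2.250s"
        Console.WriteLine(roundTrip);  // 00:00:02.2500000
    }
}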
+ public string ToDiagnosticString() + { + return ToJson(Seconds, Nanos, true); + } + + /// + /// Appends a number of nanoseconds to a StringBuilder. Either 0 digits are added (in which + /// case no "." is appended), or 3 6 or 9 digits. This is internal for use in Timestamp as well + /// as Duration. + /// + internal static void AppendNanoseconds(StringBuilder builder, int nanos) + { + if (nanos != 0) + { + builder.Append('.'); + // Output to 3, 6 or 9 digits. + if (nanos % 1000000 == 0) + { + builder.Append((nanos / 1000000).ToString("d3", CultureInfo.InvariantCulture)); + } + else if (nanos % 1000 == 0) + { + builder.Append((nanos / 1000).ToString("d6", CultureInfo.InvariantCulture)); + } + else + { + builder.Append(nanos.ToString("d9", CultureInfo.InvariantCulture)); + } + } + } + } +} diff --git a/packager/third_party/protobuf/csharp/src/Google.Protobuf/WellKnownTypes/Empty.cs b/packager/third_party/protobuf/csharp/src/Google.Protobuf/WellKnownTypes/Empty.cs new file mode 100644 index 0000000000..e08ea2400b --- /dev/null +++ b/packager/third_party/protobuf/csharp/src/Google.Protobuf/WellKnownTypes/Empty.cs @@ -0,0 +1,132 @@ +// Generated by the protocol buffer compiler. DO NOT EDIT! +// source: google/protobuf/empty.proto +#pragma warning disable 1591, 0612, 3021 +#region Designer generated code + +using pb = global::Google.Protobuf; +using pbc = global::Google.Protobuf.Collections; +using pbr = global::Google.Protobuf.Reflection; +using scg = global::System.Collections.Generic; +namespace Google.Protobuf.WellKnownTypes { + + /// Holder for reflection information generated from google/protobuf/empty.proto + [global::System.Diagnostics.DebuggerNonUserCodeAttribute()] + public static partial class EmptyReflection { + + #region Descriptor + /// File descriptor for google/protobuf/empty.proto + public static pbr::FileDescriptor Descriptor { + get { return descriptor; } + } + private static pbr::FileDescriptor descriptor; + + static EmptyReflection() { + byte[] descriptorData = global::System.Convert.FromBase64String( + string.Concat( + "Chtnb29nbGUvcHJvdG9idWYvZW1wdHkucHJvdG8SD2dvb2dsZS5wcm90b2J1", + "ZiIHCgVFbXB0eUJ5ChNjb20uZ29vZ2xlLnByb3RvYnVmQgpFbXB0eVByb3Rv", + "UAFaJ2dpdGh1Yi5jb20vZ29sYW5nL3Byb3RvYnVmL3B0eXBlcy9lbXB0eaAB", + "AfgBAaICA0dQQqoCHkdvb2dsZS5Qcm90b2J1Zi5XZWxsS25vd25UeXBlc2IG", + "cHJvdG8z")); + descriptor = pbr::FileDescriptor.FromGeneratedCode(descriptorData, + new pbr::FileDescriptor[] { }, + new pbr::GeneratedClrTypeInfo(null, new pbr::GeneratedClrTypeInfo[] { + new pbr::GeneratedClrTypeInfo(typeof(global::Google.Protobuf.WellKnownTypes.Empty), global::Google.Protobuf.WellKnownTypes.Empty.Parser, null, null, null, null) + })); + } + #endregion + + } + #region Messages + /// + /// A generic empty message that you can re-use to avoid defining duplicated + /// empty messages in your APIs. A typical example is to use it as the request + /// or the response type of an API method. For instance: + /// + /// service Foo { + /// rpc Bar(google.protobuf.Empty) returns (google.protobuf.Empty); + /// } + /// + /// The JSON representation for `Empty` is empty JSON object `{}`. 
+ /// + [global::System.Diagnostics.DebuggerNonUserCodeAttribute()] + public sealed partial class Empty : pb::IMessage { + private static readonly pb::MessageParser _parser = new pb::MessageParser(() => new Empty()); + public static pb::MessageParser Parser { get { return _parser; } } + + public static pbr::MessageDescriptor Descriptor { + get { return global::Google.Protobuf.WellKnownTypes.EmptyReflection.Descriptor.MessageTypes[0]; } + } + + pbr::MessageDescriptor pb::IMessage.Descriptor { + get { return Descriptor; } + } + + public Empty() { + OnConstruction(); + } + + partial void OnConstruction(); + + public Empty(Empty other) : this() { + } + + public Empty Clone() { + return new Empty(this); + } + + public override bool Equals(object other) { + return Equals(other as Empty); + } + + public bool Equals(Empty other) { + if (ReferenceEquals(other, null)) { + return false; + } + if (ReferenceEquals(other, this)) { + return true; + } + return true; + } + + public override int GetHashCode() { + int hash = 1; + return hash; + } + + public override string ToString() { + return pb::JsonFormatter.ToDiagnosticString(this); + } + + public void WriteTo(pb::CodedOutputStream output) { + } + + public int CalculateSize() { + int size = 0; + return size; + } + + public void MergeFrom(Empty other) { + if (other == null) { + return; + } + } + + public void MergeFrom(pb::CodedInputStream input) { + uint tag; + while ((tag = input.ReadTag()) != 0) { + switch(tag) { + default: + input.SkipLastField(); + break; + } + } + } + + } + + #endregion + +} + +#endregion Designer generated code diff --git a/packager/third_party/protobuf/csharp/src/Google.Protobuf/WellKnownTypes/FieldMask.cs b/packager/third_party/protobuf/csharp/src/Google.Protobuf/WellKnownTypes/FieldMask.cs new file mode 100644 index 0000000000..6f0a64d6ae --- /dev/null +++ b/packager/third_party/protobuf/csharp/src/Google.Protobuf/WellKnownTypes/FieldMask.cs @@ -0,0 +1,292 @@ +// Generated by the protocol buffer compiler. DO NOT EDIT! 
+// source: google/protobuf/field_mask.proto +#pragma warning disable 1591, 0612, 3021 +#region Designer generated code + +using pb = global::Google.Protobuf; +using pbc = global::Google.Protobuf.Collections; +using pbr = global::Google.Protobuf.Reflection; +using scg = global::System.Collections.Generic; +namespace Google.Protobuf.WellKnownTypes { + + /// Holder for reflection information generated from google/protobuf/field_mask.proto + [global::System.Diagnostics.DebuggerNonUserCodeAttribute()] + public static partial class FieldMaskReflection { + + #region Descriptor + /// File descriptor for google/protobuf/field_mask.proto + public static pbr::FileDescriptor Descriptor { + get { return descriptor; } + } + private static pbr::FileDescriptor descriptor; + + static FieldMaskReflection() { + byte[] descriptorData = global::System.Convert.FromBase64String( + string.Concat( + "CiBnb29nbGUvcHJvdG9idWYvZmllbGRfbWFzay5wcm90bxIPZ29vZ2xlLnBy", + "b3RvYnVmIhoKCUZpZWxkTWFzaxINCgVwYXRocxgBIAMoCUJRChNjb20uZ29v", + "Z2xlLnByb3RvYnVmQg5GaWVsZE1hc2tQcm90b1ABoAEBogIDR1BCqgIeR29v", + "Z2xlLlByb3RvYnVmLldlbGxLbm93blR5cGVzYgZwcm90bzM=")); + descriptor = pbr::FileDescriptor.FromGeneratedCode(descriptorData, + new pbr::FileDescriptor[] { }, + new pbr::GeneratedClrTypeInfo(null, new pbr::GeneratedClrTypeInfo[] { + new pbr::GeneratedClrTypeInfo(typeof(global::Google.Protobuf.WellKnownTypes.FieldMask), global::Google.Protobuf.WellKnownTypes.FieldMask.Parser, new[]{ "Paths" }, null, null, null) + })); + } + #endregion + + } + #region Messages + /// + /// `FieldMask` represents a set of symbolic field paths, for example: + /// + /// paths: "f.a" + /// paths: "f.b.d" + /// + /// Here `f` represents a field in some root message, `a` and `b` + /// fields in the message found in `f`, and `d` a field found in the + /// message in `f.b`. + /// + /// Field masks are used to specify a subset of fields that should be + /// returned by a get operation or modified by an update operation. + /// Field masks also have a custom JSON encoding (see below). + /// + /// # Field Masks in Projections + /// + /// When used in the context of a projection, a response message or + /// sub-message is filtered by the API to only contain those fields as + /// specified in the mask. For example, if the mask in the previous + /// example is applied to a response message as follows: + /// + /// f { + /// a : 22 + /// b { + /// d : 1 + /// x : 2 + /// } + /// y : 13 + /// } + /// z: 8 + /// + /// The result will not contain specific values for fields x,y and z + /// (their value will be set to the default, and omitted in proto text + /// output): + /// + /// f { + /// a : 22 + /// b { + /// d : 1 + /// } + /// } + /// + /// A repeated field is not allowed except at the last position of a + /// field mask. + /// + /// If a FieldMask object is not present in a get operation, the + /// operation applies to all fields (as if a FieldMask of all fields + /// had been specified). + /// + /// Note that a field mask does not necessarily apply to the + /// top-level response message. In case of a REST get operation, the + /// field mask applies directly to the response, but in case of a REST + /// list operation, the mask instead applies to each individual message + /// in the returned resource list. In case of a REST custom method, + /// other definitions may be used. Where the mask applies will be + /// clearly documented together with its declaration in the API. 
In + /// any case, the effect on the returned resource/resources is required + /// behavior for APIs. + /// + /// # Field Masks in Update Operations + /// + /// A field mask in update operations specifies which fields of the + /// targeted resource are going to be updated. The API is required + /// to only change the values of the fields as specified in the mask + /// and leave the others untouched. If a resource is passed in to + /// describe the updated values, the API ignores the values of all + /// fields not covered by the mask. + /// + /// In order to reset a field's value to the default, the field must + /// be in the mask and set to the default value in the provided resource. + /// Hence, in order to reset all fields of a resource, provide a default + /// instance of the resource and set all fields in the mask, or do + /// not provide a mask as described below. + /// + /// If a field mask is not present on update, the operation applies to + /// all fields (as if a field mask of all fields has been specified). + /// Note that in the presence of schema evolution, this may mean that + /// fields the client does not know and has therefore not filled into + /// the request will be reset to their default. If this is unwanted + /// behavior, a specific service may require a client to always specify + /// a field mask, producing an error if not. + /// + /// As with get operations, the location of the resource which + /// describes the updated values in the request message depends on the + /// operation kind. In any case, the effect of the field mask is + /// required to be honored by the API. + /// + /// ## Considerations for HTTP REST + /// + /// The HTTP kind of an update operation which uses a field mask must + /// be set to PATCH instead of PUT in order to satisfy HTTP semantics + /// (PUT must only be used for full updates). + /// + /// # JSON Encoding of Field Masks + /// + /// In JSON, a field mask is encoded as a single string where paths are + /// separated by a comma. Fields name in each path are converted + /// to/from lower-camel naming conventions. + /// + /// As an example, consider the following message declarations: + /// + /// message Profile { + /// User user = 1; + /// Photo photo = 2; + /// } + /// message User { + /// string display_name = 1; + /// string address = 2; + /// } + /// + /// In proto a field mask for `Profile` may look as such: + /// + /// mask { + /// paths: "user.display_name" + /// paths: "photo" + /// } + /// + /// In JSON, the same mask is represented as below: + /// + /// { + /// mask: "user.displayName,photo" + /// } + /// + /// # Field Masks and Oneof Fields + /// + /// Field masks treat fields in oneofs just as regular fields. Consider the + /// following message: + /// + /// message SampleMessage { + /// oneof test_oneof { + /// string name = 4; + /// SubMessage sub_message = 9; + /// } + /// } + /// + /// The field mask can be: + /// + /// mask { + /// paths: "name" + /// } + /// + /// Or: + /// + /// mask { + /// paths: "sub_message" + /// } + /// + /// Note that oneof type names ("test_oneof" in this case) cannot be used in + /// paths. 
+ /// + [global::System.Diagnostics.DebuggerNonUserCodeAttribute()] + public sealed partial class FieldMask : pb::IMessage { + private static readonly pb::MessageParser _parser = new pb::MessageParser(() => new FieldMask()); + public static pb::MessageParser Parser { get { return _parser; } } + + public static pbr::MessageDescriptor Descriptor { + get { return global::Google.Protobuf.WellKnownTypes.FieldMaskReflection.Descriptor.MessageTypes[0]; } + } + + pbr::MessageDescriptor pb::IMessage.Descriptor { + get { return Descriptor; } + } + + public FieldMask() { + OnConstruction(); + } + + partial void OnConstruction(); + + public FieldMask(FieldMask other) : this() { + paths_ = other.paths_.Clone(); + } + + public FieldMask Clone() { + return new FieldMask(this); + } + + /// Field number for the "paths" field. + public const int PathsFieldNumber = 1; + private static readonly pb::FieldCodec _repeated_paths_codec + = pb::FieldCodec.ForString(10); + private readonly pbc::RepeatedField paths_ = new pbc::RepeatedField(); + /// + /// The set of field mask paths. + /// + public pbc::RepeatedField Paths { + get { return paths_; } + } + + public override bool Equals(object other) { + return Equals(other as FieldMask); + } + + public bool Equals(FieldMask other) { + if (ReferenceEquals(other, null)) { + return false; + } + if (ReferenceEquals(other, this)) { + return true; + } + if(!paths_.Equals(other.paths_)) return false; + return true; + } + + public override int GetHashCode() { + int hash = 1; + hash ^= paths_.GetHashCode(); + return hash; + } + + public override string ToString() { + return pb::JsonFormatter.ToDiagnosticString(this); + } + + public void WriteTo(pb::CodedOutputStream output) { + paths_.WriteTo(output, _repeated_paths_codec); + } + + public int CalculateSize() { + int size = 0; + size += paths_.CalculateSize(_repeated_paths_codec); + return size; + } + + public void MergeFrom(FieldMask other) { + if (other == null) { + return; + } + paths_.Add(other.paths_); + } + + public void MergeFrom(pb::CodedInputStream input) { + uint tag; + while ((tag = input.ReadTag()) != 0) { + switch(tag) { + default: + input.SkipLastField(); + break; + case 10: { + paths_.AddEntriesFrom(input, _repeated_paths_codec); + break; + } + } + } + } + + } + + #endregion + +} + +#endregion Designer generated code diff --git a/packager/third_party/protobuf/csharp/src/Google.Protobuf/WellKnownTypes/FieldMaskPartial.cs b/packager/third_party/protobuf/csharp/src/Google.Protobuf/WellKnownTypes/FieldMaskPartial.cs new file mode 100644 index 0000000000..4bd62cf3d4 --- /dev/null +++ b/packager/third_party/protobuf/csharp/src/Google.Protobuf/WellKnownTypes/FieldMaskPartial.cs @@ -0,0 +1,123 @@ +#region Copyright notice and license +// Protocol Buffers - Google's data interchange format +// Copyright 2016 Google Inc. All rights reserved. +// https://developers.google.com/protocol-buffers/ +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. 
nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +#endregion + +using System; +using System.Collections; +using System.Collections.Generic; +using System.IO; +using System.Linq; +using System.Text; + +namespace Google.Protobuf.WellKnownTypes +{ + // Manually-written partial class for the FieldMask well-known type. + public partial class FieldMask : ICustomDiagnosticMessage + { + /// + /// Converts a timestamp specified in seconds/nanoseconds to a string. + /// + /// + /// If the value is a normalized duration in the range described in field_mask.proto, + /// is ignored. Otherwise, if the parameter is true, + /// a JSON object with a warning is returned; if it is false, an is thrown. + /// + /// Paths in the field mask + /// Determines the handling of non-normalized values + /// The represented duration is invalid, and is false. + internal static string ToJson(IList paths, bool diagnosticOnly) + { + var firstInvalid = paths.FirstOrDefault(p => !ValidatePath(p)); + if (firstInvalid == null) + { + var writer = new StringWriter(); + JsonFormatter.WriteString(writer, string.Join(",", paths.Select(JsonFormatter.ToCamelCase))); + return writer.ToString(); + } + else + { + if (diagnosticOnly) + { + var writer = new StringWriter(); + writer.Write("{ \"@warning\": \"Invalid FieldMask\", \"paths\": "); + JsonFormatter.Default.WriteList(writer, (IList)paths); + writer.Write(" }"); + return writer.ToString(); + } + else + { + throw new InvalidOperationException($"Invalid field mask to be converted to JSON: {firstInvalid}"); + } + } + } + + /// + /// Camel-case converter with added strictness for field mask formatting. + /// + /// The field mask is invalid for JSON representation + private static bool ValidatePath(string input) + { + for (int i = 0; i < input.Length; i++) + { + char c = input[i]; + if (c >= 'A' && c <= 'Z') + { + return false; + } + if (c == '_' && i < input.Length - 1) + { + char next = input[i + 1]; + if (next < 'a' || next > 'z') + { + return false; + } + } + } + return true; + } + + /// + /// Returns a string representation of this for diagnostic purposes. + /// + /// + /// Normally the returned value will be a JSON string value (including leading and trailing quotes) but + /// when the value is non-normalized or out of range, a JSON object representation will be returned + /// instead, including a warning. This is to avoid exceptions being thrown when trying to + /// diagnose problems - the regular JSON formatter will still throw an exception for non-normalized + /// values. + /// + /// A string representation of this value. 
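For orientation, a small sketch (illustrative only, not part of the vendored sources) of how the ToDiagnosticString method below relates to the path encoding documented in the generated FieldMask class; the expected output mirrors the JSON encoding example given in that documentation.

using System;
using Google.Protobuf.WellKnownTypes;

class FieldMaskExample
{
    static void Main()
    {
        // Paths hold the proto field names; the JSON form is a single
        // comma-separated string with lowerCamelCase segments, as described
        // in the generated FieldMask documentation above.
        var mask = new FieldMask { Paths = { "user.display_name", "photo" } };
        Console.WriteLine(mask.ToDiagnosticString()); // "user.displayName,photo"
    }
}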
+ public string ToDiagnosticString() + { + return ToJson(Paths, true); + } + } +} diff --git a/packager/third_party/protobuf/csharp/src/Google.Protobuf/WellKnownTypes/SourceContext.cs b/packager/third_party/protobuf/csharp/src/Google.Protobuf/WellKnownTypes/SourceContext.cs new file mode 100644 index 0000000000..a235ecefa6 --- /dev/null +++ b/packager/third_party/protobuf/csharp/src/Google.Protobuf/WellKnownTypes/SourceContext.cs @@ -0,0 +1,156 @@ +// Generated by the protocol buffer compiler. DO NOT EDIT! +// source: google/protobuf/source_context.proto +#pragma warning disable 1591, 0612, 3021 +#region Designer generated code + +using pb = global::Google.Protobuf; +using pbc = global::Google.Protobuf.Collections; +using pbr = global::Google.Protobuf.Reflection; +using scg = global::System.Collections.Generic; +namespace Google.Protobuf.WellKnownTypes { + + /// Holder for reflection information generated from google/protobuf/source_context.proto + [global::System.Diagnostics.DebuggerNonUserCodeAttribute()] + public static partial class SourceContextReflection { + + #region Descriptor + /// File descriptor for google/protobuf/source_context.proto + public static pbr::FileDescriptor Descriptor { + get { return descriptor; } + } + private static pbr::FileDescriptor descriptor; + + static SourceContextReflection() { + byte[] descriptorData = global::System.Convert.FromBase64String( + string.Concat( + "CiRnb29nbGUvcHJvdG9idWYvc291cmNlX2NvbnRleHQucHJvdG8SD2dvb2ds", + "ZS5wcm90b2J1ZiIiCg1Tb3VyY2VDb250ZXh0EhEKCWZpbGVfbmFtZRgBIAEo", + "CUJVChNjb20uZ29vZ2xlLnByb3RvYnVmQhJTb3VyY2VDb250ZXh0UHJvdG9Q", + "AaABAaICA0dQQqoCHkdvb2dsZS5Qcm90b2J1Zi5XZWxsS25vd25UeXBlc2IG", + "cHJvdG8z")); + descriptor = pbr::FileDescriptor.FromGeneratedCode(descriptorData, + new pbr::FileDescriptor[] { }, + new pbr::GeneratedClrTypeInfo(null, new pbr::GeneratedClrTypeInfo[] { + new pbr::GeneratedClrTypeInfo(typeof(global::Google.Protobuf.WellKnownTypes.SourceContext), global::Google.Protobuf.WellKnownTypes.SourceContext.Parser, new[]{ "FileName" }, null, null, null) + })); + } + #endregion + + } + #region Messages + /// + /// `SourceContext` represents information about the source of a + /// protobuf element, like the file in which it is defined. + /// + [global::System.Diagnostics.DebuggerNonUserCodeAttribute()] + public sealed partial class SourceContext : pb::IMessage { + private static readonly pb::MessageParser _parser = new pb::MessageParser(() => new SourceContext()); + public static pb::MessageParser Parser { get { return _parser; } } + + public static pbr::MessageDescriptor Descriptor { + get { return global::Google.Protobuf.WellKnownTypes.SourceContextReflection.Descriptor.MessageTypes[0]; } + } + + pbr::MessageDescriptor pb::IMessage.Descriptor { + get { return Descriptor; } + } + + public SourceContext() { + OnConstruction(); + } + + partial void OnConstruction(); + + public SourceContext(SourceContext other) : this() { + fileName_ = other.fileName_; + } + + public SourceContext Clone() { + return new SourceContext(this); + } + + /// Field number for the "file_name" field. + public const int FileNameFieldNumber = 1; + private string fileName_ = ""; + /// + /// The path-qualified name of the .proto file that contained the associated + /// protobuf element. For example: `"google/protobuf/source.proto"`. 
+ /// + public string FileName { + get { return fileName_; } + set { + fileName_ = pb::ProtoPreconditions.CheckNotNull(value, "value"); + } + } + + public override bool Equals(object other) { + return Equals(other as SourceContext); + } + + public bool Equals(SourceContext other) { + if (ReferenceEquals(other, null)) { + return false; + } + if (ReferenceEquals(other, this)) { + return true; + } + if (FileName != other.FileName) return false; + return true; + } + + public override int GetHashCode() { + int hash = 1; + if (FileName.Length != 0) hash ^= FileName.GetHashCode(); + return hash; + } + + public override string ToString() { + return pb::JsonFormatter.ToDiagnosticString(this); + } + + public void WriteTo(pb::CodedOutputStream output) { + if (FileName.Length != 0) { + output.WriteRawTag(10); + output.WriteString(FileName); + } + } + + public int CalculateSize() { + int size = 0; + if (FileName.Length != 0) { + size += 1 + pb::CodedOutputStream.ComputeStringSize(FileName); + } + return size; + } + + public void MergeFrom(SourceContext other) { + if (other == null) { + return; + } + if (other.FileName.Length != 0) { + FileName = other.FileName; + } + } + + public void MergeFrom(pb::CodedInputStream input) { + uint tag; + while ((tag = input.ReadTag()) != 0) { + switch(tag) { + default: + input.SkipLastField(); + break; + case 10: { + FileName = input.ReadString(); + break; + } + } + } + } + + } + + #endregion + +} + +#endregion Designer generated code diff --git a/packager/third_party/protobuf/csharp/src/Google.Protobuf/WellKnownTypes/Struct.cs b/packager/third_party/protobuf/csharp/src/Google.Protobuf/WellKnownTypes/Struct.cs new file mode 100644 index 0000000000..edc8940db3 --- /dev/null +++ b/packager/third_party/protobuf/csharp/src/Google.Protobuf/WellKnownTypes/Struct.cs @@ -0,0 +1,600 @@ +// Generated by the protocol buffer compiler. DO NOT EDIT! 
+// source: google/protobuf/struct.proto +#pragma warning disable 1591, 0612, 3021 +#region Designer generated code + +using pb = global::Google.Protobuf; +using pbc = global::Google.Protobuf.Collections; +using pbr = global::Google.Protobuf.Reflection; +using scg = global::System.Collections.Generic; +namespace Google.Protobuf.WellKnownTypes { + + /// Holder for reflection information generated from google/protobuf/struct.proto + [global::System.Diagnostics.DebuggerNonUserCodeAttribute()] + public static partial class StructReflection { + + #region Descriptor + /// File descriptor for google/protobuf/struct.proto + public static pbr::FileDescriptor Descriptor { + get { return descriptor; } + } + private static pbr::FileDescriptor descriptor; + + static StructReflection() { + byte[] descriptorData = global::System.Convert.FromBase64String( + string.Concat( + "Chxnb29nbGUvcHJvdG9idWYvc3RydWN0LnByb3RvEg9nb29nbGUucHJvdG9i", + "dWYihAEKBlN0cnVjdBIzCgZmaWVsZHMYASADKAsyIy5nb29nbGUucHJvdG9i", + "dWYuU3RydWN0LkZpZWxkc0VudHJ5GkUKC0ZpZWxkc0VudHJ5EgsKA2tleRgB", + "IAEoCRIlCgV2YWx1ZRgCIAEoCzIWLmdvb2dsZS5wcm90b2J1Zi5WYWx1ZToC", + "OAEi6gEKBVZhbHVlEjAKCm51bGxfdmFsdWUYASABKA4yGi5nb29nbGUucHJv", + "dG9idWYuTnVsbFZhbHVlSAASFgoMbnVtYmVyX3ZhbHVlGAIgASgBSAASFgoM", + "c3RyaW5nX3ZhbHVlGAMgASgJSAASFAoKYm9vbF92YWx1ZRgEIAEoCEgAEi8K", + "DHN0cnVjdF92YWx1ZRgFIAEoCzIXLmdvb2dsZS5wcm90b2J1Zi5TdHJ1Y3RI", + "ABIwCgpsaXN0X3ZhbHVlGAYgASgLMhouZ29vZ2xlLnByb3RvYnVmLkxpc3RW", + "YWx1ZUgAQgYKBGtpbmQiMwoJTGlzdFZhbHVlEiYKBnZhbHVlcxgBIAMoCzIW", + "Lmdvb2dsZS5wcm90b2J1Zi5WYWx1ZSobCglOdWxsVmFsdWUSDgoKTlVMTF9W", + "QUxVRRAAQoEBChNjb20uZ29vZ2xlLnByb3RvYnVmQgtTdHJ1Y3RQcm90b1AB", + "WjFnaXRodWIuY29tL2dvbGFuZy9wcm90b2J1Zi9wdHlwZXMvc3RydWN0O3N0", + "cnVjdHBioAEBogIDR1BCqgIeR29vZ2xlLlByb3RvYnVmLldlbGxLbm93blR5", + "cGVzYgZwcm90bzM=")); + descriptor = pbr::FileDescriptor.FromGeneratedCode(descriptorData, + new pbr::FileDescriptor[] { }, + new pbr::GeneratedClrTypeInfo(new[] {typeof(global::Google.Protobuf.WellKnownTypes.NullValue), }, new pbr::GeneratedClrTypeInfo[] { + new pbr::GeneratedClrTypeInfo(typeof(global::Google.Protobuf.WellKnownTypes.Struct), global::Google.Protobuf.WellKnownTypes.Struct.Parser, new[]{ "Fields" }, null, null, new pbr::GeneratedClrTypeInfo[] { null, }), + new pbr::GeneratedClrTypeInfo(typeof(global::Google.Protobuf.WellKnownTypes.Value), global::Google.Protobuf.WellKnownTypes.Value.Parser, new[]{ "NullValue", "NumberValue", "StringValue", "BoolValue", "StructValue", "ListValue" }, new[]{ "Kind" }, null, null), + new pbr::GeneratedClrTypeInfo(typeof(global::Google.Protobuf.WellKnownTypes.ListValue), global::Google.Protobuf.WellKnownTypes.ListValue.Parser, new[]{ "Values" }, null, null, null) + })); + } + #endregion + + } + #region Enums + /// + /// `NullValue` is a singleton enumeration to represent the null value for the + /// `Value` type union. + /// + /// The JSON representation for `NullValue` is JSON `null`. + /// + public enum NullValue { + /// + /// Null value. + /// + [pbr::OriginalName("NULL_VALUE")] NullValue = 0, + } + + #endregion + + #region Messages + /// + /// `Struct` represents a structured data value, consisting of fields + /// which map to dynamically typed values. In some languages, `Struct` + /// might be supported by a native representation. For example, in + /// scripting languages like JS a struct is represented as an + /// object. The details of that representation are described together + /// with the proto support for the language. 
+ /// + /// The JSON representation for `Struct` is JSON object. + /// + [global::System.Diagnostics.DebuggerNonUserCodeAttribute()] + public sealed partial class Struct : pb::IMessage { + private static readonly pb::MessageParser _parser = new pb::MessageParser(() => new Struct()); + public static pb::MessageParser Parser { get { return _parser; } } + + public static pbr::MessageDescriptor Descriptor { + get { return global::Google.Protobuf.WellKnownTypes.StructReflection.Descriptor.MessageTypes[0]; } + } + + pbr::MessageDescriptor pb::IMessage.Descriptor { + get { return Descriptor; } + } + + public Struct() { + OnConstruction(); + } + + partial void OnConstruction(); + + public Struct(Struct other) : this() { + fields_ = other.fields_.Clone(); + } + + public Struct Clone() { + return new Struct(this); + } + + /// Field number for the "fields" field. + public const int FieldsFieldNumber = 1; + private static readonly pbc::MapField.Codec _map_fields_codec + = new pbc::MapField.Codec(pb::FieldCodec.ForString(10), pb::FieldCodec.ForMessage(18, global::Google.Protobuf.WellKnownTypes.Value.Parser), 10); + private readonly pbc::MapField fields_ = new pbc::MapField(); + /// + /// Unordered map of dynamically typed values. + /// + public pbc::MapField Fields { + get { return fields_; } + } + + public override bool Equals(object other) { + return Equals(other as Struct); + } + + public bool Equals(Struct other) { + if (ReferenceEquals(other, null)) { + return false; + } + if (ReferenceEquals(other, this)) { + return true; + } + if (!Fields.Equals(other.Fields)) return false; + return true; + } + + public override int GetHashCode() { + int hash = 1; + hash ^= Fields.GetHashCode(); + return hash; + } + + public override string ToString() { + return pb::JsonFormatter.ToDiagnosticString(this); + } + + public void WriteTo(pb::CodedOutputStream output) { + fields_.WriteTo(output, _map_fields_codec); + } + + public int CalculateSize() { + int size = 0; + size += fields_.CalculateSize(_map_fields_codec); + return size; + } + + public void MergeFrom(Struct other) { + if (other == null) { + return; + } + fields_.Add(other.fields_); + } + + public void MergeFrom(pb::CodedInputStream input) { + uint tag; + while ((tag = input.ReadTag()) != 0) { + switch(tag) { + default: + input.SkipLastField(); + break; + case 10: { + fields_.AddEntriesFrom(input, _map_fields_codec); + break; + } + } + } + } + + } + + /// + /// `Value` represents a dynamically typed value which can be either + /// null, a number, a string, a boolean, a recursive struct value, or a + /// list of values. A producer of value is expected to set one of that + /// variants, absence of any variant indicates an error. + /// + /// The JSON representation for `Value` is JSON value. 
+ /// + [global::System.Diagnostics.DebuggerNonUserCodeAttribute()] + public sealed partial class Value : pb::IMessage { + private static readonly pb::MessageParser _parser = new pb::MessageParser(() => new Value()); + public static pb::MessageParser Parser { get { return _parser; } } + + public static pbr::MessageDescriptor Descriptor { + get { return global::Google.Protobuf.WellKnownTypes.StructReflection.Descriptor.MessageTypes[1]; } + } + + pbr::MessageDescriptor pb::IMessage.Descriptor { + get { return Descriptor; } + } + + public Value() { + OnConstruction(); + } + + partial void OnConstruction(); + + public Value(Value other) : this() { + switch (other.KindCase) { + case KindOneofCase.NullValue: + NullValue = other.NullValue; + break; + case KindOneofCase.NumberValue: + NumberValue = other.NumberValue; + break; + case KindOneofCase.StringValue: + StringValue = other.StringValue; + break; + case KindOneofCase.BoolValue: + BoolValue = other.BoolValue; + break; + case KindOneofCase.StructValue: + StructValue = other.StructValue.Clone(); + break; + case KindOneofCase.ListValue: + ListValue = other.ListValue.Clone(); + break; + } + + } + + public Value Clone() { + return new Value(this); + } + + /// Field number for the "null_value" field. + public const int NullValueFieldNumber = 1; + /// + /// Represents a null value. + /// + public global::Google.Protobuf.WellKnownTypes.NullValue NullValue { + get { return kindCase_ == KindOneofCase.NullValue ? (global::Google.Protobuf.WellKnownTypes.NullValue) kind_ : 0; } + set { + kind_ = value; + kindCase_ = KindOneofCase.NullValue; + } + } + + /// Field number for the "number_value" field. + public const int NumberValueFieldNumber = 2; + /// + /// Represents a double value. + /// + public double NumberValue { + get { return kindCase_ == KindOneofCase.NumberValue ? (double) kind_ : 0D; } + set { + kind_ = value; + kindCase_ = KindOneofCase.NumberValue; + } + } + + /// Field number for the "string_value" field. + public const int StringValueFieldNumber = 3; + /// + /// Represents a string value. + /// + public string StringValue { + get { return kindCase_ == KindOneofCase.StringValue ? (string) kind_ : ""; } + set { + kind_ = pb::ProtoPreconditions.CheckNotNull(value, "value"); + kindCase_ = KindOneofCase.StringValue; + } + } + + /// Field number for the "bool_value" field. + public const int BoolValueFieldNumber = 4; + /// + /// Represents a boolean value. + /// + public bool BoolValue { + get { return kindCase_ == KindOneofCase.BoolValue ? (bool) kind_ : false; } + set { + kind_ = value; + kindCase_ = KindOneofCase.BoolValue; + } + } + + /// Field number for the "struct_value" field. + public const int StructValueFieldNumber = 5; + /// + /// Represents a structured value. + /// + public global::Google.Protobuf.WellKnownTypes.Struct StructValue { + get { return kindCase_ == KindOneofCase.StructValue ? (global::Google.Protobuf.WellKnownTypes.Struct) kind_ : null; } + set { + kind_ = value; + kindCase_ = value == null ? KindOneofCase.None : KindOneofCase.StructValue; + } + } + + /// Field number for the "list_value" field. + public const int ListValueFieldNumber = 6; + /// + /// Represents a repeated `Value`. + /// + public global::Google.Protobuf.WellKnownTypes.ListValue ListValue { + get { return kindCase_ == KindOneofCase.ListValue ? (global::Google.Protobuf.WellKnownTypes.ListValue) kind_ : null; } + set { + kind_ = value; + kindCase_ = value == null ? 
KindOneofCase.None : KindOneofCase.ListValue; + } + } + + private object kind_; + /// Enum of possible cases for the "kind" oneof. + public enum KindOneofCase { + None = 0, + NullValue = 1, + NumberValue = 2, + StringValue = 3, + BoolValue = 4, + StructValue = 5, + ListValue = 6, + } + private KindOneofCase kindCase_ = KindOneofCase.None; + public KindOneofCase KindCase { + get { return kindCase_; } + } + + public void ClearKind() { + kindCase_ = KindOneofCase.None; + kind_ = null; + } + + public override bool Equals(object other) { + return Equals(other as Value); + } + + public bool Equals(Value other) { + if (ReferenceEquals(other, null)) { + return false; + } + if (ReferenceEquals(other, this)) { + return true; + } + if (NullValue != other.NullValue) return false; + if (NumberValue != other.NumberValue) return false; + if (StringValue != other.StringValue) return false; + if (BoolValue != other.BoolValue) return false; + if (!object.Equals(StructValue, other.StructValue)) return false; + if (!object.Equals(ListValue, other.ListValue)) return false; + if (KindCase != other.KindCase) return false; + return true; + } + + public override int GetHashCode() { + int hash = 1; + if (kindCase_ == KindOneofCase.NullValue) hash ^= NullValue.GetHashCode(); + if (kindCase_ == KindOneofCase.NumberValue) hash ^= NumberValue.GetHashCode(); + if (kindCase_ == KindOneofCase.StringValue) hash ^= StringValue.GetHashCode(); + if (kindCase_ == KindOneofCase.BoolValue) hash ^= BoolValue.GetHashCode(); + if (kindCase_ == KindOneofCase.StructValue) hash ^= StructValue.GetHashCode(); + if (kindCase_ == KindOneofCase.ListValue) hash ^= ListValue.GetHashCode(); + hash ^= (int) kindCase_; + return hash; + } + + public override string ToString() { + return pb::JsonFormatter.ToDiagnosticString(this); + } + + public void WriteTo(pb::CodedOutputStream output) { + if (kindCase_ == KindOneofCase.NullValue) { + output.WriteRawTag(8); + output.WriteEnum((int) NullValue); + } + if (kindCase_ == KindOneofCase.NumberValue) { + output.WriteRawTag(17); + output.WriteDouble(NumberValue); + } + if (kindCase_ == KindOneofCase.StringValue) { + output.WriteRawTag(26); + output.WriteString(StringValue); + } + if (kindCase_ == KindOneofCase.BoolValue) { + output.WriteRawTag(32); + output.WriteBool(BoolValue); + } + if (kindCase_ == KindOneofCase.StructValue) { + output.WriteRawTag(42); + output.WriteMessage(StructValue); + } + if (kindCase_ == KindOneofCase.ListValue) { + output.WriteRawTag(50); + output.WriteMessage(ListValue); + } + } + + public int CalculateSize() { + int size = 0; + if (kindCase_ == KindOneofCase.NullValue) { + size += 1 + pb::CodedOutputStream.ComputeEnumSize((int) NullValue); + } + if (kindCase_ == KindOneofCase.NumberValue) { + size += 1 + 8; + } + if (kindCase_ == KindOneofCase.StringValue) { + size += 1 + pb::CodedOutputStream.ComputeStringSize(StringValue); + } + if (kindCase_ == KindOneofCase.BoolValue) { + size += 1 + 1; + } + if (kindCase_ == KindOneofCase.StructValue) { + size += 1 + pb::CodedOutputStream.ComputeMessageSize(StructValue); + } + if (kindCase_ == KindOneofCase.ListValue) { + size += 1 + pb::CodedOutputStream.ComputeMessageSize(ListValue); + } + return size; + } + + public void MergeFrom(Value other) { + if (other == null) { + return; + } + switch (other.KindCase) { + case KindOneofCase.NullValue: + NullValue = other.NullValue; + break; + case KindOneofCase.NumberValue: + NumberValue = other.NumberValue; + break; + case KindOneofCase.StringValue: + StringValue = other.StringValue; + break; 
+ case KindOneofCase.BoolValue: + BoolValue = other.BoolValue; + break; + case KindOneofCase.StructValue: + StructValue = other.StructValue; + break; + case KindOneofCase.ListValue: + ListValue = other.ListValue; + break; + } + + } + + public void MergeFrom(pb::CodedInputStream input) { + uint tag; + while ((tag = input.ReadTag()) != 0) { + switch(tag) { + default: + input.SkipLastField(); + break; + case 8: { + kind_ = input.ReadEnum(); + kindCase_ = KindOneofCase.NullValue; + break; + } + case 17: { + NumberValue = input.ReadDouble(); + break; + } + case 26: { + StringValue = input.ReadString(); + break; + } + case 32: { + BoolValue = input.ReadBool(); + break; + } + case 42: { + global::Google.Protobuf.WellKnownTypes.Struct subBuilder = new global::Google.Protobuf.WellKnownTypes.Struct(); + if (kindCase_ == KindOneofCase.StructValue) { + subBuilder.MergeFrom(StructValue); + } + input.ReadMessage(subBuilder); + StructValue = subBuilder; + break; + } + case 50: { + global::Google.Protobuf.WellKnownTypes.ListValue subBuilder = new global::Google.Protobuf.WellKnownTypes.ListValue(); + if (kindCase_ == KindOneofCase.ListValue) { + subBuilder.MergeFrom(ListValue); + } + input.ReadMessage(subBuilder); + ListValue = subBuilder; + break; + } + } + } + } + + } + + /// + /// `ListValue` is a wrapper around a repeated field of values. + /// + /// The JSON representation for `ListValue` is JSON array. + /// + [global::System.Diagnostics.DebuggerNonUserCodeAttribute()] + public sealed partial class ListValue : pb::IMessage { + private static readonly pb::MessageParser _parser = new pb::MessageParser(() => new ListValue()); + public static pb::MessageParser Parser { get { return _parser; } } + + public static pbr::MessageDescriptor Descriptor { + get { return global::Google.Protobuf.WellKnownTypes.StructReflection.Descriptor.MessageTypes[2]; } + } + + pbr::MessageDescriptor pb::IMessage.Descriptor { + get { return Descriptor; } + } + + public ListValue() { + OnConstruction(); + } + + partial void OnConstruction(); + + public ListValue(ListValue other) : this() { + values_ = other.values_.Clone(); + } + + public ListValue Clone() { + return new ListValue(this); + } + + /// Field number for the "values" field. + public const int ValuesFieldNumber = 1; + private static readonly pb::FieldCodec _repeated_values_codec + = pb::FieldCodec.ForMessage(10, global::Google.Protobuf.WellKnownTypes.Value.Parser); + private readonly pbc::RepeatedField values_ = new pbc::RepeatedField(); + /// + /// Repeated field of dynamically typed values. 
+ /// + public pbc::RepeatedField Values { + get { return values_; } + } + + public override bool Equals(object other) { + return Equals(other as ListValue); + } + + public bool Equals(ListValue other) { + if (ReferenceEquals(other, null)) { + return false; + } + if (ReferenceEquals(other, this)) { + return true; + } + if(!values_.Equals(other.values_)) return false; + return true; + } + + public override int GetHashCode() { + int hash = 1; + hash ^= values_.GetHashCode(); + return hash; + } + + public override string ToString() { + return pb::JsonFormatter.ToDiagnosticString(this); + } + + public void WriteTo(pb::CodedOutputStream output) { + values_.WriteTo(output, _repeated_values_codec); + } + + public int CalculateSize() { + int size = 0; + size += values_.CalculateSize(_repeated_values_codec); + return size; + } + + public void MergeFrom(ListValue other) { + if (other == null) { + return; + } + values_.Add(other.values_); + } + + public void MergeFrom(pb::CodedInputStream input) { + uint tag; + while ((tag = input.ReadTag()) != 0) { + switch(tag) { + default: + input.SkipLastField(); + break; + case 10: { + values_.AddEntriesFrom(input, _repeated_values_codec); + break; + } + } + } + } + + } + + #endregion + +} + +#endregion Designer generated code diff --git a/packager/third_party/protobuf/csharp/src/Google.Protobuf/WellKnownTypes/TimeExtensions.cs b/packager/third_party/protobuf/csharp/src/Google.Protobuf/WellKnownTypes/TimeExtensions.cs new file mode 100644 index 0000000000..dd485d324a --- /dev/null +++ b/packager/third_party/protobuf/csharp/src/Google.Protobuf/WellKnownTypes/TimeExtensions.cs @@ -0,0 +1,80 @@ +#region Copyright notice and license +// Protocol Buffers - Google's data interchange format +// Copyright 2015 Google Inc. All rights reserved. +// https://developers.google.com/protocol-buffers/ +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
+#endregion + +using System; +using System.Collections.Generic; +using System.Linq; +using System.Text; +using System.Threading.Tasks; + +namespace Google.Protobuf.WellKnownTypes +{ + /// + /// Extension methods on BCL time-related types, converting to protobuf types. + /// + public static class TimeExtensions + { + /// + /// Converts the given to a . + /// + /// The date and time to convert to a timestamp. + /// The value has a other than Utc. + /// The converted timestamp. + public static Timestamp ToTimestamp(this DateTime dateTime) + { + return Timestamp.FromDateTime(dateTime); + } + + /// + /// Converts the given to a + /// + /// The offset is taken into consideration when converting the value (so the same instant in time + /// is represented) but is not a separate part of the resulting value. In other words, there is no + /// roundtrip operation to retrieve the original DateTimeOffset. + /// The date and time (with UTC offset) to convert to a timestamp. + /// The converted timestamp. + public static Timestamp ToTimestamp(this DateTimeOffset dateTimeOffset) + { + return Timestamp.FromDateTimeOffset(dateTimeOffset); + } + + /// + /// Converts the given to a . + /// + /// The time span to convert. + /// The converted duration. + public static Duration ToDuration(this TimeSpan timeSpan) + { + return Duration.FromTimeSpan(timeSpan); + } + } +} diff --git a/packager/third_party/protobuf/csharp/src/Google.Protobuf/WellKnownTypes/Timestamp.cs b/packager/third_party/protobuf/csharp/src/Google.Protobuf/WellKnownTypes/Timestamp.cs new file mode 100644 index 0000000000..053b88bd33 --- /dev/null +++ b/packager/third_party/protobuf/csharp/src/Google.Protobuf/WellKnownTypes/Timestamp.cs @@ -0,0 +1,241 @@ +// Generated by the protocol buffer compiler. DO NOT EDIT! 
+// source: google/protobuf/timestamp.proto +#pragma warning disable 1591, 0612, 3021 +#region Designer generated code + +using pb = global::Google.Protobuf; +using pbc = global::Google.Protobuf.Collections; +using pbr = global::Google.Protobuf.Reflection; +using scg = global::System.Collections.Generic; +namespace Google.Protobuf.WellKnownTypes { + + /// Holder for reflection information generated from google/protobuf/timestamp.proto + [global::System.Diagnostics.DebuggerNonUserCodeAttribute()] + public static partial class TimestampReflection { + + #region Descriptor + /// File descriptor for google/protobuf/timestamp.proto + public static pbr::FileDescriptor Descriptor { + get { return descriptor; } + } + private static pbr::FileDescriptor descriptor; + + static TimestampReflection() { + byte[] descriptorData = global::System.Convert.FromBase64String( + string.Concat( + "Ch9nb29nbGUvcHJvdG9idWYvdGltZXN0YW1wLnByb3RvEg9nb29nbGUucHJv", + "dG9idWYiKwoJVGltZXN0YW1wEg8KB3NlY29uZHMYASABKAMSDQoFbmFub3MY", + "AiABKAVCgQEKE2NvbS5nb29nbGUucHJvdG9idWZCDlRpbWVzdGFtcFByb3Rv", + "UAFaK2dpdGh1Yi5jb20vZ29sYW5nL3Byb3RvYnVmL3B0eXBlcy90aW1lc3Rh", + "bXCgAQH4AQGiAgNHUEKqAh5Hb29nbGUuUHJvdG9idWYuV2VsbEtub3duVHlw", + "ZXNiBnByb3RvMw==")); + descriptor = pbr::FileDescriptor.FromGeneratedCode(descriptorData, + new pbr::FileDescriptor[] { }, + new pbr::GeneratedClrTypeInfo(null, new pbr::GeneratedClrTypeInfo[] { + new pbr::GeneratedClrTypeInfo(typeof(global::Google.Protobuf.WellKnownTypes.Timestamp), global::Google.Protobuf.WellKnownTypes.Timestamp.Parser, new[]{ "Seconds", "Nanos" }, null, null, null) + })); + } + #endregion + + } + #region Messages + /// + /// A Timestamp represents a point in time independent of any time zone + /// or calendar, represented as seconds and fractions of seconds at + /// nanosecond resolution in UTC Epoch time. It is encoded using the + /// Proleptic Gregorian Calendar which extends the Gregorian calendar + /// backwards to year one. It is encoded assuming all minutes are 60 + /// seconds long, i.e. leap seconds are "smeared" so that no leap second + /// table is needed for interpretation. Range is from + /// 0001-01-01T00:00:00Z to 9999-12-31T23:59:59.999999999Z. + /// By restricting to that range, we ensure that we can convert to + /// and from RFC 3339 date strings. + /// See [https://www.ietf.org/rfc/rfc3339.txt](https://www.ietf.org/rfc/rfc3339.txt). + /// + /// Example 1: Compute Timestamp from POSIX `time()`. + /// + /// Timestamp timestamp; + /// timestamp.set_seconds(time(NULL)); + /// timestamp.set_nanos(0); + /// + /// Example 2: Compute Timestamp from POSIX `gettimeofday()`. + /// + /// struct timeval tv; + /// gettimeofday(&tv, NULL); + /// + /// Timestamp timestamp; + /// timestamp.set_seconds(tv.tv_sec); + /// timestamp.set_nanos(tv.tv_usec * 1000); + /// + /// Example 3: Compute Timestamp from Win32 `GetSystemTimeAsFileTime()`. + /// + /// FILETIME ft; + /// GetSystemTimeAsFileTime(&ft); + /// UINT64 ticks = (((UINT64)ft.dwHighDateTime) << 32) | ft.dwLowDateTime; + /// + /// // A Windows tick is 100 nanoseconds. Windows epoch 1601-01-01T00:00:00Z + /// // is 11644473600 seconds before Unix epoch 1970-01-01T00:00:00Z. + /// Timestamp timestamp; + /// timestamp.set_seconds((INT64) ((ticks / 10000000) - 11644473600LL)); + /// timestamp.set_nanos((INT32) ((ticks % 10000000) * 100)); + /// + /// Example 4: Compute Timestamp from Java `System.currentTimeMillis()`. 
+ /// + /// long millis = System.currentTimeMillis(); + /// + /// Timestamp timestamp = Timestamp.newBuilder().setSeconds(millis / 1000) + /// .setNanos((int) ((millis % 1000) * 1000000)).build(); + /// + /// Example 5: Compute Timestamp from current time in Python. + /// + /// now = time.time() + /// seconds = int(now) + /// nanos = int((now - seconds) * 10**9) + /// timestamp = Timestamp(seconds=seconds, nanos=nanos) + /// + [global::System.Diagnostics.DebuggerNonUserCodeAttribute()] + public sealed partial class Timestamp : pb::IMessage { + private static readonly pb::MessageParser _parser = new pb::MessageParser(() => new Timestamp()); + public static pb::MessageParser Parser { get { return _parser; } } + + public static pbr::MessageDescriptor Descriptor { + get { return global::Google.Protobuf.WellKnownTypes.TimestampReflection.Descriptor.MessageTypes[0]; } + } + + pbr::MessageDescriptor pb::IMessage.Descriptor { + get { return Descriptor; } + } + + public Timestamp() { + OnConstruction(); + } + + partial void OnConstruction(); + + public Timestamp(Timestamp other) : this() { + seconds_ = other.seconds_; + nanos_ = other.nanos_; + } + + public Timestamp Clone() { + return new Timestamp(this); + } + + /// Field number for the "seconds" field. + public const int SecondsFieldNumber = 1; + private long seconds_; + /// + /// Represents seconds of UTC time since Unix epoch + /// 1970-01-01T00:00:00Z. Must be from from 0001-01-01T00:00:00Z to + /// 9999-12-31T23:59:59Z inclusive. + /// + public long Seconds { + get { return seconds_; } + set { + seconds_ = value; + } + } + + /// Field number for the "nanos" field. + public const int NanosFieldNumber = 2; + private int nanos_; + /// + /// Non-negative fractions of a second at nanosecond resolution. Negative + /// second values with fractions must still have non-negative nanos values + /// that count forward in time. Must be from 0 to 999,999,999 + /// inclusive. 
+ /// + public int Nanos { + get { return nanos_; } + set { + nanos_ = value; + } + } + + public override bool Equals(object other) { + return Equals(other as Timestamp); + } + + public bool Equals(Timestamp other) { + if (ReferenceEquals(other, null)) { + return false; + } + if (ReferenceEquals(other, this)) { + return true; + } + if (Seconds != other.Seconds) return false; + if (Nanos != other.Nanos) return false; + return true; + } + + public override int GetHashCode() { + int hash = 1; + if (Seconds != 0L) hash ^= Seconds.GetHashCode(); + if (Nanos != 0) hash ^= Nanos.GetHashCode(); + return hash; + } + + public override string ToString() { + return pb::JsonFormatter.ToDiagnosticString(this); + } + + public void WriteTo(pb::CodedOutputStream output) { + if (Seconds != 0L) { + output.WriteRawTag(8); + output.WriteInt64(Seconds); + } + if (Nanos != 0) { + output.WriteRawTag(16); + output.WriteInt32(Nanos); + } + } + + public int CalculateSize() { + int size = 0; + if (Seconds != 0L) { + size += 1 + pb::CodedOutputStream.ComputeInt64Size(Seconds); + } + if (Nanos != 0) { + size += 1 + pb::CodedOutputStream.ComputeInt32Size(Nanos); + } + return size; + } + + public void MergeFrom(Timestamp other) { + if (other == null) { + return; + } + if (other.Seconds != 0L) { + Seconds = other.Seconds; + } + if (other.Nanos != 0) { + Nanos = other.Nanos; + } + } + + public void MergeFrom(pb::CodedInputStream input) { + uint tag; + while ((tag = input.ReadTag()) != 0) { + switch(tag) { + default: + input.SkipLastField(); + break; + case 8: { + Seconds = input.ReadInt64(); + break; + } + case 16: { + Nanos = input.ReadInt32(); + break; + } + } + } + } + + } + + #endregion + +} + +#endregion Designer generated code diff --git a/packager/third_party/protobuf/csharp/src/Google.Protobuf/WellKnownTypes/TimestampPartial.cs b/packager/third_party/protobuf/csharp/src/Google.Protobuf/WellKnownTypes/TimestampPartial.cs new file mode 100644 index 0000000000..aa40347342 --- /dev/null +++ b/packager/third_party/protobuf/csharp/src/Google.Protobuf/WellKnownTypes/TimestampPartial.cs @@ -0,0 +1,241 @@ +#region Copyright notice and license +// Protocol Buffers - Google's data interchange format +// Copyright 2015 Google Inc. All rights reserved. +// https://developers.google.com/protocol-buffers/ +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +#endregion + +using System; +using System.Globalization; +using System.Text; + +namespace Google.Protobuf.WellKnownTypes +{ + public partial class Timestamp : ICustomDiagnosticMessage + { + private static readonly DateTime UnixEpoch = new DateTime(1970, 1, 1, 0, 0, 0, DateTimeKind.Utc); + // Constants determined programmatically, but then hard-coded so they can be constant expressions. + private const long BclSecondsAtUnixEpoch = 62135596800; + internal const long UnixSecondsAtBclMaxValue = 253402300799; + internal const long UnixSecondsAtBclMinValue = -BclSecondsAtUnixEpoch; + internal const int MaxNanos = Duration.NanosecondsPerSecond - 1; + + private static bool IsNormalized(long seconds, int nanoseconds) => + nanoseconds >= 0 && + nanoseconds <= MaxNanos && + seconds >= UnixSecondsAtBclMinValue && + seconds <= UnixSecondsAtBclMaxValue; + + /// + /// Returns the difference between one and another, as a . + /// + /// The timestamp to subtract from. Must not be null. + /// The timestamp to subtract. Must not be null. + /// The difference between the two specified timestamps. + public static Duration operator -(Timestamp lhs, Timestamp rhs) + { + ProtoPreconditions.CheckNotNull(lhs, "lhs"); + ProtoPreconditions.CheckNotNull(rhs, "rhs"); + checked + { + return Duration.Normalize(lhs.Seconds - rhs.Seconds, lhs.Nanos - rhs.Nanos); + } + } + + /// + /// Adds a to a , to obtain another Timestamp. + /// + /// The timestamp to add the duration to. Must not be null. + /// The duration to add. Must not be null. + /// The result of adding the duration to the timestamp. + public static Timestamp operator +(Timestamp lhs, Duration rhs) + { + ProtoPreconditions.CheckNotNull(lhs, "lhs"); + ProtoPreconditions.CheckNotNull(rhs, "rhs"); + checked + { + return Normalize(lhs.Seconds + rhs.Seconds, lhs.Nanos + rhs.Nanos); + } + } + + /// + /// Subtracts a from a , to obtain another Timestamp. + /// + /// The timestamp to subtract the duration from. Must not be null. + /// The duration to subtract. + /// The result of subtracting the duration from the timestamp. + public static Timestamp operator -(Timestamp lhs, Duration rhs) + { + ProtoPreconditions.CheckNotNull(lhs, "lhs"); + ProtoPreconditions.CheckNotNull(rhs, "rhs"); + checked + { + return Normalize(lhs.Seconds - rhs.Seconds, lhs.Nanos - rhs.Nanos); + } + } + + /// + /// Converts this timestamp into a . + /// + /// + /// The resulting DateTime will always have a Kind of Utc. + /// If the timestamp is not a precise number of ticks, it will be truncated towards the start + /// of time. For example, a timestamp with a value of 99 will result in a + /// value precisely on a second. + /// + /// This timestamp as a DateTime. + /// The timestamp contains invalid values; either it is + /// incorrectly normalized or is outside the valid range. 
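// [Editor's sketch - not part of the vendored file] Usage of the operators
// defined above: subtracting one Timestamp from another yields a Duration, and
// a Duration can be added to or subtracted from a Timestamp. The method name
// below is made up for illustration.

static void OperatorExamples()
{
    Timestamp start = Timestamp.FromDateTime(new DateTime(2016, 1, 1, 0, 0, 0, DateTimeKind.Utc));
    Timestamp end = Timestamp.FromDateTime(new DateTime(2016, 1, 2, 0, 0, 0, DateTimeKind.Utc));

    Duration elapsed = end - start;       // 86400 seconds
    Timestamp later = start + elapsed;    // == end
    Timestamp earlier = end - elapsed;    // == start

    // The operators run in a checked context, so overflowing the 64-bit
    // seconds range throws OverflowException instead of silently wrapping.
}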
+        public DateTime ToDateTime()
+        {
+            if (!IsNormalized(Seconds, Nanos))
+            {
+                throw new InvalidOperationException($"Timestamp contains invalid values: Seconds={Seconds}; Nanos={Nanos}");
+            }
+            return UnixEpoch.AddSeconds(Seconds).AddTicks(Nanos / Duration.NanosecondsPerTick);
+        }
+
+        ///
+        /// Converts this timestamp into a DateTimeOffset.
+        ///
+        ///
+        /// The resulting DateTimeOffset will always have an Offset of zero.
+        /// If the timestamp is not a precise number of ticks, it will be truncated towards the start
+        /// of time. For example, a timestamp with a Nanos value of 99 will result in a
+        /// DateTimeOffset value precisely on a second.
+        ///
+        /// This timestamp as a DateTimeOffset.
+        /// The timestamp contains invalid values; either it is
+        /// incorrectly normalized or is outside the valid range.
+        public DateTimeOffset ToDateTimeOffset()
+        {
+            return new DateTimeOffset(ToDateTime(), TimeSpan.Zero);
+        }
+
+        ///
+        /// Converts the specified DateTime to a Timestamp.
+        ///
+        ///
+        /// Thrown if the Kind of dateTime is not DateTimeKind.Utc.
+        /// The converted timestamp.
+        public static Timestamp FromDateTime(DateTime dateTime)
+        {
+            if (dateTime.Kind != DateTimeKind.Utc)
+            {
+                throw new ArgumentException("Conversion from DateTime to Timestamp requires the DateTime kind to be Utc", "dateTime");
+            }
+            // Do the arithmetic using DateTime.Ticks, which is always non-negative, making things simpler.
+            long secondsSinceBclEpoch = dateTime.Ticks / TimeSpan.TicksPerSecond;
+            int nanoseconds = (int) (dateTime.Ticks % TimeSpan.TicksPerSecond) * Duration.NanosecondsPerTick;
+            return new Timestamp { Seconds = secondsSinceBclEpoch - BclSecondsAtUnixEpoch, Nanos = nanoseconds };
+        }
+
+        ///
+        /// Converts the given DateTimeOffset to a Timestamp.
+        ///
+        /// The offset is taken into consideration when converting the value (so the same instant in time
+        /// is represented) but is not a separate part of the resulting value. In other words, there is no
+        /// roundtrip operation to retrieve the original DateTimeOffset.
+        /// The date and time (with UTC offset) to convert to a timestamp.
+        /// The converted timestamp.
+        public static Timestamp FromDateTimeOffset(DateTimeOffset dateTimeOffset)
+        {
+            // We don't need to worry about this having negative ticks: DateTimeOffset is constrained to handle
+            // values whose *UTC* value is in the range of DateTime.
+            return FromDateTime(dateTimeOffset.UtcDateTime);
+        }
+
+        internal static Timestamp Normalize(long seconds, int nanoseconds)
+        {
+            int extraSeconds = nanoseconds / Duration.NanosecondsPerSecond;
+            seconds += extraSeconds;
+            nanoseconds -= extraSeconds * Duration.NanosecondsPerSecond;
+
+            if (nanoseconds < 0)
+            {
+                nanoseconds += Duration.NanosecondsPerSecond;
+                seconds--;
+            }
+            return new Timestamp { Seconds = seconds, Nanos = nanoseconds };
+        }
+
+        ///
+        /// Converts a timestamp specified in seconds/nanoseconds to a string.
+        ///
+        ///
+        /// If the value is a normalized timestamp in the range described in timestamp.proto,
+        /// diagnosticOnly is ignored. Otherwise, if the parameter is true,
+        /// a JSON object with a warning is returned; if it is false, an InvalidOperationException is thrown.
+        ///
+        /// Seconds portion of the timestamp.
+        /// Nanoseconds portion of the timestamp.
+        /// Determines the handling of non-normalized values.
+        /// The represented timestamp is invalid, and diagnosticOnly is false.
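// [Editor's sketch - not part of the vendored file] Round-tripping through the
// conversions above, plus the effect of Normalize (internal, defined above) on
// a negative nanosecond component; ToDiagnosticString is defined further below.
// The method name is made up for illustration.

static void ConversionExamples()
{
    DateTime utc = new DateTime(2016, 3, 1, 12, 0, 0, DateTimeKind.Utc);
    Timestamp ts = Timestamp.FromDateTime(utc);
    DateTime roundTripped = ts.ToDateTime();    // == utc, and Kind is Utc

    // Normalize carries a negative nanosecond component into the seconds part:
    // (10 s, -1 ns) becomes (9 s, 999999999 ns).
    Timestamp normalized = Timestamp.Normalize(10, -1);

    // A normalized value formats as a quoted RFC 3339 string; a non-normalized
    // value formats as a JSON object with an "@warning" entry instead of throwing.
    string good = ts.ToDiagnosticString();
    string bad = new Timestamp { Seconds = 1, Nanos = -1 }.ToDiagnosticString();
}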
+ internal static string ToJson(long seconds, int nanoseconds, bool diagnosticOnly) + { + if (IsNormalized(seconds, nanoseconds)) + { + // Use .NET's formatting for the value down to the second, including an opening double quote (as it's a string value) + DateTime dateTime = UnixEpoch.AddSeconds(seconds); + var builder = new StringBuilder(); + builder.Append('"'); + builder.Append(dateTime.ToString("yyyy'-'MM'-'dd'T'HH:mm:ss", CultureInfo.InvariantCulture)); + Duration.AppendNanoseconds(builder, nanoseconds); + builder.Append("Z\""); + return builder.ToString(); + } + if (diagnosticOnly) + { + return string.Format(CultureInfo.InvariantCulture, + "{{ \"@warning\": \"Invalid Timestamp\", \"seconds\": \"{0}\", \"nanos\": {1} }}", + seconds, + nanoseconds); + } + else + { + throw new InvalidOperationException("Non-normalized timestamp value"); + } + } + + /// + /// Returns a string representation of this for diagnostic purposes. + /// + /// + /// Normally the returned value will be a JSON string value (including leading and trailing quotes) but + /// when the value is non-normalized or out of range, a JSON object representation will be returned + /// instead, including a warning. This is to avoid exceptions being thrown when trying to + /// diagnose problems - the regular JSON formatter will still throw an exception for non-normalized + /// values. + /// + /// A string representation of this value. + public string ToDiagnosticString() + { + return ToJson(Seconds, Nanos, true); + } + } +} diff --git a/packager/third_party/protobuf/csharp/src/Google.Protobuf/WellKnownTypes/Type.cs b/packager/third_party/protobuf/csharp/src/Google.Protobuf/WellKnownTypes/Type.cs new file mode 100644 index 0000000000..657c24645b --- /dev/null +++ b/packager/third_party/protobuf/csharp/src/Google.Protobuf/WellKnownTypes/Type.cs @@ -0,0 +1,1347 @@ +// Generated by the protocol buffer compiler. DO NOT EDIT! 
+// source: google/protobuf/type.proto +#pragma warning disable 1591, 0612, 3021 +#region Designer generated code + +using pb = global::Google.Protobuf; +using pbc = global::Google.Protobuf.Collections; +using pbr = global::Google.Protobuf.Reflection; +using scg = global::System.Collections.Generic; +namespace Google.Protobuf.WellKnownTypes { + + /// Holder for reflection information generated from google/protobuf/type.proto + [global::System.Diagnostics.DebuggerNonUserCodeAttribute()] + public static partial class TypeReflection { + + #region Descriptor + /// File descriptor for google/protobuf/type.proto + public static pbr::FileDescriptor Descriptor { + get { return descriptor; } + } + private static pbr::FileDescriptor descriptor; + + static TypeReflection() { + byte[] descriptorData = global::System.Convert.FromBase64String( + string.Concat( + "Chpnb29nbGUvcHJvdG9idWYvdHlwZS5wcm90bxIPZ29vZ2xlLnByb3RvYnVm", + "Ghlnb29nbGUvcHJvdG9idWYvYW55LnByb3RvGiRnb29nbGUvcHJvdG9idWYv", + "c291cmNlX2NvbnRleHQucHJvdG8i1wEKBFR5cGUSDAoEbmFtZRgBIAEoCRIm", + "CgZmaWVsZHMYAiADKAsyFi5nb29nbGUucHJvdG9idWYuRmllbGQSDgoGb25l", + "b2ZzGAMgAygJEigKB29wdGlvbnMYBCADKAsyFy5nb29nbGUucHJvdG9idWYu", + "T3B0aW9uEjYKDnNvdXJjZV9jb250ZXh0GAUgASgLMh4uZ29vZ2xlLnByb3Rv", + "YnVmLlNvdXJjZUNvbnRleHQSJwoGc3ludGF4GAYgASgOMhcuZ29vZ2xlLnBy", + "b3RvYnVmLlN5bnRheCLVBQoFRmllbGQSKQoEa2luZBgBIAEoDjIbLmdvb2ds", + "ZS5wcm90b2J1Zi5GaWVsZC5LaW5kEjcKC2NhcmRpbmFsaXR5GAIgASgOMiIu", + "Z29vZ2xlLnByb3RvYnVmLkZpZWxkLkNhcmRpbmFsaXR5Eg4KBm51bWJlchgD", + "IAEoBRIMCgRuYW1lGAQgASgJEhAKCHR5cGVfdXJsGAYgASgJEhMKC29uZW9m", + "X2luZGV4GAcgASgFEg4KBnBhY2tlZBgIIAEoCBIoCgdvcHRpb25zGAkgAygL", + "MhcuZ29vZ2xlLnByb3RvYnVmLk9wdGlvbhIRCglqc29uX25hbWUYCiABKAkS", + "FQoNZGVmYXVsdF92YWx1ZRgLIAEoCSLIAgoES2luZBIQCgxUWVBFX1VOS05P", + "V04QABIPCgtUWVBFX0RPVUJMRRABEg4KClRZUEVfRkxPQVQQAhIOCgpUWVBF", + "X0lOVDY0EAMSDwoLVFlQRV9VSU5UNjQQBBIOCgpUWVBFX0lOVDMyEAUSEAoM", + "VFlQRV9GSVhFRDY0EAYSEAoMVFlQRV9GSVhFRDMyEAcSDQoJVFlQRV9CT09M", + "EAgSDwoLVFlQRV9TVFJJTkcQCRIOCgpUWVBFX0dST1VQEAoSEAoMVFlQRV9N", + "RVNTQUdFEAsSDgoKVFlQRV9CWVRFUxAMEg8KC1RZUEVfVUlOVDMyEA0SDQoJ", + "VFlQRV9FTlVNEA4SEQoNVFlQRV9TRklYRUQzMhAPEhEKDVRZUEVfU0ZJWEVE", + "NjQQEBIPCgtUWVBFX1NJTlQzMhAREg8KC1RZUEVfU0lOVDY0EBIidAoLQ2Fy", + "ZGluYWxpdHkSFwoTQ0FSRElOQUxJVFlfVU5LTk9XThAAEhgKFENBUkRJTkFM", + "SVRZX09QVElPTkFMEAESGAoUQ0FSRElOQUxJVFlfUkVRVUlSRUQQAhIYChRD", + "QVJESU5BTElUWV9SRVBFQVRFRBADIs4BCgRFbnVtEgwKBG5hbWUYASABKAkS", + "LQoJZW51bXZhbHVlGAIgAygLMhouZ29vZ2xlLnByb3RvYnVmLkVudW1WYWx1", + "ZRIoCgdvcHRpb25zGAMgAygLMhcuZ29vZ2xlLnByb3RvYnVmLk9wdGlvbhI2", + "Cg5zb3VyY2VfY29udGV4dBgEIAEoCzIeLmdvb2dsZS5wcm90b2J1Zi5Tb3Vy", + "Y2VDb250ZXh0EicKBnN5bnRheBgFIAEoDjIXLmdvb2dsZS5wcm90b2J1Zi5T", + "eW50YXgiUwoJRW51bVZhbHVlEgwKBG5hbWUYASABKAkSDgoGbnVtYmVyGAIg", + "ASgFEigKB29wdGlvbnMYAyADKAsyFy5nb29nbGUucHJvdG9idWYuT3B0aW9u", + "IjsKBk9wdGlvbhIMCgRuYW1lGAEgASgJEiMKBXZhbHVlGAIgASgLMhQuZ29v", + "Z2xlLnByb3RvYnVmLkFueSouCgZTeW50YXgSEQoNU1lOVEFYX1BST1RPMhAA", + "EhEKDVNZTlRBWF9QUk9UTzMQAUJMChNjb20uZ29vZ2xlLnByb3RvYnVmQglU", + "eXBlUHJvdG9QAaABAaICA0dQQqoCHkdvb2dsZS5Qcm90b2J1Zi5XZWxsS25v", + "d25UeXBlc2IGcHJvdG8z")); + descriptor = pbr::FileDescriptor.FromGeneratedCode(descriptorData, + new pbr::FileDescriptor[] { global::Google.Protobuf.WellKnownTypes.AnyReflection.Descriptor, global::Google.Protobuf.WellKnownTypes.SourceContextReflection.Descriptor, }, + new pbr::GeneratedClrTypeInfo(new[] {typeof(global::Google.Protobuf.WellKnownTypes.Syntax), }, new pbr::GeneratedClrTypeInfo[] { + new 
pbr::GeneratedClrTypeInfo(typeof(global::Google.Protobuf.WellKnownTypes.Type), global::Google.Protobuf.WellKnownTypes.Type.Parser, new[]{ "Name", "Fields", "Oneofs", "Options", "SourceContext", "Syntax" }, null, null, null), + new pbr::GeneratedClrTypeInfo(typeof(global::Google.Protobuf.WellKnownTypes.Field), global::Google.Protobuf.WellKnownTypes.Field.Parser, new[]{ "Kind", "Cardinality", "Number", "Name", "TypeUrl", "OneofIndex", "Packed", "Options", "JsonName", "DefaultValue" }, null, new[]{ typeof(global::Google.Protobuf.WellKnownTypes.Field.Types.Kind), typeof(global::Google.Protobuf.WellKnownTypes.Field.Types.Cardinality) }, null), + new pbr::GeneratedClrTypeInfo(typeof(global::Google.Protobuf.WellKnownTypes.Enum), global::Google.Protobuf.WellKnownTypes.Enum.Parser, new[]{ "Name", "Enumvalue", "Options", "SourceContext", "Syntax" }, null, null, null), + new pbr::GeneratedClrTypeInfo(typeof(global::Google.Protobuf.WellKnownTypes.EnumValue), global::Google.Protobuf.WellKnownTypes.EnumValue.Parser, new[]{ "Name", "Number", "Options" }, null, null, null), + new pbr::GeneratedClrTypeInfo(typeof(global::Google.Protobuf.WellKnownTypes.Option), global::Google.Protobuf.WellKnownTypes.Option.Parser, new[]{ "Name", "Value" }, null, null, null) + })); + } + #endregion + + } + #region Enums + /// + /// The syntax in which a protocol buffer element is defined. + /// + public enum Syntax { + /// + /// Syntax `proto2`. + /// + [pbr::OriginalName("SYNTAX_PROTO2")] Proto2 = 0, + /// + /// Syntax `proto3`. + /// + [pbr::OriginalName("SYNTAX_PROTO3")] Proto3 = 1, + } + + #endregion + + #region Messages + /// + /// A protocol buffer message type. + /// + [global::System.Diagnostics.DebuggerNonUserCodeAttribute()] + public sealed partial class Type : pb::IMessage { + private static readonly pb::MessageParser _parser = new pb::MessageParser(() => new Type()); + public static pb::MessageParser Parser { get { return _parser; } } + + public static pbr::MessageDescriptor Descriptor { + get { return global::Google.Protobuf.WellKnownTypes.TypeReflection.Descriptor.MessageTypes[0]; } + } + + pbr::MessageDescriptor pb::IMessage.Descriptor { + get { return Descriptor; } + } + + public Type() { + OnConstruction(); + } + + partial void OnConstruction(); + + public Type(Type other) : this() { + name_ = other.name_; + fields_ = other.fields_.Clone(); + oneofs_ = other.oneofs_.Clone(); + options_ = other.options_.Clone(); + SourceContext = other.sourceContext_ != null ? other.SourceContext.Clone() : null; + syntax_ = other.syntax_; + } + + public Type Clone() { + return new Type(this); + } + + /// Field number for the "name" field. + public const int NameFieldNumber = 1; + private string name_ = ""; + /// + /// The fully qualified message name. + /// + public string Name { + get { return name_; } + set { + name_ = pb::ProtoPreconditions.CheckNotNull(value, "value"); + } + } + + /// Field number for the "fields" field. + public const int FieldsFieldNumber = 2; + private static readonly pb::FieldCodec _repeated_fields_codec + = pb::FieldCodec.ForMessage(18, global::Google.Protobuf.WellKnownTypes.Field.Parser); + private readonly pbc::RepeatedField fields_ = new pbc::RepeatedField(); + /// + /// The list of fields. + /// + public pbc::RepeatedField Fields { + get { return fields_; } + } + + /// Field number for the "oneofs" field. 
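// [Editor's note - illustrative sketch, not part of the generated file] The
// numeric arguments to FieldCodec.ForMessage/ForString and WriteRawTag in this
// file are precomputed wire-format tags: (field_number << 3) | wire_type, where
// wire type 0 is varint and wire type 2 is length-delimited. A hypothetical
// helper (not a library API) showing how those constants arise:

static uint MakeTag(int fieldNumber, int wireType)
{
    return (uint) ((fieldNumber << 3) | wireType);
}

// MakeTag(2, 2) == 18 -> the "fields" codec above (field 2, length-delimited)
// MakeTag(3, 2) == 26 -> the "oneofs" string codec declared just below
// MakeTag(6, 0) == 48 -> written by Type.WriteTo before the "syntax" enum value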
+ public const int OneofsFieldNumber = 3; + private static readonly pb::FieldCodec _repeated_oneofs_codec + = pb::FieldCodec.ForString(26); + private readonly pbc::RepeatedField oneofs_ = new pbc::RepeatedField(); + /// + /// The list of types appearing in `oneof` definitions in this type. + /// + public pbc::RepeatedField Oneofs { + get { return oneofs_; } + } + + /// Field number for the "options" field. + public const int OptionsFieldNumber = 4; + private static readonly pb::FieldCodec _repeated_options_codec + = pb::FieldCodec.ForMessage(34, global::Google.Protobuf.WellKnownTypes.Option.Parser); + private readonly pbc::RepeatedField options_ = new pbc::RepeatedField(); + /// + /// The protocol buffer options. + /// + public pbc::RepeatedField Options { + get { return options_; } + } + + /// Field number for the "source_context" field. + public const int SourceContextFieldNumber = 5; + private global::Google.Protobuf.WellKnownTypes.SourceContext sourceContext_; + /// + /// The source context. + /// + public global::Google.Protobuf.WellKnownTypes.SourceContext SourceContext { + get { return sourceContext_; } + set { + sourceContext_ = value; + } + } + + /// Field number for the "syntax" field. + public const int SyntaxFieldNumber = 6; + private global::Google.Protobuf.WellKnownTypes.Syntax syntax_ = 0; + /// + /// The source syntax. + /// + public global::Google.Protobuf.WellKnownTypes.Syntax Syntax { + get { return syntax_; } + set { + syntax_ = value; + } + } + + public override bool Equals(object other) { + return Equals(other as Type); + } + + public bool Equals(Type other) { + if (ReferenceEquals(other, null)) { + return false; + } + if (ReferenceEquals(other, this)) { + return true; + } + if (Name != other.Name) return false; + if(!fields_.Equals(other.fields_)) return false; + if(!oneofs_.Equals(other.oneofs_)) return false; + if(!options_.Equals(other.options_)) return false; + if (!object.Equals(SourceContext, other.SourceContext)) return false; + if (Syntax != other.Syntax) return false; + return true; + } + + public override int GetHashCode() { + int hash = 1; + if (Name.Length != 0) hash ^= Name.GetHashCode(); + hash ^= fields_.GetHashCode(); + hash ^= oneofs_.GetHashCode(); + hash ^= options_.GetHashCode(); + if (sourceContext_ != null) hash ^= SourceContext.GetHashCode(); + if (Syntax != 0) hash ^= Syntax.GetHashCode(); + return hash; + } + + public override string ToString() { + return pb::JsonFormatter.ToDiagnosticString(this); + } + + public void WriteTo(pb::CodedOutputStream output) { + if (Name.Length != 0) { + output.WriteRawTag(10); + output.WriteString(Name); + } + fields_.WriteTo(output, _repeated_fields_codec); + oneofs_.WriteTo(output, _repeated_oneofs_codec); + options_.WriteTo(output, _repeated_options_codec); + if (sourceContext_ != null) { + output.WriteRawTag(42); + output.WriteMessage(SourceContext); + } + if (Syntax != 0) { + output.WriteRawTag(48); + output.WriteEnum((int) Syntax); + } + } + + public int CalculateSize() { + int size = 0; + if (Name.Length != 0) { + size += 1 + pb::CodedOutputStream.ComputeStringSize(Name); + } + size += fields_.CalculateSize(_repeated_fields_codec); + size += oneofs_.CalculateSize(_repeated_oneofs_codec); + size += options_.CalculateSize(_repeated_options_codec); + if (sourceContext_ != null) { + size += 1 + pb::CodedOutputStream.ComputeMessageSize(SourceContext); + } + if (Syntax != 0) { + size += 1 + pb::CodedOutputStream.ComputeEnumSize((int) Syntax); + } + return size; + } + + public void MergeFrom(Type other) { + if 
(other == null) { + return; + } + if (other.Name.Length != 0) { + Name = other.Name; + } + fields_.Add(other.fields_); + oneofs_.Add(other.oneofs_); + options_.Add(other.options_); + if (other.sourceContext_ != null) { + if (sourceContext_ == null) { + sourceContext_ = new global::Google.Protobuf.WellKnownTypes.SourceContext(); + } + SourceContext.MergeFrom(other.SourceContext); + } + if (other.Syntax != 0) { + Syntax = other.Syntax; + } + } + + public void MergeFrom(pb::CodedInputStream input) { + uint tag; + while ((tag = input.ReadTag()) != 0) { + switch(tag) { + default: + input.SkipLastField(); + break; + case 10: { + Name = input.ReadString(); + break; + } + case 18: { + fields_.AddEntriesFrom(input, _repeated_fields_codec); + break; + } + case 26: { + oneofs_.AddEntriesFrom(input, _repeated_oneofs_codec); + break; + } + case 34: { + options_.AddEntriesFrom(input, _repeated_options_codec); + break; + } + case 42: { + if (sourceContext_ == null) { + sourceContext_ = new global::Google.Protobuf.WellKnownTypes.SourceContext(); + } + input.ReadMessage(sourceContext_); + break; + } + case 48: { + syntax_ = (global::Google.Protobuf.WellKnownTypes.Syntax) input.ReadEnum(); + break; + } + } + } + } + + } + + /// + /// A single field of a message type. + /// + [global::System.Diagnostics.DebuggerNonUserCodeAttribute()] + public sealed partial class Field : pb::IMessage { + private static readonly pb::MessageParser _parser = new pb::MessageParser(() => new Field()); + public static pb::MessageParser Parser { get { return _parser; } } + + public static pbr::MessageDescriptor Descriptor { + get { return global::Google.Protobuf.WellKnownTypes.TypeReflection.Descriptor.MessageTypes[1]; } + } + + pbr::MessageDescriptor pb::IMessage.Descriptor { + get { return Descriptor; } + } + + public Field() { + OnConstruction(); + } + + partial void OnConstruction(); + + public Field(Field other) : this() { + kind_ = other.kind_; + cardinality_ = other.cardinality_; + number_ = other.number_; + name_ = other.name_; + typeUrl_ = other.typeUrl_; + oneofIndex_ = other.oneofIndex_; + packed_ = other.packed_; + options_ = other.options_.Clone(); + jsonName_ = other.jsonName_; + defaultValue_ = other.defaultValue_; + } + + public Field Clone() { + return new Field(this); + } + + /// Field number for the "kind" field. + public const int KindFieldNumber = 1; + private global::Google.Protobuf.WellKnownTypes.Field.Types.Kind kind_ = 0; + /// + /// The field type. + /// + public global::Google.Protobuf.WellKnownTypes.Field.Types.Kind Kind { + get { return kind_; } + set { + kind_ = value; + } + } + + /// Field number for the "cardinality" field. + public const int CardinalityFieldNumber = 2; + private global::Google.Protobuf.WellKnownTypes.Field.Types.Cardinality cardinality_ = 0; + /// + /// The field cardinality. + /// + public global::Google.Protobuf.WellKnownTypes.Field.Types.Cardinality Cardinality { + get { return cardinality_; } + set { + cardinality_ = value; + } + } + + /// Field number for the "number" field. + public const int NumberFieldNumber = 3; + private int number_; + /// + /// The field number. + /// + public int Number { + get { return number_; } + set { + number_ = value; + } + } + + /// Field number for the "name" field. + public const int NameFieldNumber = 4; + private string name_ = ""; + /// + /// The field name. + /// + public string Name { + get { return name_; } + set { + name_ = pb::ProtoPreconditions.CheckNotNull(value, "value"); + } + } + + /// Field number for the "type_url" field. 
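// [Editor's note - illustrative sketch, not part of the generated file] The
// MergeFrom overloads above follow the usual proto3 merge rules: singular
// fields are overwritten only when the source value is non-default, repeated
// fields are concatenated, and message fields are merged recursively.

static void MergeExample()
{
    var baseType = new global::Google.Protobuf.WellKnownTypes.Type { Name = "Old" };
    baseType.Oneofs.Add("kind");

    var overlay = new global::Google.Protobuf.WellKnownTypes.Type { Name = "New" };
    overlay.Oneofs.Add("value");

    baseType.MergeFrom(overlay);
    // baseType.Name is now "New"; baseType.Oneofs contains ["kind", "value"].

    // MergeFrom(CodedInputStream) applies the same rules while decoding, so
    // parsing concatenated serializations of the same type also merges them.
}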
+ public const int TypeUrlFieldNumber = 6; + private string typeUrl_ = ""; + /// + /// The field type URL, without the scheme, for message or enumeration + /// types. Example: `"type.googleapis.com/google.protobuf.Timestamp"`. + /// + public string TypeUrl { + get { return typeUrl_; } + set { + typeUrl_ = pb::ProtoPreconditions.CheckNotNull(value, "value"); + } + } + + /// Field number for the "oneof_index" field. + public const int OneofIndexFieldNumber = 7; + private int oneofIndex_; + /// + /// The index of the field type in `Type.oneofs`, for message or enumeration + /// types. The first type has index 1; zero means the type is not in the list. + /// + public int OneofIndex { + get { return oneofIndex_; } + set { + oneofIndex_ = value; + } + } + + /// Field number for the "packed" field. + public const int PackedFieldNumber = 8; + private bool packed_; + /// + /// Whether to use alternative packed wire representation. + /// + public bool Packed { + get { return packed_; } + set { + packed_ = value; + } + } + + /// Field number for the "options" field. + public const int OptionsFieldNumber = 9; + private static readonly pb::FieldCodec _repeated_options_codec + = pb::FieldCodec.ForMessage(74, global::Google.Protobuf.WellKnownTypes.Option.Parser); + private readonly pbc::RepeatedField options_ = new pbc::RepeatedField(); + /// + /// The protocol buffer options. + /// + public pbc::RepeatedField Options { + get { return options_; } + } + + /// Field number for the "json_name" field. + public const int JsonNameFieldNumber = 10; + private string jsonName_ = ""; + /// + /// The field JSON name. + /// + public string JsonName { + get { return jsonName_; } + set { + jsonName_ = pb::ProtoPreconditions.CheckNotNull(value, "value"); + } + } + + /// Field number for the "default_value" field. + public const int DefaultValueFieldNumber = 11; + private string defaultValue_ = ""; + /// + /// The string value of the default value of this field. Proto2 syntax only. 
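// [Editor's note - illustrative sketch, not part of the generated file] A Field
// message describing a hypothetical `google.protobuf.Timestamp created = 1`
// member of its parent Type's first oneof, using the conventions documented
// above: a type_url without a scheme, and OneofIndex counting from 1 (0 means
// the field is not part of any oneof).

static global::Google.Protobuf.WellKnownTypes.Field DescribeCreatedField()
{
    return new global::Google.Protobuf.WellKnownTypes.Field
    {
        Kind = global::Google.Protobuf.WellKnownTypes.Field.Types.Kind.TypeMessage,
        Cardinality = global::Google.Protobuf.WellKnownTypes.Field.Types.Cardinality.Optional,
        Number = 1,
        Name = "created",
        JsonName = "created",
        TypeUrl = "type.googleapis.com/google.protobuf.Timestamp",
        OneofIndex = 1  // first entry of Type.oneofs; 0 would mean "not in a oneof"
    };
}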
+ /// + public string DefaultValue { + get { return defaultValue_; } + set { + defaultValue_ = pb::ProtoPreconditions.CheckNotNull(value, "value"); + } + } + + public override bool Equals(object other) { + return Equals(other as Field); + } + + public bool Equals(Field other) { + if (ReferenceEquals(other, null)) { + return false; + } + if (ReferenceEquals(other, this)) { + return true; + } + if (Kind != other.Kind) return false; + if (Cardinality != other.Cardinality) return false; + if (Number != other.Number) return false; + if (Name != other.Name) return false; + if (TypeUrl != other.TypeUrl) return false; + if (OneofIndex != other.OneofIndex) return false; + if (Packed != other.Packed) return false; + if(!options_.Equals(other.options_)) return false; + if (JsonName != other.JsonName) return false; + if (DefaultValue != other.DefaultValue) return false; + return true; + } + + public override int GetHashCode() { + int hash = 1; + if (Kind != 0) hash ^= Kind.GetHashCode(); + if (Cardinality != 0) hash ^= Cardinality.GetHashCode(); + if (Number != 0) hash ^= Number.GetHashCode(); + if (Name.Length != 0) hash ^= Name.GetHashCode(); + if (TypeUrl.Length != 0) hash ^= TypeUrl.GetHashCode(); + if (OneofIndex != 0) hash ^= OneofIndex.GetHashCode(); + if (Packed != false) hash ^= Packed.GetHashCode(); + hash ^= options_.GetHashCode(); + if (JsonName.Length != 0) hash ^= JsonName.GetHashCode(); + if (DefaultValue.Length != 0) hash ^= DefaultValue.GetHashCode(); + return hash; + } + + public override string ToString() { + return pb::JsonFormatter.ToDiagnosticString(this); + } + + public void WriteTo(pb::CodedOutputStream output) { + if (Kind != 0) { + output.WriteRawTag(8); + output.WriteEnum((int) Kind); + } + if (Cardinality != 0) { + output.WriteRawTag(16); + output.WriteEnum((int) Cardinality); + } + if (Number != 0) { + output.WriteRawTag(24); + output.WriteInt32(Number); + } + if (Name.Length != 0) { + output.WriteRawTag(34); + output.WriteString(Name); + } + if (TypeUrl.Length != 0) { + output.WriteRawTag(50); + output.WriteString(TypeUrl); + } + if (OneofIndex != 0) { + output.WriteRawTag(56); + output.WriteInt32(OneofIndex); + } + if (Packed != false) { + output.WriteRawTag(64); + output.WriteBool(Packed); + } + options_.WriteTo(output, _repeated_options_codec); + if (JsonName.Length != 0) { + output.WriteRawTag(82); + output.WriteString(JsonName); + } + if (DefaultValue.Length != 0) { + output.WriteRawTag(90); + output.WriteString(DefaultValue); + } + } + + public int CalculateSize() { + int size = 0; + if (Kind != 0) { + size += 1 + pb::CodedOutputStream.ComputeEnumSize((int) Kind); + } + if (Cardinality != 0) { + size += 1 + pb::CodedOutputStream.ComputeEnumSize((int) Cardinality); + } + if (Number != 0) { + size += 1 + pb::CodedOutputStream.ComputeInt32Size(Number); + } + if (Name.Length != 0) { + size += 1 + pb::CodedOutputStream.ComputeStringSize(Name); + } + if (TypeUrl.Length != 0) { + size += 1 + pb::CodedOutputStream.ComputeStringSize(TypeUrl); + } + if (OneofIndex != 0) { + size += 1 + pb::CodedOutputStream.ComputeInt32Size(OneofIndex); + } + if (Packed != false) { + size += 1 + 1; + } + size += options_.CalculateSize(_repeated_options_codec); + if (JsonName.Length != 0) { + size += 1 + pb::CodedOutputStream.ComputeStringSize(JsonName); + } + if (DefaultValue.Length != 0) { + size += 1 + pb::CodedOutputStream.ComputeStringSize(DefaultValue); + } + return size; + } + + public void MergeFrom(Field other) { + if (other == null) { + return; + } + if (other.Kind != 0) { + Kind = 
other.Kind; + } + if (other.Cardinality != 0) { + Cardinality = other.Cardinality; + } + if (other.Number != 0) { + Number = other.Number; + } + if (other.Name.Length != 0) { + Name = other.Name; + } + if (other.TypeUrl.Length != 0) { + TypeUrl = other.TypeUrl; + } + if (other.OneofIndex != 0) { + OneofIndex = other.OneofIndex; + } + if (other.Packed != false) { + Packed = other.Packed; + } + options_.Add(other.options_); + if (other.JsonName.Length != 0) { + JsonName = other.JsonName; + } + if (other.DefaultValue.Length != 0) { + DefaultValue = other.DefaultValue; + } + } + + public void MergeFrom(pb::CodedInputStream input) { + uint tag; + while ((tag = input.ReadTag()) != 0) { + switch(tag) { + default: + input.SkipLastField(); + break; + case 8: { + kind_ = (global::Google.Protobuf.WellKnownTypes.Field.Types.Kind) input.ReadEnum(); + break; + } + case 16: { + cardinality_ = (global::Google.Protobuf.WellKnownTypes.Field.Types.Cardinality) input.ReadEnum(); + break; + } + case 24: { + Number = input.ReadInt32(); + break; + } + case 34: { + Name = input.ReadString(); + break; + } + case 50: { + TypeUrl = input.ReadString(); + break; + } + case 56: { + OneofIndex = input.ReadInt32(); + break; + } + case 64: { + Packed = input.ReadBool(); + break; + } + case 74: { + options_.AddEntriesFrom(input, _repeated_options_codec); + break; + } + case 82: { + JsonName = input.ReadString(); + break; + } + case 90: { + DefaultValue = input.ReadString(); + break; + } + } + } + } + + #region Nested types + /// Container for nested types declared in the Field message type. + [global::System.Diagnostics.DebuggerNonUserCodeAttribute()] + public static partial class Types { + /// + /// Basic field types. + /// + public enum Kind { + /// + /// Field type unknown. + /// + [pbr::OriginalName("TYPE_UNKNOWN")] TypeUnknown = 0, + /// + /// Field type double. + /// + [pbr::OriginalName("TYPE_DOUBLE")] TypeDouble = 1, + /// + /// Field type float. + /// + [pbr::OriginalName("TYPE_FLOAT")] TypeFloat = 2, + /// + /// Field type int64. + /// + [pbr::OriginalName("TYPE_INT64")] TypeInt64 = 3, + /// + /// Field type uint64. + /// + [pbr::OriginalName("TYPE_UINT64")] TypeUint64 = 4, + /// + /// Field type int32. + /// + [pbr::OriginalName("TYPE_INT32")] TypeInt32 = 5, + /// + /// Field type fixed64. + /// + [pbr::OriginalName("TYPE_FIXED64")] TypeFixed64 = 6, + /// + /// Field type fixed32. + /// + [pbr::OriginalName("TYPE_FIXED32")] TypeFixed32 = 7, + /// + /// Field type bool. + /// + [pbr::OriginalName("TYPE_BOOL")] TypeBool = 8, + /// + /// Field type string. + /// + [pbr::OriginalName("TYPE_STRING")] TypeString = 9, + /// + /// Field type group. Proto2 syntax only, and deprecated. + /// + [pbr::OriginalName("TYPE_GROUP")] TypeGroup = 10, + /// + /// Field type message. + /// + [pbr::OriginalName("TYPE_MESSAGE")] TypeMessage = 11, + /// + /// Field type bytes. + /// + [pbr::OriginalName("TYPE_BYTES")] TypeBytes = 12, + /// + /// Field type uint32. + /// + [pbr::OriginalName("TYPE_UINT32")] TypeUint32 = 13, + /// + /// Field type enum. + /// + [pbr::OriginalName("TYPE_ENUM")] TypeEnum = 14, + /// + /// Field type sfixed32. + /// + [pbr::OriginalName("TYPE_SFIXED32")] TypeSfixed32 = 15, + /// + /// Field type sfixed64. + /// + [pbr::OriginalName("TYPE_SFIXED64")] TypeSfixed64 = 16, + /// + /// Field type sint32. + /// + [pbr::OriginalName("TYPE_SINT32")] TypeSint32 = 17, + /// + /// Field type sint64. 
+ /// + [pbr::OriginalName("TYPE_SINT64")] TypeSint64 = 18, + } + + /// + /// Whether a field is optional, required, or repeated. + /// + public enum Cardinality { + /// + /// For fields with unknown cardinality. + /// + [pbr::OriginalName("CARDINALITY_UNKNOWN")] Unknown = 0, + /// + /// For optional fields. + /// + [pbr::OriginalName("CARDINALITY_OPTIONAL")] Optional = 1, + /// + /// For required fields. Proto2 syntax only. + /// + [pbr::OriginalName("CARDINALITY_REQUIRED")] Required = 2, + /// + /// For repeated fields. + /// + [pbr::OriginalName("CARDINALITY_REPEATED")] Repeated = 3, + } + + } + #endregion + + } + + /// + /// Enum type definition. + /// + [global::System.Diagnostics.DebuggerNonUserCodeAttribute()] + public sealed partial class Enum : pb::IMessage { + private static readonly pb::MessageParser _parser = new pb::MessageParser(() => new Enum()); + public static pb::MessageParser Parser { get { return _parser; } } + + public static pbr::MessageDescriptor Descriptor { + get { return global::Google.Protobuf.WellKnownTypes.TypeReflection.Descriptor.MessageTypes[2]; } + } + + pbr::MessageDescriptor pb::IMessage.Descriptor { + get { return Descriptor; } + } + + public Enum() { + OnConstruction(); + } + + partial void OnConstruction(); + + public Enum(Enum other) : this() { + name_ = other.name_; + enumvalue_ = other.enumvalue_.Clone(); + options_ = other.options_.Clone(); + SourceContext = other.sourceContext_ != null ? other.SourceContext.Clone() : null; + syntax_ = other.syntax_; + } + + public Enum Clone() { + return new Enum(this); + } + + /// Field number for the "name" field. + public const int NameFieldNumber = 1; + private string name_ = ""; + /// + /// Enum type name. + /// + public string Name { + get { return name_; } + set { + name_ = pb::ProtoPreconditions.CheckNotNull(value, "value"); + } + } + + /// Field number for the "enumvalue" field. + public const int EnumvalueFieldNumber = 2; + private static readonly pb::FieldCodec _repeated_enumvalue_codec + = pb::FieldCodec.ForMessage(18, global::Google.Protobuf.WellKnownTypes.EnumValue.Parser); + private readonly pbc::RepeatedField enumvalue_ = new pbc::RepeatedField(); + /// + /// Enum value definitions. + /// + public pbc::RepeatedField Enumvalue { + get { return enumvalue_; } + } + + /// Field number for the "options" field. + public const int OptionsFieldNumber = 3; + private static readonly pb::FieldCodec _repeated_options_codec + = pb::FieldCodec.ForMessage(26, global::Google.Protobuf.WellKnownTypes.Option.Parser); + private readonly pbc::RepeatedField options_ = new pbc::RepeatedField(); + /// + /// Protocol buffer options. + /// + public pbc::RepeatedField Options { + get { return options_; } + } + + /// Field number for the "source_context" field. + public const int SourceContextFieldNumber = 4; + private global::Google.Protobuf.WellKnownTypes.SourceContext sourceContext_; + /// + /// The source context. + /// + public global::Google.Protobuf.WellKnownTypes.SourceContext SourceContext { + get { return sourceContext_; } + set { + sourceContext_ = value; + } + } + + /// Field number for the "syntax" field. + public const int SyntaxFieldNumber = 5; + private global::Google.Protobuf.WellKnownTypes.Syntax syntax_ = 0; + /// + /// The source syntax. 
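// [Editor's note - illustrative sketch, not part of the generated file] A small
// Enum descriptor built from the message types in this file; the enum name and
// values are invented for the example.

static global::Google.Protobuf.WellKnownTypes.Enum DescribeColorEnum()
{
    var color = new global::Google.Protobuf.WellKnownTypes.Enum
    {
        Name = "example.Color",
        Syntax = global::Google.Protobuf.WellKnownTypes.Syntax.Proto3
    };
    color.Enumvalue.Add(new global::Google.Protobuf.WellKnownTypes.EnumValue { Name = "COLOR_UNSPECIFIED", Number = 0 });
    color.Enumvalue.Add(new global::Google.Protobuf.WellKnownTypes.EnumValue { Name = "COLOR_RED", Number = 1 });
    return color;
}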
+ /// + public global::Google.Protobuf.WellKnownTypes.Syntax Syntax { + get { return syntax_; } + set { + syntax_ = value; + } + } + + public override bool Equals(object other) { + return Equals(other as Enum); + } + + public bool Equals(Enum other) { + if (ReferenceEquals(other, null)) { + return false; + } + if (ReferenceEquals(other, this)) { + return true; + } + if (Name != other.Name) return false; + if(!enumvalue_.Equals(other.enumvalue_)) return false; + if(!options_.Equals(other.options_)) return false; + if (!object.Equals(SourceContext, other.SourceContext)) return false; + if (Syntax != other.Syntax) return false; + return true; + } + + public override int GetHashCode() { + int hash = 1; + if (Name.Length != 0) hash ^= Name.GetHashCode(); + hash ^= enumvalue_.GetHashCode(); + hash ^= options_.GetHashCode(); + if (sourceContext_ != null) hash ^= SourceContext.GetHashCode(); + if (Syntax != 0) hash ^= Syntax.GetHashCode(); + return hash; + } + + public override string ToString() { + return pb::JsonFormatter.ToDiagnosticString(this); + } + + public void WriteTo(pb::CodedOutputStream output) { + if (Name.Length != 0) { + output.WriteRawTag(10); + output.WriteString(Name); + } + enumvalue_.WriteTo(output, _repeated_enumvalue_codec); + options_.WriteTo(output, _repeated_options_codec); + if (sourceContext_ != null) { + output.WriteRawTag(34); + output.WriteMessage(SourceContext); + } + if (Syntax != 0) { + output.WriteRawTag(40); + output.WriteEnum((int) Syntax); + } + } + + public int CalculateSize() { + int size = 0; + if (Name.Length != 0) { + size += 1 + pb::CodedOutputStream.ComputeStringSize(Name); + } + size += enumvalue_.CalculateSize(_repeated_enumvalue_codec); + size += options_.CalculateSize(_repeated_options_codec); + if (sourceContext_ != null) { + size += 1 + pb::CodedOutputStream.ComputeMessageSize(SourceContext); + } + if (Syntax != 0) { + size += 1 + pb::CodedOutputStream.ComputeEnumSize((int) Syntax); + } + return size; + } + + public void MergeFrom(Enum other) { + if (other == null) { + return; + } + if (other.Name.Length != 0) { + Name = other.Name; + } + enumvalue_.Add(other.enumvalue_); + options_.Add(other.options_); + if (other.sourceContext_ != null) { + if (sourceContext_ == null) { + sourceContext_ = new global::Google.Protobuf.WellKnownTypes.SourceContext(); + } + SourceContext.MergeFrom(other.SourceContext); + } + if (other.Syntax != 0) { + Syntax = other.Syntax; + } + } + + public void MergeFrom(pb::CodedInputStream input) { + uint tag; + while ((tag = input.ReadTag()) != 0) { + switch(tag) { + default: + input.SkipLastField(); + break; + case 10: { + Name = input.ReadString(); + break; + } + case 18: { + enumvalue_.AddEntriesFrom(input, _repeated_enumvalue_codec); + break; + } + case 26: { + options_.AddEntriesFrom(input, _repeated_options_codec); + break; + } + case 34: { + if (sourceContext_ == null) { + sourceContext_ = new global::Google.Protobuf.WellKnownTypes.SourceContext(); + } + input.ReadMessage(sourceContext_); + break; + } + case 40: { + syntax_ = (global::Google.Protobuf.WellKnownTypes.Syntax) input.ReadEnum(); + break; + } + } + } + } + + } + + /// + /// Enum value definition. 
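// [Editor's note - illustrative sketch, not part of the generated file] The
// WriteTo/MergeFrom pairs above are what the convenience APIs build on. This
// sketch assumes the ToByteArray extension from Google.Protobuf.MessageExtensions
// is available, as elsewhere in this package; the class name is made up.

internal static class EnumRoundTripExample
{
    internal static bool RoundTrips(global::Google.Protobuf.WellKnownTypes.Enum original)
    {
        byte[] wire = global::Google.Protobuf.MessageExtensions.ToByteArray(original);
        var parsed = global::Google.Protobuf.WellKnownTypes.Enum.Parser.ParseFrom(wire);
        // Equality is field-by-field (see Equals above), so a lossless
        // serialize/parse cycle compares equal to the original message.
        return parsed.Equals(original);
    }
}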
+ /// + [global::System.Diagnostics.DebuggerNonUserCodeAttribute()] + public sealed partial class EnumValue : pb::IMessage { + private static readonly pb::MessageParser _parser = new pb::MessageParser(() => new EnumValue()); + public static pb::MessageParser Parser { get { return _parser; } } + + public static pbr::MessageDescriptor Descriptor { + get { return global::Google.Protobuf.WellKnownTypes.TypeReflection.Descriptor.MessageTypes[3]; } + } + + pbr::MessageDescriptor pb::IMessage.Descriptor { + get { return Descriptor; } + } + + public EnumValue() { + OnConstruction(); + } + + partial void OnConstruction(); + + public EnumValue(EnumValue other) : this() { + name_ = other.name_; + number_ = other.number_; + options_ = other.options_.Clone(); + } + + public EnumValue Clone() { + return new EnumValue(this); + } + + /// Field number for the "name" field. + public const int NameFieldNumber = 1; + private string name_ = ""; + /// + /// Enum value name. + /// + public string Name { + get { return name_; } + set { + name_ = pb::ProtoPreconditions.CheckNotNull(value, "value"); + } + } + + /// Field number for the "number" field. + public const int NumberFieldNumber = 2; + private int number_; + /// + /// Enum value number. + /// + public int Number { + get { return number_; } + set { + number_ = value; + } + } + + /// Field number for the "options" field. + public const int OptionsFieldNumber = 3; + private static readonly pb::FieldCodec _repeated_options_codec + = pb::FieldCodec.ForMessage(26, global::Google.Protobuf.WellKnownTypes.Option.Parser); + private readonly pbc::RepeatedField options_ = new pbc::RepeatedField(); + /// + /// Protocol buffer options. + /// + public pbc::RepeatedField Options { + get { return options_; } + } + + public override bool Equals(object other) { + return Equals(other as EnumValue); + } + + public bool Equals(EnumValue other) { + if (ReferenceEquals(other, null)) { + return false; + } + if (ReferenceEquals(other, this)) { + return true; + } + if (Name != other.Name) return false; + if (Number != other.Number) return false; + if(!options_.Equals(other.options_)) return false; + return true; + } + + public override int GetHashCode() { + int hash = 1; + if (Name.Length != 0) hash ^= Name.GetHashCode(); + if (Number != 0) hash ^= Number.GetHashCode(); + hash ^= options_.GetHashCode(); + return hash; + } + + public override string ToString() { + return pb::JsonFormatter.ToDiagnosticString(this); + } + + public void WriteTo(pb::CodedOutputStream output) { + if (Name.Length != 0) { + output.WriteRawTag(10); + output.WriteString(Name); + } + if (Number != 0) { + output.WriteRawTag(16); + output.WriteInt32(Number); + } + options_.WriteTo(output, _repeated_options_codec); + } + + public int CalculateSize() { + int size = 0; + if (Name.Length != 0) { + size += 1 + pb::CodedOutputStream.ComputeStringSize(Name); + } + if (Number != 0) { + size += 1 + pb::CodedOutputStream.ComputeInt32Size(Number); + } + size += options_.CalculateSize(_repeated_options_codec); + return size; + } + + public void MergeFrom(EnumValue other) { + if (other == null) { + return; + } + if (other.Name.Length != 0) { + Name = other.Name; + } + if (other.Number != 0) { + Number = other.Number; + } + options_.Add(other.options_); + } + + public void MergeFrom(pb::CodedInputStream input) { + uint tag; + while ((tag = input.ReadTag()) != 0) { + switch(tag) { + default: + input.SkipLastField(); + break; + case 10: { + Name = input.ReadString(); + break; + } + case 16: { + Number = input.ReadInt32(); + 
break; + } + case 26: { + options_.AddEntriesFrom(input, _repeated_options_codec); + break; + } + } + } + } + + } + + /// + /// A protocol buffer option, which can be attached to a message, field, + /// enumeration, etc. + /// + [global::System.Diagnostics.DebuggerNonUserCodeAttribute()] + public sealed partial class Option : pb::IMessage