From 4b4cceb81c8b29aa08f31f817a6659cd78570738 Mon Sep 17 00:00:00 2001 From: Francesco Bellini Date: Mon, 17 Mar 2025 20:58:26 +0100 Subject: [PATCH] Initial commit --- LICENSE | 201 + THIRD-PARTY-NOTICES | 1692 ++++ array_codec.go | 42 + benchmark_test.go | 449 + bson_binary_vector_spec_test.go | 191 + bson_corpus_spec_test.go | 504 + bson_test.go | 679 ++ bsoncodec.go | 199 + bsoncodec_test.go | 72 + bsonrw_test.go | 846 ++ byte_slice_codec.go | 97 + codec_cache.go | 166 + codec_cache_test.go | 176 + cond_addr_codec.go | 61 + cond_addr_codec_test.go | 95 + copier.go | 431 + copier_test.go | 528 + decimal.go | 339 + decimal_test.go | 236 + decoder.go | 136 + decoder_example_test.go | 208 + decoder_test.go | 699 ++ default_value_decoders.go | 1497 +++ default_value_decoders_test.go | 3806 ++++++++ default_value_encoders.go | 517 + default_value_encoders_test.go | 1758 ++++ doc.go | 155 + empty_interface_codec.go | 127 + encoder.go | 123 + encoder_example_test.go | 240 + encoder_test.go | 303 + example_test.go | 143 + extjson_parser.go | 804 ++ extjson_parser_test.go | 804 ++ extjson_prose_test.go | 46 + extjson_reader.go | 606 ++ extjson_reader_test.go | 168 + extjson_tables.go | 223 + extjson_wrappers.go | 489 + extjson_writer.go | 690 ++ extjson_writer_test.go | 259 + fuzz_test.go | 40 + go.mod | 12 + go.sum | 6 + internal/assert/assertion_compare.go | 481 + .../assert/assertion_compare_can_convert.go | 18 + .../assert/assertion_compare_go1.17_test.go | 184 + internal/assert/assertion_compare_legacy.go | 18 + internal/assert/assertion_compare_test.go | 455 + internal/assert/assertion_format.go | 325 + internal/assert/assertion_mongo.go | 126 + internal/assert/assertion_mongo_test.go | 125 + internal/assert/assertions.go | 1075 ++ internal/assert/assertions_test.go | 1231 +++ internal/assert/difflib.go | 766 ++ internal/assert/difflib_test.go | 326 + internal/aws/awserr/error.go | 60 + internal/aws/awserr/types.go | 144 + internal/aws/credentials/chain_provider.go | 72 + .../aws/credentials/chain_provider_test.go | 176 + internal/aws/credentials/credentials.go | 197 + internal/aws/credentials/credentials_test.go | 192 + internal/aws/signer/v4/header_rules.go | 51 + internal/aws/signer/v4/request.go | 80 + internal/aws/signer/v4/uri_path.go | 65 + internal/aws/signer/v4/v4.go | 421 + internal/aws/signer/v4/v4_test.go | 434 + internal/aws/types.go | 153 + internal/bsoncoreutil/bsoncoreutil.go | 40 + internal/bsoncoreutil/bsoncoreutil_test.go | 59 + internal/bsonutil/bsonutil.go | 62 + internal/codecutil/encoding.go | 62 + internal/codecutil/encoding_test.go | 82 + .../credproviders/assume_role_provider.go | 148 + internal/credproviders/ec2_provider.go | 183 + internal/credproviders/ecs_provider.go | 112 + internal/credproviders/env_provider.go | 69 + internal/credproviders/imds_provider.go | 103 + internal/credproviders/static_provider.go | 59 + internal/csfle/csfle.go | 40 + internal/csot/csot.go | 106 + internal/csot/csot_test.go | 249 + internal/decimal128/decinal128.go | 117 + internal/errutil/join.go | 85 + internal/errutil/join_go1.19.go | 20 + internal/errutil/join_go1.20.go | 17 + internal/errutil/join_test.go | 243 + internal/failpoint/failpoint.go | 63 + internal/handshake/handshake.go | 13 + internal/httputil/httputil.go | 30 + internal/israce/norace.go | 14 + internal/israce/race.go | 14 + internal/logger/component.go | 313 + internal/logger/component_test.go | 229 + internal/logger/context.go | 48 + internal/logger/context_test.go | 187 + internal/logger/io_sink.go | 63 + 
internal/logger/level.go | 74 + internal/logger/logger.go | 265 + internal/logger/logger_test.go | 576 ++ internal/ptrutil/int64.go | 39 + internal/ptrutil/int64_test.go | 76 + internal/ptrutil/ptr.go | 12 + internal/rand/arith128_test.go | 128 + internal/rand/bits.go | 38 + internal/rand/example_test.go | 165 + internal/rand/exp.go | 223 + internal/rand/modulo_test.go | 52 + internal/rand/normal.go | 158 + internal/rand/race_test.go | 50 + internal/rand/rand.go | 374 + internal/rand/rand_test.go | 619 ++ internal/rand/regress_test.go | 492 + internal/rand/rng.go | 93 + internal/randutil/randutil.go | 39 + internal/randutil/randutil_test.go | 22 + internal/require/require.go | 819 ++ internal/spectest/spectest.go | 35 + internal/test/goleak/go.mod | 29 + internal/test/goleak/go.sum | 74 + internal/test/goleak/goleak_test.go | 122 + internal/uuid/uuid.go | 68 + internal/uuid/uuid_test.go | 107 + json_scanner.go | 533 + json_scanner_test.go | 361 + map_codec.go | 292 + marshal.go | 188 + marshal_test.go | 328 + marshal_value_cases_test.go | 218 + marshal_value_test.go | 90 + marshaling_cases_test.go | 25 + mgocompat/doc.go | 57 + mgocompat/registry.go | 18 + mgoregistry.go | 209 + mgoregistry_test.go | 1830 ++++ mode.go | 82 + objectid.go | 207 + objectid_test.go | 293 + pointer_codec.go | 88 + primitive.go | 379 + primitive_codecs.go | 89 + primitive_codecs_test.go | 1112 +++ raw.go | 93 + raw_array.go | 73 + raw_array_test.go | 472 + raw_element.go | 48 + raw_test.go | 579 ++ raw_value.go | 306 + raw_value_test.go | 207 + reader.go | 49 + registry.go | 381 + registry_examples_test.go | 303 + registry_test.go | 577 ++ slice_codec.go | 173 + string_codec.go | 107 + struct_codec.go | 691 ++ struct_codec_test.go | 170 + struct_tag_parser.go | 123 + struct_tag_parser_test.go | 160 + ...e772a03d07726ed75c3c3b83da865fe9b718adf8ae | 2 + ...54a7fb667ed38461db171af267665c21776f9a9ef4 | 2 + ...71449b9aa374c746754d4925fcfe4ba747e7101504 | 2 + ...618d128cea0ce05cef0711fdd91e403fe3b2f45628 | 2 + ...f00b7706e6ad0529c32689c63ca663dae37d072627 | 2 + .../atlas-data-lake-testing/aggregate.json | 53 + .../atlas-data-lake-testing/aggregate.yml | 23 + .../estimatedDocumentCount.json | 27 + .../estimatedDocumentCount.yml | 18 + testdata/atlas-data-lake-testing/find.json | 65 + testdata/atlas-data-lake-testing/find.yml | 27 + testdata/atlas-data-lake-testing/getMore.json | 57 + testdata/atlas-data-lake-testing/getMore.yml | 35 + .../listCollections.json | 25 + .../listCollections.yml | 17 + .../listDatabases.json | 24 + .../atlas-data-lake-testing/listDatabases.yml | 15 + .../atlas-data-lake-testing/runCommand.json | 31 + .../atlas-data-lake-testing/runCommand.yml | 20 + testdata/auth/connection-string.json | 651 ++ testdata/auth/connection-string.yml | 471 + testdata/bson-binary-vector/float32.json | 65 + testdata/bson-binary-vector/int8.json | 57 + testdata/bson-binary-vector/packed_bit.json | 83 + testdata/bson-corpus/array.json | 49 + testdata/bson-corpus/binary.json | 153 + testdata/bson-corpus/boolean.json | 27 + testdata/bson-corpus/bsonview | 434 + testdata/bson-corpus/code.json | 67 + testdata/bson-corpus/code_w_scope.json | 78 + testdata/bson-corpus/datetime.json | 42 + testdata/bson-corpus/dbpointer.json | 56 + testdata/bson-corpus/dbref.json | 51 + testdata/bson-corpus/decimal128-1.json | 317 + testdata/bson-corpus/decimal128-2.json | 793 ++ testdata/bson-corpus/decimal128-3.json | 1771 ++++ testdata/bson-corpus/decimal128-4.json | 165 + testdata/bson-corpus/decimal128-5.json | 402 + 
testdata/bson-corpus/decimal128-6.json | 131 + testdata/bson-corpus/decimal128-7.json | 327 + testdata/bson-corpus/document.json | 60 + testdata/bson-corpus/double.json | 87 + testdata/bson-corpus/int32.json | 43 + testdata/bson-corpus/int64.json | 43 + testdata/bson-corpus/maxkey.json | 12 + testdata/bson-corpus/minkey.json | 12 + .../bson-corpus/multi-type-deprecated.json | 15 + testdata/bson-corpus/multi-type.json | 11 + testdata/bson-corpus/null.json | 12 + testdata/bson-corpus/oid.json | 28 + testdata/bson-corpus/regex.json | 65 + testdata/bson-corpus/string.json | 72 + testdata/bson-corpus/symbol.json | 80 + testdata/bson-corpus/timestamp.json | 34 + testdata/bson-corpus/top.json | 266 + testdata/bson-corpus/undefined.json | 15 + .../change-streams-clusterTime.json | 81 + .../change-streams-clusterTime.yml | 41 + .../change-streams-disambiguatedPaths.json | 251 + .../change-streams-disambiguatedPaths.yml | 103 + .../change-streams/change-streams-errors.json | 246 + .../change-streams/change-streams-errors.yml | 120 + .../change-streams-pre_and_post_images.json | 827 ++ .../change-streams-pre_and_post_images.yml | 351 + .../change-streams-resume-allowlist.json | 2348 +++++ .../change-streams-resume-allowlist.yml | 1169 +++ .../change-streams-resume-errorLabels.json | 2130 ++++ .../change-streams-resume-errorLabels.yml | 1069 ++ .../change-streams-showExpandedEvents.json | 516 + .../change-streams-showExpandedEvents.yml | 307 + testdata/change-streams/change-streams.json | 1795 ++++ testdata/change-streams/change-streams.yml | 927 ++ .../change-streams-test.json | 70 + .../corpus-encrypted.json | 7685 +++++++++++++++ .../corpus-key-aws.json | 33 + .../corpus-key-azure.json | 33 + .../corpus-key-gcp.json | 35 + .../corpus-key-kmip.json | 32 + .../corpus-key-local.json | 31 + .../corpus-schema.json | 6335 ++++++++++++ .../client-side-encryption-prose/corpus.json | 8619 +++++++++++++++++ .../encrypted-fields.json | 30 + .../external-key.json | 31 + .../external-schema.json | 19 + .../key1-document.json | 30 + .../limits-doc.json | 102 + .../limits-key.json | 31 + .../limits-schema.json | 1405 +++ .../range-encryptedFields-Date.json | 36 + ...ge-encryptedFields-DecimalNoPrecision.json | 26 + ...ange-encryptedFields-DecimalPrecision.json | 35 + ...nge-encryptedFields-DoubleNoPrecision.json | 26 + ...range-encryptedFields-DoublePrecision.json | 35 + .../range-encryptedFields-Int.json | 32 + .../range-encryptedFields-Long.json | 32 + .../legacy/aggregate.json | 390 + .../legacy/aggregate.yml | 120 + .../legacy/awsTemporary.json | 225 + .../legacy/awsTemporary.yml | 57 + .../legacy/azureKMS.json | 224 + .../legacy/azureKMS.yml | 46 + .../legacy/badQueries.json | 1446 +++ .../legacy/badQueries.yml | 536 + .../legacy/badSchema.json | 254 + .../legacy/badSchema.yml | 73 + .../client-side-encryption/legacy/basic.json | 350 + .../client-side-encryption/legacy/basic.yml | 102 + .../client-side-encryption/legacy/bulk.json | 333 + .../client-side-encryption/legacy/bulk.yml | 78 + .../legacy/bypassAutoEncryption.json | 402 + .../legacy/bypassAutoEncryption.yml | 100 + .../legacy/bypassedCommand.json | 107 + .../legacy/bypassedCommand.yml | 43 + .../client-side-encryption/legacy/count.json | 229 + .../client-side-encryption/legacy/count.yml | 54 + .../legacy/countDocuments.json | 241 + .../legacy/countDocuments.yml | 52 + .../legacy/create-and-createIndexes.json | 115 + .../legacy/create-and-createIndexes.yml | 58 + .../client-side-encryption/legacy/delete.json | 340 + 
.../client-side-encryption/legacy/delete.yml | 91 + .../legacy/distinct.json | 276 + .../legacy/distinct.yml | 66 + .../legacy/explain.json | 239 + .../client-side-encryption/legacy/explain.yml | 57 + .../client-side-encryption/legacy/find.json | 408 + .../client-side-encryption/legacy/find.yml | 105 + .../legacy/findOneAndDelete.json | 221 + .../legacy/findOneAndDelete.yml | 50 + .../legacy/findOneAndReplace.json | 227 + .../legacy/findOneAndReplace.yml | 50 + .../legacy/findOneAndUpdate.json | 231 + .../legacy/findOneAndUpdate.yml | 50 + .../legacy/fle2v2-BypassQueryAnalysis.json | 261 + .../legacy/fle2v2-BypassQueryAnalysis.yml | 89 + .../legacy/fle2v2-Compact.json | 233 + .../legacy/fle2v2-Compact.yml | 84 + .../fle2v2-CreateCollection-OldServer.json | 94 + .../fle2v2-CreateCollection-OldServer.yml | 61 + .../legacy/fle2v2-CreateCollection.json | 1758 ++++ .../legacy/fle2v2-CreateCollection.yml | 937 ++ .../legacy/fle2v2-DecryptExistingData.json | 149 + .../legacy/fle2v2-DecryptExistingData.yml | 66 + .../legacy/fle2v2-Delete.json | 284 + .../legacy/fle2v2-Delete.yml | 100 + ...EncryptedFields-vs-EncryptedFieldsMap.json | 212 + ...-EncryptedFields-vs-EncryptedFieldsMap.yml | 79 + .../fle2v2-EncryptedFields-vs-jsonSchema.json | 300 + .../fle2v2-EncryptedFields-vs-jsonSchema.yml | 102 + .../fle2v2-EncryptedFieldsMap-defaults.json | 105 + .../fle2v2-EncryptedFieldsMap-defaults.yml | 58 + .../legacy/fle2v2-FindOneAndUpdate.json | 560 ++ .../legacy/fle2v2-FindOneAndUpdate.yml | 200 + .../legacy/fle2v2-InsertFind-Indexed.json | 296 + .../legacy/fle2v2-InsertFind-Indexed.yml | 96 + .../legacy/fle2v2-InsertFind-Unindexed.json | 248 + .../legacy/fle2v2-InsertFind-Unindexed.yml | 89 + .../legacy/fle2v2-MissingKey.json | 116 + .../legacy/fle2v2-MissingKey.yml | 43 + .../legacy/fle2v2-NoEncryption.json | 87 + .../legacy/fle2v2-NoEncryption.yml | 44 + .../legacy/fle2v2-Rangev2-Compact.json | 290 + .../legacy/fle2v2-Rangev2-Compact.yml | 94 + .../legacy/fle2v2-Rangev2-Date-Aggregate.json | 508 + .../legacy/fle2v2-Rangev2-Date-Aggregate.yml | 229 + .../fle2v2-Rangev2-Date-Correctness.json | 1842 ++++ .../fle2v2-Rangev2-Date-Correctness.yml | 422 + .../legacy/fle2v2-Rangev2-Date-Delete.json | 442 + .../legacy/fle2v2-Rangev2-Date-Delete.yml | 176 + .../fle2v2-Rangev2-Date-FindOneAndUpdate.json | 514 + .../fle2v2-Rangev2-Date-FindOneAndUpdate.yml | 227 + .../fle2v2-Rangev2-Date-InsertFind.json | 499 + .../legacy/fle2v2-Rangev2-Date-InsertFind.yml | 223 + .../legacy/fle2v2-Rangev2-Date-Update.json | 516 + .../legacy/fle2v2-Rangev2-Date-Update.yml | 240 + .../fle2v2-Rangev2-Decimal-Aggregate.json | 1902 ++++ .../fle2v2-Rangev2-Decimal-Aggregate.yml | 1675 ++++ .../fle2v2-Rangev2-Decimal-Correctness.json | 1158 +++ .../fle2v2-Rangev2-Decimal-Correctness.yml | 293 + .../legacy/fle2v2-Rangev2-Decimal-Delete.json | 1116 +++ .../legacy/fle2v2-Rangev2-Decimal-Delete.yml | 899 ++ ...e2v2-Rangev2-Decimal-FindOneAndUpdate.json | 1906 ++++ ...le2v2-Rangev2-Decimal-FindOneAndUpdate.yml | 1672 ++++ .../fle2v2-Rangev2-Decimal-InsertFind.json | 1893 ++++ .../fle2v2-Rangev2-Decimal-InsertFind.yml | 1668 ++++ .../legacy/fle2v2-Rangev2-Decimal-Update.json | 1910 ++++ .../legacy/fle2v2-Rangev2-Decimal-Update.yml | 1685 ++++ ...v2-Rangev2-DecimalPrecision-Aggregate.json | 584 ++ ...2v2-Rangev2-DecimalPrecision-Aggregate.yml | 317 + ...-Rangev2-DecimalPrecision-Correctness.json | 1650 ++++ ...2-Rangev2-DecimalPrecision-Correctness.yml | 424 + ...le2v2-Rangev2-DecimalPrecision-Delete.json | 476 + 
...fle2v2-Rangev2-DecimalPrecision-Delete.yml | 220 + ...ev2-DecimalPrecision-FindOneAndUpdate.json | 588 ++ ...gev2-DecimalPrecision-FindOneAndUpdate.yml | 315 + ...2-Rangev2-DecimalPrecision-InsertFind.json | 571 ++ ...v2-Rangev2-DecimalPrecision-InsertFind.yml | 307 + ...le2v2-Rangev2-DecimalPrecision-Update.json | 588 ++ ...fle2v2-Rangev2-DecimalPrecision-Update.yml | 324 + .../legacy/fle2v2-Rangev2-Defaults.json | 381 + .../legacy/fle2v2-Rangev2-Defaults.yml | 157 + .../fle2v2-Rangev2-Double-Aggregate.json | 1132 +++ .../fle2v2-Rangev2-Double-Aggregate.yml | 901 ++ .../fle2v2-Rangev2-Double-Correctness.json | 1160 +++ .../fle2v2-Rangev2-Double-Correctness.yml | 292 + .../legacy/fle2v2-Rangev2-Double-Delete.json | 732 ++ .../legacy/fle2v2-Rangev2-Double-Delete.yml | 512 + ...le2v2-Rangev2-Double-FindOneAndUpdate.json | 1136 +++ ...fle2v2-Rangev2-Double-FindOneAndUpdate.yml | 899 ++ .../fle2v2-Rangev2-Double-InsertFind.json | 1123 +++ .../fle2v2-Rangev2-Double-InsertFind.yml | 895 ++ .../legacy/fle2v2-Rangev2-Double-Update.json | 1140 +++ .../legacy/fle2v2-Rangev2-Double-Update.yml | 912 ++ ...2v2-Rangev2-DoublePrecision-Aggregate.json | 580 ++ ...e2v2-Rangev2-DoublePrecision-Aggregate.yml | 313 + ...2-Rangev2-DoublePrecision-Correctness.json | 1650 ++++ ...v2-Rangev2-DoublePrecision-Correctness.yml | 424 + ...fle2v2-Rangev2-DoublePrecision-Delete.json | 474 + .../fle2v2-Rangev2-DoublePrecision-Delete.yml | 218 + ...gev2-DoublePrecision-FindOneAndUpdate.json | 584 ++ ...ngev2-DoublePrecision-FindOneAndUpdate.yml | 311 + ...v2-Rangev2-DoublePrecision-InsertFind.json | 571 ++ ...2v2-Rangev2-DoublePrecision-InsertFind.yml | 307 + ...fle2v2-Rangev2-DoublePrecision-Update.json | 588 ++ .../fle2v2-Rangev2-DoublePrecision-Update.yml | 326 + .../legacy/fle2v2-Rangev2-Int-Aggregate.json | 484 + .../legacy/fle2v2-Rangev2-Int-Aggregate.yml | 229 + .../fle2v2-Rangev2-Int-Correctness.json | 1644 ++++ .../legacy/fle2v2-Rangev2-Int-Correctness.yml | 423 + .../legacy/fle2v2-Rangev2-Int-Delete.json | 420 + .../legacy/fle2v2-Rangev2-Int-Delete.yml | 176 + .../fle2v2-Rangev2-Int-FindOneAndUpdate.json | 488 + .../fle2v2-Rangev2-Int-FindOneAndUpdate.yml | 227 + .../legacy/fle2v2-Rangev2-Int-InsertFind.json | 475 + .../legacy/fle2v2-Rangev2-Int-InsertFind.yml | 223 + .../legacy/fle2v2-Rangev2-Int-Update.json | 492 + .../legacy/fle2v2-Rangev2-Int-Update.yml | 242 + .../legacy/fle2v2-Rangev2-Long-Aggregate.json | 484 + .../legacy/fle2v2-Rangev2-Long-Aggregate.yml | 229 + .../fle2v2-Rangev2-Long-Correctness.json | 1644 ++++ .../fle2v2-Rangev2-Long-Correctness.yml | 422 + .../legacy/fle2v2-Rangev2-Long-Delete.json | 420 + .../legacy/fle2v2-Rangev2-Long-Delete.yml | 176 + .../fle2v2-Rangev2-Long-FindOneAndUpdate.json | 488 + .../fle2v2-Rangev2-Long-FindOneAndUpdate.yml | 227 + .../fle2v2-Rangev2-Long-InsertFind.json | 475 + .../legacy/fle2v2-Rangev2-Long-InsertFind.yml | 223 + .../legacy/fle2v2-Rangev2-Long-Update.json | 492 + .../legacy/fle2v2-Rangev2-Long-Update.yml | 242 + .../legacy/fle2v2-Rangev2-WrongType.json | 163 + .../legacy/fle2v2-Rangev2-WrongType.yml | 44 + .../legacy/fle2v2-Update.json | 570 ++ .../legacy/fle2v2-Update.yml | 210 + ...v2-validatorAndPartialFieldExpression.json | 503 + ...2v2-validatorAndPartialFieldExpression.yml | 169 + .../client-side-encryption/legacy/gcpKMS.json | 226 + .../client-side-encryption/legacy/gcpKMS.yml | 46 + .../legacy/getMore.json | 263 + .../client-side-encryption/legacy/getMore.yml | 61 + .../client-side-encryption/legacy/insert.json | 344 + 
.../client-side-encryption/legacy/insert.yml | 88 + .../legacy/keyAltName.json | 228 + .../legacy/keyAltName.yml | 64 + .../legacy/keyCache.json | 270 + .../legacy/keyCache.yml | 69 + .../legacy/kmipKMS.json | 223 + .../client-side-encryption/legacy/kmipKMS.yml | 46 + .../legacy/localKMS.json | 191 + .../legacy/localKMS.yml | 47 + .../legacy/localSchema.json | 258 + .../legacy/localSchema.yml | 65 + .../legacy/malformedCiphertext.json | 321 + .../legacy/malformedCiphertext.yml | 69 + .../legacy/maxWireVersion.json | 74 + .../legacy/maxWireVersion.yml | 22 + .../legacy/missingKey.json | 179 + .../legacy/missingKey.yml | 42 + .../legacy/noSchema.json | 67 + .../legacy/noSchema.yml | 37 + .../legacy/replaceOne.json | 239 + .../legacy/replaceOne.yml | 54 + .../client-side-encryption/legacy/types.json | 1646 ++++ .../client-side-encryption/legacy/types.yml | 501 + .../legacy/unsupportedCommand.json | 152 + .../legacy/unsupportedCommand.yml | 25 + .../legacy/updateMany.json | 307 + .../legacy/updateMany.yml | 70 + .../legacy/updateOne.json | 465 + .../legacy/updateOne.yml | 160 + .../validatorAndPartialFieldExpression.json | 642 ++ .../validatorAndPartialFieldExpression.yml | 166 + .../unified/addKeyAltName.json | 609 ++ .../unified/addKeyAltName.yml | 194 + .../createDataKey-kms_providers-invalid.json | 119 + .../createDataKey-kms_providers-invalid.yml | 67 + .../unified/createDataKey.json | 711 ++ .../unified/createDataKey.yml | 309 + .../unified/deleteKey.json | 557 ++ .../unified/deleteKey.yml | 159 + .../unified/getKey.json | 319 + .../client-side-encryption/unified/getKey.yml | 105 + .../unified/getKeyByAltName.json | 289 + .../unified/getKeyByAltName.yml | 104 + .../unified/getKeys.json | 260 + .../unified/getKeys.yml | 122 + .../unified/keyCache.json | 198 + .../unified/keyCache.yml | 85 + .../unified/removeKeyAltName.json | 672 ++ .../unified/removeKeyAltName.yml | 157 + .../rewrapManyDataKey-decrypt_failure.json | 162 + .../rewrapManyDataKey-decrypt_failure.yml | 69 + .../rewrapManyDataKey-encrypt_failure.json | 250 + .../rewrapManyDataKey-encrypt_failure.yml | 122 + .../unified/rewrapManyDataKey.json | 1493 +++ .../unified/rewrapManyDataKey.yml | 438 + .../bulkWrite.json | 159 + .../bulkWrite.yml | 84 + .../command-execution.json | 393 + .../command-execution.yml | 251 + .../error-transformations.json | 180 + .../error-transformations.yml | 96 + .../global-timeoutMS.json | 5830 +++++++++++ .../global-timeoutMS.yml | 3235 +++++++ .../gridfs-advanced.json | 385 + .../gridfs-advanced.yml | 206 + .../gridfs-delete.json | 285 + .../gridfs-delete.yml | 152 + .../gridfs-find.json | 183 + .../gridfs-find.yml | 100 + .../override-operation-timeoutMS.json | 3577 +++++++ .../override-operation-timeoutMS.yml | 1917 ++++ .../retryability-legacy-timeouts.json | 3042 ++++++ .../retryability-legacy-timeouts.yml | 1676 ++++ .../retryability-timeoutMS.json | 5688 +++++++++++ .../retryability-timeoutMS.yml | 2823 ++++++ .../runCursorCommand.json | 583 ++ .../runCursorCommand.yml | 304 + testdata/code.json.gz | Bin 0 -> 120432 bytes .../clustered-indexes.json | 291 + .../clustered-indexes.yml | 135 + .../createCollection-pre_and_post_images.json | 92 + .../createCollection-pre_and_post_images.yml | 50 + .../modifyCollection-errorResponse.json | 118 + .../modifyCollection-errorResponse.yml | 59 + .../modifyCollection-pre_and_post_images.json | 111 + .../modifyCollection-pre_and_post_images.yml | 58 + .../timeseries-collection.json | 320 + .../timeseries-collection.yml | 163 + 
testdata/command-monitoring/bulkWrite.json | 154 + testdata/command-monitoring/bulkWrite.yml | 68 + testdata/command-monitoring/command.json | 83 + testdata/command-monitoring/command.yml | 50 + testdata/command-monitoring/deleteMany.json | 162 + testdata/command-monitoring/deleteMany.yml | 79 + testdata/command-monitoring/deleteOne.json | 162 + testdata/command-monitoring/deleteOne.yml | 79 + testdata/command-monitoring/find.json | 558 ++ testdata/command-monitoring/find.yml | 262 + testdata/command-monitoring/insertMany.json | 148 + testdata/command-monitoring/insertMany.yml | 79 + testdata/command-monitoring/insertOne.json | 144 + testdata/command-monitoring/insertOne.yml | 77 + .../command-monitoring/logging/command.json | 215 + .../command-monitoring/logging/command.yml | 95 + .../logging/driver-connection-id.json | 146 + .../logging/driver-connection-id.yml | 76 + .../logging/no-handshake-messages.json | 94 + .../logging/no-handshake-messages.yml | 58 + .../logging/no-heartbeat-messages.json | 91 + .../logging/no-heartbeat-messages.yml | 58 + .../logging/operation-id.json | 198 + .../logging/operation-id.yml | 99 + .../logging/pre-42-server-connection-id.json | 119 + .../logging/pre-42-server-connection-id.yml | 66 + .../logging/redacted-commands.json | 1438 +++ .../logging/redacted-commands.yml | 850 ++ .../logging/server-connection-id.json | 131 + .../logging/server-connection-id.yml | 66 + .../logging/service-id.json | 207 + .../command-monitoring/logging/service-id.yml | 111 + .../logging/unacknowledged-write.json | 134 + .../logging/unacknowledged-write.yml | 63 + .../pre-42-server-connection-id.json | 101 + .../pre-42-server-connection-id.yml | 56 + .../command-monitoring/redacted-commands.json | 679 ++ .../command-monitoring/redacted-commands.yml | 348 + .../server-connection-id.json | 101 + .../server-connection-id.yml | 56 + .../unacknowledged-client-bulkWrite.json | 220 + .../unacknowledged-client-bulkWrite.yml | 111 + .../unacknowledgedBulkWrite.json | 220 + .../unacknowledgedBulkWrite.yml | 111 + testdata/command-monitoring/updateMany.json | 188 + testdata/command-monitoring/updateMany.yml | 87 + testdata/command-monitoring/updateOne.json | 260 + testdata/command-monitoring/updateOne.yml | 118 + .../connection-must-have-id.json | 52 + .../connection-must-have-id.yml | 29 + .../connection-must-order-ids.json | 52 + .../connection-must-order-ids.yml | 29 + .../logging/connection-logging.json | 525 + .../logging/connection-logging.yml | 226 + .../logging/connection-pool-options.json | 458 + .../logging/connection-pool-options.yml | 254 + .../pool-checkin-destroy-closed.json | 50 + .../pool-checkin-destroy-closed.yml | 29 + .../pool-checkin-destroy-stale.json | 50 + .../pool-checkin-destroy-stale.yml | 29 + .../pool-checkin-make-available.json | 45 + .../pool-checkin-make-available.yml | 26 + .../pool-checkin.json | 34 + .../pool-checkin.yml | 21 + .../pool-checkout-connection.json | 38 + .../pool-checkout-connection.yml | 21 + ...kout-custom-maxConnecting-is-enforced.json | 81 + ...ckout-custom-maxConnecting-is-enforced.yml | 50 + .../pool-checkout-error-closed.json | 68 + .../pool-checkout-error-closed.yml | 38 + ...ol-checkout-maxConnecting-is-enforced.json | 108 + ...ool-checkout-maxConnecting-is-enforced.yml | 79 + .../pool-checkout-maxConnecting-timeout.json | 103 + .../pool-checkout-maxConnecting-timeout.yml | 69 + ...-minPoolSize-connection-maxConnecting.json | 88 + ...t-minPoolSize-connection-maxConnecting.yml | 63 + .../pool-checkout-multiple.json | 70 + 
.../pool-checkout-multiple.yml | 39 + .../pool-checkout-no-idle.json | 68 + .../pool-checkout-no-idle.yml | 41 + .../pool-checkout-no-stale.json | 73 + .../pool-checkout-no-stale.yml | 41 + ...out-returned-connection-maxConnecting.json | 124 + ...kout-returned-connection-maxConnecting.yml | 86 + .../pool-clear-clears-waitqueue.json | 101 + .../pool-clear-clears-waitqueue.yml | 63 + ...lear-interrupting-pending-connections.json | 77 + ...clear-interrupting-pending-connections.yml | 42 + .../pool-clear-min-size.json | 68 + .../pool-clear-min-size.yml | 37 + .../pool-clear-paused.json | 32 + .../pool-clear-paused.yml | 15 + .../pool-clear-ready.json | 69 + .../pool-clear-ready.yml | 36 + ...e-run-interruptInUseConnections-false.json | 81 + ...le-run-interruptInUseConnections-false.yml | 48 + .../pool-close-destroy-conns.json | 52 + .../pool-close-destroy-conns.yml | 30 + .../pool-close.json | 21 + .../pool-close.yml | 11 + .../pool-create-max-size.json | 133 + .../pool-create-max-size.yml | 73 + .../pool-create-min-size-error.json | 66 + .../pool-create-min-size-error.yml | 40 + .../pool-create-min-size.json | 66 + .../pool-create-min-size.yml | 40 + .../pool-create-with-options.json | 32 + .../pool-create-with-options.yml | 21 + .../pool-create.json | 19 + .../pool-create.yml | 12 + .../pool-ready-ready.json | 39 + .../pool-ready-ready.yml | 19 + .../pool-ready.json | 57 + .../pool-ready.yml | 30 + .../wait-queue-fairness.json | 196 + .../wait-queue-fairness.yml | 126 + .../wait-queue-timeout.json | 75 + .../wait-queue-timeout.yml | 48 + testdata/connection-string/invalid-uris.json | 274 + testdata/connection-string/invalid-uris.yml | 241 + testdata/connection-string/valid-auth.json | 332 + testdata/connection-string/valid-auth.yml | 257 + .../valid-db-with-dotted-name.json | 100 + .../valid-db-with-dotted-name.yml | 77 + .../valid-host_identifiers.json | 154 + .../valid-host_identifiers.yml | 121 + testdata/connection-string/valid-options.json | 25 + testdata/connection-string/valid-options.yml | 17 + .../valid-unix_socket-absolute.json | 266 + .../valid-unix_socket-absolute.yml | 209 + .../valid-unix_socket-relative.json | 286 + .../valid-unix_socket-relative.yml | 225 + .../connection-string/valid-warnings.json | 68 + testdata/connection-string/valid-warnings.yml | 51 + .../callback-aborts.json | 244 + .../callback-aborts.yml | 170 + .../callback-commits.json | 303 + .../callback-commits.yml | 204 + .../callback-retry.json | 315 + .../callback-retry.yml | 215 + .../convenient-transactions/commit-retry.json | 533 + .../convenient-transactions/commit-retry.yml | 327 + .../commit-transienttransactionerror-4.2.json | 197 + .../commit-transienttransactionerror-4.2.yml | 139 + .../commit-transienttransactionerror.json | 725 ++ .../commit-transienttransactionerror.yml | 175 + .../commit-writeconcernerror.json | 602 ++ .../commit-writeconcernerror.yml | 216 + testdata/convenient-transactions/commit.json | 286 + testdata/convenient-transactions/commit.yml | 193 + .../transaction-options.json | 577 ++ .../transaction-options.yml | 274 + .../crud/unified/aggregate-allowdiskuse.json | 155 + .../crud/unified/aggregate-allowdiskuse.yml | 75 + testdata/crud/unified/aggregate-let.json | 376 + testdata/crud/unified/aggregate-let.yml | 138 + .../aggregate-merge-errorResponse.json | 90 + .../unified/aggregate-merge-errorResponse.yml | 42 + testdata/crud/unified/aggregate-merge.json | 497 + testdata/crud/unified/aggregate-merge.yml | 185 + .../unified/aggregate-out-readConcern.json | 407 + 
.../unified/aggregate-out-readConcern.yml | 171 + .../aggregate-write-readPreference.json | 461 + .../aggregate-write-readPreference.yml | 157 + testdata/crud/unified/aggregate.json | 568 ++ testdata/crud/unified/aggregate.yml | 216 + .../bulkWrite-arrayFilters-clientError.json | 151 + .../bulkWrite-arrayFilters-clientError.yml | 98 + .../crud/unified/bulkWrite-arrayFilters.json | 279 + .../crud/unified/bulkWrite-arrayFilters.yml | 174 + testdata/crud/unified/bulkWrite-comment.json | 519 + testdata/crud/unified/bulkWrite-comment.yml | 177 + .../bulkWrite-delete-hint-clientError.json | 193 + .../bulkWrite-delete-hint-clientError.yml | 113 + .../bulkWrite-delete-hint-serverError.json | 252 + .../bulkWrite-delete-hint-serverError.yml | 142 + .../crud/unified/bulkWrite-delete-hint.json | 247 + .../crud/unified/bulkWrite-delete-hint.yml | 154 + .../unified/bulkWrite-deleteMany-let.json | 200 + .../crud/unified/bulkWrite-deleteMany-let.yml | 86 + .../crud/unified/bulkWrite-deleteOne-let.json | 200 + .../crud/unified/bulkWrite-deleteOne-let.yml | 86 + .../crud/unified/bulkWrite-errorResponse.json | 88 + .../crud/unified/bulkWrite-errorResponse.yml | 50 + .../bulkWrite-insertOne-dots_and_dollars.json | 374 + .../bulkWrite-insertOne-dots_and_dollars.yml | 138 + ...bulkWrite-replaceOne-dots_and_dollars.json | 532 + .../bulkWrite-replaceOne-dots_and_dollars.yml | 165 + .../unified/bulkWrite-replaceOne-let.json | 226 + .../crud/unified/bulkWrite-replaceOne-let.yml | 93 + .../unified/bulkWrite-replaceOne-sort.json | 239 + .../unified/bulkWrite-replaceOne-sort.yml | 94 + .../bulkWrite-update-hint-clientError.json | 284 + .../bulkWrite-update-hint-clientError.yml | 148 + .../bulkWrite-update-hint-serverError.json | 422 + .../bulkWrite-update-hint-serverError.yml | 239 + .../crud/unified/bulkWrite-update-hint.json | 445 + .../crud/unified/bulkWrite-update-hint.yml | 256 + .../unified/bulkWrite-update-validation.json | 210 + .../unified/bulkWrite-update-validation.yml | 73 + ...bulkWrite-updateMany-dots_and_dollars.json | 452 + .../bulkWrite-updateMany-dots_and_dollars.yml | 150 + .../unified/bulkWrite-updateMany-let.json | 243 + .../crud/unified/bulkWrite-updateMany-let.yml | 96 + .../bulkWrite-updateOne-dots_and_dollars.json | 460 + .../bulkWrite-updateOne-dots_and_dollars.yml | 150 + .../crud/unified/bulkWrite-updateOne-let.json | 247 + .../crud/unified/bulkWrite-updateOne-let.yml | 95 + .../unified/bulkWrite-updateOne-sort.json | 255 + .../crud/unified/bulkWrite-updateOne-sort.yml | 94 + .../client-bulkWrite-delete-options.json | 268 + .../client-bulkWrite-delete-options.yml | 137 + .../client-bulkWrite-errorResponse.json | 69 + .../client-bulkWrite-errorResponse.yml | 38 + .../crud/unified/client-bulkWrite-errors.json | 513 + .../crud/unified/client-bulkWrite-errors.yml | 270 + .../client-bulkWrite-mixed-namespaces.json | 315 + .../client-bulkWrite-mixed-namespaces.yml | 147 + .../unified/client-bulkWrite-options.json | 716 ++ .../crud/unified/client-bulkWrite-options.yml | 351 + .../unified/client-bulkWrite-ordered.json | 291 + .../crud/unified/client-bulkWrite-ordered.yml | 153 + .../client-bulkWrite-partialResults.json | 540 ++ .../client-bulkWrite-partialResults.yml | 262 + .../client-bulkWrite-replaceOne-sort.json | 163 + .../client-bulkWrite-replaceOne-sort.yml | 77 + .../unified/client-bulkWrite-results.json | 833 ++ .../crud/unified/client-bulkWrite-results.yml | 312 + .../client-bulkWrite-update-options.json | 949 ++ .../client-bulkWrite-update-options.yml | 338 + 
.../client-bulkWrite-update-pipeline.json | 258 + .../client-bulkWrite-update-pipeline.yml | 133 + .../client-bulkWrite-update-validation.json | 216 + .../client-bulkWrite-update-validation.yml | 79 + .../client-bulkWrite-updateOne-sort.json | 167 + .../client-bulkWrite-updateOne-sort.yml | 77 + .../crud/unified/countDocuments-comment.json | 208 + .../crud/unified/countDocuments-comment.yml | 92 + .../db-aggregate-write-readPreference.json | 447 + .../db-aggregate-write-readPreference.yml | 153 + testdata/crud/unified/db-aggregate.json | 107 + testdata/crud/unified/db-aggregate.yml | 73 + testdata/crud/unified/deleteMany-comment.json | 245 + testdata/crud/unified/deleteMany-comment.yml | 97 + .../unified/deleteMany-hint-clientError.json | 149 + .../unified/deleteMany-hint-clientError.yml | 87 + .../unified/deleteMany-hint-serverError.json | 190 + .../unified/deleteMany-hint-serverError.yml | 107 + testdata/crud/unified/deleteMany-hint.json | 173 + testdata/crud/unified/deleteMany-hint.yml | 99 + testdata/crud/unified/deleteMany-let.json | 201 + testdata/crud/unified/deleteMany-let.yml | 93 + testdata/crud/unified/deleteOne-comment.json | 243 + testdata/crud/unified/deleteOne-comment.yml | 98 + .../crud/unified/deleteOne-errorResponse.json | 82 + .../crud/unified/deleteOne-errorResponse.yml | 46 + .../unified/deleteOne-hint-clientError.json | 133 + .../unified/deleteOne-hint-clientError.yml | 80 + .../unified/deleteOne-hint-serverError.json | 170 + .../unified/deleteOne-hint-serverError.yml | 100 + testdata/crud/unified/deleteOne-hint.json | 161 + testdata/crud/unified/deleteOne-hint.yml | 95 + testdata/crud/unified/deleteOne-let.json | 191 + testdata/crud/unified/deleteOne-let.yml | 91 + testdata/crud/unified/distinct-comment.json | 178 + testdata/crud/unified/distinct-comment.yml | 98 + testdata/crud/unified/distinct-hint.json | 139 + testdata/crud/unified/distinct-hint.yml | 73 + .../estimatedDocumentCount-comment.json | 170 + .../estimatedDocumentCount-comment.yml | 95 + .../crud/unified/estimatedDocumentCount.json | 357 + .../crud/unified/estimatedDocumentCount.yml | 181 + .../find-allowdiskuse-clientError.json | 79 + .../unified/find-allowdiskuse-clientError.yml | 55 + .../find-allowdiskuse-serverError.json | 100 + .../unified/find-allowdiskuse-serverError.yml | 68 + testdata/crud/unified/find-allowdiskuse.json | 120 + testdata/crud/unified/find-allowdiskuse.yml | 79 + testdata/crud/unified/find-comment.json | 403 + testdata/crud/unified/find-comment.yml | 166 + testdata/crud/unified/find-let.json | 148 + testdata/crud/unified/find-let.yml | 71 + testdata/crud/unified/find.json | 156 + testdata/crud/unified/find.yml | 68 + .../unified/findOneAndDelete-comment.json | 211 + .../crud/unified/findOneAndDelete-comment.yml | 96 + .../findOneAndDelete-hint-clientError.json | 133 + .../findOneAndDelete-hint-clientError.yml | 91 + .../findOneAndDelete-hint-serverError.json | 162 + .../findOneAndDelete-hint-serverError.yml | 107 + .../crud/unified/findOneAndDelete-hint.json | 155 + .../crud/unified/findOneAndDelete-hint.yml | 102 + .../crud/unified/findOneAndDelete-let.json | 180 + .../crud/unified/findOneAndDelete-let.yml | 86 + .../unified/findOneAndReplace-comment.json | 234 + .../unified/findOneAndReplace-comment.yml | 101 + .../findOneAndReplace-dots_and_dollars.json | 430 + .../findOneAndReplace-dots_and_dollars.yml | 140 + .../findOneAndReplace-hint-clientError.json | 139 + .../findOneAndReplace-hint-clientError.yml | 83 + .../findOneAndReplace-hint-serverError.json | 172 + 
.../findOneAndReplace-hint-serverError.yml | 99 + .../crud/unified/findOneAndReplace-hint.json | 173 + .../crud/unified/findOneAndReplace-hint.yml | 98 + .../crud/unified/findOneAndReplace-let.json | 197 + .../crud/unified/findOneAndReplace-let.yml | 94 + .../unified/findOneAndUpdate-comment.json | 228 + .../crud/unified/findOneAndUpdate-comment.yml | 95 + .../findOneAndUpdate-dots_and_dollars.json | 380 + .../findOneAndUpdate-dots_and_dollars.yml | 127 + .../findOneAndUpdate-errorResponse.json | 132 + .../findOneAndUpdate-errorResponse.yml | 69 + .../findOneAndUpdate-hint-clientError.json | 143 + .../findOneAndUpdate-hint-clientError.yml | 84 + .../findOneAndUpdate-hint-serverError.json | 180 + .../findOneAndUpdate-hint-serverError.yml | 100 + .../crud/unified/findOneAndUpdate-hint.json | 181 + .../crud/unified/findOneAndUpdate-hint.yml | 99 + .../crud/unified/findOneAndUpdate-let.json | 217 + .../crud/unified/findOneAndUpdate-let.yml | 96 + testdata/crud/unified/insertMany-comment.json | 226 + testdata/crud/unified/insertMany-comment.yml | 93 + .../unified/insertMany-dots_and_dollars.json | 338 + .../unified/insertMany-dots_and_dollars.yml | 128 + testdata/crud/unified/insertOne-comment.json | 220 + testdata/crud/unified/insertOne-comment.yml | 92 + .../unified/insertOne-dots_and_dollars.json | 614 ++ .../unified/insertOne-dots_and_dollars.yml | 238 + .../crud/unified/insertOne-errorResponse.json | 82 + .../crud/unified/insertOne-errorResponse.yml | 46 + testdata/crud/unified/replaceOne-comment.json | 230 + testdata/crud/unified/replaceOne-comment.yml | 99 + .../unified/replaceOne-dots_and_dollars.json | 567 ++ .../unified/replaceOne-dots_and_dollars.yml | 180 + testdata/crud/unified/replaceOne-hint.json | 203 + testdata/crud/unified/replaceOne-hint.yml | 108 + testdata/crud/unified/replaceOne-let.json | 219 + testdata/crud/unified/replaceOne-let.yml | 98 + testdata/crud/unified/replaceOne-sort.json | 232 + testdata/crud/unified/replaceOne-sort.yml | 94 + .../crud/unified/replaceOne-validation.json | 82 + .../crud/unified/replaceOne-validation.yml | 37 + ...ged-bulkWrite-delete-hint-clientError.json | 193 + ...dged-bulkWrite-delete-hint-clientError.yml | 112 + ...ged-bulkWrite-update-hint-clientError.json | 284 + ...dged-bulkWrite-update-hint-clientError.yml | 147 + ...nowledged-deleteMany-hint-clientError.json | 149 + ...knowledged-deleteMany-hint-clientError.yml | 86 + ...knowledged-deleteOne-hint-clientError.json | 133 + ...cknowledged-deleteOne-hint-clientError.yml | 79 + ...ged-findOneAndDelete-hint-clientError.json | 133 + ...dged-findOneAndDelete-hint-clientError.yml | 90 + ...ed-findOneAndReplace-hint-clientError.json | 139 + ...ged-findOneAndReplace-hint-clientError.yml | 82 + ...ged-findOneAndUpdate-hint-clientError.json | 143 + ...dged-findOneAndUpdate-hint-clientError.yml | 83 + ...nowledged-replaceOne-hint-clientError.json | 143 + ...knowledged-replaceOne-hint-clientError.yml | 83 + ...nowledged-updateMany-hint-clientError.json | 159 + ...knowledged-updateMany-hint-clientError.yml | 90 + ...knowledged-updateOne-hint-clientError.json | 147 + ...cknowledged-updateOne-hint-clientError.yml | 84 + testdata/crud/unified/updateMany-comment.json | 254 + testdata/crud/unified/updateMany-comment.yml | 104 + .../unified/updateMany-dots_and_dollars.json | 404 + .../unified/updateMany-dots_and_dollars.yml | 138 + .../unified/updateMany-hint-clientError.json | 159 + .../unified/updateMany-hint-clientError.yml | 91 + .../unified/updateMany-hint-serverError.json | 216 + 
.../unified/updateMany-hint-serverError.yml | 115 + testdata/crud/unified/updateMany-hint.json | 219 + testdata/crud/unified/updateMany-hint.yml | 115 + testdata/crud/unified/updateMany-let.json | 249 + testdata/crud/unified/updateMany-let.yml | 107 + .../crud/unified/updateMany-validation.json | 98 + .../crud/unified/updateMany-validation.yml | 39 + testdata/crud/unified/updateOne-comment.json | 260 + testdata/crud/unified/updateOne-comment.yml | 104 + .../unified/updateOne-dots_and_dollars.json | 412 + .../unified/updateOne-dots_and_dollars.yml | 138 + .../crud/unified/updateOne-errorResponse.json | 87 + .../crud/unified/updateOne-errorResponse.yml | 47 + .../unified/updateOne-hint-clientError.json | 147 + .../unified/updateOne-hint-clientError.yml | 85 + .../unified/updateOne-hint-serverError.json | 208 + .../unified/updateOne-hint-serverError.yml | 109 + testdata/crud/unified/updateOne-hint.json | 211 + testdata/crud/unified/updateOne-hint.yml | 109 + testdata/crud/unified/updateOne-let.json | 227 + testdata/crud/unified/updateOne-let.yml | 102 + testdata/crud/unified/updateOne-sort.json | 240 + testdata/crud/unified/updateOne-sort.yml | 96 + .../crud/unified/updateOne-validation.json | 80 + .../crud/unified/updateOne-validation.yml | 37 + .../crud/unified/updateWithPipelines.json | 494 + testdata/crud/unified/updateWithPipelines.yml | 299 + .../crud/v1/read/aggregate-collation.json | 39 + testdata/crud/v1/read/aggregate-collation.yml | 18 + testdata/crud/v1/read/aggregate-out.json | 102 + testdata/crud/v1/read/aggregate-out.yml | 44 + testdata/crud/v1/read/aggregate.json | 53 + testdata/crud/v1/read/aggregate.yml | 21 + testdata/crud/v1/read/count-collation.json | 48 + testdata/crud/v1/read/count-collation.yml | 26 + testdata/crud/v1/read/count-empty.json | 39 + testdata/crud/v1/read/count-empty.yml | 29 + testdata/crud/v1/read/count.json | 112 + testdata/crud/v1/read/count.yml | 74 + testdata/crud/v1/read/distinct-collation.json | 34 + testdata/crud/v1/read/distinct-collation.yml | 18 + testdata/crud/v1/read/distinct.json | 55 + testdata/crud/v1/read/distinct.yml | 32 + testdata/crud/v1/read/find-collation.json | 35 + testdata/crud/v1/read/find-collation.yml | 16 + testdata/crud/v1/read/find.json | 105 + testdata/crud/v1/read/find.yml | 49 + .../crud/v1/write/bulkWrite-arrayFilters.json | 111 + .../crud/v1/write/bulkWrite-arrayFilters.yml | 45 + .../crud/v1/write/bulkWrite-collation.json | 218 + .../crud/v1/write/bulkWrite-collation.yml | 102 + testdata/crud/v1/write/bulkWrite.json | 778 ++ testdata/crud/v1/write/bulkWrite.yml | 401 + .../crud/v1/write/deleteMany-collation.json | 48 + .../crud/v1/write/deleteMany-collation.yml | 23 + testdata/crud/v1/write/deleteMany.json | 76 + testdata/crud/v1/write/deleteMany.yml | 35 + .../crud/v1/write/deleteOne-collation.json | 52 + .../crud/v1/write/deleteOne-collation.yml | 23 + testdata/crud/v1/write/deleteOne.json | 96 + testdata/crud/v1/write/deleteOne.yml | 48 + .../v1/write/findOneAndDelete-collation.json | 60 + .../v1/write/findOneAndDelete-collation.yml | 24 + testdata/crud/v1/write/findOneAndDelete.json | 127 + testdata/crud/v1/write/findOneAndDelete.yml | 53 + .../v1/write/findOneAndReplace-collation.json | 59 + .../v1/write/findOneAndReplace-collation.yml | 25 + .../v1/write/findOneAndReplace-upsert.json | 201 + .../v1/write/findOneAndReplace-upsert.yml | 91 + testdata/crud/v1/write/findOneAndReplace.json | 273 + testdata/crud/v1/write/findOneAndReplace.yml | 113 + .../write/findOneAndUpdate-arrayFilters.json | 203 + 
.../write/findOneAndUpdate-arrayFilters.yml | 69 + .../v1/write/findOneAndUpdate-collation.json | 68 + .../v1/write/findOneAndUpdate-collation.yml | 28 + testdata/crud/v1/write/findOneAndUpdate.json | 379 + testdata/crud/v1/write/findOneAndUpdate.yml | 163 + testdata/crud/v1/write/insertMany.json | 159 + testdata/crud/v1/write/insertMany.yml | 77 + testdata/crud/v1/write/insertOne.json | 39 + testdata/crud/v1/write/insertOne.yml | 18 + .../crud/v1/write/replaceOne-collation.json | 54 + .../crud/v1/write/replaceOne-collation.yml | 25 + testdata/crud/v1/write/replaceOne.json | 205 + testdata/crud/v1/write/replaceOne.yml | 102 + .../v1/write/updateMany-arrayFilters.json | 185 + .../crud/v1/write/updateMany-arrayFilters.yml | 66 + .../crud/v1/write/updateMany-collation.json | 63 + .../crud/v1/write/updateMany-collation.yml | 29 + testdata/crud/v1/write/updateMany.json | 183 + testdata/crud/v1/write/updateMany.yml | 87 + .../crud/v1/write/updateOne-arrayFilters.json | 395 + .../crud/v1/write/updateOne-arrayFilters.yml | 114 + .../crud/v1/write/updateOne-collation.json | 55 + .../crud/v1/write/updateOne-collation.yml | 26 + testdata/crud/v1/write/updateOne.json | 167 + testdata/crud/v1/write/updateOne.yml | 84 + testdata/extended_bson/deep_bson.json.gz | Bin 0 -> 616 bytes testdata/extended_bson/flat_bson.json.gz | Bin 0 -> 5078 bytes testdata/extended_bson/full_bson.json.gz | Bin 0 -> 3345 bytes testdata/gridfs/delete.json | 799 ++ testdata/gridfs/delete.yml | 198 + testdata/gridfs/download.json | 558 ++ testdata/gridfs/download.yml | 241 + testdata/gridfs/downloadByName.json | 330 + testdata/gridfs/downloadByName.yml | 159 + testdata/gridfs/upload-disableMD5.json | 172 + testdata/gridfs/upload-disableMD5.yml | 92 + testdata/gridfs/upload.json | 616 ++ testdata/gridfs/upload.yml | 288 + .../index-management/createSearchIndex.json | 210 + .../index-management/createSearchIndex.yml | 92 + .../index-management/createSearchIndexes.json | 248 + .../index-management/createSearchIndexes.yml | 114 + .../index-management/dropSearchIndex.json | 74 + testdata/index-management/dropSearchIndex.yml | 43 + .../index-management/listSearchIndexes.json | 156 + .../index-management/listSearchIndexes.yml | 88 + .../searchIndexIgnoresReadWriteConcern.json | 252 + .../searchIndexIgnoresReadWriteConcern.yml | 146 + .../index-management/updateSearchIndex.json | 76 + .../index-management/updateSearchIndex.yml | 45 + .../loadBalanced-directConnection.json | 15 + .../loadBalanced-directConnection.yml | 14 + .../loadBalanced-no-results.json | 7 + .../load-balanced/loadBalanced-no-results.yml | 5 + .../loadBalanced-replicaSet-errors.json | 7 + .../loadBalanced-replicaSet-errors.yml | 6 + .../loadBalanced-true-multiple-hosts.json | 7 + .../loadBalanced-true-multiple-hosts.yml | 5 + .../load-balanced/loadBalanced-true-txt.json | 14 + .../load-balanced/loadBalanced-true-txt.yml | 11 + ...-conflicts_with_loadBalanced-true-txt.json | 7 + ...s-conflicts_with_loadBalanced-true-txt.yml | 5 + ...osts-conflicts_with_loadBalanced-true.json | 7 + ...Hosts-conflicts_with_loadBalanced-true.yml | 5 + .../load-balanced/srvMaxHosts-zero-txt.json | 15 + .../load-balanced/srvMaxHosts-zero-txt.yml | 11 + .../load-balanced/srvMaxHosts-zero.json | 15 + .../load-balanced/srvMaxHosts-zero.yml | 11 + .../replica-set/direct-connection-false.json | 16 + .../replica-set/direct-connection-false.yml | 11 + .../replica-set/direct-connection-true.json | 7 + .../replica-set/direct-connection-true.yml | 5 + .../replica-set/encoded-userinfo-and-db.json | 22 
+ .../replica-set/encoded-userinfo-and-db.yml | 19 + .../replica-set/loadBalanced-false-txt.json | 16 + .../replica-set/loadBalanced-false-txt.yml | 11 + .../replica-set/longer-parent-in-return.json | 17 + .../replica-set/longer-parent-in-return.yml | 12 + .../replica-set/misformatted-option.json | 7 + .../replica-set/misformatted-option.yml | 5 + .../replica-set/no-results.json | 7 + .../replica-set/no-results.yml | 5 + .../replica-set/not-enough-parts.json | 7 + .../replica-set/not-enough-parts.yml | 5 + .../replica-set/one-result-default-port.json | 16 + .../replica-set/one-result-default-port.yml | 11 + .../one-txt-record-multiple-strings.json | 16 + .../one-txt-record-multiple-strings.yml | 11 + .../replica-set/one-txt-record.json | 17 + .../replica-set/one-txt-record.yml | 12 + .../replica-set/parent-part-mismatch1.json | 7 + .../replica-set/parent-part-mismatch1.yml | 5 + .../replica-set/parent-part-mismatch2.json | 7 + .../replica-set/parent-part-mismatch2.yml | 5 + .../replica-set/parent-part-mismatch3.json | 7 + .../replica-set/parent-part-mismatch3.yml | 5 + .../replica-set/parent-part-mismatch4.json | 7 + .../replica-set/parent-part-mismatch4.yml | 5 + .../replica-set/parent-part-mismatch5.json | 7 + .../replica-set/parent-part-mismatch5.yml | 5 + .../returned-parent-too-short.json | 7 + .../replica-set/returned-parent-too-short.yml | 5 + .../replica-set/returned-parent-wrong.json | 7 + .../replica-set/returned-parent-wrong.yml | 5 + .../replica-set/srv-service-name.json | 17 + .../replica-set/srv-service-name.yml | 12 + ...axHosts-conflicts_with_replicaSet-txt.json | 7 + ...MaxHosts-conflicts_with_replicaSet-txt.yml | 5 + ...srvMaxHosts-conflicts_with_replicaSet.json | 7 + .../srvMaxHosts-conflicts_with_replicaSet.yml | 5 + .../srvMaxHosts-equal_to_srv_records.json | 18 + .../srvMaxHosts-equal_to_srv_records.yml | 17 + .../srvMaxHosts-greater_than_srv_records.json | 17 + .../srvMaxHosts-greater_than_srv_records.yml | 16 + .../srvMaxHosts-less_than_srv_records.json | 14 + .../srvMaxHosts-less_than_srv_records.yml | 16 + .../replica-set/srvMaxHosts-zero-txt.json | 18 + .../replica-set/srvMaxHosts-zero-txt.yml | 16 + .../replica-set/srvMaxHosts-zero.json | 18 + .../replica-set/srvMaxHosts-zero.yml | 16 + .../replica-set/two-results-default-port.json | 17 + .../replica-set/two-results-default-port.yml | 12 + .../two-results-nonstandard-port.json | 17 + .../two-results-nonstandard-port.yml | 12 + .../replica-set/two-txt-records.json | 7 + .../replica-set/two-txt-records.yml | 5 + .../txt-record-not-allowed-option.json | 7 + .../txt-record-not-allowed-option.yml | 5 + ...txt-record-with-overridden-ssl-option.json | 17 + .../txt-record-with-overridden-ssl-option.yml | 12 + ...txt-record-with-overridden-uri-option.json | 17 + .../txt-record-with-overridden-uri-option.yml | 12 + .../txt-record-with-unallowed-option.json | 7 + .../txt-record-with-unallowed-option.yml | 5 + .../replica-set/uri-with-admin-database.json | 20 + .../replica-set/uri-with-admin-database.yml | 14 + .../replica-set/uri-with-auth.json | 22 + .../replica-set/uri-with-auth.yml | 19 + .../replica-set/uri-with-port.json | 7 + .../replica-set/uri-with-port.yml | 5 + .../replica-set/uri-with-two-hosts.json | 7 + .../replica-set/uri-with-two-hosts.yml | 5 + .../uri-with-uppercase-hostname.json | 16 + .../uri-with-uppercase-hostname.yml | 11 + .../srvMaxHosts-equal_to_srv_records.json | 17 + .../srvMaxHosts-equal_to_srv_records.yml | 14 + .../srvMaxHosts-greater_than_srv_records.json | 16 + 
.../srvMaxHosts-greater_than_srv_records.yml | 13 + .../srvMaxHosts-less_than_srv_records.json | 10 + .../srvMaxHosts-less_than_srv_records.yml | 11 + .../sharded/srvMaxHosts-zero.json | 16 + .../sharded/srvMaxHosts-zero.yml | 12 + testdata/kmip-certs/ca-ec.pem | 12 + testdata/kmip-certs/client-ec.pem | 18 + testdata/kmip-certs/server-ec.pem | 18 + testdata/load-balancers/cursors.json | 1238 +++ testdata/load-balancers/cursors.yml | 507 + testdata/load-balancers/event-monitoring.json | 184 + testdata/load-balancers/event-monitoring.yml | 99 + .../lb-connection-establishment.json | 58 + .../lb-connection-establishment.yml | 36 + .../non-lb-connection-establishment.json | 105 + .../non-lb-connection-establishment.yml | 61 + .../load-balancers/sdam-error-handling.json | 514 + .../load-balancers/sdam-error-handling.yml | 274 + testdata/load-balancers/server-selection.json | 82 + testdata/load-balancers/server-selection.yml | 50 + testdata/load-balancers/transactions.json | 1621 ++++ testdata/load-balancers/transactions.yml | 598 ++ .../load-balancers/wait-queue-timeouts.json | 153 + .../load-balancers/wait-queue-timeouts.yml | 82 + testdata/lorem.txt | 1 + .../DefaultNoMaxStaleness.json | 74 + .../DefaultNoMaxStaleness.yml | 26 + .../ReplicaSetNoPrimary/LastUpdateTime.json | 88 + .../ReplicaSetNoPrimary/LastUpdateTime.yml | 33 + .../MaxStalenessTooSmall.json | 20 + .../MaxStalenessTooSmall.yml | 15 + .../ReplicaSetNoPrimary/Nearest.json | 88 + .../ReplicaSetNoPrimary/Nearest.yml | 33 + .../ReplicaSetNoPrimary/Nearest2.json | 88 + .../ReplicaSetNoPrimary/Nearest2.yml | 33 + .../ReplicaSetNoPrimary/NoKnownServers.json | 21 + .../ReplicaSetNoPrimary/NoKnownServers.yml | 16 + .../ReplicaSetNoPrimary/PrimaryPreferred.json | 64 + .../ReplicaSetNoPrimary/PrimaryPreferred.yml | 27 + .../PrimaryPreferred_tags.json | 84 + .../PrimaryPreferred_tags.yml | 36 + .../ReplicaSetNoPrimary/Secondary.json | 111 + .../ReplicaSetNoPrimary/Secondary.yml | 51 + .../SecondaryPreferred.json | 63 + .../SecondaryPreferred.yml | 26 + .../SecondaryPreferred_tags.json | 111 + .../SecondaryPreferred_tags.yml | 51 + .../ReplicaSetNoPrimary/ZeroMaxStaleness.json | 36 + .../ReplicaSetNoPrimary/ZeroMaxStaleness.yml | 23 + .../DefaultNoMaxStaleness.json | 74 + .../DefaultNoMaxStaleness.yml | 26 + .../ReplicaSetWithPrimary/LastUpdateTime.json | 88 + .../ReplicaSetWithPrimary/LastUpdateTime.yml | 35 + .../ReplicaSetWithPrimary/LongHeartbeat.json | 76 + .../ReplicaSetWithPrimary/LongHeartbeat.yml | 29 + .../ReplicaSetWithPrimary/LongHeartbeat2.json | 37 + .../ReplicaSetWithPrimary/LongHeartbeat2.yml | 25 + .../MaxStalenessTooSmall.json | 37 + .../MaxStalenessTooSmall.yml | 26 + .../MaxStalenessWithModePrimary.json | 35 + .../MaxStalenessWithModePrimary.yml | 23 + .../ReplicaSetWithPrimary/Nearest.json | 88 + .../ReplicaSetWithPrimary/Nearest.yml | 33 + .../ReplicaSetWithPrimary/Nearest2.json | 88 + .../ReplicaSetWithPrimary/Nearest2.yml | 33 + .../ReplicaSetWithPrimary/Nearest_tags.json | 84 + .../ReplicaSetWithPrimary/Nearest_tags.yml | 36 + .../PrimaryPreferred.json | 64 + .../PrimaryPreferred.yml | 27 + .../SecondaryPreferred.json | 63 + .../SecondaryPreferred.yml | 26 + .../SecondaryPreferred_tags.json | 138 + .../SecondaryPreferred_tags.yml | 59 + .../SecondaryPreferred_tags2.json | 96 + .../SecondaryPreferred_tags2.yml | 43 + .../ReplicaSetWithPrimary/Secondary_tags.json | 138 + .../ReplicaSetWithPrimary/Secondary_tags.yml | 59 + .../Secondary_tags2.json | 96 + .../ReplicaSetWithPrimary/Secondary_tags2.yml | 43 + 
.../ZeroMaxStaleness.json | 36 + .../ZeroMaxStaleness.yml | 23 + .../Sharded/SmallMaxStaleness.json | 76 + .../Sharded/SmallMaxStaleness.yml | 28 + .../Single/SmallMaxStaleness.json | 52 + .../Single/SmallMaxStaleness.yml | 20 + .../Unknown/SmallMaxStaleness.json | 19 + .../Unknown/SmallMaxStaleness.yml | 15 + testdata/mongocrypt/collection-info.json | 37 + testdata/mongocrypt/command-reply.json | 13 + testdata/mongocrypt/command.json | 6 + .../mongocrypt/encrypted-command-reply.json | 16 + testdata/mongocrypt/encrypted-command.json | 11 + testdata/mongocrypt/encrypted-value.json | 6 + testdata/mongocrypt/json-schema.json | 15 + testdata/mongocrypt/key-document.json | 36 + .../mongocrypt/key-filter-keyAltName.json | 14 + testdata/mongocrypt/key-filter.json | 19 + testdata/mongocrypt/kms-reply.txt | 6 + .../mongocrypt/list-collections-filter.json | 3 + testdata/mongocrypt/local-key-document.json | 24 + .../mongocrypt/mongocryptd-command-local.json | 22 + .../mongocryptd-command-remote.json | 22 + testdata/mongocrypt/mongocryptd-reply.json | 18 + .../connection-string/read-concern.json | 56 + .../connection-string/read-concern.yml | 37 + .../connection-string/write-concern.json | 122 + .../connection-string/write-concern.yml | 81 + .../document/read-concern.json | 66 + .../document/read-concern.yml | 37 + .../document/write-concern.json | 178 + .../document/write-concern.yml | 102 + .../operation/default-write-concern-2.6.json | 544 ++ .../operation/default-write-concern-2.6.yml | 215 + .../operation/default-write-concern-3.2.json | 125 + .../operation/default-write-concern-3.2.yml | 58 + .../operation/default-write-concern-3.4.json | 216 + .../operation/default-write-concern-3.4.yml | 95 + .../operation/default-write-concern-4.2.json | 87 + .../operation/default-write-concern-4.2.yml | 36 + .../legacy/aggregate-merge.json | 98 + .../legacy/aggregate-merge.yml | 39 + .../legacy/aggregate-serverErrors.json | 1208 +++ .../legacy/aggregate-serverErrors.yml | 157 + .../retryable-reads/legacy/aggregate.json | 406 + testdata/retryable-reads/legacy/aggregate.yml | 87 + ...angeStreams-client.watch-serverErrors.json | 740 ++ ...hangeStreams-client.watch-serverErrors.yml | 150 + .../legacy/changeStreams-client.watch.json | 209 + .../legacy/changeStreams-client.watch.yml | 62 + ...ngeStreams-db.coll.watch-serverErrors.json | 690 ++ ...angeStreams-db.coll.watch-serverErrors.yml | 150 + .../legacy/changeStreams-db.coll.watch.json | 197 + .../legacy/changeStreams-db.coll.watch.yml | 66 + .../changeStreams-db.watch-serverErrors.json | 690 ++ .../changeStreams-db.watch-serverErrors.yml | 154 + .../legacy/changeStreams-db.watch.json | 197 + .../legacy/changeStreams-db.watch.yml | 62 + .../legacy/count-serverErrors.json | 586 ++ .../legacy/count-serverErrors.yml | 150 + testdata/retryable-reads/legacy/count.json | 179 + testdata/retryable-reads/legacy/count.yml | 64 + .../legacy/countDocuments-serverErrors.json | 911 ++ .../legacy/countDocuments-serverErrors.yml | 150 + .../legacy/countDocuments.json | 257 + .../retryable-reads/legacy/countDocuments.yml | 64 + .../legacy/distinct-serverErrors.json | 838 ++ .../legacy/distinct-serverErrors.yml | 156 + testdata/retryable-reads/legacy/distinct.json | 245 + testdata/retryable-reads/legacy/distinct.yml | 71 + .../estimatedDocumentCount-serverErrors.json | 546 ++ .../estimatedDocumentCount-serverErrors.yml | 148 + .../legacy/estimatedDocumentCount.json | 166 + .../legacy/estimatedDocumentCount.yml | 62 + .../legacy/find-serverErrors.json | 962 ++ 
.../legacy/find-serverErrors.yml | 160 + testdata/retryable-reads/legacy/find.json | 348 + testdata/retryable-reads/legacy/find.yml | 86 + .../legacy/findOne-serverErrors.json | 732 ++ .../legacy/findOne-serverErrors.yml | 154 + testdata/retryable-reads/legacy/findOne.json | 223 + testdata/retryable-reads/legacy/findOne.yml | 68 + .../legacy/gridfs-download-serverErrors.json | 925 ++ .../legacy/gridfs-download-serverErrors.yml | 173 + .../legacy/gridfs-download.json | 270 + .../legacy/gridfs-download.yml | 79 + .../gridfs-downloadByName-serverErrors.json | 849 ++ .../gridfs-downloadByName-serverErrors.yml | 174 + .../legacy/gridfs-downloadByName.json | 250 + .../legacy/gridfs-downloadByName.yml | 79 + .../listCollectionNames-serverErrors.json | 502 + .../listCollectionNames-serverErrors.yml | 143 + .../legacy/listCollectionNames.json | 150 + .../legacy/listCollectionNames.yml | 59 + .../listCollectionObjects-serverErrors.json | 502 + .../listCollectionObjects-serverErrors.yml | 144 + .../legacy/listCollectionObjects.json | 150 + .../legacy/listCollectionObjects.yml | 59 + .../legacy/listCollections-serverErrors.json | 502 + .../legacy/listCollections-serverErrors.yml | 143 + .../legacy/listCollections.json | 150 + .../legacy/listCollections.yml | 59 + .../listDatabaseNames-serverErrors.json | 502 + .../legacy/listDatabaseNames-serverErrors.yml | 143 + .../legacy/listDatabaseNames.json | 150 + .../legacy/listDatabaseNames.yml | 59 + .../listDatabaseObjects-serverErrors.json | 502 + .../listDatabaseObjects-serverErrors.yml | 144 + .../legacy/listDatabaseObjects.json | 150 + .../legacy/listDatabaseObjects.yml | 59 + .../legacy/listDatabases-serverErrors.json | 502 + .../legacy/listDatabases-serverErrors.yml | 144 + .../retryable-reads/legacy/listDatabases.json | 150 + .../retryable-reads/legacy/listDatabases.yml | 59 + .../legacy/listIndexNames-serverErrors.json | 527 + .../legacy/listIndexNames-serverErrors.yml | 144 + .../legacy/listIndexNames.json | 156 + .../retryable-reads/legacy/listIndexNames.yml | 60 + .../legacy/listIndexes-serverErrors.json | 527 + .../legacy/listIndexes-serverErrors.yml | 145 + .../retryable-reads/legacy/listIndexes.json | 156 + .../retryable-reads/legacy/listIndexes.yml | 60 + .../retryable-reads/legacy/mapReduce.json | 188 + testdata/retryable-reads/legacy/mapReduce.yml | 60 + .../readConcernMajorityNotAvailableYet.json | 147 + .../readConcernMajorityNotAvailableYet.yml | 68 + .../legacy/bulkWrite-errorLabels.json | 183 + .../legacy/bulkWrite-errorLabels.yml | 77 + .../legacy/bulkWrite-serverErrors.json | 273 + .../legacy/bulkWrite-serverErrors.yml | 130 + .../retryable-writes/legacy/bulkWrite.json | 806 ++ .../retryable-writes/legacy/bulkWrite.yml | 396 + .../retryable-writes/legacy/deleteMany.json | 42 + .../retryable-writes/legacy/deleteMany.yml | 22 + .../legacy/deleteOne-errorLabels.json | 107 + .../legacy/deleteOne-errorLabels.yml | 48 + .../legacy/deleteOne-serverErrors.json | 153 + .../legacy/deleteOne-serverErrors.yml | 73 + .../retryable-writes/legacy/deleteOne.json | 120 + .../retryable-writes/legacy/deleteOne.yml | 57 + .../legacy/findOneAndDelete-errorLabels.json | 118 + .../legacy/findOneAndDelete-errorLabels.yml | 49 + .../legacy/findOneAndDelete-serverErrors.json | 170 + .../legacy/findOneAndDelete-serverErrors.yml | 74 + .../legacy/findOneAndDelete.json | 137 + .../legacy/findOneAndDelete.yml | 58 + .../legacy/findOneAndReplace-errorLabels.json | 122 + .../legacy/findOneAndReplace-errorLabels.yml | 52 + .../findOneAndReplace-serverErrors.json | 
178 + .../legacy/findOneAndReplace-serverErrors.yml | 80 + .../legacy/findOneAndReplace.json | 145 + .../legacy/findOneAndReplace.yml | 63 + .../legacy/findOneAndUpdate-errorLabels.json | 124 + .../legacy/findOneAndUpdate-errorLabels.yml | 52 + .../legacy/findOneAndUpdate-serverErrors.json | 181 + .../legacy/findOneAndUpdate-serverErrors.yml | 79 + .../legacy/findOneAndUpdate.json | 147 + .../legacy/findOneAndUpdate.yml | 62 + .../legacy/insertMany-errorLabels.json | 130 + .../legacy/insertMany-errorLabels.yml | 54 + .../legacy/insertMany-serverErrors.json | 197 + .../legacy/insertMany-serverErrors.yml | 84 + .../retryable-writes/legacy/insertMany.json | 163 + .../retryable-writes/legacy/insertMany.yml | 74 + .../legacy/insertOne-errorLabels.json | 91 + .../legacy/insertOne-errorLabels.yml | 44 + .../legacy/insertOne-serverErrors.json | 1162 +++ .../legacy/insertOne-serverErrors.yml | 527 + .../retryable-writes/legacy/insertOne.json | 139 + .../retryable-writes/legacy/insertOne.yml | 61 + .../legacy/replaceOne-errorLabels.json | 121 + .../legacy/replaceOne-errorLabels.yml | 53 + .../legacy/replaceOne-serverErrors.json | 177 + .../legacy/replaceOne-serverErrors.yml | 82 + .../retryable-writes/legacy/replaceOne.json | 144 + .../retryable-writes/legacy/replaceOne.yml | 66 + .../retryable-writes/legacy/updateMany.json | 58 + .../retryable-writes/legacy/updateMany.yml | 27 + .../legacy/updateOne-errorLabels.json | 123 + .../legacy/updateOne-errorLabels.yml | 53 + .../legacy/updateOne-serverErrors.json | 180 + .../legacy/updateOne-serverErrors.yml | 82 + .../retryable-writes/legacy/updateOne.json | 288 + .../retryable-writes/legacy/updateOne.yml | 129 + .../unified/bulkWrite-serverErrors.json | 285 + .../unified/bulkWrite-serverErrors.yml | 136 + .../client-bulkWrite-clientErrors.json | 351 + .../unified/client-bulkWrite-clientErrors.yml | 173 + .../client-bulkWrite-serverErrors.json | 873 ++ .../unified/client-bulkWrite-serverErrors.yml | 413 + .../insertOne-noWritesPerformedError.yml | 54 + .../insertOne-noWritesPerformedErrors.json | 90 + .../unified/insertOne-serverErrors.json | 865 ++ .../unified/insertOne-serverErrors.yml | 406 + testdata/run-command/runCommand.json | 634 ++ testdata/run-command/runCommand.yml | 319 + testdata/run-command/runCursorCommand.json | 877 ++ testdata/run-command/runCursorCommand.yml | 391 + .../errors/error_handling_handshake.json | 113 + .../errors/error_handling_handshake.yml | 55 + .../errors/generate-error-tests.py | 172 + .../errors/non-stale-network-error.json | 80 + .../errors/non-stale-network-error.yml | 47 + .../non-stale-network-timeout-error.json | 88 + .../non-stale-network-timeout-error.yml | 38 + ...Version-greater-InterruptedAtShutdown.json | 100 + ...yVersion-greater-InterruptedAtShutdown.yml | 61 + ...eater-InterruptedDueToReplStateChange.json | 100 + ...reater-InterruptedDueToReplStateChange.yml | 61 + ...ologyVersion-greater-LegacyNotPrimary.json | 100 + ...pologyVersion-greater-LegacyNotPrimary.yml | 61 + ...rsion-greater-NotPrimaryNoSecondaryOk.json | 100 + ...ersion-greater-NotPrimaryNoSecondaryOk.yml | 61 + ...Version-greater-NotPrimaryOrSecondary.json | 100 + ...yVersion-greater-NotPrimaryOrSecondary.yml | 61 + ...ogyVersion-greater-NotWritablePrimary.json | 100 + ...logyVersion-greater-NotWritablePrimary.yml | 61 + ...ogyVersion-greater-PrimarySteppedDown.json | 100 + ...logyVersion-greater-PrimarySteppedDown.yml | 61 + ...ogyVersion-greater-ShutdownInProgress.json | 100 + ...logyVersion-greater-ShutdownInProgress.yml | 61 + 
...Version-missing-InterruptedAtShutdown.json | 85 + ...yVersion-missing-InterruptedAtShutdown.yml | 52 + ...ssing-InterruptedDueToReplStateChange.json | 85 + ...issing-InterruptedDueToReplStateChange.yml | 52 + ...ologyVersion-missing-LegacyNotPrimary.json | 85 + ...pologyVersion-missing-LegacyNotPrimary.yml | 52 + ...rsion-missing-NotPrimaryNoSecondaryOk.json | 85 + ...ersion-missing-NotPrimaryNoSecondaryOk.yml | 52 + ...Version-missing-NotPrimaryOrSecondary.json | 85 + ...yVersion-missing-NotPrimaryOrSecondary.yml | 52 + ...ogyVersion-missing-NotWritablePrimary.json | 85 + ...logyVersion-missing-NotWritablePrimary.yml | 52 + ...ogyVersion-missing-PrimarySteppedDown.json | 85 + ...logyVersion-missing-PrimarySteppedDown.yml | 52 + ...ogyVersion-missing-ShutdownInProgress.json | 85 + ...logyVersion-missing-ShutdownInProgress.yml | 52 + ...ccessId-changed-InterruptedAtShutdown.json | 100 + ...occessId-changed-InterruptedAtShutdown.yml | 61 + ...anged-InterruptedDueToReplStateChange.json | 100 + ...hanged-InterruptedDueToReplStateChange.yml | 61 + ...n-proccessId-changed-LegacyNotPrimary.json | 100 + ...on-proccessId-changed-LegacyNotPrimary.yml | 61 + ...essId-changed-NotPrimaryNoSecondaryOk.json | 100 + ...cessId-changed-NotPrimaryNoSecondaryOk.yml | 61 + ...ccessId-changed-NotPrimaryOrSecondary.json | 100 + ...occessId-changed-NotPrimaryOrSecondary.yml | 61 + ...proccessId-changed-NotWritablePrimary.json | 100 + ...-proccessId-changed-NotWritablePrimary.yml | 61 + ...proccessId-changed-PrimarySteppedDown.json | 100 + ...-proccessId-changed-PrimarySteppedDown.yml | 61 + ...proccessId-changed-ShutdownInProgress.json | 100 + ...-proccessId-changed-ShutdownInProgress.yml | 61 + .../non-stale-topologyVersion.yml.template | 52 + .../errors/post-42-InterruptedAtShutdown.json | 70 + .../errors/post-42-InterruptedAtShutdown.yml | 47 + ...st-42-InterruptedDueToReplStateChange.json | 70 + ...ost-42-InterruptedDueToReplStateChange.yml | 47 + .../errors/post-42-LegacyNotPrimary.json | 70 + .../errors/post-42-LegacyNotPrimary.yml | 47 + .../post-42-NotPrimaryNoSecondaryOk.json | 70 + .../post-42-NotPrimaryNoSecondaryOk.yml | 47 + .../errors/post-42-NotPrimaryOrSecondary.json | 70 + .../errors/post-42-NotPrimaryOrSecondary.yml | 47 + .../errors/post-42-NotWritablePrimary.json | 70 + .../errors/post-42-NotWritablePrimary.yml | 47 + .../errors/post-42-PrimarySteppedDown.json | 70 + .../errors/post-42-PrimarySteppedDown.yml | 47 + .../errors/post-42-ShutdownInProgress.json | 70 + .../errors/post-42-ShutdownInProgress.yml | 47 + .../errors/post-42.yml.template | 47 + .../errors/pre-42-InterruptedAtShutdown.json | 70 + .../errors/pre-42-InterruptedAtShutdown.yml | 47 + ...re-42-InterruptedDueToReplStateChange.json | 70 + ...pre-42-InterruptedDueToReplStateChange.yml | 47 + .../errors/pre-42-LegacyNotPrimary.json | 70 + .../errors/pre-42-LegacyNotPrimary.yml | 47 + .../pre-42-NotPrimaryNoSecondaryOk.json | 70 + .../errors/pre-42-NotPrimaryNoSecondaryOk.yml | 47 + .../errors/pre-42-NotPrimaryOrSecondary.json | 70 + .../errors/pre-42-NotPrimaryOrSecondary.yml | 47 + .../errors/pre-42-NotWritablePrimary.json | 70 + .../errors/pre-42-NotWritablePrimary.yml | 47 + .../errors/pre-42-PrimarySteppedDown.json | 70 + .../errors/pre-42-PrimarySteppedDown.yml | 47 + .../errors/pre-42-ShutdownInProgress.json | 70 + .../errors/pre-42-ShutdownInProgress.yml | 47 + .../errors/pre-42.yml.template | 47 + .../errors/prefer-error-code.json | 131 + .../errors/prefer-error-code.yml | 54 + 
...tale-generation-InterruptedAtShutdown.json | 176 + ...stale-generation-InterruptedAtShutdown.yml | 91 + ...ation-InterruptedDueToReplStateChange.json | 176 + ...ration-InterruptedDueToReplStateChange.yml | 91 + ...le-generation-NotPrimaryNoSecondaryOk.json | 176 + ...ale-generation-NotPrimaryNoSecondaryOk.yml | 91 + ...tale-generation-NotPrimaryOrSecondary.json | 176 + ...stale-generation-NotPrimaryOrSecondary.yml | 91 + .../stale-generation-NotWritablePrimary.json | 176 + .../stale-generation-NotWritablePrimary.yml | 91 + .../stale-generation-PrimarySteppedDown.json | 176 + .../stale-generation-PrimarySteppedDown.yml | 91 + .../stale-generation-ShutdownInProgress.json | 176 + .../stale-generation-ShutdownInProgress.yml | 91 + ...dshakeCompletes-InterruptedAtShutdown.json | 176 + ...ndshakeCompletes-InterruptedAtShutdown.yml | 91 + ...letes-InterruptedDueToReplStateChange.json | 176 + ...pletes-InterruptedDueToReplStateChange.yml | 91 + ...erHandshakeCompletes-LegacyNotPrimary.json | 176 + ...terHandshakeCompletes-LegacyNotPrimary.yml | 91 + ...hakeCompletes-NotPrimaryNoSecondaryOk.json | 176 + ...shakeCompletes-NotPrimaryNoSecondaryOk.yml | 91 + ...dshakeCompletes-NotPrimaryOrSecondary.json | 176 + ...ndshakeCompletes-NotPrimaryOrSecondary.yml | 91 + ...HandshakeCompletes-NotWritablePrimary.json | 176 + ...rHandshakeCompletes-NotWritablePrimary.yml | 91 + ...HandshakeCompletes-PrimarySteppedDown.json | 176 + ...rHandshakeCompletes-PrimarySteppedDown.yml | 91 + ...HandshakeCompletes-ShutdownInProgress.json | 176 + ...rHandshakeCompletes-ShutdownInProgress.yml | 91 + ...ation-afterHandshakeCompletes-network.json | 163 + ...ration-afterHandshakeCompletes-network.yml | 82 + ...ation-afterHandshakeCompletes-timeout.json | 163 + ...ration-afterHandshakeCompletes-timeout.yml | 82 + ...dshakeCompletes-InterruptedAtShutdown.json | 176 + ...ndshakeCompletes-InterruptedAtShutdown.yml | 91 + ...letes-InterruptedDueToReplStateChange.json | 176 + ...pletes-InterruptedDueToReplStateChange.yml | 91 + ...reHandshakeCompletes-LegacyNotPrimary.json | 176 + ...oreHandshakeCompletes-LegacyNotPrimary.yml | 91 + ...hakeCompletes-NotPrimaryNoSecondaryOk.json | 176 + ...shakeCompletes-NotPrimaryNoSecondaryOk.yml | 91 + ...dshakeCompletes-NotPrimaryOrSecondary.json | 176 + ...ndshakeCompletes-NotPrimaryOrSecondary.yml | 91 + ...HandshakeCompletes-NotWritablePrimary.json | 176 + ...eHandshakeCompletes-NotWritablePrimary.yml | 91 + ...HandshakeCompletes-PrimarySteppedDown.json | 176 + ...eHandshakeCompletes-PrimarySteppedDown.yml | 91 + ...HandshakeCompletes-ShutdownInProgress.json | 176 + ...eHandshakeCompletes-ShutdownInProgress.yml | 91 + ...tion-beforeHandshakeCompletes-network.json | 163 + ...ation-beforeHandshakeCompletes-network.yml | 82 + ...tion-beforeHandshakeCompletes-timeout.json | 163 + ...ation-beforeHandshakeCompletes-timeout.yml | 82 + .../errors/stale-generation.yml.template | 81 + ...topologyVersion-InterruptedAtShutdown.json | 147 + ...-topologyVersion-InterruptedAtShutdown.yml | 65 + ...rsion-InterruptedDueToReplStateChange.json | 147 + ...ersion-InterruptedDueToReplStateChange.yml | 65 + ...tale-topologyVersion-LegacyNotPrimary.json | 147 + ...stale-topologyVersion-LegacyNotPrimary.yml | 65 + ...pologyVersion-NotPrimaryNoSecondaryOk.json | 147 + ...opologyVersion-NotPrimaryNoSecondaryOk.yml | 65 + ...topologyVersion-NotPrimaryOrSecondary.json | 147 + ...-topologyVersion-NotPrimaryOrSecondary.yml | 65 + ...le-topologyVersion-NotWritablePrimary.json | 147 + 
...ale-topologyVersion-NotWritablePrimary.yml | 65 + ...le-topologyVersion-PrimarySteppedDown.json | 147 + ...ale-topologyVersion-PrimarySteppedDown.yml | 65 + ...le-topologyVersion-ShutdownInProgress.json | 147 + ...ale-topologyVersion-ShutdownInProgress.yml | 65 + .../errors/stale-topologyVersion.yml.template | 65 + .../errors/write_errors_ignored.json | 98 + .../errors/write_errors_ignored.yml | 42 + .../rediscover-quickly-after-step-down.json | 165 + .../rediscover-quickly-after-step-down.yml | 98 + .../load-balanced/discover_load_balancer.json | 28 + .../load-balanced/discover_load_balancer.yml | 25 + .../monitoring/discovered_standalone.json | 105 + .../monitoring/discovered_standalone.yml | 70 + .../monitoring/load_balancer.json | 93 + .../monitoring/load_balancer.yml | 65 + .../replica_set_with_no_primary.json | 151 + .../replica_set_with_no_primary.yml | 113 + .../monitoring/replica_set_with_primary.json | 150 + .../monitoring/replica_set_with_primary.yml | 112 + .../monitoring/replica_set_with_removal.json | 161 + .../monitoring/replica_set_with_removal.yml | 111 + .../monitoring/required_replica_set.json | 152 + .../monitoring/required_replica_set.yml | 108 + .../monitoring/standalone.json | 105 + .../monitoring/standalone.yml | 70 + ...ne_suppress_equal_description_changes.json | 115 + ...one_suppress_equal_description_changes.yml | 73 + .../rs/compatible.json | 57 + .../rs/compatible.yml | 45 + .../rs/compatible_unknown.json | 40 + .../rs/compatible_unknown.yml | 34 + .../rs/discover_arbiters.json | 42 + .../rs/discover_arbiters.yml | 44 + .../rs/discover_arbiters_replicaset.json | 42 + .../rs/discover_arbiters_replicaset.yml | 44 + .../rs/discover_ghost.json | 32 + .../rs/discover_ghost.yml | 36 + .../rs/discover_ghost_replicaset.json | 36 + .../rs/discover_ghost_replicaset.yml | 42 + .../rs/discover_hidden.json | 46 + .../rs/discover_hidden.yml | 51 + .../rs/discover_hidden_replicaset.json | 46 + .../rs/discover_hidden_replicaset.yml | 51 + .../rs/discover_passives.json | 80 + .../rs/discover_passives.yml | 83 + .../rs/discover_passives_replicaset.json | 80 + .../rs/discover_passives_replicaset.yml | 83 + .../rs/discover_primary.json | 40 + .../rs/discover_primary.yml | 43 + .../rs/discover_primary_replicaset.json | 40 + .../rs/discover_primary_replicaset.yml | 43 + .../rs/discover_rsother.json | 45 + .../rs/discover_rsother.yml | 50 + .../rs/discover_rsother_replicaset.json | 66 + .../rs/discover_rsother_replicaset.yml | 68 + .../rs/discover_secondary.json | 41 + .../rs/discover_secondary.yml | 44 + .../rs/discover_secondary_replicaset.json | 41 + .../rs/discover_secondary_replicaset.yml | 44 + .../rs/discovery.json | 179 + .../rs/discovery.yml | 207 + .../rs/electionId_precedence_setVersion.json | 92 + .../rs/electionId_precedence_setVersion.yml | 62 + .../rs/equal_electionids.json | 73 + .../rs/equal_electionids.yml | 57 + .../rs/hosts_differ_from_seeds.json | 35 + .../rs/hosts_differ_from_seeds.yml | 37 + .../rs/incompatible_arbiter.json | 56 + .../rs/incompatible_arbiter.yml | 36 + .../rs/incompatible_ghost.json | 51 + .../rs/incompatible_ghost.yml | 34 + .../rs/incompatible_other.json | 56 + .../rs/incompatible_other.yml | 36 + .../rs/ls_timeout.json | 279 + .../rs/ls_timeout.yml | 249 + .../rs/member_reconfig.json | 69 + .../rs/member_reconfig.yml | 74 + .../rs/member_standalone.json | 60 + .../rs/member_standalone.yml | 66 + .../rs/new_primary.json | 74 + .../rs/new_primary.yml | 80 + .../rs/new_primary_new_electionid.json | 147 + 
.../rs/new_primary_new_electionid.yml | 119 + .../rs/new_primary_new_setversion.json | 147 + .../rs/new_primary_new_setversion.yml | 119 + .../rs/new_primary_wrong_set_name.json | 69 + .../rs/new_primary_wrong_set_name.yml | 77 + .../rs/non_rs_member.json | 30 + .../rs/non_rs_member.yml | 33 + .../rs/normalize_case.json | 49 + .../rs/normalize_case.yml | 52 + .../rs/normalize_case_me.json | 95 + .../rs/normalize_case_me.yml | 102 + .../rs/null_election_id-pre-6.0.json | 203 + .../rs/null_election_id-pre-6.0.yml | 175 + .../rs/null_election_id.json | 209 + .../rs/null_election_id.yml | 177 + .../rs/primary_becomes_ghost.json | 61 + .../rs/primary_becomes_ghost.yml | 65 + .../rs/primary_becomes_mongos.json | 56 + .../rs/primary_becomes_mongos.yml | 58 + .../rs/primary_becomes_standalone.json | 53 + .../rs/primary_becomes_standalone.yml | 55 + .../rs/primary_changes_set_name.json | 59 + .../rs/primary_changes_set_name.yml | 63 + .../rs/primary_disconnect.json | 54 + .../rs/primary_disconnect.yml | 59 + .../rs/primary_disconnect_electionid.json | 235 + .../rs/primary_disconnect_electionid.yml | 189 + .../rs/primary_disconnect_setversion.json | 235 + .../rs/primary_disconnect_setversion.yml | 189 + ...int_from_secondary_with_mismatched_me.json | 68 + ...hint_from_secondary_with_mismatched_me.yml | 64 + .../rs/primary_mismatched_me.json | 41 + .../rs/primary_mismatched_me.yml | 27 + .../rs/primary_mismatched_me_not_removed.json | 79 + .../rs/primary_mismatched_me_not_removed.yml | 75 + .../rs/primary_reports_new_member.json | 155 + .../rs/primary_reports_new_member.yml | 175 + .../primary_to_no_primary_mismatched_me.json | 76 + .../primary_to_no_primary_mismatched_me.yml | 81 + .../rs/primary_wrong_set_name.json | 30 + .../rs/primary_wrong_set_name.yml | 30 + .../rs/repeated.json | 144 + .../rs/repeated.yml | 105 + .../rs/replicaset_rsnp.json | 26 + .../rs/replicaset_rsnp.yml | 21 + .../rs/response_from_removed.json | 66 + .../rs/response_from_removed.yml | 69 + .../rs/sec_not_auth.json | 56 + .../rs/sec_not_auth.yml | 55 + .../rs/secondary_ignore_ok_0-pre-6.0.json | 83 + .../rs/secondary_ignore_ok_0-pre-6.0.yml | 87 + .../rs/secondary_ignore_ok_0.json | 83 + .../rs/secondary_ignore_ok_0.yml | 87 + .../rs/secondary_mismatched_me.json | 41 + .../rs/secondary_mismatched_me.yml | 28 + .../rs/secondary_wrong_set_name.json | 31 + .../rs/secondary_wrong_set_name.yml | 31 + ...secondary_wrong_set_name_with_primary.json | 71 + .../secondary_wrong_set_name_with_primary.yml | 75 + .../rs/set_version_can_rollback.json | 147 + .../rs/set_version_can_rollback.yml | 101 + ...tversion_equal_max_without_electionid.json | 84 + ...etversion_equal_max_without_electionid.yml | 78 + ...on_greaterthan_max_without_electionid.json | 84 + ...ion_greaterthan_max_without_electionid.yml | 79 + ...setversion_without_electionid-pre-6.0.json | 84 + .../setversion_without_electionid-pre-6.0.yml | 79 + .../rs/setversion_without_electionid.json | 84 + .../rs/setversion_without_electionid.yml | 78 + .../rs/stepdown_change_set_name.json | 60 + .../rs/stepdown_change_set_name.yml | 65 + .../rs/too_new.json | 57 + .../rs/too_new.yml | 45 + .../rs/too_old.json | 57 + .../rs/too_old.yml | 43 + .../rs/topology_version_equal.json | 101 + .../rs/topology_version_equal.yml | 68 + .../rs/topology_version_greater.json | 259 + .../rs/topology_version_greater.yml | 194 + .../rs/topology_version_less.json | 97 + .../rs/topology_version_less.yml | 64 + .../rs/unexpected_mongos.json | 27 + .../rs/unexpected_mongos.yml | 29 + 
...setversion_without_electionid-pre-6.0.json | 138 + ..._setversion_without_electionid-pre-6.0.yml | 117 + .../rs/use_setversion_without_electionid.json | 144 + .../rs/use_setversion_without_electionid.yml | 119 + .../rs/wrong_set_name.json | 37 + .../rs/wrong_set_name.yml | 38 + .../sharded/compatible.json | 48 + .../sharded/compatible.yml | 40 + .../sharded/discover_single_mongos.json | 31 + .../sharded/discover_single_mongos.yml | 24 + .../sharded/ls_timeout_mongos.json | 91 + .../sharded/ls_timeout_mongos.yml | 101 + .../sharded/mongos_disconnect.json | 100 + .../sharded/mongos_disconnect.yml | 113 + .../sharded/multiple_mongoses.json | 47 + .../sharded/multiple_mongoses.yml | 52 + .../sharded/non_mongos_removed.json | 46 + .../sharded/non_mongos_removed.yml | 47 + .../sharded/normalize_uri_case.json | 24 + .../sharded/normalize_uri_case.yml | 32 + .../sharded/too_new.json | 48 + .../sharded/too_new.yml | 40 + .../sharded/too_old.json | 46 + .../sharded/too_old.yml | 38 + .../single/compatible.json | 32 + .../single/compatible.yml | 27 + .../single/direct_connection_external_ip.json | 35 + .../single/direct_connection_external_ip.yml | 37 + .../single/direct_connection_mongos.json | 32 + .../single/direct_connection_mongos.yml | 36 + .../single/direct_connection_replicaset.json | 32 + .../single/direct_connection_replicaset.yml | 23 + .../single/direct_connection_rsarbiter.json | 37 + .../single/direct_connection_rsarbiter.yml | 38 + .../single/direct_connection_rsprimary.json | 36 + .../single/direct_connection_rsprimary.yml | 37 + .../single/direct_connection_rssecondary.json | 37 + .../single/direct_connection_rssecondary.yml | 38 + .../single/direct_connection_standalone.json | 31 + .../single/direct_connection_standalone.yml | 35 + .../direct_connection_unavailable_seed.json | 25 + .../direct_connection_unavailable_seed.yml | 28 + .../direct_connection_wrong_set_name.json | 65 + .../direct_connection_wrong_set_name.yml | 40 + .../single/discover_standalone.json | 31 + .../single/discover_standalone.yml | 35 + .../single/discover_unavailable_seed.json | 25 + .../single/discover_unavailable_seed.yml | 28 + .../single/ls_timeout_standalone.json | 32 + .../single/ls_timeout_standalone.yml | 36 + .../single/not_ok_response.json | 41 + .../single/not_ok_response.yml | 44 + .../single/standalone_removed.json | 31 + .../single/standalone_removed.yml | 35 + .../single/standalone_using_legacy_hello.json | 30 + .../single/standalone_using_legacy_hello.yml | 34 + .../single/too_new.json | 32 + .../single/too_new.yml | 27 + .../single/too_old.json | 30 + .../single/too_old.yml | 25 + .../single/too_old_then_upgraded.json | 56 + .../single/too_old_then_upgraded.yml | 48 + .../unified/auth-error.json | 230 + .../unified/auth-error.yml | 130 + .../unified/auth-misc-command-error.json | 230 + .../unified/auth-misc-command-error.yml | 132 + .../unified/auth-network-error.json | 230 + .../unified/auth-network-error.yml | 132 + .../unified/auth-network-timeout-error.json | 233 + .../unified/auth-network-timeout-error.yml | 138 + .../unified/auth-shutdown-error.json | 230 + .../unified/auth-shutdown-error.yml | 133 + .../unified/cancel-server-check.json | 201 + .../unified/cancel-server-check.yml | 143 + .../unified/connectTimeoutMS.json | 221 + .../unified/connectTimeoutMS.yml | 130 + .../unified/find-network-error.json | 234 + .../unified/find-network-error.yml | 135 + .../unified/find-network-timeout-error.json | 199 + .../unified/find-network-timeout-error.yml | 119 + 
.../unified/find-shutdown-error.json | 251 + .../unified/find-shutdown-error.yml | 163 + .../unified/hello-command-error.json | 376 + .../unified/hello-command-error.yml | 233 + .../unified/hello-network-error.json | 346 + .../unified/hello-network-error.yml | 227 + .../unified/hello-timeout.json | 514 + .../unified/insert-network-error.json | 246 + .../unified/insert-network-error.yml | 137 + .../unified/insert-shutdown-error.json | 250 + .../unified/insert-shutdown-error.yml | 162 + .../unified/interruptInUse-pool-clear.json | 591 ++ .../unified/interruptInUse-pool-clear.yml | 340 + .../unified/logging-loadbalanced.json | 150 + .../unified/logging-loadbalanced.yml | 74 + .../unified/logging-replicaset.json | 590 ++ .../unified/logging-replicaset.yml | 296 + .../unified/logging-sharded.json | 476 + .../unified/logging-sharded.yml | 255 + .../unified/logging-standalone.json | 501 + .../unified/logging-standalone.yml | 265 + .../unified/minPoolSize-error.json | 177 + .../unified/minPoolSize-error.yml | 125 + .../unified/pool-cleared-error.json | 373 + .../unified/pool-cleared-error.yml | 239 + .../unified/serverMonitoringMode.json | 449 + .../unified/serverMonitoringMode.yml | 173 + .../in_window/equilibrium.json | 46 + .../in_window/equilibrium.yml | 27 + .../in_window/many-choices.json | 106 + .../in_window/many-choices.yml | 63 + .../in_window/one-least-two-tied.json | 46 + .../in_window/one-least-two-tied.yml | 27 + .../in_window/rs-equilibrium.json | 46 + .../in_window/rs-equilibrium.yml | 27 + .../in_window/rs-three-choices.json | 46 + .../in_window/rs-three-choices.yml | 27 + .../in_window/three-choices.json | 46 + .../in_window/three-choices.yml | 27 + .../in_window/two-choices.json | 36 + .../in_window/two-choices.yml | 21 + .../server-selection/in_window/two-least.json | 46 + .../server-selection/in_window/two-least.yml | 27 + .../logging/load-balanced.json | 107 + .../logging/load-balanced.yml | 60 + .../logging/operation-id.json | 418 + .../server-selection/logging/operation-id.yml | 223 + .../server-selection/logging/replica-set.json | 228 + .../server-selection/logging/replica-set.yml | 126 + .../server-selection/logging/sharded.json | 237 + testdata/server-selection/logging/sharded.yml | 129 + .../server-selection/logging/standalone.json | 235 + .../server-selection/logging/standalone.yml | 128 + .../server-selection/rtt/first_value.json | 5 + testdata/server-selection/rtt/first_value.yml | 3 + .../rtt/first_value_zero.json | 5 + .../server-selection/rtt/first_value_zero.yml | 3 + .../server-selection/rtt/value_test_1.json | 5 + .../server-selection/rtt/value_test_1.yml | 3 + .../server-selection/rtt/value_test_2.json | 5 + .../server-selection/rtt/value_test_2.yml | 3 + .../server-selection/rtt/value_test_3.json | 5 + .../server-selection/rtt/value_test_3.yml | 3 + .../server-selection/rtt/value_test_4.json | 5 + .../server-selection/rtt/value_test_4.yml | 3 + .../server-selection/rtt/value_test_5.json | 5 + .../server-selection/rtt/value_test_5.yml | 3 + .../LoadBalanced/read/Nearest.json | 35 + .../LoadBalanced/read/Nearest.yml | 16 + .../LoadBalanced/read/Primary.json | 30 + .../LoadBalanced/read/Primary.yml | 14 + .../LoadBalanced/read/PrimaryPreferred.json | 35 + .../LoadBalanced/read/PrimaryPreferred.yml | 16 + .../LoadBalanced/read/Secondary.json | 35 + .../LoadBalanced/read/Secondary.yml | 16 + .../LoadBalanced/read/SecondaryPreferred.json | 35 + .../LoadBalanced/read/SecondaryPreferred.yml | 16 + .../LoadBalanced/write/Nearest.json | 35 + 
.../LoadBalanced/write/Nearest.yml | 16 + .../LoadBalanced/write/Primary.json | 30 + .../LoadBalanced/write/Primary.yml | 14 + .../LoadBalanced/write/PrimaryPreferred.json | 35 + .../LoadBalanced/write/PrimaryPreferred.yml | 16 + .../LoadBalanced/write/Secondary.json | 35 + .../LoadBalanced/write/Secondary.yml | 16 + .../write/SecondaryPreferred.json | 35 + .../LoadBalanced/write/SecondaryPreferred.yml | 16 + .../ReplicaSetNoPrimary/read/Nearest.json | 60 + .../ReplicaSetNoPrimary/read/Nearest.yml | 25 + .../read/Nearest_multiple.json | 68 + .../read/Nearest_multiple.yml | 26 + .../read/Nearest_non_matching.json | 34 + .../read/Nearest_non_matching.yml | 20 + .../read/PossiblePrimary.json | 21 + .../read/PossiblePrimary.yml | 15 + .../read/PossiblePrimaryNearest.json | 21 + .../read/PossiblePrimaryNearest.yml | 15 + .../ReplicaSetNoPrimary/read/Primary.json | 32 + .../ReplicaSetNoPrimary/read/Primary.yml | 20 + .../read/PrimaryPreferred.json | 58 + .../read/PrimaryPreferred.yml | 25 + .../read/PrimaryPreferred_non_matching.json | 34 + .../read/PrimaryPreferred_non_matching.yml | 20 + .../ReplicaSetNoPrimary/read/Secondary.json | 60 + .../ReplicaSetNoPrimary/read/Secondary.yml | 25 + .../read/SecondaryPreferred.json | 60 + .../read/SecondaryPreferred.yml | 25 + .../read/SecondaryPreferred_non_matching.json | 34 + .../read/SecondaryPreferred_non_matching.yml | 20 + .../read/Secondary_multi_tags.json | 60 + .../read/Secondary_multi_tags.yml | 31 + .../read/Secondary_multi_tags2.json | 60 + .../read/Secondary_multi_tags2.yml | 31 + .../read/Secondary_non_matching.json | 34 + .../read/Secondary_non_matching.yml | 20 + .../write/SecondaryPreferred.json | 34 + .../write/SecondaryPreferred.yml | 20 + .../ReplicaSetWithPrimary/read/Nearest.json | 76 + .../ReplicaSetWithPrimary/read/Nearest.yml | 32 + .../read/Nearest_multiple.json | 84 + .../read/Nearest_multiple.yml | 33 + .../read/Nearest_non_matching.json | 42 + .../read/Nearest_non_matching.yml | 25 + .../ReplicaSetWithPrimary/read/Primary.json | 58 + .../ReplicaSetWithPrimary/read/Primary.yml | 28 + .../read/PrimaryPreferred.json | 58 + .../read/PrimaryPreferred.yml | 28 + .../read/PrimaryPreferred_non_matching.json | 60 + .../read/PrimaryPreferred_non_matching.yml | 28 + .../ReplicaSetWithPrimary/read/Secondary.json | 68 + .../ReplicaSetWithPrimary/read/Secondary.yml | 30 + .../read/SecondaryPreferred.json | 68 + .../read/SecondaryPreferred.yml | 30 + .../read/SecondaryPreferred_non_matching.json | 60 + .../read/SecondaryPreferred_non_matching.yml | 28 + .../read/SecondaryPreferred_tags.json | 52 + .../read/SecondaryPreferred_tags.yml | 28 + .../read/Secondary_non_matching.json | 42 + .../read/Secondary_non_matching.yml | 25 + .../write/SecondaryPreferred.json | 60 + .../write/SecondaryPreferred.yml | 28 + .../Sharded/read/Nearest.json | 45 + .../server_selection/Sharded/read/Nearest.yml | 21 + .../Sharded/read/Primary.json | 40 + .../server_selection/Sharded/read/Primary.yml | 19 + .../Sharded/read/PrimaryPreferred.json | 45 + .../Sharded/read/PrimaryPreferred.yml | 21 + .../Sharded/read/Secondary.json | 45 + .../Sharded/read/Secondary.yml | 21 + .../Sharded/read/SecondaryPreferred.json | 45 + .../Sharded/read/SecondaryPreferred.yml | 21 + .../Sharded/write/Nearest.json | 45 + .../Sharded/write/Nearest.yml | 21 + .../Sharded/write/Primary.json | 40 + .../Sharded/write/Primary.yml | 19 + .../Sharded/write/PrimaryPreferred.json | 45 + .../Sharded/write/PrimaryPreferred.yml | 21 + .../Sharded/write/Secondary.json | 45 + 
.../Sharded/write/Secondary.yml | 21 + .../Sharded/write/SecondaryPreferred.json | 45 + .../Sharded/write/SecondaryPreferred.yml | 21 + .../Single/read/SecondaryPreferred.json | 44 + .../Single/read/SecondaryPreferred.yml | 18 + .../Single/write/SecondaryPreferred.json | 44 + .../Single/write/SecondaryPreferred.yml | 18 + .../Unknown/read/SecondaryPreferred.json | 17 + .../Unknown/read/SecondaryPreferred.yml | 10 + .../Unknown/write/SecondaryPreferred.json | 17 + .../Unknown/write/SecondaryPreferred.yml | 10 + .../driver-sessions-dirty-session-errors.json | 968 ++ .../driver-sessions-dirty-session-errors.yml | 351 + .../driver-sessions-server-support.json | 256 + .../driver-sessions-server-support.yml | 123 + ...t-sessions-default-causal-consistency.json | 318 + ...it-sessions-default-causal-consistency.yml | 119 + ...t-sessions-not-supported-client-error.json | 128 + ...ot-sessions-not-supported-client-error.yml | 75 + ...t-sessions-not-supported-server-error.json | 187 + ...ot-sessions-not-supported-server-error.yml | 102 + .../snapshot-sessions-unsupported-ops.json | 493 + .../snapshot-sessions-unsupported-ops.yml | 258 + testdata/sessions/snapshot-sessions.json | 993 ++ testdata/sessions/snapshot-sessions.yml | 482 + .../large_doc.json.gz | Bin 0 -> 1835846 bytes .../small_doc.json.gz | Bin 0 -> 258 bytes .../single_and_multi_document/tweet.json.gz | Bin 0 -> 845 bytes testdata/transactions/legacy/abort.json | 621 ++ testdata/transactions/legacy/abort.yml | 413 + testdata/transactions/legacy/bulk.json | 531 + testdata/transactions/legacy/bulk.yml | 268 + .../legacy/causal-consistency.json | 305 + .../legacy/causal-consistency.yml | 175 + testdata/transactions/legacy/commit.json | 925 ++ testdata/transactions/legacy/commit.yml | 603 ++ testdata/transactions/legacy/count.json | 120 + testdata/transactions/legacy/count.yml | 67 + .../legacy/create-collection.json | 204 + .../transactions/legacy/create-collection.yml | 131 + .../transactions/legacy/create-index.json | 237 + testdata/transactions/legacy/create-index.yml | 152 + testdata/transactions/legacy/delete.json | 327 + testdata/transactions/legacy/delete.yml | 192 + .../transactions/legacy/error-labels.json | 2089 ++++ testdata/transactions/legacy/error-labels.yml | 1276 +++ .../transactions/legacy/errors-client.json | 96 + .../transactions/legacy/errors-client.yml | 55 + testdata/transactions/legacy/errors.json | 222 + testdata/transactions/legacy/errors.yml | 133 + .../transactions/legacy/findOneAndDelete.json | 221 + .../transactions/legacy/findOneAndDelete.yml | 134 + .../legacy/findOneAndReplace.json | 255 + .../transactions/legacy/findOneAndReplace.yml | 148 + .../transactions/legacy/findOneAndUpdate.json | 413 + .../transactions/legacy/findOneAndUpdate.yml | 236 + testdata/transactions/legacy/insert.json | 648 ++ testdata/transactions/legacy/insert.yml | 390 + testdata/transactions/legacy/isolation.json | 225 + testdata/transactions/legacy/isolation.yml | 133 + .../legacy/mongos-recovery-token.json | 511 + .../legacy/mongos-recovery-token.yml | 350 + testdata/transactions/legacy/pin-mongos.json | 1229 +++ testdata/transactions/legacy/pin-mongos.yml | 559 ++ .../transactions/legacy/read-concern.json | 1628 ++++ testdata/transactions/legacy/read-concern.yml | 623 ++ testdata/transactions/legacy/read-pref.json | 720 ++ testdata/transactions/legacy/read-pref.yml | 348 + testdata/transactions/legacy/reads.json | 543 ++ testdata/transactions/legacy/reads.yml | 261 + .../legacy/retryable-abort-errorLabels.json | 204 + 
.../legacy/retryable-abort-errorLabels.yml | 124 + .../transactions/legacy/retryable-abort.json | 2017 ++++ .../transactions/legacy/retryable-abort.yml | 1315 +++ .../legacy/retryable-commit-errorLabels.json | 223 + .../legacy/retryable-commit-errorLabels.yml | 132 + .../transactions/legacy/retryable-commit.json | 2337 +++++ .../transactions/legacy/retryable-commit.yml | 1461 +++ .../transactions/legacy/retryable-writes.json | 343 + .../transactions/legacy/retryable-writes.yml | 216 + testdata/transactions/legacy/run-command.json | 306 + testdata/transactions/legacy/run-command.yml | 197 + .../legacy/transaction-options-repl.json | 181 + .../legacy/transaction-options-repl.yml | 117 + .../legacy/transaction-options.json | 1407 +++ .../legacy/transaction-options.yml | 806 ++ testdata/transactions/legacy/update.json | 442 + testdata/transactions/legacy/update.yml | 246 + .../transactions/legacy/write-concern.json | 1278 +++ .../transactions/legacy/write-concern.yml | 554 ++ .../unified/client-bulkWrite.json | 593 ++ .../transactions/unified/client-bulkWrite.yml | 263 + .../do-not-retry-read-in-transaction.json | 115 + .../do-not-retry-read-in-transaction.yml | 64 + .../transactions/unified/mongos-pin-auto.json | 5474 +++++++++++ .../transactions/unified/mongos-pin-auto.yml | 1705 ++++ .../transactions/unified/mongos-unpin.json | 450 + .../transactions/unified/mongos-unpin.yml | 185 + .../assertNumberConnectionsCheckedOut.json | 63 + .../assertNumberConnectionsCheckedOut.yml | 38 + ...ncryptionOpts-missing-kms-credentials.json | 37 + ...EncryptionOpts-missing-kms-credentials.yml | 20 + .../clientEncryptionOpts-no-kms.json | 27 + .../clientEncryptionOpts-no-kms.yml | 17 + .../entity-bucket-database-undefined.json | 18 + .../entity-bucket-database-undefined.yml | 12 + .../entity-client-apiVersion-unsupported.json | 20 + .../entity-client-apiVersion-unsupported.yml | 13 + ...ntsAsEntities-conflict_with_client_id.json | 28 + ...entsAsEntities-conflict_with_client_id.yml | 16 + ...ities-conflict_within_different_array.json | 43 + ...tities-conflict_within_different_array.yml | 19 + ...AsEntities-conflict_within_same_array.json | 36 + ...sAsEntities-conflict_within_same_array.yml | 16 + .../entity-collection-database-undefined.json | 19 + .../entity-collection-database-undefined.yml | 13 + .../entity-database-client-undefined.json | 19 + .../entity-database-client-undefined.yml | 13 + .../valid-fail/entity-find-cursor.json | 62 + .../valid-fail/entity-find-cursor.yml | 37 + .../entity-session-client-undefined.json | 18 + .../entity-session-client-undefined.yml | 12 + ...gesForClient-ignoreExtraMessages-type.json | 24 + ...agesForClient-ignoreExtraMessages-type.yml | 15 + ...essagesForClient-ignoreMessages-items.json | 26 + ...MessagesForClient-ignoreMessages-items.yml | 15 + ...MessagesForClient-ignoreMessages-type.json | 24 + ...gMessagesForClient-ignoreMessages-type.yml | 15 + ...tionChangedEvent-additionalProperties.json | 23 + ...ptionChangedEvent-additionalProperties.yml | 13 + .../valid-fail/ignoreResultAndError.json | 72 + .../valid-fail/ignoreResultAndError.yml | 43 + ...Providers-missing_aws_kms_credentials.json | 36 + ...sProviders-missing_aws_kms_credentials.yml | 22 + ...oviders-missing_azure_kms_credentials.json | 36 + ...roviders-missing_azure_kms_credentials.yml | 22 + ...Providers-missing_gcp_kms_credentials.json | 36 + ...sProviders-missing_gcp_kms_credentials.yml | 22 + .../valid-fail/kmsProviders-no_kms.json | 32 + .../valid-fail/kmsProviders-no_kms.yml | 21 + 
.../valid-fail/operation-failure.json | 56 + .../valid-fail/operation-failure.yml | 31 + .../valid-fail/operation-unsupported.json | 22 + .../valid-fail/operation-unsupported.yml | 13 + .../returnDocument-enum-invalid.json | 66 + .../returnDocument-enum-invalid.yml | 34 + .../valid-fail/schemaVersion-unsupported.json | 10 + .../valid-fail/schemaVersion-unsupported.yml | 7 + .../assertNumberConnectionsCheckedOut.json | 27 + .../assertNumberConnectionsCheckedOut.yml | 17 + .../collectionData-createOptions.json | 79 + .../collectionData-createOptions.yml | 37 + .../valid-pass/entity-client-cmap-events.json | 71 + .../valid-pass/entity-client-cmap-events.yml | 40 + .../entity-client-storeEventsAsEntities.json | 67 + .../entity-client-storeEventsAsEntities.yml | 37 + .../valid-pass/entity-commandCursor.json | 278 + .../valid-pass/entity-commandCursor.yml | 115 + .../valid-pass/entity-cursor-iterateOnce.json | 108 + .../valid-pass/entity-cursor-iterateOnce.yml | 58 + .../valid-pass/entity-find-cursor.json | 182 + .../valid-pass/entity-find-cursor.yml | 89 + .../expectedEventsForClient-eventType.json | 126 + .../expectedEventsForClient-eventType.yml | 66 + ...ctedEventsForClient-ignoreExtraEvents.json | 151 + ...ectedEventsForClient-ignoreExtraEvents.yml | 78 + .../valid-pass/ignoreResultAndError.json | 59 + .../valid-pass/ignoreResultAndError.yml | 34 + ...kmsProviders-explicit_kms_credentials.json | 52 + .../kmsProviders-explicit_kms_credentials.yml | 26 + ...Providers-mixed_kms_credential_fields.json | 54 + ...sProviders-mixed_kms_credential_fields.yml | 24 + ...Providers-placeholder_kms_credentials.json | 70 + ...sProviders-placeholder_kms_credentials.yml | 26 + .../kmsProviders-unconfigured_kms.json | 39 + .../kmsProviders-unconfigured_kms.yml | 27 + .../valid-pass/matches-lte-operator.json | 78 + .../valid-pass/matches-lte-operator.yml | 40 + .../valid-pass/observeSensitiveCommands.json | 706 ++ .../valid-pass/observeSensitiveCommands.yml | 255 + .../valid-pass/poc-change-streams.json | 455 + .../valid-pass/poc-change-streams.yml | 241 + .../valid-pass/poc-command-monitoring.json | 223 + .../valid-pass/poc-command-monitoring.yml | 102 + .../valid-pass/poc-crud.json | 450 + .../valid-pass/poc-crud.yml | 190 + .../valid-pass/poc-gridfs.json | 301 + .../valid-pass/poc-gridfs.yml | 155 + .../valid-pass/poc-retryable-reads.json | 433 + .../valid-pass/poc-retryable-reads.yml | 193 + .../valid-pass/poc-retryable-writes.json | 491 + .../valid-pass/poc-retryable-writes.yml | 214 + .../valid-pass/poc-sessions.json | 466 + .../valid-pass/poc-sessions.yml | 213 + .../poc-transactions-convenient-api.json | 505 + .../poc-transactions-convenient-api.yml | 235 + .../poc-transactions-mongos-pin-auto.json | 409 + .../poc-transactions-mongos-pin-auto.yml | 169 + .../valid-pass/poc-transactions.json | 323 + .../valid-pass/poc-transactions.yml | 171 + testdata/uri-options/auth-options.json | 32 + testdata/uri-options/auth-options.yml | 24 + testdata/uri-options/ca.pem | 1 + testdata/uri-options/cert.pem | 1 + testdata/uri-options/client.pem | 1 + testdata/uri-options/compression-options.json | 59 + testdata/uri-options/compression-options.yml | 48 + testdata/uri-options/concern-options.json | 76 + testdata/uri-options/concern-options.yml | 55 + testdata/uri-options/connection-options.json | 243 + testdata/uri-options/connection-options.yml | 209 + .../uri-options/connection-pool-options.json | 33 + .../uri-options/connection-pool-options.yml | 26 + .../uri-options/read-preference-options.json | 67 + 
.../uri-options/read-preference-options.yml | 53 + testdata/uri-options/sdam-options.json | 46 + testdata/uri-options/sdam-options.yml | 35 + .../uri-options/single-threaded-options.json | 24 + .../uri-options/single-threaded-options.yml | 18 + testdata/uri-options/srv-options.json | 116 + testdata/uri-options/srv-options.yml | 89 + testdata/uri-options/tls-options.json | 649 ++ testdata/uri-options/tls-options.yml | 578 ++ .../crud-api-version-1-strict.json | 1109 +++ .../crud-api-version-1-strict.yml | 419 + .../versioned-api/crud-api-version-1.json | 1182 +++ testdata/versioned-api/crud-api-version-1.yml | 455 + ...ommand-helper-no-api-version-declared.json | 127 + ...command-helper-no-api-version-declared.yml | 75 + .../test-commands-deprecation-errors.json | 74 + .../test-commands-deprecation-errors.yml | 47 + .../test-commands-strict-mode.json | 75 + .../test-commands-strict-mode.yml | 46 + .../versioned-api/transaction-handling.json | 348 + .../versioned-api/transaction-handling.yml | 128 + time_codec.go | 109 + time_codec_test.go | 81 + truncation_test.go | 84 + type_test.go | 51 + types.go | 118 + uint_codec.go | 161 + unmarshal.go | 88 + unmarshal_test.go | 819 ++ unmarshal_value_test.go | 92 + unmarshaling_cases_test.go | 369 + value_reader.go | 842 ++ value_reader_test.go | 1489 +++ value_reader_writer_test.go | 606 ++ value_writer.go | 577 ++ value_writer_test.go | 357 + vector.go | 268 + writer.go | 61 + x/README.md | 9 + x/bsonx/bsoncore/array.go | 185 + x/bsonx/bsoncore/array_test.go | 583 ++ x/bsonx/bsoncore/bson_arraybuilder.go | 198 + x/bsonx/bsoncore/bson_arraybuilder_test.go | 211 + x/bsonx/bsoncore/bson_documentbuilder.go | 184 + x/bsonx/bsoncore/bson_documentbuilder_test.go | 213 + x/bsonx/bsoncore/bsoncore.go | 842 ++ x/bsonx/bsoncore/bsoncore_test.go | 955 ++ x/bsonx/bsoncore/doc.go | 34 + x/bsonx/bsoncore/document.go | 411 + x/bsonx/bsoncore/document_test.go | 539 ++ x/bsonx/bsoncore/element.go | 166 + x/bsonx/bsoncore/element_test.go | 126 + x/bsonx/bsoncore/iterator.go | 113 + x/bsonx/bsoncore/iterator_test.go | 305 + x/bsonx/bsoncore/tables.go | 223 + x/bsonx/bsoncore/type.go | 85 + x/bsonx/bsoncore/value.go | 986 ++ x/bsonx/bsoncore/value_test.go | 839 ++ 2206 files changed, 469613 insertions(+) create mode 100644 LICENSE create mode 100644 THIRD-PARTY-NOTICES create mode 100644 array_codec.go create mode 100644 benchmark_test.go create mode 100644 bson_binary_vector_spec_test.go create mode 100644 bson_corpus_spec_test.go create mode 100644 bson_test.go create mode 100644 bsoncodec.go create mode 100644 bsoncodec_test.go create mode 100644 bsonrw_test.go create mode 100644 byte_slice_codec.go create mode 100644 codec_cache.go create mode 100644 codec_cache_test.go create mode 100644 cond_addr_codec.go create mode 100644 cond_addr_codec_test.go create mode 100644 copier.go create mode 100644 copier_test.go create mode 100644 decimal.go create mode 100644 decimal_test.go create mode 100644 decoder.go create mode 100644 decoder_example_test.go create mode 100644 decoder_test.go create mode 100644 default_value_decoders.go create mode 100644 default_value_decoders_test.go create mode 100644 default_value_encoders.go create mode 100644 default_value_encoders_test.go create mode 100644 doc.go create mode 100644 empty_interface_codec.go create mode 100644 encoder.go create mode 100644 encoder_example_test.go create mode 100644 encoder_test.go create mode 100644 example_test.go create mode 100644 extjson_parser.go create mode 100644 extjson_parser_test.go create mode 
100644 extjson_prose_test.go create mode 100644 extjson_reader.go create mode 100644 extjson_reader_test.go create mode 100644 extjson_tables.go create mode 100644 extjson_wrappers.go create mode 100644 extjson_writer.go create mode 100644 extjson_writer_test.go create mode 100644 fuzz_test.go create mode 100644 go.mod create mode 100644 go.sum create mode 100644 internal/assert/assertion_compare.go create mode 100644 internal/assert/assertion_compare_can_convert.go create mode 100644 internal/assert/assertion_compare_go1.17_test.go create mode 100644 internal/assert/assertion_compare_legacy.go create mode 100644 internal/assert/assertion_compare_test.go create mode 100644 internal/assert/assertion_format.go create mode 100644 internal/assert/assertion_mongo.go create mode 100644 internal/assert/assertion_mongo_test.go create mode 100644 internal/assert/assertions.go create mode 100644 internal/assert/assertions_test.go create mode 100644 internal/assert/difflib.go create mode 100644 internal/assert/difflib_test.go create mode 100644 internal/aws/awserr/error.go create mode 100644 internal/aws/awserr/types.go create mode 100644 internal/aws/credentials/chain_provider.go create mode 100644 internal/aws/credentials/chain_provider_test.go create mode 100644 internal/aws/credentials/credentials.go create mode 100644 internal/aws/credentials/credentials_test.go create mode 100644 internal/aws/signer/v4/header_rules.go create mode 100644 internal/aws/signer/v4/request.go create mode 100644 internal/aws/signer/v4/uri_path.go create mode 100644 internal/aws/signer/v4/v4.go create mode 100644 internal/aws/signer/v4/v4_test.go create mode 100644 internal/aws/types.go create mode 100644 internal/bsoncoreutil/bsoncoreutil.go create mode 100644 internal/bsoncoreutil/bsoncoreutil_test.go create mode 100644 internal/bsonutil/bsonutil.go create mode 100644 internal/codecutil/encoding.go create mode 100644 internal/codecutil/encoding_test.go create mode 100644 internal/credproviders/assume_role_provider.go create mode 100644 internal/credproviders/ec2_provider.go create mode 100644 internal/credproviders/ecs_provider.go create mode 100644 internal/credproviders/env_provider.go create mode 100644 internal/credproviders/imds_provider.go create mode 100644 internal/credproviders/static_provider.go create mode 100644 internal/csfle/csfle.go create mode 100644 internal/csot/csot.go create mode 100644 internal/csot/csot_test.go create mode 100644 internal/decimal128/decinal128.go create mode 100644 internal/errutil/join.go create mode 100644 internal/errutil/join_go1.19.go create mode 100644 internal/errutil/join_go1.20.go create mode 100644 internal/errutil/join_test.go create mode 100644 internal/failpoint/failpoint.go create mode 100644 internal/handshake/handshake.go create mode 100644 internal/httputil/httputil.go create mode 100644 internal/israce/norace.go create mode 100644 internal/israce/race.go create mode 100644 internal/logger/component.go create mode 100644 internal/logger/component_test.go create mode 100644 internal/logger/context.go create mode 100644 internal/logger/context_test.go create mode 100644 internal/logger/io_sink.go create mode 100644 internal/logger/level.go create mode 100644 internal/logger/logger.go create mode 100644 internal/logger/logger_test.go create mode 100644 internal/ptrutil/int64.go create mode 100644 internal/ptrutil/int64_test.go create mode 100644 internal/ptrutil/ptr.go create mode 100644 internal/rand/arith128_test.go create mode 100644 internal/rand/bits.go create 
mode 100644 internal/rand/example_test.go create mode 100644 internal/rand/exp.go create mode 100644 internal/rand/modulo_test.go create mode 100644 internal/rand/normal.go create mode 100644 internal/rand/race_test.go create mode 100644 internal/rand/rand.go create mode 100644 internal/rand/rand_test.go create mode 100644 internal/rand/regress_test.go create mode 100644 internal/rand/rng.go create mode 100644 internal/randutil/randutil.go create mode 100644 internal/randutil/randutil_test.go create mode 100644 internal/require/require.go create mode 100644 internal/spectest/spectest.go create mode 100644 internal/test/goleak/go.mod create mode 100644 internal/test/goleak/go.sum create mode 100644 internal/test/goleak/goleak_test.go create mode 100644 internal/uuid/uuid.go create mode 100644 internal/uuid/uuid_test.go create mode 100644 json_scanner.go create mode 100644 json_scanner_test.go create mode 100644 map_codec.go create mode 100644 marshal.go create mode 100644 marshal_test.go create mode 100644 marshal_value_cases_test.go create mode 100644 marshal_value_test.go create mode 100644 marshaling_cases_test.go create mode 100644 mgocompat/doc.go create mode 100644 mgocompat/registry.go create mode 100644 mgoregistry.go create mode 100644 mgoregistry_test.go create mode 100644 mode.go create mode 100644 objectid.go create mode 100644 objectid_test.go create mode 100644 pointer_codec.go create mode 100644 primitive.go create mode 100644 primitive_codecs.go create mode 100644 primitive_codecs_test.go create mode 100644 raw.go create mode 100644 raw_array.go create mode 100644 raw_array_test.go create mode 100644 raw_element.go create mode 100644 raw_test.go create mode 100644 raw_value.go create mode 100644 raw_value_test.go create mode 100644 reader.go create mode 100644 registry.go create mode 100644 registry_examples_test.go create mode 100644 registry_test.go create mode 100644 slice_codec.go create mode 100644 string_codec.go create mode 100644 struct_codec.go create mode 100644 struct_codec_test.go create mode 100644 struct_tag_parser.go create mode 100644 struct_tag_parser_test.go create mode 100644 testdata/FuzzDecode/002ae7d43f636100116fede772a03d07726ed75c3c3b83da865fe9b718adf8ae create mode 100644 testdata/FuzzDecode/0de854041b0055ca1e5e6e54a7fb667ed38461db171af267665c21776f9a9ef4 create mode 100644 testdata/FuzzDecode/718592474a0a3626039f3471449b9aa374c746754d4925fcfe4ba747e7101504 create mode 100644 testdata/FuzzDecode/93c43e3c1cf35c19b7618a618d128cea0ce05cef0711fdd91e403fe3b2f45628 create mode 100644 testdata/FuzzDecode/c3ffbb42eb85b743ede396f00b7706e6ad0529c32689c63ca663dae37d072627 create mode 100644 testdata/atlas-data-lake-testing/aggregate.json create mode 100644 testdata/atlas-data-lake-testing/aggregate.yml create mode 100644 testdata/atlas-data-lake-testing/estimatedDocumentCount.json create mode 100644 testdata/atlas-data-lake-testing/estimatedDocumentCount.yml create mode 100644 testdata/atlas-data-lake-testing/find.json create mode 100644 testdata/atlas-data-lake-testing/find.yml create mode 100644 testdata/atlas-data-lake-testing/getMore.json create mode 100644 testdata/atlas-data-lake-testing/getMore.yml create mode 100644 testdata/atlas-data-lake-testing/listCollections.json create mode 100644 testdata/atlas-data-lake-testing/listCollections.yml create mode 100644 testdata/atlas-data-lake-testing/listDatabases.json create mode 100644 testdata/atlas-data-lake-testing/listDatabases.yml create mode 100644 testdata/atlas-data-lake-testing/runCommand.json create 
mode 100644 testdata/atlas-data-lake-testing/runCommand.yml create mode 100644 testdata/auth/connection-string.json create mode 100644 testdata/auth/connection-string.yml create mode 100644 testdata/bson-binary-vector/float32.json create mode 100644 testdata/bson-binary-vector/int8.json create mode 100644 testdata/bson-binary-vector/packed_bit.json create mode 100644 testdata/bson-corpus/array.json create mode 100644 testdata/bson-corpus/binary.json create mode 100644 testdata/bson-corpus/boolean.json create mode 100644 testdata/bson-corpus/bsonview create mode 100644 testdata/bson-corpus/code.json create mode 100644 testdata/bson-corpus/code_w_scope.json create mode 100644 testdata/bson-corpus/datetime.json create mode 100644 testdata/bson-corpus/dbpointer.json create mode 100644 testdata/bson-corpus/dbref.json create mode 100644 testdata/bson-corpus/decimal128-1.json create mode 100644 testdata/bson-corpus/decimal128-2.json create mode 100644 testdata/bson-corpus/decimal128-3.json create mode 100644 testdata/bson-corpus/decimal128-4.json create mode 100644 testdata/bson-corpus/decimal128-5.json create mode 100644 testdata/bson-corpus/decimal128-6.json create mode 100644 testdata/bson-corpus/decimal128-7.json create mode 100644 testdata/bson-corpus/document.json create mode 100644 testdata/bson-corpus/double.json create mode 100644 testdata/bson-corpus/int32.json create mode 100644 testdata/bson-corpus/int64.json create mode 100644 testdata/bson-corpus/maxkey.json create mode 100644 testdata/bson-corpus/minkey.json create mode 100644 testdata/bson-corpus/multi-type-deprecated.json create mode 100644 testdata/bson-corpus/multi-type.json create mode 100644 testdata/bson-corpus/null.json create mode 100644 testdata/bson-corpus/oid.json create mode 100644 testdata/bson-corpus/regex.json create mode 100644 testdata/bson-corpus/string.json create mode 100644 testdata/bson-corpus/symbol.json create mode 100644 testdata/bson-corpus/timestamp.json create mode 100644 testdata/bson-corpus/top.json create mode 100644 testdata/bson-corpus/undefined.json create mode 100644 testdata/change-streams/change-streams-clusterTime.json create mode 100644 testdata/change-streams/change-streams-clusterTime.yml create mode 100644 testdata/change-streams/change-streams-disambiguatedPaths.json create mode 100644 testdata/change-streams/change-streams-disambiguatedPaths.yml create mode 100644 testdata/change-streams/change-streams-errors.json create mode 100644 testdata/change-streams/change-streams-errors.yml create mode 100644 testdata/change-streams/change-streams-pre_and_post_images.json create mode 100644 testdata/change-streams/change-streams-pre_and_post_images.yml create mode 100644 testdata/change-streams/change-streams-resume-allowlist.json create mode 100644 testdata/change-streams/change-streams-resume-allowlist.yml create mode 100644 testdata/change-streams/change-streams-resume-errorLabels.json create mode 100644 testdata/change-streams/change-streams-resume-errorLabels.yml create mode 100644 testdata/change-streams/change-streams-showExpandedEvents.json create mode 100644 testdata/change-streams/change-streams-showExpandedEvents.yml create mode 100644 testdata/change-streams/change-streams.json create mode 100644 testdata/change-streams/change-streams.yml create mode 100644 testdata/client-side-encryption-prose/change-streams-test.json create mode 100644 testdata/client-side-encryption-prose/corpus-encrypted.json create mode 100644 testdata/client-side-encryption-prose/corpus-key-aws.json create mode 
100644 testdata/client-side-encryption-prose/corpus-key-azure.json
 create mode 100644 testdata/client-side-encryption-prose/corpus-key-gcp.json
 create mode 100644 testdata/client-side-encryption-prose/corpus-key-kmip.json
 create mode 100644 testdata/client-side-encryption-prose/corpus-key-local.json
 create mode 100644 testdata/client-side-encryption-prose/corpus-schema.json
 create mode 100644 testdata/client-side-encryption-prose/corpus.json
 create mode 100644 testdata/client-side-encryption-prose/encrypted-fields.json
 create mode 100644 testdata/client-side-encryption-prose/external-key.json
 create mode 100644 testdata/client-side-encryption-prose/external-schema.json
 create mode 100644 testdata/client-side-encryption-prose/key1-document.json
 create mode 100644 testdata/client-side-encryption-prose/limits-doc.json
 create mode 100644 testdata/client-side-encryption-prose/limits-key.json
 create mode 100644 testdata/client-side-encryption-prose/limits-schema.json
 create mode 100644 testdata/client-side-encryption-prose/range-encryptedFields-Date.json
 create mode 100644 testdata/client-side-encryption-prose/range-encryptedFields-DecimalNoPrecision.json
 create mode 100644 testdata/client-side-encryption-prose/range-encryptedFields-DecimalPrecision.json
 create mode 100644 testdata/client-side-encryption-prose/range-encryptedFields-DoubleNoPrecision.json
 create mode 100644 testdata/client-side-encryption-prose/range-encryptedFields-DoublePrecision.json
 create mode 100644 testdata/client-side-encryption-prose/range-encryptedFields-Int.json
 create mode 100644 testdata/client-side-encryption-prose/range-encryptedFields-Long.json
 create mode 100644 testdata/client-side-encryption/legacy/aggregate.json
 create mode 100644 testdata/client-side-encryption/legacy/aggregate.yml
 create mode 100644 testdata/client-side-encryption/legacy/awsTemporary.json
 create mode 100644 testdata/client-side-encryption/legacy/awsTemporary.yml
 create mode 100644 testdata/client-side-encryption/legacy/azureKMS.json
 create mode 100644 testdata/client-side-encryption/legacy/azureKMS.yml
 create mode 100644 testdata/client-side-encryption/legacy/badQueries.json
 create mode 100644 testdata/client-side-encryption/legacy/badQueries.yml
 create mode 100644 testdata/client-side-encryption/legacy/badSchema.json
 create mode 100644 testdata/client-side-encryption/legacy/badSchema.yml
 create mode 100644 testdata/client-side-encryption/legacy/basic.json
 create mode 100644 testdata/client-side-encryption/legacy/basic.yml
 create mode 100644 testdata/client-side-encryption/legacy/bulk.json
 create mode 100644 testdata/client-side-encryption/legacy/bulk.yml
 create mode 100644 testdata/client-side-encryption/legacy/bypassAutoEncryption.json
 create mode 100644 testdata/client-side-encryption/legacy/bypassAutoEncryption.yml
 create mode 100644 testdata/client-side-encryption/legacy/bypassedCommand.json
 create mode 100644 testdata/client-side-encryption/legacy/bypassedCommand.yml
 create mode 100644 testdata/client-side-encryption/legacy/count.json
 create mode 100644 testdata/client-side-encryption/legacy/count.yml
 create mode 100644 testdata/client-side-encryption/legacy/countDocuments.json
 create mode 100644 testdata/client-side-encryption/legacy/countDocuments.yml
 create mode 100644 testdata/client-side-encryption/legacy/create-and-createIndexes.json
 create mode 100644 testdata/client-side-encryption/legacy/create-and-createIndexes.yml
 create mode 100644 testdata/client-side-encryption/legacy/delete.json
 create mode 100644 testdata/client-side-encryption/legacy/delete.yml
 create mode 100644 testdata/client-side-encryption/legacy/distinct.json
 create mode 100644 testdata/client-side-encryption/legacy/distinct.yml
 create mode 100644 testdata/client-side-encryption/legacy/explain.json
 create mode 100644 testdata/client-side-encryption/legacy/explain.yml
 create mode 100644 testdata/client-side-encryption/legacy/find.json
 create mode 100644 testdata/client-side-encryption/legacy/find.yml
 create mode 100644 testdata/client-side-encryption/legacy/findOneAndDelete.json
 create mode 100644 testdata/client-side-encryption/legacy/findOneAndDelete.yml
 create mode 100644 testdata/client-side-encryption/legacy/findOneAndReplace.json
 create mode 100644 testdata/client-side-encryption/legacy/findOneAndReplace.yml
 create mode 100644 testdata/client-side-encryption/legacy/findOneAndUpdate.json
 create mode 100644 testdata/client-side-encryption/legacy/findOneAndUpdate.yml
 create mode 100644 testdata/client-side-encryption/legacy/fle2v2-BypassQueryAnalysis.json
 create mode 100644 testdata/client-side-encryption/legacy/fle2v2-BypassQueryAnalysis.yml
 create mode 100644 testdata/client-side-encryption/legacy/fle2v2-Compact.json
 create mode 100644 testdata/client-side-encryption/legacy/fle2v2-Compact.yml
 create mode 100644 testdata/client-side-encryption/legacy/fle2v2-CreateCollection-OldServer.json
 create mode 100644 testdata/client-side-encryption/legacy/fle2v2-CreateCollection-OldServer.yml
 create mode 100644 testdata/client-side-encryption/legacy/fle2v2-CreateCollection.json
 create mode 100644 testdata/client-side-encryption/legacy/fle2v2-CreateCollection.yml
 create mode 100644 testdata/client-side-encryption/legacy/fle2v2-DecryptExistingData.json
 create mode 100644 testdata/client-side-encryption/legacy/fle2v2-DecryptExistingData.yml
 create mode 100644 testdata/client-side-encryption/legacy/fle2v2-Delete.json
 create mode 100644 testdata/client-side-encryption/legacy/fle2v2-Delete.yml
 create mode 100644 testdata/client-side-encryption/legacy/fle2v2-EncryptedFields-vs-EncryptedFieldsMap.json
 create mode 100644 testdata/client-side-encryption/legacy/fle2v2-EncryptedFields-vs-EncryptedFieldsMap.yml
 create mode 100644 testdata/client-side-encryption/legacy/fle2v2-EncryptedFields-vs-jsonSchema.json
 create mode 100644 testdata/client-side-encryption/legacy/fle2v2-EncryptedFields-vs-jsonSchema.yml
 create mode 100644 testdata/client-side-encryption/legacy/fle2v2-EncryptedFieldsMap-defaults.json
 create mode 100644 testdata/client-side-encryption/legacy/fle2v2-EncryptedFieldsMap-defaults.yml
 create mode 100644 testdata/client-side-encryption/legacy/fle2v2-FindOneAndUpdate.json
 create mode 100644 testdata/client-side-encryption/legacy/fle2v2-FindOneAndUpdate.yml
 create mode 100644 testdata/client-side-encryption/legacy/fle2v2-InsertFind-Indexed.json
 create mode 100644 testdata/client-side-encryption/legacy/fle2v2-InsertFind-Indexed.yml
 create mode 100644 testdata/client-side-encryption/legacy/fle2v2-InsertFind-Unindexed.json
 create mode 100644 testdata/client-side-encryption/legacy/fle2v2-InsertFind-Unindexed.yml
 create mode 100644 testdata/client-side-encryption/legacy/fle2v2-MissingKey.json
 create mode 100644 testdata/client-side-encryption/legacy/fle2v2-MissingKey.yml
 create mode 100644 testdata/client-side-encryption/legacy/fle2v2-NoEncryption.json
 create mode 100644 testdata/client-side-encryption/legacy/fle2v2-NoEncryption.yml
 create mode 100644 testdata/client-side-encryption/legacy/fle2v2-Rangev2-Compact.json
 create mode 100644 testdata/client-side-encryption/legacy/fle2v2-Rangev2-Compact.yml
 create mode 100644 testdata/client-side-encryption/legacy/fle2v2-Rangev2-Date-Aggregate.json
 create mode 100644 testdata/client-side-encryption/legacy/fle2v2-Rangev2-Date-Aggregate.yml
 create mode 100644 testdata/client-side-encryption/legacy/fle2v2-Rangev2-Date-Correctness.json
 create mode 100644 testdata/client-side-encryption/legacy/fle2v2-Rangev2-Date-Correctness.yml
 create mode 100644 testdata/client-side-encryption/legacy/fle2v2-Rangev2-Date-Delete.json
 create mode 100644 testdata/client-side-encryption/legacy/fle2v2-Rangev2-Date-Delete.yml
 create mode 100644 testdata/client-side-encryption/legacy/fle2v2-Rangev2-Date-FindOneAndUpdate.json
 create mode 100644 testdata/client-side-encryption/legacy/fle2v2-Rangev2-Date-FindOneAndUpdate.yml
 create mode 100644 testdata/client-side-encryption/legacy/fle2v2-Rangev2-Date-InsertFind.json
 create mode 100644 testdata/client-side-encryption/legacy/fle2v2-Rangev2-Date-InsertFind.yml
 create mode 100644 testdata/client-side-encryption/legacy/fle2v2-Rangev2-Date-Update.json
 create mode 100644 testdata/client-side-encryption/legacy/fle2v2-Rangev2-Date-Update.yml
 create mode 100644 testdata/client-side-encryption/legacy/fle2v2-Rangev2-Decimal-Aggregate.json
 create mode 100644 testdata/client-side-encryption/legacy/fle2v2-Rangev2-Decimal-Aggregate.yml
 create mode 100644 testdata/client-side-encryption/legacy/fle2v2-Rangev2-Decimal-Correctness.json
 create mode 100644 testdata/client-side-encryption/legacy/fle2v2-Rangev2-Decimal-Correctness.yml
 create mode 100644 testdata/client-side-encryption/legacy/fle2v2-Rangev2-Decimal-Delete.json
 create mode 100644 testdata/client-side-encryption/legacy/fle2v2-Rangev2-Decimal-Delete.yml
 create mode 100644 testdata/client-side-encryption/legacy/fle2v2-Rangev2-Decimal-FindOneAndUpdate.json
 create mode 100644 testdata/client-side-encryption/legacy/fle2v2-Rangev2-Decimal-FindOneAndUpdate.yml
 create mode 100644 testdata/client-side-encryption/legacy/fle2v2-Rangev2-Decimal-InsertFind.json
 create mode 100644 testdata/client-side-encryption/legacy/fle2v2-Rangev2-Decimal-InsertFind.yml
 create mode 100644 testdata/client-side-encryption/legacy/fle2v2-Rangev2-Decimal-Update.json
 create mode 100644 testdata/client-side-encryption/legacy/fle2v2-Rangev2-Decimal-Update.yml
 create mode 100644 testdata/client-side-encryption/legacy/fle2v2-Rangev2-DecimalPrecision-Aggregate.json
 create mode 100644 testdata/client-side-encryption/legacy/fle2v2-Rangev2-DecimalPrecision-Aggregate.yml
 create mode 100644 testdata/client-side-encryption/legacy/fle2v2-Rangev2-DecimalPrecision-Correctness.json
 create mode 100644 testdata/client-side-encryption/legacy/fle2v2-Rangev2-DecimalPrecision-Correctness.yml
 create mode 100644 testdata/client-side-encryption/legacy/fle2v2-Rangev2-DecimalPrecision-Delete.json
 create mode 100644 testdata/client-side-encryption/legacy/fle2v2-Rangev2-DecimalPrecision-Delete.yml
 create mode 100644 testdata/client-side-encryption/legacy/fle2v2-Rangev2-DecimalPrecision-FindOneAndUpdate.json
 create mode 100644 testdata/client-side-encryption/legacy/fle2v2-Rangev2-DecimalPrecision-FindOneAndUpdate.yml
 create mode 100644 testdata/client-side-encryption/legacy/fle2v2-Rangev2-DecimalPrecision-InsertFind.json
 create mode 100644 testdata/client-side-encryption/legacy/fle2v2-Rangev2-DecimalPrecision-InsertFind.yml
 create mode 100644 testdata/client-side-encryption/legacy/fle2v2-Rangev2-DecimalPrecision-Update.json
 create mode 100644 testdata/client-side-encryption/legacy/fle2v2-Rangev2-DecimalPrecision-Update.yml
 create mode 100644 testdata/client-side-encryption/legacy/fle2v2-Rangev2-Defaults.json
 create mode 100644 testdata/client-side-encryption/legacy/fle2v2-Rangev2-Defaults.yml
 create mode 100644 testdata/client-side-encryption/legacy/fle2v2-Rangev2-Double-Aggregate.json
 create mode 100644 testdata/client-side-encryption/legacy/fle2v2-Rangev2-Double-Aggregate.yml
 create mode 100644 testdata/client-side-encryption/legacy/fle2v2-Rangev2-Double-Correctness.json
 create mode 100644 testdata/client-side-encryption/legacy/fle2v2-Rangev2-Double-Correctness.yml
 create mode 100644 testdata/client-side-encryption/legacy/fle2v2-Rangev2-Double-Delete.json
 create mode 100644 testdata/client-side-encryption/legacy/fle2v2-Rangev2-Double-Delete.yml
 create mode 100644 testdata/client-side-encryption/legacy/fle2v2-Rangev2-Double-FindOneAndUpdate.json
 create mode 100644 testdata/client-side-encryption/legacy/fle2v2-Rangev2-Double-FindOneAndUpdate.yml
 create mode 100644 testdata/client-side-encryption/legacy/fle2v2-Rangev2-Double-InsertFind.json
 create mode 100644 testdata/client-side-encryption/legacy/fle2v2-Rangev2-Double-InsertFind.yml
 create mode 100644 testdata/client-side-encryption/legacy/fle2v2-Rangev2-Double-Update.json
 create mode 100644 testdata/client-side-encryption/legacy/fle2v2-Rangev2-Double-Update.yml
 create mode 100644 testdata/client-side-encryption/legacy/fle2v2-Rangev2-DoublePrecision-Aggregate.json
 create mode 100644 testdata/client-side-encryption/legacy/fle2v2-Rangev2-DoublePrecision-Aggregate.yml
 create mode 100644 testdata/client-side-encryption/legacy/fle2v2-Rangev2-DoublePrecision-Correctness.json
 create mode 100644 testdata/client-side-encryption/legacy/fle2v2-Rangev2-DoublePrecision-Correctness.yml
 create mode 100644 testdata/client-side-encryption/legacy/fle2v2-Rangev2-DoublePrecision-Delete.json
 create mode 100644 testdata/client-side-encryption/legacy/fle2v2-Rangev2-DoublePrecision-Delete.yml
 create mode 100644 testdata/client-side-encryption/legacy/fle2v2-Rangev2-DoublePrecision-FindOneAndUpdate.json
 create mode 100644 testdata/client-side-encryption/legacy/fle2v2-Rangev2-DoublePrecision-FindOneAndUpdate.yml
 create mode 100644 testdata/client-side-encryption/legacy/fle2v2-Rangev2-DoublePrecision-InsertFind.json
 create mode 100644 testdata/client-side-encryption/legacy/fle2v2-Rangev2-DoublePrecision-InsertFind.yml
 create mode 100644 testdata/client-side-encryption/legacy/fle2v2-Rangev2-DoublePrecision-Update.json
 create mode 100644 testdata/client-side-encryption/legacy/fle2v2-Rangev2-DoublePrecision-Update.yml
 create mode 100644 testdata/client-side-encryption/legacy/fle2v2-Rangev2-Int-Aggregate.json
 create mode 100644 testdata/client-side-encryption/legacy/fle2v2-Rangev2-Int-Aggregate.yml
 create mode 100644 testdata/client-side-encryption/legacy/fle2v2-Rangev2-Int-Correctness.json
 create mode 100644 testdata/client-side-encryption/legacy/fle2v2-Rangev2-Int-Correctness.yml
 create mode 100644 testdata/client-side-encryption/legacy/fle2v2-Rangev2-Int-Delete.json
 create mode 100644 testdata/client-side-encryption/legacy/fle2v2-Rangev2-Int-Delete.yml
 create mode 100644 testdata/client-side-encryption/legacy/fle2v2-Rangev2-Int-FindOneAndUpdate.json
 create mode 100644 testdata/client-side-encryption/legacy/fle2v2-Rangev2-Int-FindOneAndUpdate.yml
 create mode 100644 testdata/client-side-encryption/legacy/fle2v2-Rangev2-Int-InsertFind.json
 create mode 100644 testdata/client-side-encryption/legacy/fle2v2-Rangev2-Int-InsertFind.yml
 create mode 100644 testdata/client-side-encryption/legacy/fle2v2-Rangev2-Int-Update.json
 create mode 100644 testdata/client-side-encryption/legacy/fle2v2-Rangev2-Int-Update.yml
 create mode 100644 testdata/client-side-encryption/legacy/fle2v2-Rangev2-Long-Aggregate.json
 create mode 100644 testdata/client-side-encryption/legacy/fle2v2-Rangev2-Long-Aggregate.yml
 create mode 100644 testdata/client-side-encryption/legacy/fle2v2-Rangev2-Long-Correctness.json
 create mode 100644 testdata/client-side-encryption/legacy/fle2v2-Rangev2-Long-Correctness.yml
 create mode 100644 testdata/client-side-encryption/legacy/fle2v2-Rangev2-Long-Delete.json
 create mode 100644 testdata/client-side-encryption/legacy/fle2v2-Rangev2-Long-Delete.yml
 create mode 100644 testdata/client-side-encryption/legacy/fle2v2-Rangev2-Long-FindOneAndUpdate.json
 create mode 100644 testdata/client-side-encryption/legacy/fle2v2-Rangev2-Long-FindOneAndUpdate.yml
 create mode 100644 testdata/client-side-encryption/legacy/fle2v2-Rangev2-Long-InsertFind.json
 create mode 100644 testdata/client-side-encryption/legacy/fle2v2-Rangev2-Long-InsertFind.yml
 create mode 100644 testdata/client-side-encryption/legacy/fle2v2-Rangev2-Long-Update.json
 create mode 100644 testdata/client-side-encryption/legacy/fle2v2-Rangev2-Long-Update.yml
 create mode 100644 testdata/client-side-encryption/legacy/fle2v2-Rangev2-WrongType.json
 create mode 100644 testdata/client-side-encryption/legacy/fle2v2-Rangev2-WrongType.yml
 create mode 100644 testdata/client-side-encryption/legacy/fle2v2-Update.json
 create mode 100644 testdata/client-side-encryption/legacy/fle2v2-Update.yml
 create mode 100644 testdata/client-side-encryption/legacy/fle2v2-validatorAndPartialFieldExpression.json
 create mode 100644 testdata/client-side-encryption/legacy/fle2v2-validatorAndPartialFieldExpression.yml
 create mode 100644 testdata/client-side-encryption/legacy/gcpKMS.json
 create mode 100644 testdata/client-side-encryption/legacy/gcpKMS.yml
 create mode 100644 testdata/client-side-encryption/legacy/getMore.json
 create mode 100644 testdata/client-side-encryption/legacy/getMore.yml
 create mode 100644 testdata/client-side-encryption/legacy/insert.json
 create mode 100644 testdata/client-side-encryption/legacy/insert.yml
 create mode 100644 testdata/client-side-encryption/legacy/keyAltName.json
 create mode 100644 testdata/client-side-encryption/legacy/keyAltName.yml
 create mode 100644 testdata/client-side-encryption/legacy/keyCache.json
 create mode 100644 testdata/client-side-encryption/legacy/keyCache.yml
 create mode 100644 testdata/client-side-encryption/legacy/kmipKMS.json
 create mode 100644 testdata/client-side-encryption/legacy/kmipKMS.yml
 create mode 100644 testdata/client-side-encryption/legacy/localKMS.json
 create mode 100644 testdata/client-side-encryption/legacy/localKMS.yml
 create mode 100644 testdata/client-side-encryption/legacy/localSchema.json
 create mode 100644 testdata/client-side-encryption/legacy/localSchema.yml
 create mode 100644 testdata/client-side-encryption/legacy/malformedCiphertext.json
 create mode 100644 testdata/client-side-encryption/legacy/malformedCiphertext.yml
 create mode 100644 testdata/client-side-encryption/legacy/maxWireVersion.json
 create mode 100644 testdata/client-side-encryption/legacy/maxWireVersion.yml
 create mode 100644 testdata/client-side-encryption/legacy/missingKey.json
 create mode 100644 testdata/client-side-encryption/legacy/missingKey.yml
 create mode 100644 testdata/client-side-encryption/legacy/noSchema.json
 create mode 100644 testdata/client-side-encryption/legacy/noSchema.yml
 create mode 100644 testdata/client-side-encryption/legacy/replaceOne.json
 create mode 100644 testdata/client-side-encryption/legacy/replaceOne.yml
 create mode 100644 testdata/client-side-encryption/legacy/types.json
 create mode 100644 testdata/client-side-encryption/legacy/types.yml
 create mode 100644 testdata/client-side-encryption/legacy/unsupportedCommand.json
 create mode 100644 testdata/client-side-encryption/legacy/unsupportedCommand.yml
 create mode 100644 testdata/client-side-encryption/legacy/updateMany.json
 create mode 100644 testdata/client-side-encryption/legacy/updateMany.yml
 create mode 100644 testdata/client-side-encryption/legacy/updateOne.json
 create mode 100644 testdata/client-side-encryption/legacy/updateOne.yml
 create mode 100644 testdata/client-side-encryption/legacy/validatorAndPartialFieldExpression.json
 create mode 100644 testdata/client-side-encryption/legacy/validatorAndPartialFieldExpression.yml
 create mode 100644 testdata/client-side-encryption/unified/addKeyAltName.json
 create mode 100644 testdata/client-side-encryption/unified/addKeyAltName.yml
 create mode 100644 testdata/client-side-encryption/unified/createDataKey-kms_providers-invalid.json
 create mode 100644 testdata/client-side-encryption/unified/createDataKey-kms_providers-invalid.yml
 create mode 100644 testdata/client-side-encryption/unified/createDataKey.json
 create mode 100644 testdata/client-side-encryption/unified/createDataKey.yml
 create mode 100644 testdata/client-side-encryption/unified/deleteKey.json
 create mode 100644 testdata/client-side-encryption/unified/deleteKey.yml
 create mode 100644 testdata/client-side-encryption/unified/getKey.json
 create mode 100644 testdata/client-side-encryption/unified/getKey.yml
 create mode 100644 testdata/client-side-encryption/unified/getKeyByAltName.json
 create mode 100644 testdata/client-side-encryption/unified/getKeyByAltName.yml
 create mode 100644 testdata/client-side-encryption/unified/getKeys.json
 create mode 100644 testdata/client-side-encryption/unified/getKeys.yml
 create mode 100644 testdata/client-side-encryption/unified/keyCache.json
 create mode 100644 testdata/client-side-encryption/unified/keyCache.yml
 create mode 100644 testdata/client-side-encryption/unified/removeKeyAltName.json
 create mode 100644 testdata/client-side-encryption/unified/removeKeyAltName.yml
 create mode 100644 testdata/client-side-encryption/unified/rewrapManyDataKey-decrypt_failure.json
 create mode 100644 testdata/client-side-encryption/unified/rewrapManyDataKey-decrypt_failure.yml
 create mode 100644 testdata/client-side-encryption/unified/rewrapManyDataKey-encrypt_failure.json
 create mode 100644 testdata/client-side-encryption/unified/rewrapManyDataKey-encrypt_failure.yml
 create mode 100644 testdata/client-side-encryption/unified/rewrapManyDataKey.json
 create mode 100644 testdata/client-side-encryption/unified/rewrapManyDataKey.yml
 create mode 100644 testdata/client-side-operations-timeout/bulkWrite.json
 create mode 100644 testdata/client-side-operations-timeout/bulkWrite.yml
 create mode 100644 testdata/client-side-operations-timeout/command-execution.json
 create mode 100644 testdata/client-side-operations-timeout/command-execution.yml
 create mode 100644 testdata/client-side-operations-timeout/error-transformations.json
 create mode 100644 testdata/client-side-operations-timeout/error-transformations.yml
 create mode 100644 testdata/client-side-operations-timeout/global-timeoutMS.json
 create mode 100644 testdata/client-side-operations-timeout/global-timeoutMS.yml
 create mode 100644 testdata/client-side-operations-timeout/gridfs-advanced.json
 create mode 100644 testdata/client-side-operations-timeout/gridfs-advanced.yml
 create mode 100644 testdata/client-side-operations-timeout/gridfs-delete.json
 create mode 100644 testdata/client-side-operations-timeout/gridfs-delete.yml
 create mode 100644 testdata/client-side-operations-timeout/gridfs-find.json
 create mode 100644 testdata/client-side-operations-timeout/gridfs-find.yml
 create mode 100644 testdata/client-side-operations-timeout/override-operation-timeoutMS.json
 create mode 100644 testdata/client-side-operations-timeout/override-operation-timeoutMS.yml
 create mode 100644 testdata/client-side-operations-timeout/retryability-legacy-timeouts.json
 create mode 100644 testdata/client-side-operations-timeout/retryability-legacy-timeouts.yml
 create mode 100644 testdata/client-side-operations-timeout/retryability-timeoutMS.json
 create mode 100644 testdata/client-side-operations-timeout/retryability-timeoutMS.yml
 create mode 100644 testdata/client-side-operations-timeout/runCursorCommand.json
 create mode 100644 testdata/client-side-operations-timeout/runCursorCommand.yml
 create mode 100644 testdata/code.json.gz
 create mode 100644 testdata/collection-management/clustered-indexes.json
 create mode 100644 testdata/collection-management/clustered-indexes.yml
 create mode 100644 testdata/collection-management/createCollection-pre_and_post_images.json
 create mode 100644 testdata/collection-management/createCollection-pre_and_post_images.yml
 create mode 100644 testdata/collection-management/modifyCollection-errorResponse.json
 create mode 100644 testdata/collection-management/modifyCollection-errorResponse.yml
 create mode 100644 testdata/collection-management/modifyCollection-pre_and_post_images.json
 create mode 100644 testdata/collection-management/modifyCollection-pre_and_post_images.yml
 create mode 100644 testdata/collection-management/timeseries-collection.json
 create mode 100644 testdata/collection-management/timeseries-collection.yml
 create mode 100644 testdata/command-monitoring/bulkWrite.json
 create mode 100644 testdata/command-monitoring/bulkWrite.yml
 create mode 100644 testdata/command-monitoring/command.json
 create mode 100644 testdata/command-monitoring/command.yml
 create mode 100644 testdata/command-monitoring/deleteMany.json
 create mode 100644 testdata/command-monitoring/deleteMany.yml
 create mode 100644 testdata/command-monitoring/deleteOne.json
 create mode 100644 testdata/command-monitoring/deleteOne.yml
 create mode 100644 testdata/command-monitoring/find.json
 create mode 100644 testdata/command-monitoring/find.yml
 create mode 100644 testdata/command-monitoring/insertMany.json
 create mode 100644 testdata/command-monitoring/insertMany.yml
 create mode 100644 testdata/command-monitoring/insertOne.json
 create mode 100644 testdata/command-monitoring/insertOne.yml
 create mode 100644 testdata/command-monitoring/logging/command.json
 create mode 100644 testdata/command-monitoring/logging/command.yml
 create mode 100644 testdata/command-monitoring/logging/driver-connection-id.json
 create mode 100644 testdata/command-monitoring/logging/driver-connection-id.yml
 create mode 100644 testdata/command-monitoring/logging/no-handshake-messages.json
 create mode 100644 testdata/command-monitoring/logging/no-handshake-messages.yml
 create mode 100644 testdata/command-monitoring/logging/no-heartbeat-messages.json
 create mode 100644 testdata/command-monitoring/logging/no-heartbeat-messages.yml
 create mode 100644 testdata/command-monitoring/logging/operation-id.json
 create mode 100644 testdata/command-monitoring/logging/operation-id.yml
 create mode 100644 testdata/command-monitoring/logging/pre-42-server-connection-id.json
 create mode 100644 testdata/command-monitoring/logging/pre-42-server-connection-id.yml
 create mode 100644 testdata/command-monitoring/logging/redacted-commands.json
 create mode 100644 testdata/command-monitoring/logging/redacted-commands.yml
 create mode 100644 testdata/command-monitoring/logging/server-connection-id.json
 create mode 100644 testdata/command-monitoring/logging/server-connection-id.yml
 create mode 100644 testdata/command-monitoring/logging/service-id.json
 create mode 100644 testdata/command-monitoring/logging/service-id.yml
 create mode 100644 testdata/command-monitoring/logging/unacknowledged-write.json
 create mode 100644 testdata/command-monitoring/logging/unacknowledged-write.yml
 create mode 100644 testdata/command-monitoring/pre-42-server-connection-id.json
 create mode 100644 testdata/command-monitoring/pre-42-server-connection-id.yml
 create mode 100644 testdata/command-monitoring/redacted-commands.json
 create mode 100644 testdata/command-monitoring/redacted-commands.yml
 create mode 100644 testdata/command-monitoring/server-connection-id.json
 create mode 100644 testdata/command-monitoring/server-connection-id.yml
 create mode 100644 testdata/command-monitoring/unacknowledged-client-bulkWrite.json
 create mode 100644 testdata/command-monitoring/unacknowledged-client-bulkWrite.yml
 create mode 100644 testdata/command-monitoring/unacknowledgedBulkWrite.json
 create mode 100644 testdata/command-monitoring/unacknowledgedBulkWrite.yml
 create mode 100644 testdata/command-monitoring/updateMany.json
 create mode 100644 testdata/command-monitoring/updateMany.yml
 create mode 100644 testdata/command-monitoring/updateOne.json
 create mode 100644 testdata/command-monitoring/updateOne.yml
 create mode 100644 testdata/connection-monitoring-and-pooling/connection-must-have-id.json
 create mode 100644 testdata/connection-monitoring-and-pooling/connection-must-have-id.yml
 create mode 100644 testdata/connection-monitoring-and-pooling/connection-must-order-ids.json
 create mode 100644 testdata/connection-monitoring-and-pooling/connection-must-order-ids.yml
 create mode 100644 testdata/connection-monitoring-and-pooling/logging/connection-logging.json
 create mode 100644 testdata/connection-monitoring-and-pooling/logging/connection-logging.yml
 create mode 100644 testdata/connection-monitoring-and-pooling/logging/connection-pool-options.json
 create mode 100644 testdata/connection-monitoring-and-pooling/logging/connection-pool-options.yml
 create mode 100644 testdata/connection-monitoring-and-pooling/pool-checkin-destroy-closed.json
 create mode 100644 testdata/connection-monitoring-and-pooling/pool-checkin-destroy-closed.yml
 create mode 100644 testdata/connection-monitoring-and-pooling/pool-checkin-destroy-stale.json
 create mode 100644 testdata/connection-monitoring-and-pooling/pool-checkin-destroy-stale.yml
 create mode 100644 testdata/connection-monitoring-and-pooling/pool-checkin-make-available.json
 create mode 100644 testdata/connection-monitoring-and-pooling/pool-checkin-make-available.yml
 create mode 100644 testdata/connection-monitoring-and-pooling/pool-checkin.json
 create mode 100644 testdata/connection-monitoring-and-pooling/pool-checkin.yml
 create mode 100644 testdata/connection-monitoring-and-pooling/pool-checkout-connection.json
 create mode 100644 testdata/connection-monitoring-and-pooling/pool-checkout-connection.yml
 create mode 100644 testdata/connection-monitoring-and-pooling/pool-checkout-custom-maxConnecting-is-enforced.json
 create mode 100644 testdata/connection-monitoring-and-pooling/pool-checkout-custom-maxConnecting-is-enforced.yml
 create mode 100644 testdata/connection-monitoring-and-pooling/pool-checkout-error-closed.json
 create mode 100644 testdata/connection-monitoring-and-pooling/pool-checkout-error-closed.yml
 create mode 100644 testdata/connection-monitoring-and-pooling/pool-checkout-maxConnecting-is-enforced.json
 create mode 100644 testdata/connection-monitoring-and-pooling/pool-checkout-maxConnecting-is-enforced.yml
 create mode 100644 testdata/connection-monitoring-and-pooling/pool-checkout-maxConnecting-timeout.json
 create mode 100644 testdata/connection-monitoring-and-pooling/pool-checkout-maxConnecting-timeout.yml
 create mode 100644 testdata/connection-monitoring-and-pooling/pool-checkout-minPoolSize-connection-maxConnecting.json
 create mode 100644 testdata/connection-monitoring-and-pooling/pool-checkout-minPoolSize-connection-maxConnecting.yml
 create mode 100644 testdata/connection-monitoring-and-pooling/pool-checkout-multiple.json
 create mode 100644 testdata/connection-monitoring-and-pooling/pool-checkout-multiple.yml
 create mode 100644 testdata/connection-monitoring-and-pooling/pool-checkout-no-idle.json
 create mode 100644 testdata/connection-monitoring-and-pooling/pool-checkout-no-idle.yml
 create mode 100644 testdata/connection-monitoring-and-pooling/pool-checkout-no-stale.json
 create mode 100644 testdata/connection-monitoring-and-pooling/pool-checkout-no-stale.yml
 create mode 100644 testdata/connection-monitoring-and-pooling/pool-checkout-returned-connection-maxConnecting.json
 create mode 100644 testdata/connection-monitoring-and-pooling/pool-checkout-returned-connection-maxConnecting.yml
 create mode 100644 testdata/connection-monitoring-and-pooling/pool-clear-clears-waitqueue.json
 create mode 100644 testdata/connection-monitoring-and-pooling/pool-clear-clears-waitqueue.yml
 create mode 100644 testdata/connection-monitoring-and-pooling/pool-clear-interrupting-pending-connections.json
 create mode 100644 testdata/connection-monitoring-and-pooling/pool-clear-interrupting-pending-connections.yml
 create mode 100644 testdata/connection-monitoring-and-pooling/pool-clear-min-size.json
 create mode 100644 testdata/connection-monitoring-and-pooling/pool-clear-min-size.yml
 create mode 100644 testdata/connection-monitoring-and-pooling/pool-clear-paused.json
 create mode 100644 testdata/connection-monitoring-and-pooling/pool-clear-paused.yml
 create mode 100644 testdata/connection-monitoring-and-pooling/pool-clear-ready.json
 create mode 100644 testdata/connection-monitoring-and-pooling/pool-clear-ready.yml
 create mode 100644 testdata/connection-monitoring-and-pooling/pool-clear-schedule-run-interruptInUseConnections-false.json
 create mode 100644 testdata/connection-monitoring-and-pooling/pool-clear-schedule-run-interruptInUseConnections-false.yml
 create mode 100644 testdata/connection-monitoring-and-pooling/pool-close-destroy-conns.json
 create mode 100644 testdata/connection-monitoring-and-pooling/pool-close-destroy-conns.yml
 create mode 100644 testdata/connection-monitoring-and-pooling/pool-close.json
 create mode 100644 testdata/connection-monitoring-and-pooling/pool-close.yml
 create mode 100644 testdata/connection-monitoring-and-pooling/pool-create-max-size.json
 create mode 100644 testdata/connection-monitoring-and-pooling/pool-create-max-size.yml
 create mode 100644 testdata/connection-monitoring-and-pooling/pool-create-min-size-error.json
 create mode 100644 testdata/connection-monitoring-and-pooling/pool-create-min-size-error.yml
 create mode 100644 testdata/connection-monitoring-and-pooling/pool-create-min-size.json
 create mode 100644 testdata/connection-monitoring-and-pooling/pool-create-min-size.yml
 create mode 100644 testdata/connection-monitoring-and-pooling/pool-create-with-options.json
 create mode 100644 testdata/connection-monitoring-and-pooling/pool-create-with-options.yml
 create mode 100644 testdata/connection-monitoring-and-pooling/pool-create.json
 create mode 100644 testdata/connection-monitoring-and-pooling/pool-create.yml
 create mode 100644 testdata/connection-monitoring-and-pooling/pool-ready-ready.json
 create mode 100644 testdata/connection-monitoring-and-pooling/pool-ready-ready.yml
 create mode 100644 testdata/connection-monitoring-and-pooling/pool-ready.json
 create mode 100644 testdata/connection-monitoring-and-pooling/pool-ready.yml
 create mode 100644 testdata/connection-monitoring-and-pooling/wait-queue-fairness.json
 create mode 100644 testdata/connection-monitoring-and-pooling/wait-queue-fairness.yml
 create mode 100644 testdata/connection-monitoring-and-pooling/wait-queue-timeout.json
 create mode 100644 testdata/connection-monitoring-and-pooling/wait-queue-timeout.yml
 create mode 100644 testdata/connection-string/invalid-uris.json
 create mode 100644 testdata/connection-string/invalid-uris.yml
 create mode 100644 testdata/connection-string/valid-auth.json
 create mode 100644 testdata/connection-string/valid-auth.yml
 create mode 100644 testdata/connection-string/valid-db-with-dotted-name.json
 create mode 100644 testdata/connection-string/valid-db-with-dotted-name.yml
 create mode 100644 testdata/connection-string/valid-host_identifiers.json
 create mode 100644 testdata/connection-string/valid-host_identifiers.yml
 create mode 100644 testdata/connection-string/valid-options.json
 create mode 100644 testdata/connection-string/valid-options.yml
 create mode 100644 testdata/connection-string/valid-unix_socket-absolute.json
 create mode 100644 testdata/connection-string/valid-unix_socket-absolute.yml
 create mode 100644 testdata/connection-string/valid-unix_socket-relative.json
 create mode 100644 testdata/connection-string/valid-unix_socket-relative.yml
 create mode 100644 testdata/connection-string/valid-warnings.json
 create mode 100644 testdata/connection-string/valid-warnings.yml
 create mode 100644 testdata/convenient-transactions/callback-aborts.json
 create mode 100644 testdata/convenient-transactions/callback-aborts.yml
 create mode 100644 testdata/convenient-transactions/callback-commits.json
 create mode 100644 testdata/convenient-transactions/callback-commits.yml
 create mode 100644 testdata/convenient-transactions/callback-retry.json
 create mode 100644 testdata/convenient-transactions/callback-retry.yml
 create mode 100644 testdata/convenient-transactions/commit-retry.json
 create mode 100644 testdata/convenient-transactions/commit-retry.yml
 create mode 100644 testdata/convenient-transactions/commit-transienttransactionerror-4.2.json
 create mode 100644 testdata/convenient-transactions/commit-transienttransactionerror-4.2.yml
 create mode 100644 testdata/convenient-transactions/commit-transienttransactionerror.json
 create mode 100644 testdata/convenient-transactions/commit-transienttransactionerror.yml
 create mode 100644 testdata/convenient-transactions/commit-writeconcernerror.json
 create mode 100644 testdata/convenient-transactions/commit-writeconcernerror.yml
 create mode 100644 testdata/convenient-transactions/commit.json
 create mode 100644 testdata/convenient-transactions/commit.yml
 create mode 100644 testdata/convenient-transactions/transaction-options.json
 create mode 100644 testdata/convenient-transactions/transaction-options.yml
 create mode 100644 testdata/crud/unified/aggregate-allowdiskuse.json
 create mode 100644 testdata/crud/unified/aggregate-allowdiskuse.yml
 create mode 100644 testdata/crud/unified/aggregate-let.json
 create mode 100644 testdata/crud/unified/aggregate-let.yml
 create mode 100644 testdata/crud/unified/aggregate-merge-errorResponse.json
 create mode 100644 testdata/crud/unified/aggregate-merge-errorResponse.yml
 create mode 100644 testdata/crud/unified/aggregate-merge.json
 create mode 100644 testdata/crud/unified/aggregate-merge.yml
 create mode 100644 testdata/crud/unified/aggregate-out-readConcern.json
 create mode 100644 testdata/crud/unified/aggregate-out-readConcern.yml
 create mode 100644 testdata/crud/unified/aggregate-write-readPreference.json
 create mode 100644 testdata/crud/unified/aggregate-write-readPreference.yml
 create mode 100644 testdata/crud/unified/aggregate.json
 create mode 100644 testdata/crud/unified/aggregate.yml
 create mode 100644 testdata/crud/unified/bulkWrite-arrayFilters-clientError.json
 create mode 100644 testdata/crud/unified/bulkWrite-arrayFilters-clientError.yml
 create mode 100644 testdata/crud/unified/bulkWrite-arrayFilters.json
 create mode 100644 testdata/crud/unified/bulkWrite-arrayFilters.yml
 create mode 100644 testdata/crud/unified/bulkWrite-comment.json
 create mode 100644 testdata/crud/unified/bulkWrite-comment.yml
 create mode 100644 testdata/crud/unified/bulkWrite-delete-hint-clientError.json
 create mode 100644 testdata/crud/unified/bulkWrite-delete-hint-clientError.yml
 create mode 100644 testdata/crud/unified/bulkWrite-delete-hint-serverError.json
 create mode 100644 testdata/crud/unified/bulkWrite-delete-hint-serverError.yml
 create mode 100644 testdata/crud/unified/bulkWrite-delete-hint.json
 create mode 100644 testdata/crud/unified/bulkWrite-delete-hint.yml
 create mode 100644 testdata/crud/unified/bulkWrite-deleteMany-let.json
 create mode 100644 testdata/crud/unified/bulkWrite-deleteMany-let.yml
 create mode 100644 testdata/crud/unified/bulkWrite-deleteOne-let.json
 create mode 100644 testdata/crud/unified/bulkWrite-deleteOne-let.yml
 create mode 100644 testdata/crud/unified/bulkWrite-errorResponse.json
 create mode 100644 testdata/crud/unified/bulkWrite-errorResponse.yml
 create mode 100644 testdata/crud/unified/bulkWrite-insertOne-dots_and_dollars.json
 create mode 100644 testdata/crud/unified/bulkWrite-insertOne-dots_and_dollars.yml
 create mode 100644 testdata/crud/unified/bulkWrite-replaceOne-dots_and_dollars.json
 create mode 100644 testdata/crud/unified/bulkWrite-replaceOne-dots_and_dollars.yml
 create mode 100644 testdata/crud/unified/bulkWrite-replaceOne-let.json
 create mode 100644 testdata/crud/unified/bulkWrite-replaceOne-let.yml
 create mode 100644 testdata/crud/unified/bulkWrite-replaceOne-sort.json
 create mode 100644 testdata/crud/unified/bulkWrite-replaceOne-sort.yml
 create mode 100644 testdata/crud/unified/bulkWrite-update-hint-clientError.json
 create mode 100644 testdata/crud/unified/bulkWrite-update-hint-clientError.yml
 create mode 100644 testdata/crud/unified/bulkWrite-update-hint-serverError.json
 create mode 100644 testdata/crud/unified/bulkWrite-update-hint-serverError.yml
 create mode 100644 testdata/crud/unified/bulkWrite-update-hint.json
 create mode 100644 testdata/crud/unified/bulkWrite-update-hint.yml
 create mode 100644 testdata/crud/unified/bulkWrite-update-validation.json
 create mode 100644 testdata/crud/unified/bulkWrite-update-validation.yml
 create mode 100644 testdata/crud/unified/bulkWrite-updateMany-dots_and_dollars.json
 create mode 100644 testdata/crud/unified/bulkWrite-updateMany-dots_and_dollars.yml
 create mode 100644 testdata/crud/unified/bulkWrite-updateMany-let.json
 create mode 100644 testdata/crud/unified/bulkWrite-updateMany-let.yml
 create mode 100644 testdata/crud/unified/bulkWrite-updateOne-dots_and_dollars.json
 create mode 100644 testdata/crud/unified/bulkWrite-updateOne-dots_and_dollars.yml
 create mode 100644 testdata/crud/unified/bulkWrite-updateOne-let.json
 create mode 100644 testdata/crud/unified/bulkWrite-updateOne-let.yml
 create mode 100644 testdata/crud/unified/bulkWrite-updateOne-sort.json
 create mode 100644 testdata/crud/unified/bulkWrite-updateOne-sort.yml
 create mode 100644 testdata/crud/unified/client-bulkWrite-delete-options.json
 create mode 100644 testdata/crud/unified/client-bulkWrite-delete-options.yml
 create mode 100644 testdata/crud/unified/client-bulkWrite-errorResponse.json
 create mode 100644 testdata/crud/unified/client-bulkWrite-errorResponse.yml
 create mode 100644 testdata/crud/unified/client-bulkWrite-errors.json
 create mode 100644 testdata/crud/unified/client-bulkWrite-errors.yml
 create mode 100644 testdata/crud/unified/client-bulkWrite-mixed-namespaces.json
 create mode 100644 testdata/crud/unified/client-bulkWrite-mixed-namespaces.yml
 create mode 100644 testdata/crud/unified/client-bulkWrite-options.json
 create mode 100644 testdata/crud/unified/client-bulkWrite-options.yml
 create mode 100644 testdata/crud/unified/client-bulkWrite-ordered.json
 create mode 100644 testdata/crud/unified/client-bulkWrite-ordered.yml
 create mode 100644 testdata/crud/unified/client-bulkWrite-partialResults.json
 create mode 100644 testdata/crud/unified/client-bulkWrite-partialResults.yml
 create mode 100644 testdata/crud/unified/client-bulkWrite-replaceOne-sort.json
 create mode 100644 testdata/crud/unified/client-bulkWrite-replaceOne-sort.yml
 create mode 100644 testdata/crud/unified/client-bulkWrite-results.json
 create mode 100644 testdata/crud/unified/client-bulkWrite-results.yml
 create mode 100644 testdata/crud/unified/client-bulkWrite-update-options.json
 create mode 100644 testdata/crud/unified/client-bulkWrite-update-options.yml
 create mode 100644 testdata/crud/unified/client-bulkWrite-update-pipeline.json
 create mode 100644 testdata/crud/unified/client-bulkWrite-update-pipeline.yml
 create mode 100644 testdata/crud/unified/client-bulkWrite-update-validation.json
 create mode 100644 testdata/crud/unified/client-bulkWrite-update-validation.yml
 create mode 100644 testdata/crud/unified/client-bulkWrite-updateOne-sort.json
 create mode 100644 testdata/crud/unified/client-bulkWrite-updateOne-sort.yml
 create mode 100644 testdata/crud/unified/countDocuments-comment.json
 create mode 100644 testdata/crud/unified/countDocuments-comment.yml
 create mode 100644 testdata/crud/unified/db-aggregate-write-readPreference.json
 create mode 100644 testdata/crud/unified/db-aggregate-write-readPreference.yml
 create mode 100644 testdata/crud/unified/db-aggregate.json
 create mode 100644 testdata/crud/unified/db-aggregate.yml
 create mode 100644 testdata/crud/unified/deleteMany-comment.json
 create mode 100644 testdata/crud/unified/deleteMany-comment.yml
 create mode 100644 testdata/crud/unified/deleteMany-hint-clientError.json
 create mode 100644 testdata/crud/unified/deleteMany-hint-clientError.yml
 create mode 100644 testdata/crud/unified/deleteMany-hint-serverError.json
 create mode 100644 testdata/crud/unified/deleteMany-hint-serverError.yml
 create mode 100644 testdata/crud/unified/deleteMany-hint.json
 create mode 100644 testdata/crud/unified/deleteMany-hint.yml
 create mode 100644 testdata/crud/unified/deleteMany-let.json
 create mode 100644 testdata/crud/unified/deleteMany-let.yml
 create mode 100644 testdata/crud/unified/deleteOne-comment.json
 create mode 100644 testdata/crud/unified/deleteOne-comment.yml
 create mode 100644 testdata/crud/unified/deleteOne-errorResponse.json
 create mode 100644 testdata/crud/unified/deleteOne-errorResponse.yml
 create mode 100644 testdata/crud/unified/deleteOne-hint-clientError.json
 create mode 100644 testdata/crud/unified/deleteOne-hint-clientError.yml
 create mode 100644 testdata/crud/unified/deleteOne-hint-serverError.json
 create mode 100644 testdata/crud/unified/deleteOne-hint-serverError.yml
 create mode 100644 testdata/crud/unified/deleteOne-hint.json
 create mode 100644 testdata/crud/unified/deleteOne-hint.yml
 create mode 100644 testdata/crud/unified/deleteOne-let.json
 create mode 100644 testdata/crud/unified/deleteOne-let.yml
 create mode 100644 testdata/crud/unified/distinct-comment.json
 create mode 100644 testdata/crud/unified/distinct-comment.yml
 create mode 100644 testdata/crud/unified/distinct-hint.json
 create mode 100644 testdata/crud/unified/distinct-hint.yml
 create mode 100644 testdata/crud/unified/estimatedDocumentCount-comment.json
 create mode 100644 testdata/crud/unified/estimatedDocumentCount-comment.yml
 create mode 100644 testdata/crud/unified/estimatedDocumentCount.json
 create mode 100644 testdata/crud/unified/estimatedDocumentCount.yml
 create mode 100644 testdata/crud/unified/find-allowdiskuse-clientError.json
 create mode 100644 testdata/crud/unified/find-allowdiskuse-clientError.yml
 create mode 100644 testdata/crud/unified/find-allowdiskuse-serverError.json
 create mode 100644 testdata/crud/unified/find-allowdiskuse-serverError.yml
 create mode 100644 testdata/crud/unified/find-allowdiskuse.json
 create mode 100644 testdata/crud/unified/find-allowdiskuse.yml
 create mode 100644 testdata/crud/unified/find-comment.json
 create mode 100644 testdata/crud/unified/find-comment.yml
 create mode 100644 testdata/crud/unified/find-let.json
 create mode 100644 testdata/crud/unified/find-let.yml
 create mode 100644 testdata/crud/unified/find.json
 create mode 100644 testdata/crud/unified/find.yml
 create mode 100644 testdata/crud/unified/findOneAndDelete-comment.json
 create mode 100644 testdata/crud/unified/findOneAndDelete-comment.yml
 create mode 100644 testdata/crud/unified/findOneAndDelete-hint-clientError.json
 create mode 100644 testdata/crud/unified/findOneAndDelete-hint-clientError.yml
 create mode 100644 testdata/crud/unified/findOneAndDelete-hint-serverError.json
 create mode 100644 testdata/crud/unified/findOneAndDelete-hint-serverError.yml
 create mode 100644 testdata/crud/unified/findOneAndDelete-hint.json
 create mode 100644 testdata/crud/unified/findOneAndDelete-hint.yml
 create mode 100644 testdata/crud/unified/findOneAndDelete-let.json
 create mode 100644 testdata/crud/unified/findOneAndDelete-let.yml
 create mode 100644 testdata/crud/unified/findOneAndReplace-comment.json
 create mode 100644 testdata/crud/unified/findOneAndReplace-comment.yml
 create mode 100644 testdata/crud/unified/findOneAndReplace-dots_and_dollars.json
 create mode 100644 testdata/crud/unified/findOneAndReplace-dots_and_dollars.yml
 create mode 100644 testdata/crud/unified/findOneAndReplace-hint-clientError.json
 create mode 100644 testdata/crud/unified/findOneAndReplace-hint-clientError.yml
 create mode 100644 testdata/crud/unified/findOneAndReplace-hint-serverError.json
 create mode 100644 testdata/crud/unified/findOneAndReplace-hint-serverError.yml
 create mode 100644 testdata/crud/unified/findOneAndReplace-hint.json
 create mode 100644 testdata/crud/unified/findOneAndReplace-hint.yml
 create mode 100644 testdata/crud/unified/findOneAndReplace-let.json
 create mode 100644 testdata/crud/unified/findOneAndReplace-let.yml
 create mode 100644 testdata/crud/unified/findOneAndUpdate-comment.json
 create mode 100644 testdata/crud/unified/findOneAndUpdate-comment.yml
 create mode 100644 testdata/crud/unified/findOneAndUpdate-dots_and_dollars.json
 create mode 100644 testdata/crud/unified/findOneAndUpdate-dots_and_dollars.yml
 create mode 100644 testdata/crud/unified/findOneAndUpdate-errorResponse.json
 create mode 100644 testdata/crud/unified/findOneAndUpdate-errorResponse.yml
 create mode 100644 testdata/crud/unified/findOneAndUpdate-hint-clientError.json
 create mode 100644 testdata/crud/unified/findOneAndUpdate-hint-clientError.yml
 create mode 100644 testdata/crud/unified/findOneAndUpdate-hint-serverError.json
 create mode 100644 testdata/crud/unified/findOneAndUpdate-hint-serverError.yml
 create mode 100644 testdata/crud/unified/findOneAndUpdate-hint.json
 create mode 100644 testdata/crud/unified/findOneAndUpdate-hint.yml
 create mode 100644 testdata/crud/unified/findOneAndUpdate-let.json
 create mode 100644 testdata/crud/unified/findOneAndUpdate-let.yml
 create mode 100644 testdata/crud/unified/insertMany-comment.json
 create mode 100644 testdata/crud/unified/insertMany-comment.yml
 create mode 100644 testdata/crud/unified/insertMany-dots_and_dollars.json
 create mode 100644 testdata/crud/unified/insertMany-dots_and_dollars.yml
 create mode 100644 testdata/crud/unified/insertOne-comment.json
 create mode 100644 testdata/crud/unified/insertOne-comment.yml
 create mode 100644 testdata/crud/unified/insertOne-dots_and_dollars.json
 create mode 100644 testdata/crud/unified/insertOne-dots_and_dollars.yml
 create mode 100644 testdata/crud/unified/insertOne-errorResponse.json
 create mode 100644 testdata/crud/unified/insertOne-errorResponse.yml
 create mode 100644 testdata/crud/unified/replaceOne-comment.json
 create mode 100644 testdata/crud/unified/replaceOne-comment.yml
 create mode 100644 testdata/crud/unified/replaceOne-dots_and_dollars.json
 create mode 100644 testdata/crud/unified/replaceOne-dots_and_dollars.yml
 create mode 100644 testdata/crud/unified/replaceOne-hint.json
 create mode 100644 testdata/crud/unified/replaceOne-hint.yml
 create mode 100644 testdata/crud/unified/replaceOne-let.json
 create mode 100644 testdata/crud/unified/replaceOne-let.yml
 create mode 100644 testdata/crud/unified/replaceOne-sort.json
 create mode 100644 testdata/crud/unified/replaceOne-sort.yml
 create mode 100644 testdata/crud/unified/replaceOne-validation.json
 create mode 100644 testdata/crud/unified/replaceOne-validation.yml
 create mode 100644 testdata/crud/unified/unacknowledged-bulkWrite-delete-hint-clientError.json
 create mode 100644 testdata/crud/unified/unacknowledged-bulkWrite-delete-hint-clientError.yml
 create mode 100644 testdata/crud/unified/unacknowledged-bulkWrite-update-hint-clientError.json
 create mode 100644 testdata/crud/unified/unacknowledged-bulkWrite-update-hint-clientError.yml
 create mode 100644 testdata/crud/unified/unacknowledged-deleteMany-hint-clientError.json
 create mode 100644 testdata/crud/unified/unacknowledged-deleteMany-hint-clientError.yml
 create mode 100644 testdata/crud/unified/unacknowledged-deleteOne-hint-clientError.json
 create mode 100644 testdata/crud/unified/unacknowledged-deleteOne-hint-clientError.yml
 create mode 100644 testdata/crud/unified/unacknowledged-findOneAndDelete-hint-clientError.json
 create mode 100644 testdata/crud/unified/unacknowledged-findOneAndDelete-hint-clientError.yml
 create mode 100644 testdata/crud/unified/unacknowledged-findOneAndReplace-hint-clientError.json
 create mode 100644 testdata/crud/unified/unacknowledged-findOneAndReplace-hint-clientError.yml
 create mode 100644 testdata/crud/unified/unacknowledged-findOneAndUpdate-hint-clientError.json
 create mode 100644 testdata/crud/unified/unacknowledged-findOneAndUpdate-hint-clientError.yml
 create mode 100644 testdata/crud/unified/unacknowledged-replaceOne-hint-clientError.json
 create mode 100644 testdata/crud/unified/unacknowledged-replaceOne-hint-clientError.yml
 create mode 100644 testdata/crud/unified/unacknowledged-updateMany-hint-clientError.json
 create mode 100644 testdata/crud/unified/unacknowledged-updateMany-hint-clientError.yml
 create mode 100644 testdata/crud/unified/unacknowledged-updateOne-hint-clientError.json
 create mode 100644 testdata/crud/unified/unacknowledged-updateOne-hint-clientError.yml
 create mode 100644 testdata/crud/unified/updateMany-comment.json
 create mode 100644 testdata/crud/unified/updateMany-comment.yml
 create mode 100644 testdata/crud/unified/updateMany-dots_and_dollars.json
 create mode 100644 testdata/crud/unified/updateMany-dots_and_dollars.yml
 create mode 100644 testdata/crud/unified/updateMany-hint-clientError.json
 create mode 100644 testdata/crud/unified/updateMany-hint-clientError.yml
 create mode 100644 testdata/crud/unified/updateMany-hint-serverError.json
 create mode 100644 testdata/crud/unified/updateMany-hint-serverError.yml
 create mode 100644 testdata/crud/unified/updateMany-hint.json
 create mode 100644 testdata/crud/unified/updateMany-hint.yml
 create mode 100644 testdata/crud/unified/updateMany-let.json
 create mode 100644 testdata/crud/unified/updateMany-let.yml
 create mode 100644 testdata/crud/unified/updateMany-validation.json
 create mode 100644 testdata/crud/unified/updateMany-validation.yml
 create mode 100644 testdata/crud/unified/updateOne-comment.json
 create mode 100644 testdata/crud/unified/updateOne-comment.yml
 create mode 100644 testdata/crud/unified/updateOne-dots_and_dollars.json
 create mode 100644 testdata/crud/unified/updateOne-dots_and_dollars.yml
 create mode 100644 testdata/crud/unified/updateOne-errorResponse.json
 create mode 100644 testdata/crud/unified/updateOne-errorResponse.yml
 create mode 100644 testdata/crud/unified/updateOne-hint-clientError.json
 create mode 100644 testdata/crud/unified/updateOne-hint-clientError.yml
 create mode 100644 testdata/crud/unified/updateOne-hint-serverError.json
 create mode 100644 testdata/crud/unified/updateOne-hint-serverError.yml
 create mode 100644 testdata/crud/unified/updateOne-hint.json
 create mode 100644 testdata/crud/unified/updateOne-hint.yml
 create mode 100644 testdata/crud/unified/updateOne-let.json
 create mode 100644 testdata/crud/unified/updateOne-let.yml
 create mode 100644 testdata/crud/unified/updateOne-sort.json
 create mode 100644 testdata/crud/unified/updateOne-sort.yml
 create mode 100644 testdata/crud/unified/updateOne-validation.json
 create mode 100644 testdata/crud/unified/updateOne-validation.yml
 create mode 100644 testdata/crud/unified/updateWithPipelines.json
 create mode 100644 testdata/crud/unified/updateWithPipelines.yml
 create mode 100644 testdata/crud/v1/read/aggregate-collation.json
 create mode 100644 testdata/crud/v1/read/aggregate-collation.yml
 create mode 100644 testdata/crud/v1/read/aggregate-out.json
 create mode 100644 testdata/crud/v1/read/aggregate-out.yml
 create mode 100644 testdata/crud/v1/read/aggregate.json
 create mode 100644 testdata/crud/v1/read/aggregate.yml
 create mode 100644 testdata/crud/v1/read/count-collation.json
 create mode 100644 testdata/crud/v1/read/count-collation.yml
 create mode 100644 testdata/crud/v1/read/count-empty.json
 create mode 100644 testdata/crud/v1/read/count-empty.yml
 create mode 100644 testdata/crud/v1/read/count.json
 create mode 100644 testdata/crud/v1/read/count.yml
 create mode 100644 testdata/crud/v1/read/distinct-collation.json
 create mode 100644 testdata/crud/v1/read/distinct-collation.yml
 create mode 100644 testdata/crud/v1/read/distinct.json
 create mode 100644 testdata/crud/v1/read/distinct.yml
 create mode 100644 testdata/crud/v1/read/find-collation.json
 create mode 100644 testdata/crud/v1/read/find-collation.yml
 create mode 100644 testdata/crud/v1/read/find.json
 create mode 100644 testdata/crud/v1/read/find.yml
 create mode 100644 testdata/crud/v1/write/bulkWrite-arrayFilters.json
 create mode 100644 testdata/crud/v1/write/bulkWrite-arrayFilters.yml
 create mode 100644 testdata/crud/v1/write/bulkWrite-collation.json
 create mode 100644 testdata/crud/v1/write/bulkWrite-collation.yml
 create mode 100644 testdata/crud/v1/write/bulkWrite.json
 create mode 100644 testdata/crud/v1/write/bulkWrite.yml
 create mode 100644 testdata/crud/v1/write/deleteMany-collation.json
 create mode 100644 testdata/crud/v1/write/deleteMany-collation.yml
 create mode 100644 testdata/crud/v1/write/deleteMany.json
 create mode 100644 testdata/crud/v1/write/deleteMany.yml
 create mode 100644 testdata/crud/v1/write/deleteOne-collation.json
 create mode 100644 testdata/crud/v1/write/deleteOne-collation.yml
 create mode 100644 testdata/crud/v1/write/deleteOne.json
 create mode 100644 testdata/crud/v1/write/deleteOne.yml
 create mode 100644 testdata/crud/v1/write/findOneAndDelete-collation.json
 create mode 100644 testdata/crud/v1/write/findOneAndDelete-collation.yml
 create mode 100644 testdata/crud/v1/write/findOneAndDelete.json
 create mode 100644 testdata/crud/v1/write/findOneAndDelete.yml
 create mode 100644 testdata/crud/v1/write/findOneAndReplace-collation.json
 create mode 100644 testdata/crud/v1/write/findOneAndReplace-collation.yml
 create mode 100644 testdata/crud/v1/write/findOneAndReplace-upsert.json
 create mode 100644 testdata/crud/v1/write/findOneAndReplace-upsert.yml
 create mode 100644 testdata/crud/v1/write/findOneAndReplace.json
 create mode 100644 testdata/crud/v1/write/findOneAndReplace.yml
 create mode 100644 testdata/crud/v1/write/findOneAndUpdate-arrayFilters.json
 create mode 100644 testdata/crud/v1/write/findOneAndUpdate-arrayFilters.yml
 create mode 100644 testdata/crud/v1/write/findOneAndUpdate-collation.json
 create mode 100644 testdata/crud/v1/write/findOneAndUpdate-collation.yml
 create mode 100644 testdata/crud/v1/write/findOneAndUpdate.json
 create mode 100644 testdata/crud/v1/write/findOneAndUpdate.yml
 create mode 100644 testdata/crud/v1/write/insertMany.json
 create mode 100644 testdata/crud/v1/write/insertMany.yml
 create mode 100644 testdata/crud/v1/write/insertOne.json
 create mode 100644 testdata/crud/v1/write/insertOne.yml
 create mode 100644 testdata/crud/v1/write/replaceOne-collation.json
 create mode 100644 testdata/crud/v1/write/replaceOne-collation.yml
 create mode 100644 testdata/crud/v1/write/replaceOne.json
 create mode 100644 testdata/crud/v1/write/replaceOne.yml
 create mode 100644 testdata/crud/v1/write/updateMany-arrayFilters.json
 create mode 100644 testdata/crud/v1/write/updateMany-arrayFilters.yml
 create mode 100644 testdata/crud/v1/write/updateMany-collation.json
 create mode 100644 testdata/crud/v1/write/updateMany-collation.yml
 create mode 100644 testdata/crud/v1/write/updateMany.json
 create mode 100644 testdata/crud/v1/write/updateMany.yml
 create mode 100644 testdata/crud/v1/write/updateOne-arrayFilters.json
 create mode 100644 testdata/crud/v1/write/updateOne-arrayFilters.yml
 create mode 100644 testdata/crud/v1/write/updateOne-collation.json
 create mode 100644 testdata/crud/v1/write/updateOne-collation.yml
 create mode 100644 testdata/crud/v1/write/updateOne.json
 create mode 100644 testdata/crud/v1/write/updateOne.yml
 create mode 100644 testdata/extended_bson/deep_bson.json.gz
 create mode 100644 testdata/extended_bson/flat_bson.json.gz
 create mode 100644 testdata/extended_bson/full_bson.json.gz
 create mode 100644 testdata/gridfs/delete.json
 create mode 100644 testdata/gridfs/delete.yml
 create mode 100644 testdata/gridfs/download.json
 create mode 100644 testdata/gridfs/download.yml
 create mode 100644 testdata/gridfs/downloadByName.json
 create mode 100644 testdata/gridfs/downloadByName.yml
 create mode 100644 testdata/gridfs/upload-disableMD5.json
 create mode 100644 testdata/gridfs/upload-disableMD5.yml
 create mode 100644 testdata/gridfs/upload.json
 create mode 100644 testdata/gridfs/upload.yml
 create mode 100644 testdata/index-management/createSearchIndex.json
 create mode 100644 testdata/index-management/createSearchIndex.yml
 create mode 100644 testdata/index-management/createSearchIndexes.json
 create mode 100644 testdata/index-management/createSearchIndexes.yml
 create mode 100644 testdata/index-management/dropSearchIndex.json
 create mode 100644 testdata/index-management/dropSearchIndex.yml
 create mode 100644 testdata/index-management/listSearchIndexes.json
 create mode 100644 testdata/index-management/listSearchIndexes.yml
 create mode 100644 testdata/index-management/searchIndexIgnoresReadWriteConcern.json
 create mode 100644 testdata/index-management/searchIndexIgnoresReadWriteConcern.yml
 create mode 100644 testdata/index-management/updateSearchIndex.json
 create mode 100644 testdata/index-management/updateSearchIndex.yml
 create mode 100644 testdata/initial-dns-seedlist-discovery/load-balanced/loadBalanced-directConnection.json
 create mode 100644 testdata/initial-dns-seedlist-discovery/load-balanced/loadBalanced-directConnection.yml
 create mode 100644 testdata/initial-dns-seedlist-discovery/load-balanced/loadBalanced-no-results.json
 create mode 100644 testdata/initial-dns-seedlist-discovery/load-balanced/loadBalanced-no-results.yml
 create mode 100644 testdata/initial-dns-seedlist-discovery/load-balanced/loadBalanced-replicaSet-errors.json
 create mode 100644 testdata/initial-dns-seedlist-discovery/load-balanced/loadBalanced-replicaSet-errors.yml
 create mode 100644 testdata/initial-dns-seedlist-discovery/load-balanced/loadBalanced-true-multiple-hosts.json
 create mode 100644 testdata/initial-dns-seedlist-discovery/load-balanced/loadBalanced-true-multiple-hosts.yml
 create mode 100644 testdata/initial-dns-seedlist-discovery/load-balanced/loadBalanced-true-txt.json
 create mode 100644 testdata/initial-dns-seedlist-discovery/load-balanced/loadBalanced-true-txt.yml
 create mode 100644 testdata/initial-dns-seedlist-discovery/load-balanced/srvMaxHosts-conflicts_with_loadBalanced-true-txt.json
 create mode 100644 testdata/initial-dns-seedlist-discovery/load-balanced/srvMaxHosts-conflicts_with_loadBalanced-true-txt.yml
 create mode 100644 testdata/initial-dns-seedlist-discovery/load-balanced/srvMaxHosts-conflicts_with_loadBalanced-true.json
 create mode 100644 testdata/initial-dns-seedlist-discovery/load-balanced/srvMaxHosts-conflicts_with_loadBalanced-true.yml
 create mode 100644 testdata/initial-dns-seedlist-discovery/load-balanced/srvMaxHosts-zero-txt.json
 create mode 100644 testdata/initial-dns-seedlist-discovery/load-balanced/srvMaxHosts-zero-txt.yml
 create mode 100644 testdata/initial-dns-seedlist-discovery/load-balanced/srvMaxHosts-zero.json
 create mode 100644 testdata/initial-dns-seedlist-discovery/load-balanced/srvMaxHosts-zero.yml
 create mode 100644 testdata/initial-dns-seedlist-discovery/replica-set/direct-connection-false.json
 create mode 100644 testdata/initial-dns-seedlist-discovery/replica-set/direct-connection-false.yml
 create mode 100644 testdata/initial-dns-seedlist-discovery/replica-set/direct-connection-true.json
 create mode 100644 testdata/initial-dns-seedlist-discovery/replica-set/direct-connection-true.yml
 create mode 100644 testdata/initial-dns-seedlist-discovery/replica-set/encoded-userinfo-and-db.json
 create mode 100644 testdata/initial-dns-seedlist-discovery/replica-set/encoded-userinfo-and-db.yml
 create mode 100644 testdata/initial-dns-seedlist-discovery/replica-set/loadBalanced-false-txt.json
 create mode 100644 testdata/initial-dns-seedlist-discovery/replica-set/loadBalanced-false-txt.yml
 create mode 100644 testdata/initial-dns-seedlist-discovery/replica-set/longer-parent-in-return.json
 create mode 100644 testdata/initial-dns-seedlist-discovery/replica-set/longer-parent-in-return.yml
 create mode 100644 testdata/initial-dns-seedlist-discovery/replica-set/misformatted-option.json
 create mode 100644 testdata/initial-dns-seedlist-discovery/replica-set/misformatted-option.yml
 create mode 100644 testdata/initial-dns-seedlist-discovery/replica-set/no-results.json
 create mode 100644 testdata/initial-dns-seedlist-discovery/replica-set/no-results.yml
 create mode 100644 testdata/initial-dns-seedlist-discovery/replica-set/not-enough-parts.json
 create mode 100644 testdata/initial-dns-seedlist-discovery/replica-set/not-enough-parts.yml
 create mode 100644 testdata/initial-dns-seedlist-discovery/replica-set/one-result-default-port.json
 create mode 100644 testdata/initial-dns-seedlist-discovery/replica-set/one-result-default-port.yml
 create mode 100644 testdata/initial-dns-seedlist-discovery/replica-set/one-txt-record-multiple-strings.json
 create mode 100644 testdata/initial-dns-seedlist-discovery/replica-set/one-txt-record-multiple-strings.yml
 create mode 100644 testdata/initial-dns-seedlist-discovery/replica-set/one-txt-record.json
 create mode 100644 testdata/initial-dns-seedlist-discovery/replica-set/one-txt-record.yml
 create mode 100644 testdata/initial-dns-seedlist-discovery/replica-set/parent-part-mismatch1.json
 create mode 100644 testdata/initial-dns-seedlist-discovery/replica-set/parent-part-mismatch1.yml
 create mode 100644 testdata/initial-dns-seedlist-discovery/replica-set/parent-part-mismatch2.json
 create mode 100644 testdata/initial-dns-seedlist-discovery/replica-set/parent-part-mismatch2.yml
 create mode 100644 testdata/initial-dns-seedlist-discovery/replica-set/parent-part-mismatch3.json
 create mode 100644 testdata/initial-dns-seedlist-discovery/replica-set/parent-part-mismatch3.yml
 create mode 100644 testdata/initial-dns-seedlist-discovery/replica-set/parent-part-mismatch4.json
 create mode 100644 testdata/initial-dns-seedlist-discovery/replica-set/parent-part-mismatch4.yml
 create mode 100644 testdata/initial-dns-seedlist-discovery/replica-set/parent-part-mismatch5.json
 create mode 100644 testdata/initial-dns-seedlist-discovery/replica-set/parent-part-mismatch5.yml
 create mode 100644 testdata/initial-dns-seedlist-discovery/replica-set/returned-parent-too-short.json
 create mode 100644 testdata/initial-dns-seedlist-discovery/replica-set/returned-parent-too-short.yml
 create mode 100644 testdata/initial-dns-seedlist-discovery/replica-set/returned-parent-wrong.json
 create mode 100644 testdata/initial-dns-seedlist-discovery/replica-set/returned-parent-wrong.yml
 create mode 100644 testdata/initial-dns-seedlist-discovery/replica-set/srv-service-name.json
 create mode 100644 testdata/initial-dns-seedlist-discovery/replica-set/srv-service-name.yml
 create mode 100644 testdata/initial-dns-seedlist-discovery/replica-set/srvMaxHosts-conflicts_with_replicaSet-txt.json
 create mode 100644 testdata/initial-dns-seedlist-discovery/replica-set/srvMaxHosts-conflicts_with_replicaSet-txt.yml
 create mode 100644 testdata/initial-dns-seedlist-discovery/replica-set/srvMaxHosts-conflicts_with_replicaSet.json
 create mode 100644 testdata/initial-dns-seedlist-discovery/replica-set/srvMaxHosts-conflicts_with_replicaSet.yml
 create mode 100644 testdata/initial-dns-seedlist-discovery/replica-set/srvMaxHosts-equal_to_srv_records.json
 create mode 100644 testdata/initial-dns-seedlist-discovery/replica-set/srvMaxHosts-equal_to_srv_records.yml
 create mode 100644 testdata/initial-dns-seedlist-discovery/replica-set/srvMaxHosts-greater_than_srv_records.json
 create mode 100644 testdata/initial-dns-seedlist-discovery/replica-set/srvMaxHosts-greater_than_srv_records.yml
 create mode 100644 testdata/initial-dns-seedlist-discovery/replica-set/srvMaxHosts-less_than_srv_records.json
 create mode 100644 testdata/initial-dns-seedlist-discovery/replica-set/srvMaxHosts-less_than_srv_records.yml
 create mode 100644 testdata/initial-dns-seedlist-discovery/replica-set/srvMaxHosts-zero-txt.json
 create mode 100644 testdata/initial-dns-seedlist-discovery/replica-set/srvMaxHosts-zero-txt.yml
 create mode 100644 testdata/initial-dns-seedlist-discovery/replica-set/srvMaxHosts-zero.json
 create mode 100644 testdata/initial-dns-seedlist-discovery/replica-set/srvMaxHosts-zero.yml
 create mode 100644 testdata/initial-dns-seedlist-discovery/replica-set/two-results-default-port.json
 create mode 100644 testdata/initial-dns-seedlist-discovery/replica-set/two-results-default-port.yml
 create mode 100644 testdata/initial-dns-seedlist-discovery/replica-set/two-results-nonstandard-port.json
 create mode 100644 testdata/initial-dns-seedlist-discovery/replica-set/two-results-nonstandard-port.yml
 create mode 100644 testdata/initial-dns-seedlist-discovery/replica-set/two-txt-records.json
 create mode 100644 testdata/initial-dns-seedlist-discovery/replica-set/two-txt-records.yml
 create mode 100644 testdata/initial-dns-seedlist-discovery/replica-set/txt-record-not-allowed-option.json
 create mode 100644 testdata/initial-dns-seedlist-discovery/replica-set/txt-record-not-allowed-option.yml
 create mode 100644 testdata/initial-dns-seedlist-discovery/replica-set/txt-record-with-overridden-ssl-option.json
 create mode 100644 testdata/initial-dns-seedlist-discovery/replica-set/txt-record-with-overridden-ssl-option.yml
 create mode 100644 testdata/initial-dns-seedlist-discovery/replica-set/txt-record-with-overridden-uri-option.json
 create mode 100644 testdata/initial-dns-seedlist-discovery/replica-set/txt-record-with-overridden-uri-option.yml
 create mode 100644 testdata/initial-dns-seedlist-discovery/replica-set/txt-record-with-unallowed-option.json
 create mode 100644 testdata/initial-dns-seedlist-discovery/replica-set/txt-record-with-unallowed-option.yml
 create mode 100644 testdata/initial-dns-seedlist-discovery/replica-set/uri-with-admin-database.json
 create mode 100644 testdata/initial-dns-seedlist-discovery/replica-set/uri-with-admin-database.yml
 create mode 100644 testdata/initial-dns-seedlist-discovery/replica-set/uri-with-auth.json
 create mode 100644 testdata/initial-dns-seedlist-discovery/replica-set/uri-with-auth.yml
 create mode 100644 testdata/initial-dns-seedlist-discovery/replica-set/uri-with-port.json
 create mode 100644 testdata/initial-dns-seedlist-discovery/replica-set/uri-with-port.yml
 create mode 100644 testdata/initial-dns-seedlist-discovery/replica-set/uri-with-two-hosts.json
 create mode 100644 testdata/initial-dns-seedlist-discovery/replica-set/uri-with-two-hosts.yml
 create mode 100644 testdata/initial-dns-seedlist-discovery/replica-set/uri-with-uppercase-hostname.json
 create mode 100644 testdata/initial-dns-seedlist-discovery/replica-set/uri-with-uppercase-hostname.yml
 create mode 100644 testdata/initial-dns-seedlist-discovery/sharded/srvMaxHosts-equal_to_srv_records.json
 create mode 100644 testdata/initial-dns-seedlist-discovery/sharded/srvMaxHosts-equal_to_srv_records.yml
 create mode 100644 testdata/initial-dns-seedlist-discovery/sharded/srvMaxHosts-greater_than_srv_records.json
 create mode 100644 testdata/initial-dns-seedlist-discovery/sharded/srvMaxHosts-greater_than_srv_records.yml
 create mode 100644 testdata/initial-dns-seedlist-discovery/sharded/srvMaxHosts-less_than_srv_records.json
 create mode 100644 testdata/initial-dns-seedlist-discovery/sharded/srvMaxHosts-less_than_srv_records.yml
 create mode 100644 testdata/initial-dns-seedlist-discovery/sharded/srvMaxHosts-zero.json
 create mode 100644 testdata/initial-dns-seedlist-discovery/sharded/srvMaxHosts-zero.yml
 create mode 100644 testdata/kmip-certs/ca-ec.pem
 create mode 100644 testdata/kmip-certs/client-ec.pem
 create mode 100644 testdata/kmip-certs/server-ec.pem
 create mode 100644 testdata/load-balancers/cursors.json
 create mode 100644 testdata/load-balancers/cursors.yml
 create mode 100644 testdata/load-balancers/event-monitoring.json
 create mode 100644 testdata/load-balancers/event-monitoring.yml
 create mode 100644 testdata/load-balancers/lb-connection-establishment.json
 create mode 100644 testdata/load-balancers/lb-connection-establishment.yml
 create mode 100644 testdata/load-balancers/non-lb-connection-establishment.json
 create mode 100644 testdata/load-balancers/non-lb-connection-establishment.yml
 create mode 100644 testdata/load-balancers/sdam-error-handling.json
 create mode 100644 testdata/load-balancers/sdam-error-handling.yml
 create mode 100644 testdata/load-balancers/server-selection.json
 create mode 100644 
testdata/load-balancers/server-selection.yml create mode 100644 testdata/load-balancers/transactions.json create mode 100644 testdata/load-balancers/transactions.yml create mode 100644 testdata/load-balancers/wait-queue-timeouts.json create mode 100644 testdata/load-balancers/wait-queue-timeouts.yml create mode 100644 testdata/lorem.txt create mode 100644 testdata/max-staleness/ReplicaSetNoPrimary/DefaultNoMaxStaleness.json create mode 100644 testdata/max-staleness/ReplicaSetNoPrimary/DefaultNoMaxStaleness.yml create mode 100644 testdata/max-staleness/ReplicaSetNoPrimary/LastUpdateTime.json create mode 100644 testdata/max-staleness/ReplicaSetNoPrimary/LastUpdateTime.yml create mode 100644 testdata/max-staleness/ReplicaSetNoPrimary/MaxStalenessTooSmall.json create mode 100644 testdata/max-staleness/ReplicaSetNoPrimary/MaxStalenessTooSmall.yml create mode 100644 testdata/max-staleness/ReplicaSetNoPrimary/Nearest.json create mode 100644 testdata/max-staleness/ReplicaSetNoPrimary/Nearest.yml create mode 100644 testdata/max-staleness/ReplicaSetNoPrimary/Nearest2.json create mode 100644 testdata/max-staleness/ReplicaSetNoPrimary/Nearest2.yml create mode 100644 testdata/max-staleness/ReplicaSetNoPrimary/NoKnownServers.json create mode 100644 testdata/max-staleness/ReplicaSetNoPrimary/NoKnownServers.yml create mode 100644 testdata/max-staleness/ReplicaSetNoPrimary/PrimaryPreferred.json create mode 100644 testdata/max-staleness/ReplicaSetNoPrimary/PrimaryPreferred.yml create mode 100644 testdata/max-staleness/ReplicaSetNoPrimary/PrimaryPreferred_tags.json create mode 100644 testdata/max-staleness/ReplicaSetNoPrimary/PrimaryPreferred_tags.yml create mode 100644 testdata/max-staleness/ReplicaSetNoPrimary/Secondary.json create mode 100644 testdata/max-staleness/ReplicaSetNoPrimary/Secondary.yml create mode 100644 testdata/max-staleness/ReplicaSetNoPrimary/SecondaryPreferred.json create mode 100644 testdata/max-staleness/ReplicaSetNoPrimary/SecondaryPreferred.yml create mode 100644 testdata/max-staleness/ReplicaSetNoPrimary/SecondaryPreferred_tags.json create mode 100644 testdata/max-staleness/ReplicaSetNoPrimary/SecondaryPreferred_tags.yml create mode 100644 testdata/max-staleness/ReplicaSetNoPrimary/ZeroMaxStaleness.json create mode 100644 testdata/max-staleness/ReplicaSetNoPrimary/ZeroMaxStaleness.yml create mode 100644 testdata/max-staleness/ReplicaSetWithPrimary/DefaultNoMaxStaleness.json create mode 100644 testdata/max-staleness/ReplicaSetWithPrimary/DefaultNoMaxStaleness.yml create mode 100644 testdata/max-staleness/ReplicaSetWithPrimary/LastUpdateTime.json create mode 100644 testdata/max-staleness/ReplicaSetWithPrimary/LastUpdateTime.yml create mode 100644 testdata/max-staleness/ReplicaSetWithPrimary/LongHeartbeat.json create mode 100644 testdata/max-staleness/ReplicaSetWithPrimary/LongHeartbeat.yml create mode 100644 testdata/max-staleness/ReplicaSetWithPrimary/LongHeartbeat2.json create mode 100644 testdata/max-staleness/ReplicaSetWithPrimary/LongHeartbeat2.yml create mode 100644 testdata/max-staleness/ReplicaSetWithPrimary/MaxStalenessTooSmall.json create mode 100644 testdata/max-staleness/ReplicaSetWithPrimary/MaxStalenessTooSmall.yml create mode 100644 testdata/max-staleness/ReplicaSetWithPrimary/MaxStalenessWithModePrimary.json create mode 100644 testdata/max-staleness/ReplicaSetWithPrimary/MaxStalenessWithModePrimary.yml create mode 100644 testdata/max-staleness/ReplicaSetWithPrimary/Nearest.json create mode 100644 testdata/max-staleness/ReplicaSetWithPrimary/Nearest.yml create mode 
100644 testdata/max-staleness/ReplicaSetWithPrimary/Nearest2.json create mode 100644 testdata/max-staleness/ReplicaSetWithPrimary/Nearest2.yml create mode 100644 testdata/max-staleness/ReplicaSetWithPrimary/Nearest_tags.json create mode 100644 testdata/max-staleness/ReplicaSetWithPrimary/Nearest_tags.yml create mode 100644 testdata/max-staleness/ReplicaSetWithPrimary/PrimaryPreferred.json create mode 100644 testdata/max-staleness/ReplicaSetWithPrimary/PrimaryPreferred.yml create mode 100644 testdata/max-staleness/ReplicaSetWithPrimary/SecondaryPreferred.json create mode 100644 testdata/max-staleness/ReplicaSetWithPrimary/SecondaryPreferred.yml create mode 100644 testdata/max-staleness/ReplicaSetWithPrimary/SecondaryPreferred_tags.json create mode 100644 testdata/max-staleness/ReplicaSetWithPrimary/SecondaryPreferred_tags.yml create mode 100644 testdata/max-staleness/ReplicaSetWithPrimary/SecondaryPreferred_tags2.json create mode 100644 testdata/max-staleness/ReplicaSetWithPrimary/SecondaryPreferred_tags2.yml create mode 100644 testdata/max-staleness/ReplicaSetWithPrimary/Secondary_tags.json create mode 100644 testdata/max-staleness/ReplicaSetWithPrimary/Secondary_tags.yml create mode 100644 testdata/max-staleness/ReplicaSetWithPrimary/Secondary_tags2.json create mode 100644 testdata/max-staleness/ReplicaSetWithPrimary/Secondary_tags2.yml create mode 100644 testdata/max-staleness/ReplicaSetWithPrimary/ZeroMaxStaleness.json create mode 100644 testdata/max-staleness/ReplicaSetWithPrimary/ZeroMaxStaleness.yml create mode 100644 testdata/max-staleness/Sharded/SmallMaxStaleness.json create mode 100644 testdata/max-staleness/Sharded/SmallMaxStaleness.yml create mode 100644 testdata/max-staleness/Single/SmallMaxStaleness.json create mode 100644 testdata/max-staleness/Single/SmallMaxStaleness.yml create mode 100644 testdata/max-staleness/Unknown/SmallMaxStaleness.json create mode 100644 testdata/max-staleness/Unknown/SmallMaxStaleness.yml create mode 100644 testdata/mongocrypt/collection-info.json create mode 100644 testdata/mongocrypt/command-reply.json create mode 100644 testdata/mongocrypt/command.json create mode 100644 testdata/mongocrypt/encrypted-command-reply.json create mode 100644 testdata/mongocrypt/encrypted-command.json create mode 100644 testdata/mongocrypt/encrypted-value.json create mode 100644 testdata/mongocrypt/json-schema.json create mode 100644 testdata/mongocrypt/key-document.json create mode 100644 testdata/mongocrypt/key-filter-keyAltName.json create mode 100644 testdata/mongocrypt/key-filter.json create mode 100644 testdata/mongocrypt/kms-reply.txt create mode 100644 testdata/mongocrypt/list-collections-filter.json create mode 100644 testdata/mongocrypt/local-key-document.json create mode 100644 testdata/mongocrypt/mongocryptd-command-local.json create mode 100644 testdata/mongocrypt/mongocryptd-command-remote.json create mode 100644 testdata/mongocrypt/mongocryptd-reply.json create mode 100644 testdata/read-write-concern/connection-string/read-concern.json create mode 100644 testdata/read-write-concern/connection-string/read-concern.yml create mode 100644 testdata/read-write-concern/connection-string/write-concern.json create mode 100644 testdata/read-write-concern/connection-string/write-concern.yml create mode 100644 testdata/read-write-concern/document/read-concern.json create mode 100644 testdata/read-write-concern/document/read-concern.yml create mode 100644 testdata/read-write-concern/document/write-concern.json create mode 100644 
testdata/read-write-concern/document/write-concern.yml create mode 100644 testdata/read-write-concern/operation/default-write-concern-2.6.json create mode 100644 testdata/read-write-concern/operation/default-write-concern-2.6.yml create mode 100644 testdata/read-write-concern/operation/default-write-concern-3.2.json create mode 100644 testdata/read-write-concern/operation/default-write-concern-3.2.yml create mode 100644 testdata/read-write-concern/operation/default-write-concern-3.4.json create mode 100644 testdata/read-write-concern/operation/default-write-concern-3.4.yml create mode 100644 testdata/read-write-concern/operation/default-write-concern-4.2.json create mode 100644 testdata/read-write-concern/operation/default-write-concern-4.2.yml create mode 100644 testdata/retryable-reads/legacy/aggregate-merge.json create mode 100644 testdata/retryable-reads/legacy/aggregate-merge.yml create mode 100644 testdata/retryable-reads/legacy/aggregate-serverErrors.json create mode 100644 testdata/retryable-reads/legacy/aggregate-serverErrors.yml create mode 100644 testdata/retryable-reads/legacy/aggregate.json create mode 100644 testdata/retryable-reads/legacy/aggregate.yml create mode 100644 testdata/retryable-reads/legacy/changeStreams-client.watch-serverErrors.json create mode 100644 testdata/retryable-reads/legacy/changeStreams-client.watch-serverErrors.yml create mode 100644 testdata/retryable-reads/legacy/changeStreams-client.watch.json create mode 100644 testdata/retryable-reads/legacy/changeStreams-client.watch.yml create mode 100644 testdata/retryable-reads/legacy/changeStreams-db.coll.watch-serverErrors.json create mode 100644 testdata/retryable-reads/legacy/changeStreams-db.coll.watch-serverErrors.yml create mode 100644 testdata/retryable-reads/legacy/changeStreams-db.coll.watch.json create mode 100644 testdata/retryable-reads/legacy/changeStreams-db.coll.watch.yml create mode 100644 testdata/retryable-reads/legacy/changeStreams-db.watch-serverErrors.json create mode 100644 testdata/retryable-reads/legacy/changeStreams-db.watch-serverErrors.yml create mode 100644 testdata/retryable-reads/legacy/changeStreams-db.watch.json create mode 100644 testdata/retryable-reads/legacy/changeStreams-db.watch.yml create mode 100644 testdata/retryable-reads/legacy/count-serverErrors.json create mode 100644 testdata/retryable-reads/legacy/count-serverErrors.yml create mode 100644 testdata/retryable-reads/legacy/count.json create mode 100644 testdata/retryable-reads/legacy/count.yml create mode 100644 testdata/retryable-reads/legacy/countDocuments-serverErrors.json create mode 100644 testdata/retryable-reads/legacy/countDocuments-serverErrors.yml create mode 100644 testdata/retryable-reads/legacy/countDocuments.json create mode 100644 testdata/retryable-reads/legacy/countDocuments.yml create mode 100644 testdata/retryable-reads/legacy/distinct-serverErrors.json create mode 100644 testdata/retryable-reads/legacy/distinct-serverErrors.yml create mode 100644 testdata/retryable-reads/legacy/distinct.json create mode 100644 testdata/retryable-reads/legacy/distinct.yml create mode 100644 testdata/retryable-reads/legacy/estimatedDocumentCount-serverErrors.json create mode 100644 testdata/retryable-reads/legacy/estimatedDocumentCount-serverErrors.yml create mode 100644 testdata/retryable-reads/legacy/estimatedDocumentCount.json create mode 100644 testdata/retryable-reads/legacy/estimatedDocumentCount.yml create mode 100644 testdata/retryable-reads/legacy/find-serverErrors.json create mode 100644 
testdata/retryable-reads/legacy/find-serverErrors.yml create mode 100644 testdata/retryable-reads/legacy/find.json create mode 100644 testdata/retryable-reads/legacy/find.yml create mode 100644 testdata/retryable-reads/legacy/findOne-serverErrors.json create mode 100644 testdata/retryable-reads/legacy/findOne-serverErrors.yml create mode 100644 testdata/retryable-reads/legacy/findOne.json create mode 100644 testdata/retryable-reads/legacy/findOne.yml create mode 100644 testdata/retryable-reads/legacy/gridfs-download-serverErrors.json create mode 100644 testdata/retryable-reads/legacy/gridfs-download-serverErrors.yml create mode 100644 testdata/retryable-reads/legacy/gridfs-download.json create mode 100644 testdata/retryable-reads/legacy/gridfs-download.yml create mode 100644 testdata/retryable-reads/legacy/gridfs-downloadByName-serverErrors.json create mode 100644 testdata/retryable-reads/legacy/gridfs-downloadByName-serverErrors.yml create mode 100644 testdata/retryable-reads/legacy/gridfs-downloadByName.json create mode 100644 testdata/retryable-reads/legacy/gridfs-downloadByName.yml create mode 100644 testdata/retryable-reads/legacy/listCollectionNames-serverErrors.json create mode 100644 testdata/retryable-reads/legacy/listCollectionNames-serverErrors.yml create mode 100644 testdata/retryable-reads/legacy/listCollectionNames.json create mode 100644 testdata/retryable-reads/legacy/listCollectionNames.yml create mode 100644 testdata/retryable-reads/legacy/listCollectionObjects-serverErrors.json create mode 100644 testdata/retryable-reads/legacy/listCollectionObjects-serverErrors.yml create mode 100644 testdata/retryable-reads/legacy/listCollectionObjects.json create mode 100644 testdata/retryable-reads/legacy/listCollectionObjects.yml create mode 100644 testdata/retryable-reads/legacy/listCollections-serverErrors.json create mode 100644 testdata/retryable-reads/legacy/listCollections-serverErrors.yml create mode 100644 testdata/retryable-reads/legacy/listCollections.json create mode 100644 testdata/retryable-reads/legacy/listCollections.yml create mode 100644 testdata/retryable-reads/legacy/listDatabaseNames-serverErrors.json create mode 100644 testdata/retryable-reads/legacy/listDatabaseNames-serverErrors.yml create mode 100644 testdata/retryable-reads/legacy/listDatabaseNames.json create mode 100644 testdata/retryable-reads/legacy/listDatabaseNames.yml create mode 100644 testdata/retryable-reads/legacy/listDatabaseObjects-serverErrors.json create mode 100644 testdata/retryable-reads/legacy/listDatabaseObjects-serverErrors.yml create mode 100644 testdata/retryable-reads/legacy/listDatabaseObjects.json create mode 100644 testdata/retryable-reads/legacy/listDatabaseObjects.yml create mode 100644 testdata/retryable-reads/legacy/listDatabases-serverErrors.json create mode 100644 testdata/retryable-reads/legacy/listDatabases-serverErrors.yml create mode 100644 testdata/retryable-reads/legacy/listDatabases.json create mode 100644 testdata/retryable-reads/legacy/listDatabases.yml create mode 100644 testdata/retryable-reads/legacy/listIndexNames-serverErrors.json create mode 100644 testdata/retryable-reads/legacy/listIndexNames-serverErrors.yml create mode 100644 testdata/retryable-reads/legacy/listIndexNames.json create mode 100644 testdata/retryable-reads/legacy/listIndexNames.yml create mode 100644 testdata/retryable-reads/legacy/listIndexes-serverErrors.json create mode 100644 testdata/retryable-reads/legacy/listIndexes-serverErrors.yml create mode 100644 
testdata/retryable-reads/legacy/listIndexes.json create mode 100644 testdata/retryable-reads/legacy/listIndexes.yml create mode 100644 testdata/retryable-reads/legacy/mapReduce.json create mode 100644 testdata/retryable-reads/legacy/mapReduce.yml create mode 100644 testdata/retryable-reads/unified/readConcernMajorityNotAvailableYet.json create mode 100644 testdata/retryable-reads/unified/readConcernMajorityNotAvailableYet.yml create mode 100644 testdata/retryable-writes/legacy/bulkWrite-errorLabels.json create mode 100644 testdata/retryable-writes/legacy/bulkWrite-errorLabels.yml create mode 100644 testdata/retryable-writes/legacy/bulkWrite-serverErrors.json create mode 100644 testdata/retryable-writes/legacy/bulkWrite-serverErrors.yml create mode 100644 testdata/retryable-writes/legacy/bulkWrite.json create mode 100644 testdata/retryable-writes/legacy/bulkWrite.yml create mode 100644 testdata/retryable-writes/legacy/deleteMany.json create mode 100644 testdata/retryable-writes/legacy/deleteMany.yml create mode 100644 testdata/retryable-writes/legacy/deleteOne-errorLabels.json create mode 100644 testdata/retryable-writes/legacy/deleteOne-errorLabels.yml create mode 100644 testdata/retryable-writes/legacy/deleteOne-serverErrors.json create mode 100644 testdata/retryable-writes/legacy/deleteOne-serverErrors.yml create mode 100644 testdata/retryable-writes/legacy/deleteOne.json create mode 100644 testdata/retryable-writes/legacy/deleteOne.yml create mode 100644 testdata/retryable-writes/legacy/findOneAndDelete-errorLabels.json create mode 100644 testdata/retryable-writes/legacy/findOneAndDelete-errorLabels.yml create mode 100644 testdata/retryable-writes/legacy/findOneAndDelete-serverErrors.json create mode 100644 testdata/retryable-writes/legacy/findOneAndDelete-serverErrors.yml create mode 100644 testdata/retryable-writes/legacy/findOneAndDelete.json create mode 100644 testdata/retryable-writes/legacy/findOneAndDelete.yml create mode 100644 testdata/retryable-writes/legacy/findOneAndReplace-errorLabels.json create mode 100644 testdata/retryable-writes/legacy/findOneAndReplace-errorLabels.yml create mode 100644 testdata/retryable-writes/legacy/findOneAndReplace-serverErrors.json create mode 100644 testdata/retryable-writes/legacy/findOneAndReplace-serverErrors.yml create mode 100644 testdata/retryable-writes/legacy/findOneAndReplace.json create mode 100644 testdata/retryable-writes/legacy/findOneAndReplace.yml create mode 100644 testdata/retryable-writes/legacy/findOneAndUpdate-errorLabels.json create mode 100644 testdata/retryable-writes/legacy/findOneAndUpdate-errorLabels.yml create mode 100644 testdata/retryable-writes/legacy/findOneAndUpdate-serverErrors.json create mode 100644 testdata/retryable-writes/legacy/findOneAndUpdate-serverErrors.yml create mode 100644 testdata/retryable-writes/legacy/findOneAndUpdate.json create mode 100644 testdata/retryable-writes/legacy/findOneAndUpdate.yml create mode 100644 testdata/retryable-writes/legacy/insertMany-errorLabels.json create mode 100644 testdata/retryable-writes/legacy/insertMany-errorLabels.yml create mode 100644 testdata/retryable-writes/legacy/insertMany-serverErrors.json create mode 100644 testdata/retryable-writes/legacy/insertMany-serverErrors.yml create mode 100644 testdata/retryable-writes/legacy/insertMany.json create mode 100644 testdata/retryable-writes/legacy/insertMany.yml create mode 100644 testdata/retryable-writes/legacy/insertOne-errorLabels.json create mode 100644 testdata/retryable-writes/legacy/insertOne-errorLabels.yml 
create mode 100644 testdata/retryable-writes/legacy/insertOne-serverErrors.json create mode 100644 testdata/retryable-writes/legacy/insertOne-serverErrors.yml create mode 100644 testdata/retryable-writes/legacy/insertOne.json create mode 100644 testdata/retryable-writes/legacy/insertOne.yml create mode 100644 testdata/retryable-writes/legacy/replaceOne-errorLabels.json create mode 100644 testdata/retryable-writes/legacy/replaceOne-errorLabels.yml create mode 100644 testdata/retryable-writes/legacy/replaceOne-serverErrors.json create mode 100644 testdata/retryable-writes/legacy/replaceOne-serverErrors.yml create mode 100644 testdata/retryable-writes/legacy/replaceOne.json create mode 100644 testdata/retryable-writes/legacy/replaceOne.yml create mode 100644 testdata/retryable-writes/legacy/updateMany.json create mode 100644 testdata/retryable-writes/legacy/updateMany.yml create mode 100644 testdata/retryable-writes/legacy/updateOne-errorLabels.json create mode 100644 testdata/retryable-writes/legacy/updateOne-errorLabels.yml create mode 100644 testdata/retryable-writes/legacy/updateOne-serverErrors.json create mode 100644 testdata/retryable-writes/legacy/updateOne-serverErrors.yml create mode 100644 testdata/retryable-writes/legacy/updateOne.json create mode 100644 testdata/retryable-writes/legacy/updateOne.yml create mode 100644 testdata/retryable-writes/unified/bulkWrite-serverErrors.json create mode 100644 testdata/retryable-writes/unified/bulkWrite-serverErrors.yml create mode 100644 testdata/retryable-writes/unified/client-bulkWrite-clientErrors.json create mode 100644 testdata/retryable-writes/unified/client-bulkWrite-clientErrors.yml create mode 100644 testdata/retryable-writes/unified/client-bulkWrite-serverErrors.json create mode 100644 testdata/retryable-writes/unified/client-bulkWrite-serverErrors.yml create mode 100644 testdata/retryable-writes/unified/insertOne-noWritesPerformedError.yml create mode 100644 testdata/retryable-writes/unified/insertOne-noWritesPerformedErrors.json create mode 100644 testdata/retryable-writes/unified/insertOne-serverErrors.json create mode 100644 testdata/retryable-writes/unified/insertOne-serverErrors.yml create mode 100644 testdata/run-command/runCommand.json create mode 100644 testdata/run-command/runCommand.yml create mode 100644 testdata/run-command/runCursorCommand.json create mode 100644 testdata/run-command/runCursorCommand.yml create mode 100644 testdata/server-discovery-and-monitoring/errors/error_handling_handshake.json create mode 100644 testdata/server-discovery-and-monitoring/errors/error_handling_handshake.yml create mode 100644 testdata/server-discovery-and-monitoring/errors/generate-error-tests.py create mode 100644 testdata/server-discovery-and-monitoring/errors/non-stale-network-error.json create mode 100644 testdata/server-discovery-and-monitoring/errors/non-stale-network-error.yml create mode 100644 testdata/server-discovery-and-monitoring/errors/non-stale-network-timeout-error.json create mode 100644 testdata/server-discovery-and-monitoring/errors/non-stale-network-timeout-error.yml create mode 100644 testdata/server-discovery-and-monitoring/errors/non-stale-topologyVersion-greater-InterruptedAtShutdown.json create mode 100644 testdata/server-discovery-and-monitoring/errors/non-stale-topologyVersion-greater-InterruptedAtShutdown.yml create mode 100644 testdata/server-discovery-and-monitoring/errors/non-stale-topologyVersion-greater-InterruptedDueToReplStateChange.json create mode 100644 
testdata/server-discovery-and-monitoring/errors/non-stale-topologyVersion-greater-InterruptedDueToReplStateChange.yml create mode 100644 testdata/server-discovery-and-monitoring/errors/non-stale-topologyVersion-greater-LegacyNotPrimary.json create mode 100644 testdata/server-discovery-and-monitoring/errors/non-stale-topologyVersion-greater-LegacyNotPrimary.yml create mode 100644 testdata/server-discovery-and-monitoring/errors/non-stale-topologyVersion-greater-NotPrimaryNoSecondaryOk.json create mode 100644 testdata/server-discovery-and-monitoring/errors/non-stale-topologyVersion-greater-NotPrimaryNoSecondaryOk.yml create mode 100644 testdata/server-discovery-and-monitoring/errors/non-stale-topologyVersion-greater-NotPrimaryOrSecondary.json create mode 100644 testdata/server-discovery-and-monitoring/errors/non-stale-topologyVersion-greater-NotPrimaryOrSecondary.yml create mode 100644 testdata/server-discovery-and-monitoring/errors/non-stale-topologyVersion-greater-NotWritablePrimary.json create mode 100644 testdata/server-discovery-and-monitoring/errors/non-stale-topologyVersion-greater-NotWritablePrimary.yml create mode 100644 testdata/server-discovery-and-monitoring/errors/non-stale-topologyVersion-greater-PrimarySteppedDown.json create mode 100644 testdata/server-discovery-and-monitoring/errors/non-stale-topologyVersion-greater-PrimarySteppedDown.yml create mode 100644 testdata/server-discovery-and-monitoring/errors/non-stale-topologyVersion-greater-ShutdownInProgress.json create mode 100644 testdata/server-discovery-and-monitoring/errors/non-stale-topologyVersion-greater-ShutdownInProgress.yml create mode 100644 testdata/server-discovery-and-monitoring/errors/non-stale-topologyVersion-missing-InterruptedAtShutdown.json create mode 100644 testdata/server-discovery-and-monitoring/errors/non-stale-topologyVersion-missing-InterruptedAtShutdown.yml create mode 100644 testdata/server-discovery-and-monitoring/errors/non-stale-topologyVersion-missing-InterruptedDueToReplStateChange.json create mode 100644 testdata/server-discovery-and-monitoring/errors/non-stale-topologyVersion-missing-InterruptedDueToReplStateChange.yml create mode 100644 testdata/server-discovery-and-monitoring/errors/non-stale-topologyVersion-missing-LegacyNotPrimary.json create mode 100644 testdata/server-discovery-and-monitoring/errors/non-stale-topologyVersion-missing-LegacyNotPrimary.yml create mode 100644 testdata/server-discovery-and-monitoring/errors/non-stale-topologyVersion-missing-NotPrimaryNoSecondaryOk.json create mode 100644 testdata/server-discovery-and-monitoring/errors/non-stale-topologyVersion-missing-NotPrimaryNoSecondaryOk.yml create mode 100644 testdata/server-discovery-and-monitoring/errors/non-stale-topologyVersion-missing-NotPrimaryOrSecondary.json create mode 100644 testdata/server-discovery-and-monitoring/errors/non-stale-topologyVersion-missing-NotPrimaryOrSecondary.yml create mode 100644 testdata/server-discovery-and-monitoring/errors/non-stale-topologyVersion-missing-NotWritablePrimary.json create mode 100644 testdata/server-discovery-and-monitoring/errors/non-stale-topologyVersion-missing-NotWritablePrimary.yml create mode 100644 testdata/server-discovery-and-monitoring/errors/non-stale-topologyVersion-missing-PrimarySteppedDown.json create mode 100644 testdata/server-discovery-and-monitoring/errors/non-stale-topologyVersion-missing-PrimarySteppedDown.yml create mode 100644 testdata/server-discovery-and-monitoring/errors/non-stale-topologyVersion-missing-ShutdownInProgress.json create mode 100644 
testdata/server-discovery-and-monitoring/errors/non-stale-topologyVersion-missing-ShutdownInProgress.yml create mode 100644 testdata/server-discovery-and-monitoring/errors/non-stale-topologyVersion-proccessId-changed-InterruptedAtShutdown.json create mode 100644 testdata/server-discovery-and-monitoring/errors/non-stale-topologyVersion-proccessId-changed-InterruptedAtShutdown.yml create mode 100644 testdata/server-discovery-and-monitoring/errors/non-stale-topologyVersion-proccessId-changed-InterruptedDueToReplStateChange.json create mode 100644 testdata/server-discovery-and-monitoring/errors/non-stale-topologyVersion-proccessId-changed-InterruptedDueToReplStateChange.yml create mode 100644 testdata/server-discovery-and-monitoring/errors/non-stale-topologyVersion-proccessId-changed-LegacyNotPrimary.json create mode 100644 testdata/server-discovery-and-monitoring/errors/non-stale-topologyVersion-proccessId-changed-LegacyNotPrimary.yml create mode 100644 testdata/server-discovery-and-monitoring/errors/non-stale-topologyVersion-proccessId-changed-NotPrimaryNoSecondaryOk.json create mode 100644 testdata/server-discovery-and-monitoring/errors/non-stale-topologyVersion-proccessId-changed-NotPrimaryNoSecondaryOk.yml create mode 100644 testdata/server-discovery-and-monitoring/errors/non-stale-topologyVersion-proccessId-changed-NotPrimaryOrSecondary.json create mode 100644 testdata/server-discovery-and-monitoring/errors/non-stale-topologyVersion-proccessId-changed-NotPrimaryOrSecondary.yml create mode 100644 testdata/server-discovery-and-monitoring/errors/non-stale-topologyVersion-proccessId-changed-NotWritablePrimary.json create mode 100644 testdata/server-discovery-and-monitoring/errors/non-stale-topologyVersion-proccessId-changed-NotWritablePrimary.yml create mode 100644 testdata/server-discovery-and-monitoring/errors/non-stale-topologyVersion-proccessId-changed-PrimarySteppedDown.json create mode 100644 testdata/server-discovery-and-monitoring/errors/non-stale-topologyVersion-proccessId-changed-PrimarySteppedDown.yml create mode 100644 testdata/server-discovery-and-monitoring/errors/non-stale-topologyVersion-proccessId-changed-ShutdownInProgress.json create mode 100644 testdata/server-discovery-and-monitoring/errors/non-stale-topologyVersion-proccessId-changed-ShutdownInProgress.yml create mode 100644 testdata/server-discovery-and-monitoring/errors/non-stale-topologyVersion.yml.template create mode 100644 testdata/server-discovery-and-monitoring/errors/post-42-InterruptedAtShutdown.json create mode 100644 testdata/server-discovery-and-monitoring/errors/post-42-InterruptedAtShutdown.yml create mode 100644 testdata/server-discovery-and-monitoring/errors/post-42-InterruptedDueToReplStateChange.json create mode 100644 testdata/server-discovery-and-monitoring/errors/post-42-InterruptedDueToReplStateChange.yml create mode 100644 testdata/server-discovery-and-monitoring/errors/post-42-LegacyNotPrimary.json create mode 100644 testdata/server-discovery-and-monitoring/errors/post-42-LegacyNotPrimary.yml create mode 100644 testdata/server-discovery-and-monitoring/errors/post-42-NotPrimaryNoSecondaryOk.json create mode 100644 testdata/server-discovery-and-monitoring/errors/post-42-NotPrimaryNoSecondaryOk.yml create mode 100644 testdata/server-discovery-and-monitoring/errors/post-42-NotPrimaryOrSecondary.json create mode 100644 testdata/server-discovery-and-monitoring/errors/post-42-NotPrimaryOrSecondary.yml create mode 100644 testdata/server-discovery-and-monitoring/errors/post-42-NotWritablePrimary.json 
create mode 100644 testdata/server-discovery-and-monitoring/errors/post-42-NotWritablePrimary.yml create mode 100644 testdata/server-discovery-and-monitoring/errors/post-42-PrimarySteppedDown.json create mode 100644 testdata/server-discovery-and-monitoring/errors/post-42-PrimarySteppedDown.yml create mode 100644 testdata/server-discovery-and-monitoring/errors/post-42-ShutdownInProgress.json create mode 100644 testdata/server-discovery-and-monitoring/errors/post-42-ShutdownInProgress.yml create mode 100644 testdata/server-discovery-and-monitoring/errors/post-42.yml.template create mode 100644 testdata/server-discovery-and-monitoring/errors/pre-42-InterruptedAtShutdown.json create mode 100644 testdata/server-discovery-and-monitoring/errors/pre-42-InterruptedAtShutdown.yml create mode 100644 testdata/server-discovery-and-monitoring/errors/pre-42-InterruptedDueToReplStateChange.json create mode 100644 testdata/server-discovery-and-monitoring/errors/pre-42-InterruptedDueToReplStateChange.yml create mode 100644 testdata/server-discovery-and-monitoring/errors/pre-42-LegacyNotPrimary.json create mode 100644 testdata/server-discovery-and-monitoring/errors/pre-42-LegacyNotPrimary.yml create mode 100644 testdata/server-discovery-and-monitoring/errors/pre-42-NotPrimaryNoSecondaryOk.json create mode 100644 testdata/server-discovery-and-monitoring/errors/pre-42-NotPrimaryNoSecondaryOk.yml create mode 100644 testdata/server-discovery-and-monitoring/errors/pre-42-NotPrimaryOrSecondary.json create mode 100644 testdata/server-discovery-and-monitoring/errors/pre-42-NotPrimaryOrSecondary.yml create mode 100644 testdata/server-discovery-and-monitoring/errors/pre-42-NotWritablePrimary.json create mode 100644 testdata/server-discovery-and-monitoring/errors/pre-42-NotWritablePrimary.yml create mode 100644 testdata/server-discovery-and-monitoring/errors/pre-42-PrimarySteppedDown.json create mode 100644 testdata/server-discovery-and-monitoring/errors/pre-42-PrimarySteppedDown.yml create mode 100644 testdata/server-discovery-and-monitoring/errors/pre-42-ShutdownInProgress.json create mode 100644 testdata/server-discovery-and-monitoring/errors/pre-42-ShutdownInProgress.yml create mode 100644 testdata/server-discovery-and-monitoring/errors/pre-42.yml.template create mode 100644 testdata/server-discovery-and-monitoring/errors/prefer-error-code.json create mode 100644 testdata/server-discovery-and-monitoring/errors/prefer-error-code.yml create mode 100644 testdata/server-discovery-and-monitoring/errors/stale-generation-InterruptedAtShutdown.json create mode 100644 testdata/server-discovery-and-monitoring/errors/stale-generation-InterruptedAtShutdown.yml create mode 100644 testdata/server-discovery-and-monitoring/errors/stale-generation-InterruptedDueToReplStateChange.json create mode 100644 testdata/server-discovery-and-monitoring/errors/stale-generation-InterruptedDueToReplStateChange.yml create mode 100644 testdata/server-discovery-and-monitoring/errors/stale-generation-NotPrimaryNoSecondaryOk.json create mode 100644 testdata/server-discovery-and-monitoring/errors/stale-generation-NotPrimaryNoSecondaryOk.yml create mode 100644 testdata/server-discovery-and-monitoring/errors/stale-generation-NotPrimaryOrSecondary.json create mode 100644 testdata/server-discovery-and-monitoring/errors/stale-generation-NotPrimaryOrSecondary.yml create mode 100644 testdata/server-discovery-and-monitoring/errors/stale-generation-NotWritablePrimary.json create mode 100644 
testdata/server-discovery-and-monitoring/errors/stale-generation-NotWritablePrimary.yml create mode 100644 testdata/server-discovery-and-monitoring/errors/stale-generation-PrimarySteppedDown.json create mode 100644 testdata/server-discovery-and-monitoring/errors/stale-generation-PrimarySteppedDown.yml create mode 100644 testdata/server-discovery-and-monitoring/errors/stale-generation-ShutdownInProgress.json create mode 100644 testdata/server-discovery-and-monitoring/errors/stale-generation-ShutdownInProgress.yml create mode 100644 testdata/server-discovery-and-monitoring/errors/stale-generation-afterHandshakeCompletes-InterruptedAtShutdown.json create mode 100644 testdata/server-discovery-and-monitoring/errors/stale-generation-afterHandshakeCompletes-InterruptedAtShutdown.yml create mode 100644 testdata/server-discovery-and-monitoring/errors/stale-generation-afterHandshakeCompletes-InterruptedDueToReplStateChange.json create mode 100644 testdata/server-discovery-and-monitoring/errors/stale-generation-afterHandshakeCompletes-InterruptedDueToReplStateChange.yml create mode 100644 testdata/server-discovery-and-monitoring/errors/stale-generation-afterHandshakeCompletes-LegacyNotPrimary.json create mode 100644 testdata/server-discovery-and-monitoring/errors/stale-generation-afterHandshakeCompletes-LegacyNotPrimary.yml create mode 100644 testdata/server-discovery-and-monitoring/errors/stale-generation-afterHandshakeCompletes-NotPrimaryNoSecondaryOk.json create mode 100644 testdata/server-discovery-and-monitoring/errors/stale-generation-afterHandshakeCompletes-NotPrimaryNoSecondaryOk.yml create mode 100644 testdata/server-discovery-and-monitoring/errors/stale-generation-afterHandshakeCompletes-NotPrimaryOrSecondary.json create mode 100644 testdata/server-discovery-and-monitoring/errors/stale-generation-afterHandshakeCompletes-NotPrimaryOrSecondary.yml create mode 100644 testdata/server-discovery-and-monitoring/errors/stale-generation-afterHandshakeCompletes-NotWritablePrimary.json create mode 100644 testdata/server-discovery-and-monitoring/errors/stale-generation-afterHandshakeCompletes-NotWritablePrimary.yml create mode 100644 testdata/server-discovery-and-monitoring/errors/stale-generation-afterHandshakeCompletes-PrimarySteppedDown.json create mode 100644 testdata/server-discovery-and-monitoring/errors/stale-generation-afterHandshakeCompletes-PrimarySteppedDown.yml create mode 100644 testdata/server-discovery-and-monitoring/errors/stale-generation-afterHandshakeCompletes-ShutdownInProgress.json create mode 100644 testdata/server-discovery-and-monitoring/errors/stale-generation-afterHandshakeCompletes-ShutdownInProgress.yml create mode 100644 testdata/server-discovery-and-monitoring/errors/stale-generation-afterHandshakeCompletes-network.json create mode 100644 testdata/server-discovery-and-monitoring/errors/stale-generation-afterHandshakeCompletes-network.yml create mode 100644 testdata/server-discovery-and-monitoring/errors/stale-generation-afterHandshakeCompletes-timeout.json create mode 100644 testdata/server-discovery-and-monitoring/errors/stale-generation-afterHandshakeCompletes-timeout.yml create mode 100644 testdata/server-discovery-and-monitoring/errors/stale-generation-beforeHandshakeCompletes-InterruptedAtShutdown.json create mode 100644 testdata/server-discovery-and-monitoring/errors/stale-generation-beforeHandshakeCompletes-InterruptedAtShutdown.yml create mode 100644 
testdata/server-discovery-and-monitoring/errors/stale-generation-beforeHandshakeCompletes-InterruptedDueToReplStateChange.json create mode 100644 testdata/server-discovery-and-monitoring/errors/stale-generation-beforeHandshakeCompletes-InterruptedDueToReplStateChange.yml create mode 100644 testdata/server-discovery-and-monitoring/errors/stale-generation-beforeHandshakeCompletes-LegacyNotPrimary.json create mode 100644 testdata/server-discovery-and-monitoring/errors/stale-generation-beforeHandshakeCompletes-LegacyNotPrimary.yml create mode 100644 testdata/server-discovery-and-monitoring/errors/stale-generation-beforeHandshakeCompletes-NotPrimaryNoSecondaryOk.json create mode 100644 testdata/server-discovery-and-monitoring/errors/stale-generation-beforeHandshakeCompletes-NotPrimaryNoSecondaryOk.yml create mode 100644 testdata/server-discovery-and-monitoring/errors/stale-generation-beforeHandshakeCompletes-NotPrimaryOrSecondary.json create mode 100644 testdata/server-discovery-and-monitoring/errors/stale-generation-beforeHandshakeCompletes-NotPrimaryOrSecondary.yml create mode 100644 testdata/server-discovery-and-monitoring/errors/stale-generation-beforeHandshakeCompletes-NotWritablePrimary.json create mode 100644 testdata/server-discovery-and-monitoring/errors/stale-generation-beforeHandshakeCompletes-NotWritablePrimary.yml create mode 100644 testdata/server-discovery-and-monitoring/errors/stale-generation-beforeHandshakeCompletes-PrimarySteppedDown.json create mode 100644 testdata/server-discovery-and-monitoring/errors/stale-generation-beforeHandshakeCompletes-PrimarySteppedDown.yml create mode 100644 testdata/server-discovery-and-monitoring/errors/stale-generation-beforeHandshakeCompletes-ShutdownInProgress.json create mode 100644 testdata/server-discovery-and-monitoring/errors/stale-generation-beforeHandshakeCompletes-ShutdownInProgress.yml create mode 100644 testdata/server-discovery-and-monitoring/errors/stale-generation-beforeHandshakeCompletes-network.json create mode 100644 testdata/server-discovery-and-monitoring/errors/stale-generation-beforeHandshakeCompletes-network.yml create mode 100644 testdata/server-discovery-and-monitoring/errors/stale-generation-beforeHandshakeCompletes-timeout.json create mode 100644 testdata/server-discovery-and-monitoring/errors/stale-generation-beforeHandshakeCompletes-timeout.yml create mode 100644 testdata/server-discovery-and-monitoring/errors/stale-generation.yml.template create mode 100644 testdata/server-discovery-and-monitoring/errors/stale-topologyVersion-InterruptedAtShutdown.json create mode 100644 testdata/server-discovery-and-monitoring/errors/stale-topologyVersion-InterruptedAtShutdown.yml create mode 100644 testdata/server-discovery-and-monitoring/errors/stale-topologyVersion-InterruptedDueToReplStateChange.json create mode 100644 testdata/server-discovery-and-monitoring/errors/stale-topologyVersion-InterruptedDueToReplStateChange.yml create mode 100644 testdata/server-discovery-and-monitoring/errors/stale-topologyVersion-LegacyNotPrimary.json create mode 100644 testdata/server-discovery-and-monitoring/errors/stale-topologyVersion-LegacyNotPrimary.yml create mode 100644 testdata/server-discovery-and-monitoring/errors/stale-topologyVersion-NotPrimaryNoSecondaryOk.json create mode 100644 testdata/server-discovery-and-monitoring/errors/stale-topologyVersion-NotPrimaryNoSecondaryOk.yml create mode 100644 testdata/server-discovery-and-monitoring/errors/stale-topologyVersion-NotPrimaryOrSecondary.json create mode 100644 
testdata/server-discovery-and-monitoring/errors/stale-topologyVersion-NotPrimaryOrSecondary.yml create mode 100644 testdata/server-discovery-and-monitoring/errors/stale-topologyVersion-NotWritablePrimary.json create mode 100644 testdata/server-discovery-and-monitoring/errors/stale-topologyVersion-NotWritablePrimary.yml create mode 100644 testdata/server-discovery-and-monitoring/errors/stale-topologyVersion-PrimarySteppedDown.json create mode 100644 testdata/server-discovery-and-monitoring/errors/stale-topologyVersion-PrimarySteppedDown.yml create mode 100644 testdata/server-discovery-and-monitoring/errors/stale-topologyVersion-ShutdownInProgress.json create mode 100644 testdata/server-discovery-and-monitoring/errors/stale-topologyVersion-ShutdownInProgress.yml create mode 100644 testdata/server-discovery-and-monitoring/errors/stale-topologyVersion.yml.template create mode 100644 testdata/server-discovery-and-monitoring/errors/write_errors_ignored.json create mode 100644 testdata/server-discovery-and-monitoring/errors/write_errors_ignored.yml create mode 100644 testdata/server-discovery-and-monitoring/integration/rediscover-quickly-after-step-down.json create mode 100644 testdata/server-discovery-and-monitoring/integration/rediscover-quickly-after-step-down.yml create mode 100644 testdata/server-discovery-and-monitoring/load-balanced/discover_load_balancer.json create mode 100644 testdata/server-discovery-and-monitoring/load-balanced/discover_load_balancer.yml create mode 100644 testdata/server-discovery-and-monitoring/monitoring/discovered_standalone.json create mode 100644 testdata/server-discovery-and-monitoring/monitoring/discovered_standalone.yml create mode 100644 testdata/server-discovery-and-monitoring/monitoring/load_balancer.json create mode 100644 testdata/server-discovery-and-monitoring/monitoring/load_balancer.yml create mode 100644 testdata/server-discovery-and-monitoring/monitoring/replica_set_with_no_primary.json create mode 100644 testdata/server-discovery-and-monitoring/monitoring/replica_set_with_no_primary.yml create mode 100644 testdata/server-discovery-and-monitoring/monitoring/replica_set_with_primary.json create mode 100644 testdata/server-discovery-and-monitoring/monitoring/replica_set_with_primary.yml create mode 100644 testdata/server-discovery-and-monitoring/monitoring/replica_set_with_removal.json create mode 100644 testdata/server-discovery-and-monitoring/monitoring/replica_set_with_removal.yml create mode 100644 testdata/server-discovery-and-monitoring/monitoring/required_replica_set.json create mode 100644 testdata/server-discovery-and-monitoring/monitoring/required_replica_set.yml create mode 100644 testdata/server-discovery-and-monitoring/monitoring/standalone.json create mode 100644 testdata/server-discovery-and-monitoring/monitoring/standalone.yml create mode 100644 testdata/server-discovery-and-monitoring/monitoring/standalone_suppress_equal_description_changes.json create mode 100644 testdata/server-discovery-and-monitoring/monitoring/standalone_suppress_equal_description_changes.yml create mode 100644 testdata/server-discovery-and-monitoring/rs/compatible.json create mode 100644 testdata/server-discovery-and-monitoring/rs/compatible.yml create mode 100644 testdata/server-discovery-and-monitoring/rs/compatible_unknown.json create mode 100644 testdata/server-discovery-and-monitoring/rs/compatible_unknown.yml create mode 100644 testdata/server-discovery-and-monitoring/rs/discover_arbiters.json create mode 100644 
testdata/server-discovery-and-monitoring/rs/discover_arbiters.yml create mode 100644 testdata/server-discovery-and-monitoring/rs/discover_arbiters_replicaset.json create mode 100644 testdata/server-discovery-and-monitoring/rs/discover_arbiters_replicaset.yml create mode 100644 testdata/server-discovery-and-monitoring/rs/discover_ghost.json create mode 100644 testdata/server-discovery-and-monitoring/rs/discover_ghost.yml create mode 100644 testdata/server-discovery-and-monitoring/rs/discover_ghost_replicaset.json create mode 100644 testdata/server-discovery-and-monitoring/rs/discover_ghost_replicaset.yml create mode 100644 testdata/server-discovery-and-monitoring/rs/discover_hidden.json create mode 100644 testdata/server-discovery-and-monitoring/rs/discover_hidden.yml create mode 100644 testdata/server-discovery-and-monitoring/rs/discover_hidden_replicaset.json create mode 100644 testdata/server-discovery-and-monitoring/rs/discover_hidden_replicaset.yml create mode 100644 testdata/server-discovery-and-monitoring/rs/discover_passives.json create mode 100644 testdata/server-discovery-and-monitoring/rs/discover_passives.yml create mode 100644 testdata/server-discovery-and-monitoring/rs/discover_passives_replicaset.json create mode 100644 testdata/server-discovery-and-monitoring/rs/discover_passives_replicaset.yml create mode 100644 testdata/server-discovery-and-monitoring/rs/discover_primary.json create mode 100644 testdata/server-discovery-and-monitoring/rs/discover_primary.yml create mode 100644 testdata/server-discovery-and-monitoring/rs/discover_primary_replicaset.json create mode 100644 testdata/server-discovery-and-monitoring/rs/discover_primary_replicaset.yml create mode 100644 testdata/server-discovery-and-monitoring/rs/discover_rsother.json create mode 100644 testdata/server-discovery-and-monitoring/rs/discover_rsother.yml create mode 100644 testdata/server-discovery-and-monitoring/rs/discover_rsother_replicaset.json create mode 100644 testdata/server-discovery-and-monitoring/rs/discover_rsother_replicaset.yml create mode 100644 testdata/server-discovery-and-monitoring/rs/discover_secondary.json create mode 100644 testdata/server-discovery-and-monitoring/rs/discover_secondary.yml create mode 100644 testdata/server-discovery-and-monitoring/rs/discover_secondary_replicaset.json create mode 100644 testdata/server-discovery-and-monitoring/rs/discover_secondary_replicaset.yml create mode 100644 testdata/server-discovery-and-monitoring/rs/discovery.json create mode 100644 testdata/server-discovery-and-monitoring/rs/discovery.yml create mode 100644 testdata/server-discovery-and-monitoring/rs/electionId_precedence_setVersion.json create mode 100644 testdata/server-discovery-and-monitoring/rs/electionId_precedence_setVersion.yml create mode 100644 testdata/server-discovery-and-monitoring/rs/equal_electionids.json create mode 100644 testdata/server-discovery-and-monitoring/rs/equal_electionids.yml create mode 100644 testdata/server-discovery-and-monitoring/rs/hosts_differ_from_seeds.json create mode 100644 testdata/server-discovery-and-monitoring/rs/hosts_differ_from_seeds.yml create mode 100644 testdata/server-discovery-and-monitoring/rs/incompatible_arbiter.json create mode 100644 testdata/server-discovery-and-monitoring/rs/incompatible_arbiter.yml create mode 100644 testdata/server-discovery-and-monitoring/rs/incompatible_ghost.json create mode 100644 testdata/server-discovery-and-monitoring/rs/incompatible_ghost.yml create mode 100644 
testdata/server-discovery-and-monitoring/rs/incompatible_other.json create mode 100644 testdata/server-discovery-and-monitoring/rs/incompatible_other.yml create mode 100644 testdata/server-discovery-and-monitoring/rs/ls_timeout.json create mode 100644 testdata/server-discovery-and-monitoring/rs/ls_timeout.yml create mode 100644 testdata/server-discovery-and-monitoring/rs/member_reconfig.json create mode 100644 testdata/server-discovery-and-monitoring/rs/member_reconfig.yml create mode 100644 testdata/server-discovery-and-monitoring/rs/member_standalone.json create mode 100644 testdata/server-discovery-and-monitoring/rs/member_standalone.yml create mode 100644 testdata/server-discovery-and-monitoring/rs/new_primary.json create mode 100644 testdata/server-discovery-and-monitoring/rs/new_primary.yml create mode 100644 testdata/server-discovery-and-monitoring/rs/new_primary_new_electionid.json create mode 100644 testdata/server-discovery-and-monitoring/rs/new_primary_new_electionid.yml create mode 100644 testdata/server-discovery-and-monitoring/rs/new_primary_new_setversion.json create mode 100644 testdata/server-discovery-and-monitoring/rs/new_primary_new_setversion.yml create mode 100644 testdata/server-discovery-and-monitoring/rs/new_primary_wrong_set_name.json create mode 100644 testdata/server-discovery-and-monitoring/rs/new_primary_wrong_set_name.yml create mode 100644 testdata/server-discovery-and-monitoring/rs/non_rs_member.json create mode 100644 testdata/server-discovery-and-monitoring/rs/non_rs_member.yml create mode 100644 testdata/server-discovery-and-monitoring/rs/normalize_case.json create mode 100644 testdata/server-discovery-and-monitoring/rs/normalize_case.yml create mode 100644 testdata/server-discovery-and-monitoring/rs/normalize_case_me.json create mode 100644 testdata/server-discovery-and-monitoring/rs/normalize_case_me.yml create mode 100644 testdata/server-discovery-and-monitoring/rs/null_election_id-pre-6.0.json create mode 100644 testdata/server-discovery-and-monitoring/rs/null_election_id-pre-6.0.yml create mode 100644 testdata/server-discovery-and-monitoring/rs/null_election_id.json create mode 100644 testdata/server-discovery-and-monitoring/rs/null_election_id.yml create mode 100644 testdata/server-discovery-and-monitoring/rs/primary_becomes_ghost.json create mode 100644 testdata/server-discovery-and-monitoring/rs/primary_becomes_ghost.yml create mode 100644 testdata/server-discovery-and-monitoring/rs/primary_becomes_mongos.json create mode 100644 testdata/server-discovery-and-monitoring/rs/primary_becomes_mongos.yml create mode 100644 testdata/server-discovery-and-monitoring/rs/primary_becomes_standalone.json create mode 100644 testdata/server-discovery-and-monitoring/rs/primary_becomes_standalone.yml create mode 100644 testdata/server-discovery-and-monitoring/rs/primary_changes_set_name.json create mode 100644 testdata/server-discovery-and-monitoring/rs/primary_changes_set_name.yml create mode 100644 testdata/server-discovery-and-monitoring/rs/primary_disconnect.json create mode 100644 testdata/server-discovery-and-monitoring/rs/primary_disconnect.yml create mode 100644 testdata/server-discovery-and-monitoring/rs/primary_disconnect_electionid.json create mode 100644 testdata/server-discovery-and-monitoring/rs/primary_disconnect_electionid.yml create mode 100644 testdata/server-discovery-and-monitoring/rs/primary_disconnect_setversion.json create mode 100644 testdata/server-discovery-and-monitoring/rs/primary_disconnect_setversion.yml create mode 100644 
testdata/server-discovery-and-monitoring/rs/primary_hint_from_secondary_with_mismatched_me.json create mode 100644 testdata/server-discovery-and-monitoring/rs/primary_hint_from_secondary_with_mismatched_me.yml create mode 100644 testdata/server-discovery-and-monitoring/rs/primary_mismatched_me.json create mode 100644 testdata/server-discovery-and-monitoring/rs/primary_mismatched_me.yml create mode 100644 testdata/server-discovery-and-monitoring/rs/primary_mismatched_me_not_removed.json create mode 100644 testdata/server-discovery-and-monitoring/rs/primary_mismatched_me_not_removed.yml create mode 100644 testdata/server-discovery-and-monitoring/rs/primary_reports_new_member.json create mode 100644 testdata/server-discovery-and-monitoring/rs/primary_reports_new_member.yml create mode 100644 testdata/server-discovery-and-monitoring/rs/primary_to_no_primary_mismatched_me.json create mode 100644 testdata/server-discovery-and-monitoring/rs/primary_to_no_primary_mismatched_me.yml create mode 100644 testdata/server-discovery-and-monitoring/rs/primary_wrong_set_name.json create mode 100644 testdata/server-discovery-and-monitoring/rs/primary_wrong_set_name.yml create mode 100644 testdata/server-discovery-and-monitoring/rs/repeated.json create mode 100644 testdata/server-discovery-and-monitoring/rs/repeated.yml create mode 100644 testdata/server-discovery-and-monitoring/rs/replicaset_rsnp.json create mode 100644 testdata/server-discovery-and-monitoring/rs/replicaset_rsnp.yml create mode 100644 testdata/server-discovery-and-monitoring/rs/response_from_removed.json create mode 100644 testdata/server-discovery-and-monitoring/rs/response_from_removed.yml create mode 100644 testdata/server-discovery-and-monitoring/rs/sec_not_auth.json create mode 100644 testdata/server-discovery-and-monitoring/rs/sec_not_auth.yml create mode 100644 testdata/server-discovery-and-monitoring/rs/secondary_ignore_ok_0-pre-6.0.json create mode 100644 testdata/server-discovery-and-monitoring/rs/secondary_ignore_ok_0-pre-6.0.yml create mode 100644 testdata/server-discovery-and-monitoring/rs/secondary_ignore_ok_0.json create mode 100644 testdata/server-discovery-and-monitoring/rs/secondary_ignore_ok_0.yml create mode 100644 testdata/server-discovery-and-monitoring/rs/secondary_mismatched_me.json create mode 100644 testdata/server-discovery-and-monitoring/rs/secondary_mismatched_me.yml create mode 100644 testdata/server-discovery-and-monitoring/rs/secondary_wrong_set_name.json create mode 100644 testdata/server-discovery-and-monitoring/rs/secondary_wrong_set_name.yml create mode 100644 testdata/server-discovery-and-monitoring/rs/secondary_wrong_set_name_with_primary.json create mode 100644 testdata/server-discovery-and-monitoring/rs/secondary_wrong_set_name_with_primary.yml create mode 100644 testdata/server-discovery-and-monitoring/rs/set_version_can_rollback.json create mode 100644 testdata/server-discovery-and-monitoring/rs/set_version_can_rollback.yml create mode 100644 testdata/server-discovery-and-monitoring/rs/setversion_equal_max_without_electionid.json create mode 100644 testdata/server-discovery-and-monitoring/rs/setversion_equal_max_without_electionid.yml create mode 100644 testdata/server-discovery-and-monitoring/rs/setversion_greaterthan_max_without_electionid.json create mode 100644 testdata/server-discovery-and-monitoring/rs/setversion_greaterthan_max_without_electionid.yml create mode 100644 testdata/server-discovery-and-monitoring/rs/setversion_without_electionid-pre-6.0.json create mode 100644 
testdata/server-discovery-and-monitoring/rs/setversion_without_electionid-pre-6.0.yml create mode 100644 testdata/server-discovery-and-monitoring/rs/setversion_without_electionid.json create mode 100644 testdata/server-discovery-and-monitoring/rs/setversion_without_electionid.yml create mode 100644 testdata/server-discovery-and-monitoring/rs/stepdown_change_set_name.json create mode 100644 testdata/server-discovery-and-monitoring/rs/stepdown_change_set_name.yml create mode 100644 testdata/server-discovery-and-monitoring/rs/too_new.json create mode 100644 testdata/server-discovery-and-monitoring/rs/too_new.yml create mode 100644 testdata/server-discovery-and-monitoring/rs/too_old.json create mode 100644 testdata/server-discovery-and-monitoring/rs/too_old.yml create mode 100644 testdata/server-discovery-and-monitoring/rs/topology_version_equal.json create mode 100644 testdata/server-discovery-and-monitoring/rs/topology_version_equal.yml create mode 100644 testdata/server-discovery-and-monitoring/rs/topology_version_greater.json create mode 100644 testdata/server-discovery-and-monitoring/rs/topology_version_greater.yml create mode 100644 testdata/server-discovery-and-monitoring/rs/topology_version_less.json create mode 100644 testdata/server-discovery-and-monitoring/rs/topology_version_less.yml create mode 100644 testdata/server-discovery-and-monitoring/rs/unexpected_mongos.json create mode 100644 testdata/server-discovery-and-monitoring/rs/unexpected_mongos.yml create mode 100644 testdata/server-discovery-and-monitoring/rs/use_setversion_without_electionid-pre-6.0.json create mode 100644 testdata/server-discovery-and-monitoring/rs/use_setversion_without_electionid-pre-6.0.yml create mode 100644 testdata/server-discovery-and-monitoring/rs/use_setversion_without_electionid.json create mode 100644 testdata/server-discovery-and-monitoring/rs/use_setversion_without_electionid.yml create mode 100644 testdata/server-discovery-and-monitoring/rs/wrong_set_name.json create mode 100644 testdata/server-discovery-and-monitoring/rs/wrong_set_name.yml create mode 100644 testdata/server-discovery-and-monitoring/sharded/compatible.json create mode 100644 testdata/server-discovery-and-monitoring/sharded/compatible.yml create mode 100644 testdata/server-discovery-and-monitoring/sharded/discover_single_mongos.json create mode 100644 testdata/server-discovery-and-monitoring/sharded/discover_single_mongos.yml create mode 100644 testdata/server-discovery-and-monitoring/sharded/ls_timeout_mongos.json create mode 100644 testdata/server-discovery-and-monitoring/sharded/ls_timeout_mongos.yml create mode 100644 testdata/server-discovery-and-monitoring/sharded/mongos_disconnect.json create mode 100644 testdata/server-discovery-and-monitoring/sharded/mongos_disconnect.yml create mode 100644 testdata/server-discovery-and-monitoring/sharded/multiple_mongoses.json create mode 100644 testdata/server-discovery-and-monitoring/sharded/multiple_mongoses.yml create mode 100644 testdata/server-discovery-and-monitoring/sharded/non_mongos_removed.json create mode 100644 testdata/server-discovery-and-monitoring/sharded/non_mongos_removed.yml create mode 100644 testdata/server-discovery-and-monitoring/sharded/normalize_uri_case.json create mode 100644 testdata/server-discovery-and-monitoring/sharded/normalize_uri_case.yml create mode 100644 testdata/server-discovery-and-monitoring/sharded/too_new.json create mode 100644 testdata/server-discovery-and-monitoring/sharded/too_new.yml create mode 100644 
testdata/server-discovery-and-monitoring/sharded/too_old.json create mode 100644 testdata/server-discovery-and-monitoring/sharded/too_old.yml create mode 100644 testdata/server-discovery-and-monitoring/single/compatible.json create mode 100644 testdata/server-discovery-and-monitoring/single/compatible.yml create mode 100644 testdata/server-discovery-and-monitoring/single/direct_connection_external_ip.json create mode 100644 testdata/server-discovery-and-monitoring/single/direct_connection_external_ip.yml create mode 100644 testdata/server-discovery-and-monitoring/single/direct_connection_mongos.json create mode 100644 testdata/server-discovery-and-monitoring/single/direct_connection_mongos.yml create mode 100644 testdata/server-discovery-and-monitoring/single/direct_connection_replicaset.json create mode 100644 testdata/server-discovery-and-monitoring/single/direct_connection_replicaset.yml create mode 100644 testdata/server-discovery-and-monitoring/single/direct_connection_rsarbiter.json create mode 100644 testdata/server-discovery-and-monitoring/single/direct_connection_rsarbiter.yml create mode 100644 testdata/server-discovery-and-monitoring/single/direct_connection_rsprimary.json create mode 100644 testdata/server-discovery-and-monitoring/single/direct_connection_rsprimary.yml create mode 100644 testdata/server-discovery-and-monitoring/single/direct_connection_rssecondary.json create mode 100644 testdata/server-discovery-and-monitoring/single/direct_connection_rssecondary.yml create mode 100644 testdata/server-discovery-and-monitoring/single/direct_connection_standalone.json create mode 100644 testdata/server-discovery-and-monitoring/single/direct_connection_standalone.yml create mode 100644 testdata/server-discovery-and-monitoring/single/direct_connection_unavailable_seed.json create mode 100644 testdata/server-discovery-and-monitoring/single/direct_connection_unavailable_seed.yml create mode 100644 testdata/server-discovery-and-monitoring/single/direct_connection_wrong_set_name.json create mode 100644 testdata/server-discovery-and-monitoring/single/direct_connection_wrong_set_name.yml create mode 100644 testdata/server-discovery-and-monitoring/single/discover_standalone.json create mode 100644 testdata/server-discovery-and-monitoring/single/discover_standalone.yml create mode 100644 testdata/server-discovery-and-monitoring/single/discover_unavailable_seed.json create mode 100644 testdata/server-discovery-and-monitoring/single/discover_unavailable_seed.yml create mode 100644 testdata/server-discovery-and-monitoring/single/ls_timeout_standalone.json create mode 100644 testdata/server-discovery-and-monitoring/single/ls_timeout_standalone.yml create mode 100644 testdata/server-discovery-and-monitoring/single/not_ok_response.json create mode 100644 testdata/server-discovery-and-monitoring/single/not_ok_response.yml create mode 100644 testdata/server-discovery-and-monitoring/single/standalone_removed.json create mode 100644 testdata/server-discovery-and-monitoring/single/standalone_removed.yml create mode 100644 testdata/server-discovery-and-monitoring/single/standalone_using_legacy_hello.json create mode 100644 testdata/server-discovery-and-monitoring/single/standalone_using_legacy_hello.yml create mode 100644 testdata/server-discovery-and-monitoring/single/too_new.json create mode 100644 testdata/server-discovery-and-monitoring/single/too_new.yml create mode 100644 testdata/server-discovery-and-monitoring/single/too_old.json create mode 100644 
testdata/server-discovery-and-monitoring/single/too_old.yml create mode 100644 testdata/server-discovery-and-monitoring/single/too_old_then_upgraded.json create mode 100644 testdata/server-discovery-and-monitoring/single/too_old_then_upgraded.yml create mode 100644 testdata/server-discovery-and-monitoring/unified/auth-error.json create mode 100644 testdata/server-discovery-and-monitoring/unified/auth-error.yml create mode 100644 testdata/server-discovery-and-monitoring/unified/auth-misc-command-error.json create mode 100644 testdata/server-discovery-and-monitoring/unified/auth-misc-command-error.yml create mode 100644 testdata/server-discovery-and-monitoring/unified/auth-network-error.json create mode 100644 testdata/server-discovery-and-monitoring/unified/auth-network-error.yml create mode 100644 testdata/server-discovery-and-monitoring/unified/auth-network-timeout-error.json create mode 100644 testdata/server-discovery-and-monitoring/unified/auth-network-timeout-error.yml create mode 100644 testdata/server-discovery-and-monitoring/unified/auth-shutdown-error.json create mode 100644 testdata/server-discovery-and-monitoring/unified/auth-shutdown-error.yml create mode 100644 testdata/server-discovery-and-monitoring/unified/cancel-server-check.json create mode 100644 testdata/server-discovery-and-monitoring/unified/cancel-server-check.yml create mode 100644 testdata/server-discovery-and-monitoring/unified/connectTimeoutMS.json create mode 100644 testdata/server-discovery-and-monitoring/unified/connectTimeoutMS.yml create mode 100644 testdata/server-discovery-and-monitoring/unified/find-network-error.json create mode 100644 testdata/server-discovery-and-monitoring/unified/find-network-error.yml create mode 100644 testdata/server-discovery-and-monitoring/unified/find-network-timeout-error.json create mode 100644 testdata/server-discovery-and-monitoring/unified/find-network-timeout-error.yml create mode 100644 testdata/server-discovery-and-monitoring/unified/find-shutdown-error.json create mode 100644 testdata/server-discovery-and-monitoring/unified/find-shutdown-error.yml create mode 100644 testdata/server-discovery-and-monitoring/unified/hello-command-error.json create mode 100644 testdata/server-discovery-and-monitoring/unified/hello-command-error.yml create mode 100644 testdata/server-discovery-and-monitoring/unified/hello-network-error.json create mode 100644 testdata/server-discovery-and-monitoring/unified/hello-network-error.yml create mode 100644 testdata/server-discovery-and-monitoring/unified/hello-timeout.json create mode 100644 testdata/server-discovery-and-monitoring/unified/insert-network-error.json create mode 100644 testdata/server-discovery-and-monitoring/unified/insert-network-error.yml create mode 100644 testdata/server-discovery-and-monitoring/unified/insert-shutdown-error.json create mode 100644 testdata/server-discovery-and-monitoring/unified/insert-shutdown-error.yml create mode 100644 testdata/server-discovery-and-monitoring/unified/interruptInUse-pool-clear.json create mode 100644 testdata/server-discovery-and-monitoring/unified/interruptInUse-pool-clear.yml create mode 100644 testdata/server-discovery-and-monitoring/unified/logging-loadbalanced.json create mode 100644 testdata/server-discovery-and-monitoring/unified/logging-loadbalanced.yml create mode 100644 testdata/server-discovery-and-monitoring/unified/logging-replicaset.json create mode 100644 testdata/server-discovery-and-monitoring/unified/logging-replicaset.yml create mode 100644 
testdata/server-discovery-and-monitoring/unified/logging-sharded.json create mode 100644 testdata/server-discovery-and-monitoring/unified/logging-sharded.yml create mode 100644 testdata/server-discovery-and-monitoring/unified/logging-standalone.json create mode 100644 testdata/server-discovery-and-monitoring/unified/logging-standalone.yml create mode 100644 testdata/server-discovery-and-monitoring/unified/minPoolSize-error.json create mode 100644 testdata/server-discovery-and-monitoring/unified/minPoolSize-error.yml create mode 100644 testdata/server-discovery-and-monitoring/unified/pool-cleared-error.json create mode 100644 testdata/server-discovery-and-monitoring/unified/pool-cleared-error.yml create mode 100644 testdata/server-discovery-and-monitoring/unified/serverMonitoringMode.json create mode 100644 testdata/server-discovery-and-monitoring/unified/serverMonitoringMode.yml create mode 100644 testdata/server-selection/in_window/equilibrium.json create mode 100644 testdata/server-selection/in_window/equilibrium.yml create mode 100644 testdata/server-selection/in_window/many-choices.json create mode 100644 testdata/server-selection/in_window/many-choices.yml create mode 100644 testdata/server-selection/in_window/one-least-two-tied.json create mode 100644 testdata/server-selection/in_window/one-least-two-tied.yml create mode 100644 testdata/server-selection/in_window/rs-equilibrium.json create mode 100644 testdata/server-selection/in_window/rs-equilibrium.yml create mode 100644 testdata/server-selection/in_window/rs-three-choices.json create mode 100644 testdata/server-selection/in_window/rs-three-choices.yml create mode 100644 testdata/server-selection/in_window/three-choices.json create mode 100644 testdata/server-selection/in_window/three-choices.yml create mode 100644 testdata/server-selection/in_window/two-choices.json create mode 100644 testdata/server-selection/in_window/two-choices.yml create mode 100644 testdata/server-selection/in_window/two-least.json create mode 100644 testdata/server-selection/in_window/two-least.yml create mode 100644 testdata/server-selection/logging/load-balanced.json create mode 100644 testdata/server-selection/logging/load-balanced.yml create mode 100644 testdata/server-selection/logging/operation-id.json create mode 100644 testdata/server-selection/logging/operation-id.yml create mode 100644 testdata/server-selection/logging/replica-set.json create mode 100644 testdata/server-selection/logging/replica-set.yml create mode 100644 testdata/server-selection/logging/sharded.json create mode 100644 testdata/server-selection/logging/sharded.yml create mode 100644 testdata/server-selection/logging/standalone.json create mode 100644 testdata/server-selection/logging/standalone.yml create mode 100644 testdata/server-selection/rtt/first_value.json create mode 100644 testdata/server-selection/rtt/first_value.yml create mode 100644 testdata/server-selection/rtt/first_value_zero.json create mode 100644 testdata/server-selection/rtt/first_value_zero.yml create mode 100644 testdata/server-selection/rtt/value_test_1.json create mode 100644 testdata/server-selection/rtt/value_test_1.yml create mode 100644 testdata/server-selection/rtt/value_test_2.json create mode 100644 testdata/server-selection/rtt/value_test_2.yml create mode 100644 testdata/server-selection/rtt/value_test_3.json create mode 100644 testdata/server-selection/rtt/value_test_3.yml create mode 100644 testdata/server-selection/rtt/value_test_4.json create mode 100644 
testdata/server-selection/rtt/value_test_4.yml create mode 100644 testdata/server-selection/rtt/value_test_5.json create mode 100644 testdata/server-selection/rtt/value_test_5.yml create mode 100644 testdata/server-selection/server_selection/LoadBalanced/read/Nearest.json create mode 100644 testdata/server-selection/server_selection/LoadBalanced/read/Nearest.yml create mode 100644 testdata/server-selection/server_selection/LoadBalanced/read/Primary.json create mode 100644 testdata/server-selection/server_selection/LoadBalanced/read/Primary.yml create mode 100644 testdata/server-selection/server_selection/LoadBalanced/read/PrimaryPreferred.json create mode 100644 testdata/server-selection/server_selection/LoadBalanced/read/PrimaryPreferred.yml create mode 100644 testdata/server-selection/server_selection/LoadBalanced/read/Secondary.json create mode 100644 testdata/server-selection/server_selection/LoadBalanced/read/Secondary.yml create mode 100644 testdata/server-selection/server_selection/LoadBalanced/read/SecondaryPreferred.json create mode 100644 testdata/server-selection/server_selection/LoadBalanced/read/SecondaryPreferred.yml create mode 100644 testdata/server-selection/server_selection/LoadBalanced/write/Nearest.json create mode 100644 testdata/server-selection/server_selection/LoadBalanced/write/Nearest.yml create mode 100644 testdata/server-selection/server_selection/LoadBalanced/write/Primary.json create mode 100644 testdata/server-selection/server_selection/LoadBalanced/write/Primary.yml create mode 100644 testdata/server-selection/server_selection/LoadBalanced/write/PrimaryPreferred.json create mode 100644 testdata/server-selection/server_selection/LoadBalanced/write/PrimaryPreferred.yml create mode 100644 testdata/server-selection/server_selection/LoadBalanced/write/Secondary.json create mode 100644 testdata/server-selection/server_selection/LoadBalanced/write/Secondary.yml create mode 100644 testdata/server-selection/server_selection/LoadBalanced/write/SecondaryPreferred.json create mode 100644 testdata/server-selection/server_selection/LoadBalanced/write/SecondaryPreferred.yml create mode 100644 testdata/server-selection/server_selection/ReplicaSetNoPrimary/read/Nearest.json create mode 100644 testdata/server-selection/server_selection/ReplicaSetNoPrimary/read/Nearest.yml create mode 100644 testdata/server-selection/server_selection/ReplicaSetNoPrimary/read/Nearest_multiple.json create mode 100644 testdata/server-selection/server_selection/ReplicaSetNoPrimary/read/Nearest_multiple.yml create mode 100644 testdata/server-selection/server_selection/ReplicaSetNoPrimary/read/Nearest_non_matching.json create mode 100644 testdata/server-selection/server_selection/ReplicaSetNoPrimary/read/Nearest_non_matching.yml create mode 100644 testdata/server-selection/server_selection/ReplicaSetNoPrimary/read/PossiblePrimary.json create mode 100644 testdata/server-selection/server_selection/ReplicaSetNoPrimary/read/PossiblePrimary.yml create mode 100644 testdata/server-selection/server_selection/ReplicaSetNoPrimary/read/PossiblePrimaryNearest.json create mode 100644 testdata/server-selection/server_selection/ReplicaSetNoPrimary/read/PossiblePrimaryNearest.yml create mode 100644 testdata/server-selection/server_selection/ReplicaSetNoPrimary/read/Primary.json create mode 100644 testdata/server-selection/server_selection/ReplicaSetNoPrimary/read/Primary.yml create mode 100644 testdata/server-selection/server_selection/ReplicaSetNoPrimary/read/PrimaryPreferred.json create mode 100644 
testdata/server-selection/server_selection/ReplicaSetNoPrimary/read/PrimaryPreferred.yml create mode 100644 testdata/server-selection/server_selection/ReplicaSetNoPrimary/read/PrimaryPreferred_non_matching.json create mode 100644 testdata/server-selection/server_selection/ReplicaSetNoPrimary/read/PrimaryPreferred_non_matching.yml create mode 100644 testdata/server-selection/server_selection/ReplicaSetNoPrimary/read/Secondary.json create mode 100644 testdata/server-selection/server_selection/ReplicaSetNoPrimary/read/Secondary.yml create mode 100644 testdata/server-selection/server_selection/ReplicaSetNoPrimary/read/SecondaryPreferred.json create mode 100644 testdata/server-selection/server_selection/ReplicaSetNoPrimary/read/SecondaryPreferred.yml create mode 100644 testdata/server-selection/server_selection/ReplicaSetNoPrimary/read/SecondaryPreferred_non_matching.json create mode 100644 testdata/server-selection/server_selection/ReplicaSetNoPrimary/read/SecondaryPreferred_non_matching.yml create mode 100644 testdata/server-selection/server_selection/ReplicaSetNoPrimary/read/Secondary_multi_tags.json create mode 100644 testdata/server-selection/server_selection/ReplicaSetNoPrimary/read/Secondary_multi_tags.yml create mode 100644 testdata/server-selection/server_selection/ReplicaSetNoPrimary/read/Secondary_multi_tags2.json create mode 100644 testdata/server-selection/server_selection/ReplicaSetNoPrimary/read/Secondary_multi_tags2.yml create mode 100644 testdata/server-selection/server_selection/ReplicaSetNoPrimary/read/Secondary_non_matching.json create mode 100644 testdata/server-selection/server_selection/ReplicaSetNoPrimary/read/Secondary_non_matching.yml create mode 100644 testdata/server-selection/server_selection/ReplicaSetNoPrimary/write/SecondaryPreferred.json create mode 100644 testdata/server-selection/server_selection/ReplicaSetNoPrimary/write/SecondaryPreferred.yml create mode 100644 testdata/server-selection/server_selection/ReplicaSetWithPrimary/read/Nearest.json create mode 100644 testdata/server-selection/server_selection/ReplicaSetWithPrimary/read/Nearest.yml create mode 100644 testdata/server-selection/server_selection/ReplicaSetWithPrimary/read/Nearest_multiple.json create mode 100644 testdata/server-selection/server_selection/ReplicaSetWithPrimary/read/Nearest_multiple.yml create mode 100644 testdata/server-selection/server_selection/ReplicaSetWithPrimary/read/Nearest_non_matching.json create mode 100644 testdata/server-selection/server_selection/ReplicaSetWithPrimary/read/Nearest_non_matching.yml create mode 100644 testdata/server-selection/server_selection/ReplicaSetWithPrimary/read/Primary.json create mode 100644 testdata/server-selection/server_selection/ReplicaSetWithPrimary/read/Primary.yml create mode 100644 testdata/server-selection/server_selection/ReplicaSetWithPrimary/read/PrimaryPreferred.json create mode 100644 testdata/server-selection/server_selection/ReplicaSetWithPrimary/read/PrimaryPreferred.yml create mode 100644 testdata/server-selection/server_selection/ReplicaSetWithPrimary/read/PrimaryPreferred_non_matching.json create mode 100644 testdata/server-selection/server_selection/ReplicaSetWithPrimary/read/PrimaryPreferred_non_matching.yml create mode 100644 testdata/server-selection/server_selection/ReplicaSetWithPrimary/read/Secondary.json create mode 100644 testdata/server-selection/server_selection/ReplicaSetWithPrimary/read/Secondary.yml create mode 100644 testdata/server-selection/server_selection/ReplicaSetWithPrimary/read/SecondaryPreferred.json 
create mode 100644 testdata/server-selection/server_selection/ReplicaSetWithPrimary/read/SecondaryPreferred.yml create mode 100644 testdata/server-selection/server_selection/ReplicaSetWithPrimary/read/SecondaryPreferred_non_matching.json create mode 100644 testdata/server-selection/server_selection/ReplicaSetWithPrimary/read/SecondaryPreferred_non_matching.yml create mode 100644 testdata/server-selection/server_selection/ReplicaSetWithPrimary/read/SecondaryPreferred_tags.json create mode 100644 testdata/server-selection/server_selection/ReplicaSetWithPrimary/read/SecondaryPreferred_tags.yml create mode 100644 testdata/server-selection/server_selection/ReplicaSetWithPrimary/read/Secondary_non_matching.json create mode 100644 testdata/server-selection/server_selection/ReplicaSetWithPrimary/read/Secondary_non_matching.yml create mode 100644 testdata/server-selection/server_selection/ReplicaSetWithPrimary/write/SecondaryPreferred.json create mode 100644 testdata/server-selection/server_selection/ReplicaSetWithPrimary/write/SecondaryPreferred.yml create mode 100644 testdata/server-selection/server_selection/Sharded/read/Nearest.json create mode 100644 testdata/server-selection/server_selection/Sharded/read/Nearest.yml create mode 100644 testdata/server-selection/server_selection/Sharded/read/Primary.json create mode 100644 testdata/server-selection/server_selection/Sharded/read/Primary.yml create mode 100644 testdata/server-selection/server_selection/Sharded/read/PrimaryPreferred.json create mode 100644 testdata/server-selection/server_selection/Sharded/read/PrimaryPreferred.yml create mode 100644 testdata/server-selection/server_selection/Sharded/read/Secondary.json create mode 100644 testdata/server-selection/server_selection/Sharded/read/Secondary.yml create mode 100644 testdata/server-selection/server_selection/Sharded/read/SecondaryPreferred.json create mode 100644 testdata/server-selection/server_selection/Sharded/read/SecondaryPreferred.yml create mode 100644 testdata/server-selection/server_selection/Sharded/write/Nearest.json create mode 100644 testdata/server-selection/server_selection/Sharded/write/Nearest.yml create mode 100644 testdata/server-selection/server_selection/Sharded/write/Primary.json create mode 100644 testdata/server-selection/server_selection/Sharded/write/Primary.yml create mode 100644 testdata/server-selection/server_selection/Sharded/write/PrimaryPreferred.json create mode 100644 testdata/server-selection/server_selection/Sharded/write/PrimaryPreferred.yml create mode 100644 testdata/server-selection/server_selection/Sharded/write/Secondary.json create mode 100644 testdata/server-selection/server_selection/Sharded/write/Secondary.yml create mode 100644 testdata/server-selection/server_selection/Sharded/write/SecondaryPreferred.json create mode 100644 testdata/server-selection/server_selection/Sharded/write/SecondaryPreferred.yml create mode 100644 testdata/server-selection/server_selection/Single/read/SecondaryPreferred.json create mode 100644 testdata/server-selection/server_selection/Single/read/SecondaryPreferred.yml create mode 100644 testdata/server-selection/server_selection/Single/write/SecondaryPreferred.json create mode 100644 testdata/server-selection/server_selection/Single/write/SecondaryPreferred.yml create mode 100644 testdata/server-selection/server_selection/Unknown/read/SecondaryPreferred.json create mode 100644 testdata/server-selection/server_selection/Unknown/read/SecondaryPreferred.yml create mode 100644 
testdata/server-selection/server_selection/Unknown/write/SecondaryPreferred.json create mode 100644 testdata/server-selection/server_selection/Unknown/write/SecondaryPreferred.yml create mode 100644 testdata/sessions/driver-sessions-dirty-session-errors.json create mode 100644 testdata/sessions/driver-sessions-dirty-session-errors.yml create mode 100644 testdata/sessions/driver-sessions-server-support.json create mode 100644 testdata/sessions/driver-sessions-server-support.yml create mode 100644 testdata/sessions/implicit-sessions-default-causal-consistency.json create mode 100644 testdata/sessions/implicit-sessions-default-causal-consistency.yml create mode 100644 testdata/sessions/snapshot-sessions-not-supported-client-error.json create mode 100644 testdata/sessions/snapshot-sessions-not-supported-client-error.yml create mode 100644 testdata/sessions/snapshot-sessions-not-supported-server-error.json create mode 100644 testdata/sessions/snapshot-sessions-not-supported-server-error.yml create mode 100644 testdata/sessions/snapshot-sessions-unsupported-ops.json create mode 100644 testdata/sessions/snapshot-sessions-unsupported-ops.yml create mode 100644 testdata/sessions/snapshot-sessions.json create mode 100644 testdata/sessions/snapshot-sessions.yml create mode 100644 testdata/single_and_multi_document/large_doc.json.gz create mode 100644 testdata/single_and_multi_document/small_doc.json.gz create mode 100644 testdata/single_and_multi_document/tweet.json.gz create mode 100644 testdata/transactions/legacy/abort.json create mode 100644 testdata/transactions/legacy/abort.yml create mode 100644 testdata/transactions/legacy/bulk.json create mode 100644 testdata/transactions/legacy/bulk.yml create mode 100644 testdata/transactions/legacy/causal-consistency.json create mode 100644 testdata/transactions/legacy/causal-consistency.yml create mode 100644 testdata/transactions/legacy/commit.json create mode 100644 testdata/transactions/legacy/commit.yml create mode 100644 testdata/transactions/legacy/count.json create mode 100644 testdata/transactions/legacy/count.yml create mode 100644 testdata/transactions/legacy/create-collection.json create mode 100644 testdata/transactions/legacy/create-collection.yml create mode 100644 testdata/transactions/legacy/create-index.json create mode 100644 testdata/transactions/legacy/create-index.yml create mode 100644 testdata/transactions/legacy/delete.json create mode 100644 testdata/transactions/legacy/delete.yml create mode 100644 testdata/transactions/legacy/error-labels.json create mode 100644 testdata/transactions/legacy/error-labels.yml create mode 100644 testdata/transactions/legacy/errors-client.json create mode 100644 testdata/transactions/legacy/errors-client.yml create mode 100644 testdata/transactions/legacy/errors.json create mode 100644 testdata/transactions/legacy/errors.yml create mode 100644 testdata/transactions/legacy/findOneAndDelete.json create mode 100644 testdata/transactions/legacy/findOneAndDelete.yml create mode 100644 testdata/transactions/legacy/findOneAndReplace.json create mode 100644 testdata/transactions/legacy/findOneAndReplace.yml create mode 100644 testdata/transactions/legacy/findOneAndUpdate.json create mode 100644 testdata/transactions/legacy/findOneAndUpdate.yml create mode 100644 testdata/transactions/legacy/insert.json create mode 100644 testdata/transactions/legacy/insert.yml create mode 100644 testdata/transactions/legacy/isolation.json create mode 100644 testdata/transactions/legacy/isolation.yml create mode 100644 
testdata/transactions/legacy/mongos-recovery-token.json create mode 100644 testdata/transactions/legacy/mongos-recovery-token.yml create mode 100644 testdata/transactions/legacy/pin-mongos.json create mode 100644 testdata/transactions/legacy/pin-mongos.yml create mode 100644 testdata/transactions/legacy/read-concern.json create mode 100644 testdata/transactions/legacy/read-concern.yml create mode 100644 testdata/transactions/legacy/read-pref.json create mode 100644 testdata/transactions/legacy/read-pref.yml create mode 100644 testdata/transactions/legacy/reads.json create mode 100644 testdata/transactions/legacy/reads.yml create mode 100644 testdata/transactions/legacy/retryable-abort-errorLabels.json create mode 100644 testdata/transactions/legacy/retryable-abort-errorLabels.yml create mode 100644 testdata/transactions/legacy/retryable-abort.json create mode 100644 testdata/transactions/legacy/retryable-abort.yml create mode 100644 testdata/transactions/legacy/retryable-commit-errorLabels.json create mode 100644 testdata/transactions/legacy/retryable-commit-errorLabels.yml create mode 100644 testdata/transactions/legacy/retryable-commit.json create mode 100644 testdata/transactions/legacy/retryable-commit.yml create mode 100644 testdata/transactions/legacy/retryable-writes.json create mode 100644 testdata/transactions/legacy/retryable-writes.yml create mode 100644 testdata/transactions/legacy/run-command.json create mode 100644 testdata/transactions/legacy/run-command.yml create mode 100644 testdata/transactions/legacy/transaction-options-repl.json create mode 100644 testdata/transactions/legacy/transaction-options-repl.yml create mode 100644 testdata/transactions/legacy/transaction-options.json create mode 100644 testdata/transactions/legacy/transaction-options.yml create mode 100644 testdata/transactions/legacy/update.json create mode 100644 testdata/transactions/legacy/update.yml create mode 100644 testdata/transactions/legacy/write-concern.json create mode 100644 testdata/transactions/legacy/write-concern.yml create mode 100644 testdata/transactions/unified/client-bulkWrite.json create mode 100644 testdata/transactions/unified/client-bulkWrite.yml create mode 100644 testdata/transactions/unified/do-not-retry-read-in-transaction.json create mode 100644 testdata/transactions/unified/do-not-retry-read-in-transaction.yml create mode 100644 testdata/transactions/unified/mongos-pin-auto.json create mode 100644 testdata/transactions/unified/mongos-pin-auto.yml create mode 100644 testdata/transactions/unified/mongos-unpin.json create mode 100644 testdata/transactions/unified/mongos-unpin.yml create mode 100644 testdata/unified-test-format/valid-fail/assertNumberConnectionsCheckedOut.json create mode 100644 testdata/unified-test-format/valid-fail/assertNumberConnectionsCheckedOut.yml create mode 100644 testdata/unified-test-format/valid-fail/clientEncryptionOpts-missing-kms-credentials.json create mode 100644 testdata/unified-test-format/valid-fail/clientEncryptionOpts-missing-kms-credentials.yml create mode 100644 testdata/unified-test-format/valid-fail/clientEncryptionOpts-no-kms.json create mode 100644 testdata/unified-test-format/valid-fail/clientEncryptionOpts-no-kms.yml create mode 100644 testdata/unified-test-format/valid-fail/entity-bucket-database-undefined.json create mode 100644 testdata/unified-test-format/valid-fail/entity-bucket-database-undefined.yml create mode 100644 testdata/unified-test-format/valid-fail/entity-client-apiVersion-unsupported.json create mode 100644 
testdata/unified-test-format/valid-fail/entity-client-apiVersion-unsupported.yml create mode 100644 testdata/unified-test-format/valid-fail/entity-client-storeEventsAsEntities-conflict_with_client_id.json create mode 100644 testdata/unified-test-format/valid-fail/entity-client-storeEventsAsEntities-conflict_with_client_id.yml create mode 100644 testdata/unified-test-format/valid-fail/entity-client-storeEventsAsEntities-conflict_within_different_array.json create mode 100644 testdata/unified-test-format/valid-fail/entity-client-storeEventsAsEntities-conflict_within_different_array.yml create mode 100644 testdata/unified-test-format/valid-fail/entity-client-storeEventsAsEntities-conflict_within_same_array.json create mode 100644 testdata/unified-test-format/valid-fail/entity-client-storeEventsAsEntities-conflict_within_same_array.yml create mode 100644 testdata/unified-test-format/valid-fail/entity-collection-database-undefined.json create mode 100644 testdata/unified-test-format/valid-fail/entity-collection-database-undefined.yml create mode 100644 testdata/unified-test-format/valid-fail/entity-database-client-undefined.json create mode 100644 testdata/unified-test-format/valid-fail/entity-database-client-undefined.yml create mode 100644 testdata/unified-test-format/valid-fail/entity-find-cursor.json create mode 100644 testdata/unified-test-format/valid-fail/entity-find-cursor.yml create mode 100644 testdata/unified-test-format/valid-fail/entity-session-client-undefined.json create mode 100644 testdata/unified-test-format/valid-fail/entity-session-client-undefined.yml create mode 100644 testdata/unified-test-format/valid-fail/expectedLogMessagesForClient-ignoreExtraMessages-type.json create mode 100644 testdata/unified-test-format/valid-fail/expectedLogMessagesForClient-ignoreExtraMessages-type.yml create mode 100644 testdata/unified-test-format/valid-fail/expectedLogMessagesForClient-ignoreMessages-items.json create mode 100644 testdata/unified-test-format/valid-fail/expectedLogMessagesForClient-ignoreMessages-items.yml create mode 100644 testdata/unified-test-format/valid-fail/expectedLogMessagesForClient-ignoreMessages-type.json create mode 100644 testdata/unified-test-format/valid-fail/expectedLogMessagesForClient-ignoreMessages-type.yml create mode 100644 testdata/unified-test-format/valid-fail/expectedSdamEvent-topologyDescriptionChangedEvent-additionalProperties.json create mode 100644 testdata/unified-test-format/valid-fail/expectedSdamEvent-topologyDescriptionChangedEvent-additionalProperties.yml create mode 100644 testdata/unified-test-format/valid-fail/ignoreResultAndError.json create mode 100644 testdata/unified-test-format/valid-fail/ignoreResultAndError.yml create mode 100644 testdata/unified-test-format/valid-fail/kmsProviders-missing_aws_kms_credentials.json create mode 100644 testdata/unified-test-format/valid-fail/kmsProviders-missing_aws_kms_credentials.yml create mode 100644 testdata/unified-test-format/valid-fail/kmsProviders-missing_azure_kms_credentials.json create mode 100644 testdata/unified-test-format/valid-fail/kmsProviders-missing_azure_kms_credentials.yml create mode 100644 testdata/unified-test-format/valid-fail/kmsProviders-missing_gcp_kms_credentials.json create mode 100644 testdata/unified-test-format/valid-fail/kmsProviders-missing_gcp_kms_credentials.yml create mode 100644 testdata/unified-test-format/valid-fail/kmsProviders-no_kms.json create mode 100644 testdata/unified-test-format/valid-fail/kmsProviders-no_kms.yml create mode 100644 
testdata/unified-test-format/valid-fail/operation-failure.json create mode 100644 testdata/unified-test-format/valid-fail/operation-failure.yml create mode 100644 testdata/unified-test-format/valid-fail/operation-unsupported.json create mode 100644 testdata/unified-test-format/valid-fail/operation-unsupported.yml create mode 100644 testdata/unified-test-format/valid-fail/returnDocument-enum-invalid.json create mode 100644 testdata/unified-test-format/valid-fail/returnDocument-enum-invalid.yml create mode 100644 testdata/unified-test-format/valid-fail/schemaVersion-unsupported.json create mode 100644 testdata/unified-test-format/valid-fail/schemaVersion-unsupported.yml create mode 100644 testdata/unified-test-format/valid-pass/assertNumberConnectionsCheckedOut.json create mode 100644 testdata/unified-test-format/valid-pass/assertNumberConnectionsCheckedOut.yml create mode 100644 testdata/unified-test-format/valid-pass/collectionData-createOptions.json create mode 100644 testdata/unified-test-format/valid-pass/collectionData-createOptions.yml create mode 100644 testdata/unified-test-format/valid-pass/entity-client-cmap-events.json create mode 100644 testdata/unified-test-format/valid-pass/entity-client-cmap-events.yml create mode 100644 testdata/unified-test-format/valid-pass/entity-client-storeEventsAsEntities.json create mode 100644 testdata/unified-test-format/valid-pass/entity-client-storeEventsAsEntities.yml create mode 100644 testdata/unified-test-format/valid-pass/entity-commandCursor.json create mode 100644 testdata/unified-test-format/valid-pass/entity-commandCursor.yml create mode 100644 testdata/unified-test-format/valid-pass/entity-cursor-iterateOnce.json create mode 100644 testdata/unified-test-format/valid-pass/entity-cursor-iterateOnce.yml create mode 100644 testdata/unified-test-format/valid-pass/entity-find-cursor.json create mode 100644 testdata/unified-test-format/valid-pass/entity-find-cursor.yml create mode 100644 testdata/unified-test-format/valid-pass/expectedEventsForClient-eventType.json create mode 100644 testdata/unified-test-format/valid-pass/expectedEventsForClient-eventType.yml create mode 100644 testdata/unified-test-format/valid-pass/expectedEventsForClient-ignoreExtraEvents.json create mode 100644 testdata/unified-test-format/valid-pass/expectedEventsForClient-ignoreExtraEvents.yml create mode 100644 testdata/unified-test-format/valid-pass/ignoreResultAndError.json create mode 100644 testdata/unified-test-format/valid-pass/ignoreResultAndError.yml create mode 100644 testdata/unified-test-format/valid-pass/kmsProviders-explicit_kms_credentials.json create mode 100644 testdata/unified-test-format/valid-pass/kmsProviders-explicit_kms_credentials.yml create mode 100644 testdata/unified-test-format/valid-pass/kmsProviders-mixed_kms_credential_fields.json create mode 100644 testdata/unified-test-format/valid-pass/kmsProviders-mixed_kms_credential_fields.yml create mode 100644 testdata/unified-test-format/valid-pass/kmsProviders-placeholder_kms_credentials.json create mode 100644 testdata/unified-test-format/valid-pass/kmsProviders-placeholder_kms_credentials.yml create mode 100644 testdata/unified-test-format/valid-pass/kmsProviders-unconfigured_kms.json create mode 100644 testdata/unified-test-format/valid-pass/kmsProviders-unconfigured_kms.yml create mode 100644 testdata/unified-test-format/valid-pass/matches-lte-operator.json create mode 100644 testdata/unified-test-format/valid-pass/matches-lte-operator.yml create mode 100644 
testdata/unified-test-format/valid-pass/observeSensitiveCommands.json create mode 100644 testdata/unified-test-format/valid-pass/observeSensitiveCommands.yml create mode 100644 testdata/unified-test-format/valid-pass/poc-change-streams.json create mode 100644 testdata/unified-test-format/valid-pass/poc-change-streams.yml create mode 100644 testdata/unified-test-format/valid-pass/poc-command-monitoring.json create mode 100644 testdata/unified-test-format/valid-pass/poc-command-monitoring.yml create mode 100644 testdata/unified-test-format/valid-pass/poc-crud.json create mode 100644 testdata/unified-test-format/valid-pass/poc-crud.yml create mode 100644 testdata/unified-test-format/valid-pass/poc-gridfs.json create mode 100644 testdata/unified-test-format/valid-pass/poc-gridfs.yml create mode 100644 testdata/unified-test-format/valid-pass/poc-retryable-reads.json create mode 100644 testdata/unified-test-format/valid-pass/poc-retryable-reads.yml create mode 100644 testdata/unified-test-format/valid-pass/poc-retryable-writes.json create mode 100644 testdata/unified-test-format/valid-pass/poc-retryable-writes.yml create mode 100644 testdata/unified-test-format/valid-pass/poc-sessions.json create mode 100644 testdata/unified-test-format/valid-pass/poc-sessions.yml create mode 100644 testdata/unified-test-format/valid-pass/poc-transactions-convenient-api.json create mode 100644 testdata/unified-test-format/valid-pass/poc-transactions-convenient-api.yml create mode 100644 testdata/unified-test-format/valid-pass/poc-transactions-mongos-pin-auto.json create mode 100644 testdata/unified-test-format/valid-pass/poc-transactions-mongos-pin-auto.yml create mode 100644 testdata/unified-test-format/valid-pass/poc-transactions.json create mode 100644 testdata/unified-test-format/valid-pass/poc-transactions.yml create mode 100644 testdata/uri-options/auth-options.json create mode 100644 testdata/uri-options/auth-options.yml create mode 100644 testdata/uri-options/ca.pem create mode 100644 testdata/uri-options/cert.pem create mode 100644 testdata/uri-options/client.pem create mode 100644 testdata/uri-options/compression-options.json create mode 100644 testdata/uri-options/compression-options.yml create mode 100644 testdata/uri-options/concern-options.json create mode 100644 testdata/uri-options/concern-options.yml create mode 100644 testdata/uri-options/connection-options.json create mode 100644 testdata/uri-options/connection-options.yml create mode 100644 testdata/uri-options/connection-pool-options.json create mode 100644 testdata/uri-options/connection-pool-options.yml create mode 100644 testdata/uri-options/read-preference-options.json create mode 100644 testdata/uri-options/read-preference-options.yml create mode 100644 testdata/uri-options/sdam-options.json create mode 100644 testdata/uri-options/sdam-options.yml create mode 100644 testdata/uri-options/single-threaded-options.json create mode 100644 testdata/uri-options/single-threaded-options.yml create mode 100644 testdata/uri-options/srv-options.json create mode 100644 testdata/uri-options/srv-options.yml create mode 100644 testdata/uri-options/tls-options.json create mode 100644 testdata/uri-options/tls-options.yml create mode 100644 testdata/versioned-api/crud-api-version-1-strict.json create mode 100644 testdata/versioned-api/crud-api-version-1-strict.yml create mode 100644 testdata/versioned-api/crud-api-version-1.json create mode 100644 testdata/versioned-api/crud-api-version-1.yml create mode 100644 
testdata/versioned-api/runcommand-helper-no-api-version-declared.json create mode 100644 testdata/versioned-api/runcommand-helper-no-api-version-declared.yml create mode 100644 testdata/versioned-api/test-commands-deprecation-errors.json create mode 100644 testdata/versioned-api/test-commands-deprecation-errors.yml create mode 100644 testdata/versioned-api/test-commands-strict-mode.json create mode 100644 testdata/versioned-api/test-commands-strict-mode.yml create mode 100644 testdata/versioned-api/transaction-handling.json create mode 100644 testdata/versioned-api/transaction-handling.yml create mode 100644 time_codec.go create mode 100644 time_codec_test.go create mode 100644 truncation_test.go create mode 100644 type_test.go create mode 100644 types.go create mode 100644 uint_codec.go create mode 100644 unmarshal.go create mode 100644 unmarshal_test.go create mode 100644 unmarshal_value_test.go create mode 100644 unmarshaling_cases_test.go create mode 100644 value_reader.go create mode 100644 value_reader_test.go create mode 100644 value_reader_writer_test.go create mode 100644 value_writer.go create mode 100644 value_writer_test.go create mode 100644 vector.go create mode 100644 writer.go create mode 100644 x/README.md create mode 100644 x/bsonx/bsoncore/array.go create mode 100644 x/bsonx/bsoncore/array_test.go create mode 100644 x/bsonx/bsoncore/bson_arraybuilder.go create mode 100644 x/bsonx/bsoncore/bson_arraybuilder_test.go create mode 100644 x/bsonx/bsoncore/bson_documentbuilder.go create mode 100644 x/bsonx/bsoncore/bson_documentbuilder_test.go create mode 100644 x/bsonx/bsoncore/bsoncore.go create mode 100644 x/bsonx/bsoncore/bsoncore_test.go create mode 100644 x/bsonx/bsoncore/doc.go create mode 100644 x/bsonx/bsoncore/document.go create mode 100644 x/bsonx/bsoncore/document_test.go create mode 100644 x/bsonx/bsoncore/element.go create mode 100644 x/bsonx/bsoncore/element_test.go create mode 100644 x/bsonx/bsoncore/iterator.go create mode 100644 x/bsonx/bsoncore/iterator_test.go create mode 100644 x/bsonx/bsoncore/tables.go create mode 100644 x/bsonx/bsoncore/type.go create mode 100644 x/bsonx/bsoncore/value.go create mode 100644 x/bsonx/bsoncore/value_test.go diff --git a/LICENSE b/LICENSE new file mode 100644 index 0000000..261eeb9 --- /dev/null +++ b/LICENSE @@ -0,0 +1,201 @@ + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. 
+ + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. 
You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. 
In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright [yyyy] [name of copyright owner] + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. diff --git a/THIRD-PARTY-NOTICES b/THIRD-PARTY-NOTICES new file mode 100644 index 0000000..b1b1c2a --- /dev/null +++ b/THIRD-PARTY-NOTICES @@ -0,0 +1,1692 @@ +---------------------------------------------------------------------- + +This repository only clones and reduce the mongo-go-driver +keeping a bson-only library (starting from release/2.1). +This library is provided under the same following original licence. + +License notice for the code from github.com/mongodb/mongo-go-driver + +// Copyright (C) MongoDB, Inc. 2017-present + +---------------------------------------------------------------------- + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. 
+ + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. 
Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. 
This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright [yyyy] [name of copyright owner] + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. 
+ +---------------------------------------------------------------------- +License notice for AWS V4 signing code from github.com/aws/aws-sdk-go +AWS SDK for Go +Copyright 2015 Amazon.com, Inc. or its affiliates. All Rights Reserved. +Copyright 2014-2015 Stripe, Inc. +---------------------------------------------------------------------- + + + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." 
+ + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. 
+ + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + +--------------------------------------------------------------------- +License notice for gopkg.in/mgo.v2/bson +--------------------------------------------------------------------- + +BSON library for Go + +Copyright (c) 2010-2013 - Gustavo Niemeyer + +All rights reserved. + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are met: + +1. 
Redistributions of source code must retain the above copyright notice, this + list of conditions and the following disclaimer. +2. Redistributions in binary form must reproduce the above copyright notice, + this list of conditions and the following disclaimer in the documentation + and/or other materials provided with the distribution. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND +ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE +DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR +ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES +(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; +LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND +ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS +SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +--------------------------------------------------------------------- +License notice for JSON and CSV code from github.com/golang/go +--------------------------------------------------------------------- + +Copyright (c) 2009 The Go Authors. All rights reserved. + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are +met: + + * Redistributions of source code must retain the above copyright +notice, this list of conditions and the following disclaimer. + * Redistributions in binary form must reproduce the above +copyright notice, this list of conditions and the following disclaimer +in the documentation and/or other materials provided with the +distribution. + * Neither the name of Google Inc. nor the names of its +contributors may be used to endorse or promote products derived from +this software without specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +--------------------------------------------------------------------- +License notice for rand code from golang.org/x/exp +--------------------------------------------------------------------- + +Copyright (c) 2009 The Go Authors. All rights reserved. + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are +met: + + * Redistributions of source code must retain the above copyright +notice, this list of conditions and the following disclaimer. 
+ * Redistributions in binary form must reproduce the above +copyright notice, this list of conditions and the following disclaimer +in the documentation and/or other materials provided with the +distribution. + * Neither the name of Google Inc. nor the names of its +contributors may be used to endorse or promote products derived from +this software without specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +--------------------------------------------------------------------- +License notice for Add64 and Mul64 code from github.com/golang/go +--------------------------------------------------------------------- + +Copyright (c) 2009 The Go Authors. All rights reserved. + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are +met: + + * Redistributions of source code must retain the above copyright +notice, this list of conditions and the following disclaimer. + * Redistributions in binary form must reproduce the above +copyright notice, this list of conditions and the following disclaimer +in the documentation and/or other materials provided with the +distribution. + * Neither the name of Google Inc. nor the names of its +contributors may be used to endorse or promote products derived from +this software without specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +---------------------------------------------------------------------- +License notice for github.com/davecgh/go-spew +---------------------------------------------------------------------- + +ISC License + +Copyright (c) 2012-2016 Dave Collins + +Permission to use, copy, modify, and/or distribute this software for any +purpose with or without fee is hereby granted, provided that the above +copyright notice and this permission notice appear in all copies. + +THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES +WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +MERCHANTABILITY AND FITNESS. 
IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR +ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF +OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. + +---------------------------------------------------------------------- +License notice for github.com/golang/snappy +---------------------------------------------------------------------- + +Copyright (c) 2011 The Snappy-Go Authors. All rights reserved. + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are +met: + + * Redistributions of source code must retain the above copyright +notice, this list of conditions and the following disclaimer. + * Redistributions in binary form must reproduce the above +copyright notice, this list of conditions and the following disclaimer +in the documentation and/or other materials provided with the +distribution. + * Neither the name of Google Inc. nor the names of its +contributors may be used to endorse or promote products derived from +this software without specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +---------------------------------------------------------------------- +License notice for github.com/google/go-cmp +---------------------------------------------------------------------- + +Copyright (c) 2017 The Go Authors. All rights reserved. + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are +met: + + * Redistributions of source code must retain the above copyright +notice, this list of conditions and the following disclaimer. + * Redistributions in binary form must reproduce the above +copyright notice, this list of conditions and the following disclaimer +in the documentation and/or other materials provided with the +distribution. + * Neither the name of Google Inc. nor the names of its +contributors may be used to endorse or promote products derived from +this software without specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT +OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +---------------------------------------------------------------------- +License notice for github.com/klauspost/compress +---------------------------------------------------------------------- + +Copyright (c) 2012 The Go Authors. All rights reserved. +Copyright (c) 2019 Klaus Post. All rights reserved. + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are +met: + + * Redistributions of source code must retain the above copyright +notice, this list of conditions and the following disclaimer. + * Redistributions in binary form must reproduce the above +copyright notice, this list of conditions and the following disclaimer +in the documentation and/or other materials provided with the +distribution. + * Neither the name of Google Inc. nor the names of its +contributors may be used to endorse or promote products derived from +this software without specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +---------------------------------------------------------------------- +License notice for github.com/klauspost/compress/snappy +---------------------------------------------------------------------- + +Copyright (c) 2011 The Snappy-Go Authors. All rights reserved. + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are +met: + + * Redistributions of source code must retain the above copyright +notice, this list of conditions and the following disclaimer. + * Redistributions in binary form must reproduce the above +copyright notice, this list of conditions and the following disclaimer +in the documentation and/or other materials provided with the +distribution. + * Neither the name of Google Inc. nor the names of its +contributors may be used to endorse or promote products derived from +this software without specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT +OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +---------------------------------------------------------------------- +License notice for github.com/konsorten/go-windows-terminal-sequences +---------------------------------------------------------------------- + +(The MIT License) + +Copyright (c) 2017 marvin + konsorten GmbH (open-source@konsorten.de) + +Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the 'Software'), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED 'AS IS', WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. + +---------------------------------------------------------------------- +License notice for github.com/markbates/oncer +---------------------------------------------------------------------- + +The MIT License (MIT) + +Copyright (c) 2018 Mark Bates + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. 
+ +---------------------------------------------------------------------- +License notice for github.com/markbates/safe +---------------------------------------------------------------------- + +The MIT License (MIT) + +Copyright (c) 2018 Mark Bates + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. + +---------------------------------------------------------------------- +License notice for github.com/montanaflynn/stats +---------------------------------------------------------------------- + +The MIT License (MIT) + +Copyright (c) 2014-2015 Montana Flynn (https://anonfunction.com) + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. + +---------------------------------------------------------------------- +License notice for github.com/pkg/errors +---------------------------------------------------------------------- + +Copyright (c) 2015, Dave Cheney +All rights reserved. + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are met: + +* Redistributions of source code must retain the above copyright notice, this + list of conditions and the following disclaimer. + +* Redistributions in binary form must reproduce the above copyright notice, + this list of conditions and the following disclaimer in the documentation + and/or other materials provided with the distribution. 
+ +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" +AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE +IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE +DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE +FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL +DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR +SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER +CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, +OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +---------------------------------------------------------------------- +License notice for github.com/pmezard/go-difflib +---------------------------------------------------------------------- + +Copyright (c) 2013, Patrick Mezard +All rights reserved. + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are +met: + + Redistributions of source code must retain the above copyright +notice, this list of conditions and the following disclaimer. + Redistributions in binary form must reproduce the above copyright +notice, this list of conditions and the following disclaimer in the +documentation and/or other materials provided with the distribution. + The names of its contributors may not be used to endorse or promote +products derived from this software without specific prior written +permission. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS +IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED +TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A +PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED +TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR +PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF +LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING +NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS +SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +---------------------------------------------------------------------- +License notice for github.com/rogpeppe/go-internal +---------------------------------------------------------------------- + +Copyright (c) 2018 The Go Authors. All rights reserved. + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are +met: + + * Redistributions of source code must retain the above copyright +notice, this list of conditions and the following disclaimer. + * Redistributions in binary form must reproduce the above +copyright notice, this list of conditions and the following disclaimer +in the documentation and/or other materials provided with the +distribution. + * Neither the name of Google Inc. nor the names of its +contributors may be used to endorse or promote products derived from +this software without specific prior written permission. 
+ +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +---------------------------------------------------------------------- +License notice for github.com/stretchr/testify +---------------------------------------------------------------------- + +MIT License + +Copyright (c) 2012-2020 Mat Ryer, Tyler Bunnell and contributors. + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. + +---------------------------------------------------------------------- +License notice for github.com/xdg-go/pbkdf2 +---------------------------------------------------------------------- + + + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. 
+ + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. 
You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. 
In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + +---------------------------------------------------------------------- +License notice for github.com/xdg-go/scram +---------------------------------------------------------------------- + + + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. 
For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. 
You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. 
In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + +---------------------------------------------------------------------- +License notice for github.com/xdg-go/stringprep +---------------------------------------------------------------------- + + + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. 
For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. 
You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. 
In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + +---------------------------------------------------------------------- +License notice for github.com/youmark/pkcs8 +---------------------------------------------------------------------- + +The MIT License (MIT) + +Copyright (c) 2014 youmark + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. +---------------------------------------------------------------------- +License notice for golang.org/x/crypto +---------------------------------------------------------------------- + +Copyright (c) 2009 The Go Authors. All rights reserved. + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are +met: + + * Redistributions of source code must retain the above copyright +notice, this list of conditions and the following disclaimer. + * Redistributions in binary form must reproduce the above +copyright notice, this list of conditions and the following disclaimer +in the documentation and/or other materials provided with the +distribution. + * Neither the name of Google Inc. 
nor the names of its +contributors may be used to endorse or promote products derived from +this software without specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +---------------------------------------------------------------------- +License notice for golang.org/x/sync +---------------------------------------------------------------------- + +Copyright (c) 2009 The Go Authors. All rights reserved. + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are +met: + + * Redistributions of source code must retain the above copyright +notice, this list of conditions and the following disclaimer. + * Redistributions in binary form must reproduce the above +copyright notice, this list of conditions and the following disclaimer +in the documentation and/or other materials provided with the +distribution. + * Neither the name of Google Inc. nor the names of its +contributors may be used to endorse or promote products derived from +this software without specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +---------------------------------------------------------------------- +License notice for golang.org/x/sys +---------------------------------------------------------------------- + +Copyright (c) 2009 The Go Authors. All rights reserved. + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are +met: + + * Redistributions of source code must retain the above copyright +notice, this list of conditions and the following disclaimer. + * Redistributions in binary form must reproduce the above +copyright notice, this list of conditions and the following disclaimer +in the documentation and/or other materials provided with the +distribution. + * Neither the name of Google Inc. nor the names of its +contributors may be used to endorse or promote products derived from +this software without specific prior written permission. 
+ +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +---------------------------------------------------------------------- +License notice for golang.org/x/text +---------------------------------------------------------------------- + +Copyright (c) 2009 The Go Authors. All rights reserved. + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are +met: + + * Redistributions of source code must retain the above copyright +notice, this list of conditions and the following disclaimer. + * Redistributions in binary form must reproduce the above +copyright notice, this list of conditions and the following disclaimer +in the documentation and/or other materials provided with the +distribution. + * Neither the name of Google Inc. nor the names of its +contributors may be used to endorse or promote products derived from +this software without specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +---------------------------------------------------------------------- +License notice for golang.org/x/tools +---------------------------------------------------------------------- + +Copyright (c) 2009 The Go Authors. All rights reserved. + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are +met: + + * Redistributions of source code must retain the above copyright +notice, this list of conditions and the following disclaimer. + * Redistributions in binary form must reproduce the above +copyright notice, this list of conditions and the following disclaimer +in the documentation and/or other materials provided with the +distribution. + * Neither the name of Google Inc. nor the names of its +contributors may be used to endorse or promote products derived from +this software without specific prior written permission. 
+ +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +---------------------------------------------------------------------- +License notice for golang.org/x/xerrors +---------------------------------------------------------------------- + +Copyright (c) 2019 The Go Authors. All rights reserved. + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are +met: + + * Redistributions of source code must retain the above copyright +notice, this list of conditions and the following disclaimer. + * Redistributions in binary form must reproduce the above +copyright notice, this list of conditions and the following disclaimer +in the documentation and/or other materials provided with the +distribution. + * Neither the name of Google Inc. nor the names of its +contributors may be used to endorse or promote products derived from +this software without specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +---------------------------------------------------------------------- +License notice for gopkg.in/yaml.v3 +---------------------------------------------------------------------- + + +This project is covered by two different licenses: MIT and Apache. 
+ +#### MIT License #### + +The following files were ported to Go from C files of libyaml, and thus +are still covered by their original MIT license, with the additional +copyright staring in 2011 when the project was ported over: + + apic.go emitterc.go parserc.go readerc.go scannerc.go + writerc.go yamlh.go yamlprivateh.go + +Copyright (c) 2006-2010 Kirill Simonov +Copyright (c) 2006-2011 Kirill Simonov + +Permission is hereby granted, free of charge, to any person obtaining a copy of +this software and associated documentation files (the "Software"), to deal in +the Software without restriction, including without limitation the rights to +use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies +of the Software, and to permit persons to whom the Software is furnished to do +so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. + +### Apache License ### + +All the remaining project files are covered by the Apache license: + +Copyright (c) 2011-2019 Canonical Ltd + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. diff --git a/array_codec.go b/array_codec.go new file mode 100644 index 0000000..83a9dde --- /dev/null +++ b/array_codec.go @@ -0,0 +1,42 @@ +// Copyright (C) MongoDB, Inc. 2017-present. +// +// Licensed under the Apache License, Version 2.0 (the "License"); you may +// not use this file except in compliance with the License. You may obtain +// a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 + +package bson + +import ( + "reflect" + + "gitea.psichedelico.com/go/bson/x/bsonx/bsoncore" +) + +// arrayCodec is the Codec used for bsoncore.Array values. +type arrayCodec struct{} + +// EncodeValue is the ValueEncoder for bsoncore.Array values. +func (ac *arrayCodec) EncodeValue(_ EncodeContext, vw ValueWriter, val reflect.Value) error { + if !val.IsValid() || val.Type() != tCoreArray { + return ValueEncoderError{Name: "CoreArrayEncodeValue", Types: []reflect.Type{tCoreArray}, Received: val} + } + + arr := val.Interface().(bsoncore.Array) + return copyArrayFromBytes(vw, arr) +} + +// DecodeValue is the ValueDecoder for bsoncore.Array values. 
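+// The destination must be a settable value of type bsoncore.Array; its length
+// is reset to zero and the decoded array bytes are appended to it, reusing any
+// existing capacity where possible.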
+func (ac *arrayCodec) DecodeValue(_ DecodeContext, vr ValueReader, val reflect.Value) error { + if !val.CanSet() || val.Type() != tCoreArray { + return ValueDecoderError{Name: "CoreArrayDecodeValue", Types: []reflect.Type{tCoreArray}, Received: val} + } + + if val.IsNil() { + val.Set(reflect.MakeSlice(val.Type(), 0, 0)) + } + + val.SetLen(0) + arr, err := appendArrayBytes(val.Interface().(bsoncore.Array), vr) + val.Set(reflect.ValueOf(arr)) + return err +} diff --git a/benchmark_test.go b/benchmark_test.go new file mode 100644 index 0000000..b84d4e8 --- /dev/null +++ b/benchmark_test.go @@ -0,0 +1,449 @@ +// Copyright (C) MongoDB, Inc. 2017-present. +// +// Licensed under the Apache License, Version 2.0 (the "License"); you may +// not use this file except in compliance with the License. You may obtain +// a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 + +package bson + +import ( + "bytes" + "compress/gzip" + "encoding/json" + "fmt" + "io" + "io/ioutil" + "os" + "path" + "sync" + "testing" +) + +type encodetest struct { + Field1String string + Field1Int64 int64 + Field1Float64 float64 + Field2String string + Field2Int64 int64 + Field2Float64 float64 + Field3String string + Field3Int64 int64 + Field3Float64 float64 + Field4String string + Field4Int64 int64 + Field4Float64 float64 +} + +type nestedtest1 struct { + Nested nestedtest2 +} + +type nestedtest2 struct { + Nested nestedtest3 +} + +type nestedtest3 struct { + Nested nestedtest4 +} + +type nestedtest4 struct { + Nested nestedtest5 +} + +type nestedtest5 struct { + Nested nestedtest6 +} + +type nestedtest6 struct { + Nested nestedtest7 +} + +type nestedtest7 struct { + Nested nestedtest8 +} + +type nestedtest8 struct { + Nested nestedtest9 +} + +type nestedtest9 struct { + Nested nestedtest10 +} + +type nestedtest10 struct { + Nested nestedtest11 +} + +type nestedtest11 struct { + Nested encodetest +} + +var encodetestInstance = encodetest{ + Field1String: "foo", + Field1Int64: 1, + Field1Float64: 3.0, + Field2String: "bar", + Field2Int64: 2, + Field2Float64: 3.1, + Field3String: "baz", + Field3Int64: 3, + Field3Float64: 3.14, + Field4String: "qux", + Field4Int64: 4, + Field4Float64: 3.141, +} + +var nestedInstance = nestedtest1{ + nestedtest2{ + nestedtest3{ + nestedtest4{ + nestedtest5{ + nestedtest6{ + nestedtest7{ + nestedtest8{ + nestedtest9{ + nestedtest10{ + nestedtest11{ + encodetest{ + Field1String: "foo", + Field1Int64: 1, + Field1Float64: 3.0, + Field2String: "bar", + Field2Int64: 2, + Field2Float64: 3.1, + Field3String: "baz", + Field3Int64: 3, + Field3Float64: 3.14, + Field4String: "qux", + Field4Int64: 4, + Field4Float64: 3.141, + }, + }, + }, + }, + }, + }, + }, + }, + }, + }, + }, +} + +const extendedBSONDir = "./testdata/extended_bson" + +var ( + extJSONFiles map[string]map[string]interface{} + extJSONFilesMu sync.Mutex +) + +// readExtJSONFile reads the GZIP-compressed extended JSON document from the given filename in the +// "extended BSON" test data directory (./testdata/extended_bson) and returns it as a +// map[string]interface{}. It panics on any errors. 
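+// Decoded documents are memoized in the package-level extJSONFiles map,
+// guarded by extJSONFilesMu, so each file is opened and decompressed at most
+// once per benchmark run.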
+func readExtJSONFile(filename string) map[string]interface{} { + extJSONFilesMu.Lock() + defer extJSONFilesMu.Unlock() + if v, ok := extJSONFiles[filename]; ok { + return v + } + filePath := path.Join(extendedBSONDir, filename) + file, err := os.Open(filePath) + if err != nil { + panic(fmt.Sprintf("error opening file %q: %s", filePath, err)) + } + defer func() { + _ = file.Close() + }() + + gz, err := gzip.NewReader(file) + if err != nil { + panic(fmt.Sprintf("error creating GZIP reader: %s", err)) + } + defer func() { + _ = gz.Close() + }() + + data, err := ioutil.ReadAll(gz) + if err != nil { + panic(fmt.Sprintf("error reading GZIP contents of file: %s", err)) + } + + var v map[string]interface{} + err = UnmarshalExtJSON(data, false, &v) + if err != nil { + panic(fmt.Sprintf("error unmarshalling extended JSON: %s", err)) + } + + if extJSONFiles == nil { + extJSONFiles = make(map[string]map[string]interface{}) + } + extJSONFiles[filename] = v + return v +} + +func BenchmarkMarshal(b *testing.B) { + cases := []struct { + desc string + value interface{} + }{ + { + desc: "simple struct", + value: encodetestInstance, + }, + { + desc: "nested struct", + value: nestedInstance, + }, + { + desc: "deep_bson.json.gz", + value: readExtJSONFile("deep_bson.json.gz"), + }, + { + desc: "flat_bson.json.gz", + value: readExtJSONFile("flat_bson.json.gz"), + }, + { + desc: "full_bson.json.gz", + value: readExtJSONFile("full_bson.json.gz"), + }, + } + + for _, tc := range cases { + b.Run(tc.desc, func(b *testing.B) { + b.Run("BSON", func(b *testing.B) { + for i := 0; i < b.N; i++ { + _, err := Marshal(tc.value) + if err != nil { + b.Errorf("error marshalling BSON: %s", err) + } + } + }) + + b.Run("extJSON", func(b *testing.B) { + for i := 0; i < b.N; i++ { + _, err := MarshalExtJSON(tc.value, true, false) + if err != nil { + b.Errorf("error marshalling extended JSON: %s", err) + } + } + }) + + b.Run("JSON", func(b *testing.B) { + for i := 0; i < b.N; i++ { + _, err := json.Marshal(tc.value) + if err != nil { + b.Errorf("error marshalling JSON: %s", err) + } + } + }) + }) + } +} + +func BenchmarkUnmarshal(b *testing.B) { + cases := []struct { + desc string + value interface{} + }{ + { + desc: "simple struct", + value: encodetestInstance, + }, + { + desc: "nested struct", + value: nestedInstance, + }, + { + desc: "deep_bson.json.gz", + value: readExtJSONFile("deep_bson.json.gz"), + }, + { + desc: "flat_bson.json.gz", + value: readExtJSONFile("flat_bson.json.gz"), + }, + { + desc: "full_bson.json.gz", + value: readExtJSONFile("full_bson.json.gz"), + }, + } + + for _, tc := range cases { + b.Run(tc.desc, func(b *testing.B) { + b.Run("BSON", func(b *testing.B) { + data, err := Marshal(tc.value) + if err != nil { + b.Errorf("error marshalling BSON: %s", err) + return + } + + b.ResetTimer() + var v2 map[string]interface{} + for i := 0; i < b.N; i++ { + err := Unmarshal(data, &v2) + if err != nil { + b.Errorf("error unmarshalling BSON: %s", err) + } + } + }) + + b.Run("extJSON", func(b *testing.B) { + data, err := MarshalExtJSON(tc.value, true, false) + if err != nil { + b.Errorf("error marshalling extended JSON: %s", err) + return + } + + b.ResetTimer() + var v2 map[string]interface{} + for i := 0; i < b.N; i++ { + err := UnmarshalExtJSON(data, true, &v2) + if err != nil { + b.Errorf("error unmarshalling extended JSON: %s", err) + } + } + }) + + b.Run("JSON", func(b *testing.B) { + data, err := json.Marshal(tc.value) + if err != nil { + b.Errorf("error marshalling JSON: %s", err) + return + } + + b.ResetTimer() + 
var v2 map[string]interface{} + for i := 0; i < b.N; i++ { + err := json.Unmarshal(data, &v2) + if err != nil { + b.Errorf("error unmarshalling JSON: %s", err) + } + } + }) + }) + } +} + +// The following benchmarks are copied from the Go standard library's +// encoding/json package. + +type codeResponse struct { + Tree *codeNode `json:"tree"` + Username string `json:"username"` +} + +type codeNode struct { + Name string `json:"name"` + Kids []*codeNode `json:"kids"` + CLWeight float64 `json:"cl_weight"` + Touches int `json:"touches"` + MinT int64 `json:"min_t"` + MaxT int64 `json:"max_t"` + MeanT int64 `json:"mean_t"` +} + +var codeJSON []byte +var codeBSON []byte +var codeStruct codeResponse + +func codeInit() { + f, err := os.Open("testdata/code.json.gz") + if err != nil { + panic(err) + } + defer f.Close() + gz, err := gzip.NewReader(f) + if err != nil { + panic(err) + } + data, err := io.ReadAll(gz) + if err != nil { + panic(err) + } + + codeJSON = data + + if err := json.Unmarshal(codeJSON, &codeStruct); err != nil { + panic("json.Unmarshal code.json: " + err.Error()) + } + + if data, err = json.Marshal(&codeStruct); err != nil { + panic("json.Marshal code.json: " + err.Error()) + } + + if codeBSON, err = Marshal(&codeStruct); err != nil { + panic("Marshal code.json: " + err.Error()) + } + + if !bytes.Equal(data, codeJSON) { + println("different lengths", len(data), len(codeJSON)) + for i := 0; i < len(data) && i < len(codeJSON); i++ { + if data[i] != codeJSON[i] { + println("re-marshal: changed at byte", i) + println("orig: ", string(codeJSON[i-10:i+10])) + println("new: ", string(data[i-10:i+10])) + break + } + } + panic("re-marshal code.json: different result") + } +} + +func BenchmarkCodeUnmarshal(b *testing.B) { + b.ReportAllocs() + if codeJSON == nil { + b.StopTimer() + codeInit() + b.StartTimer() + } + b.Run("BSON", func(b *testing.B) { + b.RunParallel(func(pb *testing.PB) { + for pb.Next() { + var r codeResponse + if err := Unmarshal(codeBSON, &r); err != nil { + b.Fatal("Unmarshal:", err) + } + } + }) + b.SetBytes(int64(len(codeBSON))) + }) + b.Run("JSON", func(b *testing.B) { + b.RunParallel(func(pb *testing.PB) { + for pb.Next() { + var r codeResponse + if err := json.Unmarshal(codeJSON, &r); err != nil { + b.Fatal("json.Unmarshal:", err) + } + } + }) + b.SetBytes(int64(len(codeJSON))) + }) +} + +func BenchmarkCodeMarshal(b *testing.B) { + b.ReportAllocs() + if codeJSON == nil { + b.StopTimer() + codeInit() + b.StartTimer() + } + b.Run("BSON", func(b *testing.B) { + b.RunParallel(func(pb *testing.PB) { + for pb.Next() { + if _, err := Marshal(&codeStruct); err != nil { + b.Fatal("Marshal:", err) + } + } + }) + b.SetBytes(int64(len(codeBSON))) + }) + b.Run("JSON", func(b *testing.B) { + b.RunParallel(func(pb *testing.PB) { + for pb.Next() { + if _, err := json.Marshal(&codeStruct); err != nil { + b.Fatal("json.Marshal:", err) + } + } + }) + b.SetBytes(int64(len(codeJSON))) + }) +} diff --git a/bson_binary_vector_spec_test.go b/bson_binary_vector_spec_test.go new file mode 100644 index 0000000..05ee409 --- /dev/null +++ b/bson_binary_vector_spec_test.go @@ -0,0 +1,191 @@ +// Copyright (C) MongoDB, Inc. 2024-present. +// +// Licensed under the Apache License, Version 2.0 (the "License"); you may +// not use this file except in compliance with the License. 
You may obtain +// a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 + +package bson + +import ( + "encoding/hex" + "encoding/json" + "math" + "os" + "path" + "testing" + + "gitea.psichedelico.com/go/bson/internal/require" +) + +const bsonBinaryVectorDir = "./testdata/bson-binary-vector/" + +type bsonBinaryVectorTests struct { + Description string `json:"description"` + TestKey string `json:"test_key"` + Tests []bsonBinaryVectorTestCase `json:"tests"` +} + +type bsonBinaryVectorTestCase struct { + Description string `json:"description"` + Valid bool `json:"valid"` + Vector []interface{} `json:"vector"` + DtypeHex string `json:"dtype_hex"` + DtypeAlias string `json:"dtype_alias"` + Padding int `json:"padding"` + CanonicalBson string `json:"canonical_bson"` +} + +func TestBsonBinaryVectorSpec(t *testing.T) { + t.Parallel() + + jsonFiles, err := findJSONFilesInDir(bsonBinaryVectorDir) + require.NoErrorf(t, err, "error finding JSON files in %s: %v", bsonBinaryVectorDir, err) + + for _, file := range jsonFiles { + filepath := path.Join(bsonBinaryVectorDir, file) + content, err := os.ReadFile(filepath) + require.NoErrorf(t, err, "reading test file %s", filepath) + + var tests bsonBinaryVectorTests + require.NoErrorf(t, json.Unmarshal(content, &tests), "parsing test file %s", filepath) + + t.Run(tests.Description, func(t *testing.T) { + t.Parallel() + + for _, test := range tests.Tests { + test := test + t.Run(test.Description, func(t *testing.T) { + t.Parallel() + + runBsonBinaryVectorTest(t, tests.TestKey, test) + }) + } + }) + } + + t.Run("Padding specified with no vector data PACKED_BIT", func(t *testing.T) { + t.Parallel() + + t.Run("Marshaling", func(t *testing.T) { + _, err := NewPackedBitVector(nil, 1) + require.EqualError(t, err, errNonZeroVectorPadding.Error()) + }) + }) + + t.Run("Exceeding maximum padding PACKED_BIT", func(t *testing.T) { + t.Parallel() + + t.Run("Marshaling", func(t *testing.T) { + _, err := NewPackedBitVector(nil, 8) + require.EqualError(t, err, errVectorPaddingTooLarge.Error()) + }) + }) +} + +func convertSlice[T int8 | float32 | byte](s []interface{}) []T { + v := make([]T, len(s)) + for i, e := range s { + f := math.NaN() + switch val := e.(type) { + case float64: + f = val + case string: + if val == "inf" { + f = math.Inf(0) + } else if val == "-inf" { + f = math.Inf(-1) + } + } + v[i] = T(f) + } + return v +} + +func runBsonBinaryVectorTest(t *testing.T, testKey string, test bsonBinaryVectorTestCase) { + testVector := make(map[string]Vector) + switch alias := test.DtypeHex; alias { + case "0x03": + testVector[testKey] = Vector{ + dType: Int8Vector, + int8Data: convertSlice[int8](test.Vector), + } + case "0x27": + testVector[testKey] = Vector{ + dType: Float32Vector, + float32Data: convertSlice[float32](test.Vector), + } + case "0x10": + testVector[testKey] = Vector{ + dType: PackedBitVector, + bitData: convertSlice[byte](test.Vector), + bitPadding: uint8(test.Padding), + } + default: + t.Fatalf("unsupported vector type: %s", alias) + } + + testBSON, err := hex.DecodeString(test.CanonicalBson) + require.NoError(t, err, "decoding canonical BSON") + + t.Run("Unmarshaling", func(t *testing.T) { + skipCases := map[string]string{ + "Overflow Vector INT8": "compile-time restriction", + "Underflow Vector INT8": "compile-time restriction", + "INT8 with float inputs": "compile-time restriction", + "Overflow Vector PACKED_BIT": "compile-time restriction", + "Underflow Vector PACKED_BIT": "compile-time restriction", + "Vector with float values 
PACKED_BIT": "compile-time restriction", + "Negative padding PACKED_BIT": "compile-time restriction", + } + if reason, ok := skipCases[test.Description]; ok { + t.Skipf("skip test case %s: %s", test.Description, reason) + } + + errMap := map[string]string{ + "FLOAT32 with padding": "padding must be 0", + "INT8 with padding": "padding must be 0", + "Padding specified with no vector data PACKED_BIT": "padding must be 0", + "Exceeding maximum padding PACKED_BIT": "padding cannot be larger than 7", + } + + t.Parallel() + + var got map[string]Vector + err := Unmarshal(testBSON, &got) + if test.Valid { + require.NoError(t, err) + require.Equal(t, testVector, got) + } else if errMsg, ok := errMap[test.Description]; ok { + require.ErrorContains(t, err, errMsg) + } else { + require.Error(t, err) + } + }) + + t.Run("Marshaling", func(t *testing.T) { + skipCases := map[string]string{ + "FLOAT32 with padding": "private padding field", + "Insufficient vector data with 3 bytes FLOAT32": "invalid case", + "Insufficient vector data with 5 bytes FLOAT32": "invalid case", + "Overflow Vector INT8": "compile-time restriction", + "Underflow Vector INT8": "compile-time restriction", + "INT8 with padding": "private padding field", + "INT8 with float inputs": "compile-time restriction", + "Overflow Vector PACKED_BIT": "compile-time restriction", + "Underflow Vector PACKED_BIT": "compile-time restriction", + "Vector with float values PACKED_BIT": "compile-time restriction", + "Padding specified with no vector data PACKED_BIT": "run in alternative case", + "Exceeding maximum padding PACKED_BIT": "run in alternative case", + "Negative padding PACKED_BIT": "compile-time restriction", + } + if reason, ok := skipCases[test.Description]; ok { + t.Skipf("skip test case %s: %s", test.Description, reason) + } + + t.Parallel() + + got, err := Marshal(testVector) + require.NoError(t, err) + require.Equal(t, testBSON, got) + }) +} diff --git a/bson_corpus_spec_test.go b/bson_corpus_spec_test.go new file mode 100644 index 0000000..d802c92 --- /dev/null +++ b/bson_corpus_spec_test.go @@ -0,0 +1,504 @@ +// Copyright (C) MongoDB, Inc. 2017-present. +// +// Licensed under the Apache License, Version 2.0 (the "License"); you may +// not use this file except in compliance with the License. 
You may obtain
+// a copy of the License at http://www.apache.org/licenses/LICENSE-2.0
+
+package bson
+
+import (
+	"bytes"
+	"encoding/hex"
+	"encoding/json"
+	"fmt"
+	"os"
+	"path"
+	"reflect"
+	"strconv"
+	"strings"
+	"testing"
+	"unicode"
+	"unicode/utf8"
+
+	"gitea.psichedelico.com/go/bson/internal/assert"
+	"gitea.psichedelico.com/go/bson/internal/require"
+	"github.com/google/go-cmp/cmp"
+)
+
+type testCase struct {
+	Description  string                `json:"description"`
+	BsonType     string                `json:"bson_type"`
+	TestKey      *string               `json:"test_key"`
+	Valid        []validityTestCase    `json:"valid"`
+	DecodeErrors []decodeErrorTestCase `json:"decodeErrors"`
+	ParseErrors  []parseErrorTestCase  `json:"parseErrors"`
+	Deprecated   *bool                 `json:"deprecated"`
+}
+
+type validityTestCase struct {
+	Description       string  `json:"description"`
+	CanonicalBson     string  `json:"canonical_bson"`
+	CanonicalExtJSON  string  `json:"canonical_extjson"`
+	RelaxedExtJSON    *string `json:"relaxed_extjson"`
+	DegenerateBSON    *string `json:"degenerate_bson"`
+	DegenerateExtJSON *string `json:"degenerate_extjson"`
+	ConvertedBSON     *string `json:"converted_bson"`
+	ConvertedExtJSON  *string `json:"converted_extjson"`
+	Lossy             *bool   `json:"lossy"`
+}
+
+type decodeErrorTestCase struct {
+	Description string `json:"description"`
+	Bson        string `json:"bson"`
+}
+
+type parseErrorTestCase struct {
+	Description string `json:"description"`
+	String      string `json:"string"`
+}
+
+const dataDir = "./testdata/bson-corpus/"
+
+func findJSONFilesInDir(dir string) ([]string, error) {
+	files := make([]string, 0)
+
+	entries, err := os.ReadDir(dir)
+	if err != nil {
+		return nil, err
+	}
+
+	for _, entry := range entries {
+		if entry.IsDir() || path.Ext(entry.Name()) != ".json" {
+			continue
+		}
+
+		files = append(files, entry.Name())
+	}
+
+	return files, nil
+}
+
+// seedExtJSON will add the byte representation of the "extJSON" string to the fuzzer's corpus.
+func seedExtJSON(f *testing.F, extJSON string, extJSONType string, desc string) {
+	jbytes, err := jsonToBytes(extJSON, extJSONType, desc)
+	if err != nil {
+		f.Fatalf("failed to convert JSON to bytes: %v", err)
+	}
+
+	f.Add(jbytes)
+}
+
+// seedTestCase will add the byte representation for each "extJSON" string of each valid test case to the fuzzer's
+// corpus.
+func seedTestCase(f *testing.F, tcase *testCase) {
+	for _, vtc := range tcase.Valid {
+		seedExtJSON(f, vtc.CanonicalExtJSON, "canonical", vtc.Description)
+
+		// Seed the relaxed extended JSON.
+		if vtc.RelaxedExtJSON != nil {
+			seedExtJSON(f, *vtc.RelaxedExtJSON, "relaxed", vtc.Description)
+		}
+
+		// Seed the degenerate extended JSON.
+		if vtc.DegenerateExtJSON != nil {
+			seedExtJSON(f, *vtc.DegenerateExtJSON, "degenerate", vtc.Description)
+		}
+
+		// Seed the converted extended JSON.
+		if vtc.ConvertedExtJSON != nil {
+			seedExtJSON(f, *vtc.ConvertedExtJSON, "converted", vtc.Description)
+		}
+	}
+}
+
+// seedBSONCorpus will unmarshal the data from "testdata/bson-corpus" into a slice of "testCase" structs and then
+// marshal the "*_extjson" field of each "validityTestCase" into a slice of bytes to seed the fuzz corpus.
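+// Any corpus file that cannot be opened or decoded aborts the fuzz setup
+// immediately instead of being skipped silently.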
+func seedBSONCorpus(f *testing.F) { + fileNames, err := findJSONFilesInDir(dataDir) + if err != nil { + f.Fatalf("failed to find JSON files in directory %q: %v", dataDir, err) + } + + for _, fileName := range fileNames { + filePath := path.Join(dataDir, fileName) + + file, err := os.Open(filePath) + if err != nil { + f.Fatalf("failed to open file %q: %v", filePath, err) + } + + var tcase testCase + if err := json.NewDecoder(file).Decode(&tcase); err != nil { + f.Fatal(err) + } + + seedTestCase(f, &tcase) + } +} + +func needsEscapedUnicode(bsonType string) bool { + return bsonType == "0x02" || bsonType == "0x0D" || bsonType == "0x0E" || bsonType == "0x0F" +} + +func unescapeUnicode(s, bsonType string) string { + if !needsEscapedUnicode(bsonType) { + return s + } + + newS := "" + + for i := 0; i < len(s); i++ { + c := s[i] + switch c { + case '\\': + switch s[i+1] { + case 'u': + us := s[i : i+6] + u, err := strconv.Unquote(strings.Replace(strconv.Quote(us), `\\u`, `\u`, 1)) + if err != nil { + return "" + } + for _, r := range u { + if r < ' ' { + newS += fmt.Sprintf(`\u%04x`, r) + } else { + newS += string(r) + } + } + i += 5 + default: + newS += string(c) + } + default: + if c > unicode.MaxASCII { + r, size := utf8.DecodeRune([]byte(s[i:])) + newS += string(r) + i += size - 1 + } else { + newS += string(c) + } + } + } + + return newS +} + +func normalizeCanonicalDouble(t *testing.T, key string, cEJ string) string { + // Unmarshal string into map + cEJMap := make(map[string]map[string]string) + err := json.Unmarshal([]byte(cEJ), &cEJMap) + require.NoError(t, err) + + // Parse the float contained by the map. + expectedString := cEJMap[key]["$numberDouble"] + expectedFloat, err := strconv.ParseFloat(expectedString, 64) + require.NoError(t, err) + + // Normalize the string + return fmt.Sprintf(`{"%s":{"$numberDouble":"%s"}}`, key, formatDouble(expectedFloat)) +} + +func normalizeRelaxedDouble(t *testing.T, key string, rEJ string) string { + // Unmarshal string into map + rEJMap := make(map[string]float64) + err := json.Unmarshal([]byte(rEJ), &rEJMap) + if err != nil { + return normalizeCanonicalDouble(t, key, rEJ) + } + + // Parse the float contained by the map. + expectedFloat := rEJMap[key] + + // Normalize the string + return fmt.Sprintf(`{"%s":%s}`, key, formatDouble(expectedFloat)) +} + +// bsonToNative decodes the BSON bytes (b) into a native Document +func bsonToNative(t *testing.T, b []byte, bType, testDesc string) D { + var doc D + err := Unmarshal(b, &doc) + require.NoErrorf(t, err, "%s: decoding %s BSON", testDesc, bType) + return doc +} + +// nativeToBSON encodes the native Document (doc) into canonical BSON and compares it to the expected +// canonical BSON (cB) +func nativeToBSON(t *testing.T, cB []byte, doc D, testDesc, bType, docSrcDesc string) { + actual, err := Marshal(doc) + require.NoErrorf(t, err, "%s: encoding %s BSON", testDesc, bType) + + if diff := cmp.Diff(cB, actual); diff != "" { + t.Errorf("%s: 'native_to_bson(%s) = cB' failed (-want, +got):\n-%v\n+%v\n", + testDesc, docSrcDesc, cB, actual) + t.FailNow() + } +} + +// jsonToNative decodes the extended JSON string (ej) into a native Document +func jsonToNative(ej, ejType, testDesc string) (D, error) { + var doc D + if err := UnmarshalExtJSON([]byte(ej), ejType != "relaxed", &doc); err != nil { + return nil, fmt.Errorf("%s: decoding %s extended JSON: %w", testDesc, ejType, err) + } + return doc, nil +} + +// jsonToBytes decodes the extended JSON string (ej) into canonical BSON and then encodes it into a byte slice. 
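+// Errors from either step are wrapped with the originating test description,
+// so a corpus failure can be traced to a specific case.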
+func jsonToBytes(ej, ejType, testDesc string) ([]byte, error) { + native, err := jsonToNative(ej, ejType, testDesc) + if err != nil { + return nil, err + } + + b, err := Marshal(native) + if err != nil { + return nil, fmt.Errorf("%s: encoding %s BSON: %w", testDesc, ejType, err) + } + + return b, nil +} + +// nativeToJSON encodes the native Document (doc) into an extended JSON string +func nativeToJSON(t *testing.T, ej string, doc D, testDesc, ejType, ejShortName, docSrcDesc string) { + actualEJ, err := MarshalExtJSON(doc, ejType != "relaxed", true) + require.NoErrorf(t, err, "%s: encoding %s extended JSON", testDesc, ejType) + + if diff := cmp.Diff(ej, string(actualEJ)); diff != "" { + t.Errorf("%s: 'native_to_%s_extended_json(%s) = %s' failed (-want, +got):\n%s\n", + testDesc, ejType, docSrcDesc, ejShortName, diff) + t.FailNow() + } +} + +func runTest(t *testing.T, file string) { + filepath := path.Join(dataDir, file) + content, err := os.ReadFile(filepath) + require.NoError(t, err) + + // Remove ".json" from filename. + file = file[:len(file)-5] + testName := "bson_corpus--" + file + + t.Run(testName, func(t *testing.T) { + var test testCase + require.NoError(t, json.Unmarshal(content, &test)) + + t.Run("valid", func(t *testing.T) { + for _, v := range test.Valid { + t.Run(v.Description, func(t *testing.T) { + // get canonical BSON + cB, err := hex.DecodeString(v.CanonicalBson) + require.NoErrorf(t, err, "%s: reading canonical BSON", v.Description) + + // get canonical extended JSON + var compactEJ bytes.Buffer + require.NoError(t, json.Compact(&compactEJ, []byte(v.CanonicalExtJSON))) + cEJ := unescapeUnicode(compactEJ.String(), test.BsonType) + if test.BsonType == "0x01" { + cEJ = normalizeCanonicalDouble(t, *test.TestKey, cEJ) + } + + /*** canonical BSON round-trip tests ***/ + doc := bsonToNative(t, cB, "canonical", v.Description) + + // native_to_bson(bson_to_native(cB)) = cB + nativeToBSON(t, cB, doc, v.Description, "canonical", "bson_to_native(cB)") + + // native_to_canonical_extended_json(bson_to_native(cB)) = cEJ + nativeToJSON(t, cEJ, doc, v.Description, "canonical", "cEJ", "bson_to_native(cB)") + + // native_to_relaxed_extended_json(bson_to_native(cB)) = rEJ (if rEJ exists) + if v.RelaxedExtJSON != nil { + var compactEJ bytes.Buffer + require.NoError(t, json.Compact(&compactEJ, []byte(*v.RelaxedExtJSON))) + rEJ := unescapeUnicode(compactEJ.String(), test.BsonType) + if test.BsonType == "0x01" { + rEJ = normalizeRelaxedDouble(t, *test.TestKey, rEJ) + } + + nativeToJSON(t, rEJ, doc, v.Description, "relaxed", "rEJ", "bson_to_native(cB)") + + /*** relaxed extended JSON round-trip tests (if exists) ***/ + doc, err = jsonToNative(rEJ, "relaxed", v.Description) + require.NoError(t, err) + + // native_to_relaxed_extended_json(json_to_native(rEJ)) = rEJ + nativeToJSON(t, rEJ, doc, v.Description, "relaxed", "eJR", "json_to_native(rEJ)") + } + + /*** canonical extended JSON round-trip tests ***/ + doc, err = jsonToNative(cEJ, "canonical", v.Description) + require.NoError(t, err) + + // native_to_canonical_extended_json(json_to_native(cEJ)) = cEJ + nativeToJSON(t, cEJ, doc, v.Description, "canonical", "cEJ", "json_to_native(cEJ)") + + // native_to_bson(json_to_native(cEJ)) = cb (unless lossy) + if v.Lossy == nil || !*v.Lossy { + nativeToBSON(t, cB, doc, v.Description, "canonical", "json_to_native(cEJ)") + } + + /*** degenerate BSON round-trip tests (if exists) ***/ + if v.DegenerateBSON != nil { + dB, err := hex.DecodeString(*v.DegenerateBSON) + require.NoErrorf(t, err, "%s: reading 
degenerate BSON", v.Description) + + doc = bsonToNative(t, dB, "degenerate", v.Description) + + // native_to_bson(bson_to_native(dB)) = cB + nativeToBSON(t, cB, doc, v.Description, "degenerate", "bson_to_native(dB)") + } + + /*** degenerate JSON round-trip tests (if exists) ***/ + if v.DegenerateExtJSON != nil { + var compactEJ bytes.Buffer + require.NoError(t, json.Compact(&compactEJ, []byte(*v.DegenerateExtJSON))) + dEJ := unescapeUnicode(compactEJ.String(), test.BsonType) + if test.BsonType == "0x01" { + dEJ = normalizeCanonicalDouble(t, *test.TestKey, dEJ) + } + + doc, err = jsonToNative(dEJ, "degenerate canonical", v.Description) + require.NoError(t, err) + + // native_to_canonical_extended_json(json_to_native(dEJ)) = cEJ + nativeToJSON(t, cEJ, doc, v.Description, "degenerate canonical", "cEJ", "json_to_native(dEJ)") + + // native_to_bson(json_to_native(dEJ)) = cB (unless lossy) + if v.Lossy == nil || !*v.Lossy { + nativeToBSON(t, cB, doc, v.Description, "canonical", "json_to_native(dEJ)") + } + } + }) + } + }) + + t.Run("decode error", func(t *testing.T) { + for _, d := range test.DecodeErrors { + t.Run(d.Description, func(t *testing.T) { + b, err := hex.DecodeString(d.Bson) + require.NoError(t, err, d.Description) + + var doc D + err = Unmarshal(b, &doc) + + // The driver unmarshals invalid UTF-8 strings without error. Loop over the unmarshalled elements + // and assert that there was no error if any of the string or DBPointer values contain invalid UTF-8 + // characters. + for _, elem := range doc { + value := reflect.ValueOf(elem.Value) + invalidString := (value.Kind() == reflect.String) && !utf8.ValidString(value.String()) + dbPtr, ok := elem.Value.(DBPointer) + invalidDBPtr := ok && !utf8.ValidString(dbPtr.DB) + + if invalidString || invalidDBPtr { + require.NoError(t, err, d.Description) + return + } + } + + require.Errorf(t, err, "%s: expected decode error", d.Description) + }) + } + }) + + t.Run("parse error", func(t *testing.T) { + for _, p := range test.ParseErrors { + t.Run(p.Description, func(t *testing.T) { + s := unescapeUnicode(p.String, test.BsonType) + if test.BsonType == "0x13" { + s = fmt.Sprintf(`{"decimal128": {"$numberDecimal": "%s"}}`, s) + } + + switch test.BsonType { + case "0x00", "0x05", "0x13": + var doc D + err := UnmarshalExtJSON([]byte(s), true, &doc) + // Null bytes are validated when marshaling to BSON + if strings.Contains(p.Description, "Null") { + _, err = Marshal(doc) + } + require.Errorf(t, err, "%s: expected parse error", p.Description) + default: + t.Errorf("Update test to check for parse errors for type %s", test.BsonType) + t.Fail() + } + }) + } + }) + }) +} + +func Test_BsonCorpus(t *testing.T) { + jsonFiles, err := findJSONFilesInDir(dataDir) + require.NoErrorf(t, err, "error finding JSON files in %s: %v", dataDir, err) + + for _, file := range jsonFiles { + runTest(t, file) + } +} + +func TestRelaxedUUIDValidation(t *testing.T) { + testCases := []struct { + description string + canonicalExtJSON string + degenerateExtJSON string + expectedErr string + }{ + { + "valid uuid", + "{\"x\" : { \"$binary\" : {\"base64\" : \"c//SZESzTGmQ6OfR38A11A==\", \"subType\" : \"04\"}}}", + "{\"x\" : { \"$uuid\" : \"73ffd264-44b3-4c69-90e8-e7d1dfc035d4\"}}", + "", + }, + { + "invalid uuid--no hyphens", + "", + "{\"x\" : { \"$uuid\" : \"73ffd26444b34c6990e8e7d1dfc035d4\"}}", + "$uuid value does not follow RFC 4122 format regarding length and hyphens", + }, + { + "invalid uuid--trailing hyphens", + "", + "{\"x\" : { \"$uuid\" : 
\"73ffd264-44b3-4c69-90e8-e7d1dfc035--\"}}", + "$uuid value does not follow RFC 4122 format regarding length and hyphens", + }, + { + "invalid uuid--malformed hex", + "", + "{\"x\" : { \"$uuid\" : \"q3@fd26l-44b3-4c69-90e8-e7d1dfc035d4\"}}", + "$uuid value does not follow RFC 4122 format regarding hex bytes: encoding/hex: invalid byte: U+0071 'q'", + }, + } + + for _, tc := range testCases { + t.Run(tc.description, func(t *testing.T) { + // get canonical extended JSON (if provided) + cEJ := "" + if tc.canonicalExtJSON != "" { + var compactCEJ bytes.Buffer + require.NoError(t, json.Compact(&compactCEJ, []byte(tc.canonicalExtJSON))) + cEJ = unescapeUnicode(compactCEJ.String(), "0x05") + } + + // get degenerate extended JSON + var compactDEJ bytes.Buffer + require.NoError(t, json.Compact(&compactDEJ, []byte(tc.degenerateExtJSON))) + dEJ := unescapeUnicode(compactDEJ.String(), "0x05") + + // convert dEJ to native doc + var doc D + err := UnmarshalExtJSON([]byte(dEJ), true, &doc) + + if tc.expectedErr != "" { + assert.Equal(t, tc.expectedErr, err.Error(), "expected error %v, got %v", tc.expectedErr, err) + } else { + assert.Nil(t, err, "expected no error, got error: %v", err) + + // Marshal doc into extended JSON and compare with cEJ + nativeToJSON(t, cEJ, doc, tc.description, "degenerate canonical", "cEJ", "json_to_native(dEJ)") + } + }) + } + +} diff --git a/bson_test.go b/bson_test.go new file mode 100644 index 0000000..b42fb80 --- /dev/null +++ b/bson_test.go @@ -0,0 +1,679 @@ +// Copyright (C) MongoDB, Inc. 2017-present. +// +// Licensed under the Apache License, Version 2.0 (the "License"); you may +// not use this file except in compliance with the License. You may obtain +// a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 + +package bson + +import ( + "bytes" + "encoding/json" + "fmt" + "reflect" + "strconv" + "strings" + "testing" + "time" + + "gitea.psichedelico.com/go/bson/internal/assert" + "gitea.psichedelico.com/go/bson/internal/require" + "gitea.psichedelico.com/go/bson/x/bsonx/bsoncore" + "github.com/google/go-cmp/cmp" +) + +func noerr(t *testing.T, err error) { + if err != nil { + t.Helper() + t.Errorf("Unexpected error: (%T)%v", err, err) + t.FailNow() + } +} + +func TestTimestamp(t *testing.T) { + t.Parallel() + + testCases := []struct { + description string + tp Timestamp + tp2 Timestamp + expectedAfter bool + expectedBefore bool + expectedEqual bool + expectedCompare int + }{ + { + description: "equal", + tp: Timestamp{T: 12345, I: 67890}, + tp2: Timestamp{T: 12345, I: 67890}, + expectedBefore: false, + expectedAfter: false, + expectedEqual: true, + expectedCompare: 0, + }, + { + description: "T greater than", + tp: Timestamp{T: 12345, I: 67890}, + tp2: Timestamp{T: 2345, I: 67890}, + expectedBefore: false, + expectedAfter: true, + expectedEqual: false, + expectedCompare: 1, + }, + { + description: "I greater than", + tp: Timestamp{T: 12345, I: 67890}, + tp2: Timestamp{T: 12345, I: 7890}, + expectedBefore: false, + expectedAfter: true, + expectedEqual: false, + expectedCompare: 1, + }, + { + description: "T less than", + tp: Timestamp{T: 12345, I: 67890}, + tp2: Timestamp{T: 112345, I: 67890}, + expectedBefore: true, + expectedAfter: false, + expectedEqual: false, + expectedCompare: -1, + }, + { + description: "I less than", + tp: Timestamp{T: 12345, I: 67890}, + tp2: Timestamp{T: 12345, I: 167890}, + expectedBefore: true, + expectedAfter: false, + expectedEqual: false, + expectedCompare: -1, + }, + } + + for _, tc := range testCases { + tc := tc // 
Capture range variable. + + t.Run(tc.description, func(t *testing.T) { + t.Parallel() + + assert.Equal(t, tc.expectedAfter, tc.tp.After(tc.tp2), "expected After results to be the same") + assert.Equal(t, tc.expectedBefore, tc.tp.Before(tc.tp2), "expected Before results to be the same") + assert.Equal(t, tc.expectedEqual, tc.tp.Equal(tc.tp2), "expected Equal results to be the same") + assert.Equal(t, tc.expectedCompare, tc.tp.Compare(tc.tp2), "expected Compare result to be the same") + }) + } +} + +func TestPrimitiveIsZero(t *testing.T) { + testcases := []struct { + name string + zero Zeroer + nonzero Zeroer + }{ + {"binary", Binary{}, Binary{Data: []byte{0x01, 0x02, 0x03}, Subtype: 0xFF}}, + {"decimal128", Decimal128{}, NewDecimal128(1, 2)}, + {"objectID", ObjectID{}, NewObjectID()}, + {"regex", Regex{}, Regex{Pattern: "foo", Options: "bar"}}, + {"dbPointer", DBPointer{}, DBPointer{DB: "foobar", Pointer: ObjectID{0x01, 0x02, 0x03, 0x04, 0x05, 0x06, 0x07, 0x08, 0x09, 0x0A, 0x0B, 0x0C}}}, + {"timestamp", Timestamp{}, Timestamp{T: 12345, I: 67890}}, + } + + for _, tc := range testcases { + t.Run(tc.name, func(t *testing.T) { + require.True(t, tc.zero.IsZero()) + require.False(t, tc.nonzero.IsZero()) + }) + } +} + +func TestRegexCompare(t *testing.T) { + testcases := []struct { + name string + r1 Regex + r2 Regex + eq bool + }{ + {"equal", Regex{Pattern: "foo1", Options: "bar1"}, Regex{Pattern: "foo1", Options: "bar1"}, true}, + {"not equal", Regex{Pattern: "foo1", Options: "bar1"}, Regex{Pattern: "foo2", Options: "bar2"}, false}, + {"not equal", Regex{Pattern: "foo1", Options: "bar1"}, Regex{Pattern: "foo1", Options: "bar2"}, false}, + {"not equal", Regex{Pattern: "foo1", Options: "bar1"}, Regex{Pattern: "foo2", Options: "bar1"}, false}, + } + + for _, tc := range testcases { + t.Run(tc.name, func(t *testing.T) { + require.True(t, tc.r1.Equal(tc.r2) == tc.eq) + }) + } +} + +func TestDateTime(t *testing.T) { + t.Run("json", func(t *testing.T) { + t.Run("round trip", func(t *testing.T) { + original := DateTime(1000) + jsonBytes, err := json.Marshal(original) + assert.Nil(t, err, "Marshal error: %v", err) + + var unmarshalled DateTime + err = json.Unmarshal(jsonBytes, &unmarshalled) + assert.Nil(t, err, "Unmarshal error: %v", err) + + assert.Equal(t, original, unmarshalled, "expected DateTime %v, got %v", original, unmarshalled) + }) + t.Run("decode null", func(t *testing.T) { + jsonBytes := []byte("null") + var dt DateTime + err := json.Unmarshal(jsonBytes, &dt) + assert.Nil(t, err, "Unmarshal error: %v", err) + assert.Equal(t, DateTime(0), dt, "expected DateTime value to be 0, got %v", dt) + }) + t.Run("UTC", func(t *testing.T) { + dt := DateTime(1681145535123) + jsonBytes, err := json.Marshal(dt) + assert.Nil(t, err, "Marshal error: %v", err) + assert.Equal(t, `"2023-04-10T16:52:15.123Z"`, string(jsonBytes)) + }) + }) + t.Run("NewDateTimeFromTime", func(t *testing.T) { + t.Run("range is not limited", func(t *testing.T) { + // If the implementation internally calls time.Time.UnixNano(), the constructor cannot handle times after + // the year 2262. 
+ + timeFormat := "2006-01-02T15:04:05.999Z07:00" + timeString := "3001-01-01T00:00:00Z" + tt, err := time.Parse(timeFormat, timeString) + assert.Nil(t, err, "Parse error: %v", err) + + dt := NewDateTimeFromTime(tt) + assert.True(t, dt > 0, "expected a valid DateTime greater than 0, got %v", dt) + }) + }) +} + +func TestTimeRoundTrip(t *testing.T) { + val := struct { + Value time.Time + ID string + }{ + ID: "time-rt-test", + } + + if !val.Value.IsZero() { + t.Errorf("Did not get zero time as expected.") + } + + bsonOut, err := Marshal(val) + noerr(t, err) + rtval := struct { + Value time.Time + ID string + }{} + + err = Unmarshal(bsonOut, &rtval) + noerr(t, err) + if !cmp.Equal(val, rtval) { + t.Errorf("Did not round trip properly. got %v; want %v", val, rtval) + } + if !rtval.Value.IsZero() { + t.Errorf("Did not get zero time as expected.") + } +} + +func TestNonNullTimeRoundTrip(t *testing.T) { + now := time.Now() + now = time.Unix(now.Unix(), 0) + val := struct { + Value time.Time + ID string + }{ + ID: "time-rt-test", + Value: now, + } + + bsonOut, err := Marshal(val) + noerr(t, err) + rtval := struct { + Value time.Time + ID string + }{} + + err = Unmarshal(bsonOut, &rtval) + noerr(t, err) + if !cmp.Equal(val, rtval) { + t.Errorf("Did not round trip properly. got %v; want %v", val, rtval) + } +} + +func TestD(t *testing.T) { + t.Run("can marshal", func(t *testing.T) { + d := D{{"foo", "bar"}, {"hello", "world"}, {"pi", 3.14159}} + idx, want := bsoncore.AppendDocumentStart(nil) + want = bsoncore.AppendStringElement(want, "foo", "bar") + want = bsoncore.AppendStringElement(want, "hello", "world") + want = bsoncore.AppendDoubleElement(want, "pi", 3.14159) + want, err := bsoncore.AppendDocumentEnd(want, idx) + noerr(t, err) + got, err := Marshal(d) + noerr(t, err) + if !bytes.Equal(got, want) { + t.Errorf("Marshaled documents do not match. got %v; want %v", Raw(got), Raw(want)) + } + }) + t.Run("can unmarshal", func(t *testing.T) { + want := D{{"foo", "bar"}, {"hello", "world"}, {"pi", 3.14159}} + idx, doc := bsoncore.AppendDocumentStart(nil) + doc = bsoncore.AppendStringElement(doc, "foo", "bar") + doc = bsoncore.AppendStringElement(doc, "hello", "world") + doc = bsoncore.AppendDoubleElement(doc, "pi", 3.14159) + doc, err := bsoncore.AppendDocumentEnd(doc, idx) + noerr(t, err) + var got D + err = Unmarshal(doc, &got) + noerr(t, err) + if !cmp.Equal(got, want) { + t.Errorf("Unmarshaled documents do not match. 
got %v; want %v", got, want) + } + }) +} + +func TestDStringer(t *testing.T) { + got := D{{"a", 1}, {"b", 2}}.String() + want := `{"a":{"$numberInt":"1"},"b":{"$numberInt":"2"}}` + assert.Equal(t, want, got, "expected: %s, got: %s", want, got) +} + +func TestMStringer(t *testing.T) { + type msg struct { + A json.RawMessage `json:"a"` + B json.RawMessage `json:"b"` + } + + var res msg + got := M{"a": 1, "b": 2}.String() + err := json.Unmarshal([]byte(got), &res) + require.NoError(t, err, "Unmarshal error") + + want := msg{ + A: json.RawMessage(`{"$numberInt":"1"}`), + B: json.RawMessage(`{"$numberInt":"2"}`), + } + + assert.Equal(t, want, res, "returned string did not unmarshal to the expected document, returned string: %s", got) +} +func TestD_MarshalJSON(t *testing.T) { + t.Parallel() + + testcases := []struct { + name string + test D + expected interface{} + }{ + { + "nil", + nil, + nil, + }, + { + "empty", + D{}, + struct{}{}, + }, + { + "non-empty", + D{ + {"a", 42}, + {"b", true}, + {"c", "answer"}, + {"d", nil}, + {"e", 2.71828}, + {"f", A{42, true, "answer", nil, 2.71828}}, + {"g", D{{"foo", "bar"}}}, + }, + struct { + A int `json:"a"` + B bool `json:"b"` + C string `json:"c"` + D interface{} `json:"d"` + E float32 `json:"e"` + F []interface{} `json:"f"` + G map[string]interface{} `json:"g"` + }{ + A: 42, + B: true, + C: "answer", + D: nil, + E: 2.71828, + F: []interface{}{42, true, "answer", nil, 2.71828}, + G: map[string]interface{}{"foo": "bar"}, + }, + }, + } + for _, tc := range testcases { + tc := tc + t.Run("json.Marshal "+tc.name, func(t *testing.T) { + t.Parallel() + + got, err := json.Marshal(tc.test) + assert.NoError(t, err) + want, _ := json.Marshal(tc.expected) + assert.Equal(t, want, got) + }) + } + for _, tc := range testcases { + tc := tc + t.Run("json.MarshalIndent "+tc.name, func(t *testing.T) { + t.Parallel() + + got, err := json.MarshalIndent(tc.test, "", "") + assert.NoError(t, err) + want, _ := json.MarshalIndent(tc.expected, "", "") + assert.Equal(t, want, got) + }) + } +} + +func TestD_UnmarshalJSON(t *testing.T) { + t.Parallel() + + t.Run("success", func(t *testing.T) { + t.Parallel() + + for _, tc := range []struct { + name string + test []byte + expected D + }{ + { + "nil", + []byte(`null`), + nil, + }, + { + "empty", + []byte(`{}`), + D{}, + }, + { + "non-empty", + []byte(`{"hello":"world","pi":3.142,"boolean":true,"nothing":null,"list":["hello world",3.142,false,null,{"Lorem":"ipsum"}],"document":{"foo":"bar"}}`), + D{ + {"hello", "world"}, + {"pi", 3.142}, + {"boolean", true}, + {"nothing", nil}, + {"list", []interface{}{"hello world", 3.142, false, nil, D{{"Lorem", "ipsum"}}}}, + {"document", D{{"foo", "bar"}}}, + }, + }, + } { + tc := tc + t.Run(tc.name, func(t *testing.T) { + t.Parallel() + + var got D + err := json.Unmarshal(tc.test, &got) + assert.NoError(t, err) + assert.Equal(t, tc.expected, got) + }) + } + }) + + t.Run("failure", func(t *testing.T) { + t.Parallel() + + for _, tc := range []struct { + name string + test string + }{ + { + "illegal", + `nil`, + }, + { + "invalid", + `{"pi": 3.142ipsum}`, + }, + { + "malformatted", + `{"pi", 3.142}`, + }, + { + "truncated", + `{"pi": 3.142`, + }, + { + "array type", + `["pi", 3.142]`, + }, + { + "boolean type", + `true`, + }, + } { + tc := tc + t.Run(tc.name, func(t *testing.T) { + t.Parallel() + + var a map[string]interface{} + want := json.Unmarshal([]byte(tc.test), &a) + var b D + got := json.Unmarshal([]byte(tc.test), &b) + switch w := want.(type) { + case *json.UnmarshalTypeError: + w.Type = 
reflect.TypeOf(b) + require.IsType(t, want, got) + g := got.(*json.UnmarshalTypeError) + assert.Equal(t, w, g) + default: + assert.Equal(t, want, got) + } + }) + } + }) +} + +type stringerString string + +func (ss stringerString) String() string { + return "bar" +} + +type keyBool bool + +func (kb keyBool) MarshalKey() (string, error) { + return fmt.Sprintf("%v", kb), nil +} + +func (kb *keyBool) UnmarshalKey(key string) error { + switch key { + case "true": + *kb = true + case "false": + *kb = false + default: + return fmt.Errorf("invalid bool value %v", key) + } + return nil +} + +type keyStruct struct { + val int64 +} + +func (k keyStruct) MarshalText() (text []byte, err error) { + str := strconv.FormatInt(k.val, 10) + + return []byte(str), nil +} + +func (k *keyStruct) UnmarshalText(text []byte) error { + val, err := strconv.ParseInt(string(text), 10, 64) + if err != nil { + return err + } + + *k = keyStruct{ + val: val, + } + + return nil +} + +func TestMapCodec(t *testing.T) { + t.Run("EncodeKeysWithStringer", func(t *testing.T) { + strstr := stringerString("foo") + mapObj := map[stringerString]int{strstr: 1} + testCases := []struct { + name string + mapCodec *mapCodec + key string + }{ + {"default", &mapCodec{}, "foo"}, + {"true", &mapCodec{encodeKeysWithStringer: true}, "bar"}, + {"false", &mapCodec{encodeKeysWithStringer: false}, "foo"}, + } + for _, tc := range testCases { + t.Run(tc.name, func(t *testing.T) { + mapRegistry := NewRegistry() + mapRegistry.RegisterKindEncoder(reflect.Map, tc.mapCodec) + buf := new(bytes.Buffer) + vw := NewDocumentWriter(buf) + enc := NewEncoder(vw) + enc.SetRegistry(mapRegistry) + err := enc.Encode(mapObj) + assert.Nil(t, err, "Encode error: %v", err) + str := buf.String() + assert.True(t, strings.Contains(str, tc.key), "expected result to contain %v, got: %v", tc.key, str) + }) + } + }) + + t.Run("keys implements keyMarshaler and keyUnmarshaler", func(t *testing.T) { + mapObj := map[keyBool]int{keyBool(true): 1} + + doc, err := Marshal(mapObj) + assert.Nil(t, err, "Marshal error: %v", err) + idx, want := bsoncore.AppendDocumentStart(nil) + want = bsoncore.AppendInt32Element(want, "true", 1) + want, _ = bsoncore.AppendDocumentEnd(want, idx) + assert.Equal(t, want, doc, "expected result %v, got %v", string(want), string(doc)) + + var got map[keyBool]int + err = Unmarshal(doc, &got) + assert.Nil(t, err, "Unmarshal error: %v", err) + assert.Equal(t, mapObj, got, "expected result %v, got %v", mapObj, got) + + }) + + t.Run("keys implements encoding.TextMarshaler and encoding.TextUnmarshaler", func(t *testing.T) { + mapObj := map[keyStruct]int{ + {val: 10}: 100, + } + + doc, err := Marshal(mapObj) + assert.Nil(t, err, "Marshal error: %v", err) + idx, want := bsoncore.AppendDocumentStart(nil) + want = bsoncore.AppendInt32Element(want, "10", 100) + want, _ = bsoncore.AppendDocumentEnd(want, idx) + assert.Equal(t, want, doc, "expected result %v, got %v", string(want), string(doc)) + + var got map[keyStruct]int + err = Unmarshal(doc, &got) + assert.Nil(t, err, "Unmarshal error: %v", err) + assert.Equal(t, mapObj, got, "expected result %v, got %v", mapObj, got) + + }) +} + +func TestExtJSONEscapeKey(t *testing.T) { + doc := D{ + { + Key: "\\usb#", + Value: int32(1), + }, + { + Key: "regex", + Value: Regex{Pattern: "ab\\\\\\\"ab", Options: "\""}, + }, + } + b, err := MarshalExtJSON(&doc, false, false) + noerr(t, err) + + want := `{"\\usb#":1,"regex":{"$regularExpression":{"pattern":"ab\\\\\\\"ab","options":"\""}}}` + if diff := cmp.Diff(want, string(b)); diff 
!= "" { + t.Errorf("Marshaled documents do not match. got %v, want %v", string(b), want) + } + + var got D + err = UnmarshalExtJSON(b, false, &got) + noerr(t, err) + if !cmp.Equal(got, doc) { + t.Errorf("Unmarshaled documents do not match. got %v; want %v", got, doc) + } +} + +func TestBsoncoreArray(t *testing.T) { + type BSONDocumentArray struct { + Array []D `bson:"array"` + } + + type BSONArray struct { + Array bsoncore.Array `bson:"array"` + } + + bda := BSONDocumentArray{ + Array: []D{ + {{"x", 1}}, + {{"x", 2}}, + {{"x", 3}}, + }, + } + + expectedBSON, err := Marshal(bda) + assert.Nil(t, err, "Marshal bsoncore.Document array error: %v", err) + + var ba BSONArray + err = Unmarshal(expectedBSON, &ba) + assert.Nil(t, err, "Unmarshal error: %v", err) + + actualBSON, err := Marshal(ba) + assert.Nil(t, err, "Marshal bsoncore.Array error: %v", err) + + assert.Equal(t, expectedBSON, actualBSON, + "expected BSON to be %v after Marshalling again; got %v", expectedBSON, actualBSON) + + doc := bsoncore.Document(actualBSON) + v := doc.Lookup("array") + assert.Equal(t, bsoncore.TypeArray, v.Type, "expected type array, got %v", v.Type) +} + +var baseTime = time.Date(2024, 10, 11, 12, 13, 14, 12345678, time.UTC) + +func BenchmarkDateTimeMarshalJSON(b *testing.B) { + t := NewDateTimeFromTime(baseTime) + data, err := t.MarshalJSON() + if err != nil { + b.Fatal(err) + } + b.ReportAllocs() + b.SetBytes(int64(len(data))) + for i := 0; i < b.N; i++ { + if _, err := t.MarshalJSON(); err != nil { + b.Fatal(err) + } + } +} + +func BenchmarkDateTimeUnmarshalJSON(b *testing.B) { + t := NewDateTimeFromTime(baseTime) + data, err := t.MarshalJSON() + if err != nil { + b.Fatal(err) + } + b.ReportAllocs() + b.SetBytes(int64(len(data))) + for i := 0; i < b.N; i++ { + var dt DateTime + if err := dt.UnmarshalJSON(data); err != nil { + b.Fatal(err) + } + } +} diff --git a/bsoncodec.go b/bsoncodec.go new file mode 100644 index 0000000..bacc99f --- /dev/null +++ b/bsoncodec.go @@ -0,0 +1,199 @@ +// Copyright (C) MongoDB, Inc. 2017-present. +// +// Licensed under the Apache License, Version 2.0 (the "License"); you may +// not use this file except in compliance with the License. You may obtain +// a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 + +package bson + +import ( + "fmt" + "reflect" + "strings" +) + +var ( + emptyValue = reflect.Value{} +) + +// ValueEncoderError is an error returned from a ValueEncoder when the provided value can't be +// encoded by the ValueEncoder. +type ValueEncoderError struct { + Name string + Types []reflect.Type + Kinds []reflect.Kind + Received reflect.Value +} + +func (vee ValueEncoderError) Error() string { + typeKinds := make([]string, 0, len(vee.Types)+len(vee.Kinds)) + for _, t := range vee.Types { + typeKinds = append(typeKinds, t.String()) + } + for _, k := range vee.Kinds { + if k == reflect.Map { + typeKinds = append(typeKinds, "map[string]*") + continue + } + typeKinds = append(typeKinds, k.String()) + } + received := vee.Received.Kind().String() + if vee.Received.IsValid() { + received = vee.Received.Type().String() + } + return fmt.Sprintf("%s can only encode valid %s, but got %s", vee.Name, strings.Join(typeKinds, ", "), received) +} + +// ValueDecoderError is an error returned from a ValueDecoder when the provided value can't be +// decoded by the ValueDecoder. 
+type ValueDecoderError struct { + Name string + Types []reflect.Type + Kinds []reflect.Kind + Received reflect.Value +} + +func (vde ValueDecoderError) Error() string { + typeKinds := make([]string, 0, len(vde.Types)+len(vde.Kinds)) + for _, t := range vde.Types { + typeKinds = append(typeKinds, t.String()) + } + for _, k := range vde.Kinds { + if k == reflect.Map { + typeKinds = append(typeKinds, "map[string]*") + continue + } + typeKinds = append(typeKinds, k.String()) + } + received := vde.Received.Kind().String() + if vde.Received.IsValid() { + received = vde.Received.Type().String() + } + return fmt.Sprintf("%s can only decode valid and settable %s, but got %s", vde.Name, strings.Join(typeKinds, ", "), received) +} + +// EncodeContext is the contextual information required for a Codec to encode a +// value. +type EncodeContext struct { + *Registry + + // minSize causes the Encoder to marshal Go integer values (int, int8, int16, int32, int64, + // uint, uint8, uint16, uint32, or uint64) as the minimum BSON int size (either 32 or 64 bits) + // that can represent the integer value. + minSize bool + + errorOnInlineDuplicates bool + stringifyMapKeysWithFmt bool + nilMapAsEmpty bool + nilSliceAsEmpty bool + nilByteSliceAsEmpty bool + omitZeroStruct bool + useJSONStructTags bool +} + +// DecodeContext is the contextual information required for a Codec to decode a +// value. +type DecodeContext struct { + *Registry + + // truncate, if true, instructs decoders to to truncate the fractional part of BSON "double" + // values when attempting to unmarshal them into a Go integer (int, int8, int16, int32, int64, + // uint, uint8, uint16, uint32, or uint64) struct field. The truncation logic does not apply to + // BSON "decimal128" values. + truncate bool + + // defaultDocumentType specifies the Go type to decode top-level and nested BSON documents into. In particular, the + // usage for this field is restricted to data typed as "interface{}" or "map[string]interface{}". If DocumentType is + // set to a type that a BSON document cannot be unmarshaled into (e.g. "string"), unmarshalling will result in an + // error. + defaultDocumentType reflect.Type + + binaryAsSlice bool + + // a false value results in a decoding error. + objectIDAsHexString bool + + useJSONStructTags bool + useLocalTimeZone bool + zeroMaps bool + zeroStructs bool +} + +// ValueEncoder is the interface implemented by types that can encode a provided Go type to BSON. +// The value to encode is provided as a reflect.Value and a bson.ValueWriter is used within the +// EncodeValue method to actually create the BSON representation. For convenience, ValueEncoderFunc +// is provided to allow use of a function with the correct signature as a ValueEncoder. An +// EncodeContext instance is provided to allow implementations to lookup further ValueEncoders and +// to provide configuration information. +type ValueEncoder interface { + EncodeValue(EncodeContext, ValueWriter, reflect.Value) error +} + +// ValueEncoderFunc is an adapter function that allows a function with the correct signature to be +// used as a ValueEncoder. +type ValueEncoderFunc func(EncodeContext, ValueWriter, reflect.Value) error + +// EncodeValue implements the ValueEncoder interface. +func (fn ValueEncoderFunc) EncodeValue(ec EncodeContext, vw ValueWriter, val reflect.Value) error { + return fn(ec, vw, val) +} + +// ValueDecoder is the interface implemented by types that can decode BSON to a provided Go type. 
+// Implementations should ensure that the value they receive is settable. Similar to ValueEncoderFunc, +// ValueDecoderFunc is provided to allow the use of a function with the correct signature as a +// ValueDecoder. A DecodeContext instance is provided and serves similar functionality to the +// EncodeContext. +type ValueDecoder interface { + DecodeValue(DecodeContext, ValueReader, reflect.Value) error +} + +// ValueDecoderFunc is an adapter function that allows a function with the correct signature to be +// used as a ValueDecoder. +type ValueDecoderFunc func(DecodeContext, ValueReader, reflect.Value) error + +// DecodeValue implements the ValueDecoder interface. +func (fn ValueDecoderFunc) DecodeValue(dc DecodeContext, vr ValueReader, val reflect.Value) error { + return fn(dc, vr, val) +} + +// typeDecoder is the interface implemented by types that can handle the decoding of a value given its type. +type typeDecoder interface { + decodeType(DecodeContext, ValueReader, reflect.Type) (reflect.Value, error) +} + +// typeDecoderFunc is an adapter function that allows a function with the correct signature to be used as a typeDecoder. +type typeDecoderFunc func(DecodeContext, ValueReader, reflect.Type) (reflect.Value, error) + +func (fn typeDecoderFunc) decodeType(dc DecodeContext, vr ValueReader, t reflect.Type) (reflect.Value, error) { + return fn(dc, vr, t) +} + +// decodeAdapter allows two functions with the correct signatures to be used as both a ValueDecoder and typeDecoder. +type decodeAdapter struct { + ValueDecoderFunc + typeDecoderFunc +} + +var _ ValueDecoder = decodeAdapter{} +var _ typeDecoder = decodeAdapter{} + +func decodeTypeOrValueWithInfo(vd ValueDecoder, dc DecodeContext, vr ValueReader, t reflect.Type) (reflect.Value, error) { + if td, _ := vd.(typeDecoder); td != nil { + val, err := td.decodeType(dc, vr, t) + if err == nil && val.Type() != t { + // This conversion step is necessary for slices and maps. If a user declares variables like: + // + // type myBool bool + // var m map[string]myBool + // + // and tries to decode BSON bytes into the map, the decoding will fail if this conversion is not present + // because we'll try to assign a value of type bool to one of type myBool. + val = val.Convert(t) + } + return val, err + } + + val := reflect.New(t).Elem() + err := vd.DecodeValue(dc, vr, val) + return val, err +} diff --git a/bsoncodec_test.go b/bsoncodec_test.go new file mode 100644 index 0000000..57bbec8 --- /dev/null +++ b/bsoncodec_test.go @@ -0,0 +1,72 @@ +// Copyright (C) MongoDB, Inc. 2017-present. +// +// Licensed under the Apache License, Version 2.0 (the "License"); you may +// not use this file except in compliance with the License. 
You may obtain +// a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 + +package bson + +import ( + "fmt" + "reflect" + "testing" +) + +func ExampleValueEncoder() { + var _ ValueEncoderFunc = func(_ EncodeContext, vw ValueWriter, val reflect.Value) error { + if val.Kind() != reflect.String { + return ValueEncoderError{Name: "StringEncodeValue", Kinds: []reflect.Kind{reflect.String}, Received: val} + } + + return vw.WriteString(val.String()) + } +} + +func ExampleValueDecoder() { + var _ ValueDecoderFunc = func(_ DecodeContext, vr ValueReader, val reflect.Value) error { + if !val.CanSet() || val.Kind() != reflect.String { + return ValueDecoderError{Name: "StringDecodeValue", Kinds: []reflect.Kind{reflect.String}, Received: val} + } + + if vr.Type() != TypeString { + return fmt.Errorf("cannot decode %v into a string type", vr.Type()) + } + + str, err := vr.ReadString() + if err != nil { + return err + } + val.SetString(str) + return nil + } +} + +type llCodec struct { + t *testing.T + decodeval interface{} + encodeval interface{} + err error +} + +func (llc *llCodec) EncodeValue(_ EncodeContext, _ ValueWriter, i interface{}) error { + if llc.err != nil { + return llc.err + } + + llc.encodeval = i + return nil +} + +func (llc *llCodec) DecodeValue(_ DecodeContext, _ ValueReader, val reflect.Value) error { + if llc.err != nil { + return llc.err + } + + if !reflect.TypeOf(llc.decodeval).AssignableTo(val.Type()) { + llc.t.Errorf("decodeval must be assignable to val provided to DecodeValue, but is not. decodeval %T; val %T", llc.decodeval, val) + return nil + } + + val.Set(reflect.ValueOf(llc.decodeval)) + return nil +} diff --git a/bsonrw_test.go b/bsonrw_test.go new file mode 100644 index 0000000..a1402ba --- /dev/null +++ b/bsonrw_test.go @@ -0,0 +1,846 @@ +// Copyright (C) MongoDB, Inc. 2017-present. +// +// Licensed under the Apache License, Version 2.0 (the "License"); you may +// not use this file except in compliance with the License. You may obtain +// a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 + +package bson + +import ( + "testing" + + "gitea.psichedelico.com/go/bson/x/bsonx/bsoncore" +) + +var ( + _ ValueReader = &valueReaderWriter{} + _ ValueWriter = &valueReaderWriter{} +) + +// invoked is a type used to indicate what method was called last. +type invoked byte + +// These are the different methods that can be invoked. 
+const ( + nothing invoked = iota + readArray + readBinary + readBoolean + readDocument + readCodeWithScope + readDBPointer + readDateTime + readDecimal128 + readDouble + readInt32 + readInt64 + readJavascript + readMaxKey + readMinKey + readNull + readObjectID + readRegex + readString + readSymbol + readTimestamp + readUndefined + readElement + readValue + writeArray + writeBinary + writeBinaryWithSubtype + writeBoolean + writeCodeWithScope + writeDBPointer + writeDateTime + writeDecimal128 + writeDouble + writeInt32 + writeInt64 + writeJavascript + writeMaxKey + writeMinKey + writeNull + writeObjectID + writeRegex + writeString + writeDocument + writeSymbol + writeTimestamp + writeUndefined + writeDocumentElement + writeDocumentEnd + writeArrayElement + writeArrayEnd + skip +) + +func (i invoked) String() string { + switch i { + case nothing: + return "Nothing" + case readArray: + return "ReadArray" + case readBinary: + return "ReadBinary" + case readBoolean: + return "ReadBoolean" + case readDocument: + return "ReadDocument" + case readCodeWithScope: + return "ReadCodeWithScope" + case readDBPointer: + return "ReadDBPointer" + case readDateTime: + return "ReadDateTime" + case readDecimal128: + return "ReadDecimal128" + case readDouble: + return "ReadDouble" + case readInt32: + return "ReadInt32" + case readInt64: + return "ReadInt64" + case readJavascript: + return "ReadJavascript" + case readMaxKey: + return "ReadMaxKey" + case readMinKey: + return "ReadMinKey" + case readNull: + return "ReadNull" + case readObjectID: + return "ReadObjectID" + case readRegex: + return "ReadRegex" + case readString: + return "ReadString" + case readSymbol: + return "ReadSymbol" + case readTimestamp: + return "ReadTimestamp" + case readUndefined: + return "ReadUndefined" + case readElement: + return "ReadElement" + case readValue: + return "ReadValue" + case writeArray: + return "WriteArray" + case writeBinary: + return "WriteBinary" + case writeBinaryWithSubtype: + return "WriteBinaryWithSubtype" + case writeBoolean: + return "WriteBoolean" + case writeCodeWithScope: + return "WriteCodeWithScope" + case writeDBPointer: + return "WriteDBPointer" + case writeDateTime: + return "WriteDateTime" + case writeDecimal128: + return "WriteDecimal128" + case writeDouble: + return "WriteDouble" + case writeInt32: + return "WriteInt32" + case writeInt64: + return "WriteInt64" + case writeJavascript: + return "WriteJavascript" + case writeMaxKey: + return "WriteMaxKey" + case writeMinKey: + return "WriteMinKey" + case writeNull: + return "WriteNull" + case writeObjectID: + return "WriteObjectID" + case writeRegex: + return "WriteRegex" + case writeString: + return "WriteString" + case writeDocument: + return "WriteDocument" + case writeSymbol: + return "WriteSymbol" + case writeTimestamp: + return "WriteTimestamp" + case writeUndefined: + return "WriteUndefined" + case writeDocumentElement: + return "WriteDocumentElement" + case writeDocumentEnd: + return "WriteDocumentEnd" + case writeArrayElement: + return "WriteArrayElement" + case writeArrayEnd: + return "WriteArrayEnd" + default: + return "" + } +} + +// valueReaderWriter is a test implementation of a bsonrw.ValueReader and bsonrw.ValueWriter +type valueReaderWriter struct { + T *testing.T + invoked invoked + Return interface{} // Can be a primitive or a bsoncore.Value + BSONType Type + Err error + ErrAfter invoked // error after this method is called + depth uint64 +} + +// prevent infinite recursion. 
+func (llvrw *valueReaderWriter) checkdepth() { + llvrw.depth++ + if llvrw.depth > 1000 { + panic("max depth exceeded") + } +} + +// Type implements the ValueReader interface. +func (llvrw *valueReaderWriter) Type() Type { + llvrw.checkdepth() + return llvrw.BSONType +} + +// Skip implements the ValueReader interface. +func (llvrw *valueReaderWriter) Skip() error { + llvrw.checkdepth() + llvrw.invoked = skip + if llvrw.ErrAfter == llvrw.invoked { + return llvrw.Err + } + return nil +} + +// ReadArray implements the ValueReader interface. +func (llvrw *valueReaderWriter) ReadArray() (ArrayReader, error) { + llvrw.checkdepth() + llvrw.invoked = readArray + if llvrw.ErrAfter == llvrw.invoked { + return nil, llvrw.Err + } + + return llvrw, nil +} + +// ReadBinary implements the ValueReader interface. +func (llvrw *valueReaderWriter) ReadBinary() (b []byte, btype byte, err error) { + llvrw.checkdepth() + llvrw.invoked = readBinary + if llvrw.ErrAfter == llvrw.invoked { + return nil, 0x00, llvrw.Err + } + + switch tt := llvrw.Return.(type) { + case bsoncore.Value: + subtype, data, _, ok := bsoncore.ReadBinary(tt.Data) + if !ok { + llvrw.T.Error("Invalid Value provided for return value of ReadBinary.") + return nil, 0x00, nil + } + return data, subtype, nil + default: + llvrw.T.Errorf("Incorrect type provided for return value of ReadBinary: %T", llvrw.Return) + return nil, 0x00, nil + } +} + +// ReadBoolean implements the ValueReader interface. +func (llvrw *valueReaderWriter) ReadBoolean() (bool, error) { + llvrw.checkdepth() + llvrw.invoked = readBoolean + if llvrw.ErrAfter == llvrw.invoked { + return false, llvrw.Err + } + + switch tt := llvrw.Return.(type) { + case bool: + return tt, nil + case bsoncore.Value: + b, _, ok := bsoncore.ReadBoolean(tt.Data) + if !ok { + llvrw.T.Error("Invalid Value provided for return value of ReadBoolean.") + return false, nil + } + return b, nil + default: + llvrw.T.Errorf("Incorrect type provided for return value of ReadBoolean: %T", llvrw.Return) + return false, nil + } +} + +// ReadDocument implements the ValueReader interface. +func (llvrw *valueReaderWriter) ReadDocument() (DocumentReader, error) { + llvrw.checkdepth() + llvrw.invoked = readDocument + if llvrw.ErrAfter == llvrw.invoked { + return nil, llvrw.Err + } + + return llvrw, nil +} + +// ReadCodeWithScope implements the ValueReader interface. +func (llvrw *valueReaderWriter) ReadCodeWithScope() (code string, dr DocumentReader, err error) { + llvrw.checkdepth() + llvrw.invoked = readCodeWithScope + if llvrw.ErrAfter == llvrw.invoked { + return "", nil, llvrw.Err + } + + return "", llvrw, nil +} + +// ReadDBPointer implements the ValueReader interface. +func (llvrw *valueReaderWriter) ReadDBPointer() (ns string, oid ObjectID, err error) { + llvrw.checkdepth() + llvrw.invoked = readDBPointer + if llvrw.ErrAfter == llvrw.invoked { + return "", ObjectID{}, llvrw.Err + } + + switch tt := llvrw.Return.(type) { + case bsoncore.Value: + ns, oid, _, ok := bsoncore.ReadDBPointer(tt.Data) + if !ok { + llvrw.T.Error("Invalid Value instance provided for return value of ReadDBPointer") + return "", ObjectID{}, nil + } + return ns, oid, nil + default: + llvrw.T.Errorf("Incorrect type provided for return value of ReadDBPointer: %T", llvrw.Return) + return "", ObjectID{}, nil + } +} + +// ReadDateTime implements the ValueReader interface. 
+func (llvrw *valueReaderWriter) ReadDateTime() (int64, error) { + llvrw.checkdepth() + llvrw.invoked = readDateTime + if llvrw.ErrAfter == llvrw.invoked { + return 0, llvrw.Err + } + + dt, ok := llvrw.Return.(int64) + if !ok { + llvrw.T.Errorf("Incorrect type provided for return value of ReadDateTime: %T", llvrw.Return) + return 0, nil + } + + return dt, nil +} + +// ReadDecimal128 implements the ValueReader interface. +func (llvrw *valueReaderWriter) ReadDecimal128() (Decimal128, error) { + llvrw.checkdepth() + llvrw.invoked = readDecimal128 + if llvrw.ErrAfter == llvrw.invoked { + return Decimal128{}, llvrw.Err + } + + d128, ok := llvrw.Return.(Decimal128) + if !ok { + llvrw.T.Errorf("Incorrect type provided for return value of ReadDecimal128: %T", llvrw.Return) + return Decimal128{}, nil + } + + return d128, nil +} + +// ReadDouble implements the ValueReader interface. +func (llvrw *valueReaderWriter) ReadDouble() (float64, error) { + llvrw.checkdepth() + llvrw.invoked = readDouble + if llvrw.ErrAfter == llvrw.invoked { + return 0, llvrw.Err + } + + f64, ok := llvrw.Return.(float64) + if !ok { + llvrw.T.Errorf("Incorrect type provided for return value of ReadDouble: %T", llvrw.Return) + return 0, nil + } + + return f64, nil +} + +// ReadInt32 implements the ValueReader interface. +func (llvrw *valueReaderWriter) ReadInt32() (int32, error) { + llvrw.checkdepth() + llvrw.invoked = readInt32 + if llvrw.ErrAfter == llvrw.invoked { + return 0, llvrw.Err + } + + i32, ok := llvrw.Return.(int32) + if !ok { + llvrw.T.Errorf("Incorrect type provided for return value of ReadInt32: %T", llvrw.Return) + return 0, nil + } + + return i32, nil +} + +// ReadInt64 implements the ValueReader interface. +func (llvrw *valueReaderWriter) ReadInt64() (int64, error) { + llvrw.checkdepth() + llvrw.invoked = readInt64 + if llvrw.ErrAfter == llvrw.invoked { + return 0, llvrw.Err + } + i64, ok := llvrw.Return.(int64) + if !ok { + llvrw.T.Errorf("Incorrect type provided for return value of ReadInt64: %T", llvrw.Return) + return 0, nil + } + + return i64, nil +} + +// ReadJavascript implements the ValueReader interface. +func (llvrw *valueReaderWriter) ReadJavascript() (code string, err error) { + llvrw.checkdepth() + llvrw.invoked = readJavascript + if llvrw.ErrAfter == llvrw.invoked { + return "", llvrw.Err + } + js, ok := llvrw.Return.(string) + if !ok { + llvrw.T.Errorf("Incorrect type provided for return value of ReadJavascript: %T", llvrw.Return) + return "", nil + } + + return js, nil +} + +// ReadMaxKey implements the ValueReader interface. +func (llvrw *valueReaderWriter) ReadMaxKey() error { + llvrw.checkdepth() + llvrw.invoked = readMaxKey + if llvrw.ErrAfter == llvrw.invoked { + return llvrw.Err + } + + return nil +} + +// ReadMinKey implements the ValueReader interface. +func (llvrw *valueReaderWriter) ReadMinKey() error { + llvrw.checkdepth() + llvrw.invoked = readMinKey + if llvrw.ErrAfter == llvrw.invoked { + return llvrw.Err + } + + return nil +} + +// ReadNull implements the ValueReader interface. +func (llvrw *valueReaderWriter) ReadNull() error { + llvrw.checkdepth() + llvrw.invoked = readNull + if llvrw.ErrAfter == llvrw.invoked { + return llvrw.Err + } + + return nil +} + +// ReadObjectID implements the ValueReader interface. 
+func (llvrw *valueReaderWriter) ReadObjectID() (ObjectID, error) { + llvrw.checkdepth() + llvrw.invoked = readObjectID + if llvrw.ErrAfter == llvrw.invoked { + return ObjectID{}, llvrw.Err + } + oid, ok := llvrw.Return.(ObjectID) + if !ok { + llvrw.T.Errorf("Incorrect type provided for return value of ReadObjectID: %T", llvrw.Return) + return ObjectID{}, nil + } + + return oid, nil +} + +// ReadRegex implements the ValueReader interface. +func (llvrw *valueReaderWriter) ReadRegex() (pattern string, options string, err error) { + llvrw.checkdepth() + llvrw.invoked = readRegex + if llvrw.ErrAfter == llvrw.invoked { + return "", "", llvrw.Err + } + switch tt := llvrw.Return.(type) { + case bsoncore.Value: + pattern, options, _, ok := bsoncore.ReadRegex(tt.Data) + if !ok { + llvrw.T.Error("Invalid Value instance provided for ReadRegex") + return "", "", nil + } + return pattern, options, nil + default: + llvrw.T.Errorf("Incorrect type provided for return value of ReadRegex: %T", llvrw.Return) + return "", "", nil + } +} + +// ReadString implements the ValueReader interface. +func (llvrw *valueReaderWriter) ReadString() (string, error) { + llvrw.checkdepth() + llvrw.invoked = readString + if llvrw.ErrAfter == llvrw.invoked { + return "", llvrw.Err + } + str, ok := llvrw.Return.(string) + if !ok { + llvrw.T.Errorf("Incorrect type provided for return value of ReadString: %T", llvrw.Return) + return "", nil + } + + return str, nil +} + +// ReadSymbol implements the ValueReader interface. +func (llvrw *valueReaderWriter) ReadSymbol() (symbol string, err error) { + llvrw.checkdepth() + llvrw.invoked = readSymbol + if llvrw.ErrAfter == llvrw.invoked { + return "", llvrw.Err + } + switch tt := llvrw.Return.(type) { + case string: + return tt, nil + case bsoncore.Value: + symbol, _, ok := bsoncore.ReadSymbol(tt.Data) + if !ok { + llvrw.T.Error("Invalid Value instance provided for ReadSymbol") + return "", nil + } + return symbol, nil + default: + llvrw.T.Errorf("Incorrect type provided for return value of ReadSymbol: %T", llvrw.Return) + return "", nil + } +} + +// ReadTimestamp implements the ValueReader interface. +func (llvrw *valueReaderWriter) ReadTimestamp() (t uint32, i uint32, err error) { + llvrw.checkdepth() + llvrw.invoked = readTimestamp + if llvrw.ErrAfter == llvrw.invoked { + return 0, 0, llvrw.Err + } + switch tt := llvrw.Return.(type) { + case bsoncore.Value: + t, i, _, ok := bsoncore.ReadTimestamp(tt.Data) + if !ok { + llvrw.T.Errorf("Invalid Value instance provided for return value of ReadTimestamp") + return 0, 0, nil + } + return t, i, nil + default: + llvrw.T.Errorf("Incorrect type provided for return value of ReadTimestamp: %T", llvrw.Return) + return 0, 0, nil + } +} + +// ReadUndefined implements the ValueReader interface. +func (llvrw *valueReaderWriter) ReadUndefined() error { + llvrw.checkdepth() + llvrw.invoked = readUndefined + if llvrw.ErrAfter == llvrw.invoked { + return llvrw.Err + } + + return nil +} + +// WriteArray implements the ValueWriter interface. +func (llvrw *valueReaderWriter) WriteArray() (ArrayWriter, error) { + llvrw.checkdepth() + llvrw.invoked = writeArray + if llvrw.ErrAfter == llvrw.invoked { + return nil, llvrw.Err + } + return llvrw, nil +} + +// WriteBinary implements the ValueWriter interface. +func (llvrw *valueReaderWriter) WriteBinary([]byte) error { + llvrw.checkdepth() + llvrw.invoked = writeBinary + if llvrw.ErrAfter == llvrw.invoked { + return llvrw.Err + } + return nil +} + +// WriteBinaryWithSubtype implements the ValueWriter interface. 
+func (llvrw *valueReaderWriter) WriteBinaryWithSubtype([]byte, byte) error { + llvrw.checkdepth() + llvrw.invoked = writeBinaryWithSubtype + if llvrw.ErrAfter == llvrw.invoked { + return llvrw.Err + } + return nil +} + +// WriteBoolean implements the ValueWriter interface. +func (llvrw *valueReaderWriter) WriteBoolean(bool) error { + llvrw.checkdepth() + llvrw.invoked = writeBoolean + if llvrw.ErrAfter == llvrw.invoked { + return llvrw.Err + } + return nil +} + +// WriteCodeWithScope implements the ValueWriter interface. +func (llvrw *valueReaderWriter) WriteCodeWithScope(string) (DocumentWriter, error) { + llvrw.checkdepth() + llvrw.invoked = writeCodeWithScope + if llvrw.ErrAfter == llvrw.invoked { + return nil, llvrw.Err + } + return llvrw, nil +} + +// WriteDBPointer implements the ValueWriter interface. +func (llvrw *valueReaderWriter) WriteDBPointer(string, ObjectID) error { + llvrw.checkdepth() + llvrw.invoked = writeDBPointer + if llvrw.ErrAfter == llvrw.invoked { + return llvrw.Err + } + return nil +} + +// WriteDateTime implements the ValueWriter interface. +func (llvrw *valueReaderWriter) WriteDateTime(int64) error { + llvrw.checkdepth() + llvrw.invoked = writeDateTime + if llvrw.ErrAfter == llvrw.invoked { + return llvrw.Err + } + return nil +} + +// WriteDecimal128 implements the ValueWriter interface. +func (llvrw *valueReaderWriter) WriteDecimal128(Decimal128) error { + llvrw.checkdepth() + llvrw.invoked = writeDecimal128 + if llvrw.ErrAfter == llvrw.invoked { + return llvrw.Err + } + return nil +} + +// WriteDouble implements the ValueWriter interface. +func (llvrw *valueReaderWriter) WriteDouble(float64) error { + llvrw.checkdepth() + llvrw.invoked = writeDouble + if llvrw.ErrAfter == llvrw.invoked { + return llvrw.Err + } + return nil +} + +// WriteInt32 implements the ValueWriter interface. +func (llvrw *valueReaderWriter) WriteInt32(int32) error { + llvrw.checkdepth() + llvrw.invoked = writeInt32 + if llvrw.ErrAfter == llvrw.invoked { + return llvrw.Err + } + return nil +} + +// WriteInt64 implements the ValueWriter interface. +func (llvrw *valueReaderWriter) WriteInt64(int64) error { + llvrw.checkdepth() + llvrw.invoked = writeInt64 + if llvrw.ErrAfter == llvrw.invoked { + return llvrw.Err + } + return nil +} + +// WriteJavascript implements the ValueWriter interface. +func (llvrw *valueReaderWriter) WriteJavascript(string) error { + llvrw.checkdepth() + llvrw.invoked = writeJavascript + if llvrw.ErrAfter == llvrw.invoked { + return llvrw.Err + } + return nil +} + +// WriteMaxKey implements the ValueWriter interface. +func (llvrw *valueReaderWriter) WriteMaxKey() error { + llvrw.checkdepth() + llvrw.invoked = writeMaxKey + if llvrw.ErrAfter == llvrw.invoked { + return llvrw.Err + } + return nil +} + +// WriteMinKey implements the ValueWriter interface. +func (llvrw *valueReaderWriter) WriteMinKey() error { + llvrw.checkdepth() + llvrw.invoked = writeMinKey + if llvrw.ErrAfter == llvrw.invoked { + return llvrw.Err + } + return nil +} + +// WriteNull implements the ValueWriter interface. +func (llvrw *valueReaderWriter) WriteNull() error { + llvrw.checkdepth() + llvrw.invoked = writeNull + if llvrw.ErrAfter == llvrw.invoked { + return llvrw.Err + } + return nil +} + +// WriteObjectID implements the ValueWriter interface. +func (llvrw *valueReaderWriter) WriteObjectID(ObjectID) error { + llvrw.checkdepth() + llvrw.invoked = writeObjectID + if llvrw.ErrAfter == llvrw.invoked { + return llvrw.Err + } + return nil +} + +// WriteRegex implements the ValueWriter interface. 
+func (llvrw *valueReaderWriter) WriteRegex(string, string) error { + llvrw.checkdepth() + llvrw.invoked = writeRegex + if llvrw.ErrAfter == llvrw.invoked { + return llvrw.Err + } + return nil +} + +// WriteString implements the ValueWriter interface. +func (llvrw *valueReaderWriter) WriteString(string) error { + llvrw.checkdepth() + llvrw.invoked = writeString + if llvrw.ErrAfter == llvrw.invoked { + return llvrw.Err + } + return nil +} + +// WriteDocument implements the ValueWriter interface. +func (llvrw *valueReaderWriter) WriteDocument() (DocumentWriter, error) { + llvrw.checkdepth() + llvrw.invoked = writeDocument + if llvrw.ErrAfter == llvrw.invoked { + return nil, llvrw.Err + } + return llvrw, nil +} + +// WriteSymbol implements the ValueWriter interface. +func (llvrw *valueReaderWriter) WriteSymbol(string) error { + llvrw.checkdepth() + llvrw.invoked = writeSymbol + if llvrw.ErrAfter == llvrw.invoked { + return llvrw.Err + } + return nil +} + +// WriteTimestamp implements the ValueWriter interface. +func (llvrw *valueReaderWriter) WriteTimestamp(uint32, uint32) error { + llvrw.checkdepth() + llvrw.invoked = writeTimestamp + if llvrw.ErrAfter == llvrw.invoked { + return llvrw.Err + } + return nil +} + +// WriteUndefined implements the ValueWriter interface. +func (llvrw *valueReaderWriter) WriteUndefined() error { + llvrw.checkdepth() + llvrw.invoked = writeUndefined + if llvrw.ErrAfter == llvrw.invoked { + return llvrw.Err + } + return nil +} + +// ReadElement implements the DocumentReader interface. +func (llvrw *valueReaderWriter) ReadElement() (string, ValueReader, error) { + llvrw.checkdepth() + llvrw.invoked = readElement + if llvrw.ErrAfter == llvrw.invoked { + return "", nil, llvrw.Err + } + + return "", llvrw, nil +} + +// WriteDocumentElement implements the DocumentWriter interface. +func (llvrw *valueReaderWriter) WriteDocumentElement(string) (ValueWriter, error) { + llvrw.checkdepth() + llvrw.invoked = writeDocumentElement + if llvrw.ErrAfter == llvrw.invoked { + return nil, llvrw.Err + } + + return llvrw, nil +} + +// WriteDocumentEnd implements the DocumentWriter interface. +func (llvrw *valueReaderWriter) WriteDocumentEnd() error { + llvrw.checkdepth() + llvrw.invoked = writeDocumentEnd + if llvrw.ErrAfter == llvrw.invoked { + return llvrw.Err + } + + return nil +} + +// ReadValue implements the ArrayReader interface. +func (llvrw *valueReaderWriter) ReadValue() (ValueReader, error) { + llvrw.checkdepth() + llvrw.invoked = readValue + if llvrw.ErrAfter == llvrw.invoked { + return nil, llvrw.Err + } + + return llvrw, nil +} + +// WriteArrayElement implements the ArrayWriter interface. +func (llvrw *valueReaderWriter) WriteArrayElement() (ValueWriter, error) { + llvrw.checkdepth() + llvrw.invoked = writeArrayElement + if llvrw.ErrAfter == llvrw.invoked { + return nil, llvrw.Err + } + + return llvrw, nil +} + +// WriteArrayEnd implements the ArrayWriter interface. +func (llvrw *valueReaderWriter) WriteArrayEnd() error { + llvrw.checkdepth() + llvrw.invoked = writeArrayEnd + if llvrw.ErrAfter == llvrw.invoked { + return llvrw.Err + } + + return nil +} diff --git a/byte_slice_codec.go b/byte_slice_codec.go new file mode 100644 index 0000000..bd44cf9 --- /dev/null +++ b/byte_slice_codec.go @@ -0,0 +1,97 @@ +// Copyright (C) MongoDB, Inc. 2017-present. +// +// Licensed under the Apache License, Version 2.0 (the "License"); you may +// not use this file except in compliance with the License. 
You may obtain +// a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 + +package bson + +import ( + "fmt" + "reflect" +) + +// byteSliceCodec is the Codec used for []byte values. +type byteSliceCodec struct { + // encodeNilAsEmpty causes EncodeValue to marshal nil Go byte slices as empty BSON binary values + // instead of BSON null. + encodeNilAsEmpty bool +} + +// Assert that byteSliceCodec satisfies the typeDecoder interface, which allows it to be +// used by collection type decoders (e.g. map, slice, etc) to set individual values in a +// collection. +var _ typeDecoder = &byteSliceCodec{} + +// EncodeValue is the ValueEncoder for []byte. +func (bsc *byteSliceCodec) EncodeValue(ec EncodeContext, vw ValueWriter, val reflect.Value) error { + if !val.IsValid() || val.Type() != tByteSlice { + return ValueEncoderError{Name: "ByteSliceEncodeValue", Types: []reflect.Type{tByteSlice}, Received: val} + } + if val.IsNil() && !bsc.encodeNilAsEmpty && !ec.nilByteSliceAsEmpty { + return vw.WriteNull() + } + return vw.WriteBinary(val.Interface().([]byte)) +} + +func (bsc *byteSliceCodec) decodeType(_ DecodeContext, vr ValueReader, t reflect.Type) (reflect.Value, error) { + if t != tByteSlice { + return emptyValue, ValueDecoderError{ + Name: "ByteSliceDecodeValue", + Types: []reflect.Type{tByteSlice}, + Received: reflect.Zero(t), + } + } + + var data []byte + var err error + switch vrType := vr.Type(); vrType { + case TypeString: + str, err := vr.ReadString() + if err != nil { + return emptyValue, err + } + data = []byte(str) + case TypeSymbol: + sym, err := vr.ReadSymbol() + if err != nil { + return emptyValue, err + } + data = []byte(sym) + case TypeBinary: + var subtype byte + data, subtype, err = vr.ReadBinary() + if err != nil { + return emptyValue, err + } + if subtype != TypeBinaryGeneric && subtype != TypeBinaryBinaryOld { + return emptyValue, decodeBinaryError{subtype: subtype, typeName: "[]byte"} + } + case TypeNull: + err = vr.ReadNull() + case TypeUndefined: + err = vr.ReadUndefined() + default: + return emptyValue, fmt.Errorf("cannot decode %v into a []byte", vrType) + } + if err != nil { + return emptyValue, err + } + + return reflect.ValueOf(data), nil +} + +// DecodeValue is the ValueDecoder for []byte. +func (bsc *byteSliceCodec) DecodeValue(dc DecodeContext, vr ValueReader, val reflect.Value) error { + if !val.CanSet() || val.Type() != tByteSlice { + return ValueDecoderError{Name: "ByteSliceDecodeValue", Types: []reflect.Type{tByteSlice}, Received: val} + } + + elem, err := bsc.decodeType(dc, vr, tByteSlice) + if err != nil { + return err + } + + val.Set(elem) + return nil +} diff --git a/codec_cache.go b/codec_cache.go new file mode 100644 index 0000000..b404282 --- /dev/null +++ b/codec_cache.go @@ -0,0 +1,166 @@ +// Copyright (C) MongoDB, Inc. 2017-present. +// +// Licensed under the Apache License, Version 2.0 (the "License"); you may +// not use this file except in compliance with the License. You may obtain +// a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 + +package bson + +import ( + "reflect" + "sync" + "sync/atomic" +) + +// Runtime check that the kind encoder and decoder caches can store any valid +// reflect.Kind constant. 
+func init() { + if s := reflect.Kind(len(kindEncoderCache{}.entries)).String(); s != "kind27" { + panic("The capacity of kindEncoderCache is too small.\n" + + "This is due to a new type being added to reflect.Kind.") + } +} + +// statically assert array size +var _ = (kindEncoderCache{}).entries[reflect.UnsafePointer] +var _ = (kindDecoderCache{}).entries[reflect.UnsafePointer] + +type typeEncoderCache struct { + cache sync.Map // map[reflect.Type]ValueEncoder +} + +func (c *typeEncoderCache) Store(rt reflect.Type, enc ValueEncoder) { + c.cache.Store(rt, enc) +} + +func (c *typeEncoderCache) Load(rt reflect.Type) (ValueEncoder, bool) { + if v, _ := c.cache.Load(rt); v != nil { + return v.(ValueEncoder), true + } + return nil, false +} + +func (c *typeEncoderCache) LoadOrStore(rt reflect.Type, enc ValueEncoder) ValueEncoder { + if v, loaded := c.cache.LoadOrStore(rt, enc); loaded { + enc = v.(ValueEncoder) + } + return enc +} + +func (c *typeEncoderCache) Clone() *typeEncoderCache { + cc := new(typeEncoderCache) + c.cache.Range(func(k, v interface{}) bool { + if k != nil && v != nil { + cc.cache.Store(k, v) + } + return true + }) + return cc +} + +type typeDecoderCache struct { + cache sync.Map // map[reflect.Type]ValueDecoder +} + +func (c *typeDecoderCache) Store(rt reflect.Type, dec ValueDecoder) { + c.cache.Store(rt, dec) +} + +func (c *typeDecoderCache) Load(rt reflect.Type) (ValueDecoder, bool) { + if v, _ := c.cache.Load(rt); v != nil { + return v.(ValueDecoder), true + } + return nil, false +} + +func (c *typeDecoderCache) LoadOrStore(rt reflect.Type, dec ValueDecoder) ValueDecoder { + if v, loaded := c.cache.LoadOrStore(rt, dec); loaded { + dec = v.(ValueDecoder) + } + return dec +} + +func (c *typeDecoderCache) Clone() *typeDecoderCache { + cc := new(typeDecoderCache) + c.cache.Range(func(k, v interface{}) bool { + if k != nil && v != nil { + cc.cache.Store(k, v) + } + return true + }) + return cc +} + +// atomic.Value requires that all calls to Store() have the same concrete type +// so we wrap the ValueEncoder with a kindEncoderCacheEntry to ensure the type +// is always the same (since different concrete types may implement the +// ValueEncoder interface). +type kindEncoderCacheEntry struct { + enc ValueEncoder +} + +type kindEncoderCache struct { + entries [reflect.UnsafePointer + 1]atomic.Value // *kindEncoderCacheEntry +} + +func (c *kindEncoderCache) Store(rt reflect.Kind, enc ValueEncoder) { + if enc != nil && rt < reflect.Kind(len(c.entries)) { + c.entries[rt].Store(&kindEncoderCacheEntry{enc: enc}) + } +} + +func (c *kindEncoderCache) Load(rt reflect.Kind) (ValueEncoder, bool) { + if rt < reflect.Kind(len(c.entries)) { + if ent, ok := c.entries[rt].Load().(*kindEncoderCacheEntry); ok { + return ent.enc, ent.enc != nil + } + } + return nil, false +} + +func (c *kindEncoderCache) Clone() *kindEncoderCache { + cc := new(kindEncoderCache) + for i, v := range c.entries { + if val := v.Load(); val != nil { + cc.entries[i].Store(val) + } + } + return cc +} + +// atomic.Value requires that all calls to Store() have the same concrete type +// so we wrap the ValueDecoder with a kindDecoderCacheEntry to ensure the type +// is always the same (since different concrete types may implement the +// ValueDecoder interface). 
+type kindDecoderCacheEntry struct { + dec ValueDecoder +} + +type kindDecoderCache struct { + entries [reflect.UnsafePointer + 1]atomic.Value // *kindDecoderCacheEntry +} + +func (c *kindDecoderCache) Store(rt reflect.Kind, dec ValueDecoder) { + if rt < reflect.Kind(len(c.entries)) { + c.entries[rt].Store(&kindDecoderCacheEntry{dec: dec}) + } +} + +func (c *kindDecoderCache) Load(rt reflect.Kind) (ValueDecoder, bool) { + if rt < reflect.Kind(len(c.entries)) { + if ent, ok := c.entries[rt].Load().(*kindDecoderCacheEntry); ok { + return ent.dec, ent.dec != nil + } + } + return nil, false +} + +func (c *kindDecoderCache) Clone() *kindDecoderCache { + cc := new(kindDecoderCache) + for i, v := range c.entries { + if val := v.Load(); val != nil { + cc.entries[i].Store(val) + } + } + return cc +} diff --git a/codec_cache_test.go b/codec_cache_test.go new file mode 100644 index 0000000..d48e05f --- /dev/null +++ b/codec_cache_test.go @@ -0,0 +1,176 @@ +// Copyright (C) MongoDB, Inc. 2017-present. +// +// Licensed under the Apache License, Version 2.0 (the "License"); you may +// not use this file except in compliance with the License. You may obtain +// a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 + +package bson + +import ( + "reflect" + "strconv" + "strings" + "testing" +) + +// NB(charlie): the array size is a power of 2 because we use the remainder of +// it (mod) in benchmarks and that is faster when the size is a power of 2. +var codecCacheTestTypes = [16]reflect.Type{ + reflect.TypeOf(uint8(0)), + reflect.TypeOf(uint16(0)), + reflect.TypeOf(uint32(0)), + reflect.TypeOf(uint64(0)), + reflect.TypeOf(uint(0)), + reflect.TypeOf(uintptr(0)), + reflect.TypeOf(int8(0)), + reflect.TypeOf(int16(0)), + reflect.TypeOf(int32(0)), + reflect.TypeOf(int64(0)), + reflect.TypeOf(int(0)), + reflect.TypeOf(float32(0)), + reflect.TypeOf(float64(0)), + reflect.TypeOf(true), + reflect.TypeOf(struct{ A int }{}), + reflect.TypeOf(map[int]int{}), +} + +func TestTypeCache(t *testing.T) { + rt := reflect.TypeOf(int(0)) + ec := new(typeEncoderCache) + dc := new(typeDecoderCache) + + codec := new(fakeCodec) + ec.Store(rt, codec) + dc.Store(rt, codec) + if v, ok := ec.Load(rt); !ok || !reflect.DeepEqual(v, codec) { + t.Errorf("Load(%s) = %v, %t; want: %v, %t", rt, v, ok, codec, true) + } + if v, ok := dc.Load(rt); !ok || !reflect.DeepEqual(v, codec) { + t.Errorf("Load(%s) = %v, %t; want: %v, %t", rt, v, ok, codec, true) + } + + // Make sure we overwrite the stored value with nil + ec.Store(rt, nil) + dc.Store(rt, nil) + if v, ok := ec.Load(rt); ok || v != nil { + t.Errorf("Load(%s) = %v, %t; want: %v, %t", rt, v, ok, nil, false) + } + if v, ok := dc.Load(rt); ok || v != nil { + t.Errorf("Load(%s) = %v, %t; want: %v, %t", rt, v, ok, nil, false) + } +} + +func TestTypeCacheClone(t *testing.T) { + codec := new(fakeCodec) + ec1 := new(typeEncoderCache) + dc1 := new(typeDecoderCache) + for _, rt := range codecCacheTestTypes { + ec1.Store(rt, codec) + dc1.Store(rt, codec) + } + ec2 := ec1.Clone() + dc2 := dc1.Clone() + for _, rt := range codecCacheTestTypes { + if v, _ := ec2.Load(rt); !reflect.DeepEqual(v, codec) { + t.Errorf("Load(%s) = %#v; want: %#v", rt, v, codec) + } + if v, _ := dc2.Load(rt); !reflect.DeepEqual(v, codec) { + t.Errorf("Load(%s) = %#v; want: %#v", rt, v, codec) + } + } +} + +func TestKindCacheArray(t *testing.T) { + // Check array bounds + var c kindEncoderCache + codec := new(fakeCodec) + c.Store(reflect.UnsafePointer, codec) // valid + c.Store(reflect.UnsafePointer+1, codec) // 
ignored + if v, ok := c.Load(reflect.UnsafePointer); !ok || v != codec { + t.Errorf("Load(reflect.UnsafePointer) = %v, %t; want: %v, %t", v, ok, codec, true) + } + if v, ok := c.Load(reflect.UnsafePointer + 1); ok || v != nil { + t.Errorf("Load(reflect.UnsafePointer + 1) = %v, %t; want: %v, %t", v, ok, nil, false) + } + + // Make sure that reflect.UnsafePointer is the last/largest reflect.Type. + // + // The String() method of invalid reflect.Type types are of the format + // "kind{NUMBER}". + for rt := reflect.UnsafePointer + 1; rt < reflect.UnsafePointer+16; rt++ { + s := rt.String() + if !strings.Contains(s, strconv.Itoa(int(rt))) { + t.Errorf("reflect.Type(%d) appears to be valid: %q", rt, s) + } + } +} + +func TestKindCacheClone(t *testing.T) { + e1 := new(kindEncoderCache) + d1 := new(kindDecoderCache) + codec := new(fakeCodec) + for k := reflect.Invalid; k <= reflect.UnsafePointer; k++ { + e1.Store(k, codec) + d1.Store(k, codec) + } + e2 := e1.Clone() + for k := reflect.Invalid; k <= reflect.UnsafePointer; k++ { + v1, ok1 := e1.Load(k) + v2, ok2 := e2.Load(k) + if ok1 != ok2 || !reflect.DeepEqual(v1, v2) || v1 == nil || v2 == nil { + t.Errorf("Encoder(%s): %#v, %t != %#v, %t", k, v1, ok1, v2, ok2) + } + } + d2 := d1.Clone() + for k := reflect.Invalid; k <= reflect.UnsafePointer; k++ { + v1, ok1 := d1.Load(k) + v2, ok2 := d2.Load(k) + if ok1 != ok2 || !reflect.DeepEqual(v1, v2) || v1 == nil || v2 == nil { + t.Errorf("Decoder(%s): %#v, %t != %#v, %t", k, v1, ok1, v2, ok2) + } + } +} + +func TestKindCacheEncoderNilEncoder(t *testing.T) { + t.Run("Encoder", func(t *testing.T) { + c := new(kindEncoderCache) + c.Store(reflect.Invalid, ValueEncoder(nil)) + v, ok := c.Load(reflect.Invalid) + if v != nil || ok { + t.Errorf("Load of nil ValueEncoder should return: nil, false; got: %v, %t", v, ok) + } + }) + t.Run("Decoder", func(t *testing.T) { + c := new(kindDecoderCache) + c.Store(reflect.Invalid, ValueDecoder(nil)) + v, ok := c.Load(reflect.Invalid) + if v != nil || ok { + t.Errorf("Load of nil ValueDecoder should return: nil, false; got: %v, %t", v, ok) + } + }) +} + +func BenchmarkEncoderCacheLoad(b *testing.B) { + c := new(typeEncoderCache) + codec := new(fakeCodec) + typs := codecCacheTestTypes + for _, t := range typs { + c.Store(t, codec) + } + b.RunParallel(func(pb *testing.PB) { + for i := 0; pb.Next(); i++ { + c.Load(typs[i%len(typs)]) + } + }) +} + +func BenchmarkEncoderCacheStore(b *testing.B) { + c := new(typeEncoderCache) + codec := new(fakeCodec) + b.RunParallel(func(pb *testing.PB) { + typs := codecCacheTestTypes + for i := 0; pb.Next(); i++ { + c.Store(typs[i%len(typs)], codec) + } + }) +} diff --git a/cond_addr_codec.go b/cond_addr_codec.go new file mode 100644 index 0000000..fed4d1f --- /dev/null +++ b/cond_addr_codec.go @@ -0,0 +1,61 @@ +// Copyright (C) MongoDB, Inc. 2017-present. +// +// Licensed under the Apache License, Version 2.0 (the "License"); you may +// not use this file except in compliance with the License. You may obtain +// a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 + +package bson + +import ( + "reflect" +) + +// condAddrEncoder is the encoder used when a pointer to the encoding value has an encoder. +type condAddrEncoder struct { + canAddrEnc ValueEncoder + elseEnc ValueEncoder +} + +var _ ValueEncoder = &condAddrEncoder{} + +// newCondAddrEncoder returns an condAddrEncoder. 
+func newCondAddrEncoder(canAddrEnc, elseEnc ValueEncoder) *condAddrEncoder { + encoder := condAddrEncoder{canAddrEnc: canAddrEnc, elseEnc: elseEnc} + return &encoder +} + +// EncodeValue is the ValueEncoderFunc for a value that may be addressable. +func (cae *condAddrEncoder) EncodeValue(ec EncodeContext, vw ValueWriter, val reflect.Value) error { + if val.CanAddr() { + return cae.canAddrEnc.EncodeValue(ec, vw, val) + } + if cae.elseEnc != nil { + return cae.elseEnc.EncodeValue(ec, vw, val) + } + return errNoEncoder{Type: val.Type()} +} + +// condAddrDecoder is the decoder used when a pointer to the value has a decoder. +type condAddrDecoder struct { + canAddrDec ValueDecoder + elseDec ValueDecoder +} + +var _ ValueDecoder = &condAddrDecoder{} + +// newCondAddrDecoder returns an CondAddrDecoder. +func newCondAddrDecoder(canAddrDec, elseDec ValueDecoder) *condAddrDecoder { + decoder := condAddrDecoder{canAddrDec: canAddrDec, elseDec: elseDec} + return &decoder +} + +// DecodeValue is the ValueDecoderFunc for a value that may be addressable. +func (cad *condAddrDecoder) DecodeValue(dc DecodeContext, vr ValueReader, val reflect.Value) error { + if val.CanAddr() { + return cad.canAddrDec.DecodeValue(dc, vr, val) + } + if cad.elseDec != nil { + return cad.elseDec.DecodeValue(dc, vr, val) + } + return errNoDecoder{Type: val.Type()} +} diff --git a/cond_addr_codec_test.go b/cond_addr_codec_test.go new file mode 100644 index 0000000..fd6d605 --- /dev/null +++ b/cond_addr_codec_test.go @@ -0,0 +1,95 @@ +// Copyright (C) MongoDB, Inc. 2017-present. +// +// Licensed under the Apache License, Version 2.0 (the "License"); you may +// not use this file except in compliance with the License. You may obtain +// a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 + +package bson + +import ( + "reflect" + "testing" + + "gitea.psichedelico.com/go/bson/internal/assert" +) + +func TestCondAddrCodec(t *testing.T) { + var inner int + canAddrVal := reflect.ValueOf(&inner) + addressable := canAddrVal.Elem() + unaddressable := reflect.ValueOf(inner) + rw := &valueReaderWriter{} + + t.Run("addressEncode", func(t *testing.T) { + invoked := 0 + encode1 := ValueEncoderFunc(func(EncodeContext, ValueWriter, reflect.Value) error { + invoked = 1 + return nil + }) + encode2 := ValueEncoderFunc(func(EncodeContext, ValueWriter, reflect.Value) error { + invoked = 2 + return nil + }) + condEncoder := newCondAddrEncoder(encode1, encode2) + + testCases := []struct { + name string + val reflect.Value + invoked int + }{ + {"canAddr", addressable, 1}, + {"else", unaddressable, 2}, + } + for _, tc := range testCases { + t.Run(tc.name, func(t *testing.T) { + err := condEncoder.EncodeValue(EncodeContext{}, rw, tc.val) + assert.Nil(t, err, "CondAddrEncoder error: %v", err) + + assert.Equal(t, invoked, tc.invoked, "Expected function %v to be called, called %v", tc.invoked, invoked) + }) + } + + t.Run("error", func(t *testing.T) { + errEncoder := newCondAddrEncoder(encode1, nil) + err := errEncoder.EncodeValue(EncodeContext{}, rw, unaddressable) + want := errNoEncoder{Type: unaddressable.Type()} + assert.Equal(t, err, want, "expected error %v, got %v", want, err) + }) + }) + t.Run("addressDecode", func(t *testing.T) { + invoked := 0 + decode1 := ValueDecoderFunc(func(DecodeContext, ValueReader, reflect.Value) error { + invoked = 1 + return nil + }) + decode2 := ValueDecoderFunc(func(DecodeContext, ValueReader, reflect.Value) error { + invoked = 2 + return nil + }) + condDecoder := newCondAddrDecoder(decode1, decode2) + + 
testCases := []struct { + name string + val reflect.Value + invoked int + }{ + {"canAddr", addressable, 1}, + {"else", unaddressable, 2}, + } + for _, tc := range testCases { + t.Run(tc.name, func(t *testing.T) { + err := condDecoder.DecodeValue(DecodeContext{}, rw, tc.val) + assert.Nil(t, err, "CondAddrDecoder error: %v", err) + + assert.Equal(t, invoked, tc.invoked, "Expected function %v to be called, called %v", tc.invoked, invoked) + }) + } + + t.Run("error", func(t *testing.T) { + errDecoder := newCondAddrDecoder(decode1, nil) + err := errDecoder.DecodeValue(DecodeContext{}, rw, unaddressable) + want := errNoDecoder{Type: unaddressable.Type()} + assert.Equal(t, err, want, "expected error %v, got %v", want, err) + }) + }) +} diff --git a/copier.go b/copier.go new file mode 100644 index 0000000..1e59220 --- /dev/null +++ b/copier.go @@ -0,0 +1,431 @@ +// Copyright (C) MongoDB, Inc. 2017-present. +// +// Licensed under the Apache License, Version 2.0 (the "License"); you may +// not use this file except in compliance with the License. You may obtain +// a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 + +package bson + +import ( + "bytes" + "errors" + "fmt" + "io" + + "gitea.psichedelico.com/go/bson/x/bsonx/bsoncore" +) + +// copyDocument handles copying one document from the src to the dst. +func copyDocument(dst ValueWriter, src ValueReader) error { + dr, err := src.ReadDocument() + if err != nil { + return err + } + + dw, err := dst.WriteDocument() + if err != nil { + return err + } + + return copyDocumentCore(dw, dr) +} + +// copyArrayFromBytes copies the values from a BSON array represented as a +// []byte to a ValueWriter. +func copyArrayFromBytes(dst ValueWriter, src []byte) error { + aw, err := dst.WriteArray() + if err != nil { + return err + } + + err = copyBytesToArrayWriter(aw, src) + if err != nil { + return err + } + + return aw.WriteArrayEnd() +} + +// copyDocumentFromBytes copies the values from a BSON document represented as a +// []byte to a ValueWriter. +func copyDocumentFromBytes(dst ValueWriter, src []byte) error { + dw, err := dst.WriteDocument() + if err != nil { + return err + } + + err = copyBytesToDocumentWriter(dw, src) + if err != nil { + return err + } + + return dw.WriteDocumentEnd() +} + +type writeElementFn func(key string) (ValueWriter, error) + +// copyBytesToArrayWriter copies the values from a BSON Array represented as a []byte to an +// ArrayWriter. +func copyBytesToArrayWriter(dst ArrayWriter, src []byte) error { + wef := func(_ string) (ValueWriter, error) { + return dst.WriteArrayElement() + } + + return copyBytesToValueWriter(src, wef) +} + +// copyBytesToDocumentWriter copies the values from a BSON document represented as a []byte to a +// DocumentWriter. +func copyBytesToDocumentWriter(dst DocumentWriter, src []byte) error { + wef := func(key string) (ValueWriter, error) { + return dst.WriteDocumentElement(key) + } + + return copyBytesToValueWriter(src, wef) +} + +func copyBytesToValueWriter(src []byte, wef writeElementFn) error { + // TODO(skriptble): Create errors types here. Anything that is a tag should be a property. + length, rem, ok := bsoncore.ReadLength(src) + if !ok { + return fmt.Errorf("couldn't read length from src, not enough bytes. length=%d", len(src)) + } + if len(src) < int(length) { + return fmt.Errorf("length read exceeds number of bytes available. 
length=%d bytes=%d", len(src), length) + } + rem = rem[:length-4] + + var t bsoncore.Type + var key string + var val bsoncore.Value + for { + t, rem, ok = bsoncore.ReadType(rem) + if !ok { + return io.EOF + } + if t == bsoncore.Type(0) { + if len(rem) != 0 { + return fmt.Errorf("document end byte found before end of document. remaining bytes=%v", rem) + } + break + } + + key, rem, ok = bsoncore.ReadKey(rem) + if !ok { + return fmt.Errorf("invalid key found. remaining bytes=%v", rem) + } + + // write as either array element or document element using writeElementFn + vw, err := wef(key) + if err != nil { + return err + } + + val, rem, ok = bsoncore.ReadValue(rem, t) + if !ok { + return fmt.Errorf("not enough bytes available to read type. bytes=%d type=%s", len(rem), t) + } + err = copyValueFromBytes(vw, Type(t), val.Data) + if err != nil { + return err + } + } + return nil +} + +// copyDocumentToBytes copies an entire document from the ValueReader and +// returns it as bytes. +func copyDocumentToBytes(src ValueReader) ([]byte, error) { + return appendDocumentBytes(nil, src) +} + +// appendDocumentBytes functions the same as CopyDocumentToBytes, but will +// append the result to dst. +func appendDocumentBytes(dst []byte, src ValueReader) ([]byte, error) { + if br, ok := src.(bytesReader); ok { + _, dst, err := br.readValueBytes(dst) + return dst, err + } + + vw := vwPool.Get().(*valueWriter) + defer putValueWriter(vw) + + vw.reset(dst) + + err := copyDocument(vw, src) + dst = vw.buf + return dst, err +} + +// appendArrayBytes copies an array from the ValueReader to dst. +func appendArrayBytes(dst []byte, src ValueReader) ([]byte, error) { + if br, ok := src.(bytesReader); ok { + _, dst, err := br.readValueBytes(dst) + return dst, err + } + + vw := vwPool.Get().(*valueWriter) + defer putValueWriter(vw) + + vw.reset(dst) + + err := copyArray(vw, src) + dst = vw.buf + return dst, err +} + +// copyValueFromBytes will write the value represtend by t and src to dst. +func copyValueFromBytes(dst ValueWriter, t Type, src []byte) error { + if wvb, ok := dst.(bytesWriter); ok { + return wvb.writeValueBytes(t, src) + } + + vr := newDocumentReader(bytes.NewReader(src)) + vr.pushElement(t) + + return copyValue(dst, vr) +} + +// copyValueToBytes copies a value from src and returns it as a Type and a +// []byte. +func copyValueToBytes(src ValueReader) (Type, []byte, error) { + if br, ok := src.(bytesReader); ok { + return br.readValueBytes(nil) + } + + vw := vwPool.Get().(*valueWriter) + defer putValueWriter(vw) + + vw.reset(nil) + vw.push(mElement) + + err := copyValue(vw, src) + if err != nil { + return 0, nil, err + } + + return Type(vw.buf[0]), vw.buf[2:], nil +} + +// copyValue will copy a single value from src to dst. 
+func copyValue(dst ValueWriter, src ValueReader) error { + var err error + switch src.Type() { + case TypeDouble: + var f64 float64 + f64, err = src.ReadDouble() + if err != nil { + break + } + err = dst.WriteDouble(f64) + case TypeString: + var str string + str, err = src.ReadString() + if err != nil { + return err + } + err = dst.WriteString(str) + case TypeEmbeddedDocument: + err = copyDocument(dst, src) + case TypeArray: + err = copyArray(dst, src) + case TypeBinary: + var data []byte + var subtype byte + data, subtype, err = src.ReadBinary() + if err != nil { + break + } + err = dst.WriteBinaryWithSubtype(data, subtype) + case TypeUndefined: + err = src.ReadUndefined() + if err != nil { + break + } + err = dst.WriteUndefined() + case TypeObjectID: + var oid ObjectID + oid, err = src.ReadObjectID() + if err != nil { + break + } + err = dst.WriteObjectID(oid) + case TypeBoolean: + var b bool + b, err = src.ReadBoolean() + if err != nil { + break + } + err = dst.WriteBoolean(b) + case TypeDateTime: + var dt int64 + dt, err = src.ReadDateTime() + if err != nil { + break + } + err = dst.WriteDateTime(dt) + case TypeNull: + err = src.ReadNull() + if err != nil { + break + } + err = dst.WriteNull() + case TypeRegex: + var pattern, options string + pattern, options, err = src.ReadRegex() + if err != nil { + break + } + err = dst.WriteRegex(pattern, options) + case TypeDBPointer: + var ns string + var pointer ObjectID + ns, pointer, err = src.ReadDBPointer() + if err != nil { + break + } + err = dst.WriteDBPointer(ns, pointer) + case TypeJavaScript: + var js string + js, err = src.ReadJavascript() + if err != nil { + break + } + err = dst.WriteJavascript(js) + case TypeSymbol: + var symbol string + symbol, err = src.ReadSymbol() + if err != nil { + break + } + err = dst.WriteSymbol(symbol) + case TypeCodeWithScope: + var code string + var srcScope DocumentReader + code, srcScope, err = src.ReadCodeWithScope() + if err != nil { + break + } + + var dstScope DocumentWriter + dstScope, err = dst.WriteCodeWithScope(code) + if err != nil { + break + } + err = copyDocumentCore(dstScope, srcScope) + case TypeInt32: + var i32 int32 + i32, err = src.ReadInt32() + if err != nil { + break + } + err = dst.WriteInt32(i32) + case TypeTimestamp: + var t, i uint32 + t, i, err = src.ReadTimestamp() + if err != nil { + break + } + err = dst.WriteTimestamp(t, i) + case TypeInt64: + var i64 int64 + i64, err = src.ReadInt64() + if err != nil { + break + } + err = dst.WriteInt64(i64) + case TypeDecimal128: + var d128 Decimal128 + d128, err = src.ReadDecimal128() + if err != nil { + break + } + err = dst.WriteDecimal128(d128) + case TypeMinKey: + err = src.ReadMinKey() + if err != nil { + break + } + err = dst.WriteMinKey() + case TypeMaxKey: + err = src.ReadMaxKey() + if err != nil { + break + } + err = dst.WriteMaxKey() + default: + err = fmt.Errorf("cannot copy unknown BSON type %s", src.Type()) + } + + return err +} + +func copyArray(dst ValueWriter, src ValueReader) error { + ar, err := src.ReadArray() + if err != nil { + return err + } + + aw, err := dst.WriteArray() + if err != nil { + return err + } + + for { + vr, err := ar.ReadValue() + if errors.Is(err, ErrEOA) { + break + } + if err != nil { + return err + } + + vw, err := aw.WriteArrayElement() + if err != nil { + return err + } + + err = copyValue(vw, vr) + if err != nil { + return err + } + } + + return aw.WriteArrayEnd() +} + +func copyDocumentCore(dw DocumentWriter, dr DocumentReader) error { + for { + key, vr, err := dr.ReadElement() + if 
errors.Is(err, ErrEOD) { + break + } + if err != nil { + return err + } + + vw, err := dw.WriteDocumentElement(key) + if err != nil { + return err + } + + err = copyValue(vw, vr) + if err != nil { + return err + } + } + + return dw.WriteDocumentEnd() +} + +// bytesReader is the interface used to read BSON bytes from a valueReader. +// +// The bytes of the value will be appended to dst. +type bytesReader interface { + readValueBytes(dst []byte) (Type, []byte, error) +} + +// bytesWriter is the interface used to write BSON bytes to a valueWriter. +type bytesWriter interface { + writeValueBytes(t Type, b []byte) error +} diff --git a/copier_test.go b/copier_test.go new file mode 100644 index 0000000..e99e792 --- /dev/null +++ b/copier_test.go @@ -0,0 +1,528 @@ +// Copyright (C) MongoDB, Inc. 2017-present. +// +// Licensed under the Apache License, Version 2.0 (the "License"); you may +// not use this file except in compliance with the License. You may obtain +// a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 + +package bson + +import ( + "bytes" + "errors" + "fmt" + "testing" + + "gitea.psichedelico.com/go/bson/internal/assert" + "gitea.psichedelico.com/go/bson/x/bsonx/bsoncore" +) + +func TestCopier(t *testing.T) { + t.Run("CopyDocument", func(t *testing.T) { + t.Run("ReadDocument Error", func(t *testing.T) { + want := errors.New("ReadDocumentError") + src := &TestValueReaderWriter{t: t, err: want, errAfter: llvrwReadDocument} + got := copyDocument(nil, src) + if !assert.CompareErrors(got, want) { + t.Errorf("Did not receive correct error. got %v; want %v", got, want) + } + }) + t.Run("WriteDocument Error", func(t *testing.T) { + want := errors.New("WriteDocumentError") + src := &TestValueReaderWriter{} + dst := &TestValueReaderWriter{t: t, err: want, errAfter: llvrwWriteDocument} + got := copyDocument(dst, src) + if !assert.CompareErrors(got, want) { + t.Errorf("Did not receive correct error. got %v; want %v", got, want) + } + }) + t.Run("success", func(t *testing.T) { + idx, doc := bsoncore.AppendDocumentStart(nil) + doc = bsoncore.AppendStringElement(doc, "Hello", "world") + doc, err := bsoncore.AppendDocumentEnd(doc, idx) + noerr(t, err) + src := newDocumentReader(bytes.NewReader(doc)) + dst := newValueWriterFromSlice(make([]byte, 0)) + want := doc + err = copyDocument(dst, src) + noerr(t, err) + got := dst.buf + if !bytes.Equal(got, want) { + t.Errorf("Bytes are not equal. got %v; want %v", got, want) + } + }) + }) + t.Run("copyArray", func(t *testing.T) { + t.Run("ReadArray Error", func(t *testing.T) { + want := errors.New("ReadArrayError") + src := &TestValueReaderWriter{t: t, err: want, errAfter: llvrwReadArray} + got := copyArray(nil, src) + if !assert.CompareErrors(got, want) { + t.Errorf("Did not receive correct error. got %v; want %v", got, want) + } + }) + t.Run("WriteArray Error", func(t *testing.T) { + want := errors.New("WriteArrayError") + src := &TestValueReaderWriter{} + dst := &TestValueReaderWriter{t: t, err: want, errAfter: llvrwWriteArray} + got := copyArray(dst, src) + if !assert.CompareErrors(got, want) { + t.Errorf("Did not receive correct error. 
got %v; want %v", got, want) + } + }) + t.Run("success", func(t *testing.T) { + idx, doc := bsoncore.AppendDocumentStart(nil) + aidx, doc := bsoncore.AppendArrayElementStart(doc, "foo") + doc = bsoncore.AppendStringElement(doc, "0", "Hello, world!") + doc, err := bsoncore.AppendArrayEnd(doc, aidx) + noerr(t, err) + doc, err = bsoncore.AppendDocumentEnd(doc, idx) + noerr(t, err) + src := newDocumentReader(bytes.NewReader(doc)) + + _, err = src.ReadDocument() + noerr(t, err) + _, _, err = src.ReadElement() + noerr(t, err) + + dst := newValueWriterFromSlice(make([]byte, 0)) + _, err = dst.WriteDocument() + noerr(t, err) + _, err = dst.WriteDocumentElement("foo") + noerr(t, err) + want := doc + + err = copyArray(dst, src) + noerr(t, err) + + err = dst.WriteDocumentEnd() + noerr(t, err) + + got := dst.buf + if !bytes.Equal(got, want) { + t.Errorf("Bytes are not equal. got %v; want %v", got, want) + } + }) + }) + t.Run("CopyValue", func(t *testing.T) { + testCases := []struct { + name string + dst *TestValueReaderWriter + src *TestValueReaderWriter + err error + }{ + { + "Double/src/error", + &TestValueReaderWriter{}, + &TestValueReaderWriter{bsontype: TypeDouble, err: errors.New("1"), errAfter: llvrwReadDouble}, + errors.New("1"), + }, + { + "Double/dst/error", + &TestValueReaderWriter{bsontype: TypeDouble, err: errors.New("2"), errAfter: llvrwWriteDouble}, + &TestValueReaderWriter{bsontype: TypeDouble, readval: float64(3.14159)}, + errors.New("2"), + }, + { + "String/src/error", + &TestValueReaderWriter{}, + &TestValueReaderWriter{bsontype: TypeString, err: errors.New("1"), errAfter: llvrwReadString}, + errors.New("1"), + }, + { + "String/dst/error", + &TestValueReaderWriter{bsontype: TypeString, err: errors.New("2"), errAfter: llvrwWriteString}, + &TestValueReaderWriter{bsontype: TypeString, readval: "hello, world"}, + errors.New("2"), + }, + { + "Document/src/error", + &TestValueReaderWriter{}, + &TestValueReaderWriter{bsontype: TypeEmbeddedDocument, err: errors.New("1"), errAfter: llvrwReadDocument}, + errors.New("1"), + }, + { + "Array/dst/error", + &TestValueReaderWriter{}, + &TestValueReaderWriter{bsontype: TypeArray, err: errors.New("2"), errAfter: llvrwReadArray}, + errors.New("2"), + }, + { + "Binary/src/error", + &TestValueReaderWriter{}, + &TestValueReaderWriter{bsontype: TypeBinary, err: errors.New("1"), errAfter: llvrwReadBinary}, + errors.New("1"), + }, + { + "Binary/dst/error", + &TestValueReaderWriter{bsontype: TypeBinary, err: errors.New("2"), errAfter: llvrwWriteBinaryWithSubtype}, + &TestValueReaderWriter{ + bsontype: TypeBinary, + readval: bsoncore.Value{ + Type: bsoncore.TypeBinary, + Data: []byte{0x03, 0x00, 0x00, 0x00, 0xFF, 0x01, 0x02, 0x03}, + }, + }, + errors.New("2"), + }, + { + "Undefined/src/error", + &TestValueReaderWriter{}, + &TestValueReaderWriter{bsontype: TypeUndefined, err: errors.New("1"), errAfter: llvrwReadUndefined}, + errors.New("1"), + }, + { + "Undefined/dst/error", + &TestValueReaderWriter{bsontype: TypeUndefined, err: errors.New("2"), errAfter: llvrwWriteUndefined}, + &TestValueReaderWriter{bsontype: TypeUndefined}, + errors.New("2"), + }, + { + "ObjectID/src/error", + &TestValueReaderWriter{}, + &TestValueReaderWriter{bsontype: TypeObjectID, err: errors.New("1"), errAfter: llvrwReadObjectID}, + errors.New("1"), + }, + { + "ObjectID/dst/error", + &TestValueReaderWriter{bsontype: TypeObjectID, err: errors.New("2"), errAfter: llvrwWriteObjectID}, + &TestValueReaderWriter{bsontype: TypeObjectID, readval: ObjectID{0x01, 0x02, 0x03}}, + errors.New("2"), 
+ }, + { + "Boolean/src/error", + &TestValueReaderWriter{}, + &TestValueReaderWriter{bsontype: TypeBoolean, err: errors.New("1"), errAfter: llvrwReadBoolean}, + errors.New("1"), + }, + { + "Boolean/dst/error", + &TestValueReaderWriter{bsontype: TypeBoolean, err: errors.New("2"), errAfter: llvrwWriteBoolean}, + &TestValueReaderWriter{bsontype: TypeBoolean, readval: bool(true)}, + errors.New("2"), + }, + { + "DateTime/src/error", + &TestValueReaderWriter{}, + &TestValueReaderWriter{bsontype: TypeDateTime, err: errors.New("1"), errAfter: llvrwReadDateTime}, + errors.New("1"), + }, + { + "DateTime/dst/error", + &TestValueReaderWriter{bsontype: TypeDateTime, err: errors.New("2"), errAfter: llvrwWriteDateTime}, + &TestValueReaderWriter{bsontype: TypeDateTime, readval: int64(1234567890)}, + errors.New("2"), + }, + { + "Null/src/error", + &TestValueReaderWriter{}, + &TestValueReaderWriter{bsontype: TypeNull, err: errors.New("1"), errAfter: llvrwReadNull}, + errors.New("1"), + }, + { + "Null/dst/error", + &TestValueReaderWriter{bsontype: TypeNull, err: errors.New("2"), errAfter: llvrwWriteNull}, + &TestValueReaderWriter{bsontype: TypeNull}, + errors.New("2"), + }, + { + "Regex/src/error", + &TestValueReaderWriter{}, + &TestValueReaderWriter{bsontype: TypeRegex, err: errors.New("1"), errAfter: llvrwReadRegex}, + errors.New("1"), + }, + { + "Regex/dst/error", + &TestValueReaderWriter{bsontype: TypeRegex, err: errors.New("2"), errAfter: llvrwWriteRegex}, + &TestValueReaderWriter{ + bsontype: TypeRegex, + readval: bsoncore.Value{ + Type: bsoncore.TypeRegex, + Data: bsoncore.AppendRegex(nil, "hello", "world"), + }, + }, + errors.New("2"), + }, + { + "DBPointer/src/error", + &TestValueReaderWriter{}, + &TestValueReaderWriter{bsontype: TypeDBPointer, err: errors.New("1"), errAfter: llvrwReadDBPointer}, + errors.New("1"), + }, + { + "DBPointer/dst/error", + &TestValueReaderWriter{bsontype: TypeDBPointer, err: errors.New("2"), errAfter: llvrwWriteDBPointer}, + &TestValueReaderWriter{ + bsontype: TypeDBPointer, + readval: bsoncore.Value{ + Type: bsoncore.TypeDBPointer, + Data: bsoncore.AppendDBPointer(nil, "foo", ObjectID{0x01, 0x02, 0x03}), + }, + }, + errors.New("2"), + }, + { + "Javascript/src/error", + &TestValueReaderWriter{}, + &TestValueReaderWriter{bsontype: TypeJavaScript, err: errors.New("1"), errAfter: llvrwReadJavascript}, + errors.New("1"), + }, + { + "Javascript/dst/error", + &TestValueReaderWriter{bsontype: TypeJavaScript, err: errors.New("2"), errAfter: llvrwWriteJavascript}, + &TestValueReaderWriter{bsontype: TypeJavaScript, readval: "hello, world"}, + errors.New("2"), + }, + { + "Symbol/src/error", + &TestValueReaderWriter{}, + &TestValueReaderWriter{bsontype: TypeSymbol, err: errors.New("1"), errAfter: llvrwReadSymbol}, + errors.New("1"), + }, + { + "Symbol/dst/error", + &TestValueReaderWriter{bsontype: TypeSymbol, err: errors.New("2"), errAfter: llvrwWriteSymbol}, + &TestValueReaderWriter{ + bsontype: TypeSymbol, + readval: bsoncore.Value{ + Type: bsoncore.TypeSymbol, + Data: bsoncore.AppendSymbol(nil, "hello, world"), + }, + }, + errors.New("2"), + }, + { + "CodeWithScope/src/error", + &TestValueReaderWriter{}, + &TestValueReaderWriter{bsontype: TypeCodeWithScope, err: errors.New("1"), errAfter: llvrwReadCodeWithScope}, + errors.New("1"), + }, + { + "CodeWithScope/dst/error", + &TestValueReaderWriter{bsontype: TypeCodeWithScope, err: errors.New("2"), errAfter: llvrwWriteCodeWithScope}, + &TestValueReaderWriter{bsontype: TypeCodeWithScope}, + errors.New("2"), + }, + { + 
"CodeWithScope/dst/copyDocumentCore error", + &TestValueReaderWriter{err: errors.New("3"), errAfter: llvrwWriteDocumentElement}, + &TestValueReaderWriter{bsontype: TypeCodeWithScope}, + errors.New("3"), + }, + { + "Int32/src/error", + &TestValueReaderWriter{}, + &TestValueReaderWriter{bsontype: TypeInt32, err: errors.New("1"), errAfter: llvrwReadInt32}, + errors.New("1"), + }, + { + "Int32/dst/error", + &TestValueReaderWriter{bsontype: TypeInt32, err: errors.New("2"), errAfter: llvrwWriteInt32}, + &TestValueReaderWriter{bsontype: TypeInt32, readval: int32(12345)}, + errors.New("2"), + }, + { + "Timestamp/src/error", + &TestValueReaderWriter{}, + &TestValueReaderWriter{bsontype: TypeTimestamp, err: errors.New("1"), errAfter: llvrwReadTimestamp}, + errors.New("1"), + }, + { + "Timestamp/dst/error", + &TestValueReaderWriter{bsontype: TypeTimestamp, err: errors.New("2"), errAfter: llvrwWriteTimestamp}, + &TestValueReaderWriter{ + bsontype: TypeTimestamp, + readval: bsoncore.Value{ + Type: bsoncore.TypeTimestamp, + Data: bsoncore.AppendTimestamp(nil, 12345, 67890), + }, + }, + errors.New("2"), + }, + { + "Int64/src/error", + &TestValueReaderWriter{}, + &TestValueReaderWriter{bsontype: TypeInt64, err: errors.New("1"), errAfter: llvrwReadInt64}, + errors.New("1"), + }, + { + "Int64/dst/error", + &TestValueReaderWriter{bsontype: TypeInt64, err: errors.New("2"), errAfter: llvrwWriteInt64}, + &TestValueReaderWriter{bsontype: TypeInt64, readval: int64(1234567890)}, + errors.New("2"), + }, + { + "Decimal128/src/error", + &TestValueReaderWriter{}, + &TestValueReaderWriter{bsontype: TypeDecimal128, err: errors.New("1"), errAfter: llvrwReadDecimal128}, + errors.New("1"), + }, + { + "Decimal128/dst/error", + &TestValueReaderWriter{bsontype: TypeDecimal128, err: errors.New("2"), errAfter: llvrwWriteDecimal128}, + &TestValueReaderWriter{bsontype: TypeDecimal128, readval: NewDecimal128(12345, 67890)}, + errors.New("2"), + }, + { + "MinKey/src/error", + &TestValueReaderWriter{}, + &TestValueReaderWriter{bsontype: TypeMinKey, err: errors.New("1"), errAfter: llvrwReadMinKey}, + errors.New("1"), + }, + { + "MinKey/dst/error", + &TestValueReaderWriter{bsontype: TypeMinKey, err: errors.New("2"), errAfter: llvrwWriteMinKey}, + &TestValueReaderWriter{bsontype: TypeMinKey}, + errors.New("2"), + }, + { + "MaxKey/src/error", + &TestValueReaderWriter{}, + &TestValueReaderWriter{bsontype: TypeMaxKey, err: errors.New("1"), errAfter: llvrwReadMaxKey}, + errors.New("1"), + }, + { + "MaxKey/dst/error", + &TestValueReaderWriter{bsontype: TypeMaxKey, err: errors.New("2"), errAfter: llvrwWriteMaxKey}, + &TestValueReaderWriter{bsontype: TypeMaxKey}, + errors.New("2"), + }, + { + "Unknown BSON type error", + &TestValueReaderWriter{}, + &TestValueReaderWriter{}, + fmt.Errorf("cannot copy unknown BSON type %s", Type(0)), + }, + } + + for _, tc := range testCases { + t.Run(tc.name, func(t *testing.T) { + tc.dst.t, tc.src.t = t, t + err := copyValue(tc.dst, tc.src) + if !assert.CompareErrors(err, tc.err) { + t.Errorf("Did not receive expected error. 
got %v; want %v", err, tc.err) + } + }) + } + }) + t.Run("CopyValueFromBytes", func(t *testing.T) { + t.Run("BytesWriter", func(t *testing.T) { + vw := newValueWriterFromSlice(make([]byte, 0)) + _, err := vw.WriteDocument() + noerr(t, err) + _, err = vw.WriteDocumentElement("foo") + noerr(t, err) + err = copyValueFromBytes(vw, TypeString, bsoncore.AppendString(nil, "bar")) + noerr(t, err) + err = vw.WriteDocumentEnd() + noerr(t, err) + var idx int32 + want, err := bsoncore.AppendDocumentEnd( + bsoncore.AppendStringElement( + bsoncore.AppendDocumentStartInline(nil, &idx), + "foo", "bar", + ), + idx, + ) + noerr(t, err) + got := vw.buf + if !bytes.Equal(got, want) { + t.Errorf("Bytes are not equal. got %v; want %v", got, want) + } + }) + t.Run("Non BytesWriter", func(t *testing.T) { + llvrw := &TestValueReaderWriter{t: t} + err := copyValueFromBytes(llvrw, TypeString, bsoncore.AppendString(nil, "bar")) + noerr(t, err) + got, want := llvrw.invoked, llvrwWriteString + if got != want { + t.Errorf("Incorrect method invoked on llvrw. got %v; want %v", got, want) + } + }) + }) + t.Run("CopyValueToBytes", func(t *testing.T) { + t.Run("BytesReader", func(t *testing.T) { + var idx int32 + b, err := bsoncore.AppendDocumentEnd( + bsoncore.AppendStringElement( + bsoncore.AppendDocumentStartInline(nil, &idx), + "hello", "world", + ), + idx, + ) + noerr(t, err) + vr := newDocumentReader(bytes.NewReader(b)) + _, err = vr.ReadDocument() + noerr(t, err) + _, _, err = vr.ReadElement() + noerr(t, err) + btype, got, err := copyValueToBytes(vr) + noerr(t, err) + want := bsoncore.AppendString(nil, "world") + if btype != TypeString { + t.Errorf("Incorrect type returned. got %v; want %v", btype, TypeString) + } + if !bytes.Equal(got, want) { + t.Errorf("Bytes do not match. got %v; want %v", got, want) + } + }) + t.Run("Non BytesReader", func(t *testing.T) { + llvrw := &TestValueReaderWriter{t: t, bsontype: TypeString, readval: "Hello, world!"} + btype, got, err := copyValueToBytes(llvrw) + noerr(t, err) + want := bsoncore.AppendString(nil, "Hello, world!") + if btype != TypeString { + t.Errorf("Incorrect type returned. got %v; want %v", btype, TypeString) + } + if !bytes.Equal(got, want) { + t.Errorf("Bytes do not match. got %v; want %v", got, want) + } + }) + }) + t.Run("AppendValueBytes", func(t *testing.T) { + t.Run("BytesReader", func(t *testing.T) { + var idx int32 + b, err := bsoncore.AppendDocumentEnd( + bsoncore.AppendStringElement( + bsoncore.AppendDocumentStartInline(nil, &idx), + "hello", "world", + ), + idx, + ) + noerr(t, err) + vr := newDocumentReader(bytes.NewReader(b)) + _, err = vr.ReadDocument() + noerr(t, err) + _, _, err = vr.ReadElement() + noerr(t, err) + btype, got, err := copyValueToBytes(vr) + noerr(t, err) + want := bsoncore.AppendString(nil, "world") + if btype != TypeString { + t.Errorf("Incorrect type returned. got %v; want %v", btype, TypeString) + } + if !bytes.Equal(got, want) { + t.Errorf("Bytes do not match. got %v; want %v", got, want) + } + }) + t.Run("Non BytesReader", func(t *testing.T) { + llvrw := &TestValueReaderWriter{t: t, bsontype: TypeString, readval: "Hello, world!"} + btype, got, err := copyValueToBytes(llvrw) + noerr(t, err) + want := bsoncore.AppendString(nil, "Hello, world!") + if btype != TypeString { + t.Errorf("Incorrect type returned. got %v; want %v", btype, TypeString) + } + if !bytes.Equal(got, want) { + t.Errorf("Bytes do not match. 
got %v; want %v", got, want) + } + }) + t.Run("CopyValue error", func(t *testing.T) { + want := errors.New("CopyValue error") + llvrw := &TestValueReaderWriter{t: t, bsontype: TypeString, err: want, errAfter: llvrwReadString} + _, _, got := copyValueToBytes(llvrw) + if !assert.CompareErrors(got, want) { + t.Errorf("Errors do not match. got %v; want %v", got, want) + } + }) + }) +} diff --git a/decimal.go b/decimal.go new file mode 100644 index 0000000..028b8fc --- /dev/null +++ b/decimal.go @@ -0,0 +1,339 @@ +// Copyright (C) MongoDB, Inc. 2017-present. +// +// Licensed under the Apache License, Version 2.0 (the "License"); you may +// not use this file except in compliance with the License. You may obtain +// a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 +// +// Based on gopkg.in/mgo.v2/bson by Gustavo Niemeyer +// See THIRD-PARTY-NOTICES for original license terms. + +package bson + +import ( + "encoding/json" + "errors" + "fmt" + "math/big" + "regexp" + "strconv" + "strings" + + "gitea.psichedelico.com/go/bson/internal/decimal128" +) + +// These constants are the maximum and minimum values for the exponent field in a decimal128 value. +const ( + MaxDecimal128Exp = 6111 + MinDecimal128Exp = -6176 +) + +// These errors are returned when an invalid value is parsed as a big.Int. +var ( + ErrParseNaN = errors.New("cannot parse NaN as a *big.Int") + ErrParseInf = errors.New("cannot parse Infinity as a *big.Int") + ErrParseNegInf = errors.New("cannot parse -Infinity as a *big.Int") +) + +// Decimal128 holds decimal128 BSON values. +type Decimal128 struct { + h, l uint64 +} + +// NewDecimal128 creates a Decimal128 using the provide high and low uint64s. +func NewDecimal128(h, l uint64) Decimal128 { + return Decimal128{h: h, l: l} +} + +// GetBytes returns the underlying bytes of the BSON decimal value as two uint64 values. The first +// contains the most first 8 bytes of the value and the second contains the latter. +func (d Decimal128) GetBytes() (uint64, uint64) { + return d.h, d.l +} + +// String returns a string representation of the decimal value. +func (d Decimal128) String() string { + return decimal128.String(d.h, d.l) +} + +// BigInt returns significand as big.Int and exponent, bi * 10 ^ exp. +func (d Decimal128) BigInt() (*big.Int, int, error) { + high, low := d.GetBytes() + posSign := high>>63&1 == 0 // positive sign + + switch high >> 58 & (1<<5 - 1) { + case 0x1F: + return nil, 0, ErrParseNaN + case 0x1E: + if posSign { + return nil, 0, ErrParseInf + } + return nil, 0, ErrParseNegInf + } + + var exp int + if high>>61&3 == 3 { + // Bits: 1*sign 2*ignored 14*exponent 111*significand. + // Implicit 0b100 prefix in significand. + exp = int(high >> 47 & (1<<14 - 1)) + // Spec says all of these values are out of range. + high, low = 0, 0 + } else { + // Bits: 1*sign 14*exponent 113*significand + exp = int(high >> 49 & (1<<14 - 1)) + high &= (1<<49 - 1) + } + exp += MinDecimal128Exp + + // Would be handled by the logic below, but that's trivial and common. + if high == 0 && low == 0 && exp == 0 { + return new(big.Int), 0, nil + } + + bi := big.NewInt(0) + const host32bit = ^uint(0)>>32 == 0 + if host32bit { + bi.SetBits([]big.Word{big.Word(low), big.Word(low >> 32), big.Word(high), big.Word(high >> 32)}) + } else { + bi.SetBits([]big.Word{big.Word(low), big.Word(high)}) + } + + if !posSign { + return bi.Neg(bi), exp, nil + } + return bi, exp, nil +} + +// IsNaN returns whether d is NaN. 
+func (d Decimal128) IsNaN() bool { + return d.h>>58&(1<<5-1) == 0x1F +} + +// IsInf returns: +// +// +1 d == Infinity +// 0 other case +// -1 d == -Infinity +func (d Decimal128) IsInf() int { + if d.h>>58&(1<<5-1) != 0x1E { + return 0 + } + + if d.h>>63&1 == 0 { + return 1 + } + return -1 +} + +// IsZero returns true if d is the empty Decimal128. +func (d Decimal128) IsZero() bool { + return d.h == 0 && d.l == 0 +} + +// MarshalJSON returns Decimal128 as a string. +func (d Decimal128) MarshalJSON() ([]byte, error) { + return json.Marshal(d.String()) +} + +// UnmarshalJSON creates a Decimal128 from a JSON string, an extended JSON $numberDecimal value, or the string +// "null". If b is a JSON string or extended JSON value, d will have the value of that string, and if b is "null", d will +// be unchanged. +func (d *Decimal128) UnmarshalJSON(b []byte) error { + // Ignore "null" to keep parity with the standard library. Decoding a JSON null into a non-pointer Decimal128 field + // will leave the field unchanged. For pointer values, encoding/json will set the pointer to nil and will not + // enter the UnmarshalJSON hook. + if string(b) == "null" { + return nil + } + + var res interface{} + err := json.Unmarshal(b, &res) + if err != nil { + return err + } + str, ok := res.(string) + + // Extended JSON + if !ok { + m, ok := res.(map[string]interface{}) + if !ok { + return errors.New("not an extended JSON Decimal128: expected document") + } + d128, ok := m["$numberDecimal"] + if !ok { + return errors.New("not an extended JSON Decimal128: expected key $numberDecimal") + } + str, ok = d128.(string) + if !ok { + return errors.New("not an extended JSON Decimal128: expected decimal to be string") + } + } + + *d, err = ParseDecimal128(str) + return err +} + +var dNaN = Decimal128{0x1F << 58, 0} +var dPosInf = Decimal128{0x1E << 58, 0} +var dNegInf = Decimal128{0x3E << 58, 0} + +func dErr(s string) (Decimal128, error) { + return dNaN, fmt.Errorf("cannot parse %q as a decimal128", s) +} + +// match scientific notation number, example -10.15e-18 +var normalNumber = regexp.MustCompile(`^(?P[-+]?\d*)?(?:\.(?P\d*))?(?:[Ee](?P[-+]?\d+))?$`) + +// ParseDecimal128 takes the given string and attempts to parse it into a valid +// Decimal128 value. +func ParseDecimal128(s string) (Decimal128, error) { + if s == "" { + return dErr(s) + } + + matches := normalNumber.FindStringSubmatch(s) + if len(matches) == 0 { + orig := s + neg := s[0] == '-' + if neg || s[0] == '+' { + s = s[1:] + } + + if s == "NaN" || s == "nan" || strings.EqualFold(s, "nan") { + return dNaN, nil + } + if s == "Inf" || s == "inf" || strings.EqualFold(s, "inf") || strings.EqualFold(s, "infinity") { + if neg { + return dNegInf, nil + } + return dPosInf, nil + } + return dErr(orig) + } + + intPart := matches[1] + decPart := matches[2] + expPart := matches[3] + + var err error + exp := 0 + if expPart != "" { + exp, err = strconv.Atoi(expPart) + if err != nil { + return dErr(s) + } + } + if decPart != "" { + exp -= len(decPart) + } + + if len(strings.Trim(intPart+decPart, "-0")) > 35 { + return dErr(s) + } + + // Parse the significand (i.e. the non-exponent part) as a big.Int. 
+ bi, ok := new(big.Int).SetString(intPart+decPart, 10) + if !ok { + return dErr(s) + } + + d, ok := ParseDecimal128FromBigInt(bi, exp) + if !ok { + return dErr(s) + } + + if bi.Sign() == 0 && s[0] == '-' { + d.h |= 1 << 63 + } + + return d, nil +} + +var ( + ten = big.NewInt(10) + zero = new(big.Int) + + maxS, _ = new(big.Int).SetString("9999999999999999999999999999999999", 10) +) + +// ParseDecimal128FromBigInt attempts to parse the given significand and exponent into a valid Decimal128 value. +func ParseDecimal128FromBigInt(bi *big.Int, exp int) (Decimal128, bool) { + // copy + bi = new(big.Int).Set(bi) + + q := new(big.Int) + r := new(big.Int) + + // If the significand is zero, the logical value will always be zero, independent of the + // exponent. However, the loops for handling out-of-range exponent values below may be extremely + // slow for zero values because the significand never changes. Limit the exponent value to the + // supported range here to prevent entering the loops below. + if bi.Cmp(zero) == 0 { + if exp > MaxDecimal128Exp { + exp = MaxDecimal128Exp + } + if exp < MinDecimal128Exp { + exp = MinDecimal128Exp + } + } + + for bigIntCmpAbs(bi, maxS) == 1 { + bi, _ = q.QuoRem(bi, ten, r) + if r.Cmp(zero) != 0 { + return Decimal128{}, false + } + exp++ + if exp > MaxDecimal128Exp { + return Decimal128{}, false + } + } + + for exp < MinDecimal128Exp { + // Subnormal. + bi, _ = q.QuoRem(bi, ten, r) + if r.Cmp(zero) != 0 { + return Decimal128{}, false + } + exp++ + } + for exp > MaxDecimal128Exp { + // Clamped. + bi.Mul(bi, ten) + if bigIntCmpAbs(bi, maxS) == 1 { + return Decimal128{}, false + } + exp-- + } + + b := bi.Bytes() + var h, l uint64 + for i := 0; i < len(b); i++ { + if i < len(b)-8 { + h = h<<8 | uint64(b[i]) + continue + } + l = l<<8 | uint64(b[i]) + } + + h |= uint64(exp-MinDecimal128Exp) & uint64(1<<14-1) << 49 + if bi.Sign() == -1 { + h |= 1 << 63 + } + + return Decimal128{h: h, l: l}, true +} + +// bigIntCmpAbs computes big.Int.Cmp(absoluteValue(x), absoluteValue(y)). +func bigIntCmpAbs(x, y *big.Int) int { + xAbs := bigIntAbsValue(x) + yAbs := bigIntAbsValue(y) + return xAbs.Cmp(yAbs) +} + +// bigIntAbsValue returns a big.Int containing the absolute value of b. +// If b is already a non-negative number, it is returned without any changes or copies. +func bigIntAbsValue(b *big.Int) *big.Int { + if b.Sign() >= 0 { + return b // already positive + } + return new(big.Int).Abs(b) +} diff --git a/decimal_test.go b/decimal_test.go new file mode 100644 index 0000000..0966403 --- /dev/null +++ b/decimal_test.go @@ -0,0 +1,236 @@ +// Copyright (C) MongoDB, Inc. 2022-present. +// +// Licensed under the Apache License, Version 2.0 (the "License"); you may +// not use this file except in compliance with the License. 
You may obtain +// a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 + +package bson + +import ( + "encoding/json" + "fmt" + "math/big" + "testing" + + "gitea.psichedelico.com/go/bson/internal/assert" + "gitea.psichedelico.com/go/bson/internal/require" +) + +type bigIntTestCase struct { + s string + + h uint64 + l uint64 + + bi *big.Int + exp int + + remark string +} + +func parseBigInt(s string) *big.Int { + bi, _ := new(big.Int).SetString(s, 10) + return bi +} + +var ( + one = big.NewInt(1) + + biMaxS = new(big.Int).SetBytes([]byte{0x1, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff}) + biNMaxS = new(big.Int).Neg(biMaxS) + + biOverflow = new(big.Int).Add(biMaxS, one) + biNOverflow = new(big.Int).Neg(biOverflow) + + bi12345 = parseBigInt("12345") + biN12345 = parseBigInt("-12345") + + bi9_14 = parseBigInt("90123456789012") + biN9_14 = parseBigInt("-90123456789012") + + bi9_34 = parseBigInt("9999999999999999999999999999999999") + biN9_34 = parseBigInt("-9999999999999999999999999999999999") +) + +var bigIntTestCases = []bigIntTestCase{ + {s: "12345", h: 0x3040000000000000, l: 12345, bi: bi12345}, + {s: "-12345", h: 0xB040000000000000, l: 12345, bi: biN12345}, + + {s: "90123456.789012", h: 0x3034000000000000, l: 90123456789012, bi: bi9_14, exp: -6}, + {s: "-90123456.789012", h: 0xB034000000000000, l: 90123456789012, bi: biN9_14, exp: -6}, + {s: "9.0123456789012E+22", h: 0x3052000000000000, l: 90123456789012, bi: bi9_14, exp: 9}, + {s: "-9.0123456789012E+22", h: 0xB052000000000000, l: 90123456789012, bi: biN9_14, exp: 9}, + {s: "9.0123456789012E-8", h: 0x3016000000000000, l: 90123456789012, bi: bi9_14, exp: -21}, + {s: "-9.0123456789012E-8", h: 0xB016000000000000, l: 90123456789012, bi: biN9_14, exp: -21}, + + {s: "9999999999999999999999999999999999", h: 3477321013416265664, l: 4003012203950112767, bi: bi9_34}, + {s: "-9999999999999999999999999999999999", h: 12700693050271041472, l: 4003012203950112767, bi: biN9_34}, + {s: "0.9999999999999999999999999999999999", h: 3458180714999941056, l: 4003012203950112767, bi: bi9_34, exp: -34}, + {s: "-0.9999999999999999999999999999999999", h: 12681552751854716864, l: 4003012203950112767, bi: biN9_34, exp: -34}, + {s: "99999999999999999.99999999999999999", h: 3467750864208103360, l: 4003012203950112767, bi: bi9_34, exp: -17}, + {s: "-99999999999999999.99999999999999999", h: 12691122901062879168, l: 4003012203950112767, bi: biN9_34, exp: -17}, + {s: "9.999999999999999999999999999999999E+35", h: 3478446913323108288, l: 4003012203950112767, bi: bi9_34, exp: 2}, + {s: "-9.999999999999999999999999999999999E+35", h: 12701818950177884096, l: 4003012203950112767, bi: biN9_34, exp: 2}, + {s: "9.999999999999999999999999999999999E+40", h: 3481261663090214848, l: 4003012203950112767, bi: bi9_34, exp: 7}, + {s: "-9.999999999999999999999999999999999E+40", h: 12704633699944990656, l: 4003012203950112767, bi: biN9_34, exp: 7}, + {s: "99999999999999999999999999999.99999", h: 3474506263649159104, l: 4003012203950112767, bi: bi9_34, exp: -5}, + {s: "-99999999999999999999999999999.99999", h: 12697878300503934912, l: 4003012203950112767, bi: biN9_34, exp: -5}, + + {s: "1.038459371706965525706099265844019E-6143", remark: "subnormal", h: 0x333333333333, l: 0x3333333333333333, bi: parseBigInt("10384593717069655257060992658440190"), exp: MinDecimal128Exp - 1}, + {s: "-1.038459371706965525706099265844019E-6143", remark: "subnormal", h: 0x8000333333333333, l: 0x3333333333333333, bi: 
parseBigInt("-10384593717069655257060992658440190"), exp: MinDecimal128Exp - 1}, + + {s: "rounding overflow 1", remark: "overflow", bi: parseBigInt("103845937170696552570609926584401910"), exp: MaxDecimal128Exp}, + {s: "rounding overflow 2", remark: "overflow", bi: parseBigInt("103845937170696552570609926584401910"), exp: MaxDecimal128Exp}, + + {s: "subnormal overflow 1", remark: "overflow", bi: biMaxS, exp: MinDecimal128Exp - 1}, + {s: "subnormal overflow 2", remark: "overflow", bi: biNMaxS, exp: MinDecimal128Exp - 1}, + + {s: "clamped overflow 1", remark: "overflow", bi: biMaxS, exp: MaxDecimal128Exp + 1}, + {s: "clamped overflow 2", remark: "overflow", bi: biNMaxS, exp: MaxDecimal128Exp + 1}, + + {s: "biMaxS+1 overflow", remark: "overflow", bi: biOverflow, exp: MaxDecimal128Exp}, + {s: "biNMaxS-1 overflow", remark: "overflow", bi: biNOverflow, exp: MaxDecimal128Exp}, + + {s: "NaN", h: 0x7c00000000000000, l: 0, remark: "NaN"}, + {s: "Infinity", h: 0x7800000000000000, l: 0, remark: "Infinity"}, + {s: "-Infinity", h: 0xf800000000000000, l: 0, remark: "-Infinity"}, +} + +func TestDecimal128_BigInt(t *testing.T) { + for _, c := range bigIntTestCases { + t.Run(c.s, func(t *testing.T) { + switch c.remark { + case "NaN", "Infinity", "-Infinity": + d128 := NewDecimal128(c.h, c.l) + _, _, err := d128.BigInt() + require.Error(t, err, "case %s", c.s) + case "": + d128 := NewDecimal128(c.h, c.l) + bi, e, err := d128.BigInt() + require.NoError(t, err, "case %s", c.s) + require.Equal(t, 0, c.bi.Cmp(bi), "case %s e:%s a:%s", c.s, c.bi.String(), bi.String()) + require.Equal(t, c.exp, e, "case %s", c.s, d128.String()) + } + }) + } +} + +func TestParseDecimal128FromBigInt(t *testing.T) { + for _, c := range bigIntTestCases { + switch c.remark { + case "overflow": + d128, ok := ParseDecimal128FromBigInt(c.bi, c.exp) + require.Equal(t, false, ok, "case %s %s", c.s, d128.String(), c.remark) + case "", "rounding", "subnormal", "clamped": + d128, ok := ParseDecimal128FromBigInt(c.bi, c.exp) + require.Equal(t, true, ok, "case %s", c.s) + require.Equal(t, c.s, d128.String(), "case %s", c.s) + + require.Equal(t, c.h, d128.h, "case %s", c.s, d128.l) + require.Equal(t, c.l, d128.l, "case %s", c.s, d128.h) + } + } +} + +func TestParseDecimal128(t *testing.T) { + cases := make([]bigIntTestCase, 0, len(bigIntTestCases)) + cases = append(cases, bigIntTestCases...) + cases = append(cases, + bigIntTestCase{s: "-0001231.453454000000565600000000E-21", h: 0xafe6000003faa269, l: 0x81cfeceaabdb1800}, + bigIntTestCase{s: "12345E+21", h: 0x306a000000000000, l: 12345}, + bigIntTestCase{s: "0.10000000000000000000000000000000000000000001", remark: "parse fail"}, + bigIntTestCase{s: ".125e1", h: 0x303c000000000000, l: 125}, + bigIntTestCase{s: ".125", h: 0x303a000000000000, l: 125}, + // Test that parsing negative zero returns negative zero with a zero exponent. + bigIntTestCase{s: "-0", h: 0xb040000000000000, l: 0}, + // Test that parsing negative zero with an in-range exponent returns negative zero and + // preserves the specified exponent value. + bigIntTestCase{s: "-0E999", h: 0xb80e000000000000, l: 0}, + // Test that parsing zero with an out-of-range positive exponent returns zero with the + // maximum positive exponent (i.e. 0e+6111). + bigIntTestCase{s: "0E2000000000000", h: 0x5ffe000000000000, l: 0}, + // Test that parsing zero with an out-of-range negative exponent returns zero with the + // minimum negative exponent (i.e. 0e-6176). 
+ bigIntTestCase{s: "-0E2000000000000", h: 0xdffe000000000000, l: 0}, + bigIntTestCase{s: "", remark: "parse fail"}) + + for _, c := range cases { + t.Run(c.s, func(t *testing.T) { + switch c.remark { + case "overflow", "parse fail": + _, err := ParseDecimal128(c.s) + assert.Error(t, err, "ParseDecimal128(%q) should return an error", c.s) + default: + got, err := ParseDecimal128(c.s) + require.NoError(t, err, "ParseDecimal128(%q) error", c.s) + + want := Decimal128{h: c.h, l: c.l} + // Decimal128 doesn't implement an equality function, so compare the expected + // low/high uint64 values directly. Also print the string representation of each + // number to make debugging failures easier. + assert.Equal(t, want, got, "ParseDecimal128(%q) = %s, want %s", c.s, got, want) + } + }) + } +} + +func TestDecimal128_JSON(t *testing.T) { + t.Run("roundTrip", func(t *testing.T) { + decimal := NewDecimal128(0x3040000000000000, 12345) + bytes, err := json.Marshal(decimal) + assert.Nil(t, err, "json.Marshal error: %v", err) + got := NewDecimal128(0, 0) + err = json.Unmarshal(bytes, &got) + assert.Nil(t, err, "json.Unmarshal error: %v", err) + assert.Equal(t, decimal.h, got.h, "expected h: %v got: %v", decimal.h, got.h) + assert.Equal(t, decimal.l, got.l, "expected l: %v got: %v", decimal.l, got.l) + }) + t.Run("unmarshal extendedJSON", func(t *testing.T) { + want := NewDecimal128(0x3040000000000000, 12345) + extJSON := fmt.Sprintf(`{"$numberDecimal": %q}`, want.String()) + + got := NewDecimal128(0, 0) + err := json.Unmarshal([]byte(extJSON), &got) + assert.Nil(t, err, "json.Unmarshal error: %v", err) + assert.Equal(t, want.h, got.h, "expected h: %v got: %v", want.h, got.h) + assert.Equal(t, want.l, got.l, "expected l: %v got: %v", want.l, got.l) + }) + t.Run("unmarshal null", func(t *testing.T) { + want := NewDecimal128(0, 0) + extJSON := `null` + + got := NewDecimal128(0, 0) + err := json.Unmarshal([]byte(extJSON), &got) + assert.Nil(t, err, "json.Unmarshal error: %v", err) + assert.Equal(t, want.h, got.h, "expected h: %v got: %v", want.h, got.h) + assert.Equal(t, want.l, got.l, "expected l: %v got: %v", want.l, got.l) + }) + t.Run("unmarshal", func(t *testing.T) { + cases := make([]bigIntTestCase, 0, len(bigIntTestCases)) + cases = append(cases, bigIntTestCases...) + cases = append(cases, + bigIntTestCase{s: "-0001231.453454000000565600000000E-21", h: 0xafe6000003faa269, l: 0x81cfeceaabdb1800}, + bigIntTestCase{s: "12345E+21", h: 0x306a000000000000, l: 12345}, + bigIntTestCase{s: "0.10000000000000000000000000000000000000000001", remark: "parse fail"}, + bigIntTestCase{s: ".125e1", h: 0x303c000000000000, l: 125}, + bigIntTestCase{s: ".125", h: 0x303a000000000000, l: 125}) + + for _, c := range cases { + t.Run(c.s, func(t *testing.T) { + input := fmt.Sprintf(`{"foo": %q}`, c.s) + var got map[string]Decimal128 + err := json.Unmarshal([]byte(input), &got) + + switch c.remark { + case "overflow", "parse fail": + assert.NotNil(t, err, "expected Unmarshal error, got nil") + default: + assert.Nil(t, err, "Unmarshal error: %v", err) + gotDecimal := got["foo"] + assert.Equal(t, c.h, gotDecimal.h, "expected h: %v got: %v", c.h, gotDecimal.l) + assert.Equal(t, c.l, gotDecimal.l, "expected l: %v got: %v", c.l, gotDecimal.h) + } + }) + } + }) +} diff --git a/decoder.go b/decoder.go new file mode 100644 index 0000000..2fa9e6f --- /dev/null +++ b/decoder.go @@ -0,0 +1,136 @@ +// Copyright (C) MongoDB, Inc. 2017-present. 
+// +// Licensed under the Apache License, Version 2.0 (the "License"); you may +// not use this file except in compliance with the License. You may obtain +// a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 + +package bson + +import ( + "errors" + "fmt" + "reflect" + "sync" +) + +// ErrDecodeToNil is the error returned when trying to decode to a nil value +var ErrDecodeToNil = errors.New("cannot Decode to nil value") + +// This pool is used to keep the allocations of Decoders down. This is only used for the Marshal* +// methods and is not consumable from outside of this package. The Decoders retrieved from this pool +// must have both Reset and SetRegistry called on them. +var decPool = sync.Pool{ + New: func() interface{} { + return new(Decoder) + }, +} + +// A Decoder reads and decodes BSON documents from a stream. It reads from a ValueReader as +// the source of BSON data. +type Decoder struct { + dc DecodeContext + vr ValueReader +} + +// NewDecoder returns a new decoder that reads from vr. +func NewDecoder(vr ValueReader) *Decoder { + return &Decoder{ + dc: DecodeContext{Registry: defaultRegistry}, + vr: vr, + } +} + +// Decode reads the next BSON document from the stream and decodes it into the +// value pointed to by val. +// +// See [Unmarshal] for details about BSON unmarshaling behavior. +func (d *Decoder) Decode(val interface{}) error { + if unmarshaler, ok := val.(Unmarshaler); ok { + // TODO(skriptble): Reuse a []byte here and use the AppendDocumentBytes method. + buf, err := copyDocumentToBytes(d.vr) + if err != nil { + return err + } + return unmarshaler.UnmarshalBSON(buf) + } + + rval := reflect.ValueOf(val) + switch rval.Kind() { + case reflect.Ptr: + if rval.IsNil() { + return ErrDecodeToNil + } + rval = rval.Elem() + case reflect.Map: + if rval.IsNil() { + return ErrDecodeToNil + } + default: + return fmt.Errorf("argument to Decode must be a pointer or a map, but got %v", rval) + } + decoder, err := d.dc.LookupDecoder(rval.Type()) + if err != nil { + return err + } + + return decoder.DecodeValue(d.dc, d.vr, rval) +} + +// Reset will reset the state of the decoder, using the same *DecodeContext used in +// the original construction but using vr for reading. +func (d *Decoder) Reset(vr ValueReader) { + d.vr = vr +} + +// SetRegistry replaces the current registry of the decoder with r. +func (d *Decoder) SetRegistry(r *Registry) { + d.dc.Registry = r +} + +// DefaultDocumentM causes the Decoder to always unmarshal documents into the bson.M type. This +// behavior is restricted to data typed as "interface{}" or "map[string]interface{}". +func (d *Decoder) DefaultDocumentM() { + d.dc.defaultDocumentType = reflect.TypeOf(M{}) +} + +// AllowTruncatingDoubles causes the Decoder to truncate the fractional part of BSON "double" values +// when attempting to unmarshal them into a Go integer (int, int8, int16, int32, or int64) struct +// field. The truncation logic does not apply to BSON "decimal128" values. +func (d *Decoder) AllowTruncatingDoubles() { + d.dc.truncate = true +} + +// BinaryAsSlice causes the Decoder to unmarshal BSON binary field values that are the "Generic" or +// "Old" BSON binary subtype as a Go byte slice instead of a bson.Binary. +func (d *Decoder) BinaryAsSlice() { + d.dc.binaryAsSlice = true +} + +// ObjectIDAsHexString causes the Decoder to decode object IDs to their hex representation. 
+func (d *Decoder) ObjectIDAsHexString() { + d.dc.objectIDAsHexString = true +} + +// UseJSONStructTags causes the Decoder to fall back to using the "json" struct tag if a "bson" +// struct tag is not specified. +func (d *Decoder) UseJSONStructTags() { + d.dc.useJSONStructTags = true +} + +// UseLocalTimeZone causes the Decoder to unmarshal time.Time values in the local timezone instead +// of the UTC timezone. +func (d *Decoder) UseLocalTimeZone() { + d.dc.useLocalTimeZone = true +} + +// ZeroMaps causes the Decoder to delete any existing values from Go maps in the destination value +// passed to Decode before unmarshaling BSON documents into them. +func (d *Decoder) ZeroMaps() { + d.dc.zeroMaps = true +} + +// ZeroStructs causes the Decoder to delete any existing values from Go structs in the destination +// value passed to Decode before unmarshaling BSON documents into them. +func (d *Decoder) ZeroStructs() { + d.dc.zeroStructs = true +} diff --git a/decoder_example_test.go b/decoder_example_test.go new file mode 100644 index 0000000..cb65161 --- /dev/null +++ b/decoder_example_test.go @@ -0,0 +1,208 @@ +// Copyright (C) MongoDB, Inc. 2023-present. +// +// Licensed under the Apache License, Version 2.0 (the "License"); you may +// not use this file except in compliance with the License. You may obtain +// a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 + +package bson_test + +import ( + "bytes" + "encoding/json" + "errors" + "fmt" + "io" + + "gitea.psichedelico.com/go/bson" +) + +func ExampleDecoder() { + // Marshal a BSON document that contains the name, SKU, and price (in cents) + // of a product. + doc := bson.D{ + {Key: "name", Value: "Cereal Rounds"}, + {Key: "sku", Value: "AB12345"}, + {Key: "price_cents", Value: 399}, + } + data, err := bson.Marshal(doc) + if err != nil { + panic(err) + } + + // Create a Decoder that reads the marshaled BSON document and use it to + // unmarshal the document into a Product struct. + decoder := bson.NewDecoder(bson.NewDocumentReader(bytes.NewReader(data))) + + type Product struct { + Name string `bson:"name"` + SKU string `bson:"sku"` + Price int64 `bson:"price_cents"` + } + + var res Product + err = decoder.Decode(&res) + if err != nil { + panic(err) + } + + fmt.Printf("%+v\n", res) + // Output: {Name:Cereal Rounds SKU:AB12345 Price:399} +} + +func ExampleDecoder_DefaultDocumentM() { + // Marshal a BSON document that contains a city name and a nested document + // with various city properties. + doc := bson.D{ + {Key: "name", Value: "New York"}, + {Key: "properties", Value: bson.D{ + {Key: "state", Value: "NY"}, + {Key: "population", Value: 8_804_190}, + {Key: "elevation", Value: 10}, + }}, + } + data, err := bson.Marshal(doc) + if err != nil { + panic(err) + } + + // Create a Decoder that reads the marshaled BSON document and use it to unmarshal the document + // into a City struct. + decoder := bson.NewDecoder(bson.NewDocumentReader(bytes.NewReader(data))) + + type City struct { + Name string `bson:"name"` + Properties interface{} `bson:"properties"` + } + + // Configure the Decoder to default to decoding BSON documents as the M + // type if the decode destination has no type information. The Properties + // field in the City struct will be decoded as a "M" (i.e. map) instead + // of the default "D". 
+ decoder.DefaultDocumentM() + + var res City + err = decoder.Decode(&res) + if err != nil { + panic(err) + } + + data, err = json.Marshal(res) + if err != nil { + panic(err) + } + fmt.Printf("%+v\n", string(data)) + // Output: {"Name":"New York","Properties":{"elevation":10,"population":8804190,"state":"NY"}} +} + +func ExampleDecoder_UseJSONStructTags() { + // Marshal a BSON document that contains the name, SKU, and price (in cents) + // of a product. + doc := bson.D{ + {Key: "name", Value: "Cereal Rounds"}, + {Key: "sku", Value: "AB12345"}, + {Key: "price_cents", Value: 399}, + } + data, err := bson.Marshal(doc) + if err != nil { + panic(err) + } + + // Create a Decoder that reads the marshaled BSON document and use it to + // unmarshal the document into a Product struct. + decoder := bson.NewDecoder(bson.NewDocumentReader(bytes.NewReader(data))) + + type Product struct { + Name string `json:"name"` + SKU string `json:"sku"` + Price int64 `json:"price_cents"` + } + + // Configure the Decoder to use "json" struct tags when decoding if "bson" + // struct tags are not present. + decoder.UseJSONStructTags() + + var res Product + err = decoder.Decode(&res) + if err != nil { + panic(err) + } + + fmt.Printf("%+v\n", res) + // Output: {Name:Cereal Rounds SKU:AB12345 Price:399} +} + +func ExampleDecoder_extendedJSON() { + // Define an Extended JSON document that contains the name, SKU, and price + // (in cents) of a product. + data := []byte(`{"name":"Cereal Rounds","sku":"AB12345","price_cents":{"$numberLong":"399"}}`) + + // Create a Decoder that reads the Extended JSON document and use it to + // unmarshal the document into a Product struct. + vr, err := bson.NewExtJSONValueReader(bytes.NewReader(data), true) + if err != nil { + panic(err) + } + decoder := bson.NewDecoder(vr) + + type Product struct { + Name string `bson:"name"` + SKU string `bson:"sku"` + Price int64 `bson:"price_cents"` + } + + var res Product + err = decoder.Decode(&res) + if err != nil { + panic(err) + } + + fmt.Printf("%+v\n", res) + // Output: {Name:Cereal Rounds SKU:AB12345 Price:399} +} + +func ExampleDecoder_multipleExtendedJSONDocuments() { + // Define a newline-separated sequence of Extended JSON documents that + // contain X,Y coordinates. + data := []byte(` +{"x":{"$numberInt":"0"},"y":{"$numberInt":"0"}} +{"x":{"$numberInt":"1"},"y":{"$numberInt":"1"}} +{"x":{"$numberInt":"2"},"y":{"$numberInt":"2"}} +{"x":{"$numberInt":"3"},"y":{"$numberInt":"3"}} +{"x":{"$numberInt":"4"},"y":{"$numberInt":"4"}} +`) + + // Create a Decoder that reads the Extended JSON documents and use it to + // unmarshal the documents Coordinate structs. + vr, err := bson.NewExtJSONValueReader(bytes.NewReader(data), true) + if err != nil { + panic(err) + } + decoder := bson.NewDecoder(vr) + + type Coordinate struct { + X int + Y int + } + + // Read and unmarshal each Extended JSON document from the sequence. If + // Decode returns error io.EOF, that means the Decoder has reached the end + // of the input, so break the loop. + for { + var res Coordinate + err = decoder.Decode(&res) + if errors.Is(err, io.EOF) { + break + } + if err != nil { + panic(err) + } + + fmt.Printf("%+v\n", res) + } + // Output: + // {X:0 Y:0} + // {X:1 Y:1} + // {X:2 Y:2} + // {X:3 Y:3} + // {X:4 Y:4} +} diff --git a/decoder_test.go b/decoder_test.go new file mode 100644 index 0000000..1f5bf4f --- /dev/null +++ b/decoder_test.go @@ -0,0 +1,699 @@ +// Copyright (C) MongoDB, Inc. 2017-present. 
+// +// Licensed under the Apache License, Version 2.0 (the "License"); you may +// not use this file except in compliance with the License. You may obtain +// a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 + +package bson + +import ( + "bytes" + "errors" + "reflect" + "testing" + "time" + + "gitea.psichedelico.com/go/bson/internal/assert" + "gitea.psichedelico.com/go/bson/internal/require" + "gitea.psichedelico.com/go/bson/x/bsonx/bsoncore" +) + +func TestDecodeValue(t *testing.T) { + t.Parallel() + + for _, tc := range unmarshalingTestCases() { + tc := tc + + t.Run(tc.name, func(t *testing.T) { + t.Parallel() + + got := reflect.New(tc.sType).Elem() + vr := NewDocumentReader(bytes.NewReader(tc.data)) + reg := defaultRegistry + decoder, err := reg.LookupDecoder(reflect.TypeOf(got)) + noerr(t, err) + err = decoder.DecodeValue(DecodeContext{Registry: reg}, vr, got) + noerr(t, err) + assert.Equal(t, tc.want, got.Addr().Interface(), "Results do not match.") + }) + } +} + +func TestDecodingInterfaces(t *testing.T) { + t.Parallel() + + type testCase struct { + name string + stub func() ([]byte, interface{}, func(*testing.T)) + } + testCases := []testCase{ + { + name: "struct with interface containing a concrete value", + stub: func() ([]byte, interface{}, func(*testing.T)) { + type testStruct struct { + Value interface{} + } + var value string + + data := docToBytes(struct { + Value string + }{ + Value: "foo", + }) + + receiver := testStruct{&value} + + check := func(t *testing.T) { + t.Helper() + assert.Equal(t, "foo", value) + } + + return data, &receiver, check + }, + }, + { + name: "struct with interface containing a struct", + stub: func() ([]byte, interface{}, func(*testing.T)) { + type demo struct { + Data string + } + + type testStruct struct { + Value interface{} + } + var value demo + + data := docToBytes(struct { + Value demo + }{ + Value: demo{"foo"}, + }) + + receiver := testStruct{&value} + + check := func(t *testing.T) { + t.Helper() + assert.Equal(t, "foo", value.Data) + } + + return data, &receiver, check + }, + }, + { + name: "struct with interface containing a slice", + stub: func() ([]byte, interface{}, func(*testing.T)) { + type testStruct struct { + Values interface{} + } + var values []string + + data := docToBytes(struct { + Values []string + }{ + Values: []string{"foo", "bar"}, + }) + + receiver := testStruct{&values} + + check := func(t *testing.T) { + t.Helper() + assert.Equal(t, []string{"foo", "bar"}, values) + } + + return data, &receiver, check + }, + }, + { + name: "struct with interface containing an array", + stub: func() ([]byte, interface{}, func(*testing.T)) { + type testStruct struct { + Values interface{} + } + var values [2]string + + data := docToBytes(struct { + Values []string + }{ + Values: []string{"foo", "bar"}, + }) + + receiver := testStruct{&values} + + check := func(t *testing.T) { + t.Helper() + assert.Equal(t, [2]string{"foo", "bar"}, values) + } + + return data, &receiver, check + }, + }, + { + name: "struct with interface array containing concrete values", + stub: func() ([]byte, interface{}, func(*testing.T)) { + type testStruct struct { + Values [3]interface{} + } + var str string + var i, j int + + data := docToBytes(struct { + Values []interface{} + }{ + Values: []interface{}{"foo", 42, nil}, + }) + + receiver := testStruct{[3]interface{}{&str, &i, &j}} + + check := func(t *testing.T) { + t.Helper() + assert.Equal(t, "foo", str) + assert.Equal(t, 42, i) + assert.Equal(t, 0, j) + assert.Equal(t, 
testStruct{[3]interface{}{&str, &i, nil}}, receiver) + } + + return data, &receiver, check + }, + }, + } + for _, tc := range testCases { + tc := tc + + t.Run(tc.name, func(t *testing.T) { + t.Parallel() + + data, receiver, check := tc.stub() + got := reflect.ValueOf(receiver).Elem() + vr := NewDocumentReader(bytes.NewReader(data)) + reg := defaultRegistry + decoder, err := reg.LookupDecoder(got.Type()) + noerr(t, err) + err = decoder.DecodeValue(DecodeContext{Registry: reg}, vr, got) + noerr(t, err) + check(t) + }) + } +} + +func TestDecoder(t *testing.T) { + t.Parallel() + + t.Run("Decode", func(t *testing.T) { + t.Parallel() + + t.Run("basic", func(t *testing.T) { + t.Parallel() + + for _, tc := range unmarshalingTestCases() { + tc := tc + + t.Run(tc.name, func(t *testing.T) { + t.Parallel() + + got := reflect.New(tc.sType).Interface() + vr := NewDocumentReader(bytes.NewReader(tc.data)) + dec := NewDecoder(vr) + err := dec.Decode(got) + noerr(t, err) + assert.Equal(t, tc.want, got, "Results do not match.") + }) + } + }) + t.Run("stream", func(t *testing.T) { + t.Parallel() + + var buf bytes.Buffer + vr := NewDocumentReader(&buf) + dec := NewDecoder(vr) + for _, tc := range unmarshalingTestCases() { + tc := tc + + t.Run(tc.name, func(t *testing.T) { + buf.Write(tc.data) + got := reflect.New(tc.sType).Interface() + err := dec.Decode(got) + noerr(t, err) + assert.Equal(t, tc.want, got, "Results do not match.") + }) + } + }) + t.Run("lookup error", func(t *testing.T) { + t.Parallel() + + type certainlydoesntexistelsewhereihope func(string, string) string + // Avoid unused code lint error. + _ = certainlydoesntexistelsewhereihope(func(string, string) string { return "" }) + + cdeih := func(string, string) string { return "certainlydoesntexistelsewhereihope" } + dec := NewDecoder(NewDocumentReader(bytes.NewReader([]byte{}))) + want := errNoDecoder{Type: reflect.TypeOf(cdeih)} + got := dec.Decode(&cdeih) + assert.Equal(t, want, got, "Received unexpected error.") + }) + t.Run("Unmarshaler", func(t *testing.T) { + t.Parallel() + + testCases := []struct { + name string + err error + vr ValueReader + invoked bool + }{ + { + "error", + errors.New("Unmarshaler error"), + &valueReaderWriter{BSONType: TypeEmbeddedDocument, Err: ErrEOD, ErrAfter: readElement}, + true, + }, + { + "copy error", + errors.New("copy error"), + &valueReaderWriter{Err: errors.New("copy error"), ErrAfter: readDocument}, + false, + }, + { + "success", + nil, + &valueReaderWriter{BSONType: TypeEmbeddedDocument, Err: ErrEOD, ErrAfter: readElement}, + true, + }, + } + + for _, tc := range testCases { + tc := tc + + t.Run(tc.name, func(t *testing.T) { + t.Parallel() + + unmarshaler := &testUnmarshaler{Err: tc.err} + dec := NewDecoder(tc.vr) + got := dec.Decode(unmarshaler) + want := tc.err + if !assert.CompareErrors(got, want) { + t.Errorf("Did not receive expected error. 
got %v; want %v", got, want) + } + if unmarshaler.Invoked != tc.invoked { + if tc.invoked { + t.Error("Expected to have UnmarshalBSON invoked, but it wasn't.") + } else { + t.Error("Expected UnmarshalBSON to not be invoked, but it was.") + } + } + }) + } + + t.Run("Unmarshaler/success ValueReader", func(t *testing.T) { + t.Parallel() + + want := bsoncore.BuildDocument(nil, bsoncore.AppendDoubleElement(nil, "pi", 3.14159)) + unmarshaler := &testUnmarshaler{} + vr := NewDocumentReader(bytes.NewReader(want)) + dec := NewDecoder(vr) + err := dec.Decode(unmarshaler) + noerr(t, err) + got := unmarshaler.Val + if !bytes.Equal(got, want) { + t.Errorf("Did not unmarshal properly. got %v; want %v", got, want) + } + }) + }) + }) + t.Run("NewDecoder", func(t *testing.T) { + t.Parallel() + + t.Run("success", func(t *testing.T) { + t.Parallel() + + got := NewDecoder(NewDocumentReader(bytes.NewReader([]byte{}))) + if got == nil { + t.Errorf("Was expecting a non-nil Decoder, but got ") + } + }) + }) + t.Run("NewDecoderWithContext", func(t *testing.T) { + t.Parallel() + + t.Run("success", func(t *testing.T) { + t.Parallel() + + got := NewDecoder(NewDocumentReader(bytes.NewReader([]byte{}))) + if got == nil { + t.Errorf("Was expecting a non-nil Decoder, but got ") + } + }) + }) + t.Run("Decode doesn't zero struct", func(t *testing.T) { + t.Parallel() + + type foo struct { + Item string + Qty int + Bonus int + } + var got foo + got.Item = "apple" + got.Bonus = 2 + data := docToBytes(D{{"item", "canvas"}, {"qty", 4}}) + vr := NewDocumentReader(bytes.NewReader(data)) + dec := NewDecoder(vr) + err := dec.Decode(&got) + noerr(t, err) + want := foo{Item: "canvas", Qty: 4, Bonus: 2} + assert.Equal(t, want, got, "Results do not match.") + }) + t.Run("Reset", func(t *testing.T) { + t.Parallel() + + vr1, vr2 := NewDocumentReader(bytes.NewReader([]byte{})), NewDocumentReader(bytes.NewReader([]byte{})) + dec := NewDecoder(vr1) + if dec.vr != vr1 { + t.Errorf("Decoder should use the value reader provided. got %v; want %v", dec.vr, vr1) + } + dec.Reset(vr2) + if dec.vr != vr2 { + t.Errorf("Decoder should use the value reader provided. got %v; want %v", dec.vr, vr2) + } + }) + t.Run("SetRegistry", func(t *testing.T) { + t.Parallel() + + r1, r2 := defaultRegistry, NewRegistry() + dc1 := DecodeContext{Registry: r1} + dc2 := DecodeContext{Registry: r2} + dec := NewDecoder(NewDocumentReader(bytes.NewReader([]byte{}))) + if !reflect.DeepEqual(dec.dc, dc1) { + t.Errorf("Decoder should use the Registry provided. got %v; want %v", dec.dc, dc1) + } + dec.SetRegistry(r2) + if !reflect.DeepEqual(dec.dc, dc2) { + t.Errorf("Decoder should use the Registry provided. 
got %v; want %v", dec.dc, dc2) + } + }) + t.Run("DecodeToNil", func(t *testing.T) { + t.Parallel() + + data := docToBytes(D{{"item", "canvas"}, {"qty", 4}}) + vr := NewDocumentReader(bytes.NewReader(data)) + dec := NewDecoder(vr) + + var got *D + err := dec.Decode(got) + if !errors.Is(err, ErrDecodeToNil) { + t.Fatalf("Decode error mismatch; expected %v, got %v", ErrDecodeToNil, err) + } + }) +} + +type testUnmarshaler struct { + Invoked bool + Val []byte + Err error +} + +func (tu *testUnmarshaler) UnmarshalBSON(d []byte) error { + tu.Invoked = true + tu.Val = d + return tu.Err +} + +func TestDecoderConfiguration(t *testing.T) { + type truncateDoublesTest struct { + MyInt int + MyInt8 int8 + MyInt16 int16 + MyInt32 int32 + MyInt64 int64 + MyUint uint + MyUint8 uint8 + MyUint16 uint16 + MyUint32 uint32 + MyUint64 uint64 + } + + type objectIDTest struct { + ID string + } + + type jsonStructTest struct { + StructFieldName string `json:"jsonFieldName"` + } + + type localTimeZoneTest struct { + MyTime time.Time + } + + type zeroMapsTest struct { + MyMap map[string]string + } + + type zeroStructsTest struct { + MyString string + MyInt int + } + + testCases := []struct { + description string + configure func(*Decoder) + input []byte + decodeInto func() interface{} + want interface{} + }{ + // Test that AllowTruncatingDoubles causes the Decoder to unmarshal BSON doubles with + // fractional parts into Go integer types by truncating the fractional part. + { + description: "AllowTruncatingDoubles", + configure: func(dec *Decoder) { + dec.AllowTruncatingDoubles() + }, + input: bsoncore.NewDocumentBuilder(). + AppendDouble("myInt", 1.999). + AppendDouble("myInt8", 1.999). + AppendDouble("myInt16", 1.999). + AppendDouble("myInt32", 1.999). + AppendDouble("myInt64", 1.999). + AppendDouble("myUint", 1.999). + AppendDouble("myUint8", 1.999). + AppendDouble("myUint16", 1.999). + AppendDouble("myUint32", 1.999). + AppendDouble("myUint64", 1.999). + Build(), + decodeInto: func() interface{} { return &truncateDoublesTest{} }, + want: &truncateDoublesTest{ + MyInt: 1, + MyInt8: 1, + MyInt16: 1, + MyInt32: 1, + MyInt64: 1, + MyUint: 1, + MyUint8: 1, + MyUint16: 1, + MyUint32: 1, + MyUint64: 1, + }, + }, + // Test that BinaryAsSlice causes the Decoder to unmarshal BSON binary fields into Go byte + // slices when there is no type information (e.g when unmarshaling into a bson.D). + { + description: "BinaryAsSlice", + configure: func(dec *Decoder) { + dec.BinaryAsSlice() + }, + input: bsoncore.NewDocumentBuilder(). + AppendBinary("myBinary", TypeBinaryGeneric, []byte{}). + Build(), + decodeInto: func() interface{} { return &D{} }, + want: &D{{Key: "myBinary", Value: []byte{}}}, + }, + // Test that the default decoder always decodes BSON documents into bson.D values, + // independent of the top-level Go value type. + { + description: "DocumentD nested by default", + configure: func(_ *Decoder) {}, + input: bsoncore.NewDocumentBuilder(). + AppendDocument("myDocument", bsoncore.NewDocumentBuilder(). + AppendString("myString", "test value"). + Build()). + Build(), + decodeInto: func() interface{} { return M{} }, + want: M{ + "myDocument": D{{Key: "myString", Value: "test value"}}, + }, + }, + // Test that DefaultDocumentM always decodes BSON documents into bson.M values, + // independent of the top-level Go value type. + { + description: "DefaultDocumentM nested", + configure: func(dec *Decoder) { + dec.DefaultDocumentM() + }, + input: bsoncore.NewDocumentBuilder(). 
+ AppendDocument("myDocument", bsoncore.NewDocumentBuilder(). + AppendString("myString", "test value"). + Build()). + Build(), + decodeInto: func() interface{} { return &D{} }, + want: &D{ + {Key: "myDocument", Value: M{"myString": "test value"}}, + }, + }, + // Test that ObjectIDAsHexString causes the Decoder to decode object ID to hex. + { + description: "ObjectIDAsHexString", + configure: func(dec *Decoder) { + dec.ObjectIDAsHexString() + }, + input: bsoncore.NewDocumentBuilder(). + AppendObjectID("id", func() ObjectID { + id, _ := ObjectIDFromHex("5ef7fdd91c19e3222b41b839") + return id + }()). + Build(), + decodeInto: func() interface{} { return &objectIDTest{} }, + want: &objectIDTest{ID: "5ef7fdd91c19e3222b41b839"}, + }, + // Test that UseJSONStructTags causes the Decoder to fall back to "json" struct tags if + // "bson" struct tags are not available. + { + description: "UseJSONStructTags", + configure: func(dec *Decoder) { + dec.UseJSONStructTags() + }, + input: bsoncore.NewDocumentBuilder(). + AppendString("jsonFieldName", "test value"). + Build(), + decodeInto: func() interface{} { return &jsonStructTest{} }, + want: &jsonStructTest{StructFieldName: "test value"}, + }, + // Test that UseLocalTimeZone causes the Decoder to use the local time zone for decoded + // time.Time values instead of UTC. + { + description: "UseLocalTimeZone", + configure: func(dec *Decoder) { + dec.UseLocalTimeZone() + }, + input: bsoncore.NewDocumentBuilder(). + AppendDateTime("myTime", 1684349179939). + Build(), + decodeInto: func() interface{} { return &localTimeZoneTest{} }, + want: &localTimeZoneTest{MyTime: time.UnixMilli(1684349179939)}, + }, + // Test that ZeroMaps causes the Decoder to empty any Go map values before decoding BSON + // documents into them. + { + description: "ZeroMaps", + configure: func(dec *Decoder) { + dec.ZeroMaps() + }, + input: bsoncore.NewDocumentBuilder(). + AppendDocument("myMap", bsoncore.NewDocumentBuilder(). + AppendString("myString", "test value"). + Build()). + Build(), + decodeInto: func() interface{} { + return &zeroMapsTest{MyMap: map[string]string{"myExtraValue": "extra value"}} + }, + want: &zeroMapsTest{MyMap: map[string]string{"myString": "test value"}}, + }, + // Test that ZeroStructs causes the Decoder to empty any Go struct values before decoding + // BSON documents into them. + { + description: "ZeroStructs", + configure: func(dec *Decoder) { + dec.ZeroStructs() + }, + input: bsoncore.NewDocumentBuilder(). + AppendString("myString", "test value"). + Build(), + decodeInto: func() interface{} { + return &zeroStructsTest{MyInt: 1} + }, + want: &zeroStructsTest{MyString: "test value"}, + }, + } + + for _, tc := range testCases { + tc := tc // Capture range variable. + + t.Run(tc.description, func(t *testing.T) { + t.Parallel() + + dec := NewDecoder(NewDocumentReader(bytes.NewReader(tc.input))) + + tc.configure(dec) + + got := tc.decodeInto() + err := dec.Decode(got) + require.NoError(t, err, "Decode error") + + assert.Equal(t, tc.want, got, "expected and actual decode results do not match") + }) + } + + t.Run("Decoding an object ID to string", func(t *testing.T) { + t.Parallel() + + type objectIDTest struct { + ID string + } + + doc := bsoncore.NewDocumentBuilder(). + AppendObjectID("id", func() ObjectID { + id, _ := ObjectIDFromHex("5ef7fdd91c19e3222b41b839") + return id + }()). 
+ Build() + + dec := NewDecoder(NewDocumentReader(bytes.NewReader(doc))) + + var got objectIDTest + err := dec.Decode(&got) + const want = "error decoding key id: decoding an object ID into a string is not supported by default (set Decoder.ObjectIDAsHexString to enable decoding as a hexadecimal string)" + assert.EqualError(t, err, want) + }) + t.Run("DefaultDocumentM top-level", func(t *testing.T) { + t.Parallel() + + input := bsoncore.NewDocumentBuilder(). + AppendDocument("myDocument", bsoncore.NewDocumentBuilder(). + AppendString("myString", "test value"). + Build()). + Build() + + dec := NewDecoder(NewDocumentReader(bytes.NewReader(input))) + + dec.DefaultDocumentM() + + var got interface{} + err := dec.Decode(&got) + require.NoError(t, err, "Decode error") + + want := M{ + "myDocument": M{ + "myString": "test value", + }, + } + assert.Equal(t, want, got, "expected and actual decode results do not match") + }) + t.Run("Default decodes DocumentD for top-level", func(t *testing.T) { + t.Parallel() + + input := bsoncore.NewDocumentBuilder(). + AppendDocument("myDocument", bsoncore.NewDocumentBuilder(). + AppendString("myString", "test value"). + Build()). + Build() + + dec := NewDecoder(NewDocumentReader(bytes.NewReader(input))) + + var got interface{} + err := dec.Decode(&got) + require.NoError(t, err, "Decode error") + + want := D{ + {Key: "myDocument", Value: D{ + {Key: "myString", Value: "test value"}, + }}, + } + assert.Equal(t, want, got, "expected and actual decode results do not match") + }) +} diff --git a/default_value_decoders.go b/default_value_decoders.go new file mode 100644 index 0000000..8fbb2a0 --- /dev/null +++ b/default_value_decoders.go @@ -0,0 +1,1497 @@ +// Copyright (C) MongoDB, Inc. 2017-present. +// +// Licensed under the Apache License, Version 2.0 (the "License"); you may +// not use this file except in compliance with the License. You may obtain +// a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 + +package bson + +import ( + "encoding/json" + "errors" + "fmt" + "math" + "net/url" + "reflect" + "strconv" + + "gitea.psichedelico.com/go/bson/x/bsonx/bsoncore" +) + +var errCannotTruncate = errors.New("float64 can only be truncated to a lower precision type when truncation is enabled") + +type decodeBinaryError struct { + subtype byte + typeName string +} + +func (d decodeBinaryError) Error() string { + return fmt.Sprintf("only binary values with subtype 0x00 or 0x02 can be decoded into %s, but got subtype %v", d.typeName, d.subtype) +} + +// registerDefaultDecoders will register the decoder methods attached to DefaultValueDecoders with +// the provided RegistryBuilder. +// +// There is no support for decoding map[string]interface{} because there is no decoder for +// interface{}, so users must either register this decoder themselves or use the +// EmptyInterfaceDecoder available in the bson package. 
+func registerDefaultDecoders(reg *Registry) { + intDecoder := decodeAdapter{intDecodeValue, intDecodeType} + floatDecoder := decodeAdapter{floatDecodeValue, floatDecodeType} + uintCodec := &uintCodec{} + + reg.RegisterTypeDecoder(tD, ValueDecoderFunc(dDecodeValue)) + reg.RegisterTypeDecoder(tBinary, decodeAdapter{binaryDecodeValue, binaryDecodeType}) + reg.RegisterTypeDecoder(tVector, decodeAdapter{vectorDecodeValue, vectorDecodeType}) + reg.RegisterTypeDecoder(tUndefined, decodeAdapter{undefinedDecodeValue, undefinedDecodeType}) + reg.RegisterTypeDecoder(tDateTime, decodeAdapter{dateTimeDecodeValue, dateTimeDecodeType}) + reg.RegisterTypeDecoder(tNull, decodeAdapter{nullDecodeValue, nullDecodeType}) + reg.RegisterTypeDecoder(tRegex, decodeAdapter{regexDecodeValue, regexDecodeType}) + reg.RegisterTypeDecoder(tDBPointer, decodeAdapter{dbPointerDecodeValue, dbPointerDecodeType}) + reg.RegisterTypeDecoder(tTimestamp, decodeAdapter{timestampDecodeValue, timestampDecodeType}) + reg.RegisterTypeDecoder(tMinKey, decodeAdapter{minKeyDecodeValue, minKeyDecodeType}) + reg.RegisterTypeDecoder(tMaxKey, decodeAdapter{maxKeyDecodeValue, maxKeyDecodeType}) + reg.RegisterTypeDecoder(tJavaScript, decodeAdapter{javaScriptDecodeValue, javaScriptDecodeType}) + reg.RegisterTypeDecoder(tSymbol, decodeAdapter{symbolDecodeValue, symbolDecodeType}) + reg.RegisterTypeDecoder(tByteSlice, &byteSliceCodec{}) + reg.RegisterTypeDecoder(tTime, &timeCodec{}) + reg.RegisterTypeDecoder(tEmpty, &emptyInterfaceCodec{}) + reg.RegisterTypeDecoder(tCoreArray, &arrayCodec{}) + reg.RegisterTypeDecoder(tOID, decodeAdapter{objectIDDecodeValue, objectIDDecodeType}) + reg.RegisterTypeDecoder(tDecimal, decodeAdapter{decimal128DecodeValue, decimal128DecodeType}) + reg.RegisterTypeDecoder(tJSONNumber, decodeAdapter{jsonNumberDecodeValue, jsonNumberDecodeType}) + reg.RegisterTypeDecoder(tURL, decodeAdapter{urlDecodeValue, urlDecodeType}) + reg.RegisterTypeDecoder(tCoreDocument, ValueDecoderFunc(coreDocumentDecodeValue)) + reg.RegisterTypeDecoder(tCodeWithScope, decodeAdapter{codeWithScopeDecodeValue, codeWithScopeDecodeType}) + reg.RegisterKindDecoder(reflect.Bool, decodeAdapter{booleanDecodeValue, booleanDecodeType}) + reg.RegisterKindDecoder(reflect.Int, intDecoder) + reg.RegisterKindDecoder(reflect.Int8, intDecoder) + reg.RegisterKindDecoder(reflect.Int16, intDecoder) + reg.RegisterKindDecoder(reflect.Int32, intDecoder) + reg.RegisterKindDecoder(reflect.Int64, intDecoder) + reg.RegisterKindDecoder(reflect.Uint, uintCodec) + reg.RegisterKindDecoder(reflect.Uint8, uintCodec) + reg.RegisterKindDecoder(reflect.Uint16, uintCodec) + reg.RegisterKindDecoder(reflect.Uint32, uintCodec) + reg.RegisterKindDecoder(reflect.Uint64, uintCodec) + reg.RegisterKindDecoder(reflect.Float32, floatDecoder) + reg.RegisterKindDecoder(reflect.Float64, floatDecoder) + reg.RegisterKindDecoder(reflect.Array, ValueDecoderFunc(arrayDecodeValue)) + reg.RegisterKindDecoder(reflect.Map, &mapCodec{}) + reg.RegisterKindDecoder(reflect.Slice, &sliceCodec{}) + reg.RegisterKindDecoder(reflect.String, &stringCodec{}) + reg.RegisterKindDecoder(reflect.Struct, newStructCodec(nil)) + reg.RegisterKindDecoder(reflect.Ptr, &pointerCodec{}) + reg.RegisterTypeMapEntry(TypeDouble, tFloat64) + reg.RegisterTypeMapEntry(TypeString, tString) + reg.RegisterTypeMapEntry(TypeArray, tA) + reg.RegisterTypeMapEntry(TypeBinary, tBinary) + reg.RegisterTypeMapEntry(TypeUndefined, tUndefined) + reg.RegisterTypeMapEntry(TypeObjectID, tOID) + reg.RegisterTypeMapEntry(TypeBoolean, tBool) + 
reg.RegisterTypeMapEntry(TypeDateTime, tDateTime) + reg.RegisterTypeMapEntry(TypeRegex, tRegex) + reg.RegisterTypeMapEntry(TypeDBPointer, tDBPointer) + reg.RegisterTypeMapEntry(TypeJavaScript, tJavaScript) + reg.RegisterTypeMapEntry(TypeSymbol, tSymbol) + reg.RegisterTypeMapEntry(TypeCodeWithScope, tCodeWithScope) + reg.RegisterTypeMapEntry(TypeInt32, tInt32) + reg.RegisterTypeMapEntry(TypeInt64, tInt64) + reg.RegisterTypeMapEntry(TypeTimestamp, tTimestamp) + reg.RegisterTypeMapEntry(TypeDecimal128, tDecimal) + reg.RegisterTypeMapEntry(TypeMinKey, tMinKey) + reg.RegisterTypeMapEntry(TypeMaxKey, tMaxKey) + reg.RegisterTypeMapEntry(Type(0), tD) + reg.RegisterTypeMapEntry(TypeEmbeddedDocument, tD) + reg.RegisterInterfaceDecoder(tValueUnmarshaler, ValueDecoderFunc(valueUnmarshalerDecodeValue)) + reg.RegisterInterfaceDecoder(tUnmarshaler, ValueDecoderFunc(unmarshalerDecodeValue)) +} + +// dDecodeValue is the ValueDecoderFunc for D instances. +func dDecodeValue(dc DecodeContext, vr ValueReader, val reflect.Value) error { + if !val.IsValid() || !val.CanSet() || val.Type() != tD { + return ValueDecoderError{Name: "DDecodeValue", Kinds: []reflect.Kind{reflect.Slice}, Received: val} + } + + switch vrType := vr.Type(); vrType { + case Type(0), TypeEmbeddedDocument: + break + case TypeNull: + val.Set(reflect.Zero(val.Type())) + return vr.ReadNull() + default: + return fmt.Errorf("cannot decode %v into a D", vrType) + } + + dr, err := vr.ReadDocument() + if err != nil { + return err + } + + decoder, err := dc.LookupDecoder(tEmpty) + if err != nil { + return err + } + + // Use the elements in the provided value if it's non nil. Otherwise, allocate a new D instance. + var elems D + if !val.IsNil() { + val.SetLen(0) + elems = val.Interface().(D) + } else { + elems = make(D, 0) + } + + for { + key, elemVr, err := dr.ReadElement() + if errors.Is(err, ErrEOD) { + break + } else if err != nil { + return err + } + + var v interface{} + err = decoder.DecodeValue(dc, elemVr, reflect.ValueOf(&v).Elem()) + if err != nil { + return err + } + + elems = append(elems, E{Key: key, Value: v}) + } + + val.Set(reflect.ValueOf(elems)) + return nil +} + +func booleanDecodeType(_ DecodeContext, vr ValueReader, t reflect.Type) (reflect.Value, error) { + if t.Kind() != reflect.Bool { + return emptyValue, ValueDecoderError{ + Name: "BooleanDecodeValue", + Kinds: []reflect.Kind{reflect.Bool}, + Received: reflect.Zero(t), + } + } + + var b bool + var err error + switch vrType := vr.Type(); vrType { + case TypeInt32: + i32, err := vr.ReadInt32() + if err != nil { + return emptyValue, err + } + b = (i32 != 0) + case TypeInt64: + i64, err := vr.ReadInt64() + if err != nil { + return emptyValue, err + } + b = (i64 != 0) + case TypeDouble: + f64, err := vr.ReadDouble() + if err != nil { + return emptyValue, err + } + b = (f64 != 0) + case TypeBoolean: + b, err = vr.ReadBoolean() + case TypeNull: + err = vr.ReadNull() + case TypeUndefined: + err = vr.ReadUndefined() + default: + return emptyValue, fmt.Errorf("cannot decode %v into a boolean", vrType) + } + if err != nil { + return emptyValue, err + } + + return reflect.ValueOf(b), nil +} + +// booleanDecodeValue is the ValueDecoderFunc for bool types. 
+func booleanDecodeValue(dctx DecodeContext, vr ValueReader, val reflect.Value) error { + if !val.IsValid() || !val.CanSet() || val.Kind() != reflect.Bool { + return ValueDecoderError{Name: "BooleanDecodeValue", Kinds: []reflect.Kind{reflect.Bool}, Received: val} + } + + elem, err := booleanDecodeType(dctx, vr, val.Type()) + if err != nil { + return err + } + + val.SetBool(elem.Bool()) + return nil +} + +func intDecodeType(dc DecodeContext, vr ValueReader, t reflect.Type) (reflect.Value, error) { + var i64 int64 + var err error + switch vrType := vr.Type(); vrType { + case TypeInt32: + i32, err := vr.ReadInt32() + if err != nil { + return emptyValue, err + } + i64 = int64(i32) + case TypeInt64: + i64, err = vr.ReadInt64() + if err != nil { + return emptyValue, err + } + case TypeDouble: + f64, err := vr.ReadDouble() + if err != nil { + return emptyValue, err + } + if !dc.truncate && math.Floor(f64) != f64 { + return emptyValue, errCannotTruncate + } + if f64 > float64(math.MaxInt64) { + return emptyValue, fmt.Errorf("%g overflows int64", f64) + } + i64 = int64(f64) + case TypeBoolean: + b, err := vr.ReadBoolean() + if err != nil { + return emptyValue, err + } + if b { + i64 = 1 + } + case TypeNull: + if err = vr.ReadNull(); err != nil { + return emptyValue, err + } + case TypeUndefined: + if err = vr.ReadUndefined(); err != nil { + return emptyValue, err + } + default: + return emptyValue, fmt.Errorf("cannot decode %v into an integer type", vrType) + } + + switch t.Kind() { + case reflect.Int8: + if i64 < math.MinInt8 || i64 > math.MaxInt8 { + return emptyValue, fmt.Errorf("%d overflows int8", i64) + } + + return reflect.ValueOf(int8(i64)), nil + case reflect.Int16: + if i64 < math.MinInt16 || i64 > math.MaxInt16 { + return emptyValue, fmt.Errorf("%d overflows int16", i64) + } + + return reflect.ValueOf(int16(i64)), nil + case reflect.Int32: + if i64 < math.MinInt32 || i64 > math.MaxInt32 { + return emptyValue, fmt.Errorf("%d overflows int32", i64) + } + + return reflect.ValueOf(int32(i64)), nil + case reflect.Int64: + return reflect.ValueOf(i64), nil + case reflect.Int: + if i64 > math.MaxInt { // Can we fit this inside of an int + return emptyValue, fmt.Errorf("%d overflows int", i64) + } + + return reflect.ValueOf(int(i64)), nil + default: + return emptyValue, ValueDecoderError{ + Name: "IntDecodeValue", + Kinds: []reflect.Kind{reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64, reflect.Int}, + Received: reflect.Zero(t), + } + } +} + +// intDecodeValue is the ValueDecoderFunc for int types. 
+func intDecodeValue(dc DecodeContext, vr ValueReader, val reflect.Value) error { + if !val.CanSet() { + return ValueDecoderError{ + Name: "IntDecodeValue", + Kinds: []reflect.Kind{reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64, reflect.Int}, + Received: val, + } + } + + elem, err := intDecodeType(dc, vr, val.Type()) + if err != nil { + return err + } + + val.SetInt(elem.Int()) + return nil +} + +func floatDecodeType(dc DecodeContext, vr ValueReader, t reflect.Type) (reflect.Value, error) { + var f float64 + var err error + switch vrType := vr.Type(); vrType { + case TypeInt32: + i32, err := vr.ReadInt32() + if err != nil { + return emptyValue, err + } + f = float64(i32) + case TypeInt64: + i64, err := vr.ReadInt64() + if err != nil { + return emptyValue, err + } + f = float64(i64) + case TypeDouble: + f, err = vr.ReadDouble() + if err != nil { + return emptyValue, err + } + case TypeBoolean: + b, err := vr.ReadBoolean() + if err != nil { + return emptyValue, err + } + if b { + f = 1 + } + case TypeNull: + if err = vr.ReadNull(); err != nil { + return emptyValue, err + } + case TypeUndefined: + if err = vr.ReadUndefined(); err != nil { + return emptyValue, err + } + default: + return emptyValue, fmt.Errorf("cannot decode %v into a float32 or float64 type", vrType) + } + + switch t.Kind() { + case reflect.Float32: + if !dc.truncate && float64(float32(f)) != f { + return emptyValue, errCannotTruncate + } + + return reflect.ValueOf(float32(f)), nil + case reflect.Float64: + return reflect.ValueOf(f), nil + default: + return emptyValue, ValueDecoderError{ + Name: "FloatDecodeValue", + Kinds: []reflect.Kind{reflect.Float32, reflect.Float64}, + Received: reflect.Zero(t), + } + } +} + +// floatDecodeValue is the ValueDecoderFunc for float types. +func floatDecodeValue(ec DecodeContext, vr ValueReader, val reflect.Value) error { + if !val.CanSet() { + return ValueDecoderError{ + Name: "FloatDecodeValue", + Kinds: []reflect.Kind{reflect.Float32, reflect.Float64}, + Received: val, + } + } + + elem, err := floatDecodeType(ec, vr, val.Type()) + if err != nil { + return err + } + + val.SetFloat(elem.Float()) + return nil +} + +func javaScriptDecodeType(_ DecodeContext, vr ValueReader, t reflect.Type) (reflect.Value, error) { + if t != tJavaScript { + return emptyValue, ValueDecoderError{ + Name: "JavaScriptDecodeValue", + Types: []reflect.Type{tJavaScript}, + Received: reflect.Zero(t), + } + } + + var js string + var err error + switch vrType := vr.Type(); vrType { + case TypeJavaScript: + js, err = vr.ReadJavascript() + case TypeNull: + err = vr.ReadNull() + case TypeUndefined: + err = vr.ReadUndefined() + default: + return emptyValue, fmt.Errorf("cannot decode %v into a JavaScript", vrType) + } + if err != nil { + return emptyValue, err + } + + return reflect.ValueOf(JavaScript(js)), nil +} + +// javaScriptDecodeValue is the ValueDecoderFunc for the JavaScript type. 
+func javaScriptDecodeValue(dctx DecodeContext, vr ValueReader, val reflect.Value) error { + if !val.CanSet() || val.Type() != tJavaScript { + return ValueDecoderError{Name: "JavaScriptDecodeValue", Types: []reflect.Type{tJavaScript}, Received: val} + } + + elem, err := javaScriptDecodeType(dctx, vr, tJavaScript) + if err != nil { + return err + } + + val.SetString(elem.String()) + return nil +} + +func symbolDecodeType(_ DecodeContext, vr ValueReader, t reflect.Type) (reflect.Value, error) { + if t != tSymbol { + return emptyValue, ValueDecoderError{ + Name: "SymbolDecodeValue", + Types: []reflect.Type{tSymbol}, + Received: reflect.Zero(t), + } + } + + var symbol string + var err error + switch vrType := vr.Type(); vrType { + case TypeString: + symbol, err = vr.ReadString() + case TypeSymbol: + symbol, err = vr.ReadSymbol() + case TypeBinary: + data, subtype, err := vr.ReadBinary() + if err != nil { + return emptyValue, err + } + + if subtype != TypeBinaryGeneric && subtype != TypeBinaryBinaryOld { + return emptyValue, decodeBinaryError{subtype: subtype, typeName: "Symbol"} + } + symbol = string(data) + case TypeNull: + err = vr.ReadNull() + case TypeUndefined: + err = vr.ReadUndefined() + default: + return emptyValue, fmt.Errorf("cannot decode %v into a Symbol", vrType) + } + if err != nil { + return emptyValue, err + } + + return reflect.ValueOf(Symbol(symbol)), nil +} + +// symbolDecodeValue is the ValueDecoderFunc for the Symbol type. +func symbolDecodeValue(dctx DecodeContext, vr ValueReader, val reflect.Value) error { + if !val.CanSet() || val.Type() != tSymbol { + return ValueDecoderError{Name: "SymbolDecodeValue", Types: []reflect.Type{tSymbol}, Received: val} + } + + elem, err := symbolDecodeType(dctx, vr, tSymbol) + if err != nil { + return err + } + + val.SetString(elem.String()) + return nil +} + +func binaryDecode(vr ValueReader) (Binary, error) { + var b Binary + + var data []byte + var subtype byte + var err error + switch vrType := vr.Type(); vrType { + case TypeBinary: + data, subtype, err = vr.ReadBinary() + case TypeNull: + err = vr.ReadNull() + case TypeUndefined: + err = vr.ReadUndefined() + default: + return b, fmt.Errorf("cannot decode %v into a Binary", vrType) + } + if err != nil { + return b, err + } + b.Subtype = subtype + b.Data = data + + return b, nil +} + +func binaryDecodeType(_ DecodeContext, vr ValueReader, t reflect.Type) (reflect.Value, error) { + if t != tBinary { + return emptyValue, ValueDecoderError{ + Name: "BinaryDecodeValue", + Types: []reflect.Type{tBinary}, + Received: reflect.Zero(t), + } + } + + b, err := binaryDecode(vr) + if err != nil { + return emptyValue, err + } + return reflect.ValueOf(b), nil +} + +// binaryDecodeValue is the ValueDecoderFunc for Binary. 
+func binaryDecodeValue(dc DecodeContext, vr ValueReader, val reflect.Value) error { + if !val.CanSet() || val.Type() != tBinary { + return ValueDecoderError{Name: "BinaryDecodeValue", Types: []reflect.Type{tBinary}, Received: val} + } + + elem, err := binaryDecodeType(dc, vr, tBinary) + if err != nil { + return err + } + + val.Set(elem) + return nil +} + +func vectorDecodeType(_ DecodeContext, vr ValueReader, t reflect.Type) (reflect.Value, error) { + if t != tVector { + return emptyValue, ValueDecoderError{ + Name: "VectorDecodeValue", + Types: []reflect.Type{tVector}, + Received: reflect.Zero(t), + } + } + + b, err := binaryDecode(vr) + if err != nil { + return emptyValue, err + } + + v, err := NewVectorFromBinary(b) + if err != nil { + return emptyValue, err + } + + return reflect.ValueOf(v), nil +} + +// vectorDecodeValue is the ValueDecoderFunc for Vector. +func vectorDecodeValue(dctx DecodeContext, vr ValueReader, val reflect.Value) error { + t := val.Type() + if !val.CanSet() || t != tVector { + return ValueDecoderError{ + Name: "VectorDecodeValue", + Types: []reflect.Type{tVector}, + Received: val, + } + } + + elem, err := vectorDecodeType(dctx, vr, t) + if err != nil { + return err + } + + val.Set(elem) + return nil +} + +func undefinedDecodeType(_ DecodeContext, vr ValueReader, t reflect.Type) (reflect.Value, error) { + if t != tUndefined { + return emptyValue, ValueDecoderError{ + Name: "UndefinedDecodeValue", + Types: []reflect.Type{tUndefined}, + Received: reflect.Zero(t), + } + } + + var err error + switch vrType := vr.Type(); vrType { + case TypeUndefined: + err = vr.ReadUndefined() + case TypeNull: + err = vr.ReadNull() + default: + return emptyValue, fmt.Errorf("cannot decode %v into an Undefined", vr.Type()) + } + if err != nil { + return emptyValue, err + } + + return reflect.ValueOf(Undefined{}), nil +} + +// undefinedDecodeValue is the ValueDecoderFunc for Undefined. +func undefinedDecodeValue(dc DecodeContext, vr ValueReader, val reflect.Value) error { + if !val.CanSet() || val.Type() != tUndefined { + return ValueDecoderError{Name: "UndefinedDecodeValue", Types: []reflect.Type{tUndefined}, Received: val} + } + + elem, err := undefinedDecodeType(dc, vr, tUndefined) + if err != nil { + return err + } + + val.Set(elem) + return nil +} + +// Accept both 12-byte string and pretty-printed 24-byte hex string formats. +func objectIDDecodeType(_ DecodeContext, vr ValueReader, t reflect.Type) (reflect.Value, error) { + if t != tOID { + return emptyValue, ValueDecoderError{ + Name: "ObjectIDDecodeValue", + Types: []reflect.Type{tOID}, + Received: reflect.Zero(t), + } + } + + var oid ObjectID + var err error + switch vrType := vr.Type(); vrType { + case TypeObjectID: + oid, err = vr.ReadObjectID() + if err != nil { + return emptyValue, err + } + case TypeString: + str, err := vr.ReadString() + if err != nil { + return emptyValue, err + } + if oid, err = ObjectIDFromHex(str); err == nil { + break + } + if len(str) != 12 { + return emptyValue, fmt.Errorf("an ObjectID string must be exactly 12 bytes long (got %v)", len(str)) + } + byteArr := []byte(str) + copy(oid[:], byteArr) + case TypeNull: + if err = vr.ReadNull(); err != nil { + return emptyValue, err + } + case TypeUndefined: + if err = vr.ReadUndefined(); err != nil { + return emptyValue, err + } + default: + return emptyValue, fmt.Errorf("cannot decode %v into an ObjectID", vrType) + } + + return reflect.ValueOf(oid), nil +} + +// objectIDDecodeValue is the ValueDecoderFunc for ObjectID. 
+func objectIDDecodeValue(dc DecodeContext, vr ValueReader, val reflect.Value) error { + if !val.CanSet() || val.Type() != tOID { + return ValueDecoderError{Name: "ObjectIDDecodeValue", Types: []reflect.Type{tOID}, Received: val} + } + + elem, err := objectIDDecodeType(dc, vr, tOID) + if err != nil { + return err + } + + val.Set(elem) + return nil +} + +func dateTimeDecodeType(_ DecodeContext, vr ValueReader, t reflect.Type) (reflect.Value, error) { + if t != tDateTime { + return emptyValue, ValueDecoderError{ + Name: "DateTimeDecodeValue", + Types: []reflect.Type{tDateTime}, + Received: reflect.Zero(t), + } + } + + var dt int64 + var err error + switch vrType := vr.Type(); vrType { + case TypeDateTime: + dt, err = vr.ReadDateTime() + case TypeNull: + err = vr.ReadNull() + case TypeUndefined: + err = vr.ReadUndefined() + default: + return emptyValue, fmt.Errorf("cannot decode %v into a DateTime", vrType) + } + if err != nil { + return emptyValue, err + } + + return reflect.ValueOf(DateTime(dt)), nil +} + +// dateTimeDecodeValue is the ValueDecoderFunc for DateTime. +func dateTimeDecodeValue(dc DecodeContext, vr ValueReader, val reflect.Value) error { + if !val.CanSet() || val.Type() != tDateTime { + return ValueDecoderError{Name: "DateTimeDecodeValue", Types: []reflect.Type{tDateTime}, Received: val} + } + + elem, err := dateTimeDecodeType(dc, vr, tDateTime) + if err != nil { + return err + } + + val.Set(elem) + return nil +} + +func nullDecodeType(_ DecodeContext, vr ValueReader, t reflect.Type) (reflect.Value, error) { + if t != tNull { + return emptyValue, ValueDecoderError{ + Name: "NullDecodeValue", + Types: []reflect.Type{tNull}, + Received: reflect.Zero(t), + } + } + + var err error + switch vrType := vr.Type(); vrType { + case TypeUndefined: + err = vr.ReadUndefined() + case TypeNull: + err = vr.ReadNull() + default: + return emptyValue, fmt.Errorf("cannot decode %v into a Null", vr.Type()) + } + if err != nil { + return emptyValue, err + } + + return reflect.ValueOf(Null{}), nil +} + +// nullDecodeValue is the ValueDecoderFunc for Null. +func nullDecodeValue(dc DecodeContext, vr ValueReader, val reflect.Value) error { + if !val.CanSet() || val.Type() != tNull { + return ValueDecoderError{Name: "NullDecodeValue", Types: []reflect.Type{tNull}, Received: val} + } + + elem, err := nullDecodeType(dc, vr, tNull) + if err != nil { + return err + } + + val.Set(elem) + return nil +} + +func regexDecodeType(_ DecodeContext, vr ValueReader, t reflect.Type) (reflect.Value, error) { + if t != tRegex { + return emptyValue, ValueDecoderError{ + Name: "RegexDecodeValue", + Types: []reflect.Type{tRegex}, + Received: reflect.Zero(t), + } + } + + var pattern, options string + var err error + switch vrType := vr.Type(); vrType { + case TypeRegex: + pattern, options, err = vr.ReadRegex() + case TypeNull: + err = vr.ReadNull() + case TypeUndefined: + err = vr.ReadUndefined() + default: + return emptyValue, fmt.Errorf("cannot decode %v into a Regex", vrType) + } + if err != nil { + return emptyValue, err + } + + return reflect.ValueOf(Regex{Pattern: pattern, Options: options}), nil +} + +// regexDecodeValue is the ValueDecoderFunc for Regex. 
+func regexDecodeValue(dc DecodeContext, vr ValueReader, val reflect.Value) error { + if !val.CanSet() || val.Type() != tRegex { + return ValueDecoderError{Name: "RegexDecodeValue", Types: []reflect.Type{tRegex}, Received: val} + } + + elem, err := regexDecodeType(dc, vr, tRegex) + if err != nil { + return err + } + + val.Set(elem) + return nil +} + +func dbPointerDecodeType(_ DecodeContext, vr ValueReader, t reflect.Type) (reflect.Value, error) { + if t != tDBPointer { + return emptyValue, ValueDecoderError{ + Name: "DBPointerDecodeValue", + Types: []reflect.Type{tDBPointer}, + Received: reflect.Zero(t), + } + } + + var ns string + var pointer ObjectID + var err error + switch vrType := vr.Type(); vrType { + case TypeDBPointer: + ns, pointer, err = vr.ReadDBPointer() + case TypeNull: + err = vr.ReadNull() + case TypeUndefined: + err = vr.ReadUndefined() + default: + return emptyValue, fmt.Errorf("cannot decode %v into a DBPointer", vrType) + } + if err != nil { + return emptyValue, err + } + + return reflect.ValueOf(DBPointer{DB: ns, Pointer: pointer}), nil +} + +// dbPointerDecodeValue is the ValueDecoderFunc for DBPointer. +func dbPointerDecodeValue(dc DecodeContext, vr ValueReader, val reflect.Value) error { + if !val.CanSet() || val.Type() != tDBPointer { + return ValueDecoderError{Name: "DBPointerDecodeValue", Types: []reflect.Type{tDBPointer}, Received: val} + } + + elem, err := dbPointerDecodeType(dc, vr, tDBPointer) + if err != nil { + return err + } + + val.Set(elem) + return nil +} + +func timestampDecodeType(_ DecodeContext, vr ValueReader, reflectType reflect.Type) (reflect.Value, error) { + if reflectType != tTimestamp { + return emptyValue, ValueDecoderError{ + Name: "TimestampDecodeValue", + Types: []reflect.Type{tTimestamp}, + Received: reflect.Zero(reflectType), + } + } + + var t, incr uint32 + var err error + switch vrType := vr.Type(); vrType { + case TypeTimestamp: + t, incr, err = vr.ReadTimestamp() + case TypeNull: + err = vr.ReadNull() + case TypeUndefined: + err = vr.ReadUndefined() + default: + return emptyValue, fmt.Errorf("cannot decode %v into a Timestamp", vrType) + } + if err != nil { + return emptyValue, err + } + + return reflect.ValueOf(Timestamp{T: t, I: incr}), nil +} + +// timestampDecodeValue is the ValueDecoderFunc for Timestamp. +func timestampDecodeValue(dc DecodeContext, vr ValueReader, val reflect.Value) error { + if !val.CanSet() || val.Type() != tTimestamp { + return ValueDecoderError{Name: "TimestampDecodeValue", Types: []reflect.Type{tTimestamp}, Received: val} + } + + elem, err := timestampDecodeType(dc, vr, tTimestamp) + if err != nil { + return err + } + + val.Set(elem) + return nil +} + +func minKeyDecodeType(_ DecodeContext, vr ValueReader, t reflect.Type) (reflect.Value, error) { + if t != tMinKey { + return emptyValue, ValueDecoderError{ + Name: "MinKeyDecodeValue", + Types: []reflect.Type{tMinKey}, + Received: reflect.Zero(t), + } + } + + var err error + switch vrType := vr.Type(); vrType { + case TypeMinKey: + err = vr.ReadMinKey() + case TypeNull: + err = vr.ReadNull() + case TypeUndefined: + err = vr.ReadUndefined() + default: + return emptyValue, fmt.Errorf("cannot decode %v into a MinKey", vr.Type()) + } + if err != nil { + return emptyValue, err + } + + return reflect.ValueOf(MinKey{}), nil +} + +// minKeyDecodeValue is the ValueDecoderFunc for MinKey. 
+func minKeyDecodeValue(dc DecodeContext, vr ValueReader, val reflect.Value) error { + if !val.CanSet() || val.Type() != tMinKey { + return ValueDecoderError{Name: "MinKeyDecodeValue", Types: []reflect.Type{tMinKey}, Received: val} + } + + elem, err := minKeyDecodeType(dc, vr, tMinKey) + if err != nil { + return err + } + + val.Set(elem) + return nil +} + +func maxKeyDecodeType(_ DecodeContext, vr ValueReader, t reflect.Type) (reflect.Value, error) { + if t != tMaxKey { + return emptyValue, ValueDecoderError{ + Name: "MaxKeyDecodeValue", + Types: []reflect.Type{tMaxKey}, + Received: reflect.Zero(t), + } + } + + var err error + switch vrType := vr.Type(); vrType { + case TypeMaxKey: + err = vr.ReadMaxKey() + case TypeNull: + err = vr.ReadNull() + case TypeUndefined: + err = vr.ReadUndefined() + default: + return emptyValue, fmt.Errorf("cannot decode %v into a MaxKey", vr.Type()) + } + if err != nil { + return emptyValue, err + } + + return reflect.ValueOf(MaxKey{}), nil +} + +// maxKeyDecodeValue is the ValueDecoderFunc for MaxKey. +func maxKeyDecodeValue(dc DecodeContext, vr ValueReader, val reflect.Value) error { + if !val.CanSet() || val.Type() != tMaxKey { + return ValueDecoderError{Name: "MaxKeyDecodeValue", Types: []reflect.Type{tMaxKey}, Received: val} + } + + elem, err := maxKeyDecodeType(dc, vr, tMaxKey) + if err != nil { + return err + } + + val.Set(elem) + return nil +} + +func decimal128DecodeType(_ DecodeContext, vr ValueReader, t reflect.Type) (reflect.Value, error) { + if t != tDecimal { + return emptyValue, ValueDecoderError{ + Name: "Decimal128DecodeValue", + Types: []reflect.Type{tDecimal}, + Received: reflect.Zero(t), + } + } + + var d128 Decimal128 + var err error + switch vrType := vr.Type(); vrType { + case TypeDecimal128: + d128, err = vr.ReadDecimal128() + case TypeNull: + err = vr.ReadNull() + case TypeUndefined: + err = vr.ReadUndefined() + default: + return emptyValue, fmt.Errorf("cannot decode %v into a Decimal128", vr.Type()) + } + if err != nil { + return emptyValue, err + } + + return reflect.ValueOf(d128), nil +} + +// decimal128DecodeValue is the ValueDecoderFunc for Decimal128. 
+func decimal128DecodeValue(dctx DecodeContext, vr ValueReader, val reflect.Value) error { + if !val.CanSet() || val.Type() != tDecimal { + return ValueDecoderError{Name: "Decimal128DecodeValue", Types: []reflect.Type{tDecimal}, Received: val} + } + + elem, err := decimal128DecodeType(dctx, vr, tDecimal) + if err != nil { + return err + } + + val.Set(elem) + return nil +} + +func jsonNumberDecodeType(_ DecodeContext, vr ValueReader, t reflect.Type) (reflect.Value, error) { + if t != tJSONNumber { + return emptyValue, ValueDecoderError{ + Name: "JSONNumberDecodeValue", + Types: []reflect.Type{tJSONNumber}, + Received: reflect.Zero(t), + } + } + + var jsonNum json.Number + var err error + switch vrType := vr.Type(); vrType { + case TypeDouble: + f64, err := vr.ReadDouble() + if err != nil { + return emptyValue, err + } + jsonNum = json.Number(strconv.FormatFloat(f64, 'f', -1, 64)) + case TypeInt32: + i32, err := vr.ReadInt32() + if err != nil { + return emptyValue, err + } + jsonNum = json.Number(strconv.FormatInt(int64(i32), 10)) + case TypeInt64: + i64, err := vr.ReadInt64() + if err != nil { + return emptyValue, err + } + jsonNum = json.Number(strconv.FormatInt(i64, 10)) + case TypeNull: + err = vr.ReadNull() + case TypeUndefined: + err = vr.ReadUndefined() + default: + return emptyValue, fmt.Errorf("cannot decode %v into a json.Number", vrType) + } + if err != nil { + return emptyValue, err + } + + return reflect.ValueOf(jsonNum), nil +} + +// jsonNumberDecodeValue is the ValueDecoderFunc for json.Number. +func jsonNumberDecodeValue(dc DecodeContext, vr ValueReader, val reflect.Value) error { + if !val.CanSet() || val.Type() != tJSONNumber { + return ValueDecoderError{Name: "JSONNumberDecodeValue", Types: []reflect.Type{tJSONNumber}, Received: val} + } + + elem, err := jsonNumberDecodeType(dc, vr, tJSONNumber) + if err != nil { + return err + } + + val.Set(elem) + return nil +} + +func urlDecodeType(_ DecodeContext, vr ValueReader, t reflect.Type) (reflect.Value, error) { + if t != tURL { + return emptyValue, ValueDecoderError{ + Name: "URLDecodeValue", + Types: []reflect.Type{tURL}, + Received: reflect.Zero(t), + } + } + + urlPtr := &url.URL{} + var err error + switch vrType := vr.Type(); vrType { + case TypeString: + var str string // Declare str here to avoid shadowing err during the ReadString call. + str, err = vr.ReadString() + if err != nil { + return emptyValue, err + } + + urlPtr, err = url.Parse(str) + case TypeNull: + err = vr.ReadNull() + case TypeUndefined: + err = vr.ReadUndefined() + default: + return emptyValue, fmt.Errorf("cannot decode %v into a *url.URL", vrType) + } + if err != nil { + return emptyValue, err + } + + return reflect.ValueOf(urlPtr).Elem(), nil +} + +// urlDecodeValue is the ValueDecoderFunc for url.URL. +func urlDecodeValue(dc DecodeContext, vr ValueReader, val reflect.Value) error { + if !val.CanSet() || val.Type() != tURL { + return ValueDecoderError{Name: "URLDecodeValue", Types: []reflect.Type{tURL}, Received: val} + } + + elem, err := urlDecodeType(dc, vr, tURL) + if err != nil { + return err + } + + val.Set(elem) + return nil +} + +// arrayDecodeValue is the ValueDecoderFunc for array types. 
+func arrayDecodeValue(dc DecodeContext, vr ValueReader, val reflect.Value) error { + if !val.IsValid() || val.Kind() != reflect.Array { + return ValueDecoderError{Name: "ArrayDecodeValue", Kinds: []reflect.Kind{reflect.Array}, Received: val} + } + + switch vrType := vr.Type(); vrType { + case TypeArray: + case Type(0), TypeEmbeddedDocument: + if val.Type().Elem() != tE { + return fmt.Errorf("cannot decode document into %s", val.Type()) + } + case TypeBinary: + if val.Type().Elem() != tByte { + return fmt.Errorf("ArrayDecodeValue can only be used to decode binary into a byte array, got %v", vrType) + } + data, subtype, err := vr.ReadBinary() + if err != nil { + return err + } + if subtype != TypeBinaryGeneric && subtype != TypeBinaryBinaryOld { + return fmt.Errorf("ArrayDecodeValue can only be used to decode subtype 0x00 or 0x02 for %s, got %v", TypeBinary, subtype) + } + + if len(data) > val.Len() { + return fmt.Errorf("more elements returned in array than can fit inside %s", val.Type()) + } + + for idx, elem := range data { + val.Index(idx).Set(reflect.ValueOf(elem)) + } + return nil + case TypeNull: + val.Set(reflect.Zero(val.Type())) + return vr.ReadNull() + case TypeUndefined: + val.Set(reflect.Zero(val.Type())) + return vr.ReadUndefined() + default: + return fmt.Errorf("cannot decode %v into an array", vrType) + } + + var elemsFunc func(DecodeContext, ValueReader, reflect.Value) ([]reflect.Value, error) + switch val.Type().Elem() { + case tE: + elemsFunc = decodeD + default: + elemsFunc = decodeDefault + } + + elems, err := elemsFunc(dc, vr, val) + if err != nil { + return err + } + + if len(elems) > val.Len() { + return fmt.Errorf("more elements returned in array than can fit inside %s, got %v elements", val.Type(), len(elems)) + } + + for idx, elem := range elems { + val.Index(idx).Set(elem) + } + + return nil +} + +// valueUnmarshalerDecodeValue is the ValueDecoderFunc for ValueUnmarshaler implementations. +func valueUnmarshalerDecodeValue(_ DecodeContext, vr ValueReader, val reflect.Value) error { + if !val.IsValid() || (!val.Type().Implements(tValueUnmarshaler) && !reflect.PtrTo(val.Type()).Implements(tValueUnmarshaler)) { + return ValueDecoderError{Name: "ValueUnmarshalerDecodeValue", Types: []reflect.Type{tValueUnmarshaler}, Received: val} + } + + // If BSON value is null and the go value is a pointer, then don't call + // UnmarshalBSONValue. Even if the Go pointer is already initialized (i.e., + // non-nil), encountering null in BSON will result in the pointer being + // directly set to nil here. Since the pointer is being replaced with nil, + // there is no opportunity (or reason) for the custom UnmarshalBSONValue logic + // to be called. + if vr.Type() == TypeNull && val.Kind() == reflect.Ptr { + val.Set(reflect.Zero(val.Type())) + + return vr.ReadNull() + } + + if val.Kind() == reflect.Ptr && val.IsNil() { + if !val.CanSet() { + return ValueDecoderError{Name: "ValueUnmarshalerDecodeValue", Types: []reflect.Type{tValueUnmarshaler}, Received: val} + } + val.Set(reflect.New(val.Type().Elem())) + } + + if !val.Type().Implements(tValueUnmarshaler) { + if !val.CanAddr() { + return ValueDecoderError{Name: "ValueUnmarshalerDecodeValue", Types: []reflect.Type{tValueUnmarshaler}, Received: val} + } + val = val.Addr() // If the type doesn't implement the interface, a pointer to it must. 
+ } + + t, src, err := copyValueToBytes(vr) + if err != nil { + return err + } + + m, ok := val.Interface().(ValueUnmarshaler) + if !ok { + // NB: this error should be unreachable due to the above checks + return ValueDecoderError{Name: "ValueUnmarshalerDecodeValue", Types: []reflect.Type{tValueUnmarshaler}, Received: val} + } + return m.UnmarshalBSONValue(byte(t), src) +} + +// unmarshalerDecodeValue is the ValueDecoderFunc for Unmarshaler implementations. +func unmarshalerDecodeValue(_ DecodeContext, vr ValueReader, val reflect.Value) error { + if !val.IsValid() || (!val.Type().Implements(tUnmarshaler) && !reflect.PtrTo(val.Type()).Implements(tUnmarshaler)) { + return ValueDecoderError{Name: "UnmarshalerDecodeValue", Types: []reflect.Type{tUnmarshaler}, Received: val} + } + + if val.Kind() == reflect.Ptr && val.IsNil() { + if !val.CanSet() { + return ValueDecoderError{Name: "UnmarshalerDecodeValue", Types: []reflect.Type{tUnmarshaler}, Received: val} + } + val.Set(reflect.New(val.Type().Elem())) + } + + _, src, err := copyValueToBytes(vr) + if err != nil { + return err + } + + // If the target Go value is a pointer and the BSON field value is empty, set the value to the + // zero value of the pointer (nil) and don't call UnmarshalBSON. UnmarshalBSON has no way to + // change the pointer value from within the function (only the value at the pointer address), + // so it can't set the pointer to "nil" itself. Since the most common Go value for an empty BSON + // field value is "nil", we set "nil" here and don't call UnmarshalBSON. This behavior matches + // the behavior of the Go "encoding/json" unmarshaler when the target Go value is a pointer and + // the JSON field value is "null". + if val.Kind() == reflect.Ptr && len(src) == 0 { + val.Set(reflect.Zero(val.Type())) + return nil + } + + if !val.Type().Implements(tUnmarshaler) { + if !val.CanAddr() { + return ValueDecoderError{Name: "UnmarshalerDecodeValue", Types: []reflect.Type{tUnmarshaler}, Received: val} + } + val = val.Addr() // If the type doesn't implement the interface, a pointer to it must. + } + + m, ok := val.Interface().(Unmarshaler) + if !ok { + // NB: this error should be unreachable due to the above checks + return ValueDecoderError{Name: "UnmarshalerDecodeValue", Types: []reflect.Type{tUnmarshaler}, Received: val} + } + return m.UnmarshalBSON(src) +} + +// coreDocumentDecodeValue is the ValueDecoderFunc for bsoncore.Document. 
+func coreDocumentDecodeValue(_ DecodeContext, vr ValueReader, val reflect.Value) error { + if !val.CanSet() || val.Type() != tCoreDocument { + return ValueDecoderError{Name: "CoreDocumentDecodeValue", Types: []reflect.Type{tCoreDocument}, Received: val} + } + + if val.IsNil() { + val.Set(reflect.MakeSlice(val.Type(), 0, 0)) + } + + val.SetLen(0) + + cdoc, err := appendDocumentBytes(val.Interface().(bsoncore.Document), vr) + val.Set(reflect.ValueOf(cdoc)) + return err +} + +func decodeDefault(dc DecodeContext, vr ValueReader, val reflect.Value) ([]reflect.Value, error) { + elems := make([]reflect.Value, 0) + + ar, err := vr.ReadArray() + if err != nil { + return nil, err + } + + eType := val.Type().Elem() + + var vDecoder ValueDecoder + if !(eType.Kind() == reflect.Interface && val.Len() > 0) { + vDecoder, err = dc.LookupDecoder(eType) + if err != nil { + return nil, err + } + } + + idx := 0 + for { + vr, err := ar.ReadValue() + if errors.Is(err, ErrEOA) { + break + } + if err != nil { + return nil, err + } + + var elem reflect.Value + if vDecoder == nil { + elem = val.Index(idx).Elem() + switch { + case elem.Kind() != reflect.Ptr || elem.IsNil(): + valueDecoder, err := dc.LookupDecoder(elem.Type()) + if err != nil { + return nil, err + } + err = valueDecoder.DecodeValue(dc, vr, elem) + if err != nil { + return nil, newDecodeError(strconv.Itoa(idx), err) + } + case vr.Type() == TypeNull: + if err = vr.ReadNull(); err != nil { + return nil, err + } + elem = reflect.Zero(val.Index(idx).Type()) + default: + e := elem.Elem() + valueDecoder, err := dc.LookupDecoder(e.Type()) + if err != nil { + return nil, err + } + err = valueDecoder.DecodeValue(dc, vr, e) + if err != nil { + return nil, newDecodeError(strconv.Itoa(idx), err) + } + } + } else { + elem, err = decodeTypeOrValueWithInfo(vDecoder, dc, vr, eType) + if err != nil { + return nil, newDecodeError(strconv.Itoa(idx), err) + } + } + + elems = append(elems, elem) + idx++ + } + + return elems, nil +} + +func codeWithScopeDecodeType(dc DecodeContext, vr ValueReader, t reflect.Type) (reflect.Value, error) { + if t != tCodeWithScope { + return emptyValue, ValueDecoderError{ + Name: "CodeWithScopeDecodeValue", + Types: []reflect.Type{tCodeWithScope}, + Received: reflect.Zero(t), + } + } + + var cws CodeWithScope + var err error + switch vrType := vr.Type(); vrType { + case TypeCodeWithScope: + code, dr, err := vr.ReadCodeWithScope() + if err != nil { + return emptyValue, err + } + + scope := reflect.New(tD).Elem() + elems, err := decodeElemsFromDocumentReader(dc, dr) + if err != nil { + return emptyValue, err + } + + scope.Set(reflect.MakeSlice(tD, 0, len(elems))) + scope.Set(reflect.Append(scope, elems...)) + + cws = CodeWithScope{ + Code: JavaScript(code), + Scope: scope.Interface().(D), + } + case TypeNull: + err = vr.ReadNull() + case TypeUndefined: + err = vr.ReadUndefined() + default: + return emptyValue, fmt.Errorf("cannot decode %v into a CodeWithScope", vrType) + } + if err != nil { + return emptyValue, err + } + + return reflect.ValueOf(cws), nil +} + +// codeWithScopeDecodeValue is the ValueDecoderFunc for CodeWithScope. 
+func codeWithScopeDecodeValue(dc DecodeContext, vr ValueReader, val reflect.Value) error { + if !val.CanSet() || val.Type() != tCodeWithScope { + return ValueDecoderError{Name: "CodeWithScopeDecodeValue", Types: []reflect.Type{tCodeWithScope}, Received: val} + } + + elem, err := codeWithScopeDecodeType(dc, vr, tCodeWithScope) + if err != nil { + return err + } + + val.Set(elem) + return nil +} + +func decodeD(dc DecodeContext, vr ValueReader, _ reflect.Value) ([]reflect.Value, error) { + switch vr.Type() { + case Type(0), TypeEmbeddedDocument: + default: + return nil, fmt.Errorf("cannot decode %v into a D", vr.Type()) + } + + dr, err := vr.ReadDocument() + if err != nil { + return nil, err + } + + return decodeElemsFromDocumentReader(dc, dr) +} + +func decodeElemsFromDocumentReader(dc DecodeContext, dr DocumentReader) ([]reflect.Value, error) { + decoder, err := dc.LookupDecoder(tEmpty) + if err != nil { + return nil, err + } + + elems := make([]reflect.Value, 0) + for { + key, vr, err := dr.ReadElement() + if errors.Is(err, ErrEOD) { + break + } + if err != nil { + return nil, err + } + + val := reflect.New(tEmpty).Elem() + err = decoder.DecodeValue(dc, vr, val) + if err != nil { + return nil, newDecodeError(key, err) + } + + elems = append(elems, reflect.ValueOf(E{Key: key, Value: val.Interface()})) + } + + return elems, nil +} diff --git a/default_value_decoders_test.go b/default_value_decoders_test.go new file mode 100644 index 0000000..16b2dba --- /dev/null +++ b/default_value_decoders_test.go @@ -0,0 +1,3806 @@ +// Copyright (C) MongoDB, Inc. 2017-present. +// +// Licensed under the Apache License, Version 2.0 (the "License"); you may +// not use this file except in compliance with the License. You may obtain +// a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 + +package bson + +import ( + "bytes" + "encoding/json" + "errors" + "fmt" + "math" + "net/url" + "reflect" + "strings" + "testing" + "time" + + "gitea.psichedelico.com/go/bson/internal/assert" + "gitea.psichedelico.com/go/bson/x/bsonx/bsoncore" + "github.com/google/go-cmp/cmp" +) + +func TestDefaultValueDecoders(t *testing.T) { + var wrong = func(string, string) string { return "wrong" } + + type mybool bool + type myint8 int8 + type myint16 int16 + type myint32 int32 + type myint64 int64 + type myint int + type myuint8 uint8 + type myuint16 uint16 + type myuint32 uint32 + type myuint64 uint64 + type myuint uint + type myfloat32 float32 + type myfloat64 float64 + type mystring string + type mystruct struct{} + + const cansetreflectiontest = "cansetreflectiontest" + const cansettest = "cansettest" + + now := time.Now().Truncate(time.Millisecond) + d128 := NewDecimal128(12345, 67890) + var pbool = func(b bool) *bool { return &b } + var pi32 = func(i32 int32) *int32 { return &i32 } + var pi64 = func(i64 int64) *int64 { return &i64 } + + type subtest struct { + name string + val interface{} + dctx *DecodeContext + llvrw *valueReaderWriter + invoke invoked + err error + } + + testCases := []struct { + name string + vd ValueDecoder + subtests []subtest + }{ + { + "BooleanDecodeValue", + ValueDecoderFunc(booleanDecodeValue), + []subtest{ + { + "wrong type", + wrong, + nil, + &valueReaderWriter{BSONType: TypeBoolean}, + nothing, + ValueDecoderError{Name: "BooleanDecodeValue", Kinds: []reflect.Kind{reflect.Bool}, Received: reflect.ValueOf(wrong)}, + }, + { + "type not boolean", + bool(false), + nil, + &valueReaderWriter{BSONType: TypeString}, + nothing, + fmt.Errorf("cannot decode %v into a boolean", TypeString), + 
}, + { + "fast path", + bool(true), + nil, + &valueReaderWriter{BSONType: TypeBoolean, Return: bool(true)}, + readBoolean, + nil, + }, + { + "reflection path", + mybool(true), + nil, + &valueReaderWriter{BSONType: TypeBoolean, Return: bool(true)}, + readBoolean, + nil, + }, + { + "reflection path error", + mybool(true), + nil, + &valueReaderWriter{BSONType: TypeBoolean, Return: bool(true), Err: errors.New("ReadBoolean Error"), ErrAfter: readBoolean}, + readBoolean, errors.New("ReadBoolean Error"), + }, + { + "can set false", + cansettest, + nil, + &valueReaderWriter{BSONType: TypeBoolean}, + nothing, + ValueDecoderError{Name: "BooleanDecodeValue", Kinds: []reflect.Kind{reflect.Bool}}, + }, + { + "decode null", + mybool(false), + nil, + &valueReaderWriter{BSONType: TypeNull}, + readNull, + nil, + }, + { + "decode undefined", + mybool(false), + nil, + &valueReaderWriter{BSONType: TypeUndefined}, + readUndefined, + nil, + }, + }, + }, + { + "IntDecodeValue", + ValueDecoderFunc(intDecodeValue), + []subtest{ + { + "wrong type", + wrong, + nil, + &valueReaderWriter{BSONType: TypeInt32, Return: int32(0)}, + readInt32, + ValueDecoderError{ + Name: "IntDecodeValue", + Kinds: []reflect.Kind{reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64, reflect.Int}, + Received: reflect.ValueOf(wrong), + }, + }, + { + "type not int32/int64", + 0, + nil, + &valueReaderWriter{BSONType: TypeString}, + nothing, + fmt.Errorf("cannot decode %v into an integer type", TypeString), + }, + { + "ReadInt32 error", + 0, + nil, + &valueReaderWriter{BSONType: TypeInt32, Return: int32(0), Err: errors.New("ReadInt32 error"), ErrAfter: readInt32}, + readInt32, + errors.New("ReadInt32 error"), + }, + { + "ReadInt64 error", + 0, + nil, + &valueReaderWriter{BSONType: TypeInt64, Return: int64(0), Err: errors.New("ReadInt64 error"), ErrAfter: readInt64}, + readInt64, + errors.New("ReadInt64 error"), + }, + { + "ReadDouble error", + 0, + nil, + &valueReaderWriter{BSONType: TypeDouble, Return: float64(0), Err: errors.New("ReadDouble error"), ErrAfter: readDouble}, + readDouble, + errors.New("ReadDouble error"), + }, + { + "ReadDouble", int64(3), &DecodeContext{}, + &valueReaderWriter{BSONType: TypeDouble, Return: float64(3.00)}, readDouble, + nil, + }, + { + "ReadDouble (truncate)", int64(3), &DecodeContext{truncate: true}, + &valueReaderWriter{BSONType: TypeDouble, Return: float64(3.14)}, readDouble, + nil, + }, + { + "ReadDouble (no truncate)", int64(0), nil, + &valueReaderWriter{BSONType: TypeDouble, Return: float64(3.14)}, readDouble, + errCannotTruncate, + }, + { + "ReadDouble overflows int64", int64(0), nil, + &valueReaderWriter{BSONType: TypeDouble, Return: math.MaxFloat64}, readDouble, + fmt.Errorf("%g overflows int64", math.MaxFloat64), + }, + {"int8/fast path", int8(127), nil, &valueReaderWriter{BSONType: TypeInt32, Return: int32(127)}, readInt32, nil}, + {"int16/fast path", int16(32676), nil, &valueReaderWriter{BSONType: TypeInt32, Return: int32(32676)}, readInt32, nil}, + {"int32/fast path", int32(1234), nil, &valueReaderWriter{BSONType: TypeInt32, Return: int32(1234)}, readInt32, nil}, + {"int64/fast path", int64(1234), nil, &valueReaderWriter{BSONType: TypeInt64, Return: int64(1234)}, readInt64, nil}, + {"int/fast path", int(1234), nil, &valueReaderWriter{BSONType: TypeInt64, Return: int64(1234)}, readInt64, nil}, + { + "int8/fast path - nil", (*int8)(nil), nil, + &valueReaderWriter{BSONType: TypeInt32, Return: int32(0)}, readInt32, + ValueDecoderError{ + Name: "IntDecodeValue", + Kinds: 
[]reflect.Kind{reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64, reflect.Int}, + Received: reflect.ValueOf((*int8)(nil)), + }, + }, + { + "int16/fast path - nil", (*int16)(nil), nil, + &valueReaderWriter{BSONType: TypeInt32, Return: int32(0)}, readInt32, + ValueDecoderError{ + Name: "IntDecodeValue", + Kinds: []reflect.Kind{reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64, reflect.Int}, + Received: reflect.ValueOf((*int16)(nil)), + }, + }, + { + "int32/fast path - nil", (*int32)(nil), nil, + &valueReaderWriter{BSONType: TypeInt32, Return: int32(0)}, readInt32, + ValueDecoderError{ + Name: "IntDecodeValue", + Kinds: []reflect.Kind{reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64, reflect.Int}, + Received: reflect.ValueOf((*int32)(nil)), + }, + }, + { + "int64/fast path - nil", (*int64)(nil), nil, + &valueReaderWriter{BSONType: TypeInt32, Return: int32(0)}, readInt32, + ValueDecoderError{ + Name: "IntDecodeValue", + Kinds: []reflect.Kind{reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64, reflect.Int}, + Received: reflect.ValueOf((*int64)(nil)), + }, + }, + { + "int/fast path - nil", (*int)(nil), nil, + &valueReaderWriter{BSONType: TypeInt32, Return: int32(0)}, readInt32, + ValueDecoderError{ + Name: "IntDecodeValue", + Kinds: []reflect.Kind{reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64, reflect.Int}, + Received: reflect.ValueOf((*int)(nil)), + }, + }, + { + "int8/fast path - overflow", int8(0), nil, + &valueReaderWriter{BSONType: TypeInt32, Return: int32(129)}, readInt32, + fmt.Errorf("%d overflows int8", 129), + }, + { + "int16/fast path - overflow", int16(0), nil, + &valueReaderWriter{BSONType: TypeInt32, Return: int32(32768)}, readInt32, + fmt.Errorf("%d overflows int16", 32768), + }, + { + "int32/fast path - overflow", int32(0), nil, + &valueReaderWriter{BSONType: TypeInt64, Return: int64(2147483648)}, readInt64, + fmt.Errorf("%d overflows int32", int64(2147483648)), + }, + { + "int8/fast path - overflow (negative)", int8(0), nil, + &valueReaderWriter{BSONType: TypeInt32, Return: int32(-129)}, readInt32, + fmt.Errorf("%d overflows int8", -129), + }, + { + "int16/fast path - overflow (negative)", int16(0), nil, + &valueReaderWriter{BSONType: TypeInt32, Return: int32(-32769)}, readInt32, + fmt.Errorf("%d overflows int16", -32769), + }, + { + "int32/fast path - overflow (negative)", int32(0), nil, + &valueReaderWriter{BSONType: TypeInt64, Return: int64(-2147483649)}, readInt64, + fmt.Errorf("%d overflows int32", int64(-2147483649)), + }, + { + "int8/reflection path", myint8(127), nil, + &valueReaderWriter{BSONType: TypeInt32, Return: int32(127)}, readInt32, + nil, + }, + { + "int16/reflection path", myint16(255), nil, + &valueReaderWriter{BSONType: TypeInt32, Return: int32(255)}, readInt32, + nil, + }, + { + "int32/reflection path", myint32(511), nil, + &valueReaderWriter{BSONType: TypeInt32, Return: int32(511)}, readInt32, + nil, + }, + { + "int64/reflection path", myint64(1023), nil, + &valueReaderWriter{BSONType: TypeInt32, Return: int32(1023)}, readInt32, + nil, + }, + { + "int/reflection path", myint(2047), nil, + &valueReaderWriter{BSONType: TypeInt32, Return: int32(2047)}, readInt32, + nil, + }, + { + "int8/reflection path - overflow", myint8(0), nil, + &valueReaderWriter{BSONType: TypeInt32, Return: int32(129)}, readInt32, + fmt.Errorf("%d overflows int8", 129), + }, + { + "int16/reflection path - overflow", myint16(0), nil, + &valueReaderWriter{BSONType: TypeInt32, Return: int32(32768)}, readInt32, + fmt.Errorf("%d overflows int16", 
32768), + }, + { + "int32/reflection path - overflow", myint32(0), nil, + &valueReaderWriter{BSONType: TypeInt64, Return: int64(2147483648)}, readInt64, + fmt.Errorf("%d overflows int32", int64(2147483648)), + }, + { + "int8/reflection path - overflow (negative)", myint8(0), nil, + &valueReaderWriter{BSONType: TypeInt32, Return: int32(-129)}, readInt32, + fmt.Errorf("%d overflows int8", -129), + }, + { + "int16/reflection path - overflow (negative)", myint16(0), nil, + &valueReaderWriter{BSONType: TypeInt32, Return: int32(-32769)}, readInt32, + fmt.Errorf("%d overflows int16", -32769), + }, + { + "int32/reflection path - overflow (negative)", myint32(0), nil, + &valueReaderWriter{BSONType: TypeInt64, Return: int64(-2147483649)}, readInt64, + fmt.Errorf("%d overflows int32", int64(-2147483649)), + }, + { + "can set false", + cansettest, + nil, + &valueReaderWriter{BSONType: TypeInt32, Return: int32(0)}, + nothing, + ValueDecoderError{ + Name: "IntDecodeValue", + Kinds: []reflect.Kind{reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64, reflect.Int}, + }, + }, + { + "decode null", + myint(0), + nil, + &valueReaderWriter{BSONType: TypeNull}, + readNull, + nil, + }, + { + "decode undefined", + myint(0), + nil, + &valueReaderWriter{BSONType: TypeUndefined}, + readUndefined, + nil, + }, + }, + }, + { + "defaultUIntCodec.DecodeValue", + &uintCodec{}, + []subtest{ + { + "wrong type", + wrong, + nil, + &valueReaderWriter{BSONType: TypeInt32, Return: int32(0)}, + readInt32, + ValueDecoderError{ + Name: "UintDecodeValue", + Kinds: []reflect.Kind{reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64, reflect.Uint}, + Received: reflect.ValueOf(wrong), + }, + }, + { + "type not int32/int64", + 0, + nil, + &valueReaderWriter{BSONType: TypeString}, + nothing, + fmt.Errorf("cannot decode %v into an integer type", TypeString), + }, + { + "ReadInt32 error", + uint(0), + nil, + &valueReaderWriter{BSONType: TypeInt32, Return: int32(0), Err: errors.New("ReadInt32 error"), ErrAfter: readInt32}, + readInt32, + errors.New("ReadInt32 error"), + }, + { + "ReadInt64 error", + uint(0), + nil, + &valueReaderWriter{BSONType: TypeInt64, Return: int64(0), Err: errors.New("ReadInt64 error"), ErrAfter: readInt64}, + readInt64, + errors.New("ReadInt64 error"), + }, + { + "ReadDouble error", + 0, + nil, + &valueReaderWriter{BSONType: TypeDouble, Return: float64(0), Err: errors.New("ReadDouble error"), ErrAfter: readDouble}, + readDouble, + errors.New("ReadDouble error"), + }, + { + "ReadDouble", uint64(3), &DecodeContext{}, + &valueReaderWriter{BSONType: TypeDouble, Return: float64(3.00)}, readDouble, + nil, + }, + { + "ReadDouble (truncate)", uint64(3), &DecodeContext{truncate: true}, + &valueReaderWriter{BSONType: TypeDouble, Return: float64(3.14)}, readDouble, + nil, + }, + { + "ReadDouble (no truncate)", uint64(0), nil, + &valueReaderWriter{BSONType: TypeDouble, Return: float64(3.14)}, readDouble, + errCannotTruncate, + }, + { + "ReadDouble overflows int64", uint64(0), nil, + &valueReaderWriter{BSONType: TypeDouble, Return: math.MaxFloat64}, readDouble, + fmt.Errorf("%g overflows int64", math.MaxFloat64), + }, + {"uint8/fast path", uint8(127), nil, &valueReaderWriter{BSONType: TypeInt32, Return: int32(127)}, readInt32, nil}, + {"uint16/fast path", uint16(255), nil, &valueReaderWriter{BSONType: TypeInt32, Return: int32(255)}, readInt32, nil}, + {"uint32/fast path", uint32(1234), nil, &valueReaderWriter{BSONType: TypeInt32, Return: int32(1234)}, readInt32, nil}, + {"uint64/fast path", uint64(1234), nil, 
&valueReaderWriter{BSONType: TypeInt64, Return: int64(1234)}, readInt64, nil}, + {"uint/fast path", uint(1234), nil, &valueReaderWriter{BSONType: TypeInt64, Return: int64(1234)}, readInt64, nil}, + { + "uint8/fast path - nil", (*uint8)(nil), nil, + &valueReaderWriter{BSONType: TypeInt32, Return: int32(0)}, readInt32, + ValueDecoderError{ + Name: "UintDecodeValue", + Kinds: []reflect.Kind{reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64, reflect.Uint}, + Received: reflect.ValueOf((*uint8)(nil)), + }, + }, + { + "uint16/fast path - nil", (*uint16)(nil), nil, + &valueReaderWriter{BSONType: TypeInt32, Return: int32(0)}, readInt32, + ValueDecoderError{ + Name: "UintDecodeValue", + Kinds: []reflect.Kind{reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64, reflect.Uint}, + Received: reflect.ValueOf((*uint16)(nil)), + }, + }, + { + "uint32/fast path - nil", (*uint32)(nil), nil, + &valueReaderWriter{BSONType: TypeInt32, Return: int32(0)}, readInt32, + ValueDecoderError{ + Name: "UintDecodeValue", + Kinds: []reflect.Kind{reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64, reflect.Uint}, + Received: reflect.ValueOf((*uint32)(nil)), + }, + }, + { + "uint64/fast path - nil", (*uint64)(nil), nil, + &valueReaderWriter{BSONType: TypeInt32, Return: int32(0)}, readInt32, + ValueDecoderError{ + Name: "UintDecodeValue", + Kinds: []reflect.Kind{reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64, reflect.Uint}, + Received: reflect.ValueOf((*uint64)(nil)), + }, + }, + { + "uint/fast path - nil", (*uint)(nil), nil, + &valueReaderWriter{BSONType: TypeInt32, Return: int32(0)}, readInt32, + ValueDecoderError{ + Name: "UintDecodeValue", + Kinds: []reflect.Kind{reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64, reflect.Uint}, + Received: reflect.ValueOf((*uint)(nil)), + }, + }, + { + "uint8/fast path - overflow", uint8(0), nil, + &valueReaderWriter{BSONType: TypeInt32, Return: int32(1 << 8)}, readInt32, + fmt.Errorf("%d overflows uint8", 1<<8), + }, + { + "uint16/fast path - overflow", uint16(0), nil, + &valueReaderWriter{BSONType: TypeInt32, Return: int32(1 << 16)}, readInt32, + fmt.Errorf("%d overflows uint16", 1<<16), + }, + { + "uint32/fast path - overflow", uint32(0), nil, + &valueReaderWriter{BSONType: TypeInt64, Return: int64(1 << 32)}, readInt64, + fmt.Errorf("%d overflows uint32", int64(1<<32)), + }, + { + "uint8/fast path - overflow (negative)", uint8(0), nil, + &valueReaderWriter{BSONType: TypeInt32, Return: int32(-1)}, readInt32, + fmt.Errorf("%d overflows uint8", -1), + }, + { + "uint16/fast path - overflow (negative)", uint16(0), nil, + &valueReaderWriter{BSONType: TypeInt32, Return: int32(-1)}, readInt32, + fmt.Errorf("%d overflows uint16", -1), + }, + { + "uint32/fast path - overflow (negative)", uint32(0), nil, + &valueReaderWriter{BSONType: TypeInt64, Return: int64(-1)}, readInt64, + fmt.Errorf("%d overflows uint32", -1), + }, + { + "uint64/fast path - overflow (negative)", uint64(0), nil, + &valueReaderWriter{BSONType: TypeInt64, Return: int64(-1)}, readInt64, + fmt.Errorf("%d overflows uint64", -1), + }, + { + "uint/fast path - overflow (negative)", uint(0), nil, + &valueReaderWriter{BSONType: TypeInt64, Return: int64(-1)}, readInt64, + fmt.Errorf("%d overflows uint", -1), + }, + { + "uint8/reflection path", myuint8(127), nil, + &valueReaderWriter{BSONType: TypeInt32, Return: int32(127)}, readInt32, + nil, + }, + { + "uint16/reflection path", myuint16(255), nil, + &valueReaderWriter{BSONType: TypeInt32, Return: int32(255)}, readInt32, + nil, 
+ }, + { + "uint32/reflection path", myuint32(511), nil, + &valueReaderWriter{BSONType: TypeInt32, Return: int32(511)}, readInt32, + nil, + }, + { + "uint64/reflection path", myuint64(1023), nil, + &valueReaderWriter{BSONType: TypeInt32, Return: int32(1023)}, readInt32, + nil, + }, + { + "uint/reflection path", myuint(2047), nil, + &valueReaderWriter{BSONType: TypeInt32, Return: int32(2047)}, readInt32, + nil, + }, + { + "uint8/reflection path - overflow", myuint8(0), nil, + &valueReaderWriter{BSONType: TypeInt32, Return: int32(1 << 8)}, readInt32, + fmt.Errorf("%d overflows uint8", 1<<8), + }, + { + "uint16/reflection path - overflow", myuint16(0), nil, + &valueReaderWriter{BSONType: TypeInt32, Return: int32(1 << 16)}, readInt32, + fmt.Errorf("%d overflows uint16", 1<<16), + }, + { + "uint32/reflection path - overflow", myuint32(0), nil, + &valueReaderWriter{BSONType: TypeInt64, Return: int64(1 << 32)}, readInt64, + fmt.Errorf("%d overflows uint32", int64(1<<32)), + }, + { + "uint8/reflection path - overflow (negative)", myuint8(0), nil, + &valueReaderWriter{BSONType: TypeInt32, Return: int32(-1)}, readInt32, + fmt.Errorf("%d overflows uint8", -1), + }, + { + "uint16/reflection path - overflow (negative)", myuint16(0), nil, + &valueReaderWriter{BSONType: TypeInt32, Return: int32(-1)}, readInt32, + fmt.Errorf("%d overflows uint16", -1), + }, + { + "uint32/reflection path - overflow (negative)", myuint32(0), nil, + &valueReaderWriter{BSONType: TypeInt64, Return: int64(-1)}, readInt64, + fmt.Errorf("%d overflows uint32", -1), + }, + { + "uint64/reflection path - overflow (negative)", myuint64(0), nil, + &valueReaderWriter{BSONType: TypeInt64, Return: int64(-1)}, readInt64, + fmt.Errorf("%d overflows uint64", -1), + }, + { + "uint/reflection path - overflow (negative)", myuint(0), nil, + &valueReaderWriter{BSONType: TypeInt64, Return: int64(-1)}, readInt64, + fmt.Errorf("%d overflows uint", -1), + }, + { + "can set false", + cansettest, + nil, + &valueReaderWriter{BSONType: TypeInt32, Return: int32(0)}, + nothing, + ValueDecoderError{ + Name: "UintDecodeValue", + Kinds: []reflect.Kind{reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64, reflect.Uint}, + }, + }, + }, + }, + { + "FloatDecodeValue", + ValueDecoderFunc(floatDecodeValue), + []subtest{ + { + "wrong type", + wrong, + nil, + &valueReaderWriter{BSONType: TypeDouble, Return: float64(0)}, + readDouble, + ValueDecoderError{ + Name: "FloatDecodeValue", + Kinds: []reflect.Kind{reflect.Float32, reflect.Float64}, + Received: reflect.ValueOf(wrong), + }, + }, + { + "type not double", + 0, + nil, + &valueReaderWriter{BSONType: TypeString}, + nothing, + fmt.Errorf("cannot decode %v into a float32 or float64 type", TypeString), + }, + { + "ReadDouble error", + float64(0), + nil, + &valueReaderWriter{BSONType: TypeDouble, Return: float64(0), Err: errors.New("ReadDouble error"), ErrAfter: readDouble}, + readDouble, + errors.New("ReadDouble error"), + }, + { + "ReadInt32 error", + float64(0), + nil, + &valueReaderWriter{BSONType: TypeInt32, Return: int32(0), Err: errors.New("ReadInt32 error"), ErrAfter: readInt32}, + readInt32, + errors.New("ReadInt32 error"), + }, + { + "ReadInt64 error", + float64(0), + nil, + &valueReaderWriter{BSONType: TypeInt64, Return: int64(0), Err: errors.New("ReadInt64 error"), ErrAfter: readInt64}, + readInt64, + errors.New("ReadInt64 error"), + }, + { + "float64/int32", float32(32.0), nil, + &valueReaderWriter{BSONType: TypeInt32, Return: int32(32)}, readInt32, + nil, + }, + { + "float64/int64", float32(64.0), 
nil, + &valueReaderWriter{BSONType: TypeInt64, Return: int64(64)}, readInt64, + nil, + }, + { + "float32/fast path (equal)", float32(3.0), nil, + &valueReaderWriter{BSONType: TypeDouble, Return: float64(3.0)}, readDouble, + nil, + }, + { + "float64/fast path", float64(3.14159), nil, + &valueReaderWriter{BSONType: TypeDouble, Return: float64(3.14159)}, readDouble, + nil, + }, + { + "float32/fast path (truncate)", float32(3.14), &DecodeContext{truncate: true}, + &valueReaderWriter{BSONType: TypeDouble, Return: float64(3.14)}, readDouble, + nil, + }, + { + "float32/fast path (no truncate)", float32(0), nil, + &valueReaderWriter{BSONType: TypeDouble, Return: float64(3.14)}, readDouble, + errCannotTruncate, + }, + { + "float32/fast path - nil", (*float32)(nil), nil, + &valueReaderWriter{BSONType: TypeDouble, Return: float64(0)}, readDouble, + ValueDecoderError{ + Name: "FloatDecodeValue", + Kinds: []reflect.Kind{reflect.Float32, reflect.Float64}, + Received: reflect.ValueOf((*float32)(nil)), + }, + }, + { + "float64/fast path - nil", (*float64)(nil), nil, + &valueReaderWriter{BSONType: TypeDouble, Return: float64(0)}, readDouble, + ValueDecoderError{ + Name: "FloatDecodeValue", + Kinds: []reflect.Kind{reflect.Float32, reflect.Float64}, + Received: reflect.ValueOf((*float64)(nil)), + }, + }, + { + "float32/reflection path (equal)", myfloat32(3.0), nil, + &valueReaderWriter{BSONType: TypeDouble, Return: float64(3.0)}, readDouble, + nil, + }, + { + "float64/reflection path", myfloat64(3.14159), nil, + &valueReaderWriter{BSONType: TypeDouble, Return: float64(3.14159)}, readDouble, + nil, + }, + { + "float32/reflection path (truncate)", myfloat32(3.14), &DecodeContext{truncate: true}, + &valueReaderWriter{BSONType: TypeDouble, Return: float64(3.14)}, readDouble, + nil, + }, + { + "float32/reflection path (no truncate)", myfloat32(0), nil, + &valueReaderWriter{BSONType: TypeDouble, Return: float64(3.14)}, readDouble, + errCannotTruncate, + }, + { + "can set false", + cansettest, + nil, + &valueReaderWriter{BSONType: TypeDouble, Return: float64(0)}, + nothing, + ValueDecoderError{ + Name: "FloatDecodeValue", + Kinds: []reflect.Kind{reflect.Float32, reflect.Float64}, + }, + }, + }, + }, + { + "defaultTimeCodec.DecodeValue", + &timeCodec{}, + []subtest{ + { + "wrong type", + wrong, + nil, + &valueReaderWriter{BSONType: TypeDateTime, Return: int64(0)}, + nothing, + ValueDecoderError{Name: "TimeDecodeValue", Types: []reflect.Type{tTime}, Received: reflect.ValueOf(wrong)}, + }, + { + "ReadDateTime error", + time.Time{}, + nil, + &valueReaderWriter{BSONType: TypeDateTime, Return: int64(0), Err: errors.New("ReadDateTime error"), ErrAfter: readDateTime}, + readDateTime, + errors.New("ReadDateTime error"), + }, + { + "time.Time", + now, + nil, + &valueReaderWriter{BSONType: TypeDateTime, Return: now.UnixNano() / int64(time.Millisecond)}, + readDateTime, + nil, + }, + { + "can set false", + cansettest, + nil, + &valueReaderWriter{BSONType: TypeDateTime, Return: int64(0)}, + nothing, + ValueDecoderError{Name: "TimeDecodeValue", Types: []reflect.Type{tTime}}, + }, + { + "decode null", + time.Time{}, + nil, + &valueReaderWriter{BSONType: TypeNull}, + readNull, + nil, + }, + { + "decode undefined", + time.Time{}, + nil, + &valueReaderWriter{BSONType: TypeUndefined}, + readUndefined, + nil, + }, + }, + }, + { + "defaultMapCodec.DecodeValue", + &mapCodec{}, + []subtest{ + { + "wrong kind", + wrong, + nil, + &valueReaderWriter{}, + nothing, + ValueDecoderError{Name: "MapDecodeValue", Kinds: 
[]reflect.Kind{reflect.Map}, Received: reflect.ValueOf(wrong)}, + }, + { + "wrong kind (non-string key)", + map[bool]interface{}{}, + &DecodeContext{Registry: buildDefaultRegistry()}, + &valueReaderWriter{}, + readElement, + fmt.Errorf("unsupported key type: %T", false), + }, + { + "ReadDocument Error", + make(map[string]interface{}), + nil, + &valueReaderWriter{Err: errors.New("rd error"), ErrAfter: readDocument}, + readDocument, + errors.New("rd error"), + }, + { + "Lookup Error", + map[string]string{}, + &DecodeContext{Registry: newTestRegistry()}, + &valueReaderWriter{}, + readDocument, + errNoDecoder{Type: reflect.TypeOf("")}, + }, + { + "ReadElement Error", + make(map[string]interface{}), + &DecodeContext{Registry: buildDefaultRegistry()}, + &valueReaderWriter{Err: errors.New("re error"), ErrAfter: readElement}, + readElement, + errors.New("re error"), + }, + { + "can set false", + cansettest, + nil, + &valueReaderWriter{}, + nothing, + ValueDecoderError{Name: "MapDecodeValue", Kinds: []reflect.Kind{reflect.Map}}, + }, + { + "wrong BSON type", + map[string]interface{}{}, + nil, + &valueReaderWriter{BSONType: TypeString}, + nothing, + errors.New("cannot decode string into a map[string]interface {}"), + }, + { + "decode null", + (map[string]interface{})(nil), + nil, + &valueReaderWriter{BSONType: TypeNull}, + readNull, + nil, + }, + { + "decode undefined", + (map[string]interface{})(nil), + nil, + &valueReaderWriter{BSONType: TypeUndefined}, + readUndefined, + nil, + }, + }, + }, + { + "ArrayDecodeValue", + ValueDecoderFunc(arrayDecodeValue), + []subtest{ + { + "wrong kind", + wrong, + nil, + &valueReaderWriter{}, + nothing, + ValueDecoderError{Name: "ArrayDecodeValue", Kinds: []reflect.Kind{reflect.Array}, Received: reflect.ValueOf(wrong)}, + }, + { + "can set false", + cansettest, + nil, + &valueReaderWriter{}, + nothing, + ValueDecoderError{Name: "ArrayDecodeValue", Kinds: []reflect.Kind{reflect.Array}}, + }, + { + "Not Type Array", + [1]interface{}{}, + nil, + &valueReaderWriter{BSONType: TypeString}, + nothing, + errors.New("cannot decode string into an array"), + }, + { + "ReadArray Error", + [1]interface{}{}, + nil, + &valueReaderWriter{Err: errors.New("ra error"), ErrAfter: readArray, BSONType: TypeArray}, + readArray, + errors.New("ra error"), + }, + { + "Lookup Error", + [1]string{}, + &DecodeContext{Registry: newTestRegistry()}, + &valueReaderWriter{BSONType: TypeArray}, + readArray, + errNoDecoder{Type: reflect.TypeOf("")}, + }, + { + "ReadValue Error", + [1]string{}, + &DecodeContext{Registry: buildDefaultRegistry()}, + &valueReaderWriter{Err: errors.New("rv error"), ErrAfter: readValue, BSONType: TypeArray}, + readValue, + errors.New("rv error"), + }, + { + "DecodeValue Error", + [1]string{}, + &DecodeContext{Registry: buildDefaultRegistry()}, + &valueReaderWriter{BSONType: TypeArray}, + readValue, + &DecodeError{keys: []string{"0"}, wrapped: errors.New("cannot decode array into a string type")}, + }, + { + "Document but not D", + [1]string{}, + nil, + &valueReaderWriter{BSONType: Type(0)}, + nothing, + errors.New("cannot decode document into [1]string"), + }, + { + "EmbeddedDocument but not D", + [1]string{}, + nil, + &valueReaderWriter{BSONType: TypeEmbeddedDocument}, + nothing, + errors.New("cannot decode document into [1]string"), + }, + { + "decode null", + [1]string{}, + nil, + &valueReaderWriter{BSONType: TypeNull}, + readNull, + nil, + }, + { + "decode undefined", + [1]string{}, + nil, + &valueReaderWriter{BSONType: TypeUndefined}, + readUndefined, + nil, + }, + 
}, + }, + { + "defaultSliceCodec.DecodeValue", + &sliceCodec{}, + []subtest{ + { + "wrong kind", + wrong, + nil, + &valueReaderWriter{}, + nothing, + ValueDecoderError{Name: "SliceDecodeValue", Kinds: []reflect.Kind{reflect.Slice}, Received: reflect.ValueOf(wrong)}, + }, + { + "can set false", + cansettest, + nil, + &valueReaderWriter{}, + nothing, + ValueDecoderError{Name: "SliceDecodeValue", Kinds: []reflect.Kind{reflect.Slice}}, + }, + { + "Not Type Array", + []interface{}{}, + nil, + &valueReaderWriter{BSONType: TypeInt32}, + nothing, + errors.New("cannot decode 32-bit integer into a slice"), + }, + { + "ReadArray Error", + []interface{}{}, + nil, + &valueReaderWriter{Err: errors.New("ra error"), ErrAfter: readArray, BSONType: TypeArray}, + readArray, + errors.New("ra error"), + }, + { + "Lookup Error", + []string{}, + &DecodeContext{Registry: newTestRegistry()}, + &valueReaderWriter{BSONType: TypeArray}, + readArray, + errNoDecoder{Type: reflect.TypeOf("")}, + }, + { + "ReadValue Error", + []string{}, + &DecodeContext{Registry: buildDefaultRegistry()}, + &valueReaderWriter{Err: errors.New("rv error"), ErrAfter: readValue, BSONType: TypeArray}, + readValue, + errors.New("rv error"), + }, + { + "DecodeValue Error", + []string{}, + &DecodeContext{Registry: buildDefaultRegistry()}, + &valueReaderWriter{BSONType: TypeArray}, + readValue, + &DecodeError{keys: []string{"0"}, wrapped: errors.New("cannot decode array into a string type")}, + }, + { + "Document but not D", + []string{}, + nil, + &valueReaderWriter{BSONType: Type(0)}, + nothing, + errors.New("cannot decode document into []string"), + }, + { + "EmbeddedDocument but not D", + []string{}, + nil, + &valueReaderWriter{BSONType: TypeEmbeddedDocument}, + nothing, + errors.New("cannot decode document into []string"), + }, + { + "decode null", + ([]string)(nil), + nil, + &valueReaderWriter{BSONType: TypeNull}, + readNull, + nil, + }, + { + "decode undefined", + ([]string)(nil), + nil, + &valueReaderWriter{BSONType: TypeUndefined}, + readUndefined, + nil, + }, + }, + }, + { + "ObjectIDDecodeValue", + ValueDecoderFunc(objectIDDecodeValue), + []subtest{ + { + "wrong type", + wrong, + nil, + &valueReaderWriter{BSONType: TypeObjectID}, + nothing, + ValueDecoderError{Name: "ObjectIDDecodeValue", Types: []reflect.Type{tOID}, Received: reflect.ValueOf(wrong)}, + }, + { + "type not objectID", + ObjectID{}, + nil, + &valueReaderWriter{BSONType: TypeInt32}, + nothing, + fmt.Errorf("cannot decode %v into an ObjectID", TypeInt32), + }, + { + "ReadObjectID Error", + ObjectID{}, + nil, + &valueReaderWriter{BSONType: TypeObjectID, Err: errors.New("roid error"), ErrAfter: readObjectID}, + readObjectID, + errors.New("roid error"), + }, + { + "can set false", + cansettest, + nil, + &valueReaderWriter{BSONType: TypeObjectID, Return: ObjectID{}}, + nothing, + ValueDecoderError{Name: "ObjectIDDecodeValue", Types: []reflect.Type{tOID}}, + }, + { + "success", + ObjectID{0x01, 0x02, 0x03, 0x04, 0x05, 0x06, 0x07, 0x08, 0x09, 0x0A, 0x0B, 0x0C}, + nil, + &valueReaderWriter{ + BSONType: TypeObjectID, + Return: ObjectID{0x01, 0x02, 0x03, 0x04, 0x05, 0x06, 0x07, 0x08, 0x09, 0x0A, 0x0B, 0x0C}, + }, + readObjectID, + nil, + }, + { + "success/string", + ObjectID{0x30, 0x31, 0x32, 0x33, 0x34, 0x35, 0x36, 0x37, 0x38, 0x39, 0x61, 0x62}, + nil, + &valueReaderWriter{ + BSONType: TypeString, + Return: "0123456789ab", + }, + readString, + nil, + }, + { + "success/string-hex", + ObjectID{0x30, 0x31, 0x32, 0x33, 0x34, 0x35, 0x36, 0x37, 0x38, 0x39, 0x61, 0x62}, + nil, + 
&valueReaderWriter{ + BSONType: TypeString, + Return: "303132333435363738396162", + }, + readString, + nil, + }, + { + "decode null", + ObjectID{}, + nil, + &valueReaderWriter{BSONType: TypeNull}, + readNull, + nil, + }, + { + "decode undefined", + ObjectID{}, + nil, + &valueReaderWriter{BSONType: TypeUndefined}, + readUndefined, + nil, + }, + }, + }, + { + "Decimal128DecodeValue", + ValueDecoderFunc(decimal128DecodeValue), + []subtest{ + { + "wrong type", + wrong, + nil, + &valueReaderWriter{BSONType: TypeDecimal128}, + nothing, + ValueDecoderError{Name: "Decimal128DecodeValue", Types: []reflect.Type{tDecimal}, Received: reflect.ValueOf(wrong)}, + }, + { + "type not decimal128", + Decimal128{}, + nil, + &valueReaderWriter{BSONType: TypeString}, + nothing, + fmt.Errorf("cannot decode %v into a Decimal128", TypeString), + }, + { + "ReadDecimal128 Error", + Decimal128{}, + nil, + &valueReaderWriter{BSONType: TypeDecimal128, Err: errors.New("rd128 error"), ErrAfter: readDecimal128}, + readDecimal128, + errors.New("rd128 error"), + }, + { + "can set false", + cansettest, + nil, + &valueReaderWriter{BSONType: TypeDecimal128, Return: d128}, + nothing, + ValueDecoderError{Name: "Decimal128DecodeValue", Types: []reflect.Type{tDecimal}}, + }, + { + "success", + d128, + nil, + &valueReaderWriter{BSONType: TypeDecimal128, Return: d128}, + readDecimal128, + nil, + }, + { + "decode null", + Decimal128{}, + nil, + &valueReaderWriter{BSONType: TypeNull}, + readNull, + nil, + }, + { + "decode undefined", + Decimal128{}, + nil, + &valueReaderWriter{BSONType: TypeUndefined}, + readUndefined, + nil, + }, + }, + }, + { + "JSONNumberDecodeValue", + ValueDecoderFunc(jsonNumberDecodeValue), + []subtest{ + { + "wrong type", + wrong, + nil, + &valueReaderWriter{BSONType: TypeObjectID}, + nothing, + ValueDecoderError{Name: "JSONNumberDecodeValue", Types: []reflect.Type{tJSONNumber}, Received: reflect.ValueOf(wrong)}, + }, + { + "type not double/int32/int64", + json.Number(""), + nil, + &valueReaderWriter{BSONType: TypeString}, + nothing, + fmt.Errorf("cannot decode %v into a json.Number", TypeString), + }, + { + "ReadDouble Error", + json.Number(""), + nil, + &valueReaderWriter{BSONType: TypeDouble, Err: errors.New("rd error"), ErrAfter: readDouble}, + readDouble, + errors.New("rd error"), + }, + { + "ReadInt32 Error", + json.Number(""), + nil, + &valueReaderWriter{BSONType: TypeInt32, Err: errors.New("ri32 error"), ErrAfter: readInt32}, + readInt32, + errors.New("ri32 error"), + }, + { + "ReadInt64 Error", + json.Number(""), + nil, + &valueReaderWriter{BSONType: TypeInt64, Err: errors.New("ri64 error"), ErrAfter: readInt64}, + readInt64, + errors.New("ri64 error"), + }, + { + "can set false", + cansettest, + nil, + &valueReaderWriter{BSONType: TypeObjectID, Return: ObjectID{}}, + nothing, + ValueDecoderError{Name: "JSONNumberDecodeValue", Types: []reflect.Type{tJSONNumber}}, + }, + { + "success/double", + json.Number("3.14159"), + nil, + &valueReaderWriter{BSONType: TypeDouble, Return: float64(3.14159)}, + readDouble, + nil, + }, + { + "success/int32", + json.Number("12345"), + nil, + &valueReaderWriter{BSONType: TypeInt32, Return: int32(12345)}, + readInt32, + nil, + }, + { + "success/int64", + json.Number("1234567890"), + nil, + &valueReaderWriter{BSONType: TypeInt64, Return: int64(1234567890)}, + readInt64, + nil, + }, + { + "decode null", + json.Number(""), + nil, + &valueReaderWriter{BSONType: TypeNull}, + readNull, + nil, + }, + { + "decode undefined", + json.Number(""), + nil, + &valueReaderWriter{BSONType: 
TypeUndefined}, + readUndefined, + nil, + }, + }, + }, + { + "URLDecodeValue", + ValueDecoderFunc(urlDecodeValue), + []subtest{ + { + "wrong type", + url.URL{}, + nil, + &valueReaderWriter{BSONType: TypeInt32}, + nothing, + fmt.Errorf("cannot decode %v into a *url.URL", TypeInt32), + }, + { + "type not *url.URL", + int64(0), + nil, + &valueReaderWriter{BSONType: TypeString, Return: "http://example.com"}, + nothing, + ValueDecoderError{Name: "URLDecodeValue", Types: []reflect.Type{tURL}, Received: reflect.ValueOf(int64(0))}, + }, + { + "ReadString error", + url.URL{}, + nil, + &valueReaderWriter{BSONType: TypeString, Err: errors.New("rs error"), ErrAfter: readString}, + readString, + errors.New("rs error"), + }, + { + "url.Parse error", + url.URL{}, + nil, + &valueReaderWriter{BSONType: TypeString, Return: "not-valid-%%%%://"}, + readString, + &url.Error{ + Op: "parse", + URL: "not-valid-%%%%://", + Err: errors.New("first path segment in URL cannot contain colon"), + }, + }, + { + "can set false", + cansettest, + nil, + &valueReaderWriter{BSONType: TypeString, Return: "http://example.com"}, + nothing, + ValueDecoderError{Name: "URLDecodeValue", Types: []reflect.Type{tURL}}, + }, + { + "url.URL", + url.URL{Scheme: "http", Host: "example.com"}, + nil, + &valueReaderWriter{BSONType: TypeString, Return: "http://example.com"}, + readString, + nil, + }, + { + "decode null", + url.URL{}, + nil, + &valueReaderWriter{BSONType: TypeNull}, + readNull, + nil, + }, + { + "decode undefined", + url.URL{}, + nil, + &valueReaderWriter{BSONType: TypeUndefined}, + readUndefined, + nil, + }, + }, + }, + { + "defaultByteSliceCodec.DecodeValue", + &byteSliceCodec{}, + []subtest{ + { + "wrong type", + []byte{}, + nil, + &valueReaderWriter{BSONType: TypeInt32}, + nothing, + fmt.Errorf("cannot decode %v into a []byte", TypeInt32), + }, + { + "type not []byte", + int64(0), + nil, + &valueReaderWriter{BSONType: TypeBinary, Return: bsoncore.Value{Type: bsoncore.TypeBinary}}, + nothing, + ValueDecoderError{Name: "ByteSliceDecodeValue", Types: []reflect.Type{tByteSlice}, Received: reflect.ValueOf(int64(0))}, + }, + { + "ReadBinary error", + []byte{}, + nil, + &valueReaderWriter{BSONType: TypeBinary, Err: errors.New("rb error"), ErrAfter: readBinary}, + readBinary, + errors.New("rb error"), + }, + { + "incorrect subtype", + []byte{}, + nil, + &valueReaderWriter{ + BSONType: TypeBinary, + Return: bsoncore.Value{ + Type: bsoncore.TypeBinary, + Data: bsoncore.AppendBinary(nil, 0xFF, []byte{0x01, 0x02, 0x03}), + }, + }, + readBinary, + decodeBinaryError{subtype: byte(0xFF), typeName: "[]byte"}, + }, + { + "can set false", + cansettest, + nil, + &valueReaderWriter{BSONType: TypeBinary, Return: bsoncore.AppendBinary(nil, 0x00, []byte{0x01, 0x02, 0x03})}, + nothing, + ValueDecoderError{Name: "ByteSliceDecodeValue", Types: []reflect.Type{tByteSlice}}, + }, + { + "decode null", + ([]byte)(nil), + nil, + &valueReaderWriter{BSONType: TypeNull}, + readNull, + nil, + }, + { + "decode undefined", + ([]byte)(nil), + nil, + &valueReaderWriter{BSONType: TypeUndefined}, + readUndefined, + nil, + }, + }, + }, + { + "defaultStringCodec.DecodeValue", + &stringCodec{}, + []subtest{ + { + "symbol", + "var hello = 'world';", + nil, + &valueReaderWriter{BSONType: TypeSymbol, Return: "var hello = 'world';"}, + readSymbol, + nil, + }, + { + "decode null", + "", + nil, + &valueReaderWriter{BSONType: TypeNull}, + readNull, + nil, + }, + { + "decode undefined", + "", + nil, + &valueReaderWriter{BSONType: TypeUndefined}, + readUndefined, + nil, + }, + 
}, + }, + { + "ValueUnmarshalerDecodeValue", + ValueDecoderFunc(valueUnmarshalerDecodeValue), + []subtest{ + { + "wrong type", + wrong, + nil, + nil, + nothing, + ValueDecoderError{ + Name: "ValueUnmarshalerDecodeValue", + Types: []reflect.Type{tValueUnmarshaler}, + Received: reflect.ValueOf(wrong), + }, + }, + { + "copy error", + &testValueUnmarshaler{}, + nil, + &valueReaderWriter{BSONType: TypeString, Err: errors.New("copy error"), ErrAfter: readString}, + readString, + errors.New("copy error"), + }, + { + "ValueUnmarshaler", + &testValueUnmarshaler{t: TypeString, val: bsoncore.AppendString(nil, "hello, world")}, + nil, + &valueReaderWriter{BSONType: TypeString, Return: "hello, world"}, + readString, + nil, + }, + }, + }, + { + "UnmarshalerDecodeValue", + ValueDecoderFunc(unmarshalerDecodeValue), + []subtest{ + { + "wrong type", + wrong, + nil, + nil, + nothing, + ValueDecoderError{Name: "UnmarshalerDecodeValue", Types: []reflect.Type{tUnmarshaler}, Received: reflect.ValueOf(wrong)}, + }, + { + "copy error", + &testUnmarshaler{}, + nil, + &valueReaderWriter{BSONType: TypeString, Err: errors.New("copy error"), ErrAfter: readString}, + readString, + errors.New("copy error"), + }, + { + // Only the pointer form of testUnmarshaler implements Unmarshaler + "value does not implement Unmarshaler", + &testUnmarshaler{ + Invoked: true, + Val: bsoncore.AppendDouble(nil, 3.14159), + }, + nil, + &valueReaderWriter{BSONType: TypeDouble, Return: float64(3.14159)}, + readDouble, + nil, + }, + { + "Unmarshaler", + &testUnmarshaler{ + Invoked: true, + Val: bsoncore.AppendDouble(nil, 3.14159), + }, + nil, + &valueReaderWriter{BSONType: TypeDouble, Return: float64(3.14159)}, + readDouble, + nil, + }, + }, + }, + { + "PointerCodec.DecodeValue", + &pointerCodec{}, + []subtest{ + { + "not valid", nil, nil, nil, nothing, + ValueDecoderError{Name: "PointerCodec.DecodeValue", Kinds: []reflect.Kind{reflect.Ptr}, Received: reflect.Value{}}, + }, + { + "can set", cansettest, nil, nil, nothing, + ValueDecoderError{Name: "PointerCodec.DecodeValue", Kinds: []reflect.Kind{reflect.Ptr}}, + }, + { + "No Decoder", &wrong, &DecodeContext{Registry: buildDefaultRegistry()}, nil, nothing, + errNoDecoder{Type: reflect.TypeOf(wrong)}, + }, + { + "decode null", + (*mystruct)(nil), + nil, + &valueReaderWriter{BSONType: TypeNull}, + readNull, + nil, + }, + { + "decode undefined", + (*mystruct)(nil), + nil, + &valueReaderWriter{BSONType: TypeUndefined}, + readUndefined, + nil, + }, + }, + }, + { + "BinaryDecodeValue", + ValueDecoderFunc(binaryDecodeValue), + []subtest{ + { + "wrong type", + wrong, + nil, + &valueReaderWriter{}, + nothing, + ValueDecoderError{Name: "BinaryDecodeValue", Types: []reflect.Type{tBinary}, Received: reflect.ValueOf(wrong)}, + }, + { + "type not binary", + Binary{}, + nil, + &valueReaderWriter{BSONType: TypeString}, + nothing, + fmt.Errorf("cannot decode %v into a Binary", TypeString), + }, + { + "ReadBinary Error", + Binary{}, + nil, + &valueReaderWriter{BSONType: TypeBinary, Err: errors.New("rb error"), ErrAfter: readBinary}, + readBinary, + errors.New("rb error"), + }, + { + "Binary/success", + Binary{Data: []byte{0x01, 0x02, 0x03}, Subtype: 0xFF}, + nil, + &valueReaderWriter{ + BSONType: TypeBinary, + Return: bsoncore.Value{ + Type: bsoncore.TypeBinary, + Data: bsoncore.AppendBinary(nil, 0xFF, []byte{0x01, 0x02, 0x03}), + }, + }, + readBinary, + nil, + }, + { + "decode null", + Binary{}, + nil, + &valueReaderWriter{BSONType: TypeNull}, + readNull, + nil, + }, + { + "decode undefined", + Binary{}, + 
nil, + &valueReaderWriter{BSONType: TypeUndefined}, + readUndefined, + nil, + }, + }, + }, + { + "UndefinedDecodeValue", + ValueDecoderFunc(undefinedDecodeValue), + []subtest{ + { + "wrong type", + wrong, + nil, + &valueReaderWriter{BSONType: TypeUndefined}, + nothing, + ValueDecoderError{Name: "UndefinedDecodeValue", Types: []reflect.Type{tUndefined}, Received: reflect.ValueOf(wrong)}, + }, + { + "type not undefined", + Undefined{}, + nil, + &valueReaderWriter{BSONType: TypeString}, + nothing, + fmt.Errorf("cannot decode %v into an Undefined", TypeString), + }, + { + "ReadUndefined Error", + Undefined{}, + nil, + &valueReaderWriter{BSONType: TypeUndefined, Err: errors.New("ru error"), ErrAfter: readUndefined}, + readUndefined, + errors.New("ru error"), + }, + { + "ReadUndefined/success", + Undefined{}, + nil, + &valueReaderWriter{BSONType: TypeUndefined}, + readUndefined, + nil, + }, + { + "decode null", + Undefined{}, + nil, + &valueReaderWriter{BSONType: TypeNull}, + readNull, + nil, + }, + }, + }, + { + "DateTimeDecodeValue", + ValueDecoderFunc(dateTimeDecodeValue), + []subtest{ + { + "wrong type", + wrong, + nil, + &valueReaderWriter{BSONType: TypeDateTime}, + nothing, + ValueDecoderError{Name: "DateTimeDecodeValue", Types: []reflect.Type{tDateTime}, Received: reflect.ValueOf(wrong)}, + }, + { + "type not datetime", + DateTime(0), + nil, + &valueReaderWriter{BSONType: TypeString}, + nothing, + fmt.Errorf("cannot decode %v into a DateTime", TypeString), + }, + { + "ReadDateTime Error", + DateTime(0), + nil, + &valueReaderWriter{BSONType: TypeDateTime, Err: errors.New("rdt error"), ErrAfter: readDateTime}, + readDateTime, + errors.New("rdt error"), + }, + { + "success", + DateTime(1234567890), + nil, + &valueReaderWriter{BSONType: TypeDateTime, Return: int64(1234567890)}, + readDateTime, + nil, + }, + { + "decode null", + DateTime(0), + nil, + &valueReaderWriter{BSONType: TypeNull}, + readNull, + nil, + }, + { + "decode undefined", + DateTime(0), + nil, + &valueReaderWriter{BSONType: TypeUndefined}, + readUndefined, + nil, + }, + }, + }, + { + "NullDecodeValue", + ValueDecoderFunc(nullDecodeValue), + []subtest{ + { + "wrong type", + wrong, + nil, + &valueReaderWriter{BSONType: TypeNull}, + nothing, + ValueDecoderError{Name: "NullDecodeValue", Types: []reflect.Type{tNull}, Received: reflect.ValueOf(wrong)}, + }, + { + "type not null", + Null{}, + nil, + &valueReaderWriter{BSONType: TypeString}, + nothing, + fmt.Errorf("cannot decode %v into a Null", TypeString), + }, + { + "ReadNull Error", + Null{}, + nil, + &valueReaderWriter{BSONType: TypeNull, Err: errors.New("rn error"), ErrAfter: readNull}, + readNull, + errors.New("rn error"), + }, + { + "success", + Null{}, + nil, + &valueReaderWriter{BSONType: TypeNull}, + readNull, + nil, + }, + }, + }, + { + "RegexDecodeValue", + ValueDecoderFunc(regexDecodeValue), + []subtest{ + { + "wrong type", + wrong, + nil, + &valueReaderWriter{BSONType: TypeRegex}, + nothing, + ValueDecoderError{Name: "RegexDecodeValue", Types: []reflect.Type{tRegex}, Received: reflect.ValueOf(wrong)}, + }, + { + "type not regex", + Regex{}, + nil, + &valueReaderWriter{BSONType: TypeString}, + nothing, + fmt.Errorf("cannot decode %v into a Regex", TypeString), + }, + { + "ReadRegex Error", + Regex{}, + nil, + &valueReaderWriter{BSONType: TypeRegex, Err: errors.New("rr error"), ErrAfter: readRegex}, + readRegex, + errors.New("rr error"), + }, + { + "success", + Regex{Pattern: "foo", Options: "bar"}, + nil, + &valueReaderWriter{ + BSONType: TypeRegex, + Return: 
bsoncore.Value{ + Type: bsoncore.TypeRegex, + Data: bsoncore.AppendRegex(nil, "foo", "bar"), + }, + }, + readRegex, + nil, + }, + { + "decode null", + Regex{}, + nil, + &valueReaderWriter{BSONType: TypeNull}, + readNull, + nil, + }, + { + "decode undefined", + Regex{}, + nil, + &valueReaderWriter{BSONType: TypeUndefined}, + readUndefined, + nil, + }, + }, + }, + { + "DBPointerDecodeValue", + ValueDecoderFunc(dbPointerDecodeValue), + []subtest{ + { + "wrong type", + wrong, + nil, + &valueReaderWriter{BSONType: TypeDBPointer}, + nothing, + ValueDecoderError{Name: "DBPointerDecodeValue", Types: []reflect.Type{tDBPointer}, Received: reflect.ValueOf(wrong)}, + }, + { + "type not dbpointer", + DBPointer{}, + nil, + &valueReaderWriter{BSONType: TypeString}, + nothing, + fmt.Errorf("cannot decode %v into a DBPointer", TypeString), + }, + { + "ReadDBPointer Error", + DBPointer{}, + nil, + &valueReaderWriter{BSONType: TypeDBPointer, Err: errors.New("rdbp error"), ErrAfter: readDBPointer}, + readDBPointer, + errors.New("rdbp error"), + }, + { + "success", + DBPointer{ + DB: "foobar", + Pointer: ObjectID{0x01, 0x02, 0x03, 0x04, 0x05, 0x06, 0x07, 0x08, 0x09, 0x0A, 0x0B, 0x0C}, + }, + nil, + &valueReaderWriter{ + BSONType: TypeDBPointer, + Return: bsoncore.Value{ + Type: bsoncore.TypeDBPointer, + Data: bsoncore.AppendDBPointer( + nil, "foobar", ObjectID{0x01, 0x02, 0x03, 0x04, 0x05, 0x06, 0x07, 0x08, 0x09, 0x0A, 0x0B, 0x0C}, + ), + }, + }, + readDBPointer, + nil, + }, + { + "decode null", + DBPointer{}, + nil, + &valueReaderWriter{BSONType: TypeNull}, + readNull, + nil, + }, + { + "decode undefined", + DBPointer{}, + nil, + &valueReaderWriter{BSONType: TypeUndefined}, + readUndefined, + nil, + }, + }, + }, + { + "TimestampDecodeValue", + ValueDecoderFunc(timestampDecodeValue), + []subtest{ + { + "wrong type", + wrong, + nil, + &valueReaderWriter{BSONType: TypeTimestamp}, + nothing, + ValueDecoderError{Name: "TimestampDecodeValue", Types: []reflect.Type{tTimestamp}, Received: reflect.ValueOf(wrong)}, + }, + { + "type not timestamp", + Timestamp{}, + nil, + &valueReaderWriter{BSONType: TypeString}, + nothing, + fmt.Errorf("cannot decode %v into a Timestamp", TypeString), + }, + { + "ReadTimestamp Error", + Timestamp{}, + nil, + &valueReaderWriter{BSONType: TypeTimestamp, Err: errors.New("rt error"), ErrAfter: readTimestamp}, + readTimestamp, + errors.New("rt error"), + }, + { + "success", + Timestamp{T: 12345, I: 67890}, + nil, + &valueReaderWriter{ + BSONType: TypeTimestamp, + Return: bsoncore.Value{ + Type: bsoncore.TypeTimestamp, + Data: bsoncore.AppendTimestamp(nil, 12345, 67890), + }, + }, + readTimestamp, + nil, + }, + { + "decode null", + Timestamp{}, + nil, + &valueReaderWriter{BSONType: TypeNull}, + readNull, + nil, + }, + { + "decode undefined", + Timestamp{}, + nil, + &valueReaderWriter{BSONType: TypeUndefined}, + readUndefined, + nil, + }, + }, + }, + { + "MinKeyDecodeValue", + ValueDecoderFunc(minKeyDecodeValue), + []subtest{ + { + "wrong type", + wrong, + nil, + &valueReaderWriter{BSONType: TypeMinKey}, + nothing, + ValueDecoderError{Name: "MinKeyDecodeValue", Types: []reflect.Type{tMinKey}, Received: reflect.ValueOf(wrong)}, + }, + { + "type not null", + MinKey{}, + nil, + &valueReaderWriter{BSONType: TypeString}, + nothing, + fmt.Errorf("cannot decode %v into a MinKey", TypeString), + }, + { + "ReadMinKey Error", + MinKey{}, + nil, + &valueReaderWriter{BSONType: TypeMinKey, Err: errors.New("rn error"), ErrAfter: readMinKey}, + readMinKey, + errors.New("rn error"), + }, + { + "success", + 
MinKey{}, + nil, + &valueReaderWriter{BSONType: TypeMinKey}, + readMinKey, + nil, + }, + { + "decode null", + MinKey{}, + nil, + &valueReaderWriter{BSONType: TypeNull}, + readNull, + nil, + }, + { + "decode undefined", + MinKey{}, + nil, + &valueReaderWriter{BSONType: TypeUndefined}, + readUndefined, + nil, + }, + }, + }, + { + "MaxKeyDecodeValue", + ValueDecoderFunc(maxKeyDecodeValue), + []subtest{ + { + "wrong type", + wrong, + nil, + &valueReaderWriter{BSONType: TypeMaxKey}, + nothing, + ValueDecoderError{Name: "MaxKeyDecodeValue", Types: []reflect.Type{tMaxKey}, Received: reflect.ValueOf(wrong)}, + }, + { + "type not null", + MaxKey{}, + nil, + &valueReaderWriter{BSONType: TypeString}, + nothing, + fmt.Errorf("cannot decode %v into a MaxKey", TypeString), + }, + { + "ReadMaxKey Error", + MaxKey{}, + nil, + &valueReaderWriter{BSONType: TypeMaxKey, Err: errors.New("rn error"), ErrAfter: readMaxKey}, + readMaxKey, + errors.New("rn error"), + }, + { + "success", + MaxKey{}, + nil, + &valueReaderWriter{BSONType: TypeMaxKey}, + readMaxKey, + nil, + }, + { + "decode null", + MaxKey{}, + nil, + &valueReaderWriter{BSONType: TypeNull}, + readNull, + nil, + }, + { + "decode undefined", + MaxKey{}, + nil, + &valueReaderWriter{BSONType: TypeUndefined}, + readUndefined, + nil, + }, + }, + }, + { + "JavaScriptDecodeValue", + ValueDecoderFunc(javaScriptDecodeValue), + []subtest{ + { + "wrong type", + wrong, + nil, + &valueReaderWriter{BSONType: TypeJavaScript, Return: ""}, + nothing, + ValueDecoderError{Name: "JavaScriptDecodeValue", Types: []reflect.Type{tJavaScript}, Received: reflect.ValueOf(wrong)}, + }, + { + "type not Javascript", + JavaScript(""), + nil, + &valueReaderWriter{BSONType: TypeString}, + nothing, + fmt.Errorf("cannot decode %v into a JavaScript", TypeString), + }, + { + "ReadJavascript Error", + JavaScript(""), + nil, + &valueReaderWriter{BSONType: TypeJavaScript, Err: errors.New("rjs error"), ErrAfter: readJavascript}, + readJavascript, + errors.New("rjs error"), + }, + { + "JavaScript/success", + JavaScript("var hello = 'world';"), + nil, + &valueReaderWriter{BSONType: TypeJavaScript, Return: "var hello = 'world';"}, + readJavascript, + nil, + }, + { + "decode null", + JavaScript(""), + nil, + &valueReaderWriter{BSONType: TypeNull}, + readNull, + nil, + }, + { + "decode undefined", + JavaScript(""), + nil, + &valueReaderWriter{BSONType: TypeUndefined}, + readUndefined, + nil, + }, + }, + }, + { + "SymbolDecodeValue", + ValueDecoderFunc(symbolDecodeValue), + []subtest{ + { + "wrong type", + wrong, + nil, + &valueReaderWriter{BSONType: TypeSymbol, Return: ""}, + nothing, + ValueDecoderError{Name: "SymbolDecodeValue", Types: []reflect.Type{tSymbol}, Received: reflect.ValueOf(wrong)}, + }, + { + "type not Symbol", + Symbol(""), + nil, + &valueReaderWriter{BSONType: TypeInt32}, + nothing, + fmt.Errorf("cannot decode %v into a Symbol", TypeInt32), + }, + { + "ReadSymbol Error", + Symbol(""), + nil, + &valueReaderWriter{BSONType: TypeSymbol, Err: errors.New("rjs error"), ErrAfter: readSymbol}, + readSymbol, + errors.New("rjs error"), + }, + { + "Symbol/success", + Symbol("var hello = 'world';"), + nil, + &valueReaderWriter{BSONType: TypeSymbol, Return: "var hello = 'world';"}, + readSymbol, + nil, + }, + { + "decode null", + Symbol(""), + nil, + &valueReaderWriter{BSONType: TypeNull}, + readNull, + nil, + }, + { + "decode undefined", + Symbol(""), + nil, + &valueReaderWriter{BSONType: TypeUndefined}, + readUndefined, + nil, + }, + }, + }, + { + "CoreDocumentDecodeValue", + 
ValueDecoderFunc(coreDocumentDecodeValue), + []subtest{ + { + "wrong type", + wrong, + nil, + &valueReaderWriter{}, + nothing, + ValueDecoderError{ + Name: "CoreDocumentDecodeValue", + Types: []reflect.Type{tCoreDocument}, + Received: reflect.ValueOf(wrong), + }, + }, + { + "*bsoncore.Document is nil", + (*bsoncore.Document)(nil), + nil, + nil, + nothing, + ValueDecoderError{ + Name: "CoreDocumentDecodeValue", + Types: []reflect.Type{tCoreDocument}, + Received: reflect.ValueOf((*bsoncore.Document)(nil)), + }, + }, + { + "Copy error", + bsoncore.Document{}, + nil, + &valueReaderWriter{Err: errors.New("copy error"), ErrAfter: readDocument}, + readDocument, + errors.New("copy error"), + }, + }, + }, + { + "StructCodec.DecodeValue", + newStructCodec(nil), + []subtest{ + { + "Not struct", + reflect.New(reflect.TypeOf(struct{ Foo string }{})).Elem().Interface(), + nil, + &valueReaderWriter{BSONType: TypeString}, + nothing, + errors.New("cannot decode string into a struct { Foo string }"), + }, + { + "decode null", + reflect.New(reflect.TypeOf(struct{ Foo string }{})).Elem().Interface(), + nil, + &valueReaderWriter{BSONType: TypeNull}, + readNull, + nil, + }, + { + "decode undefined", + reflect.New(reflect.TypeOf(struct{ Foo string }{})).Elem().Interface(), + nil, + &valueReaderWriter{BSONType: TypeUndefined}, + readUndefined, + nil, + }, + }, + }, + { + "CodeWithScopeDecodeValue", + ValueDecoderFunc(codeWithScopeDecodeValue), + []subtest{ + { + "wrong type", + wrong, + nil, + &valueReaderWriter{BSONType: TypeCodeWithScope}, + nothing, + ValueDecoderError{ + Name: "CodeWithScopeDecodeValue", + Types: []reflect.Type{tCodeWithScope}, + Received: reflect.ValueOf(wrong), + }, + }, + { + "type not codewithscope", + CodeWithScope{}, + nil, + &valueReaderWriter{BSONType: TypeString}, + nothing, + fmt.Errorf("cannot decode %v into a CodeWithScope", TypeString), + }, + { + "ReadCodeWithScope Error", + CodeWithScope{}, + nil, + &valueReaderWriter{BSONType: TypeCodeWithScope, Err: errors.New("rcws error"), ErrAfter: readCodeWithScope}, + readCodeWithScope, + errors.New("rcws error"), + }, + { + "decodeDocument Error", + CodeWithScope{ + Code: "var hello = 'world';", + Scope: D{{"foo", nil}}, + }, + &DecodeContext{Registry: buildDefaultRegistry()}, + &valueReaderWriter{BSONType: TypeCodeWithScope, Err: errors.New("dd error"), ErrAfter: readElement}, + readElement, + errors.New("dd error"), + }, + { + "decode null", + CodeWithScope{}, + nil, + &valueReaderWriter{BSONType: TypeNull}, + readNull, + nil, + }, + { + "decode undefined", + CodeWithScope{}, + nil, + &valueReaderWriter{BSONType: TypeUndefined}, + readUndefined, + nil, + }, + }, + }, + { + "CoreArrayDecodeValue", + &arrayCodec{}, + []subtest{ + { + "wrong type", + wrong, + nil, + &valueReaderWriter{}, + nothing, + ValueDecoderError{ + Name: "CoreArrayDecodeValue", + Types: []reflect.Type{tCoreArray}, + Received: reflect.ValueOf(wrong), + }, + }, + { + "*bsoncore.Array is nil", + (*bsoncore.Array)(nil), + nil, + nil, + nothing, + ValueDecoderError{ + Name: "CoreArrayDecodeValue", + Types: []reflect.Type{tCoreArray}, + Received: reflect.ValueOf((*bsoncore.Array)(nil)), + }, + }, + }, + }, + } + + for _, tc := range testCases { + t.Run(tc.name, func(t *testing.T) { + for _, rc := range tc.subtests { + t.Run(rc.name, func(t *testing.T) { + var dc DecodeContext + if rc.dctx != nil { + dc = *rc.dctx + } + llvrw := new(valueReaderWriter) + if rc.llvrw != nil { + llvrw = rc.llvrw + } + llvrw.T = t + // var got interface{} + if rc.val == cansetreflectiontest { 
// We're doing a CanSet reflection test + err := tc.vd.DecodeValue(dc, llvrw, reflect.Value{}) + if !assert.CompareErrors(err, rc.err) { + t.Errorf("Errors do not match. got %v; want %v", err, rc.err) + } + + val := reflect.New(reflect.TypeOf(rc.val)).Elem() + err = tc.vd.DecodeValue(dc, llvrw, val) + if !assert.CompareErrors(err, rc.err) { + t.Errorf("Errors do not match. got %v; want %v", err, rc.err) + } + return + } + if rc.val == cansettest { // We're doing an IsValid and CanSet test + var wanterr ValueDecoderError + if !errors.As(rc.err, &wanterr) { + t.Fatalf("Error must be a DecodeValueError, but got a %T", rc.err) + } + + err := tc.vd.DecodeValue(dc, llvrw, reflect.Value{}) + wanterr.Received = reflect.ValueOf(nil) + if !assert.CompareErrors(err, wanterr) { + t.Errorf("Errors do not match. got %v; want %v", err, wanterr) + } + + err = tc.vd.DecodeValue(dc, llvrw, reflect.ValueOf(int(12345))) + wanterr.Received = reflect.ValueOf(int(12345)) + if !assert.CompareErrors(err, wanterr) { + t.Errorf("Errors do not match. got %v; want %v", err, wanterr) + } + return + } + var val reflect.Value + if rtype := reflect.TypeOf(rc.val); rtype != nil { + val = reflect.New(rtype).Elem() + } + want := rc.val + defer func() { + if err := recover(); err != nil { + fmt.Println(t.Name()) + panic(err) + } + }() + err := tc.vd.DecodeValue(dc, llvrw, val) + if !assert.CompareErrors(err, rc.err) { + t.Errorf("Errors do not match. got %v; want %v", err, rc.err) + } + invoked := llvrw.invoked + if !cmp.Equal(invoked, rc.invoke) { + t.Errorf("Incorrect method invoked. got %v; want %v", invoked, rc.invoke) + } + var got interface{} + if val.IsValid() && val.CanInterface() { + got = val.Interface() + } + if rc.err == nil && !cmp.Equal(got, want, cmp.Comparer(compareDecimal128)) { + t.Errorf("Values do not match. got (%T)%v; want (%T)%v", got, got, want, want) + } + }) + } + }) + } + + t.Run("CodeWithScopeCodec/DecodeValue/success", func(t *testing.T) { + dc := DecodeContext{Registry: buildDefaultRegistry()} + b := bsoncore.BuildDocument(nil, + bsoncore.AppendCodeWithScopeElement( + nil, "foo", "var hello = 'world';", + buildDocument(bsoncore.AppendNullElement(nil, "bar")), + ), + ) + dvr := NewDocumentReader(bytes.NewReader(b)) + dr, err := dvr.ReadDocument() + noerr(t, err) + _, vr, err := dr.ReadElement() + noerr(t, err) + + want := CodeWithScope{ + Code: "var hello = 'world';", + Scope: D{{"bar", nil}}, + } + val := reflect.New(tCodeWithScope).Elem() + err = codeWithScopeDecodeValue(dc, vr, val) + noerr(t, err) + + got := val.Interface().(CodeWithScope) + if got.Code != want.Code && !cmp.Equal(got.Scope, want.Scope) { + t.Errorf("CodeWithScopes do not match. got %v; want %v", got, want) + } + }) + t.Run("ValueUnmarshalerDecodeValue/UnmarshalBSONValue error", func(t *testing.T) { + var dc DecodeContext + llvrw := &valueReaderWriter{BSONType: TypeString, Return: string("hello, world!")} + llvrw.T = t + + want := errors.New("ubsonv error") + valUnmarshaler := &testValueUnmarshaler{err: want} + got := valueUnmarshalerDecodeValue(dc, llvrw, reflect.ValueOf(valUnmarshaler)) + if !assert.CompareErrors(got, want) { + t.Errorf("Errors do not match. 
got %v; want %v", got, want) + } + }) + t.Run("ValueUnmarshalerDecodeValue/Unaddressable value", func(t *testing.T) { + var dc DecodeContext + llvrw := &valueReaderWriter{BSONType: TypeString, Return: string("hello, world!")} + llvrw.T = t + + val := reflect.ValueOf(testValueUnmarshaler{}) + want := ValueDecoderError{Name: "ValueUnmarshalerDecodeValue", Types: []reflect.Type{tValueUnmarshaler}, Received: val} + got := valueUnmarshalerDecodeValue(dc, llvrw, val) + if !assert.CompareErrors(got, want) { + t.Errorf("Errors do not match. got %v; want %v", got, want) + } + }) + + t.Run("SliceCodec/DecodeValue/too many elements", func(t *testing.T) { + idx, doc := bsoncore.AppendDocumentStart(nil) + aidx, doc := bsoncore.AppendArrayElementStart(doc, "foo") + doc = bsoncore.AppendStringElement(doc, "0", "foo") + doc = bsoncore.AppendStringElement(doc, "1", "bar") + doc, err := bsoncore.AppendArrayEnd(doc, aidx) + noerr(t, err) + doc, err = bsoncore.AppendDocumentEnd(doc, idx) + noerr(t, err) + dvr := NewDocumentReader(bytes.NewReader(doc)) + noerr(t, err) + dr, err := dvr.ReadDocument() + noerr(t, err) + _, vr, err := dr.ReadElement() + noerr(t, err) + var val [1]string + want := fmt.Errorf("more elements returned in array than can fit inside %T, got 2 elements", val) + + dc := DecodeContext{Registry: buildDefaultRegistry()} + got := arrayDecodeValue(dc, vr, reflect.ValueOf(val)) + if !assert.CompareErrors(got, want) { + t.Errorf("Errors do not match. got %v; want %v", got, want) + } + }) + + t.Run("success path", func(t *testing.T) { + oid := NewObjectID() + oids := []ObjectID{NewObjectID(), NewObjectID(), NewObjectID()} + var str = new(string) + *str = "bar" + now := time.Now().Truncate(time.Millisecond).UTC() + murl, err := url.Parse("https://mongodb.com/random-url?hello=world") + if err != nil { + t.Errorf("Error parsing URL: %v", err) + t.FailNow() + } + decimal128, err := ParseDecimal128("1.5e10") + if err != nil { + t.Errorf("Error parsing decimal128: %v", err) + t.FailNow() + } + + testCases := []struct { + name string + value interface{} + b []byte + err error + }{ + { + "map[string]int", + map[string]int32{"foo": 1}, + []byte{ + 0x0E, 0x00, 0x00, 0x00, + 0x10, 'f', 'o', 'o', 0x00, + 0x01, 0x00, 0x00, 0x00, + 0x00, + }, + nil, + }, + { + "map[string]ObjectID", + map[string]ObjectID{"foo": oid}, + func() []byte { + idx, doc := bsoncore.AppendDocumentStart(nil) + doc = bsoncore.AppendObjectIDElement(doc, "foo", oid) + doc, _ = bsoncore.AppendDocumentEnd(doc, idx) + return doc + }(), + nil, + }, + { + "map[string][]int32", + map[string][]int32{"Z": {1, 2, 3}}, + buildDocumentArray(func(doc []byte) []byte { + doc = bsoncore.AppendInt32Element(doc, "0", 1) + doc = bsoncore.AppendInt32Element(doc, "1", 2) + return bsoncore.AppendInt32Element(doc, "2", 3) + }), + nil, + }, + { + "map[string][]ObjectID", + map[string][]ObjectID{"Z": oids}, + buildDocumentArray(func(doc []byte) []byte { + doc = bsoncore.AppendObjectIDElement(doc, "0", oids[0]) + doc = bsoncore.AppendObjectIDElement(doc, "1", oids[1]) + return bsoncore.AppendObjectIDElement(doc, "2", oids[2]) + }), + nil, + }, + { + "map[string][]json.Number(int64)", + map[string][]json.Number{"Z": {json.Number("5"), json.Number("10")}}, + buildDocumentArray(func(doc []byte) []byte { + doc = bsoncore.AppendInt64Element(doc, "0", 5) + return bsoncore.AppendInt64Element(doc, "1", 10) + }), + nil, + }, + { + "map[string][]json.Number(float64)", + map[string][]json.Number{"Z": {json.Number("5"), json.Number("10.1")}}, + buildDocumentArray(func(doc 
[]byte) []byte { + doc = bsoncore.AppendInt64Element(doc, "0", 5) + return bsoncore.AppendDoubleElement(doc, "1", 10.1) + }), + nil, + }, + { + "map[string][]*url.URL", + map[string][]*url.URL{"Z": {murl}}, + buildDocumentArray(func(doc []byte) []byte { + return bsoncore.AppendStringElement(doc, "0", murl.String()) + }), + nil, + }, + { + "map[string][]Decimal128", + map[string][]Decimal128{"Z": {decimal128}}, + buildDocumentArray(func(doc []byte) []byte { + return bsoncore.AppendDecimal128Element(doc, "0", decimal128.h, decimal128.l) + }), + nil, + }, + { + "map[mystring]interface{}", + map[mystring]interface{}{"pi": 3.14159}, + buildDocument(bsoncore.AppendDoubleElement(nil, "pi", 3.14159)), + nil, + }, + { + "-", + struct { + A string `bson:"-"` + }{ + A: "", + }, + []byte{0x05, 0x00, 0x00, 0x00, 0x00}, + nil, + }, + { + "omitempty", + struct { + A string `bson:",omitempty"` + }{ + A: "", + }, + []byte{0x05, 0x00, 0x00, 0x00, 0x00}, + nil, + }, + { + "omitempty, empty time", + struct { + A time.Time `bson:",omitempty"` + }{ + A: time.Time{}, + }, + []byte{0x05, 0x00, 0x00, 0x00, 0x00}, + nil, + }, + { + "no private fields", + noPrivateFields{a: "should be empty"}, + []byte{0x05, 0x00, 0x00, 0x00, 0x00}, + nil, + }, + { + "minsize", + struct { + A int64 `bson:",minsize"` + }{ + A: 12345, + }, + buildDocument(bsoncore.AppendInt32Element(nil, "a", 12345)), + nil, + }, + { + "inline", + struct { + Foo struct { + A int64 `bson:",minsize"` + } `bson:",inline"` + }{ + Foo: struct { + A int64 `bson:",minsize"` + }{ + A: 12345, + }, + }, + buildDocument(bsoncore.AppendInt32Element(nil, "a", 12345)), + nil, + }, + { + "inline struct pointer", + struct { + Foo *struct { + A int64 `bson:",minsize"` + } `bson:",inline"` + Bar *struct { + B int64 + } `bson:",inline"` + }{ + Foo: &struct { + A int64 `bson:",minsize"` + }{ + A: 12345, + }, + Bar: nil, + }, + buildDocument(bsoncore.AppendInt32Element(nil, "a", 12345)), + nil, + }, + { + "nested inline struct pointer", + struct { + Foo *struct { + Bar *struct { + A int64 `bson:",minsize"` + } `bson:",inline"` + } `bson:",inline"` + }{ + Foo: &struct { + Bar *struct { + A int64 `bson:",minsize"` + } `bson:",inline"` + }{ + Bar: &struct { + A int64 `bson:",minsize"` + }{ + A: 12345, + }, + }, + }, + buildDocument(bsoncore.AppendInt32Element(nil, "a", 12345)), + nil, + }, + { + "inline nil struct pointer", + struct { + Foo *struct { + A int64 `bson:",minsize"` + } `bson:",inline"` + }{ + Foo: nil, + }, + buildDocument([]byte{}), + nil, + }, + { + "inline overwrite", + struct { + Foo struct { + A int32 + B string + } `bson:",inline"` + A int64 + }{ + Foo: struct { + A int32 + B string + }{ + A: 0, + B: "foo", + }, + A: 54321, + }, + buildDocument(func(doc []byte) []byte { + doc = bsoncore.AppendStringElement(doc, "b", "foo") + doc = bsoncore.AppendInt64Element(doc, "a", 54321) + return doc + }(nil)), + nil, + }, + { + "inline overwrite with nested structs", + struct { + Foo struct { + A int32 + } `bson:",inline"` + Bar struct { + A int32 + } `bson:",inline"` + A int64 + }{ + Foo: struct { + A int32 + }{}, + Bar: struct { + A int32 + }{}, + A: 54321, + }, + buildDocument(bsoncore.AppendInt64Element(nil, "a", 54321)), + nil, + }, + { + "inline map", + struct { + Foo map[string]string `bson:",inline"` + }{ + Foo: map[string]string{"foo": "bar"}, + }, + buildDocument(bsoncore.AppendStringElement(nil, "foo", "bar")), + nil, + }, + { + "alternate name bson:name", + struct { + A string `bson:"foo"` + }{ + A: "bar", + }, + 
buildDocument(bsoncore.AppendStringElement(nil, "foo", "bar")), + nil, + }, + { + "alternate name", + struct { + A string `bson:"foo"` + }{ + A: "bar", + }, + buildDocument(bsoncore.AppendStringElement(nil, "foo", "bar")), + nil, + }, + { + "inline, omitempty", + struct { + A string + Foo zeroTest `bson:"omitempty,inline"` + }{ + A: "bar", + Foo: zeroTest{true}, + }, + buildDocument(bsoncore.AppendStringElement(nil, "a", "bar")), + nil, + }, + { + "struct{}", + struct { + A bool + B int32 + C int64 + D uint16 + E uint64 + F float64 + G string + H map[string]string + I []byte + K [2]string + L struct { + M string + } + Q ObjectID + T []struct{} + Y json.Number + Z time.Time + AA json.Number + AB *url.URL + AC Decimal128 + AD *time.Time + AE *testValueUnmarshaler + AF *bool + AG *bool + AH *int32 + AI *int64 + AJ *ObjectID + AK *ObjectID + AL testValueUnmarshaler + AM interface{} + AN interface{} + AO interface{} + AP D + AQ A + AR [2]E + AS []byte + AT map[string]interface{} + AU CodeWithScope + AV M + AW D + AX map[string]interface{} + AY []E + AZ interface{} + }{ + A: true, + B: 123, + C: 456, + D: 789, + E: 101112, + F: 3.14159, + G: "Hello, world", + H: map[string]string{"foo": "bar"}, + I: []byte{0x01, 0x02, 0x03}, + K: [2]string{"baz", "qux"}, + L: struct { + M string + }{ + M: "foobar", + }, + Q: oid, + T: nil, + Y: json.Number("5"), + Z: now, + AA: json.Number("10.1"), + AB: murl, + AC: decimal128, + AD: &now, + AE: &testValueUnmarshaler{t: TypeString, val: bsoncore.AppendString(nil, "hello, world!")}, + AF: func(b bool) *bool { return &b }(true), + AG: nil, + AH: func(i32 int32) *int32 { return &i32 }(12345), + AI: func(i64 int64) *int64 { return &i64 }(1234567890), + AJ: &oid, + AK: nil, + AL: testValueUnmarshaler{t: TypeString, val: bsoncore.AppendString(nil, "hello, world!")}, + AM: "hello, world", + AN: int32(12345), + AO: oid, + AP: D{{"foo", "bar"}}, + AQ: A{"foo", "bar"}, + AR: [2]E{{"hello", "world"}, {"pi", 3.14159}}, + AS: nil, + AT: nil, + AU: CodeWithScope{Code: "var hello = 'world';", Scope: D{{"pi", 3.14159}}}, + AV: M{"foo": D{{"bar", "baz"}}}, + AW: D{{"foo", D{{"bar", "baz"}}}}, + AX: map[string]interface{}{"foo": D{{"bar", "baz"}}}, + AY: []E{{"foo", D{{"bar", "baz"}}}}, + AZ: D{{"foo", D{{"bar", "baz"}}}}, + }, + buildDocument(func(doc []byte) []byte { + doc = bsoncore.AppendBooleanElement(doc, "a", true) + doc = bsoncore.AppendInt32Element(doc, "b", 123) + doc = bsoncore.AppendInt64Element(doc, "c", 456) + doc = bsoncore.AppendInt32Element(doc, "d", 789) + doc = bsoncore.AppendInt64Element(doc, "e", 101112) + doc = bsoncore.AppendDoubleElement(doc, "f", 3.14159) + doc = bsoncore.AppendStringElement(doc, "g", "Hello, world") + doc = bsoncore.AppendDocumentElement(doc, "h", buildDocument(bsoncore.AppendStringElement(nil, "foo", "bar"))) + doc = bsoncore.AppendBinaryElement(doc, "i", 0x00, []byte{0x01, 0x02, 0x03}) + doc = bsoncore.AppendArrayElement(doc, "k", + buildArray(bsoncore.AppendStringElement(bsoncore.AppendStringElement(nil, "0", "baz"), "1", "qux")), + ) + doc = bsoncore.AppendDocumentElement(doc, "l", buildDocument(bsoncore.AppendStringElement(nil, "m", "foobar"))) + doc = bsoncore.AppendObjectIDElement(doc, "q", oid) + doc = bsoncore.AppendNullElement(doc, "t") + doc = bsoncore.AppendInt64Element(doc, "y", 5) + doc = bsoncore.AppendDateTimeElement(doc, "z", now.UnixNano()/int64(time.Millisecond)) + doc = bsoncore.AppendDoubleElement(doc, "aa", 10.1) + doc = bsoncore.AppendStringElement(doc, "ab", murl.String()) + doc = 
bsoncore.AppendDecimal128Element(doc, "ac", decimal128.h, decimal128.l) + doc = bsoncore.AppendDateTimeElement(doc, "ad", now.UnixNano()/int64(time.Millisecond)) + doc = bsoncore.AppendStringElement(doc, "ae", "hello, world!") + doc = bsoncore.AppendBooleanElement(doc, "af", true) + doc = bsoncore.AppendNullElement(doc, "ag") + doc = bsoncore.AppendInt32Element(doc, "ah", 12345) + doc = bsoncore.AppendInt32Element(doc, "ai", 1234567890) + doc = bsoncore.AppendObjectIDElement(doc, "aj", oid) + doc = bsoncore.AppendNullElement(doc, "ak") + doc = bsoncore.AppendStringElement(doc, "al", "hello, world!") + doc = bsoncore.AppendStringElement(doc, "am", "hello, world") + doc = bsoncore.AppendInt32Element(doc, "an", 12345) + doc = bsoncore.AppendObjectIDElement(doc, "ao", oid) + doc = bsoncore.AppendDocumentElement(doc, "ap", buildDocument(bsoncore.AppendStringElement(nil, "foo", "bar"))) + doc = bsoncore.AppendArrayElement(doc, "aq", + buildArray(bsoncore.AppendStringElement(bsoncore.AppendStringElement(nil, "0", "foo"), "1", "bar")), + ) + doc = bsoncore.AppendDocumentElement(doc, "ar", + buildDocument(bsoncore.AppendDoubleElement(bsoncore.AppendStringElement(nil, "hello", "world"), "pi", 3.14159)), + ) + doc = bsoncore.AppendNullElement(doc, "as") + doc = bsoncore.AppendNullElement(doc, "at") + doc = bsoncore.AppendCodeWithScopeElement(doc, "au", + "var hello = 'world';", buildDocument(bsoncore.AppendDoubleElement(nil, "pi", 3.14159)), + ) + for _, name := range [5]string{"av", "aw", "ax", "ay", "az"} { + doc = bsoncore.AppendDocumentElement(doc, name, buildDocument( + bsoncore.AppendDocumentElement(nil, "foo", buildDocument( + bsoncore.AppendStringElement(nil, "bar", "baz"), + )), + )) + } + return doc + }(nil)), + nil, + }, + { + "struct{[]interface{}}", + struct { + A []bool + B []int32 + C []int64 + D []uint16 + E []uint64 + F []float64 + G []string + H []map[string]string + I [][]byte + K [1][2]string + L []struct { + M string + } + N [][]string + R []ObjectID + T []struct{} + W []map[string]struct{} + X []map[string]struct{} + Y []map[string]struct{} + Z []time.Time + AA []json.Number + AB []*url.URL + AC []Decimal128 + AD []*time.Time + AE []*testValueUnmarshaler + AF []*bool + AG []*int32 + AH []*int64 + AI []*ObjectID + AJ []D + AK []A + AL [][2]E + }{ + A: []bool{true}, + B: []int32{123}, + C: []int64{456}, + D: []uint16{789}, + E: []uint64{101112}, + F: []float64{3.14159}, + G: []string{"Hello, world"}, + H: []map[string]string{{"foo": "bar"}}, + I: [][]byte{{0x01, 0x02, 0x03}}, + K: [1][2]string{{"baz", "qux"}}, + L: []struct { + M string + }{ + { + M: "foobar", + }, + }, + N: [][]string{{"foo", "bar"}}, + R: oids, + T: nil, + W: nil, + X: []map[string]struct{}{}, // Should be empty BSON Array + Y: []map[string]struct{}{{}}, // Should be BSON array with one element, an empty BSON SubDocument + Z: []time.Time{now, now}, + AA: []json.Number{json.Number("5"), json.Number("10.1")}, + AB: []*url.URL{murl}, + AC: []Decimal128{decimal128}, + AD: []*time.Time{&now, &now}, + AE: []*testValueUnmarshaler{ + {t: TypeString, val: bsoncore.AppendString(nil, "hello")}, + {t: TypeString, val: bsoncore.AppendString(nil, "world")}, + }, + AF: []*bool{pbool(true), nil}, + AG: []*int32{pi32(12345), nil}, + AH: []*int64{pi64(1234567890), nil, pi64(9012345678)}, + AI: []*ObjectID{&oid, nil}, + AJ: []D{{{"foo", "bar"}}, nil}, + AK: []A{{"foo", "bar"}, nil}, + AL: [][2]E{{{"hello", "world"}, {"pi", 3.14159}}}, + }, + buildDocument(func(doc []byte) []byte { + doc = appendArrayElement(doc, "a", 
bsoncore.AppendBooleanElement(nil, "0", true)) + doc = appendArrayElement(doc, "b", bsoncore.AppendInt32Element(nil, "0", 123)) + doc = appendArrayElement(doc, "c", bsoncore.AppendInt64Element(nil, "0", 456)) + doc = appendArrayElement(doc, "d", bsoncore.AppendInt32Element(nil, "0", 789)) + doc = appendArrayElement(doc, "e", bsoncore.AppendInt64Element(nil, "0", 101112)) + doc = appendArrayElement(doc, "f", bsoncore.AppendDoubleElement(nil, "0", 3.14159)) + doc = appendArrayElement(doc, "g", bsoncore.AppendStringElement(nil, "0", "Hello, world")) + doc = appendArrayElement(doc, "h", bsoncore.BuildDocumentElement(nil, "0", bsoncore.AppendStringElement(nil, "foo", "bar"))) + doc = appendArrayElement(doc, "i", bsoncore.AppendBinaryElement(nil, "0", 0x00, []byte{0x01, 0x02, 0x03})) + doc = appendArrayElement(doc, "k", + appendArrayElement(nil, "0", + bsoncore.AppendStringElement(bsoncore.AppendStringElement(nil, "0", "baz"), "1", "qux")), + ) + doc = appendArrayElement(doc, "l", bsoncore.BuildDocumentElement(nil, "0", bsoncore.AppendStringElement(nil, "m", "foobar"))) + doc = appendArrayElement(doc, "n", + appendArrayElement(nil, "0", + bsoncore.AppendStringElement(bsoncore.AppendStringElement(nil, "0", "foo"), "1", "bar")), + ) + doc = appendArrayElement(doc, "r", + bsoncore.AppendObjectIDElement( + bsoncore.AppendObjectIDElement( + bsoncore.AppendObjectIDElement(nil, + "0", oids[0]), + "1", oids[1]), + "2", oids[2]), + ) + doc = bsoncore.AppendNullElement(doc, "t") + doc = bsoncore.AppendNullElement(doc, "w") + doc = appendArrayElement(doc, "x", nil) + doc = appendArrayElement(doc, "y", bsoncore.BuildDocumentElement(nil, "0", nil)) + doc = appendArrayElement(doc, "z", + bsoncore.AppendDateTimeElement( + bsoncore.AppendDateTimeElement( + nil, "0", now.UnixNano()/int64(time.Millisecond)), + "1", now.UnixNano()/int64(time.Millisecond)), + ) + doc = appendArrayElement(doc, "aa", bsoncore.AppendDoubleElement(bsoncore.AppendInt64Element(nil, "0", 5), "1", 10.10)) + doc = appendArrayElement(doc, "ab", bsoncore.AppendStringElement(nil, "0", murl.String())) + doc = appendArrayElement(doc, "ac", bsoncore.AppendDecimal128Element(nil, "0", decimal128.h, decimal128.l)) + doc = appendArrayElement(doc, "ad", + bsoncore.AppendDateTimeElement( + bsoncore.AppendDateTimeElement(nil, "0", now.UnixNano()/int64(time.Millisecond)), + "1", now.UnixNano()/int64(time.Millisecond)), + ) + doc = appendArrayElement(doc, "ae", + bsoncore.AppendStringElement(bsoncore.AppendStringElement(nil, "0", "hello"), "1", "world"), + ) + doc = appendArrayElement(doc, "af", + bsoncore.AppendNullElement(bsoncore.AppendBooleanElement(nil, "0", true), "1"), + ) + doc = appendArrayElement(doc, "ag", + bsoncore.AppendNullElement(bsoncore.AppendInt32Element(nil, "0", 12345), "1"), + ) + doc = appendArrayElement(doc, "ah", + bsoncore.AppendInt64Element( + bsoncore.AppendNullElement(bsoncore.AppendInt64Element(nil, "0", 1234567890), "1"), + "2", 9012345678, + ), + ) + doc = appendArrayElement(doc, "ai", + bsoncore.AppendNullElement(bsoncore.AppendObjectIDElement(nil, "0", oid), "1"), + ) + doc = appendArrayElement(doc, "aj", + bsoncore.AppendNullElement( + bsoncore.AppendDocumentElement(nil, "0", buildDocument(bsoncore.AppendStringElement(nil, "foo", "bar"))), + "1", + ), + ) + doc = appendArrayElement(doc, "ak", + bsoncore.AppendNullElement( + appendArrayElement(nil, "0", + bsoncore.AppendStringElement(bsoncore.AppendStringElement(nil, "0", "foo"), "1", "bar"), + ), + "1", + ), + ) + doc = appendArrayElement(doc, "al", + 
bsoncore.BuildDocumentElement(nil, "0", + bsoncore.AppendDoubleElement(bsoncore.AppendStringElement(nil, "hello", "world"), "pi", 3.14159), + ), + ) + return doc + }(nil)), + nil, + }, + } + + t.Run("Decode", func(t *testing.T) { + compareTime := func(t1, t2 time.Time) bool { + if t1.Location() != t2.Location() { + return false + } + return t1.Equal(t2) + } + + for _, tc := range testCases { + t.Run(tc.name, func(t *testing.T) { + vr := NewDocumentReader(bytes.NewReader(tc.b)) + reg := buildDefaultRegistry() + vtype := reflect.TypeOf(tc.value) + dec, err := reg.LookupDecoder(vtype) + noerr(t, err) + + gotVal := reflect.New(reflect.TypeOf(tc.value)).Elem() + err = dec.DecodeValue(DecodeContext{Registry: reg}, vr, gotVal) + noerr(t, err) + + got := gotVal.Interface() + want := tc.value + if diff := cmp.Diff( + got, want, + cmp.Comparer(compareDecimal128), + cmp.Comparer(compareNoPrivateFields), + cmp.Comparer(compareZeroTest), + cmp.Comparer(compareTime), + ); diff != "" { + t.Errorf("difference:\n%s", diff) + t.Errorf("Values are not equal.\ngot: %#v\nwant:%#v", got, want) + } + }) + } + }) + }) + t.Run("error path", func(t *testing.T) { + testCases := []struct { + name string + value interface{} + b []byte + err error + }{ + { + "duplicate name struct", + struct { + A int64 + B int64 `bson:"a"` + }{ + A: 0, + B: 54321, + }, + buildDocument(bsoncore.AppendInt32Element(nil, "a", 12345)), + fmt.Errorf("duplicated key a"), + }, + } + + for _, tc := range testCases { + t.Run(tc.name, func(t *testing.T) { + vr := NewDocumentReader(bytes.NewReader(tc.b)) + reg := buildDefaultRegistry() + vtype := reflect.TypeOf(tc.value) + dec, err := reg.LookupDecoder(vtype) + noerr(t, err) + + gotVal := reflect.New(reflect.TypeOf(tc.value)).Elem() + err = dec.DecodeValue(DecodeContext{Registry: reg}, vr, gotVal) + if err == nil || !strings.Contains(err.Error(), tc.err.Error()) { + t.Errorf("Did not receive expected error. 
got %v; want %v", err, tc.err) + } + }) + } + }) + + t.Run("defaultEmptyInterfaceCodec.DecodeValue", func(t *testing.T) { + t.Run("DecodeValue", func(t *testing.T) { + testCases := []struct { + name string + val interface{} + bsontype Type + }{ + { + "Double - float64", + float64(3.14159), + TypeDouble, + }, + { + "String - string", + "foo bar baz", + TypeString, + }, + { + "Array - A", + A{3.14159}, + TypeArray, + }, + { + "Binary - Binary", + Binary{Subtype: 0xFF, Data: []byte{0x01, 0x02, 0x03}}, + TypeBinary, + }, + { + "Undefined - Undefined", + Undefined{}, + TypeUndefined, + }, + { + "ObjectID - ObjectID", + ObjectID{0x01, 0x02, 0x03, 0x04, 0x05, 0x06, 0x07, 0x08, 0x09, 0x0A, 0x0B, 0x0C}, + TypeObjectID, + }, + { + "Boolean - bool", + bool(true), + TypeBoolean, + }, + { + "DateTime - DateTime", + DateTime(1234567890), + TypeDateTime, + }, + { + "Null - Null", + nil, + TypeNull, + }, + { + "Regex - Regex", + Regex{Pattern: "foo", Options: "bar"}, + TypeRegex, + }, + { + "DBPointer - DBPointer", + DBPointer{ + DB: "foobar", + Pointer: ObjectID{0x01, 0x02, 0x03, 0x04, 0x05, 0x06, 0x07, 0x08, 0x09, 0x0A, 0x0B, 0x0C}, + }, + TypeDBPointer, + }, + { + "JavaScript - JavaScript", + JavaScript("var foo = 'bar';"), + TypeJavaScript, + }, + { + "Symbol - Symbol", + Symbol("foobarbazlolz"), + TypeSymbol, + }, + { + "Int32 - int32", + int32(123456), + TypeInt32, + }, + { + "Int64 - int64", + int64(1234567890), + TypeInt64, + }, + { + "Timestamp - Timestamp", + Timestamp{T: 12345, I: 67890}, + TypeTimestamp, + }, + { + "Decimal128 - decimal.Decimal128", + NewDecimal128(12345, 67890), + TypeDecimal128, + }, + { + "MinKey - MinKey", + MinKey{}, + TypeMinKey, + }, + { + "MaxKey - MaxKey", + MaxKey{}, + TypeMaxKey, + }, + } + for _, tc := range testCases { + t.Run(tc.name, func(t *testing.T) { + llvr := &valueReaderWriter{BSONType: tc.bsontype} + + t.Run("Type Map failure", func(t *testing.T) { + if tc.bsontype == TypeNull { + t.Skip() + } + val := reflect.New(tEmpty).Elem() + dc := DecodeContext{Registry: newTestRegistry()} + want := errNoTypeMapEntry{Type: tc.bsontype} + got := (&emptyInterfaceCodec{}).DecodeValue(dc, llvr, val) + if !assert.CompareErrors(got, want) { + t.Errorf("Errors are not equal. got %v; want %v", got, want) + } + }) + + t.Run("Lookup failure", func(t *testing.T) { + if tc.bsontype == TypeNull { + t.Skip() + } + val := reflect.New(tEmpty).Elem() + reg := newTestRegistry() + reg.RegisterTypeMapEntry(tc.bsontype, reflect.TypeOf(tc.val)) + dc := DecodeContext{ + Registry: reg, + } + want := errNoDecoder{Type: reflect.TypeOf(tc.val)} + got := (&emptyInterfaceCodec{}).DecodeValue(dc, llvr, val) + if !assert.CompareErrors(got, want) { + t.Errorf("Errors are not equal. got %v; want %v", got, want) + } + }) + + t.Run("DecodeValue failure", func(t *testing.T) { + if tc.bsontype == TypeNull { + t.Skip() + } + want := errors.New("DecodeValue failure error") + llc := &llCodec{t: t, err: want} + reg := newTestRegistry() + reg.RegisterTypeDecoder(reflect.TypeOf(tc.val), llc) + reg.RegisterTypeMapEntry(tc.bsontype, reflect.TypeOf(tc.val)) + dc := DecodeContext{ + Registry: reg, + } + got := (&emptyInterfaceCodec{}).DecodeValue(dc, llvr, reflect.New(tEmpty).Elem()) + if !assert.CompareErrors(got, want) { + t.Errorf("Errors are not equal. 
got %v; want %v", got, want) + } + }) + + t.Run("Success", func(t *testing.T) { + want := tc.val + llc := &llCodec{t: t, decodeval: tc.val} + reg := newTestRegistry() + reg.RegisterTypeDecoder(reflect.TypeOf(tc.val), llc) + reg.RegisterTypeMapEntry(tc.bsontype, reflect.TypeOf(tc.val)) + dc := DecodeContext{ + Registry: reg, + } + got := reflect.New(tEmpty).Elem() + err := (&emptyInterfaceCodec{}).DecodeValue(dc, llvr, got) + noerr(t, err) + if !cmp.Equal(got.Interface(), want, cmp.Comparer(compareDecimal128)) { + t.Errorf("Did not receive expected value. got %v; want %v", got.Interface(), want) + } + }) + }) + } + }) + + t.Run("non-interface{}", func(t *testing.T) { + val := uint64(1234567890) + want := ValueDecoderError{Name: "EmptyInterfaceDecodeValue", Types: []reflect.Type{tEmpty}, Received: reflect.ValueOf(val)} + got := (&emptyInterfaceCodec{}).DecodeValue(DecodeContext{}, nil, reflect.ValueOf(val)) + if !assert.CompareErrors(got, want) { + t.Errorf("Errors are not equal. got %v; want %v", got, want) + } + }) + + t.Run("nil *interface{}", func(t *testing.T) { + var val interface{} + want := ValueDecoderError{Name: "EmptyInterfaceDecodeValue", Types: []reflect.Type{tEmpty}, Received: reflect.ValueOf(val)} + got := (&emptyInterfaceCodec{}).DecodeValue(DecodeContext{}, nil, reflect.ValueOf(val)) + if !assert.CompareErrors(got, want) { + t.Errorf("Errors are not equal. got %v; want %v", got, want) + } + }) + + t.Run("no type registered", func(t *testing.T) { + llvr := &valueReaderWriter{BSONType: TypeDouble} + want := errNoTypeMapEntry{Type: TypeDouble} + val := reflect.New(tEmpty).Elem() + got := (&emptyInterfaceCodec{}).DecodeValue(DecodeContext{Registry: newTestRegistry()}, llvr, val) + if !assert.CompareErrors(got, want) { + t.Errorf("Errors are not equal. got %v; want %v", got, want) + } + }) + t.Run("top level document", func(t *testing.T) { + data := bsoncore.BuildDocument(nil, bsoncore.AppendDoubleElement(nil, "pi", 3.14159)) + vr := NewDocumentReader(bytes.NewReader(data)) + want := D{{"pi", 3.14159}} + var got interface{} + val := reflect.ValueOf(&got).Elem() + err := (&emptyInterfaceCodec{}).DecodeValue(DecodeContext{Registry: buildDefaultRegistry()}, vr, val) + noerr(t, err) + if !cmp.Equal(got, want) { + t.Errorf("Did not get correct result. 
got %v; want %v", got, want) + } + }) + t.Run("custom type map entry", func(t *testing.T) { + // registering a custom type map entry for both Type(0) anad TypeEmbeddedDocument should cause + // the top-level to decode to registered type when unmarshalling to interface{} + + topLevelReg := &Registry{ + typeEncoders: new(typeEncoderCache), + typeDecoders: new(typeDecoderCache), + kindEncoders: new(kindEncoderCache), + kindDecoders: new(kindDecoderCache), + } + registerDefaultEncoders(topLevelReg) + registerDefaultDecoders(topLevelReg) + topLevelReg.RegisterTypeMapEntry(Type(0), reflect.TypeOf(M{})) + + embeddedReg := &Registry{ + typeEncoders: new(typeEncoderCache), + typeDecoders: new(typeDecoderCache), + kindEncoders: new(kindEncoderCache), + kindDecoders: new(kindDecoderCache), + } + registerDefaultEncoders(embeddedReg) + registerDefaultDecoders(embeddedReg) + embeddedReg.RegisterTypeMapEntry(Type(0), reflect.TypeOf(M{})) + + // create doc {"nested": {"foo": 1}} + innerDoc := bsoncore.BuildDocument( + nil, + bsoncore.AppendInt32Element(nil, "foo", 1), + ) + doc := bsoncore.BuildDocument( + nil, + bsoncore.AppendDocumentElement(nil, "nested", innerDoc), + ) + want := M{ + "nested": D{{"foo", int32(1)}}, + } + + testCases := []struct { + name string + registry *Registry + }{ + {"top level", topLevelReg}, + {"embedded", embeddedReg}, + } + for _, tc := range testCases { + var got interface{} + vr := NewDocumentReader(bytes.NewReader(doc)) + val := reflect.ValueOf(&got).Elem() + + err := (&emptyInterfaceCodec{}).DecodeValue(DecodeContext{Registry: tc.registry}, vr, val) + noerr(t, err) + if !cmp.Equal(got, want) { + t.Fatalf("got %v, want %v", got, want) + } + } + }) + t.Run("custom type map entry is used if there is no type information", func(t *testing.T) { + // If a type map entry is registered for TypeEmbeddedDocument, the decoder should use it when + // type information is not available. + + reg := &Registry{ + typeEncoders: new(typeEncoderCache), + typeDecoders: new(typeDecoderCache), + kindEncoders: new(kindEncoderCache), + kindDecoders: new(kindDecoderCache), + } + registerDefaultEncoders(reg) + registerDefaultDecoders(reg) + reg.RegisterTypeMapEntry(TypeEmbeddedDocument, reflect.TypeOf(M{})) + + // build document {"nested": {"foo": 10}} + inner := bsoncore.BuildDocument( + nil, + bsoncore.AppendInt32Element(nil, "foo", 10), + ) + doc := bsoncore.BuildDocument( + nil, + bsoncore.AppendDocumentElement(nil, "nested", inner), + ) + want := D{ + {"nested", M{ + "foo": int32(10), + }}, + } + + var got D + vr := NewDocumentReader(bytes.NewReader(doc)) + val := reflect.ValueOf(&got).Elem() + err := (&sliceCodec{}).DecodeValue(DecodeContext{Registry: reg}, vr, val) + noerr(t, err) + if !cmp.Equal(got, want) { + t.Fatalf("got %v, want %v", got, want) + } + }) + }) + + t.Run("decode errors contain key information", func(t *testing.T) { + decodeValueError := errors.New("decode value error") + emptyInterfaceErrorDecode := func(DecodeContext, ValueReader, reflect.Value) error { + return decodeValueError + } + emptyInterfaceErrorRegistry := newTestRegistry() + emptyInterfaceErrorRegistry.RegisterTypeDecoder(tEmpty, ValueDecoderFunc(emptyInterfaceErrorDecode)) + + // Set up a document {foo: 10} and an error that would happen if the value were decoded into interface{} + // using the registry defined above. 
+ docBytes := bsoncore.BuildDocumentFromElements( + nil, + bsoncore.AppendInt32Element(nil, "foo", 10), + ) + docEmptyInterfaceErr := &DecodeError{ + keys: []string{"foo"}, + wrapped: decodeValueError, + } + + // Set up struct definitions where Foo maps to interface{} and string. When decoded using the registry defined + // above, the interface{} struct will get an error when calling DecodeValue and the string struct will get an + // error when looking up a decoder. + type emptyInterfaceStruct struct { + Foo interface{} + } + type stringStruct struct { + Foo string + } + emptyInterfaceStructErr := &DecodeError{ + keys: []string{"foo"}, + wrapped: decodeValueError, + } + stringStructErr := &DecodeError{ + keys: []string{"foo"}, + wrapped: errNoDecoder{reflect.TypeOf("")}, + } + + // Test a deeply nested struct mixed with maps and slices. + // Build document {"first": {"second": {"randomKey": {"third": [{}, {"fourth": "value"}]}}}} + type inner3 struct{ Fourth interface{} } + type inner2 struct{ Third []inner3 } + type inner1 struct{ Second map[string]inner2 } + type outer struct{ First inner1 } + inner3EmptyDoc := buildDocument(nil) + inner3Doc := buildDocument(bsoncore.AppendStringElement(nil, "fourth", "value")) + inner3Array := buildArray( + // buildArray takes []byte so we first append() all of the values into a single []byte + append( + bsoncore.AppendDocumentElement(nil, "0", inner3EmptyDoc), + bsoncore.AppendDocumentElement(nil, "1", inner3Doc)..., + ), + ) + inner2Doc := buildDocument(bsoncore.AppendArrayElement(nil, "third", inner3Array)) + inner2Map := buildDocument(bsoncore.AppendDocumentElement(nil, "randomKey", inner2Doc)) + inner1Doc := buildDocument(bsoncore.AppendDocumentElement(nil, "second", inner2Map)) + outerDoc := buildDocument(bsoncore.AppendDocumentElement(nil, "first", inner1Doc)) + + // Use a registry that has all default decoders with the custom interface{} decoder that always errors. + nestedRegistry := &Registry{ + typeEncoders: new(typeEncoderCache), + typeDecoders: new(typeDecoderCache), + kindEncoders: new(kindEncoderCache), + kindDecoders: new(kindDecoderCache), + } + registerDefaultDecoders(nestedRegistry) + nestedRegistry.RegisterTypeDecoder(tEmpty, ValueDecoderFunc(emptyInterfaceErrorDecode)) + nestedErr := &DecodeError{ + keys: []string{"fourth", "1", "third", "randomKey", "second", "first"}, + wrapped: decodeValueError, + } + + testCases := []struct { + name string + val interface{} + vr ValueReader + registry *Registry // buildDefaultRegistry will be used if this is nil + decoder ValueDecoder + err error + }{ + { + // DecodeValue error when decoding into a D. + "D slice", + D{}, + NewDocumentReader(bytes.NewReader(docBytes)), + emptyInterfaceErrorRegistry, + &sliceCodec{}, + docEmptyInterfaceErr, + }, + { + // DecodeValue error when decoding into a []string. + "string slice", + []string{}, + &valueReaderWriter{BSONType: TypeArray}, + nil, + &sliceCodec{}, + &DecodeError{ + keys: []string{"0"}, + wrapped: errors.New("cannot decode array into a string type"), + }, + }, + { + // DecodeValue error when decoding into a E array. This should have the same behavior as + // the "D slice" test above because both the defaultSliceCodec and ArrayDecodeValue use + // the decodeD helper function. + "D array", + [1]E{}, + NewDocumentReader(bytes.NewReader(docBytes)), + emptyInterfaceErrorRegistry, + ValueDecoderFunc(arrayDecodeValue), + docEmptyInterfaceErr, + }, + { + // DecodeValue error when decoding into a string array. 
This should have the same behavior as + // the "D slice" test above because both the defaultSliceCodec and ArrayDecodeValue use + // the decodeDefault helper function. + "string array", + [1]string{}, + &valueReaderWriter{BSONType: TypeArray}, + nil, + ValueDecoderFunc(arrayDecodeValue), + &DecodeError{ + keys: []string{"0"}, + wrapped: errors.New("cannot decode array into a string type"), + }, + }, + { + // DecodeValue error when decoding into a map. + "map", + map[string]interface{}{}, + NewDocumentReader(bytes.NewReader(docBytes)), + emptyInterfaceErrorRegistry, + &mapCodec{}, + docEmptyInterfaceErr, + }, + { + // DecodeValue error when decoding into a struct. + "struct - DecodeValue error", + emptyInterfaceStruct{}, + NewDocumentReader(bytes.NewReader(docBytes)), + emptyInterfaceErrorRegistry, + newStructCodec(nil), + emptyInterfaceStructErr, + }, + { + // ErrNoDecoder when decoding into a struct. + // This test uses NewRegistryBuilder().Build rather than buildDefaultRegistry to ensure that there is + // no decoder for strings. + "struct - no decoder found", + stringStruct{}, + NewDocumentReader(bytes.NewReader(docBytes)), + newTestRegistry(), + newStructCodec(nil), + stringStructErr, + }, + { + "deeply nested struct", + outer{}, + NewDocumentReader(bytes.NewReader(outerDoc)), + nestedRegistry, + newStructCodec(nil), + nestedErr, + }, + } + for _, tc := range testCases { + t.Run(tc.name, func(t *testing.T) { + dc := DecodeContext{Registry: tc.registry} + if dc.Registry == nil { + dc.Registry = buildDefaultRegistry() + } + + var val reflect.Value + if rtype := reflect.TypeOf(tc.val); rtype != nil { + val = reflect.New(rtype).Elem() + } + err := tc.decoder.DecodeValue(dc, tc.vr, val) + assert.Equal(t, tc.err, err, "expected error %v, got %v", tc.err, err) + }) + } + + t.Run("keys are correctly reversed", func(t *testing.T) { + innerBytes := bsoncore.BuildDocumentFromElements(nil, bsoncore.AppendInt32Element(nil, "bar", 10)) + outerBytes := bsoncore.BuildDocumentFromElements(nil, bsoncore.AppendDocumentElement(nil, "foo", innerBytes)) + + type inner struct{ Bar string } + type outer struct{ Foo inner } + + dc := DecodeContext{Registry: buildDefaultRegistry()} + vr := NewDocumentReader(bytes.NewReader(outerBytes)) + val := reflect.New(reflect.TypeOf(outer{})).Elem() + err := newStructCodec(nil).DecodeValue(dc, vr, val) + + var decodeErr *DecodeError + assert.True(t, errors.As(err, &decodeErr), "expected DecodeError, got %v of type %T", err, err) + expectedKeys := []string{"foo", "bar"} + assert.Equal(t, expectedKeys, decodeErr.Keys(), "expected keys slice %v, got %v", expectedKeys, + decodeErr.Keys()) + keyPath := strings.Join(expectedKeys, ".") + assert.True(t, strings.Contains(decodeErr.Error(), keyPath), + "expected error %v to contain key pattern %s", decodeErr, keyPath) + }) + }) + + t.Run("values are converted", func(t *testing.T) { + // When decoding into a D or M, values must be converted if they are not being decoded to the default type. 
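+ // For example, with TypeBoolean mapped to the custom mybool type, decoding
+ // {"bool": true} into a D should produce D{{"bool", mybool(true)}} rather
+ // than D{{"bool", true}}.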
+ + t.Run("D", func(t *testing.T) { + trueValue := bsoncore.Value{ + Type: bsoncore.TypeBoolean, + Data: bsoncore.AppendBoolean(nil, true), + } + docBytes := bsoncore.BuildDocumentFromElements(nil, + bsoncore.AppendBooleanElement(nil, "bool", true), + bsoncore.BuildArrayElement(nil, "boolArray", trueValue), + ) + + reg := &Registry{ + typeEncoders: new(typeEncoderCache), + typeDecoders: new(typeDecoderCache), + kindEncoders: new(kindEncoderCache), + kindDecoders: new(kindDecoderCache), + } + registerDefaultDecoders(reg) + reg.RegisterTypeMapEntry(TypeBoolean, reflect.TypeOf(mybool(true))) + + dc := DecodeContext{Registry: reg} + vr := NewDocumentReader(bytes.NewReader(docBytes)) + val := reflect.New(tD).Elem() + err := dDecodeValue(dc, vr, val) + assert.Nil(t, err, "DDecodeValue error: %v", err) + + want := D{ + {"bool", mybool(true)}, + {"boolArray", A{mybool(true)}}, + } + got := val.Interface().(D) + assert.Equal(t, want, got, "want document %v, got %v", want, got) + }) + t.Run("M", func(t *testing.T) { + docBytes := bsoncore.BuildDocumentFromElements(nil, + bsoncore.AppendBooleanElement(nil, "bool", true), + ) + + type myMap map[string]mybool + dc := DecodeContext{Registry: buildDefaultRegistry()} + vr := NewDocumentReader(bytes.NewReader(docBytes)) + val := reflect.New(reflect.TypeOf(myMap{})).Elem() + err := (&mapCodec{}).DecodeValue(dc, vr, val) + assert.Nil(t, err, "DecodeValue error: %v", err) + + want := myMap{ + "bool": mybool(true), + } + got := val.Interface().(myMap) + assert.Equal(t, want, got, "expected map %v, got %v", want, got) + }) + }) +} + +// buildDocumentArray inserts vals inside of an array inside of a document. +func buildDocumentArray(fn func([]byte) []byte) []byte { + aix, doc := bsoncore.AppendArrayElementStart(nil, "Z") + doc = fn(doc) + doc, _ = bsoncore.AppendArrayEnd(doc, aix) + return buildDocument(doc) +} + +func buildArray(vals []byte) []byte { + aix, doc := bsoncore.AppendArrayStart(nil) + doc = append(doc, vals...) + doc, _ = bsoncore.AppendArrayEnd(doc, aix) + return doc +} + +func appendArrayElement(dst []byte, key string, vals []byte) []byte { + aix, doc := bsoncore.AppendArrayElementStart(dst, key) + doc = append(doc, vals...) + doc, _ = bsoncore.AppendArrayEnd(doc, aix) + return doc +} + +// buildDocument inserts elems inside of a document. +func buildDocument(elems []byte) []byte { + idx, doc := bsoncore.AppendDocumentStart(nil) + doc = append(doc, elems...) + doc, _ = bsoncore.AppendDocumentEnd(doc, idx) + return doc +} + +func buildDefaultRegistry() *Registry { + reg := &Registry{ + typeEncoders: new(typeEncoderCache), + typeDecoders: new(typeDecoderCache), + kindEncoders: new(kindEncoderCache), + kindDecoders: new(kindDecoderCache), + } + registerDefaultEncoders(reg) + registerDefaultDecoders(reg) + return reg +} diff --git a/default_value_encoders.go b/default_value_encoders.go new file mode 100644 index 0000000..2ab5d86 --- /dev/null +++ b/default_value_encoders.go @@ -0,0 +1,517 @@ +// Copyright (C) MongoDB, Inc. 2017-present. +// +// Licensed under the Apache License, Version 2.0 (the "License"); you may +// not use this file except in compliance with the License. 
You may obtain +// a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 + +package bson + +import ( + "encoding/json" + "errors" + "math" + "net/url" + "reflect" + "sync" + + "gitea.psichedelico.com/go/bson/x/bsonx/bsoncore" +) + +var bvwPool = sync.Pool{ + New: func() interface{} { + return new(valueWriter) + }, +} + +var errInvalidValue = errors.New("cannot encode invalid element") + +var sliceWriterPool = sync.Pool{ + New: func() interface{} { + sw := make(sliceWriter, 0) + return &sw + }, +} + +func encodeElement(ec EncodeContext, dw DocumentWriter, e E) error { + vw, err := dw.WriteDocumentElement(e.Key) + if err != nil { + return err + } + + if e.Value == nil { + return vw.WriteNull() + } + encoder, err := ec.LookupEncoder(reflect.TypeOf(e.Value)) + if err != nil { + return err + } + + err = encoder.EncodeValue(ec, vw, reflect.ValueOf(e.Value)) + if err != nil { + return err + } + return nil +} + +// registerDefaultEncoders registers the default value encoders with the +// provided Registry. +func registerDefaultEncoders(reg *Registry) { + mapEncoder := &mapCodec{} + uintCodec := &uintCodec{} + + reg.RegisterTypeEncoder(tByteSlice, &byteSliceCodec{}) + reg.RegisterTypeEncoder(tTime, &timeCodec{}) + reg.RegisterTypeEncoder(tEmpty, &emptyInterfaceCodec{}) + reg.RegisterTypeEncoder(tCoreArray, &arrayCodec{}) + reg.RegisterTypeEncoder(tOID, ValueEncoderFunc(objectIDEncodeValue)) + reg.RegisterTypeEncoder(tDecimal, ValueEncoderFunc(decimal128EncodeValue)) + reg.RegisterTypeEncoder(tJSONNumber, ValueEncoderFunc(jsonNumberEncodeValue)) + reg.RegisterTypeEncoder(tURL, ValueEncoderFunc(urlEncodeValue)) + reg.RegisterTypeEncoder(tJavaScript, ValueEncoderFunc(javaScriptEncodeValue)) + reg.RegisterTypeEncoder(tSymbol, ValueEncoderFunc(symbolEncodeValue)) + reg.RegisterTypeEncoder(tBinary, ValueEncoderFunc(binaryEncodeValue)) + reg.RegisterTypeEncoder(tVector, ValueEncoderFunc(vectorEncodeValue)) + reg.RegisterTypeEncoder(tUndefined, ValueEncoderFunc(undefinedEncodeValue)) + reg.RegisterTypeEncoder(tDateTime, ValueEncoderFunc(dateTimeEncodeValue)) + reg.RegisterTypeEncoder(tNull, ValueEncoderFunc(nullEncodeValue)) + reg.RegisterTypeEncoder(tRegex, ValueEncoderFunc(regexEncodeValue)) + reg.RegisterTypeEncoder(tDBPointer, ValueEncoderFunc(dbPointerEncodeValue)) + reg.RegisterTypeEncoder(tTimestamp, ValueEncoderFunc(timestampEncodeValue)) + reg.RegisterTypeEncoder(tMinKey, ValueEncoderFunc(minKeyEncodeValue)) + reg.RegisterTypeEncoder(tMaxKey, ValueEncoderFunc(maxKeyEncodeValue)) + reg.RegisterTypeEncoder(tCoreDocument, ValueEncoderFunc(coreDocumentEncodeValue)) + reg.RegisterTypeEncoder(tCodeWithScope, ValueEncoderFunc(codeWithScopeEncodeValue)) + reg.RegisterKindEncoder(reflect.Bool, ValueEncoderFunc(booleanEncodeValue)) + reg.RegisterKindEncoder(reflect.Int, ValueEncoderFunc(intEncodeValue)) + reg.RegisterKindEncoder(reflect.Int8, ValueEncoderFunc(intEncodeValue)) + reg.RegisterKindEncoder(reflect.Int16, ValueEncoderFunc(intEncodeValue)) + reg.RegisterKindEncoder(reflect.Int32, ValueEncoderFunc(intEncodeValue)) + reg.RegisterKindEncoder(reflect.Int64, ValueEncoderFunc(intEncodeValue)) + reg.RegisterKindEncoder(reflect.Uint, uintCodec) + reg.RegisterKindEncoder(reflect.Uint8, uintCodec) + reg.RegisterKindEncoder(reflect.Uint16, uintCodec) + reg.RegisterKindEncoder(reflect.Uint32, uintCodec) + reg.RegisterKindEncoder(reflect.Uint64, uintCodec) + reg.RegisterKindEncoder(reflect.Float32, ValueEncoderFunc(floatEncodeValue)) + 
reg.RegisterKindEncoder(reflect.Float64, ValueEncoderFunc(floatEncodeValue)) + reg.RegisterKindEncoder(reflect.Array, ValueEncoderFunc(arrayEncodeValue)) + reg.RegisterKindEncoder(reflect.Map, mapEncoder) + reg.RegisterKindEncoder(reflect.Slice, &sliceCodec{}) + reg.RegisterKindEncoder(reflect.String, &stringCodec{}) + reg.RegisterKindEncoder(reflect.Struct, newStructCodec(mapEncoder)) + reg.RegisterKindEncoder(reflect.Ptr, &pointerCodec{}) + reg.RegisterInterfaceEncoder(tValueMarshaler, ValueEncoderFunc(valueMarshalerEncodeValue)) + reg.RegisterInterfaceEncoder(tMarshaler, ValueEncoderFunc(marshalerEncodeValue)) +} + +// booleanEncodeValue is the ValueEncoderFunc for bool types. +func booleanEncodeValue(_ EncodeContext, vw ValueWriter, val reflect.Value) error { + if !val.IsValid() || val.Kind() != reflect.Bool { + return ValueEncoderError{Name: "BooleanEncodeValue", Kinds: []reflect.Kind{reflect.Bool}, Received: val} + } + return vw.WriteBoolean(val.Bool()) +} + +func fitsIn32Bits(i int64) bool { + return math.MinInt32 <= i && i <= math.MaxInt32 +} + +// intEncodeValue is the ValueEncoderFunc for int types. +func intEncodeValue(ec EncodeContext, vw ValueWriter, val reflect.Value) error { + switch val.Kind() { + case reflect.Int8, reflect.Int16, reflect.Int32: + return vw.WriteInt32(int32(val.Int())) + case reflect.Int: + i64 := val.Int() + if fitsIn32Bits(i64) { + return vw.WriteInt32(int32(i64)) + } + return vw.WriteInt64(i64) + case reflect.Int64: + i64 := val.Int() + if ec.minSize && fitsIn32Bits(i64) { + return vw.WriteInt32(int32(i64)) + } + return vw.WriteInt64(i64) + } + + return ValueEncoderError{ + Name: "IntEncodeValue", + Kinds: []reflect.Kind{reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64, reflect.Int}, + Received: val, + } +} + +// floatEncodeValue is the ValueEncoderFunc for float types. +func floatEncodeValue(_ EncodeContext, vw ValueWriter, val reflect.Value) error { + switch val.Kind() { + case reflect.Float32, reflect.Float64: + return vw.WriteDouble(val.Float()) + } + + return ValueEncoderError{Name: "FloatEncodeValue", Kinds: []reflect.Kind{reflect.Float32, reflect.Float64}, Received: val} +} + +// objectIDEncodeValue is the ValueEncoderFunc for ObjectID. +func objectIDEncodeValue(_ EncodeContext, vw ValueWriter, val reflect.Value) error { + if !val.IsValid() || val.Type() != tOID { + return ValueEncoderError{Name: "ObjectIDEncodeValue", Types: []reflect.Type{tOID}, Received: val} + } + return vw.WriteObjectID(val.Interface().(ObjectID)) +} + +// decimal128EncodeValue is the ValueEncoderFunc for Decimal128. +func decimal128EncodeValue(_ EncodeContext, vw ValueWriter, val reflect.Value) error { + if !val.IsValid() || val.Type() != tDecimal { + return ValueEncoderError{Name: "Decimal128EncodeValue", Types: []reflect.Type{tDecimal}, Received: val} + } + return vw.WriteDecimal128(val.Interface().(Decimal128)) +} + +// jsonNumberEncodeValue is the ValueEncoderFunc for json.Number. 
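+// Values that parse as an int64 are written through intEncodeValue; values
+// that only parse as a float64 fall back to floatEncodeValue. For example:
+//
+//	json.Number("5")    // written as a BSON integer (int32/int64)
+//	json.Number("10.1") // written as a BSON double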
+func jsonNumberEncodeValue(ec EncodeContext, vw ValueWriter, val reflect.Value) error { + if !val.IsValid() || val.Type() != tJSONNumber { + return ValueEncoderError{Name: "JSONNumberEncodeValue", Types: []reflect.Type{tJSONNumber}, Received: val} + } + jsnum := val.Interface().(json.Number) + + // Attempt int first, then float64 + if i64, err := jsnum.Int64(); err == nil { + return intEncodeValue(ec, vw, reflect.ValueOf(i64)) + } + + f64, err := jsnum.Float64() + if err != nil { + return err + } + + return floatEncodeValue(ec, vw, reflect.ValueOf(f64)) +} + +// urlEncodeValue is the ValueEncoderFunc for url.URL. +func urlEncodeValue(_ EncodeContext, vw ValueWriter, val reflect.Value) error { + if !val.IsValid() || val.Type() != tURL { + return ValueEncoderError{Name: "URLEncodeValue", Types: []reflect.Type{tURL}, Received: val} + } + u := val.Interface().(url.URL) + return vw.WriteString(u.String()) +} + +// arrayEncodeValue is the ValueEncoderFunc for array types. +func arrayEncodeValue(ec EncodeContext, vw ValueWriter, val reflect.Value) error { + if !val.IsValid() || val.Kind() != reflect.Array { + return ValueEncoderError{Name: "ArrayEncodeValue", Kinds: []reflect.Kind{reflect.Array}, Received: val} + } + + // If we have a []E we want to treat it as a document instead of as an array. + if val.Type().Elem() == tE { + dw, err := vw.WriteDocument() + if err != nil { + return err + } + + for idx := 0; idx < val.Len(); idx++ { + e := val.Index(idx).Interface().(E) + err = encodeElement(ec, dw, e) + if err != nil { + return err + } + } + + return dw.WriteDocumentEnd() + } + + // If we have a []byte we want to treat it as a binary instead of as an array. + if val.Type().Elem() == tByte { + var byteSlice []byte + for idx := 0; idx < val.Len(); idx++ { + byteSlice = append(byteSlice, val.Index(idx).Interface().(byte)) + } + return vw.WriteBinary(byteSlice) + } + + aw, err := vw.WriteArray() + if err != nil { + return err + } + + elemType := val.Type().Elem() + encoder, err := ec.LookupEncoder(elemType) + if err != nil && elemType.Kind() != reflect.Interface { + return err + } + + for idx := 0; idx < val.Len(); idx++ { + currEncoder, currVal, lookupErr := lookupElementEncoder(ec, encoder, val.Index(idx)) + if lookupErr != nil && !errors.Is(lookupErr, errInvalidValue) { + return lookupErr + } + + vw, err := aw.WriteArrayElement() + if err != nil { + return err + } + + if errors.Is(lookupErr, errInvalidValue) { + err = vw.WriteNull() + if err != nil { + return err + } + continue + } + + err = currEncoder.EncodeValue(ec, vw, currVal) + if err != nil { + return err + } + } + return aw.WriteArrayEnd() +} + +func lookupElementEncoder(ec EncodeContext, origEncoder ValueEncoder, currVal reflect.Value) (ValueEncoder, reflect.Value, error) { + if origEncoder != nil || (currVal.Kind() != reflect.Interface) { + return origEncoder, currVal, nil + } + currVal = currVal.Elem() + if !currVal.IsValid() { + return nil, currVal, errInvalidValue + } + currEncoder, err := ec.LookupEncoder(currVal.Type()) + + return currEncoder, currVal, err +} + +// valueMarshalerEncodeValue is the ValueEncoderFunc for ValueMarshaler implementations. 
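+// It accepts a value that implements ValueMarshaler either directly or via an
+// addressable pointer, and writes BSON null when the implementer is a nil
+// pointer.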
+func valueMarshalerEncodeValue(_ EncodeContext, vw ValueWriter, val reflect.Value) error { + // Either val or a pointer to val must implement ValueMarshaler + switch { + case !val.IsValid(): + return ValueEncoderError{Name: "ValueMarshalerEncodeValue", Types: []reflect.Type{tValueMarshaler}, Received: val} + case val.Type().Implements(tValueMarshaler): + // If ValueMarshaler is implemented on a concrete type, make sure that val isn't a nil pointer + if isImplementationNil(val, tValueMarshaler) { + return vw.WriteNull() + } + case reflect.PtrTo(val.Type()).Implements(tValueMarshaler) && val.CanAddr(): + val = val.Addr() + default: + return ValueEncoderError{Name: "ValueMarshalerEncodeValue", Types: []reflect.Type{tValueMarshaler}, Received: val} + } + + m, ok := val.Interface().(ValueMarshaler) + if !ok { + return vw.WriteNull() + } + t, data, err := m.MarshalBSONValue() + if err != nil { + return err + } + return copyValueFromBytes(vw, Type(t), data) +} + +// marshalerEncodeValue is the ValueEncoderFunc for Marshaler implementations. +func marshalerEncodeValue(_ EncodeContext, vw ValueWriter, val reflect.Value) error { + // Either val or a pointer to val must implement Marshaler + switch { + case !val.IsValid(): + return ValueEncoderError{Name: "MarshalerEncodeValue", Types: []reflect.Type{tMarshaler}, Received: val} + case val.Type().Implements(tMarshaler): + // If Marshaler is implemented on a concrete type, make sure that val isn't a nil pointer + if isImplementationNil(val, tMarshaler) { + return vw.WriteNull() + } + case reflect.PtrTo(val.Type()).Implements(tMarshaler) && val.CanAddr(): + val = val.Addr() + default: + return ValueEncoderError{Name: "MarshalerEncodeValue", Types: []reflect.Type{tMarshaler}, Received: val} + } + + m, ok := val.Interface().(Marshaler) + if !ok { + return vw.WriteNull() + } + data, err := m.MarshalBSON() + if err != nil { + return err + } + return copyValueFromBytes(vw, TypeEmbeddedDocument, data) +} + +// javaScriptEncodeValue is the ValueEncoderFunc for the JavaScript type. +func javaScriptEncodeValue(_ EncodeContext, vw ValueWriter, val reflect.Value) error { + if !val.IsValid() || val.Type() != tJavaScript { + return ValueEncoderError{Name: "JavaScriptEncodeValue", Types: []reflect.Type{tJavaScript}, Received: val} + } + + return vw.WriteJavascript(val.String()) +} + +// symbolEncodeValue is the ValueEncoderFunc for the Symbol type. +func symbolEncodeValue(_ EncodeContext, vw ValueWriter, val reflect.Value) error { + if !val.IsValid() || val.Type() != tSymbol { + return ValueEncoderError{Name: "SymbolEncodeValue", Types: []reflect.Type{tSymbol}, Received: val} + } + + return vw.WriteSymbol(val.String()) +} + +// binaryEncodeValue is the ValueEncoderFunc for Binary. +func binaryEncodeValue(_ EncodeContext, vw ValueWriter, val reflect.Value) error { + if !val.IsValid() || val.Type() != tBinary { + return ValueEncoderError{Name: "BinaryEncodeValue", Types: []reflect.Type{tBinary}, Received: val} + } + b := val.Interface().(Binary) + + return vw.WriteBinaryWithSubtype(b.Data, b.Subtype) +} + +// vectorEncodeValue is the ValueEncoderFunc for Vector. 
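+// The Vector is converted with Vector.Binary() and written as a BSON binary
+// value carrying that binary's subtype.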
+func vectorEncodeValue(_ EncodeContext, vw ValueWriter, val reflect.Value) error { + // Check IsValid before calling val.Type(), which panics on the zero reflect.Value. + if !val.IsValid() || val.Type() != tVector { + return ValueEncoderError{Name: "VectorEncodeValue", + Types: []reflect.Type{tVector}, + Received: val, + } + } + v := val.Interface().(Vector) + b := v.Binary() + return vw.WriteBinaryWithSubtype(b.Data, b.Subtype) +} + +// undefinedEncodeValue is the ValueEncoderFunc for Undefined. +func undefinedEncodeValue(_ EncodeContext, vw ValueWriter, val reflect.Value) error { + if !val.IsValid() || val.Type() != tUndefined { + return ValueEncoderError{Name: "UndefinedEncodeValue", Types: []reflect.Type{tUndefined}, Received: val} + } + + return vw.WriteUndefined() +} + +// dateTimeEncodeValue is the ValueEncoderFunc for DateTime. +func dateTimeEncodeValue(_ EncodeContext, vw ValueWriter, val reflect.Value) error { + if !val.IsValid() || val.Type() != tDateTime { + return ValueEncoderError{Name: "DateTimeEncodeValue", Types: []reflect.Type{tDateTime}, Received: val} + } + + return vw.WriteDateTime(val.Int()) +} + +// nullEncodeValue is the ValueEncoderFunc for Null. +func nullEncodeValue(_ EncodeContext, vw ValueWriter, val reflect.Value) error { + if !val.IsValid() || val.Type() != tNull { + return ValueEncoderError{Name: "NullEncodeValue", Types: []reflect.Type{tNull}, Received: val} + } + + return vw.WriteNull() +} + +// regexEncodeValue is the ValueEncoderFunc for Regex. +func regexEncodeValue(_ EncodeContext, vw ValueWriter, val reflect.Value) error { + if !val.IsValid() || val.Type() != tRegex { + return ValueEncoderError{Name: "RegexEncodeValue", Types: []reflect.Type{tRegex}, Received: val} + } + + regex := val.Interface().(Regex) + + return vw.WriteRegex(regex.Pattern, regex.Options) +} + +// dbPointerEncodeValue is the ValueEncoderFunc for DBPointer. +func dbPointerEncodeValue(_ EncodeContext, vw ValueWriter, val reflect.Value) error { + if !val.IsValid() || val.Type() != tDBPointer { + return ValueEncoderError{Name: "DBPointerEncodeValue", Types: []reflect.Type{tDBPointer}, Received: val} + } + + dbp := val.Interface().(DBPointer) + + return vw.WriteDBPointer(dbp.DB, dbp.Pointer) +} + +// timestampEncodeValue is the ValueEncoderFunc for Timestamp. +func timestampEncodeValue(_ EncodeContext, vw ValueWriter, val reflect.Value) error { + if !val.IsValid() || val.Type() != tTimestamp { + return ValueEncoderError{Name: "TimestampEncodeValue", Types: []reflect.Type{tTimestamp}, Received: val} + } + + ts := val.Interface().(Timestamp) + + return vw.WriteTimestamp(ts.T, ts.I) +} + +// minKeyEncodeValue is the ValueEncoderFunc for MinKey. +func minKeyEncodeValue(_ EncodeContext, vw ValueWriter, val reflect.Value) error { + if !val.IsValid() || val.Type() != tMinKey { + return ValueEncoderError{Name: "MinKeyEncodeValue", Types: []reflect.Type{tMinKey}, Received: val} + } + + return vw.WriteMinKey() +} + +// maxKeyEncodeValue is the ValueEncoderFunc for MaxKey. +func maxKeyEncodeValue(_ EncodeContext, vw ValueWriter, val reflect.Value) error { + if !val.IsValid() || val.Type() != tMaxKey { + return ValueEncoderError{Name: "MaxKeyEncodeValue", Types: []reflect.Type{tMaxKey}, Received: val} + } + + return vw.WriteMaxKey() +} + +// coreDocumentEncodeValue is the ValueEncoderFunc for bsoncore.Document. 
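+// The raw document bytes are copied to the ValueWriter by copyDocumentFromBytes.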
+func coreDocumentEncodeValue(_ EncodeContext, vw ValueWriter, val reflect.Value) error { + if !val.IsValid() || val.Type() != tCoreDocument { + return ValueEncoderError{Name: "CoreDocumentEncodeValue", Types: []reflect.Type{tCoreDocument}, Received: val} + } + + cdoc := val.Interface().(bsoncore.Document) + + return copyDocumentFromBytes(vw, cdoc) +} + +// codeWithScopeEncodeValue is the ValueEncoderFunc for CodeWithScope. +func codeWithScopeEncodeValue(ec EncodeContext, vw ValueWriter, val reflect.Value) error { + if !val.IsValid() || val.Type() != tCodeWithScope { + return ValueEncoderError{Name: "CodeWithScopeEncodeValue", Types: []reflect.Type{tCodeWithScope}, Received: val} + } + + cws := val.Interface().(CodeWithScope) + + dw, err := vw.WriteCodeWithScope(string(cws.Code)) + if err != nil { + return err + } + + sw := sliceWriterPool.Get().(*sliceWriter) + defer sliceWriterPool.Put(sw) + *sw = (*sw)[:0] + + scopeVW := bvwPool.Get().(*valueWriter) + scopeVW.reset(scopeVW.buf[:0]) + scopeVW.w = sw + defer bvwPool.Put(scopeVW) + + encoder, err := ec.LookupEncoder(reflect.TypeOf(cws.Scope)) + if err != nil { + return err + } + + err = encoder.EncodeValue(ec, scopeVW, reflect.ValueOf(cws.Scope)) + if err != nil { + return err + } + + err = copyBytesToDocumentWriter(dw, *sw) + if err != nil { + return err + } + return dw.WriteDocumentEnd() +} + +// isImplementationNil returns if val is a nil pointer and inter is implemented on a concrete type +func isImplementationNil(val reflect.Value, inter reflect.Type) bool { + vt := val.Type() + for vt.Kind() == reflect.Ptr { + vt = vt.Elem() + } + return vt.Implements(inter) && val.Kind() == reflect.Ptr && val.IsNil() +} diff --git a/default_value_encoders_test.go b/default_value_encoders_test.go new file mode 100644 index 0000000..eb2b023 --- /dev/null +++ b/default_value_encoders_test.go @@ -0,0 +1,1758 @@ +// Copyright (C) MongoDB, Inc. 2017-present. +// +// Licensed under the Apache License, Version 2.0 (the "License"); you may +// not use this file except in compliance with the License. 
You may obtain +// a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 + +package bson + +import ( + "encoding/json" + "errors" + "fmt" + "math" + "net/url" + "reflect" + "strings" + "testing" + "time" + + "gitea.psichedelico.com/go/bson/internal/assert" + "gitea.psichedelico.com/go/bson/x/bsonx/bsoncore" + "github.com/google/go-cmp/cmp" +) + +type myInterface interface { + Foo() int +} + +type myStruct struct { + Val int +} + +func (ms myStruct) Foo() int { + return ms.Val +} + +func TestDefaultValueEncoders(t *testing.T) { + var wrong = func(string, string) string { return "wrong" } + + type mybool bool + type myint8 int8 + type myint16 int16 + type myint32 int32 + type myint64 int64 + type myint int + type myuint8 uint8 + type myuint16 uint16 + type myuint32 uint32 + type myuint64 uint64 + type myuint uint + type myfloat32 float32 + type myfloat64 float64 + + now := time.Now().Truncate(time.Millisecond) + pjsnum := new(json.Number) + *pjsnum = json.Number("3.14159") + d128 := NewDecimal128(12345, 67890) + var nilValueMarshaler *testValueMarshaler + var nilMarshaler *testMarshaler + + vmStruct := struct{ V testValueMarshalPtr }{testValueMarshalPtr{t: TypeString, buf: []byte{0x04, 0x00, 0x00, 0x00, 'f', 'o', 'o', 0x00}}} + mStruct := struct{ V testMarshalPtr }{testMarshalPtr{buf: bsoncore.BuildDocument(nil, bsoncore.AppendDoubleElement(nil, "pi", 3.14159))}} + + type subtest struct { + name string + val interface{} + ectx *EncodeContext + llvrw *valueReaderWriter + invoke invoked + err error + } + + testCases := []struct { + name string + ve ValueEncoder + subtests []subtest + }{ + { + "BooleanEncodeValue", + ValueEncoderFunc(booleanEncodeValue), + []subtest{ + { + "wrong type", + wrong, + nil, + nil, + nothing, + ValueEncoderError{Name: "BooleanEncodeValue", Kinds: []reflect.Kind{reflect.Bool}, Received: reflect.ValueOf(wrong)}, + }, + {"fast path", bool(true), nil, nil, writeBoolean, nil}, + {"reflection path", mybool(true), nil, nil, writeBoolean, nil}, + }, + }, + { + "IntEncodeValue", + ValueEncoderFunc(intEncodeValue), + []subtest{ + { + "wrong type", + wrong, + nil, + nil, + nothing, + ValueEncoderError{ + Name: "IntEncodeValue", + Kinds: []reflect.Kind{reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64, reflect.Int}, + Received: reflect.ValueOf(wrong), + }, + }, + {"int8/fast path", int8(127), nil, nil, writeInt32, nil}, + {"int16/fast path", int16(32767), nil, nil, writeInt32, nil}, + {"int32/fast path", int32(2147483647), nil, nil, writeInt32, nil}, + {"int64/fast path", int64(1234567890987), nil, nil, writeInt64, nil}, + {"int64/fast path - minsize", int64(math.MaxInt32), &EncodeContext{minSize: true}, nil, writeInt32, nil}, + {"int64/fast path - minsize too large", int64(math.MaxInt32 + 1), &EncodeContext{minSize: true}, nil, writeInt64, nil}, + {"int64/fast path - minsize too small", int64(math.MinInt32 - 1), &EncodeContext{minSize: true}, nil, writeInt64, nil}, + {"int/fast path - positive int32", int(math.MaxInt32 - 1), nil, nil, writeInt32, nil}, + {"int/fast path - negative int32", int(math.MinInt32 + 1), nil, nil, writeInt32, nil}, + {"int/fast path - MaxInt32", int(math.MaxInt32), nil, nil, writeInt32, nil}, + {"int/fast path - MinInt32", int(math.MinInt32), nil, nil, writeInt32, nil}, + {"int8/reflection path", myint8(127), nil, nil, writeInt32, nil}, + {"int16/reflection path", myint16(32767), nil, nil, writeInt32, nil}, + {"int32/reflection path", myint32(2147483647), nil, nil, writeInt32, nil}, + {"int64/reflection path", 
myint64(1234567890987), nil, nil, writeInt64, nil}, + {"int64/reflection path - minsize", myint64(math.MaxInt32), &EncodeContext{minSize: true}, nil, writeInt32, nil}, + {"int64/reflection path - minsize too large", myint64(math.MaxInt32 + 1), &EncodeContext{minSize: true}, nil, writeInt64, nil}, + {"int64/reflection path - minsize too small", myint64(math.MinInt32 - 1), &EncodeContext{minSize: true}, nil, writeInt64, nil}, + {"int/reflection path - positive int32", myint(math.MaxInt32 - 1), nil, nil, writeInt32, nil}, + {"int/reflection path - negative int32", myint(math.MinInt32 + 1), nil, nil, writeInt32, nil}, + {"int/reflection path - MaxInt32", myint(math.MaxInt32), nil, nil, writeInt32, nil}, + {"int/reflection path - MinInt32", myint(math.MinInt32), nil, nil, writeInt32, nil}, + }, + }, + { + "UintEncodeValue", + &uintCodec{}, + []subtest{ + { + "wrong type", + wrong, + nil, + nil, + nothing, + ValueEncoderError{ + Name: "UintEncodeValue", + Kinds: []reflect.Kind{reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64, reflect.Uint}, + Received: reflect.ValueOf(wrong), + }, + }, + {"uint8/fast path", uint8(127), nil, nil, writeInt32, nil}, + {"uint16/fast path", uint16(32767), nil, nil, writeInt32, nil}, + {"uint32/fast path", uint32(2147483647), nil, nil, writeInt64, nil}, + {"uint64/fast path", uint64(1234567890987), nil, nil, writeInt64, nil}, + {"uint/fast path", uint(1234567), nil, nil, writeInt64, nil}, + {"uint32/fast path - minsize", uint32(2147483647), &EncodeContext{minSize: true}, nil, writeInt32, nil}, + {"uint64/fast path - minsize", uint64(2147483647), &EncodeContext{minSize: true}, nil, writeInt32, nil}, + {"uint/fast path - minsize", uint(2147483647), &EncodeContext{minSize: true}, nil, writeInt32, nil}, + {"uint32/fast path - minsize too large", uint32(2147483648), &EncodeContext{minSize: true}, nil, writeInt64, nil}, + {"uint64/fast path - minsize too large", uint64(2147483648), &EncodeContext{minSize: true}, nil, writeInt64, nil}, + {"uint/fast path - minsize too large", uint(2147483648), &EncodeContext{minSize: true}, nil, writeInt64, nil}, + {"uint64/fast path - overflow", uint64(1 << 63), nil, nil, nothing, fmt.Errorf("%d overflows int64", uint64(1<<63))}, + {"uint8/reflection path", myuint8(127), nil, nil, writeInt32, nil}, + {"uint16/reflection path", myuint16(32767), nil, nil, writeInt32, nil}, + {"uint32/reflection path", myuint32(2147483647), nil, nil, writeInt64, nil}, + {"uint64/reflection path", myuint64(1234567890987), nil, nil, writeInt64, nil}, + {"uint32/reflection path - minsize", myuint32(2147483647), &EncodeContext{minSize: true}, nil, writeInt32, nil}, + {"uint64/reflection path - minsize", myuint64(2147483647), &EncodeContext{minSize: true}, nil, writeInt32, nil}, + {"uint/reflection path - minsize", myuint(2147483647), &EncodeContext{minSize: true}, nil, writeInt32, nil}, + {"uint32/reflection path - minsize too large", myuint(1 << 31), &EncodeContext{minSize: true}, nil, writeInt64, nil}, + {"uint64/reflection path - minsize too large", myuint64(1 << 31), &EncodeContext{minSize: true}, nil, writeInt64, nil}, + {"uint/reflection path - minsize too large", myuint(2147483648), &EncodeContext{minSize: true}, nil, writeInt64, nil}, + {"uint64/reflection path - overflow", myuint64(1 << 63), nil, nil, nothing, fmt.Errorf("%d overflows int64", uint64(1<<63))}, + }, + }, + { + "FloatEncodeValue", + ValueEncoderFunc(floatEncodeValue), + []subtest{ + { + "wrong type", + wrong, + nil, + nil, + nothing, + ValueEncoderError{ + Name: 
"FloatEncodeValue", + Kinds: []reflect.Kind{reflect.Float32, reflect.Float64}, + Received: reflect.ValueOf(wrong), + }, + }, + {"float32/fast path", float32(3.14159), nil, nil, writeDouble, nil}, + {"float64/fast path", float64(3.14159), nil, nil, writeDouble, nil}, + {"float32/reflection path", myfloat32(3.14159), nil, nil, writeDouble, nil}, + {"float64/reflection path", myfloat64(3.14159), nil, nil, writeDouble, nil}, + }, + }, + { + "TimeEncodeValue", + &timeCodec{}, + []subtest{ + { + "wrong type", + wrong, + nil, + nil, + nothing, + ValueEncoderError{Name: "TimeEncodeValue", Types: []reflect.Type{tTime}, Received: reflect.ValueOf(wrong)}, + }, + {"time.Time", now, nil, nil, writeDateTime, nil}, + }, + }, + { + "MapEncodeValue", + &mapCodec{}, + []subtest{ + { + "wrong kind", + wrong, + nil, + nil, + nothing, + ValueEncoderError{Name: "MapEncodeValue", Kinds: []reflect.Kind{reflect.Map}, Received: reflect.ValueOf(wrong)}, + }, + { + "WriteDocument Error", + map[string]interface{}{}, + nil, + &valueReaderWriter{Err: errors.New("wd error"), ErrAfter: writeDocument}, + writeDocument, + errors.New("wd error"), + }, + { + "Lookup Error", + map[string]int{"foo": 1}, + &EncodeContext{Registry: newTestRegistry()}, + &valueReaderWriter{}, + writeDocument, + fmt.Errorf("no encoder found for int"), + }, + { + "WriteDocumentElement Error", + map[string]interface{}{"foo": "bar"}, + &EncodeContext{Registry: buildDefaultRegistry()}, + &valueReaderWriter{Err: errors.New("wde error"), ErrAfter: writeDocumentElement}, + writeDocumentElement, + errors.New("wde error"), + }, + { + "EncodeValue Error", + map[string]interface{}{"foo": "bar"}, + &EncodeContext{Registry: buildDefaultRegistry()}, + &valueReaderWriter{Err: errors.New("ev error"), ErrAfter: writeString}, + writeString, + errors.New("ev error"), + }, + { + "empty map/success", + map[string]interface{}{}, + &EncodeContext{Registry: newTestRegistry()}, + &valueReaderWriter{}, + writeDocumentEnd, + nil, + }, + { + "with interface/success", + map[string]myInterface{"foo": myStruct{1}}, + &EncodeContext{Registry: buildDefaultRegistry()}, + nil, + writeDocumentEnd, + nil, + }, + { + "with interface/nil/success", + map[string]myInterface{"foo": nil}, + &EncodeContext{Registry: buildDefaultRegistry()}, + nil, + writeDocumentEnd, + nil, + }, + { + "non-string key success", + map[int]interface{}{ + 1: "foobar", + }, + &EncodeContext{Registry: buildDefaultRegistry()}, + &valueReaderWriter{}, + writeDocumentEnd, + nil, + }, + }, + }, + { + "ArrayEncodeValue", + ValueEncoderFunc(arrayEncodeValue), + []subtest{ + { + "wrong kind", + wrong, + nil, + nil, + nothing, + ValueEncoderError{Name: "ArrayEncodeValue", Kinds: []reflect.Kind{reflect.Array}, Received: reflect.ValueOf(wrong)}, + }, + { + "WriteArray Error", + [1]string{}, + nil, + &valueReaderWriter{Err: errors.New("wa error"), ErrAfter: writeArray}, + writeArray, + errors.New("wa error"), + }, + { + "Lookup Error", + [1]int{1}, + &EncodeContext{Registry: newTestRegistry()}, + &valueReaderWriter{}, + writeArray, + fmt.Errorf("no encoder found for int"), + }, + { + "WriteArrayElement Error", + [1]string{"foo"}, + &EncodeContext{Registry: buildDefaultRegistry()}, + &valueReaderWriter{Err: errors.New("wae error"), ErrAfter: writeArrayElement}, + writeArrayElement, + errors.New("wae error"), + }, + { + "EncodeValue Error", + [1]string{"foo"}, + &EncodeContext{Registry: buildDefaultRegistry()}, + &valueReaderWriter{Err: errors.New("ev error"), ErrAfter: writeString}, + writeString, + errors.New("ev error"), + 
}, + { + "[1]E/success", + [1]E{{"hello", "world"}}, + &EncodeContext{Registry: buildDefaultRegistry()}, + nil, + writeDocumentEnd, + nil, + }, + { + "[1]E/success", + [1]E{{"hello", nil}}, + &EncodeContext{Registry: buildDefaultRegistry()}, + nil, + writeDocumentEnd, + nil, + }, + { + "[1]interface/success", + [1]myInterface{myStruct{1}}, + &EncodeContext{Registry: buildDefaultRegistry()}, + nil, + writeArrayEnd, + nil, + }, + { + "[1]interface/nil/success", + [1]myInterface{nil}, + &EncodeContext{Registry: buildDefaultRegistry()}, + nil, + writeArrayEnd, + nil, + }, + }, + }, + { + "SliceEncodeValue", + &sliceCodec{}, + []subtest{ + { + "wrong kind", + wrong, + nil, + nil, + nothing, + ValueEncoderError{Name: "SliceEncodeValue", Kinds: []reflect.Kind{reflect.Slice}, Received: reflect.ValueOf(wrong)}, + }, + { + "WriteArray Error", + []string{}, + nil, + &valueReaderWriter{Err: errors.New("wa error"), ErrAfter: writeArray}, + writeArray, + errors.New("wa error"), + }, + { + "Lookup Error", + []int{1}, + &EncodeContext{Registry: newTestRegistry()}, + &valueReaderWriter{}, + writeArray, + fmt.Errorf("no encoder found for int"), + }, + { + "WriteArrayElement Error", + []string{"foo"}, + &EncodeContext{Registry: buildDefaultRegistry()}, + &valueReaderWriter{Err: errors.New("wae error"), ErrAfter: writeArrayElement}, + writeArrayElement, + errors.New("wae error"), + }, + { + "EncodeValue Error", + []string{"foo"}, + &EncodeContext{Registry: buildDefaultRegistry()}, + &valueReaderWriter{Err: errors.New("ev error"), ErrAfter: writeString}, + writeString, + errors.New("ev error"), + }, + { + "D/success", + D{{"hello", "world"}}, + &EncodeContext{Registry: buildDefaultRegistry()}, + nil, + writeDocumentEnd, + nil, + }, + { + "D/success", + D{{"hello", nil}}, + &EncodeContext{Registry: buildDefaultRegistry()}, + nil, + writeDocumentEnd, + nil, + }, + { + "empty slice/success", + []interface{}{}, + &EncodeContext{Registry: newTestRegistry()}, + &valueReaderWriter{}, + writeArrayEnd, + nil, + }, + { + "interface/success", + []myInterface{myStruct{1}}, + &EncodeContext{Registry: buildDefaultRegistry()}, + nil, + writeArrayEnd, + nil, + }, + { + "interface/success", + []myInterface{nil}, + &EncodeContext{Registry: buildDefaultRegistry()}, + nil, + writeArrayEnd, + nil, + }, + }, + }, + { + "ObjectIDEncodeValue", + ValueEncoderFunc(objectIDEncodeValue), + []subtest{ + { + "wrong type", + wrong, + nil, + nil, + nothing, + ValueEncoderError{Name: "ObjectIDEncodeValue", Types: []reflect.Type{tOID}, Received: reflect.ValueOf(wrong)}, + }, + { + "ObjectID/success", + ObjectID{0x01, 0x02, 0x03, 0x04, 0x05, 0x06, 0x07, 0x08, 0x09, 0x0A, 0x0B, 0x0C}, + nil, nil, writeObjectID, nil, + }, + }, + }, + { + "Decimal128EncodeValue", + ValueEncoderFunc(decimal128EncodeValue), + []subtest{ + { + "wrong type", + wrong, + nil, + nil, + nothing, + ValueEncoderError{Name: "Decimal128EncodeValue", Types: []reflect.Type{tDecimal}, Received: reflect.ValueOf(wrong)}, + }, + {"Decimal128/success", d128, nil, nil, writeDecimal128, nil}, + }, + }, + { + "JSONNumberEncodeValue", + ValueEncoderFunc(jsonNumberEncodeValue), + []subtest{ + { + "wrong type", + wrong, + nil, + nil, + nothing, + ValueEncoderError{Name: "JSONNumberEncodeValue", Types: []reflect.Type{tJSONNumber}, Received: reflect.ValueOf(wrong)}, + }, + { + "json.Number/invalid", + json.Number("hello world"), + nil, nil, nothing, errors.New(`strconv.ParseFloat: parsing "hello world": invalid syntax`), + }, + { + "json.Number/int64/success", + json.Number("1234567890"), + 
nil, nil, writeInt64, nil, + }, + { + "json.Number/float64/success", + json.Number("3.14159"), + nil, nil, writeDouble, nil, + }, + }, + }, + { + "URLEncodeValue", + ValueEncoderFunc(urlEncodeValue), + []subtest{ + { + "wrong type", + wrong, + nil, + nil, + nothing, + ValueEncoderError{Name: "URLEncodeValue", Types: []reflect.Type{tURL}, Received: reflect.ValueOf(wrong)}, + }, + {"url.URL", url.URL{Scheme: "http", Host: "example.com"}, nil, nil, writeString, nil}, + }, + }, + { + "ByteSliceEncodeValue", + &byteSliceCodec{}, + []subtest{ + { + "wrong type", + wrong, + nil, + nil, + nothing, + ValueEncoderError{Name: "ByteSliceEncodeValue", Types: []reflect.Type{tByteSlice}, Received: reflect.ValueOf(wrong)}, + }, + {"[]byte", []byte{0x01, 0x02, 0x03}, nil, nil, writeBinary, nil}, + {"[]byte/nil", []byte(nil), nil, nil, writeNull, nil}, + }, + }, + { + "EmptyInterfaceEncodeValue", + &emptyInterfaceCodec{}, + []subtest{ + { + "wrong type", + wrong, + nil, + nil, + nothing, + ValueEncoderError{Name: "EmptyInterfaceEncodeValue", Types: []reflect.Type{tEmpty}, Received: reflect.ValueOf(wrong)}, + }, + }, + }, + { + "ValueMarshalerEncodeValue", + ValueEncoderFunc(valueMarshalerEncodeValue), + []subtest{ + { + "wrong type", + wrong, + nil, + nil, + nothing, + ValueEncoderError{ + Name: "ValueMarshalerEncodeValue", + Types: []reflect.Type{tValueMarshaler}, + Received: reflect.ValueOf(wrong), + }, + }, + { + "MarshalBSONValue error", + testValueMarshaler{err: errors.New("mbsonv error")}, + nil, + nil, + nothing, + errors.New("mbsonv error"), + }, + { + "Copy error", + testValueMarshaler{}, + nil, + nil, + nothing, + fmt.Errorf("cannot copy unknown BSON type %s", Type(0)), + }, + { + "success struct implementation", + testValueMarshaler{t: TypeString, buf: []byte{0x04, 0x00, 0x00, 0x00, 'f', 'o', 'o', 0x00}}, + nil, + nil, + writeString, + nil, + }, + { + "success ptr to struct implementation", + &testValueMarshaler{t: TypeString, buf: []byte{0x04, 0x00, 0x00, 0x00, 'f', 'o', 'o', 0x00}}, + nil, + nil, + writeString, + nil, + }, + { + "success nil ptr to struct implementation", + nilValueMarshaler, + nil, + nil, + writeNull, + nil, + }, + { + "success ptr to ptr implementation", + &testValueMarshalPtr{t: TypeString, buf: []byte{0x04, 0x00, 0x00, 0x00, 'f', 'o', 'o', 0x00}}, + nil, + nil, + writeString, + nil, + }, + { + "unaddressable ptr implementation", + testValueMarshalPtr{t: TypeString, buf: []byte{0x04, 0x00, 0x00, 0x00, 'f', 'o', 'o', 0x00}}, + nil, + nil, + nothing, + ValueEncoderError{ + Name: "ValueMarshalerEncodeValue", + Types: []reflect.Type{tValueMarshaler}, + Received: reflect.ValueOf(testValueMarshalPtr{}), + }, + }, + }, + }, + { + "MarshalerEncodeValue", + ValueEncoderFunc(marshalerEncodeValue), + []subtest{ + { + "wrong type", + wrong, + nil, + nil, + nothing, + ValueEncoderError{Name: "MarshalerEncodeValue", Types: []reflect.Type{tMarshaler}, Received: reflect.ValueOf(wrong)}, + }, + { + "MarshalBSON error", + testMarshaler{err: errors.New("mbson error")}, + nil, + nil, + nothing, + errors.New("mbson error"), + }, + { + "success struct implementation", + testMarshaler{buf: bsoncore.BuildDocument(nil, bsoncore.AppendDoubleElement(nil, "pi", 3.14159))}, + nil, + nil, + writeDocumentEnd, + nil, + }, + { + "success ptr to struct implementation", + &testMarshaler{buf: bsoncore.BuildDocument(nil, bsoncore.AppendDoubleElement(nil, "pi", 3.14159))}, + nil, + nil, + writeDocumentEnd, + nil, + }, + { + "success nil ptr to struct implementation", + nilMarshaler, + nil, + nil, + writeNull, + 
nil, + }, + { + "success ptr to ptr implementation", + &testMarshalPtr{buf: bsoncore.BuildDocument(nil, bsoncore.AppendDoubleElement(nil, "pi", 3.14159))}, + nil, + nil, + writeDocumentEnd, + nil, + }, + { + "unaddressable ptr implementation", + testMarshalPtr{buf: bsoncore.BuildDocument(nil, bsoncore.AppendDoubleElement(nil, "pi", 3.14159))}, + nil, + nil, + nothing, + ValueEncoderError{Name: "MarshalerEncodeValue", Types: []reflect.Type{tMarshaler}, Received: reflect.ValueOf(testMarshalPtr{})}, + }, + }, + }, + { + "PointerCodec.EncodeValue", + &pointerCodec{}, + []subtest{ + { + "nil", + nil, + nil, + nil, + writeNull, + nil, + }, + { + "not pointer", + int32(123456), + nil, + nil, + nothing, + ValueEncoderError{Name: "PointerCodec.EncodeValue", Kinds: []reflect.Kind{reflect.Ptr}, Received: reflect.ValueOf(int32(123456))}, + }, + { + "typed nil", + (*int32)(nil), + nil, + nil, + writeNull, + nil, + }, + { + "no encoder", + &wrong, + &EncodeContext{Registry: buildDefaultRegistry()}, + nil, + nothing, + errNoEncoder{Type: reflect.TypeOf(wrong)}, + }, + }, + }, + { + "pointer implementation addressable interface", + &pointerCodec{}, + []subtest{ + { + "ValueMarshaler", + &vmStruct, + &EncodeContext{Registry: buildDefaultRegistry()}, + nil, + writeDocumentEnd, + nil, + }, + { + "Marshaler", + &mStruct, + &EncodeContext{Registry: buildDefaultRegistry()}, + nil, + writeDocumentEnd, + nil, + }, + }, + }, + { + "JavaScriptEncodeValue", + ValueEncoderFunc(javaScriptEncodeValue), + []subtest{ + { + "wrong type", + wrong, + nil, + nil, + nothing, + ValueEncoderError{Name: "JavaScriptEncodeValue", Types: []reflect.Type{tJavaScript}, Received: reflect.ValueOf(wrong)}, + }, + {"JavaScript", JavaScript("foobar"), nil, nil, writeJavascript, nil}, + }, + }, + { + "SymbolEncodeValue", + ValueEncoderFunc(symbolEncodeValue), + []subtest{ + { + "wrong type", + wrong, + nil, + nil, + nothing, + ValueEncoderError{Name: "SymbolEncodeValue", Types: []reflect.Type{tSymbol}, Received: reflect.ValueOf(wrong)}, + }, + {"Symbol", Symbol("foobar"), nil, nil, writeSymbol, nil}, + }, + }, + { + "BinaryEncodeValue", + ValueEncoderFunc(binaryEncodeValue), + []subtest{ + { + "wrong type", + wrong, + nil, + nil, + nothing, + ValueEncoderError{Name: "BinaryEncodeValue", Types: []reflect.Type{tBinary}, Received: reflect.ValueOf(wrong)}, + }, + {"Binary/success", Binary{Data: []byte{0x01, 0x02}, Subtype: 0xFF}, nil, nil, writeBinaryWithSubtype, nil}, + }, + }, + { + "UndefinedEncodeValue", + ValueEncoderFunc(undefinedEncodeValue), + []subtest{ + { + "wrong type", + wrong, + nil, + nil, + nothing, + ValueEncoderError{Name: "UndefinedEncodeValue", Types: []reflect.Type{tUndefined}, Received: reflect.ValueOf(wrong)}, + }, + {"Undefined/success", Undefined{}, nil, nil, writeUndefined, nil}, + }, + }, + { + "DateTimeEncodeValue", + ValueEncoderFunc(dateTimeEncodeValue), + []subtest{ + { + "wrong type", + wrong, + nil, + nil, + nothing, + ValueEncoderError{Name: "DateTimeEncodeValue", Types: []reflect.Type{tDateTime}, Received: reflect.ValueOf(wrong)}, + }, + {"DateTime/success", DateTime(1234567890), nil, nil, writeDateTime, nil}, + }, + }, + { + "NullEncodeValue", + ValueEncoderFunc(nullEncodeValue), + []subtest{ + { + "wrong type", + wrong, + nil, + nil, + nothing, + ValueEncoderError{Name: "NullEncodeValue", Types: []reflect.Type{tNull}, Received: reflect.ValueOf(wrong)}, + }, + {"Null/success", Null{}, nil, nil, writeNull, nil}, + }, + }, + { + "RegexEncodeValue", + ValueEncoderFunc(regexEncodeValue), + []subtest{ + { + "wrong 
type", + wrong, + nil, + nil, + nothing, + ValueEncoderError{Name: "RegexEncodeValue", Types: []reflect.Type{tRegex}, Received: reflect.ValueOf(wrong)}, + }, + {"Regex/success", Regex{Pattern: "foo", Options: "bar"}, nil, nil, writeRegex, nil}, + }, + }, + { + "DBPointerEncodeValue", + ValueEncoderFunc(dbPointerEncodeValue), + []subtest{ + { + "wrong type", + wrong, + nil, + nil, + nothing, + ValueEncoderError{Name: "DBPointerEncodeValue", Types: []reflect.Type{tDBPointer}, Received: reflect.ValueOf(wrong)}, + }, + { + "DBPointer/success", + DBPointer{ + DB: "foobar", + Pointer: ObjectID{0x01, 0x02, 0x03, 0x04, 0x05, 0x06, 0x07, 0x08, 0x09, 0x0A, 0x0B, 0x0C}, + }, + nil, nil, writeDBPointer, nil, + }, + }, + }, + { + "TimestampEncodeValue", + ValueEncoderFunc(timestampEncodeValue), + []subtest{ + { + "wrong type", + wrong, + nil, + nil, + nothing, + ValueEncoderError{Name: "TimestampEncodeValue", Types: []reflect.Type{tTimestamp}, Received: reflect.ValueOf(wrong)}, + }, + {"Timestamp/success", Timestamp{T: 12345, I: 67890}, nil, nil, writeTimestamp, nil}, + }, + }, + { + "MinKeyEncodeValue", + ValueEncoderFunc(minKeyEncodeValue), + []subtest{ + { + "wrong type", + wrong, + nil, + nil, + nothing, + ValueEncoderError{Name: "MinKeyEncodeValue", Types: []reflect.Type{tMinKey}, Received: reflect.ValueOf(wrong)}, + }, + {"MinKey/success", MinKey{}, nil, nil, writeMinKey, nil}, + }, + }, + { + "MaxKeyEncodeValue", + ValueEncoderFunc(maxKeyEncodeValue), + []subtest{ + { + "wrong type", + wrong, + nil, + nil, + nothing, + ValueEncoderError{Name: "MaxKeyEncodeValue", Types: []reflect.Type{tMaxKey}, Received: reflect.ValueOf(wrong)}, + }, + {"MaxKey/success", MaxKey{}, nil, nil, writeMaxKey, nil}, + }, + }, + { + "CoreDocumentEncodeValue", + ValueEncoderFunc(coreDocumentEncodeValue), + []subtest{ + { + "wrong type", + wrong, + nil, + nil, + nothing, + ValueEncoderError{ + Name: "CoreDocumentEncodeValue", + Types: []reflect.Type{tCoreDocument}, + Received: reflect.ValueOf(wrong), + }, + }, + { + "WriteDocument Error", + bsoncore.Document{}, + nil, + &valueReaderWriter{Err: errors.New("wd error"), ErrAfter: writeDocument}, + writeDocument, + errors.New("wd error"), + }, + { + "bsoncore.Document.Elements Error", + bsoncore.Document{0xFF, 0x00, 0x00, 0x00, 0x00}, + nil, + &valueReaderWriter{}, + writeDocument, + errors.New("length read exceeds number of bytes available. length=5 bytes=255"), + }, + { + "WriteDocumentElement Error", + bsoncore.Document(buildDocument(bsoncore.AppendNullElement(nil, "foo"))), + nil, + &valueReaderWriter{Err: errors.New("wde error"), ErrAfter: writeDocumentElement}, + writeDocumentElement, + errors.New("wde error"), + }, + { + "encodeValue error", + bsoncore.Document(buildDocument(bsoncore.AppendNullElement(nil, "foo"))), + nil, + &valueReaderWriter{Err: errors.New("ev error"), ErrAfter: writeNull}, + writeNull, + errors.New("ev error"), + }, + { + "iterator error", + bsoncore.Document{0x0C, 0x00, 0x00, 0x00, 0x01, 'f', 'o', 'o', 0x00, 0x01, 0x02, 0x03}, + nil, + &valueReaderWriter{}, + writeDocumentElement, + errors.New("not enough bytes available to read type. 
bytes=3 type=double"), + }, + }, + }, + { + "StructEncodeValue", + newStructCodec(&mapCodec{}), + []subtest{ + { + "interface value", + struct{ Foo myInterface }{Foo: myStruct{1}}, + &EncodeContext{Registry: buildDefaultRegistry()}, + nil, + writeDocumentEnd, + nil, + }, + { + "nil interface value", + struct{ Foo myInterface }{Foo: nil}, + &EncodeContext{Registry: buildDefaultRegistry()}, + nil, + writeDocumentEnd, + nil, + }, + }, + }, + { + "CodeWithScopeEncodeValue", + ValueEncoderFunc(codeWithScopeEncodeValue), + []subtest{ + { + "wrong type", + wrong, + nil, + nil, + nothing, + ValueEncoderError{ + Name: "CodeWithScopeEncodeValue", + Types: []reflect.Type{tCodeWithScope}, + Received: reflect.ValueOf(wrong), + }, + }, + { + "WriteCodeWithScope error", + CodeWithScope{}, + nil, + &valueReaderWriter{Err: errors.New("wcws error"), ErrAfter: writeCodeWithScope}, + writeCodeWithScope, + errors.New("wcws error"), + }, + { + "CodeWithScope/success", + CodeWithScope{ + Code: "var hello = 'world';", + Scope: D{}, + }, + &EncodeContext{Registry: buildDefaultRegistry()}, + nil, writeDocumentEnd, nil, + }, + }, + }, + { + "CoreArrayEncodeValue", + &arrayCodec{}, + []subtest{ + { + "wrong type", + wrong, + nil, + nil, + nothing, + ValueEncoderError{ + Name: "CoreArrayEncodeValue", + Types: []reflect.Type{tCoreArray}, + Received: reflect.ValueOf(wrong), + }, + }, + + { + "WriteArray Error", + bsoncore.Array{}, + nil, + &valueReaderWriter{Err: errors.New("wa error"), ErrAfter: writeArray}, + writeArray, + errors.New("wa error"), + }, + { + "WriteArrayElement Error", + bsoncore.Array(buildDocumentArray(func([]byte) []byte { + return bsoncore.AppendNullElement(nil, "foo") + })), + nil, + &valueReaderWriter{Err: errors.New("wae error"), ErrAfter: writeArrayElement}, + writeArrayElement, + errors.New("wae error"), + }, + { + "encodeValue error", + bsoncore.Array(buildDocumentArray(func([]byte) []byte { + return bsoncore.AppendNullElement(nil, "foo") + })), + nil, + &valueReaderWriter{Err: errors.New("ev error"), ErrAfter: writeNull}, + writeNull, + errors.New("ev error"), + }, + }, + }, + } + + for _, tc := range testCases { + t.Run(tc.name, func(t *testing.T) { + for _, subtest := range tc.subtests { + t.Run(subtest.name, func(t *testing.T) { + var ec EncodeContext + if subtest.ectx != nil { + ec = *subtest.ectx + } + llvrw := new(valueReaderWriter) + if subtest.llvrw != nil { + llvrw = subtest.llvrw + } + llvrw.T = t + err := tc.ve.EncodeValue(ec, llvrw, reflect.ValueOf(subtest.val)) + if !assert.CompareErrors(err, subtest.err) { + t.Errorf("Errors do not match. got %v; want %v", err, subtest.err) + } + invoked := llvrw.invoked + if !cmp.Equal(invoked, subtest.invoke) { + t.Errorf("Incorrect method invoked. 
got %v; want %v", invoked, subtest.invoke) + } + }) + } + }) + } + + t.Run("success path", func(t *testing.T) { + oid := NewObjectID() + oids := []ObjectID{NewObjectID(), NewObjectID(), NewObjectID()} + var str = new(string) + *str = "bar" + now := time.Now().Truncate(time.Millisecond) + murl, err := url.Parse("https://mongodb.com/random-url?hello=world") + if err != nil { + t.Errorf("Error parsing URL: %v", err) + t.FailNow() + } + decimal128, err := ParseDecimal128("1.5e10") + if err != nil { + t.Errorf("Error parsing decimal128: %v", err) + t.FailNow() + } + + testCases := []struct { + name string + value interface{} + b []byte + err error + }{ + { + "map[string]int", + map[string]int32{"foo": 1}, + []byte{ + 0x0E, 0x00, 0x00, 0x00, + 0x10, 'f', 'o', 'o', 0x00, + 0x01, 0x00, 0x00, 0x00, + 0x00, + }, + nil, + }, + { + "map[string]ObjectID", + map[string]ObjectID{"foo": oid}, + buildDocument(bsoncore.AppendObjectIDElement(nil, "foo", oid)), + nil, + }, + { + "map[string][]int32", + map[string][]int32{"Z": {1, 2, 3}}, + buildDocumentArray(func(doc []byte) []byte { + doc = bsoncore.AppendInt32Element(doc, "0", 1) + doc = bsoncore.AppendInt32Element(doc, "1", 2) + return bsoncore.AppendInt32Element(doc, "2", 3) + }), + nil, + }, + { + "map[string][]ObjectID", + map[string][]ObjectID{"Z": oids}, + buildDocumentArray(func(doc []byte) []byte { + doc = bsoncore.AppendObjectIDElement(doc, "0", oids[0]) + doc = bsoncore.AppendObjectIDElement(doc, "1", oids[1]) + return bsoncore.AppendObjectIDElement(doc, "2", oids[2]) + }), + nil, + }, + { + "map[string][]json.Number(int64)", + map[string][]json.Number{"Z": {json.Number("5"), json.Number("10")}}, + buildDocumentArray(func(doc []byte) []byte { + doc = bsoncore.AppendInt64Element(doc, "0", 5) + return bsoncore.AppendInt64Element(doc, "1", 10) + }), + nil, + }, + { + "map[string][]json.Number(float64)", + map[string][]json.Number{"Z": {json.Number("5"), json.Number("10.1")}}, + buildDocumentArray(func(doc []byte) []byte { + doc = bsoncore.AppendInt64Element(doc, "0", 5) + return bsoncore.AppendDoubleElement(doc, "1", 10.1) + }), + nil, + }, + { + "map[string][]*url.URL", + map[string][]*url.URL{"Z": {murl}}, + buildDocumentArray(func(doc []byte) []byte { + return bsoncore.AppendStringElement(doc, "0", murl.String()) + }), + nil, + }, + { + "map[string][]Decimal128", + map[string][]Decimal128{"Z": {decimal128}}, + buildDocumentArray(func(doc []byte) []byte { + return bsoncore.AppendDecimal128Element(doc, "0", decimal128.h, decimal128.l) + }), + nil, + }, + { + "-", + struct { + A string `bson:"-"` + }{ + A: "", + }, + []byte{0x05, 0x00, 0x00, 0x00, 0x00}, + nil, + }, + { + "omitempty", + struct { + A string `bson:",omitempty"` + }{ + A: "", + }, + []byte{0x05, 0x00, 0x00, 0x00, 0x00}, + nil, + }, + { + "omitempty, empty time", + struct { + A time.Time `bson:",omitempty"` + }{ + A: time.Time{}, + }, + []byte{0x05, 0x00, 0x00, 0x00, 0x00}, + nil, + }, + { + "no private fields", + noPrivateFields{a: "should be empty"}, + []byte{0x05, 0x00, 0x00, 0x00, 0x00}, + nil, + }, + { + "minsize", + struct { + A int64 `bson:",minsize"` + }{ + A: 12345, + }, + buildDocument(bsoncore.AppendInt32Element(nil, "a", 12345)), + nil, + }, + { + "inline", + struct { + Foo struct { + A int64 `bson:",minsize"` + } `bson:",inline"` + }{ + Foo: struct { + A int64 `bson:",minsize"` + }{ + A: 12345, + }, + }, + buildDocument(bsoncore.AppendInt32Element(nil, "a", 12345)), + nil, + }, + { + "inline struct pointer", + struct { + Foo *struct { + A int64 `bson:",minsize"` + } 
`bson:",inline"` + Bar *struct { + B int64 + } `bson:",inline"` + }{ + Foo: &struct { + A int64 `bson:",minsize"` + }{ + A: 12345, + }, + Bar: nil, + }, + buildDocument(bsoncore.AppendInt32Element(nil, "a", 12345)), + nil, + }, + { + "nested inline struct pointer", + struct { + Foo *struct { + Bar *struct { + A int64 `bson:",minsize"` + } `bson:",inline"` + } `bson:",inline"` + }{ + Foo: &struct { + Bar *struct { + A int64 `bson:",minsize"` + } `bson:",inline"` + }{ + Bar: &struct { + A int64 `bson:",minsize"` + }{ + A: 12345, + }, + }, + }, + buildDocument(bsoncore.AppendInt32Element(nil, "a", 12345)), + nil, + }, + { + "inline nil struct pointer", + struct { + Foo *struct { + A int64 `bson:",minsize"` + } `bson:",inline"` + }{ + Foo: nil, + }, + buildDocument([]byte{}), + nil, + }, + { + "inline overwrite", + struct { + Foo struct { + A int32 + B string + } `bson:",inline"` + A int64 + }{ + Foo: struct { + A int32 + B string + }{ + A: 0, + B: "foo", + }, + A: 54321, + }, + buildDocument(func(doc []byte) []byte { + doc = bsoncore.AppendStringElement(doc, "b", "foo") + doc = bsoncore.AppendInt64Element(doc, "a", 54321) + return doc + }(nil)), + nil, + }, + { + "inline overwrite respects ordering", + struct { + A int64 + Foo struct { + A int32 + B string + } `bson:",inline"` + }{ + A: 54321, + Foo: struct { + A int32 + B string + }{ + A: 0, + B: "foo", + }, + }, + buildDocument(func(doc []byte) []byte { + doc = bsoncore.AppendInt64Element(doc, "a", 54321) + doc = bsoncore.AppendStringElement(doc, "b", "foo") + return doc + }(nil)), + nil, + }, + { + "inline overwrite with nested structs", + struct { + Foo struct { + A int32 + } `bson:",inline"` + Bar struct { + A int32 + } `bson:",inline"` + A int64 + }{ + Foo: struct { + A int32 + }{}, + Bar: struct { + A int32 + }{}, + A: 54321, + }, + buildDocument(bsoncore.AppendInt64Element(nil, "a", 54321)), + nil, + }, + { + "inline map", + struct { + Foo map[string]string `bson:",inline"` + }{ + Foo: map[string]string{"foo": "bar"}, + }, + buildDocument(bsoncore.AppendStringElement(nil, "foo", "bar")), + nil, + }, + { + "alternate name bson:name", + struct { + A string `bson:"foo"` + }{ + A: "bar", + }, + buildDocument(bsoncore.AppendStringElement(nil, "foo", "bar")), + nil, + }, + { + "alternate name", + struct { + A string `bson:"foo"` + }{ + A: "bar", + }, + buildDocument(bsoncore.AppendStringElement(nil, "foo", "bar")), + nil, + }, + { + "inline, omitempty", + struct { + A string + Foo zeroTest `bson:"omitempty,inline"` + }{ + A: "bar", + Foo: zeroTest{true}, + }, + buildDocument(bsoncore.AppendStringElement(nil, "a", "bar")), + nil, + }, + { + "struct{}", + struct { + A bool + B int32 + C int64 + D uint16 + E uint64 + F float64 + G string + H map[string]string + I []byte + K [2]string + L struct { + M string + } + Q ObjectID + T []struct{} + Y json.Number + Z time.Time + AA json.Number + AB *url.URL + AC Decimal128 + AD *time.Time + AE testValueMarshaler + AF map[string]interface{} + AG CodeWithScope + }{ + A: true, + B: 123, + C: 456, + D: 789, + E: 101112, + F: 3.14159, + G: "Hello, world", + H: map[string]string{"foo": "bar"}, + I: []byte{0x01, 0x02, 0x03}, + K: [2]string{"baz", "qux"}, + L: struct { + M string + }{ + M: "foobar", + }, + Q: oid, + T: nil, + Y: json.Number("5"), + Z: now, + AA: json.Number("10.1"), + AB: murl, + AC: decimal128, + AD: &now, + AE: testValueMarshaler{t: TypeString, buf: bsoncore.AppendString(nil, "hello, world")}, + AF: nil, + AG: CodeWithScope{Code: "var hello = 'world';", Scope: D{{"pi", 3.14159}}}, + }, + 
buildDocument(func(doc []byte) []byte { + doc = bsoncore.AppendBooleanElement(doc, "a", true) + doc = bsoncore.AppendInt32Element(doc, "b", 123) + doc = bsoncore.AppendInt64Element(doc, "c", 456) + doc = bsoncore.AppendInt32Element(doc, "d", 789) + doc = bsoncore.AppendInt64Element(doc, "e", 101112) + doc = bsoncore.AppendDoubleElement(doc, "f", 3.14159) + doc = bsoncore.AppendStringElement(doc, "g", "Hello, world") + doc = bsoncore.AppendDocumentElement(doc, "h", buildDocument(bsoncore.AppendStringElement(nil, "foo", "bar"))) + doc = bsoncore.AppendBinaryElement(doc, "i", 0x00, []byte{0x01, 0x02, 0x03}) + doc = bsoncore.AppendArrayElement(doc, "k", + buildArray(bsoncore.AppendStringElement(bsoncore.AppendStringElement(nil, "0", "baz"), "1", "qux")), + ) + doc = bsoncore.AppendDocumentElement(doc, "l", buildDocument(bsoncore.AppendStringElement(nil, "m", "foobar"))) + doc = bsoncore.AppendObjectIDElement(doc, "q", oid) + doc = bsoncore.AppendNullElement(doc, "t") + doc = bsoncore.AppendInt64Element(doc, "y", 5) + doc = bsoncore.AppendDateTimeElement(doc, "z", now.UnixNano()/int64(time.Millisecond)) + doc = bsoncore.AppendDoubleElement(doc, "aa", 10.1) + doc = bsoncore.AppendStringElement(doc, "ab", murl.String()) + doc = bsoncore.AppendDecimal128Element(doc, "ac", decimal128.h, decimal128.l) + doc = bsoncore.AppendDateTimeElement(doc, "ad", now.UnixNano()/int64(time.Millisecond)) + doc = bsoncore.AppendStringElement(doc, "ae", "hello, world") + doc = bsoncore.AppendNullElement(doc, "af") + doc = bsoncore.AppendCodeWithScopeElement(doc, "ag", + "var hello = 'world';", buildDocument(bsoncore.AppendDoubleElement(nil, "pi", 3.14159)), + ) + return doc + }(nil)), + nil, + }, + { + "struct{[]interface{}}", + struct { + A []bool + B []int32 + C []int64 + D []uint16 + E []uint64 + F []float64 + G []string + H []map[string]string + I [][]byte + K [1][2]string + L []struct { + M string + } + N [][]string + R []ObjectID + T []struct{} + W []map[string]struct{} + X []map[string]struct{} + Y []map[string]struct{} + Z []time.Time + AA []json.Number + AB []*url.URL + AC []Decimal128 + AD []*time.Time + AE []testValueMarshaler + }{ + A: []bool{true}, + B: []int32{123}, + C: []int64{456}, + D: []uint16{789}, + E: []uint64{101112}, + F: []float64{3.14159}, + G: []string{"Hello, world"}, + H: []map[string]string{{"foo": "bar"}}, + I: [][]byte{{0x01, 0x02, 0x03}}, + K: [1][2]string{{"baz", "qux"}}, + L: []struct { + M string + }{ + { + M: "foobar", + }, + }, + N: [][]string{{"foo", "bar"}}, + R: oids, + T: nil, + W: nil, + X: []map[string]struct{}{}, // Should be empty BSON Array + Y: []map[string]struct{}{{}}, // Should be BSON array with one element, an empty BSON SubDocument + Z: []time.Time{now, now}, + AA: []json.Number{json.Number("5"), json.Number("10.1")}, + AB: []*url.URL{murl}, + AC: []Decimal128{decimal128}, + AD: []*time.Time{&now, &now}, + AE: []testValueMarshaler{ + {t: TypeString, buf: bsoncore.AppendString(nil, "hello")}, + {t: TypeString, buf: bsoncore.AppendString(nil, "world")}, + }, + }, + buildDocument(func(doc []byte) []byte { + doc = appendArrayElement(doc, "a", bsoncore.AppendBooleanElement(nil, "0", true)) + doc = appendArrayElement(doc, "b", bsoncore.AppendInt32Element(nil, "0", 123)) + doc = appendArrayElement(doc, "c", bsoncore.AppendInt64Element(nil, "0", 456)) + doc = appendArrayElement(doc, "d", bsoncore.AppendInt32Element(nil, "0", 789)) + doc = appendArrayElement(doc, "e", bsoncore.AppendInt64Element(nil, "0", 101112)) + doc = appendArrayElement(doc, "f", 
bsoncore.AppendDoubleElement(nil, "0", 3.14159)) + doc = appendArrayElement(doc, "g", bsoncore.AppendStringElement(nil, "0", "Hello, world")) + doc = appendArrayElement(doc, "h", bsoncore.BuildDocumentElement(nil, "0", bsoncore.AppendStringElement(nil, "foo", "bar"))) + doc = appendArrayElement(doc, "i", bsoncore.AppendBinaryElement(nil, "0", 0x00, []byte{0x01, 0x02, 0x03})) + doc = appendArrayElement(doc, "k", + appendArrayElement(nil, "0", + bsoncore.AppendStringElement(bsoncore.AppendStringElement(nil, "0", "baz"), "1", "qux")), + ) + doc = appendArrayElement(doc, "l", bsoncore.BuildDocumentElement(nil, "0", bsoncore.AppendStringElement(nil, "m", "foobar"))) + doc = appendArrayElement(doc, "n", + appendArrayElement(nil, "0", + bsoncore.AppendStringElement(bsoncore.AppendStringElement(nil, "0", "foo"), "1", "bar")), + ) + doc = appendArrayElement(doc, "r", + bsoncore.AppendObjectIDElement( + bsoncore.AppendObjectIDElement( + bsoncore.AppendObjectIDElement(nil, + "0", oids[0]), + "1", oids[1]), + "2", oids[2]), + ) + doc = bsoncore.AppendNullElement(doc, "t") + doc = bsoncore.AppendNullElement(doc, "w") + doc = appendArrayElement(doc, "x", nil) + doc = appendArrayElement(doc, "y", bsoncore.BuildDocumentElement(nil, "0", nil)) + doc = appendArrayElement(doc, "z", + bsoncore.AppendDateTimeElement( + bsoncore.AppendDateTimeElement( + nil, "0", now.UnixNano()/int64(time.Millisecond)), + "1", now.UnixNano()/int64(time.Millisecond)), + ) + doc = appendArrayElement(doc, "aa", bsoncore.AppendDoubleElement(bsoncore.AppendInt64Element(nil, "0", 5), "1", 10.10)) + doc = appendArrayElement(doc, "ab", bsoncore.AppendStringElement(nil, "0", murl.String())) + doc = appendArrayElement(doc, "ac", bsoncore.AppendDecimal128Element(nil, "0", decimal128.h, decimal128.l)) + doc = appendArrayElement(doc, "ad", + bsoncore.AppendDateTimeElement( + bsoncore.AppendDateTimeElement(nil, "0", now.UnixNano()/int64(time.Millisecond)), + "1", now.UnixNano()/int64(time.Millisecond)), + ) + doc = appendArrayElement(doc, "ae", + bsoncore.AppendStringElement(bsoncore.AppendStringElement(nil, "0", "hello"), "1", "world"), + ) + return doc + }(nil)), + nil, + }, + } + + for _, tc := range testCases { + t.Run(tc.name, func(t *testing.T) { + b := make(sliceWriter, 0, 512) + vw := NewDocumentWriter(&b) + reg := buildDefaultRegistry() + enc, err := reg.LookupEncoder(reflect.TypeOf(tc.value)) + noerr(t, err) + err = enc.EncodeValue(EncodeContext{Registry: reg}, vw, reflect.ValueOf(tc.value)) + if !errors.Is(err, tc.err) { + t.Errorf("Did not receive expected error. 
got %v; want %v", err, tc.err) + } + if diff := cmp.Diff([]byte(b), tc.b); diff != "" { + t.Errorf("Bytes written differ: (-got +want)\n%s", diff) + t.Errorf("Bytes\ngot: %v\nwant:%v\n", b, tc.b) + t.Errorf("Readers\ngot: %v\nwant:%v\n", bsoncore.Document(b), bsoncore.Document(tc.b)) + } + }) + } + }) + + t.Run("error path", func(t *testing.T) { + testCases := []struct { + name string + value interface{} + err error + }{ + { + "duplicate name struct", + struct { + A int64 + B int64 `bson:"a"` + }{ + A: 0, + B: 54321, + }, + fmt.Errorf("duplicated key a"), + }, + { + "inline map", + struct { + Foo map[string]string `bson:",inline"` + Baz string + }{ + Foo: map[string]string{"baz": "bar"}, + Baz: "hi", + }, + fmt.Errorf("Key baz of inlined map conflicts with a struct field name"), + }, + } + + for _, tc := range testCases { + t.Run(tc.name, func(t *testing.T) { + b := make(sliceWriter, 0, 512) + vw := NewDocumentWriter(&b) + reg := buildDefaultRegistry() + enc, err := reg.LookupEncoder(reflect.TypeOf(tc.value)) + noerr(t, err) + err = enc.EncodeValue(EncodeContext{Registry: reg}, vw, reflect.ValueOf(tc.value)) + if err == nil || !strings.Contains(err.Error(), tc.err.Error()) { + t.Errorf("Did not receive expected error. got %v; want %v", err, tc.err) + } + }) + } + }) + + t.Run("EmptyInterfaceEncodeValue/nil", func(t *testing.T) { + val := reflect.New(tEmpty).Elem() + llvrw := new(valueReaderWriter) + err := (&emptyInterfaceCodec{}).EncodeValue(EncodeContext{Registry: newTestRegistry()}, llvrw, val) + noerr(t, err) + if llvrw.invoked != writeNull { + t.Errorf("Incorrect method called. got %v; want %v", llvrw.invoked, writeNull) + } + }) + + t.Run("EmptyInterfaceEncodeValue/LookupEncoder error", func(t *testing.T) { + val := reflect.New(tEmpty).Elem() + val.Set(reflect.ValueOf(int64(1234567890))) + llvrw := new(valueReaderWriter) + got := (&emptyInterfaceCodec{}).EncodeValue(EncodeContext{Registry: newTestRegistry()}, llvrw, val) + want := errNoEncoder{Type: tInt64} + if !assert.CompareErrors(got, want) { + t.Errorf("Did not receive expected error. got %v; want %v", got, want) + } + }) +} + +type testValueMarshalPtr struct { + t Type + buf []byte + err error +} + +func (tvm *testValueMarshalPtr) MarshalBSONValue() (byte, []byte, error) { + return byte(tvm.t), tvm.buf, tvm.err +} + +type testMarshalPtr struct { + buf []byte + err error +} + +func (tvm *testMarshalPtr) MarshalBSON() ([]byte, error) { + return tvm.buf, tvm.err +} diff --git a/doc.go b/doc.go new file mode 100644 index 0000000..81aceef --- /dev/null +++ b/doc.go @@ -0,0 +1,155 @@ +// Copyright (C) MongoDB, Inc. 2017-present. +// +// Licensed under the Apache License, Version 2.0 (the "License"); you may +// not use this file except in compliance with the License. You may obtain +// a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 + +// Package bson is a library for reading, writing, and manipulating BSON. BSON is a binary serialization format used to +// store documents and make remote procedure calls in MongoDB. The BSON specification is located at https://bsonspec.org. +// The BSON library handles marshaling and unmarshaling of values through a configurable codec system. For a description +// of the codec system and examples of registering custom codecs, see the bsoncodec package. For additional information +// and usage examples, check out the [Work with BSON] page in the Go Driver docs site. +// +// # Raw BSON +// +// The Raw family of types is used to validate and retrieve elements from a slice of bytes. 
This +// type is most useful when you want do lookups on BSON bytes without unmarshaling it into another +// type. +// +// Example: +// +// var raw bson.Raw = ... // bytes from somewhere +// err := raw.Validate() +// if err != nil { return err } +// val := raw.Lookup("foo") +// i32, ok := val.Int32OK() +// // do something with i32... +// +// # Native Go Types +// +// The D and M types defined in this package can be used to build representations of BSON using native Go types. D is a +// slice and M is a map. For more information about the use cases for these types, see the documentation on the type +// definitions. +// +// Note that a D should not be constructed with duplicate key names, as that can cause undefined server behavior. +// +// Example: +// +// bson.D{{"foo", "bar"}, {"hello", "world"}, {"pi", 3.14159}} +// bson.M{"foo": "bar", "hello": "world", "pi": 3.14159} +// +// When decoding BSON to a D or M, the following type mappings apply when unmarshaling: +// +// 1. BSON int32 unmarshals to an int32. +// 2. BSON int64 unmarshals to an int64. +// 3. BSON double unmarshals to a float64. +// 4. BSON string unmarshals to a string. +// 5. BSON boolean unmarshals to a bool. +// 6. BSON embedded document unmarshals to the parent type (i.e. D for a D, M for an M). +// 7. BSON array unmarshals to a bson.A. +// 8. BSON ObjectId unmarshals to a bson.ObjectID. +// 9. BSON datetime unmarshals to a bson.DateTime. +// 10. BSON binary unmarshals to a bson.Binary. +// 11. BSON regular expression unmarshals to a bson.Regex. +// 12. BSON JavaScript unmarshals to a bson.JavaScript. +// 13. BSON code with scope unmarshals to a bson.CodeWithScope. +// 14. BSON timestamp unmarshals to an bson.Timestamp. +// 15. BSON 128-bit decimal unmarshals to an bson.Decimal128. +// 16. BSON min key unmarshals to an bson.MinKey. +// 17. BSON max key unmarshals to an bson.MaxKey. +// 18. BSON undefined unmarshals to a bson.Undefined. +// 19. BSON null unmarshals to nil. +// 20. BSON DBPointer unmarshals to a bson.DBPointer. +// 21. BSON symbol unmarshals to a bson.Symbol. +// +// The above mappings also apply when marshaling a D or M to BSON. Some other useful marshaling mappings are: +// +// 1. time.Time marshals to a BSON datetime. +// 2. int8, int16, and int32 marshal to a BSON int32. +// 3. int marshals to a BSON int32 if the value is between math.MinInt32 and math.MaxInt32, inclusive, and a BSON int64 +// otherwise. +// 4. int64 marshals to BSON int64 (unless [Encoder.IntMinSize] is set). +// 5. uint8 and uint16 marshal to a BSON int32. +// 6. uint, uint32, and uint64 marshal to a BSON int64 (unless [Encoder.IntMinSize] is set). +// 7. BSON null and undefined values will unmarshal into the zero value of a field (e.g. unmarshaling a BSON null or +// undefined value into a string will yield the empty string.). +// +// # Structs +// +// Structs can be marshaled/unmarshaled to/from BSON or Extended JSON. When transforming structs to/from BSON or Extended +// JSON, the following rules apply: +// +// 1. Only exported fields in structs will be marshaled or unmarshaled. +// +// 2. When marshaling a struct, each field will be lowercased to generate the key for the corresponding BSON element. +// For example, a struct field named "Foo" will generate key "foo". This can be overridden via a struct tag (e.g. +// `bson:"fooField"` to generate key "fooField" instead). +// +// 3. An embedded struct field is marshaled as a subdocument. The key will be the lowercased name of the field's type. +// +// 4. 
A pointer field is marshaled as the underlying type if the pointer is non-nil. If the pointer is nil, it is +// marshaled as a BSON null value. +// +// 5. When unmarshaling, a field of type interface{} will follow the D/M type mappings listed above. BSON documents +// unmarshaled into an interface{} field will be unmarshaled as a D. +// +// The encoding of each struct field can be customized by the "bson" struct tag. +// +// This tag behavior is configurable, and different struct tag behavior can be configured by initializing a new +// bsoncodec.StructCodec with the desired tag parser and registering that StructCodec onto the Registry. By default, JSON +// tags are not honored, but that can be enabled by creating a StructCodec with JSONFallbackStructTagParser, like below: +// +// Example: +// +// structcodec, _ := bsoncodec.NewStructCodec(bsoncodec.JSONFallbackStructTagParser) +// +// The bson tag gives the name of the field, possibly followed by a comma-separated list of options. +// The name may be empty in order to specify options without overriding the default field name. The following options can +// be used to configure behavior: +// +// 1. omitempty: If the "omitempty" struct tag is specified on a field, the field will not be marshaled if it is set to +// an "empty" value. Numbers, booleans, and strings are considered empty if their value is equal to the zero value for +// the type (i.e. 0 for numbers, false for booleans, and "" for strings). Slices, maps, and arrays are considered +// empty if they are of length zero. Interfaces and pointers are considered empty if their value is nil. By default, +// structs are only considered empty if the struct type implements [bsoncodec.Zeroer] and the IsZero +// method returns true. Struct types that do not implement [bsoncodec.Zeroer] are never considered empty and will be +// marshaled as embedded documents. NOTE: It is recommended that this tag be used for all slice and map fields. +// +// 2. minsize: If the minsize struct tag is specified on a field of type int64, uint, uint32, or uint64 and the value of +// the field can fit in a signed int32, the field will be serialized as a BSON int32 rather than a BSON int64. For +// other types, this tag is ignored. +// +// 3. truncate: If the truncate struct tag is specified on a field with a non-float numeric type, BSON doubles +// unmarshaled into that field will be truncated at the decimal point. For example, if 3.14 is unmarshaled into a +// field of type int, it will be unmarshaled as 3. If this tag is not specified, the decoder will throw an error if +// the value cannot be decoded without losing precision. For float64 or non-numeric types, this tag is ignored. +// +// 4. inline: If the inline struct tag is specified for a struct or map field, the field will be "flattened" when +// marshaling and "un-flattened" when unmarshaling. This means that all of the fields in that struct/map will be +// pulled up one level and will become top-level fields rather than being fields in a nested document. For example, +// if a map field named "Map" with value map[string]interface{}{"foo": "bar"} is inlined, the resulting document will +// be {"foo": "bar"} instead of {"map": {"foo": "bar"}}. There can only be one inlined map field in a struct. If +// there are duplicated fields in the resulting document when an inlined struct is marshaled, the inlined field will +// be overwritten. If there are duplicated fields in the resulting document when an inlined map is marshaled, an +// error will be returned. 
This tag can be used with fields that are pointers to structs. If an inlined pointer field +// is nil, it will not be marshaled. For fields that are not maps or structs, this tag is ignored. +// +// # Marshaling and Unmarshaling +// +// Manually marshaling and unmarshaling can be done with the Marshal and Unmarshal family of functions. +// +// bsoncodec code provides a system for encoding values to BSON representations and decoding +// values from BSON representations. This package considers both binary BSON and ExtendedJSON as +// BSON representations. The types in this package enable a flexible system for handling this +// encoding and decoding. +// +// The codec system is composed of two parts: +// +// 1) [ValueEncoder] and [ValueDecoder] that handle encoding and decoding Go values to and from BSON +// representations. +// +// 2) A [Registry] that holds these ValueEncoders and ValueDecoders and provides methods for +// retrieving them. +// +// [Work with BSON]: https://www.mongodb.com/docs/drivers/go/current/fundamentals/bson/ +package bson diff --git a/empty_interface_codec.go b/empty_interface_codec.go new file mode 100644 index 0000000..80d44d8 --- /dev/null +++ b/empty_interface_codec.go @@ -0,0 +1,127 @@ +// Copyright (C) MongoDB, Inc. 2017-present. +// +// Licensed under the Apache License, Version 2.0 (the "License"); you may +// not use this file except in compliance with the License. You may obtain +// a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 + +package bson + +import ( + "reflect" +) + +// emptyInterfaceCodec is the Codec used for interface{} values. +type emptyInterfaceCodec struct { + // decodeBinaryAsSlice causes DecodeValue to unmarshal BSON binary field values that are the + // "Generic" or "Old" BSON binary subtype as a Go byte slice instead of a Binary. + decodeBinaryAsSlice bool +} + +// Assert that emptyInterfaceCodec satisfies the typeDecoder interface, which allows it +// to be used by collection type decoders (e.g. map, slice, etc) to set individual values in a +// collection. +var _ typeDecoder = &emptyInterfaceCodec{} + +// EncodeValue is the ValueEncoderFunc for interface{}. +func (eic *emptyInterfaceCodec) EncodeValue(ec EncodeContext, vw ValueWriter, val reflect.Value) error { + if !val.IsValid() || val.Type() != tEmpty { + return ValueEncoderError{Name: "EmptyInterfaceEncodeValue", Types: []reflect.Type{tEmpty}, Received: val} + } + + if val.IsNil() { + return vw.WriteNull() + } + encoder, err := ec.LookupEncoder(val.Elem().Type()) + if err != nil { + return err + } + + return encoder.EncodeValue(ec, vw, val.Elem()) +} + +func (eic *emptyInterfaceCodec) getEmptyInterfaceDecodeType(dc DecodeContext, valueType Type) (reflect.Type, error) { + isDocument := valueType == Type(0) || valueType == TypeEmbeddedDocument + if isDocument { + if dc.defaultDocumentType != nil { + // If the bsontype is an embedded document and the DocumentType is set on the DecodeContext, then return + // that type. + return dc.defaultDocumentType, nil + } + } + + rtype, err := dc.LookupTypeMapEntry(valueType) + if err == nil { + return rtype, nil + } + + if isDocument { + // For documents, fallback to looking up a type map entry for Type(0) or TypeEmbeddedDocument, + // depending on the original valueType. 
+ var lookupType Type + switch valueType { + case Type(0): + lookupType = TypeEmbeddedDocument + case TypeEmbeddedDocument: + lookupType = Type(0) + } + + rtype, err = dc.LookupTypeMapEntry(lookupType) + if err == nil { + return rtype, nil + } + // fallback to bson.D + return tD, nil + } + + return nil, err +} + +func (eic *emptyInterfaceCodec) decodeType(dc DecodeContext, vr ValueReader, t reflect.Type) (reflect.Value, error) { + if t != tEmpty { + return emptyValue, ValueDecoderError{Name: "EmptyInterfaceDecodeValue", Types: []reflect.Type{tEmpty}, Received: reflect.Zero(t)} + } + + rtype, err := eic.getEmptyInterfaceDecodeType(dc, vr.Type()) + if err != nil { + switch vr.Type() { + case TypeNull: + return reflect.Zero(t), vr.ReadNull() + default: + return emptyValue, err + } + } + + decoder, err := dc.LookupDecoder(rtype) + if err != nil { + return emptyValue, err + } + + elem, err := decodeTypeOrValueWithInfo(decoder, dc, vr, rtype) + if err != nil { + return emptyValue, err + } + + if (eic.decodeBinaryAsSlice || dc.binaryAsSlice) && rtype == tBinary { + binElem := elem.Interface().(Binary) + if binElem.Subtype == TypeBinaryGeneric || binElem.Subtype == TypeBinaryBinaryOld { + elem = reflect.ValueOf(binElem.Data) + } + } + + return elem, nil +} + +// DecodeValue is the ValueDecoderFunc for interface{}. +func (eic *emptyInterfaceCodec) DecodeValue(dc DecodeContext, vr ValueReader, val reflect.Value) error { + if !val.CanSet() || val.Type() != tEmpty { + return ValueDecoderError{Name: "EmptyInterfaceDecodeValue", Types: []reflect.Type{tEmpty}, Received: val} + } + + elem, err := eic.decodeType(dc, vr, val.Type()) + if err != nil { + return err + } + + val.Set(elem) + return nil +} diff --git a/encoder.go b/encoder.go new file mode 100644 index 0000000..0ad2432 --- /dev/null +++ b/encoder.go @@ -0,0 +1,123 @@ +// Copyright (C) MongoDB, Inc. 2017-present. +// +// Licensed under the Apache License, Version 2.0 (the "License"); you may +// not use this file except in compliance with the License. You may obtain +// a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 + +package bson + +import ( + "reflect" + "sync" +) + +// This pool is used to keep the allocations of Encoders down. This is only used for the Marshal* +// methods and is not consumable from outside of this package. The Encoders retrieved from this pool +// must have both Reset and SetRegistry called on them. +var encPool = sync.Pool{ + New: func() interface{} { + return new(Encoder) + }, +} + +// An Encoder writes a serialization format to an output stream. It writes to a ValueWriter +// as the destination of BSON data. +type Encoder struct { + ec EncodeContext + vw ValueWriter +} + +// NewEncoder returns a new encoder that writes to vw. +func NewEncoder(vw ValueWriter) *Encoder { + return &Encoder{ + ec: EncodeContext{Registry: defaultRegistry}, + vw: vw, + } +} + +// Encode writes the BSON encoding of val to the stream. +// +// See [Marshal] for details about BSON marshaling behavior. +func (e *Encoder) Encode(val interface{}) error { + if marshaler, ok := val.(Marshaler); ok { + // TODO(skriptble): Should we have a MarshalAppender interface so that we can have []byte reuse? 
+ buf, err := marshaler.MarshalBSON() + if err != nil { + return err + } + return copyDocumentFromBytes(e.vw, buf) + } + + encoder, err := e.ec.LookupEncoder(reflect.TypeOf(val)) + if err != nil { + return err + } + + return encoder.EncodeValue(e.ec, e.vw, reflect.ValueOf(val)) +} + +// Reset will reset the state of the Encoder, using the same *EncodeContext used in +// the original construction but using vw. +func (e *Encoder) Reset(vw ValueWriter) { + e.vw = vw +} + +// SetRegistry replaces the current registry of the Encoder with r. +func (e *Encoder) SetRegistry(r *Registry) { + e.ec.Registry = r +} + +// ErrorOnInlineDuplicates causes the Encoder to return an error if there is a duplicate field in +// the marshaled BSON when the "inline" struct tag option is set. +func (e *Encoder) ErrorOnInlineDuplicates() { + e.ec.errorOnInlineDuplicates = true +} + +// IntMinSize causes the Encoder to marshal Go integer values (int, int8, int16, int32, int64, uint, +// uint8, uint16, uint32, or uint64) as the minimum BSON int size (either 32 or 64 bits) that can +// represent the integer value. +func (e *Encoder) IntMinSize() { + e.ec.minSize = true +} + +// StringifyMapKeysWithFmt causes the Encoder to convert Go map keys to BSON document field name +// strings using fmt.Sprint instead of the default string conversion logic. +func (e *Encoder) StringifyMapKeysWithFmt() { + e.ec.stringifyMapKeysWithFmt = true +} + +// NilMapAsEmpty causes the Encoder to marshal nil Go maps as empty BSON documents instead of BSON +// null. +func (e *Encoder) NilMapAsEmpty() { + e.ec.nilMapAsEmpty = true +} + +// NilSliceAsEmpty causes the Encoder to marshal nil Go slices as empty BSON arrays instead of BSON +// null. +func (e *Encoder) NilSliceAsEmpty() { + e.ec.nilSliceAsEmpty = true +} + +// NilByteSliceAsEmpty causes the Encoder to marshal nil Go byte slices as empty BSON binary values +// instead of BSON null. +func (e *Encoder) NilByteSliceAsEmpty() { + e.ec.nilByteSliceAsEmpty = true +} + +// TODO(GODRIVER-2820): Update the description to remove the note about only examining exported +// TODO struct fields once the logic is updated to also inspect private struct fields. + +// OmitZeroStruct causes the Encoder to consider the zero value for a struct (e.g. MyStruct{}) +// as empty and omit it from the marshaled BSON when the "omitempty" struct tag option is set. +// +// Note that the Encoder only examines exported struct fields when determining if a struct is the +// zero value. It considers pointers to a zero struct value (e.g. &MyStruct{}) not empty. +func (e *Encoder) OmitZeroStruct() { + e.ec.omitZeroStruct = true +} + +// UseJSONStructTags causes the Encoder to fall back to using the "json" struct tag if a "bson" +// struct tag is not specified. +func (e *Encoder) UseJSONStructTags() { + e.ec.useJSONStructTags = true +} diff --git a/encoder_example_test.go b/encoder_example_test.go new file mode 100644 index 0000000..2f8b8a9 --- /dev/null +++ b/encoder_example_test.go @@ -0,0 +1,240 @@ +// Copyright (C) MongoDB, Inc. 2023-present. +// +// Licensed under the Apache License, Version 2.0 (the "License"); you may +// not use this file except in compliance with the License. You may obtain +// a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 + +package bson_test + +import ( + "bytes" + "errors" + "fmt" + "io" + + "gitea.psichedelico.com/go/bson" +) + +func ExampleEncoder() { + // Create an Encoder that writes BSON values to a bytes.Buffer. 
+ buf := new(bytes.Buffer) + vw := bson.NewDocumentWriter(buf) + encoder := bson.NewEncoder(vw) + + type Product struct { + Name string `bson:"name"` + SKU string `bson:"sku"` + Price int64 `bson:"price_cents"` + } + + // Use the Encoder to marshal a BSON document that contains the name, SKU, + // and price (in cents) of a product. + product := Product{ + Name: "Cereal Rounds", + SKU: "AB12345", + Price: 399, + } + err := encoder.Encode(product) + if err != nil { + panic(err) + } + + // Print the BSON document as Extended JSON by converting it to bson.Raw. + fmt.Println(bson.Raw(buf.Bytes()).String()) + // Output: {"name": "Cereal Rounds","sku": "AB12345","price_cents": {"$numberLong":"399"}} +} + +type CityState struct { + City string + State string +} + +func (k CityState) String() string { + return fmt.Sprintf("%s, %s", k.City, k.State) +} + +func ExampleEncoder_StringifyMapKeysWithFmt() { + // Create an Encoder that writes BSON values to a bytes.Buffer. + buf := new(bytes.Buffer) + vw := bson.NewDocumentWriter(buf) + encoder := bson.NewEncoder(vw) + + // Configure the Encoder to convert Go map keys to BSON document field names + // using fmt.Sprintf instead of the default string conversion logic. + encoder.StringifyMapKeysWithFmt() + + // Use the Encoder to marshal a BSON document that contains is a map of + // city and state to a list of zip codes in that city. + zipCodes := map[CityState][]int{ + {City: "New York", State: "NY"}: {10001, 10301, 10451}, + } + err := encoder.Encode(zipCodes) + if err != nil { + panic(err) + } + + // Print the BSON document as Extended JSON by converting it to bson.Raw. + fmt.Println(bson.Raw(buf.Bytes()).String()) + // Output: {"New York, NY": [{"$numberInt":"10001"},{"$numberInt":"10301"},{"$numberInt":"10451"}]} +} + +func ExampleEncoder_UseJSONStructTags() { + // Create an Encoder that writes BSON values to a bytes.Buffer. + buf := new(bytes.Buffer) + vw := bson.NewDocumentWriter(buf) + encoder := bson.NewEncoder(vw) + + type Product struct { + Name string `json:"name"` + SKU string `json:"sku"` + Price int64 `json:"price_cents"` + } + + // Configure the Encoder to use "json" struct tags when decoding if "bson" + // struct tags are not present. + encoder.UseJSONStructTags() + + // Use the Encoder to marshal a BSON document that contains the name, SKU, + // and price (in cents) of a product. + product := Product{ + Name: "Cereal Rounds", + SKU: "AB12345", + Price: 399, + } + err := encoder.Encode(product) + if err != nil { + panic(err) + } + + // Print the BSON document as Extended JSON by converting it to bson.Raw. + fmt.Println(bson.Raw(buf.Bytes()).String()) + // Output: {"name": "Cereal Rounds","sku": "AB12345","price_cents": {"$numberLong":"399"}} +} + +func ExampleEncoder_multipleBSONDocuments() { + // Create an Encoder that writes BSON values to a bytes.Buffer. + buf := new(bytes.Buffer) + vw := bson.NewDocumentWriter(buf) + encoder := bson.NewEncoder(vw) + + type Coordinate struct { + X int + Y int + } + + // Use the encoder to marshal 5 Coordinate values as a sequence of BSON + // documents. + for i := 0; i < 5; i++ { + err := encoder.Encode(Coordinate{ + X: i, + Y: i + 1, + }) + if err != nil { + panic(err) + } + } + + // Read each marshaled BSON document from the buffer and print them as + // Extended JSON by converting them to bson.Raw. 
+ for { + doc, err := bson.ReadDocument(buf) + if errors.Is(err, io.EOF) { + return + } + if err != nil { + panic(err) + } + fmt.Println(doc.String()) + } + // Output: + // {"x": {"$numberInt":"0"},"y": {"$numberInt":"1"}} + // {"x": {"$numberInt":"1"},"y": {"$numberInt":"2"}} + // {"x": {"$numberInt":"2"},"y": {"$numberInt":"3"}} + // {"x": {"$numberInt":"3"},"y": {"$numberInt":"4"}} + // {"x": {"$numberInt":"4"},"y": {"$numberInt":"5"}} +} + +func ExampleEncoder_extendedJSON() { + // Create an Encoder that writes canonical Extended JSON values to a + // bytes.Buffer. + buf := new(bytes.Buffer) + vw := bson.NewExtJSONValueWriter(buf, true, false) + encoder := bson.NewEncoder(vw) + + type Product struct { + Name string `bson:"name"` + SKU string `bson:"sku"` + Price int64 `bson:"price_cents"` + } + + // Use the Encoder to marshal a BSON document that contains the name, SKU, + // and price (in cents) of a product. + product := Product{ + Name: "Cereal Rounds", + SKU: "AB12345", + Price: 399, + } + err := encoder.Encode(product) + if err != nil { + panic(err) + } + + fmt.Println(buf.String()) + // Output: {"name":"Cereal Rounds","sku":"AB12345","price_cents":{"$numberLong":"399"}} +} + +func ExampleEncoder_multipleExtendedJSONDocuments() { + // Create an Encoder that writes canonical Extended JSON values to a + // bytes.Buffer. + buf := new(bytes.Buffer) + vw := bson.NewExtJSONValueWriter(buf, true, false) + encoder := bson.NewEncoder(vw) + + type Coordinate struct { + X int + Y int + } + + // Use the encoder to marshal 5 Coordinate values as a sequence of Extended + // JSON documents. + for i := 0; i < 5; i++ { + err := encoder.Encode(Coordinate{ + X: i, + Y: i + 1, + }) + if err != nil { + panic(err) + } + } + + fmt.Println(buf.String()) + // Output: + // {"x":{"$numberInt":"0"},"y":{"$numberInt":"1"}} + // {"x":{"$numberInt":"1"},"y":{"$numberInt":"2"}} + // {"x":{"$numberInt":"2"},"y":{"$numberInt":"3"}} + // {"x":{"$numberInt":"3"},"y":{"$numberInt":"4"}} + // {"x":{"$numberInt":"4"},"y":{"$numberInt":"5"}} +} + +func ExampleEncoder_IntMinSize() { + // Create an encoder that will marshal integers as the minimum BSON int size + // (either 32 or 64 bits) that can represent the integer value. + type foo struct { + Bar uint32 + } + + buf := new(bytes.Buffer) + vw := bson.NewDocumentWriter(buf) + + enc := bson.NewEncoder(vw) + enc.IntMinSize() + + err := enc.Encode(foo{2}) + if err != nil { + panic(err) + } + + fmt.Println(bson.Raw(buf.Bytes()).String()) + // Output: + // {"bar": {"$numberInt":"2"}} +} diff --git a/encoder_test.go b/encoder_test.go new file mode 100644 index 0000000..a398f99 --- /dev/null +++ b/encoder_test.go @@ -0,0 +1,303 @@ +// Copyright (C) MongoDB, Inc. 2017-present. +// +// Licensed under the Apache License, Version 2.0 (the "License"); you may +// not use this file except in compliance with the License. 
You may obtain +// a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 + +package bson + +import ( + "bytes" + "errors" + "reflect" + "testing" + + "gitea.psichedelico.com/go/bson/internal/assert" + "gitea.psichedelico.com/go/bson/internal/require" + "gitea.psichedelico.com/go/bson/x/bsonx/bsoncore" +) + +func TestBasicEncode(t *testing.T) { + for _, tc := range marshalingTestCases { + t.Run(tc.name, func(t *testing.T) { + got := make(sliceWriter, 0, 1024) + vw := NewDocumentWriter(&got) + reg := defaultRegistry + encoder, err := reg.LookupEncoder(reflect.TypeOf(tc.val)) + noerr(t, err) + err = encoder.EncodeValue(EncodeContext{Registry: reg}, vw, reflect.ValueOf(tc.val)) + noerr(t, err) + + if !bytes.Equal(got, tc.want) { + t.Errorf("Bytes are not equal. got %v; want %v", got, tc.want) + t.Errorf("Bytes:\n%v\n%v", got, tc.want) + } + }) + } +} + +func TestEncoderEncode(t *testing.T) { + for _, tc := range marshalingTestCases { + t.Run(tc.name, func(t *testing.T) { + got := make(sliceWriter, 0, 1024) + vw := NewDocumentWriter(&got) + enc := NewEncoder(vw) + err := enc.Encode(tc.val) + noerr(t, err) + + if !bytes.Equal(got, tc.want) { + t.Errorf("Bytes are not equal. got %v; want %v", got, tc.want) + t.Errorf("Bytes:\n%v\n%v", got, tc.want) + } + }) + } + + t.Run("Marshaler", func(t *testing.T) { + testCases := []struct { + name string + buf []byte + err error + wanterr error + vw ValueWriter + }{ + { + "error", + nil, + errors.New("Marshaler error"), + errors.New("Marshaler error"), + &valueReaderWriter{}, + }, + { + "copy error", + []byte{0x05, 0x00, 0x00, 0x00, 0x00}, + nil, + errors.New("copy error"), + &valueReaderWriter{Err: errors.New("copy error"), ErrAfter: writeDocument}, + }, + { + "success", + []byte{0x07, 0x00, 0x00, 0x00, 0x0A, 0x00, 0x00}, + nil, + nil, + nil, + }, + } + + for _, tc := range testCases { + t.Run(tc.name, func(t *testing.T) { + marshaler := testMarshaler{buf: tc.buf, err: tc.err} + + var vw ValueWriter + b := make(sliceWriter, 0, 100) + compareVW := false + if tc.vw != nil { + vw = tc.vw + } else { + compareVW = true + vw = NewDocumentWriter(&b) + } + enc := NewEncoder(vw) + got := enc.Encode(marshaler) + want := tc.wanterr + if !assert.CompareErrors(got, want) { + t.Errorf("Did not receive expected error. got %v; want %v", got, want) + } + if compareVW { + buf := b + if !bytes.Equal(buf, tc.buf) { + t.Errorf("Copied bytes do not match. got %v; want %v", buf, tc.buf) + } + } + }) + } + }) +} + +type testMarshaler struct { + buf []byte + err error +} + +func (tm testMarshaler) MarshalBSON() ([]byte, error) { return tm.buf, tm.err } + +func docToBytes(d interface{}) []byte { + b, err := Marshal(d) + if err != nil { + panic(err) + } + return b +} + +type stringerTest struct{} + +func (stringerTest) String() string { + return "test key" +} + +func TestEncoderConfiguration(t *testing.T) { + type inlineDuplicateInner struct { + Duplicate string + } + + type inlineDuplicateOuter struct { + Inline inlineDuplicateInner `bson:",inline"` + Duplicate string + } + + type zeroStruct struct { + MyString string + } + + testCases := []struct { + description string + configure func(*Encoder) + input interface{} + want []byte + wantErr error + }{ + // Test that ErrorOnInlineDuplicates causes the Encoder to return an error if there are any + // duplicate fields in the marshaled document caused by using the "inline" struct tag. 
+ { + description: "ErrorOnInlineDuplicates", + configure: func(enc *Encoder) { + enc.ErrorOnInlineDuplicates() + }, + input: inlineDuplicateOuter{ + Inline: inlineDuplicateInner{Duplicate: "inner"}, + Duplicate: "outer", + }, + wantErr: errors.New("struct bson.inlineDuplicateOuter has duplicated key duplicate"), + }, + // Test that IntMinSize encodes Go int and int64 values as BSON int32 if the value is small + // enough. + { + description: "IntMinSize", + configure: func(enc *Encoder) { + enc.IntMinSize() + }, + input: D{ + {Key: "myInt", Value: int(1)}, + {Key: "myInt64", Value: int64(1)}, + {Key: "myUint", Value: uint(1)}, + {Key: "myUint32", Value: uint32(1)}, + {Key: "myUint64", Value: uint64(1)}, + }, + want: bsoncore.NewDocumentBuilder(). + AppendInt32("myInt", 1). + AppendInt32("myInt64", 1). + AppendInt32("myUint", 1). + AppendInt32("myUint32", 1). + AppendInt32("myUint64", 1). + Build(), + }, + // Test that StringifyMapKeysWithFmt uses fmt.Sprint to convert map keys to BSON field names. + { + description: "StringifyMapKeysWithFmt", + configure: func(enc *Encoder) { + enc.StringifyMapKeysWithFmt() + }, + input: map[stringerTest]string{ + {}: "test value", + }, + want: bsoncore.NewDocumentBuilder(). + AppendString("test key", "test value"). + Build(), + }, + // Test that NilMapAsEmpty encodes nil Go maps as empty BSON documents. + { + description: "NilMapAsEmpty", + configure: func(enc *Encoder) { + enc.NilMapAsEmpty() + }, + input: D{{Key: "myMap", Value: map[string]string(nil)}}, + want: bsoncore.NewDocumentBuilder(). + AppendDocument("myMap", bsoncore.NewDocumentBuilder().Build()). + Build(), + }, + // Test that NilSliceAsEmpty encodes nil Go slices as empty BSON arrays. + { + description: "NilSliceAsEmpty", + configure: func(enc *Encoder) { + enc.NilSliceAsEmpty() + }, + input: D{{Key: "mySlice", Value: []string(nil)}}, + want: bsoncore.NewDocumentBuilder(). + AppendArray("mySlice", bsoncore.NewArrayBuilder().Build()). + Build(), + }, + // Test that NilByteSliceAsEmpty encodes nil Go byte slices as empty BSON binary elements. + { + description: "NilByteSliceAsEmpty", + configure: func(enc *Encoder) { + enc.NilByteSliceAsEmpty() + }, + input: D{{Key: "myBytes", Value: []byte(nil)}}, + want: bsoncore.NewDocumentBuilder(). + AppendBinary("myBytes", TypeBinaryGeneric, []byte{}). + Build(), + }, + // Test that OmitZeroStruct omits empty structs from the marshaled document if the + // "omitempty" struct tag is used. + { + description: "OmitZeroStruct", + configure: func(enc *Encoder) { + enc.OmitZeroStruct() + }, + input: struct { + Zero zeroStruct `bson:",omitempty"` + }{}, + want: bsoncore.NewDocumentBuilder().Build(), + }, + // Test that UseJSONStructTags causes the Encoder to fall back to "json" struct tags if + // "bson" struct tags are not available. + { + description: "UseJSONStructTags", + configure: func(enc *Encoder) { + enc.UseJSONStructTags() + }, + input: struct { + StructFieldName string `json:"jsonFieldName"` + }{ + StructFieldName: "test value", + }, + want: bsoncore.NewDocumentBuilder(). + AppendString("jsonFieldName", "test value"). + Build(), + }, + } + + for _, tc := range testCases { + tc := tc // Capture range variable. 
+ + t.Run(tc.description, func(t *testing.T) { + t.Parallel() + + got := new(bytes.Buffer) + vw := NewDocumentWriter(got) + enc := NewEncoder(vw) + + tc.configure(enc) + + err := enc.Encode(tc.input) + if tc.wantErr != nil { + assert.Equal(t, tc.wantErr, err, "expected and actual errors do not match") + return + } + require.NoError(t, err, "Encode error") + + assert.Equal(t, tc.want, got.Bytes(), "expected and actual encoded BSON do not match") + + // After we compare the raw bytes, also decode the expected and actual BSON as a bson.D + // and compare them. The goal is to make assertion failures easier to debug because + // binary diffs are very difficult to understand. + var wantDoc D + err = Unmarshal(tc.want, &wantDoc) + require.NoError(t, err, "Unmarshal error") + var gotDoc D + err = Unmarshal(got.Bytes(), &gotDoc) + require.NoError(t, err, "Unmarshal error") + + assert.Equal(t, wantDoc, gotDoc, "expected and actual decoded documents do not match") + }) + } +} diff --git a/example_test.go b/example_test.go new file mode 100644 index 0000000..c7b420a --- /dev/null +++ b/example_test.go @@ -0,0 +1,143 @@ +// Copyright (C) MongoDB, Inc. 2023-present. +// +// Licensed under the Apache License, Version 2.0 (the "License"); you may +// not use this file except in compliance with the License. You may obtain +// a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 + +package bson_test + +import ( + "fmt" + "time" + + "gitea.psichedelico.com/go/bson" +) + +// This example uses Raw to skip parsing a nested document in a BSON message. +func ExampleRaw_unmarshal() { + b, err := bson.Marshal(bson.M{ + "Word": "beach", + "Synonyms": bson.A{"coast", "shore", "waterfront"}, + }) + if err != nil { + panic(err) + } + + var res struct { + Word string + Synonyms bson.Raw // Don't parse the whole list, we just want to count the elements. + } + + err = bson.Unmarshal(b, &res) + if err != nil { + panic(err) + } + elems, err := res.Synonyms.Elements() + if err != nil { + panic(err) + } + fmt.Printf("%s, synonyms count: %d\n", res.Word, len(elems)) + + // Output: beach, synonyms count: 3 +} + +// This example uses Raw to add a precomputed BSON document during marshal. +func ExampleRaw_marshal() { + precomputed, err := bson.Marshal(bson.M{"Precomputed": true}) + if err != nil { + panic(err) + } + + msg := struct { + Message string + Metadata bson.Raw + }{ + Message: "Hello World!", + Metadata: precomputed, + } + + b, err := bson.Marshal(msg) + if err != nil { + panic(err) + } + // Print the Extended JSON by converting BSON to bson.Raw. + fmt.Println(bson.Raw(b).String()) + + // Output: {"message": "Hello World!","metadata": {"Precomputed": true}} +} + +// This example uses RawValue to delay parsing a value in a BSON message. +func ExampleRawValue_unmarshal() { + b1, err := bson.Marshal(bson.M{ + "Format": "UNIX", + "Timestamp": 1675282389, + }) + if err != nil { + panic(err) + } + + b2, err := bson.Marshal(bson.M{ + "Format": "RFC3339", + "Timestamp": time.Unix(1675282389, 0).Format(time.RFC3339), + }) + if err != nil { + panic(err) + } + + for _, b := range [][]byte{b1, b2} { + var res struct { + Format string + Timestamp bson.RawValue // Delay parsing until we know the timestamp format. 
+ } + + err = bson.Unmarshal(b, &res) + if err != nil { + panic(err) + } + + var t time.Time + switch res.Format { + case "UNIX": + t = time.Unix(res.Timestamp.AsInt64(), 0) + case "RFC3339": + t, err = time.Parse(time.RFC3339, res.Timestamp.StringValue()) + if err != nil { + panic(err) + } + } + fmt.Println(res.Format, t.Unix()) + } + + // Output: + // UNIX 1675282389 + // RFC3339 1675282389 +} + +// This example uses RawValue to add a precomputed BSON string value during marshal. +func ExampleRawValue_marshal() { + t, val, err := bson.MarshalValue("Precomputed message!") + if err != nil { + panic(err) + } + precomputed := bson.RawValue{ + Type: t, + Value: val, + } + + msg := struct { + Message bson.RawValue + Time time.Time + }{ + Message: precomputed, + Time: time.Unix(1675282389, 0), + } + + b, err := bson.Marshal(msg) + if err != nil { + panic(err) + } + // Print the Extended JSON by converting BSON to bson.Raw. + fmt.Println(bson.Raw(b).String()) + + // Output: {"message": "Precomputed message!","time": {"$date":{"$numberLong":"1675282389000"}}} +} diff --git a/extjson_parser.go b/extjson_parser.go new file mode 100644 index 0000000..a63f23a --- /dev/null +++ b/extjson_parser.go @@ -0,0 +1,804 @@ +// Copyright (C) MongoDB, Inc. 2017-present. +// +// Licensed under the Apache License, Version 2.0 (the "License"); you may +// not use this file except in compliance with the License. You may obtain +// a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 + +package bson + +import ( + "encoding/base64" + "encoding/hex" + "errors" + "fmt" + "io" + "strings" +) + +const maxNestingDepth = 200 + +// ErrInvalidJSON indicates the JSON input is invalid +var ErrInvalidJSON = errors.New("invalid JSON input") + +type jsonParseState byte + +const ( + jpsStartState jsonParseState = iota + jpsSawBeginObject + jpsSawEndObject + jpsSawBeginArray + jpsSawEndArray + jpsSawColon + jpsSawComma + jpsSawKey + jpsSawValue + jpsDoneState + jpsInvalidState +) + +type jsonParseMode byte + +const ( + jpmInvalidMode jsonParseMode = iota + jpmObjectMode + jpmArrayMode +) + +type extJSONValue struct { + t Type + v interface{} +} + +type extJSONObject struct { + keys []string + values []*extJSONValue +} + +type extJSONParser struct { + js *jsonScanner + s jsonParseState + m []jsonParseMode + k string + v *extJSONValue + + err error + canonicalOnly bool + depth int + maxDepth int + + emptyObject bool + relaxedUUID bool +} + +// newExtJSONParser returns a new extended JSON parser, ready to to begin +// parsing from the first character of the argued json input. It will not +// perform any read-ahead and will therefore not report any errors about +// malformed JSON at this point. 
+func newExtJSONParser(r io.Reader, canonicalOnly bool) *extJSONParser { + return &extJSONParser{ + js: &jsonScanner{r: r}, + s: jpsStartState, + m: []jsonParseMode{}, + canonicalOnly: canonicalOnly, + maxDepth: maxNestingDepth, + } +} + +// peekType examines the next value and returns its BSON Type +func (ejp *extJSONParser) peekType() (Type, error) { + var t Type + var err error + initialState := ejp.s + + ejp.advanceState() + switch ejp.s { + case jpsSawValue: + t = ejp.v.t + case jpsSawBeginArray: + t = TypeArray + case jpsInvalidState: + err = ejp.err + case jpsSawComma: + // in array mode, seeing a comma means we need to progress again to actually observe a type + if ejp.peekMode() == jpmArrayMode { + return ejp.peekType() + } + case jpsSawEndArray: + // this would only be a valid state if we were in array mode, so return end-of-array error + err = ErrEOA + case jpsSawBeginObject: + // peek key to determine type + ejp.advanceState() + switch ejp.s { + case jpsSawEndObject: // empty embedded document + t = TypeEmbeddedDocument + ejp.emptyObject = true + case jpsInvalidState: + err = ejp.err + case jpsSawKey: + if initialState == jpsStartState { + return TypeEmbeddedDocument, nil + } + t = wrapperKeyBSONType(ejp.k) + + // if $uuid is encountered, parse as binary subtype 4 + if ejp.k == "$uuid" { + ejp.relaxedUUID = true + t = TypeBinary + } + + switch t { + case TypeJavaScript: + // just saw $code, need to check for $scope at same level + _, err = ejp.readValue(TypeJavaScript) + if err != nil { + break + } + + switch ejp.s { + case jpsSawEndObject: // type is TypeJavaScript + case jpsSawComma: + ejp.advanceState() + + if ejp.s == jpsSawKey && ejp.k == "$scope" { + t = TypeCodeWithScope + } else { + err = fmt.Errorf("invalid extended JSON: unexpected key %s in CodeWithScope object", ejp.k) + } + case jpsInvalidState: + err = ejp.err + default: + err = ErrInvalidJSON + } + case TypeCodeWithScope: + err = errors.New("invalid extended JSON: code with $scope must contain $code before $scope") + } + } + } + + return t, err +} + +// readKey parses the next key and its type and returns them +func (ejp *extJSONParser) readKey() (string, Type, error) { + if ejp.emptyObject { + ejp.emptyObject = false + return "", 0, ErrEOD + } + + // advance to key (or return with error) + switch ejp.s { + case jpsStartState: + ejp.advanceState() + if ejp.s == jpsSawBeginObject { + ejp.advanceState() + } + case jpsSawBeginObject: + ejp.advanceState() + case jpsSawValue, jpsSawEndObject, jpsSawEndArray: + ejp.advanceState() + switch ejp.s { + case jpsSawBeginObject, jpsSawComma: + ejp.advanceState() + case jpsSawEndObject: + return "", 0, ErrEOD + case jpsDoneState: + return "", 0, io.EOF + case jpsInvalidState: + return "", 0, ejp.err + default: + return "", 0, ErrInvalidJSON + } + case jpsSawKey: // do nothing (key was peeked before) + default: + return "", 0, invalidRequestError("key") + } + + // read key + var key string + + switch ejp.s { + case jpsSawKey: + key = ejp.k + case jpsSawEndObject: + return "", 0, ErrEOD + case jpsInvalidState: + return "", 0, ejp.err + default: + return "", 0, invalidRequestError("key") + } + + // check for colon + ejp.advanceState() + if err := ensureColon(ejp.s, key); err != nil { + return "", 0, err + } + + // peek at the value to determine type + t, err := ejp.peekType() + if err != nil { + return "", 0, err + } + + return key, t, nil +} + +// readValue returns the value corresponding to the Type returned by peekType +func (ejp *extJSONParser) readValue(t Type) 
(*extJSONValue, error) { + if ejp.s == jpsInvalidState { + return nil, ejp.err + } + + var v *extJSONValue + + switch t { + case TypeNull, TypeBoolean, TypeString: + if ejp.s != jpsSawValue { + return nil, invalidRequestError(t.String()) + } + v = ejp.v + case TypeInt32, TypeInt64, TypeDouble: + // relaxed version allows these to be literal number values + if ejp.s == jpsSawValue { + v = ejp.v + break + } + fallthrough + case TypeDecimal128, TypeSymbol, TypeObjectID, TypeMinKey, TypeMaxKey, TypeUndefined: + switch ejp.s { + case jpsSawKey: + // read colon + ejp.advanceState() + if err := ensureColon(ejp.s, ejp.k); err != nil { + return nil, err + } + + // read value + ejp.advanceState() + if ejp.s != jpsSawValue || !ejp.ensureExtValueType(t) { + return nil, invalidJSONErrorForType("value", t) + } + + v = ejp.v + + // read end object + ejp.advanceState() + if ejp.s != jpsSawEndObject { + return nil, invalidJSONErrorForType("} after value", t) + } + default: + return nil, invalidRequestError(t.String()) + } + case TypeBinary, TypeRegex, TypeTimestamp, TypeDBPointer: + if ejp.s != jpsSawKey { + return nil, invalidRequestError(t.String()) + } + // read colon + ejp.advanceState() + if err := ensureColon(ejp.s, ejp.k); err != nil { + return nil, err + } + + ejp.advanceState() + if t == TypeBinary && ejp.s == jpsSawValue { + // convert relaxed $uuid format + if ejp.relaxedUUID { + defer func() { ejp.relaxedUUID = false }() + uuid, err := ejp.v.parseSymbol() + if err != nil { + return nil, err + } + + // RFC 4122 defines the length of a UUID as 36 and the hyphens in a UUID as appearing + // in the 8th, 13th, 18th, and 23rd characters. + // + // See https://tools.ietf.org/html/rfc4122#section-3 + valid := len(uuid) == 36 && + string(uuid[8]) == "-" && + string(uuid[13]) == "-" && + string(uuid[18]) == "-" && + string(uuid[23]) == "-" + if !valid { + return nil, fmt.Errorf("$uuid value does not follow RFC 4122 format regarding length and hyphens") + } + + // remove hyphens + uuidNoHyphens := strings.ReplaceAll(uuid, "-", "") + if len(uuidNoHyphens) != 32 { + return nil, fmt.Errorf("$uuid value does not follow RFC 4122 format regarding length and hyphens") + } + + // convert hex to bytes + bytes, err := hex.DecodeString(uuidNoHyphens) + if err != nil { + return nil, fmt.Errorf("$uuid value does not follow RFC 4122 format regarding hex bytes: %w", err) + } + + ejp.advanceState() + if ejp.s != jpsSawEndObject { + return nil, invalidJSONErrorForType("$uuid and value and then }", TypeBinary) + } + + base64 := &extJSONValue{ + t: TypeString, + v: base64.StdEncoding.EncodeToString(bytes), + } + subType := &extJSONValue{ + t: TypeString, + v: "04", + } + + v = &extJSONValue{ + t: TypeEmbeddedDocument, + v: &extJSONObject{ + keys: []string{"base64", "subType"}, + values: []*extJSONValue{base64, subType}, + }, + } + + break + } + + // convert legacy $binary format + base64 := ejp.v + + ejp.advanceState() + if ejp.s != jpsSawComma { + return nil, invalidJSONErrorForType(",", TypeBinary) + } + + ejp.advanceState() + key, t, err := ejp.readKey() + if err != nil { + return nil, err + } + if key != "$type" { + return nil, invalidJSONErrorForType("$type", TypeBinary) + } + + subType, err := ejp.readValue(t) + if err != nil { + return nil, err + } + + ejp.advanceState() + if ejp.s != jpsSawEndObject { + return nil, invalidJSONErrorForType("2 key-value pairs and then }", TypeBinary) + } + + v = &extJSONValue{ + t: TypeEmbeddedDocument, + v: &extJSONObject{ + keys: []string{"base64", "subType"}, + values: 
[]*extJSONValue{base64, subType}, + }, + } + break + } + + // read KV pairs + if ejp.s != jpsSawBeginObject { + return nil, invalidJSONErrorForType("{", t) + } + + keys, vals, err := ejp.readObject(2, true) + if err != nil { + return nil, err + } + + ejp.advanceState() + if ejp.s != jpsSawEndObject { + return nil, invalidJSONErrorForType("2 key-value pairs and then }", t) + } + + v = &extJSONValue{t: TypeEmbeddedDocument, v: &extJSONObject{keys: keys, values: vals}} + + case TypeDateTime: + switch ejp.s { + case jpsSawValue: + v = ejp.v + case jpsSawKey: + // read colon + ejp.advanceState() + if err := ensureColon(ejp.s, ejp.k); err != nil { + return nil, err + } + + ejp.advanceState() + switch ejp.s { + case jpsSawBeginObject: + keys, vals, err := ejp.readObject(1, true) + if err != nil { + return nil, err + } + v = &extJSONValue{t: TypeEmbeddedDocument, v: &extJSONObject{keys: keys, values: vals}} + case jpsSawValue: + if ejp.canonicalOnly { + return nil, invalidJSONError("{") + } + v = ejp.v + default: + if ejp.canonicalOnly { + return nil, invalidJSONErrorForType("object", t) + } + return nil, invalidJSONErrorForType("ISO-8601 Internet Date/Time Format as described in RFC-3339", t) + } + + ejp.advanceState() + if ejp.s != jpsSawEndObject { + return nil, invalidJSONErrorForType("value and then }", t) + } + default: + return nil, invalidRequestError(t.String()) + } + case TypeJavaScript: + switch ejp.s { + case jpsSawKey: + // read colon + ejp.advanceState() + if err := ensureColon(ejp.s, ejp.k); err != nil { + return nil, err + } + + // read value + ejp.advanceState() + if ejp.s != jpsSawValue { + return nil, invalidJSONErrorForType("value", t) + } + v = ejp.v + + // read end object or comma and just return + ejp.advanceState() + case jpsSawEndObject: + v = ejp.v + default: + return nil, invalidRequestError(t.String()) + } + case TypeCodeWithScope: + if ejp.s == jpsSawKey && ejp.k == "$scope" { + v = ejp.v // this is the $code string from earlier + + // read colon + ejp.advanceState() + if err := ensureColon(ejp.s, ejp.k); err != nil { + return nil, err + } + + // read { + ejp.advanceState() + if ejp.s != jpsSawBeginObject { + return nil, invalidJSONError("$scope to be embedded document") + } + } else { + return nil, invalidRequestError(t.String()) + } + case TypeEmbeddedDocument, TypeArray: + return nil, invalidRequestError(t.String()) + } + + return v, nil +} + +// readObject is a utility method for reading full objects of known (or expected) size +// it is useful for extended JSON types such as binary, datetime, regex, and timestamp +func (ejp *extJSONParser) readObject(numKeys int, started bool) ([]string, []*extJSONValue, error) { + keys := make([]string, numKeys) + vals := make([]*extJSONValue, numKeys) + + if !started { + ejp.advanceState() + if ejp.s != jpsSawBeginObject { + return nil, nil, invalidJSONError("{") + } + } + + for i := 0; i < numKeys; i++ { + key, t, err := ejp.readKey() + if err != nil { + return nil, nil, err + } + + switch ejp.s { + case jpsSawKey: + v, err := ejp.readValue(t) + if err != nil { + return nil, nil, err + } + + keys[i] = key + vals[i] = v + case jpsSawValue: + keys[i] = key + vals[i] = ejp.v + default: + return nil, nil, invalidJSONError("value") + } + } + + ejp.advanceState() + if ejp.s != jpsSawEndObject { + return nil, nil, invalidJSONError("}") + } + + return keys, vals, nil +} + +// advanceState reads the next JSON token from the scanner and transitions +// from the current state based on that token's type +func (ejp *extJSONParser) 
advanceState() { + if ejp.s == jpsDoneState || ejp.s == jpsInvalidState { + return + } + + jt, err := ejp.js.nextToken() + + if err != nil { + ejp.err = err + ejp.s = jpsInvalidState + return + } + + valid := ejp.validateToken(jt.t) + if !valid { + ejp.err = unexpectedTokenError(jt) + ejp.s = jpsInvalidState + return + } + + switch jt.t { + case jttBeginObject: + ejp.s = jpsSawBeginObject + ejp.pushMode(jpmObjectMode) + ejp.depth++ + + if ejp.depth > ejp.maxDepth { + ejp.err = nestingDepthError(jt.p, ejp.depth) + ejp.s = jpsInvalidState + } + case jttEndObject: + ejp.s = jpsSawEndObject + ejp.depth-- + + if ejp.popMode() != jpmObjectMode { + ejp.err = unexpectedTokenError(jt) + ejp.s = jpsInvalidState + } + case jttBeginArray: + ejp.s = jpsSawBeginArray + ejp.pushMode(jpmArrayMode) + case jttEndArray: + ejp.s = jpsSawEndArray + + if ejp.popMode() != jpmArrayMode { + ejp.err = unexpectedTokenError(jt) + ejp.s = jpsInvalidState + } + case jttColon: + ejp.s = jpsSawColon + case jttComma: + ejp.s = jpsSawComma + case jttEOF: + ejp.s = jpsDoneState + if len(ejp.m) != 0 { + ejp.err = unexpectedTokenError(jt) + ejp.s = jpsInvalidState + } + case jttString: + switch ejp.s { + case jpsSawComma: + if ejp.peekMode() == jpmArrayMode { + ejp.s = jpsSawValue + ejp.v = extendJSONToken(jt) + return + } + fallthrough + case jpsSawBeginObject: + ejp.s = jpsSawKey + ejp.k = jt.v.(string) + return + } + fallthrough + default: + ejp.s = jpsSawValue + ejp.v = extendJSONToken(jt) + } +} + +var jpsValidTransitionTokens = map[jsonParseState]map[jsonTokenType]bool{ + jpsStartState: { + jttBeginObject: true, + jttBeginArray: true, + jttInt32: true, + jttInt64: true, + jttDouble: true, + jttString: true, + jttBool: true, + jttNull: true, + jttEOF: true, + }, + jpsSawBeginObject: { + jttEndObject: true, + jttString: true, + }, + jpsSawEndObject: { + jttEndObject: true, + jttEndArray: true, + jttComma: true, + jttEOF: true, + }, + jpsSawBeginArray: { + jttBeginObject: true, + jttBeginArray: true, + jttEndArray: true, + jttInt32: true, + jttInt64: true, + jttDouble: true, + jttString: true, + jttBool: true, + jttNull: true, + }, + jpsSawEndArray: { + jttEndObject: true, + jttEndArray: true, + jttComma: true, + jttEOF: true, + }, + jpsSawColon: { + jttBeginObject: true, + jttBeginArray: true, + jttInt32: true, + jttInt64: true, + jttDouble: true, + jttString: true, + jttBool: true, + jttNull: true, + }, + jpsSawComma: { + jttBeginObject: true, + jttBeginArray: true, + jttInt32: true, + jttInt64: true, + jttDouble: true, + jttString: true, + jttBool: true, + jttNull: true, + }, + jpsSawKey: { + jttColon: true, + }, + jpsSawValue: { + jttEndObject: true, + jttEndArray: true, + jttComma: true, + jttEOF: true, + }, + jpsDoneState: {}, + jpsInvalidState: {}, +} + +func (ejp *extJSONParser) validateToken(jtt jsonTokenType) bool { + switch ejp.s { + case jpsSawEndObject: + // if we are at depth zero and the next token is a '{', + // we can consider it valid only if we are not in array mode. + if jtt == jttBeginObject && ejp.depth == 0 { + return ejp.peekMode() != jpmArrayMode + } + case jpsSawComma: + switch ejp.peekMode() { + // the only valid next token after a comma inside a document is a string (a key) + case jpmObjectMode: + return jtt == jttString + case jpmInvalidMode: + return false + } + } + + _, ok := jpsValidTransitionTokens[ejp.s][jtt] + return ok +} + +// ensureExtValueType returns true if the current value has the expected +// value type for single-key extended JSON types. 
For example, +// {"$numberInt": v} v must be TypeString +func (ejp *extJSONParser) ensureExtValueType(t Type) bool { + switch t { + case TypeMinKey, TypeMaxKey: + return ejp.v.t == TypeInt32 + case TypeUndefined: + return ejp.v.t == TypeBoolean + case TypeInt32, TypeInt64, TypeDouble, TypeDecimal128, TypeSymbol, TypeObjectID: + return ejp.v.t == TypeString + default: + return false + } +} + +func (ejp *extJSONParser) pushMode(m jsonParseMode) { + ejp.m = append(ejp.m, m) +} + +func (ejp *extJSONParser) popMode() jsonParseMode { + l := len(ejp.m) + if l == 0 { + return jpmInvalidMode + } + + m := ejp.m[l-1] + ejp.m = ejp.m[:l-1] + + return m +} + +func (ejp *extJSONParser) peekMode() jsonParseMode { + l := len(ejp.m) + if l == 0 { + return jpmInvalidMode + } + + return ejp.m[l-1] +} + +func extendJSONToken(jt *jsonToken) *extJSONValue { + var t Type + + switch jt.t { + case jttInt32: + t = TypeInt32 + case jttInt64: + t = TypeInt64 + case jttDouble: + t = TypeDouble + case jttString: + t = TypeString + case jttBool: + t = TypeBoolean + case jttNull: + t = TypeNull + default: + return nil + } + + return &extJSONValue{t: t, v: jt.v} +} + +func ensureColon(s jsonParseState, key string) error { + if s != jpsSawColon { + return fmt.Errorf("invalid JSON input: missing colon after key \"%s\"", key) + } + + return nil +} + +func invalidRequestError(s string) error { + return fmt.Errorf("invalid request to read %s", s) +} + +func invalidJSONError(expected string) error { + return fmt.Errorf("invalid JSON input; expected %s", expected) +} + +func invalidJSONErrorForType(expected string, t Type) error { + return fmt.Errorf("invalid JSON input; expected %s for %s", expected, t) +} + +func unexpectedTokenError(jt *jsonToken) error { + switch jt.t { + case jttInt32, jttInt64, jttDouble: + return fmt.Errorf("invalid JSON input; unexpected number (%v) at position %d", jt.v, jt.p) + case jttString: + return fmt.Errorf("invalid JSON input; unexpected string (\"%v\") at position %d", jt.v, jt.p) + case jttBool: + return fmt.Errorf("invalid JSON input; unexpected boolean literal (%v) at position %d", jt.v, jt.p) + case jttNull: + return fmt.Errorf("invalid JSON input; unexpected null literal at position %d", jt.p) + case jttEOF: + return fmt.Errorf("invalid JSON input; unexpected end of input at position %d", jt.p) + default: + return fmt.Errorf("invalid JSON input; unexpected %c at position %d", jt.v.(byte), jt.p) + } +} + +func nestingDepthError(p, depth int) error { + return fmt.Errorf("invalid JSON input; nesting too deep (%d levels) at position %d", depth, p) +} diff --git a/extjson_parser_test.go b/extjson_parser_test.go new file mode 100644 index 0000000..ae6bbf7 --- /dev/null +++ b/extjson_parser_test.go @@ -0,0 +1,804 @@ +// Copyright (C) MongoDB, Inc. 2017-present. +// +// Licensed under the Apache License, Version 2.0 (the "License"); you may +// not use this file except in compliance with the License. 
You may obtain +// a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 + +package bson + +import ( + "errors" + "io" + "strings" + "testing" + + "github.com/google/go-cmp/cmp" +) + +var ( + keyDiff = specificDiff("key") + typDiff = specificDiff("type") + valDiff = specificDiff("value") + + expectErrEOF = expectSpecificError(io.EOF) + expectErrEOD = expectSpecificError(ErrEOD) + expectErrEOA = expectSpecificError(ErrEOA) +) + +type expectedErrorFunc func(t *testing.T, err error, desc string) + +type peekTypeTestCase struct { + desc string + input string + typs []Type + errFs []expectedErrorFunc +} + +type readKeyValueTestCase struct { + desc string + input string + keys []string + typs []Type + vals []*extJSONValue + + keyEFs []expectedErrorFunc + valEFs []expectedErrorFunc +} + +func expectNoError(t *testing.T, err error, desc string) { + if err != nil { + t.Helper() + t.Errorf("%s: Unepexted error: %v", desc, err) + t.FailNow() + } +} + +func expectError(t *testing.T, err error, desc string) { + if err == nil { + t.Helper() + t.Errorf("%s: Expected error", desc) + t.FailNow() + } +} + +func expectSpecificError(expected error) expectedErrorFunc { + return func(t *testing.T, err error, desc string) { + if !errors.Is(err, expected) { + t.Helper() + t.Errorf("%s: Expected %v but got: %v", desc, expected, err) + t.FailNow() + } + } +} + +func specificDiff(name string) func(t *testing.T, expected, actual interface{}, desc string) { + return func(t *testing.T, expected, actual interface{}, desc string) { + if diff := cmp.Diff(expected, actual); diff != "" { + t.Helper() + t.Errorf("%s: Incorrect JSON %s (-want, +got): %s\n", desc, name, diff) + t.FailNow() + } + } +} + +func expectErrorNOOP(_ *testing.T, _ error, _ string) { +} + +func readKeyDiff(t *testing.T, eKey, aKey string, eTyp, aTyp Type, err error, errF expectedErrorFunc, desc string) { + keyDiff(t, eKey, aKey, desc) + typDiff(t, eTyp, aTyp, desc) + errF(t, err, desc) +} + +func readValueDiff(t *testing.T, eVal, aVal *extJSONValue, err error, errF expectedErrorFunc, desc string) { + if aVal != nil { + typDiff(t, eVal.t, aVal.t, desc) + valDiff(t, eVal.v, aVal.v, desc) + } else { + valDiff(t, eVal, aVal, desc) + } + + errF(t, err, desc) +} + +func TestExtJSONParserPeekType(t *testing.T) { + makeValidPeekTypeTestCase := func(input string, typ Type, desc string) peekTypeTestCase { + return peekTypeTestCase{ + desc: desc, input: input, + typs: []Type{typ}, + errFs: []expectedErrorFunc{expectNoError}, + } + } + makeInvalidTestCase := func(desc, input string, lastEF expectedErrorFunc) peekTypeTestCase { + return peekTypeTestCase{ + desc: desc, input: input, + typs: []Type{Type(0)}, + errFs: []expectedErrorFunc{lastEF}, + } + } + + makeInvalidPeekTypeTestCase := func(desc, input string, lastEF expectedErrorFunc) peekTypeTestCase { + return peekTypeTestCase{ + desc: desc, input: input, + typs: []Type{TypeArray, TypeString, Type(0)}, + errFs: []expectedErrorFunc{expectNoError, expectNoError, lastEF}, + } + } + + cases := []peekTypeTestCase{ + makeValidPeekTypeTestCase(`null`, TypeNull, "Null"), + makeValidPeekTypeTestCase(`"string"`, TypeString, "String"), + makeValidPeekTypeTestCase(`true`, TypeBoolean, "Boolean--true"), + makeValidPeekTypeTestCase(`false`, TypeBoolean, "Boolean--false"), + makeValidPeekTypeTestCase(`{"$minKey": 1}`, TypeMinKey, "MinKey"), + makeValidPeekTypeTestCase(`{"$maxKey": 1}`, TypeMaxKey, "MaxKey"), + makeValidPeekTypeTestCase(`{"$numberInt": "42"}`, TypeInt32, "Int32"), + 
makeValidPeekTypeTestCase(`{"$numberLong": "42"}`, TypeInt64, "Int64"), + makeValidPeekTypeTestCase(`{"$symbol": "symbol"}`, TypeSymbol, "Symbol"), + makeValidPeekTypeTestCase(`{"$numberDouble": "42.42"}`, TypeDouble, "Double"), + makeValidPeekTypeTestCase(`{"$undefined": true}`, TypeUndefined, "Undefined"), + makeValidPeekTypeTestCase(`{"$numberDouble": "NaN"}`, TypeDouble, "Double--NaN"), + makeValidPeekTypeTestCase(`{"$numberDecimal": "1234"}`, TypeDecimal128, "Decimal"), + makeValidPeekTypeTestCase(`{"foo": "bar"}`, TypeEmbeddedDocument, "Toplevel document"), + makeValidPeekTypeTestCase(`{"$date": {"$numberLong": "0"}}`, TypeDateTime, "Datetime"), + makeValidPeekTypeTestCase(`{"$code": "function() {}"}`, TypeJavaScript, "Code no scope"), + makeValidPeekTypeTestCase(`[{"$numberInt": "1"},{"$numberInt": "2"}]`, TypeArray, "Array"), + makeValidPeekTypeTestCase(`{"$timestamp": {"t": 42, "i": 1}}`, TypeTimestamp, "Timestamp"), + makeValidPeekTypeTestCase(`{"$oid": "57e193d7a9cc81b4027498b5"}`, TypeObjectID, "Object ID"), + makeValidPeekTypeTestCase(`{"$binary": {"base64": "AQIDBAU=", "subType": "80"}}`, TypeBinary, "Binary"), + makeValidPeekTypeTestCase(`{"$code": "function() {}", "$scope": {}}`, TypeCodeWithScope, "Code With Scope"), + makeValidPeekTypeTestCase(`{"$binary": {"base64": "o0w498Or7cijeBSpkquNtg==", "subType": "03"}}`, TypeBinary, "Binary"), + makeValidPeekTypeTestCase(`{"$binary": "o0w498Or7cijeBSpkquNtg==", "$type": "03"}`, TypeBinary, "Binary"), + makeValidPeekTypeTestCase(`{"$regularExpression": {"pattern": "foo*", "options": "ix"}}`, TypeRegex, "Regular expression"), + makeValidPeekTypeTestCase(`{"$dbPointer": {"$ref": "db.collection", "$id": {"$oid": "57e193d7a9cc81b4027498b1"}}}`, TypeDBPointer, "DBPointer"), + makeValidPeekTypeTestCase(`{"$ref": "collection", "$id": {"$oid": "57fd71e96e32ab4225b723fb"}, "$db": "database"}`, TypeEmbeddedDocument, "DBRef"), + makeInvalidPeekTypeTestCase("invalid array--missing ]", `["a"`, expectError), + makeInvalidPeekTypeTestCase("invalid array--colon in array", `["a":`, expectError), + makeInvalidPeekTypeTestCase("invalid array--extra comma", `["a",,`, expectError), + makeInvalidPeekTypeTestCase("invalid array--trailing comma", `["a",]`, expectError), + makeInvalidPeekTypeTestCase("peekType after end of array", `["a"]`, expectErrEOA), + { + desc: "invalid array--leading comma", + input: `[,`, + typs: []Type{TypeArray, Type(0)}, + errFs: []expectedErrorFunc{expectNoError, expectError}, + }, + makeInvalidTestCase("lone $scope", `{"$scope": {}}`, expectError), + makeInvalidTestCase("empty code with unknown extra key", `{"$code":"", "0":""}`, expectError), + makeInvalidTestCase("non-empty code with unknown extra key", `{"$code":"foobar", "0":""}`, expectError), + } + + for _, tc := range cases { + t.Run(tc.desc, func(t *testing.T) { + ejp := newExtJSONParser(strings.NewReader(tc.input), true) + // Manually set the parser's starting state to jpsSawColon so peekType will read ahead to find the extjson + // type of the value. If not set, the parser will be in jpsStartState and advance to jpsSawKey, which will + // cause it to return without peeking the extjson type. 
+ ejp.s = jpsSawColon + + for i, eTyp := range tc.typs { + errF := tc.errFs[i] + + typ, err := ejp.peekType() + errF(t, err, tc.desc) + if err != nil { + // Don't inspect the type if there was an error + return + } + + typDiff(t, eTyp, typ, tc.desc) + } + }) + } +} + +func TestExtJSONParserReadKeyReadValue(t *testing.T) { + // several test cases will use the same keys, types, and values, and only differ on input structure + + keys := []string{"_id", "Symbol", "String", "Int32", "Int64", "Int", "MinKey"} + types := []Type{TypeObjectID, TypeSymbol, TypeString, TypeInt32, TypeInt64, TypeInt32, TypeMinKey} + values := []*extJSONValue{ + {t: TypeString, v: "57e193d7a9cc81b4027498b5"}, + {t: TypeString, v: "symbol"}, + {t: TypeString, v: "string"}, + {t: TypeString, v: "42"}, + {t: TypeString, v: "42"}, + {t: TypeInt32, v: int32(42)}, + {t: TypeInt32, v: int32(1)}, + } + + errFuncs := make([]expectedErrorFunc, 7) + for i := 0; i < 7; i++ { + errFuncs[i] = expectNoError + } + + firstKeyError := func(desc, input string) readKeyValueTestCase { + return readKeyValueTestCase{ + desc: desc, + input: input, + keys: []string{""}, + typs: []Type{Type(0)}, + vals: []*extJSONValue{nil}, + keyEFs: []expectedErrorFunc{expectError}, + valEFs: []expectedErrorFunc{expectErrorNOOP}, + } + } + + secondKeyError := func(desc, input, firstKey string, firstType Type, firstValue *extJSONValue) readKeyValueTestCase { + return readKeyValueTestCase{ + desc: desc, + input: input, + keys: []string{firstKey, ""}, + typs: []Type{firstType, Type(0)}, + vals: []*extJSONValue{firstValue, nil}, + keyEFs: []expectedErrorFunc{expectNoError, expectError}, + valEFs: []expectedErrorFunc{expectNoError, expectErrorNOOP}, + } + } + + cases := []readKeyValueTestCase{ + { + desc: "normal spacing", + input: `{ + "_id": { "$oid": "57e193d7a9cc81b4027498b5" }, + "Symbol": { "$symbol": "symbol" }, + "String": "string", + "Int32": { "$numberInt": "42" }, + "Int64": { "$numberLong": "42" }, + "Int": 42, + "MinKey": { "$minKey": 1 } + }`, + keys: keys, typs: types, vals: values, + keyEFs: errFuncs, valEFs: errFuncs, + }, + { + desc: "new line before comma", + input: `{ "_id": { "$oid": "57e193d7a9cc81b4027498b5" } + , "Symbol": { "$symbol": "symbol" } + , "String": "string" + , "Int32": { "$numberInt": "42" } + , "Int64": { "$numberLong": "42" } + , "Int": 42 + , "MinKey": { "$minKey": 1 } + }`, + keys: keys, typs: types, vals: values, + keyEFs: errFuncs, valEFs: errFuncs, + }, + { + desc: "tabs around colons", + input: `{ + "_id": { "$oid" : "57e193d7a9cc81b4027498b5" }, + "Symbol": { "$symbol" : "symbol" }, + "String": "string", + "Int32": { "$numberInt" : "42" }, + "Int64": { "$numberLong": "42" }, + "Int": 42, + "MinKey": { "$minKey": 1 } + }`, + keys: keys, typs: types, vals: values, + keyEFs: errFuncs, valEFs: errFuncs, + }, + { + desc: "no whitespace", + input: `{"_id":{"$oid":"57e193d7a9cc81b4027498b5"},"Symbol":{"$symbol":"symbol"},"String":"string","Int32":{"$numberInt":"42"},"Int64":{"$numberLong":"42"},"Int":42,"MinKey":{"$minKey":1}}`, + keys: keys, typs: types, vals: values, + keyEFs: errFuncs, valEFs: errFuncs, + }, + { + desc: "mixed whitespace", + input: ` { + "_id" : { "$oid": "57e193d7a9cc81b4027498b5" }, + "Symbol" : { "$symbol": "symbol" } , + "String" : "string", + "Int32" : { "$numberInt": "42" } , + "Int64" : {"$numberLong" : "42"}, + "Int" : 42, + "MinKey" : { "$minKey": 1 } } `, + keys: keys, typs: types, vals: values, + keyEFs: errFuncs, valEFs: errFuncs, + }, + { + desc: "nested object", + input: `{"k1": 1, "k2": { 
"k3": { "k4": 4 } }, "k5": 5}`, + keys: []string{"k1", "k2", "k3", "k4", "", "", "k5", ""}, + typs: []Type{TypeInt32, TypeEmbeddedDocument, TypeEmbeddedDocument, TypeInt32, Type(0), Type(0), TypeInt32, Type(0)}, + vals: []*extJSONValue{ + {t: TypeInt32, v: int32(1)}, nil, nil, {t: TypeInt32, v: int32(4)}, nil, nil, {t: TypeInt32, v: int32(5)}, nil, + }, + keyEFs: []expectedErrorFunc{ + expectNoError, expectNoError, expectNoError, expectNoError, expectErrEOD, + expectErrEOD, expectNoError, expectErrEOD, + }, + valEFs: []expectedErrorFunc{ + expectNoError, expectError, expectError, expectNoError, expectErrorNOOP, + expectErrorNOOP, expectNoError, expectErrorNOOP, + }, + }, + { + desc: "invalid input: invalid values for extended type", + input: `{"a": {"$numberInt": "1", "x"`, + keys: []string{"a"}, + typs: []Type{TypeInt32}, + vals: []*extJSONValue{nil}, + keyEFs: []expectedErrorFunc{expectNoError}, + valEFs: []expectedErrorFunc{expectError}, + }, + firstKeyError("invalid input: missing key--EOF", "{"), + firstKeyError("invalid input: missing key--colon first", "{:"), + firstKeyError("invalid input: missing value", `{"a":`), + firstKeyError("invalid input: missing colon", `{"a" 1`), + firstKeyError("invalid input: extra colon", `{"a"::`), + secondKeyError("invalid input: missing }", `{"a": 1`, "a", TypeInt32, &extJSONValue{t: TypeInt32, v: int32(1)}), + secondKeyError("invalid input: missing comma", `{"a": 1 "b"`, "a", TypeInt32, &extJSONValue{t: TypeInt32, v: int32(1)}), + secondKeyError("invalid input: extra comma", `{"a": 1,, "b"`, "a", TypeInt32, &extJSONValue{t: TypeInt32, v: int32(1)}), + secondKeyError("invalid input: trailing comma in object", `{"a": 1,}`, "a", TypeInt32, &extJSONValue{t: TypeInt32, v: int32(1)}), + { + desc: "invalid input: lone scope after a complete value", + input: `{"a": "", "b": {"$scope: ""}}`, + keys: []string{"a"}, + typs: []Type{TypeString}, + vals: []*extJSONValue{{TypeString, ""}}, + keyEFs: []expectedErrorFunc{expectNoError, expectNoError}, + valEFs: []expectedErrorFunc{expectNoError, expectError}, + }, + { + desc: "invalid input: lone scope nested", + input: `{"a":{"b":{"$scope":{`, + keys: []string{}, + typs: []Type{}, + vals: []*extJSONValue{nil}, + keyEFs: []expectedErrorFunc{expectNoError}, + valEFs: []expectedErrorFunc{expectError}, + }, + } + + for _, tc := range cases { + t.Run(tc.desc, func(t *testing.T) { + ejp := newExtJSONParser(strings.NewReader(tc.input), true) + + for i, eKey := range tc.keys { + eTyp := tc.typs[i] + eVal := tc.vals[i] + + keyErrF := tc.keyEFs[i] + valErrF := tc.valEFs[i] + + k, typ, err := ejp.readKey() + readKeyDiff(t, eKey, k, eTyp, typ, err, keyErrF, tc.desc) + + v, err := ejp.readValue(typ) + readValueDiff(t, eVal, v, err, valErrF, tc.desc) + } + }) + } +} + +type ejpExpectationTest func(t *testing.T, p *extJSONParser, expectedKey string, expectedType Type, expectedValue interface{}) + +type ejpTestCase struct { + f ejpExpectationTest + p *extJSONParser + k string + t Type + v interface{} +} + +// expectSingleValue is used for simple JSON types (strings, numbers, literals) and for extended JSON types that +// have single key-value pairs (i.e. 
{ "$minKey": 1 }, { "$numberLong": "42.42" }) +func expectSingleValue(t *testing.T, p *extJSONParser, expectedKey string, expectedType Type, expectedValue interface{}) { + eVal := expectedValue.(*extJSONValue) + + k, typ, err := p.readKey() + readKeyDiff(t, expectedKey, k, expectedType, typ, err, expectNoError, expectedKey) + + v, err := p.readValue(typ) + readValueDiff(t, eVal, v, err, expectNoError, expectedKey) +} + +// expectMultipleValues is used for values that are subdocuments of known size and with known keys (such as extended +// JSON types { "$timestamp": {"t": 1, "i": 1} } and { "$regularExpression": {"pattern": "", options: ""} }) +func expectMultipleValues(t *testing.T, p *extJSONParser, expectedKey string, expectedType Type, expectedValue interface{}) { + k, typ, err := p.readKey() + readKeyDiff(t, expectedKey, k, expectedType, typ, err, expectNoError, expectedKey) + + v, err := p.readValue(typ) + expectNoError(t, err, "") + typDiff(t, TypeEmbeddedDocument, v.t, expectedKey) + + actObj := v.v.(*extJSONObject) + expObj := expectedValue.(*extJSONObject) + + for i, actKey := range actObj.keys { + expKey := expObj.keys[i] + actVal := actObj.values[i] + expVal := expObj.values[i] + + keyDiff(t, expKey, actKey, expectedKey) + typDiff(t, expVal.t, actVal.t, expectedKey) + valDiff(t, expVal.v, actVal.v, expectedKey) + } +} + +type ejpKeyTypValTriple struct { + key string + typ Type + val *extJSONValue +} + +type ejpSubDocumentTestValue struct { + code string // code is only used for TypeCodeWithScope (and is ignored for TypeEmbeddedDocument + ktvs []ejpKeyTypValTriple // list of (key, type, value) triples; this is "scope" for TypeCodeWithScope +} + +// expectSubDocument is used for embedded documents and code with scope types; it reads all the keys and values +// in the embedded document (or scope for codeWithScope) and compares them to the expectedValue's list of (key, type, +// value) triples +func expectSubDocument(t *testing.T, p *extJSONParser, expectedKey string, expectedType Type, expectedValue interface{}) { + subdoc := expectedValue.(ejpSubDocumentTestValue) + + k, typ, err := p.readKey() + readKeyDiff(t, expectedKey, k, expectedType, typ, err, expectNoError, expectedKey) + + if expectedType == TypeCodeWithScope { + v, err := p.readValue(typ) + readValueDiff(t, &extJSONValue{t: TypeString, v: subdoc.code}, v, err, expectNoError, expectedKey) + } + + for _, ktv := range subdoc.ktvs { + eKey := ktv.key + eTyp := ktv.typ + eVal := ktv.val + + k, typ, err = p.readKey() + readKeyDiff(t, eKey, k, eTyp, typ, err, expectNoError, expectedKey) + + v, err := p.readValue(typ) + readValueDiff(t, eVal, v, err, expectNoError, expectedKey) + } + + if expectedType == TypeCodeWithScope { + // expect scope doc to close + k, typ, err = p.readKey() + readKeyDiff(t, "", k, Type(0), typ, err, expectErrEOD, expectedKey) + } + + // expect subdoc to close + k, typ, err = p.readKey() + readKeyDiff(t, "", k, Type(0), typ, err, expectErrEOD, expectedKey) +} + +// expectArray takes the expectedKey, ignores the expectedType, and uses the expectedValue +// as a slice of (type Type, value *extJSONValue) pairs +func expectArray(t *testing.T, p *extJSONParser, expectedKey string, _ Type, expectedValue interface{}) { + ktvs := expectedValue.([]ejpKeyTypValTriple) + + k, typ, err := p.readKey() + readKeyDiff(t, expectedKey, k, TypeArray, typ, err, expectNoError, expectedKey) + + for _, ktv := range ktvs { + eTyp := ktv.typ + eVal := ktv.val + + typ, err = p.peekType() + typDiff(t, eTyp, typ, expectedKey) + 
expectNoError(t, err, expectedKey) + + v, err := p.readValue(typ) + readValueDiff(t, eVal, v, err, expectNoError, expectedKey) + } + + // expect array to end + typ, err = p.peekType() + typDiff(t, Type(0), typ, expectedKey) + expectErrEOA(t, err, expectedKey) +} + +func TestExtJSONParserAllTypes(t *testing.T) { + in := ` { "_id" : { "$oid": "57e193d7a9cc81b4027498b5"} + , "Symbol" : { "$symbol": "symbol"} + , "String" : "string" + , "Int32" : { "$numberInt": "42"} + , "Int64" : { "$numberLong": "42"} + , "Double" : { "$numberDouble": "42.42"} + , "SpecialFloat" : { "$numberDouble": "NaN" } + , "Decimal" : { "$numberDecimal": "1234" } + , "Binary" : { "$binary": { "base64": "o0w498Or7cijeBSpkquNtg==", "subType": "03" } } + , "BinaryLegacy" : { "$binary": "o0w498Or7cijeBSpkquNtg==", "$type": "03" } + , "BinaryUserDefined" : { "$binary": { "base64": "AQIDBAU=", "subType": "80" } } + , "Code" : { "$code": "function() {}" } + , "CodeWithEmptyScope" : { "$code": "function() {}", "$scope": {} } + , "CodeWithScope" : { "$code": "function() {}", "$scope": { "x": 1 } } + , "EmptySubdocument" : {} + , "Subdocument" : { "foo": "bar", "baz": { "$numberInt": "42" } } + , "Array" : [{"$numberInt": "1"}, {"$numberLong": "2"}, {"$numberDouble": "3"}, 4, "string", 5.0] + , "Timestamp" : { "$timestamp": { "t": 42, "i": 1 } } + , "RegularExpression" : { "$regularExpression": { "pattern": "foo*", "options": "ix" } } + , "DatetimeEpoch" : { "$date": { "$numberLong": "0" } } + , "DatetimePositive" : { "$date": { "$numberLong": "9223372036854775807" } } + , "DatetimeNegative" : { "$date": { "$numberLong": "-9223372036854775808" } } + , "True" : true + , "False" : false + , "DBPointer" : { "$dbPointer": { "$ref": "db.collection", "$id": { "$oid": "57e193d7a9cc81b4027498b1" } } } + , "DBRef" : { "$ref": "collection", "$id": { "$oid": "57fd71e96e32ab4225b723fb" }, "$db": "database" } + , "DBRefNoDB" : { "$ref": "collection", "$id": { "$oid": "57fd71e96e32ab4225b723fb" } } + , "MinKey" : { "$minKey": 1 } + , "MaxKey" : { "$maxKey": 1 } + , "Null" : null + , "Undefined" : { "$undefined": true } + }` + + ejp := newExtJSONParser(strings.NewReader(in), true) + + cases := []ejpTestCase{ + { + f: expectSingleValue, p: ejp, + k: "_id", t: TypeObjectID, v: &extJSONValue{t: TypeString, v: "57e193d7a9cc81b4027498b5"}, + }, + { + f: expectSingleValue, p: ejp, + k: "Symbol", t: TypeSymbol, v: &extJSONValue{t: TypeString, v: "symbol"}, + }, + { + f: expectSingleValue, p: ejp, + k: "String", t: TypeString, v: &extJSONValue{t: TypeString, v: "string"}, + }, + { + f: expectSingleValue, p: ejp, + k: "Int32", t: TypeInt32, v: &extJSONValue{t: TypeString, v: "42"}, + }, + { + f: expectSingleValue, p: ejp, + k: "Int64", t: TypeInt64, v: &extJSONValue{t: TypeString, v: "42"}, + }, + { + f: expectSingleValue, p: ejp, + k: "Double", t: TypeDouble, v: &extJSONValue{t: TypeString, v: "42.42"}, + }, + { + f: expectSingleValue, p: ejp, + k: "SpecialFloat", t: TypeDouble, v: &extJSONValue{t: TypeString, v: "NaN"}, + }, + { + f: expectSingleValue, p: ejp, + k: "Decimal", t: TypeDecimal128, v: &extJSONValue{t: TypeString, v: "1234"}, + }, + { + f: expectMultipleValues, p: ejp, + k: "Binary", t: TypeBinary, + v: &extJSONObject{ + keys: []string{"base64", "subType"}, + values: []*extJSONValue{ + {t: TypeString, v: "o0w498Or7cijeBSpkquNtg=="}, + {t: TypeString, v: "03"}, + }, + }, + }, + { + f: expectMultipleValues, p: ejp, + k: "BinaryLegacy", t: TypeBinary, + v: &extJSONObject{ + keys: []string{"base64", "subType"}, + values: []*extJSONValue{ + 
{t: TypeString, v: "o0w498Or7cijeBSpkquNtg=="}, + {t: TypeString, v: "03"}, + }, + }, + }, + { + f: expectMultipleValues, p: ejp, + k: "BinaryUserDefined", t: TypeBinary, + v: &extJSONObject{ + keys: []string{"base64", "subType"}, + values: []*extJSONValue{ + {t: TypeString, v: "AQIDBAU="}, + {t: TypeString, v: "80"}, + }, + }, + }, + { + f: expectSingleValue, p: ejp, + k: "Code", t: TypeJavaScript, v: &extJSONValue{t: TypeString, v: "function() {}"}, + }, + { + f: expectSubDocument, p: ejp, + k: "CodeWithEmptyScope", t: TypeCodeWithScope, + v: ejpSubDocumentTestValue{ + code: "function() {}", + ktvs: []ejpKeyTypValTriple{}, + }, + }, + { + f: expectSubDocument, p: ejp, + k: "CodeWithScope", t: TypeCodeWithScope, + v: ejpSubDocumentTestValue{ + code: "function() {}", + ktvs: []ejpKeyTypValTriple{ + {"x", TypeInt32, &extJSONValue{t: TypeInt32, v: int32(1)}}, + }, + }, + }, + { + f: expectSubDocument, p: ejp, + k: "EmptySubdocument", t: TypeEmbeddedDocument, + v: ejpSubDocumentTestValue{ + ktvs: []ejpKeyTypValTriple{}, + }, + }, + { + f: expectSubDocument, p: ejp, + k: "Subdocument", t: TypeEmbeddedDocument, + v: ejpSubDocumentTestValue{ + ktvs: []ejpKeyTypValTriple{ + {"foo", TypeString, &extJSONValue{t: TypeString, v: "bar"}}, + {"baz", TypeInt32, &extJSONValue{t: TypeString, v: "42"}}, + }, + }, + }, + { + f: expectArray, p: ejp, + k: "Array", t: TypeArray, + v: []ejpKeyTypValTriple{ + {typ: TypeInt32, val: &extJSONValue{t: TypeString, v: "1"}}, + {typ: TypeInt64, val: &extJSONValue{t: TypeString, v: "2"}}, + {typ: TypeDouble, val: &extJSONValue{t: TypeString, v: "3"}}, + {typ: TypeInt32, val: &extJSONValue{t: TypeInt32, v: int32(4)}}, + {typ: TypeString, val: &extJSONValue{t: TypeString, v: "string"}}, + {typ: TypeDouble, val: &extJSONValue{t: TypeDouble, v: 5.0}}, + }, + }, + { + f: expectMultipleValues, p: ejp, + k: "Timestamp", t: TypeTimestamp, + v: &extJSONObject{ + keys: []string{"t", "i"}, + values: []*extJSONValue{ + {t: TypeInt32, v: int32(42)}, + {t: TypeInt32, v: int32(1)}, + }, + }, + }, + { + f: expectMultipleValues, p: ejp, + k: "RegularExpression", t: TypeRegex, + v: &extJSONObject{ + keys: []string{"pattern", "options"}, + values: []*extJSONValue{ + {t: TypeString, v: "foo*"}, + {t: TypeString, v: "ix"}, + }, + }, + }, + { + f: expectMultipleValues, p: ejp, + k: "DatetimeEpoch", t: TypeDateTime, + v: &extJSONObject{ + keys: []string{"$numberLong"}, + values: []*extJSONValue{ + {t: TypeString, v: "0"}, + }, + }, + }, + { + f: expectMultipleValues, p: ejp, + k: "DatetimePositive", t: TypeDateTime, + v: &extJSONObject{ + keys: []string{"$numberLong"}, + values: []*extJSONValue{ + {t: TypeString, v: "9223372036854775807"}, + }, + }, + }, + { + f: expectMultipleValues, p: ejp, + k: "DatetimeNegative", t: TypeDateTime, + v: &extJSONObject{ + keys: []string{"$numberLong"}, + values: []*extJSONValue{ + {t: TypeString, v: "-9223372036854775808"}, + }, + }, + }, + { + f: expectSingleValue, p: ejp, + k: "True", t: TypeBoolean, v: &extJSONValue{t: TypeBoolean, v: true}, + }, + { + f: expectSingleValue, p: ejp, + k: "False", t: TypeBoolean, v: &extJSONValue{t: TypeBoolean, v: false}, + }, + { + f: expectMultipleValues, p: ejp, + k: "DBPointer", t: TypeDBPointer, + v: &extJSONObject{ + keys: []string{"$ref", "$id"}, + values: []*extJSONValue{ + {t: TypeString, v: "db.collection"}, + {t: TypeString, v: "57e193d7a9cc81b4027498b1"}, + }, + }, + }, + { + f: expectSubDocument, p: ejp, + k: "DBRef", t: TypeEmbeddedDocument, + v: ejpSubDocumentTestValue{ + ktvs: []ejpKeyTypValTriple{ + 
{"$ref", TypeString, &extJSONValue{t: TypeString, v: "collection"}}, + {"$id", TypeObjectID, &extJSONValue{t: TypeString, v: "57fd71e96e32ab4225b723fb"}}, + {"$db", TypeString, &extJSONValue{t: TypeString, v: "database"}}, + }, + }, + }, + { + f: expectSubDocument, p: ejp, + k: "DBRefNoDB", t: TypeEmbeddedDocument, + v: ejpSubDocumentTestValue{ + ktvs: []ejpKeyTypValTriple{ + {"$ref", TypeString, &extJSONValue{t: TypeString, v: "collection"}}, + {"$id", TypeObjectID, &extJSONValue{t: TypeString, v: "57fd71e96e32ab4225b723fb"}}, + }, + }, + }, + { + f: expectSingleValue, p: ejp, + k: "MinKey", t: TypeMinKey, v: &extJSONValue{t: TypeInt32, v: int32(1)}, + }, + { + f: expectSingleValue, p: ejp, + k: "MaxKey", t: TypeMaxKey, v: &extJSONValue{t: TypeInt32, v: int32(1)}, + }, + { + f: expectSingleValue, p: ejp, + k: "Null", t: TypeNull, v: &extJSONValue{t: TypeNull, v: nil}, + }, + { + f: expectSingleValue, p: ejp, + k: "Undefined", t: TypeUndefined, v: &extJSONValue{t: TypeBoolean, v: true}, + }, + } + + // run the test cases + for _, tc := range cases { + tc.f(t, tc.p, tc.k, tc.t, tc.v) + } + + // expect end of whole document: read final } + k, typ, err := ejp.readKey() + readKeyDiff(t, "", k, Type(0), typ, err, expectErrEOD, "") + + // expect end of whole document: read EOF + k, typ, err = ejp.readKey() + readKeyDiff(t, "", k, Type(0), typ, err, expectErrEOF, "") + if diff := cmp.Diff(jpsDoneState, ejp.s); diff != "" { + t.Errorf("expected parser to be in done state but instead is in %v\n", ejp.s) + t.FailNow() + } +} + +func TestExtJSONValue(t *testing.T) { + t.Run("Large Date", func(t *testing.T) { + val := &extJSONValue{ + t: TypeString, + v: "3001-01-01T00:00:00Z", + } + + intVal, err := val.parseDateTime() + if err != nil { + t.Fatalf("error parsing date time: %v", err) + } + + if intVal <= 0 { + t.Fatalf("expected value above 0, got %v", intVal) + } + }) + t.Run("fallback time format", func(t *testing.T) { + val := &extJSONValue{ + t: TypeString, + v: "2019-06-04T14:54:31.416+0000", + } + + _, err := val.parseDateTime() + if err != nil { + t.Fatalf("error parsing date time: %v", err) + } + }) +} diff --git a/extjson_prose_test.go b/extjson_prose_test.go new file mode 100644 index 0000000..4242051 --- /dev/null +++ b/extjson_prose_test.go @@ -0,0 +1,46 @@ +// Copyright (C) MongoDB, Inc. 2017-present. +// +// Licensed under the Apache License, Version 2.0 (the "License"); you may +// not use this file except in compliance with the License. 
You may obtain +// a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 + +package bson + +import ( + "fmt" + "testing" + + "gitea.psichedelico.com/go/bson/internal/assert" +) + +func TestExtJSON(t *testing.T) { + timestampNegativeInt32Err := fmt.Errorf("$timestamp i number should be uint32: -1") + timestampNegativeInt64Err := fmt.Errorf("$timestamp i number should be uint32: -2147483649") + timestampLargeValueErr := fmt.Errorf("$timestamp i number should be uint32: 4294967296") + + testCases := []struct { + name string + input string + canonical bool + err error + }{ + {"timestamp - negative int32 value", `{"":{"$timestamp":{"t":0,"i":-1}}}`, false, timestampNegativeInt32Err}, + {"timestamp - negative int64 value", `{"":{"$timestamp":{"t":0,"i":-2147483649}}}`, false, timestampNegativeInt64Err}, + {"timestamp - value overflows uint32", `{"":{"$timestamp":{"t":0,"i":4294967296}}}`, false, timestampLargeValueErr}, + {"top level key is not treated as special", `{"$code": "foo"}`, false, nil}, + {"escaped single quote errors", `{"f\'oo": "bar"}`, false, ErrInvalidJSON}, + } + for _, tc := range testCases { + t.Run(tc.name, func(t *testing.T) { + var res Raw + err := UnmarshalExtJSON([]byte(tc.input), tc.canonical, &res) + if tc.err == nil { + assert.Nil(t, err, "UnmarshalExtJSON error: %v", err) + return + } + + assert.NotNil(t, err, "expected error %v, got nil", tc.err) + assert.Equal(t, tc.err.Error(), err.Error(), "expected error %v, got %v", tc.err, err) + }) + } +} diff --git a/extjson_reader.go b/extjson_reader.go new file mode 100644 index 0000000..4bee3ef --- /dev/null +++ b/extjson_reader.go @@ -0,0 +1,606 @@ +// Copyright (C) MongoDB, Inc. 2017-present. +// +// Licensed under the Apache License, Version 2.0 (the "License"); you may +// not use this file except in compliance with the License. You may obtain +// a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 + +package bson + +import ( + "errors" + "fmt" + "io" +) + +type ejvrState struct { + mode mode + vType Type + depth int +} + +// extJSONValueReader is for reading extended JSON. +type extJSONValueReader struct { + p *extJSONParser + + stack []ejvrState + frame int +} + +// NewExtJSONValueReader returns a ValueReader that reads Extended JSON values +// from r. If canonicalOnly is true, reading values from the ValueReader returns +// an error if the Extended JSON was not marshaled in canonical mode. 
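+//
+// A minimal usage sketch (the input document and the field name "x" are
+// illustrative only):
+//
+//	vr, err := NewExtJSONValueReader(strings.NewReader(`{"x": {"$numberInt": "1"}}`), true)
+//	if err != nil {
+//		// handle err
+//	}
+//	_ = vr // values are then consumed through the ValueReader interface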
+func NewExtJSONValueReader(r io.Reader, canonicalOnly bool) (ValueReader, error) {
+	return newExtJSONValueReader(r, canonicalOnly)
+}
+
+func newExtJSONValueReader(r io.Reader, canonicalOnly bool) (*extJSONValueReader, error) {
+	ejvr := new(extJSONValueReader)
+	return ejvr.reset(r, canonicalOnly)
+}
+
+func (ejvr *extJSONValueReader) reset(r io.Reader, canonicalOnly bool) (*extJSONValueReader, error) {
+	p := newExtJSONParser(r, canonicalOnly)
+	typ, err := p.peekType()
+
+	if err != nil {
+		return nil, ErrInvalidJSON
+	}
+
+	var m mode
+	switch typ {
+	case TypeEmbeddedDocument:
+		m = mTopLevel
+	case TypeArray:
+		m = mArray
+	default:
+		m = mValue
+	}
+
+	stack := make([]ejvrState, 1, 5)
+	stack[0] = ejvrState{
+		mode:  m,
+		vType: typ,
+	}
+	return &extJSONValueReader{
+		p:     p,
+		stack: stack,
+	}, nil
+}
+
+func (ejvr *extJSONValueReader) advanceFrame() {
+	if ejvr.frame+1 >= len(ejvr.stack) { // We need to grow the stack
+		length := len(ejvr.stack)
+		if length+1 >= cap(ejvr.stack) {
+			// double it
+			buf := make([]ejvrState, 2*cap(ejvr.stack)+1)
+			copy(buf, ejvr.stack)
+			ejvr.stack = buf
+		}
+		ejvr.stack = ejvr.stack[:length+1]
+	}
+	ejvr.frame++
+
+	// Clean the stack
+	ejvr.stack[ejvr.frame].mode = 0
+	ejvr.stack[ejvr.frame].vType = 0
+	ejvr.stack[ejvr.frame].depth = 0
+}
+
+func (ejvr *extJSONValueReader) pushDocument() {
+	ejvr.advanceFrame()
+
+	ejvr.stack[ejvr.frame].mode = mDocument
+	ejvr.stack[ejvr.frame].depth = ejvr.p.depth
+}
+
+func (ejvr *extJSONValueReader) pushCodeWithScope() {
+	ejvr.advanceFrame()
+
+	ejvr.stack[ejvr.frame].mode = mCodeWithScope
+}
+
+func (ejvr *extJSONValueReader) pushArray() {
+	ejvr.advanceFrame()
+
+	ejvr.stack[ejvr.frame].mode = mArray
+}
+
+func (ejvr *extJSONValueReader) push(m mode, t Type) {
+	ejvr.advanceFrame()
+
+	ejvr.stack[ejvr.frame].mode = m
+	ejvr.stack[ejvr.frame].vType = t
+}
+
+func (ejvr *extJSONValueReader) pop() {
+	switch ejvr.stack[ejvr.frame].mode {
+	case mElement, mValue:
+		ejvr.frame--
+	case mDocument, mArray, mCodeWithScope:
+		ejvr.frame -= 2 // we pop twice to jump over the mElement frame: mDocument -> mElement -> mDocument/mTopLevel/etc...
+	}
+}
+
+func (ejvr *extJSONValueReader) skipObject() {
+	// read entire object until depth returns to 0 (last ending } or ] seen)
+	depth := 1
+	for depth > 0 {
+		ejvr.p.advanceState()
+
+		// If object is empty, lower depth and continue. When emptyObject is true, the
+		// parser has already read both the opening and closing brackets of an empty
+		// object ("{}"), so the next valid token will be part of the parent document,
+		// not part of the nested document.
+		//
+		// If there is a comma, there are remaining fields, emptyObject must be set back
+		// to false, and comma must be skipped with advanceState().
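+		//
+		// Concretely, when the value being skipped is itself just "{}", the
+		// emptyObject branch below decrements depth from 1 to 0 and the loop
+		// exits immediately (an illustrative trace; nested documents follow
+		// the same depth bookkeeping).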
+ if ejvr.p.emptyObject { + if ejvr.p.s == jpsSawComma { + ejvr.p.emptyObject = false + ejvr.p.advanceState() + } + depth-- + continue + } + + switch ejvr.p.s { + case jpsSawBeginObject, jpsSawBeginArray: + depth++ + case jpsSawEndObject, jpsSawEndArray: + depth-- + } + } +} + +func (ejvr *extJSONValueReader) invalidTransitionErr(destination mode, name string, modes []mode) error { + te := TransitionError{ + name: name, + current: ejvr.stack[ejvr.frame].mode, + destination: destination, + modes: modes, + action: "read", + } + if ejvr.frame != 0 { + te.parent = ejvr.stack[ejvr.frame-1].mode + } + return te +} + +func (ejvr *extJSONValueReader) typeError(t Type) error { + return fmt.Errorf("positioned on %s, but attempted to read %s", ejvr.stack[ejvr.frame].vType, t) +} + +func (ejvr *extJSONValueReader) ensureElementValue(t Type, destination mode, callerName string, addModes ...mode) error { + switch ejvr.stack[ejvr.frame].mode { + case mElement, mValue: + if ejvr.stack[ejvr.frame].vType != t { + return ejvr.typeError(t) + } + default: + modes := []mode{mElement, mValue} + if addModes != nil { + modes = append(modes, addModes...) + } + return ejvr.invalidTransitionErr(destination, callerName, modes) + } + + return nil +} + +func (ejvr *extJSONValueReader) Type() Type { + return ejvr.stack[ejvr.frame].vType +} + +func (ejvr *extJSONValueReader) Skip() error { + switch ejvr.stack[ejvr.frame].mode { + case mElement, mValue: + default: + return ejvr.invalidTransitionErr(0, "Skip", []mode{mElement, mValue}) + } + + defer ejvr.pop() + + t := ejvr.stack[ejvr.frame].vType + switch t { + case TypeArray, TypeEmbeddedDocument, TypeCodeWithScope: + // read entire array, doc or CodeWithScope + ejvr.skipObject() + default: + _, err := ejvr.p.readValue(t) + if err != nil { + return err + } + } + + return nil +} + +func (ejvr *extJSONValueReader) ReadArray() (ArrayReader, error) { + switch ejvr.stack[ejvr.frame].mode { + case mTopLevel: // allow reading array from top level + case mArray: + return ejvr, nil + default: + if err := ejvr.ensureElementValue(TypeArray, mArray, "ReadArray", mTopLevel, mArray); err != nil { + return nil, err + } + } + + ejvr.pushArray() + + return ejvr, nil +} + +func (ejvr *extJSONValueReader) ReadBinary() (b []byte, btype byte, err error) { + if err := ejvr.ensureElementValue(TypeBinary, 0, "ReadBinary"); err != nil { + return nil, 0, err + } + + v, err := ejvr.p.readValue(TypeBinary) + if err != nil { + return nil, 0, err + } + + b, btype, err = v.parseBinary() + + ejvr.pop() + return b, btype, err +} + +func (ejvr *extJSONValueReader) ReadBoolean() (bool, error) { + if err := ejvr.ensureElementValue(TypeBoolean, 0, "ReadBoolean"); err != nil { + return false, err + } + + v, err := ejvr.p.readValue(TypeBoolean) + if err != nil { + return false, err + } + + if v.t != TypeBoolean { + return false, fmt.Errorf("expected type bool, but got type %s", v.t) + } + + ejvr.pop() + return v.v.(bool), nil +} + +func (ejvr *extJSONValueReader) ReadDocument() (DocumentReader, error) { + switch ejvr.stack[ejvr.frame].mode { + case mTopLevel: + return ejvr, nil + case mElement, mValue: + if ejvr.stack[ejvr.frame].vType != TypeEmbeddedDocument { + return nil, ejvr.typeError(TypeEmbeddedDocument) + } + + ejvr.pushDocument() + return ejvr, nil + default: + return nil, ejvr.invalidTransitionErr(mDocument, "ReadDocument", []mode{mTopLevel, mElement, mValue}) + } +} + +func (ejvr *extJSONValueReader) ReadCodeWithScope() (code string, dr DocumentReader, err error) { + if err = 
ejvr.ensureElementValue(TypeCodeWithScope, 0, "ReadCodeWithScope"); err != nil { + return "", nil, err + } + + v, err := ejvr.p.readValue(TypeCodeWithScope) + if err != nil { + return "", nil, err + } + + code, err = v.parseJavascript() + + ejvr.pushCodeWithScope() + return code, ejvr, err +} + +func (ejvr *extJSONValueReader) ReadDBPointer() (ns string, oid ObjectID, err error) { + if err = ejvr.ensureElementValue(TypeDBPointer, 0, "ReadDBPointer"); err != nil { + return "", NilObjectID, err + } + + v, err := ejvr.p.readValue(TypeDBPointer) + if err != nil { + return "", NilObjectID, err + } + + ns, oid, err = v.parseDBPointer() + + ejvr.pop() + return ns, oid, err +} + +func (ejvr *extJSONValueReader) ReadDateTime() (int64, error) { + if err := ejvr.ensureElementValue(TypeDateTime, 0, "ReadDateTime"); err != nil { + return 0, err + } + + v, err := ejvr.p.readValue(TypeDateTime) + if err != nil { + return 0, err + } + + d, err := v.parseDateTime() + + ejvr.pop() + return d, err +} + +func (ejvr *extJSONValueReader) ReadDecimal128() (Decimal128, error) { + if err := ejvr.ensureElementValue(TypeDecimal128, 0, "ReadDecimal128"); err != nil { + return Decimal128{}, err + } + + v, err := ejvr.p.readValue(TypeDecimal128) + if err != nil { + return Decimal128{}, err + } + + d, err := v.parseDecimal128() + + ejvr.pop() + return d, err +} + +func (ejvr *extJSONValueReader) ReadDouble() (float64, error) { + if err := ejvr.ensureElementValue(TypeDouble, 0, "ReadDouble"); err != nil { + return 0, err + } + + v, err := ejvr.p.readValue(TypeDouble) + if err != nil { + return 0, err + } + + d, err := v.parseDouble() + + ejvr.pop() + return d, err +} + +func (ejvr *extJSONValueReader) ReadInt32() (int32, error) { + if err := ejvr.ensureElementValue(TypeInt32, 0, "ReadInt32"); err != nil { + return 0, err + } + + v, err := ejvr.p.readValue(TypeInt32) + if err != nil { + return 0, err + } + + i, err := v.parseInt32() + + ejvr.pop() + return i, err +} + +func (ejvr *extJSONValueReader) ReadInt64() (int64, error) { + if err := ejvr.ensureElementValue(TypeInt64, 0, "ReadInt64"); err != nil { + return 0, err + } + + v, err := ejvr.p.readValue(TypeInt64) + if err != nil { + return 0, err + } + + i, err := v.parseInt64() + + ejvr.pop() + return i, err +} + +func (ejvr *extJSONValueReader) ReadJavascript() (code string, err error) { + if err = ejvr.ensureElementValue(TypeJavaScript, 0, "ReadJavascript"); err != nil { + return "", err + } + + v, err := ejvr.p.readValue(TypeJavaScript) + if err != nil { + return "", err + } + + code, err = v.parseJavascript() + + ejvr.pop() + return code, err +} + +func (ejvr *extJSONValueReader) ReadMaxKey() error { + if err := ejvr.ensureElementValue(TypeMaxKey, 0, "ReadMaxKey"); err != nil { + return err + } + + v, err := ejvr.p.readValue(TypeMaxKey) + if err != nil { + return err + } + + err = v.parseMinMaxKey("max") + + ejvr.pop() + return err +} + +func (ejvr *extJSONValueReader) ReadMinKey() error { + if err := ejvr.ensureElementValue(TypeMinKey, 0, "ReadMinKey"); err != nil { + return err + } + + v, err := ejvr.p.readValue(TypeMinKey) + if err != nil { + return err + } + + err = v.parseMinMaxKey("min") + + ejvr.pop() + return err +} + +func (ejvr *extJSONValueReader) ReadNull() error { + if err := ejvr.ensureElementValue(TypeNull, 0, "ReadNull"); err != nil { + return err + } + + v, err := ejvr.p.readValue(TypeNull) + if err != nil { + return err + } + + if v.t != TypeNull { + return fmt.Errorf("expected type null but got type %s", v.t) + } + + ejvr.pop() + return nil +} + 
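+// Each scalar Read* method on extJSONValueReader follows the same shape:
+// validate the current frame with ensureElementValue, pull the raw extended
+// JSON value from the parser, convert it with the matching parse* helper, and
+// pop the frame. A condensed sketch of that shared flow (pseudocode only;
+// TypeX, parseX, and zero stand in for the concrete type, helper, and zero
+// value):
+//
+//	if err := ejvr.ensureElementValue(TypeX, 0, "ReadX"); err != nil {
+//		return zero, err
+//	}
+//	v, err := ejvr.p.readValue(TypeX) // raw value from the parser
+//	if err != nil {
+//		return zero, err
+//	}
+//	x, err := v.parseX() // type-specific conversion
+//	ejvr.pop()           // leave the element/value frame
+//	return x, err
+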
+func (ejvr *extJSONValueReader) ReadObjectID() (ObjectID, error) { + if err := ejvr.ensureElementValue(TypeObjectID, 0, "ReadObjectID"); err != nil { + return ObjectID{}, err + } + + v, err := ejvr.p.readValue(TypeObjectID) + if err != nil { + return ObjectID{}, err + } + + oid, err := v.parseObjectID() + + ejvr.pop() + return oid, err +} + +func (ejvr *extJSONValueReader) ReadRegex() (pattern string, options string, err error) { + if err = ejvr.ensureElementValue(TypeRegex, 0, "ReadRegex"); err != nil { + return "", "", err + } + + v, err := ejvr.p.readValue(TypeRegex) + if err != nil { + return "", "", err + } + + pattern, options, err = v.parseRegex() + + ejvr.pop() + return pattern, options, err +} + +func (ejvr *extJSONValueReader) ReadString() (string, error) { + if err := ejvr.ensureElementValue(TypeString, 0, "ReadString"); err != nil { + return "", err + } + + v, err := ejvr.p.readValue(TypeString) + if err != nil { + return "", err + } + + if v.t != TypeString { + return "", fmt.Errorf("expected type string but got type %s", v.t) + } + + ejvr.pop() + return v.v.(string), nil +} + +func (ejvr *extJSONValueReader) ReadSymbol() (symbol string, err error) { + if err = ejvr.ensureElementValue(TypeSymbol, 0, "ReadSymbol"); err != nil { + return "", err + } + + v, err := ejvr.p.readValue(TypeSymbol) + if err != nil { + return "", err + } + + symbol, err = v.parseSymbol() + + ejvr.pop() + return symbol, err +} + +func (ejvr *extJSONValueReader) ReadTimestamp() (t uint32, i uint32, err error) { + if err = ejvr.ensureElementValue(TypeTimestamp, 0, "ReadTimestamp"); err != nil { + return 0, 0, err + } + + v, err := ejvr.p.readValue(TypeTimestamp) + if err != nil { + return 0, 0, err + } + + t, i, err = v.parseTimestamp() + + ejvr.pop() + return t, i, err +} + +func (ejvr *extJSONValueReader) ReadUndefined() error { + if err := ejvr.ensureElementValue(TypeUndefined, 0, "ReadUndefined"); err != nil { + return err + } + + v, err := ejvr.p.readValue(TypeUndefined) + if err != nil { + return err + } + + err = v.parseUndefined() + + ejvr.pop() + return err +} + +func (ejvr *extJSONValueReader) ReadElement() (string, ValueReader, error) { + switch ejvr.stack[ejvr.frame].mode { + case mTopLevel, mDocument, mCodeWithScope: + default: + return "", nil, ejvr.invalidTransitionErr(mElement, "ReadElement", []mode{mTopLevel, mDocument, mCodeWithScope}) + } + + name, t, err := ejvr.p.readKey() + + if err != nil { + if errors.Is(err, ErrEOD) { + if ejvr.stack[ejvr.frame].mode == mCodeWithScope { + _, err := ejvr.p.peekType() + if err != nil { + return "", nil, err + } + } + + ejvr.pop() + } + + return "", nil, err + } + + ejvr.push(mElement, t) + return name, ejvr, nil +} + +func (ejvr *extJSONValueReader) ReadValue() (ValueReader, error) { + switch ejvr.stack[ejvr.frame].mode { + case mArray: + default: + return nil, ejvr.invalidTransitionErr(mValue, "ReadValue", []mode{mArray}) + } + + t, err := ejvr.p.peekType() + if err != nil { + if errors.Is(err, ErrEOA) { + ejvr.pop() + } + + return nil, err + } + + ejvr.push(mValue, t) + return ejvr, nil +} diff --git a/extjson_reader_test.go b/extjson_reader_test.go new file mode 100644 index 0000000..8f15f68 --- /dev/null +++ b/extjson_reader_test.go @@ -0,0 +1,168 @@ +// Copyright (C) MongoDB, Inc. 2017-present. +// +// Licensed under the Apache License, Version 2.0 (the "License"); you may +// not use this file except in compliance with the License. 
You may obtain +// a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 + +package bson + +import ( + "errors" + "fmt" + "io" + "strings" + "testing" + + "gitea.psichedelico.com/go/bson/internal/assert" + "github.com/google/go-cmp/cmp" +) + +func TestExtJSONReader(t *testing.T) { + t.Run("ReadDocument", func(t *testing.T) { + t.Run("EmbeddedDocument", func(t *testing.T) { + ejvr := &extJSONValueReader{ + stack: []ejvrState{ + {mode: mTopLevel}, + {mode: mElement, vType: TypeBoolean}, + }, + frame: 1, + } + + ejvr.stack[1].mode = mArray + wanterr := ejvr.invalidTransitionErr(mDocument, "ReadDocument", []mode{mTopLevel, mElement, mValue}) + _, err := ejvr.ReadDocument() + if err == nil || err.Error() != wanterr.Error() { + t.Errorf("Incorrect returned error. got %v; want %v", err, wanterr) + } + + }) + }) + + t.Run("invalid transition", func(t *testing.T) { + t.Run("Skip", func(t *testing.T) { + ejvr := &extJSONValueReader{stack: []ejvrState{{mode: mTopLevel}}} + wanterr := (&extJSONValueReader{stack: []ejvrState{{mode: mTopLevel}}}).invalidTransitionErr(0, "Skip", []mode{mElement, mValue}) + goterr := ejvr.Skip() + if !cmp.Equal(goterr, wanterr, cmp.Comparer(assert.CompareErrors)) { + t.Errorf("Expected correct invalid transition error. got %v; want %v", goterr, wanterr) + } + }) + }) +} + +func TestReadMultipleTopLevelDocuments(t *testing.T) { + testCases := []struct { + name string + input string + expected [][]byte + }{ + { + "single top-level document", + "{\"foo\":1}", + [][]byte{ + {0x0E, 0x00, 0x00, 0x00, 0x10, 'f', 'o', 'o', 0x00, 0x01, 0x00, 0x00, 0x00, 0x00}, + }, + }, + { + "single top-level document with leading and trailing whitespace", + "\n\n {\"foo\":1} \n", + [][]byte{ + {0x0E, 0x00, 0x00, 0x00, 0x10, 'f', 'o', 'o', 0x00, 0x01, 0x00, 0x00, 0x00, 0x00}, + }, + }, + { + "two top-level documents", + "{\"foo\":1}{\"foo\":2}", + [][]byte{ + {0x0E, 0x00, 0x00, 0x00, 0x10, 'f', 'o', 'o', 0x00, 0x01, 0x00, 0x00, 0x00, 0x00}, + {0x0E, 0x00, 0x00, 0x00, 0x10, 'f', 'o', 'o', 0x00, 0x02, 0x00, 0x00, 0x00, 0x00}, + }, + }, + { + "two top-level documents with leading and trailing whitespace and whitespace separation ", + "\n\n {\"foo\":1}\n{\"foo\":2}\n ", + [][]byte{ + {0x0E, 0x00, 0x00, 0x00, 0x10, 'f', 'o', 'o', 0x00, 0x01, 0x00, 0x00, 0x00, 0x00}, + {0x0E, 0x00, 0x00, 0x00, 0x10, 'f', 'o', 'o', 0x00, 0x02, 0x00, 0x00, 0x00, 0x00}, + }, + }, + { + "top-level array with single document", + "[{\"foo\":1}]", + [][]byte{ + {0x0E, 0x00, 0x00, 0x00, 0x10, 'f', 'o', 'o', 0x00, 0x01, 0x00, 0x00, 0x00, 0x00}, + }, + }, + { + "top-level array with 2 documents", + "[{\"foo\":1},{\"foo\":2}]", + [][]byte{ + {0x0E, 0x00, 0x00, 0x00, 0x10, 'f', 'o', 'o', 0x00, 0x01, 0x00, 0x00, 0x00, 0x00}, + {0x0E, 0x00, 0x00, 0x00, 0x10, 'f', 'o', 'o', 0x00, 0x02, 0x00, 0x00, 0x00, 0x00}, + }, + }, + } + + for _, tc := range testCases { + t.Run(tc.name, func(t *testing.T) { + r := strings.NewReader(tc.input) + vr, err := NewExtJSONValueReader(r, false) + if err != nil { + t.Fatalf("expected no error, but got %v", err) + } + + actual, err := readAllDocuments(vr) + if err != nil { + t.Fatalf("expected no error, but got %v", err) + } + + if diff := cmp.Diff(tc.expected, actual); diff != "" { + t.Fatalf("expected does not match actual: %v", diff) + } + }) + } +} + +func readAllDocuments(vr ValueReader) ([][]byte, error) { + var actual [][]byte + + switch vr.Type() { + case TypeEmbeddedDocument: + for { + result, err := copyDocumentToBytes(vr) + if err != nil { + if errors.Is(err, io.EOF) { + break + } 
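+				// io.EOF only signals that there are no more top-level
+				// documents; any other error is a genuine failure.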
+ return nil, err + } + + actual = append(actual, result) + } + case TypeArray: + ar, err := vr.ReadArray() + if err != nil { + return nil, err + } + for { + evr, err := ar.ReadValue() + if err != nil { + if errors.Is(err, ErrEOA) { + break + } + return nil, err + } + + result, err := copyDocumentToBytes(evr) + if err != nil { + return nil, err + } + + actual = append(actual, result) + } + default: + return nil, fmt.Errorf("expected an array or a document, but got %s", vr.Type()) + } + + return actual, nil +} diff --git a/extjson_tables.go b/extjson_tables.go new file mode 100644 index 0000000..5384db2 --- /dev/null +++ b/extjson_tables.go @@ -0,0 +1,223 @@ +// Copyright (C) MongoDB, Inc. 2017-present. +// +// Licensed under the Apache License, Version 2.0 (the "License"); you may +// not use this file except in compliance with the License. You may obtain +// a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 +// +// Based on github.com/golang/go by The Go Authors +// See THIRD-PARTY-NOTICES for original license terms. + +package bson + +import "unicode/utf8" + +// safeSet holds the value true if the ASCII character with the given array +// position can be represented inside a JSON string without any further +// escaping. +// +// All values are true except for the ASCII control characters (0-31), the +// double quote ("), and the backslash character ("\"). +var safeSet = [utf8.RuneSelf]bool{ + ' ': true, + '!': true, + '"': false, + '#': true, + '$': true, + '%': true, + '&': true, + '\'': true, + '(': true, + ')': true, + '*': true, + '+': true, + ',': true, + '-': true, + '.': true, + '/': true, + '0': true, + '1': true, + '2': true, + '3': true, + '4': true, + '5': true, + '6': true, + '7': true, + '8': true, + '9': true, + ':': true, + ';': true, + '<': true, + '=': true, + '>': true, + '?': true, + '@': true, + 'A': true, + 'B': true, + 'C': true, + 'D': true, + 'E': true, + 'F': true, + 'G': true, + 'H': true, + 'I': true, + 'J': true, + 'K': true, + 'L': true, + 'M': true, + 'N': true, + 'O': true, + 'P': true, + 'Q': true, + 'R': true, + 'S': true, + 'T': true, + 'U': true, + 'V': true, + 'W': true, + 'X': true, + 'Y': true, + 'Z': true, + '[': true, + '\\': false, + ']': true, + '^': true, + '_': true, + '`': true, + 'a': true, + 'b': true, + 'c': true, + 'd': true, + 'e': true, + 'f': true, + 'g': true, + 'h': true, + 'i': true, + 'j': true, + 'k': true, + 'l': true, + 'm': true, + 'n': true, + 'o': true, + 'p': true, + 'q': true, + 'r': true, + 's': true, + 't': true, + 'u': true, + 'v': true, + 'w': true, + 'x': true, + 'y': true, + 'z': true, + '{': true, + '|': true, + '}': true, + '~': true, + '\u007f': true, +} + +// htmlSafeSet holds the value true if the ASCII character with the given +// array position can be safely represented inside a JSON string, embedded +// inside of HTML