<?php

// Append the pieces of a BSON document of exactly $size bytes to $parts.
// Nests arrays until $depth_limit is exhausted, then inserts a legacy
// code-with-scope element to reset the JSON printer's depth counter and keeps going.
// NOTE: the opening of this function is reconstructed from how it is called;
// the default of 150 is an assumed value that only needs to stay below
// libbson's maximum JSON recursion depth between resets.
function bson_parts_for_size(&$parts, $size, $depth_limit = 150) {
    if ($depth_limit <= 0 && $size >= 13+17) {
        // Use a legacy code-with-scope to reset the bson-as-json depth limit! 17 bytes minimum.
        $parts[] = pack('VCa*xVVa*x', $size, 15, '0', $size - 8, 1, '');
        bson_parts_for_size($parts, $size - 17);
        $parts[] = chr(0);
    } else if ($size >= 13+8 && $depth_limit > 0) {
        // Room for another layer + base case.
        // Recursive nested arrays, 8 bytes each.
        $parts[] = pack('VCa*x', $size, 4, "0");
        bson_parts_for_size($parts, $size - 8, $depth_limit - 1);
        $parts[] = chr(0);
    } else {
        // Base case, use all remaining space.
        // Innermost nested array containing one string, 13 bytes minimum.
        $str_len = $size - 13;
        assert($str_len >= 0);
        $parts[] = pack('VCa*xVa*xx', $size, 2, '0', $str_len + 1, str_repeat('x', $str_len));
    }
}

// Generate a BSON document exactly $size bytes long.
// Nests arrays as deeply as possible, then keeps extra content in a string.
function make_bson_with_size($size) {
    $parts = array();
    bson_parts_for_size($parts, $size);
    $str = join('', $parts);
    assert(strlen($str) == $size);
    return $str;
}

// Shorter samples
echo MongoDB\BSON\Document::fromBSON(make_bson_with_size(64))->toRelaxedExtendedJSON(), "\n";
echo MongoDB\BSON\Document::fromBSON(make_bson_with_size(100))->toRelaxedExtendedJSON(), "\n";

// The maximum-size doc (1 << 24) causes a stack overflow with the default stack
// size; raise the limit first, e.g.: ulimit -s 2097152
// Very slow at producing even a single document:
//   Large doc: 16777216 BSON bytes, output is 8705053 JSON bytes
//   16 total, average 658.3125 seconds per doc, 0.012610706114581 MB/s
$large_doc_size = 1 << 24;

// 1MB doc: 0.8 seconds each (0.6 MB/s)
//$large_doc_size = 1 << 20;

// 2MB doc: 4.8 seconds each (0.2 MB/s)
//$large_doc_size = 1 << 21;

// 128kB of BSON fits in the normal 8MB stack. (PHP seems to be using the system
// stack here, not allocating its own.) I'm seeing about 4.5 MB/s here.
//$large_doc_size = 1 << 17;

$large_doc = MongoDB\BSON\Document::fromBSON(make_bson_with_size($large_doc_size));
$large_json = $large_doc->toRelaxedExtendedJSON();
$large_json_size = strlen($large_json);
echo "Large doc: $large_doc_size BSON bytes, output is $large_json_size JSON bytes\n";

// Perf test: repeated bson_as_json with max nesting and max size
$timer_start = time();
$timer_count = 0;
while (1) {
    $large_doc->toRelaxedExtendedJSON();
    $timer_count++;
    if (!($timer_count & 15)) {
        $elapsed = time() - $timer_start;
        if ($elapsed > 5) {
            $timer_average = $elapsed / $timer_count;
            $effective_mbyte_rate = $large_json_size / $timer_average / (1024*1024);
            echo "\n$timer_count total, average $timer_average seconds per doc, $effective_mbyte_rate MB/s\n";
        }
    } else {
        echo '.';
    }
}
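
/*
 * A minimal inspection sketch, not part of the script above: since the benchmark
 * loop never exits, something like the following (an assumed usage, relying only
 * on standard PHP functions) can be run separately to look at the raw bytes that
 * make_bson_with_size() produces:
 *
 *   $doc = make_bson_with_size(64);
 *   echo chunk_split(bin2hex($doc), 32, "\n");  // 16 bytes of hex per row
 *
 * The first four bytes are the little-endian total document length
 * (40 00 00 00 == 64), and each document/array/scope level ends with a
 * single 0x00 terminator byte.
 */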