@@ -8,6 +8,8 @@ import { Buffer } from 'node:buffer';
88import * as fs from 'node:fs' ;
99import * as path from 'node:path' ;
1010import { Readable } from 'node:stream' ;
11+ import protobuf from 'protobufjs/minimal.js' ;
12+ const { Reader } = protobuf ;
1113
1214import * as MessageBackup from '../MessageBackup.js' ;
1315import * as util from './util.js' ;
@@ -190,6 +192,60 @@ const exampleBackup = fs.readFileSync(
190192 path . join ( import . meta. dirname , '../../ts/test/canonical-backup.binproto' )
191193) ;
192194
195+ function chunkLengthDelimited ( binproto : Uint8Array ) : Uint8Array [ ] {
196+ const r = Reader . create ( binproto ) ;
197+ const chunks : Uint8Array [ ] = [ ] ;
198+
199+ while ( r . pos < r . len ) {
200+ const headerStart = r . pos ; // start of the varint length prefix
201+ const length = r . uint32 ( ) ; // implicitly advances to the start of the body
202+ const bodyStart = r . pos ; // now points to the start of the proto message
203+ const end = bodyStart + length ;
204+
205+ if ( end > r . len ) {
206+ throw new Error ( 'truncated length-delimited chunk' ) ;
207+ }
208+
209+ // Include the varint header + body
210+ chunks . push ( binproto . subarray ( headerStart , end ) ) ;
211+ r . pos = end ;
212+ }
213+
214+ return chunks ;
215+ }
216+
217+ function stripLengthPrefix ( chunk : Uint8Array ) : Uint8Array {
218+ const reader = Reader . create ( chunk ) ;
219+ const length = reader . uint32 ( ) ;
220+ const bodyStart = reader . pos ;
221+ const bodyEnd = bodyStart + length ;
222+ if ( bodyEnd > reader . len ) {
223+ throw new Error ( 'truncated length-delimited chunk' ) ;
224+ }
225+ if ( bodyEnd !== reader . len ) {
226+ throw new Error ( 'unexpected trailing data after chunk body' ) ;
227+ }
228+ return chunk . subarray ( bodyStart , bodyEnd ) ;
229+ }
230+
// Split the canonical backup fixture into its length-delimited chunks: the
// first chunk is the BackupInfo header, everything after it is a frame.
const exampleBackupChunks = chunkLengthDelimited(exampleBackup);
if (exampleBackupChunks.length === 0) {
  throw new Error('expected at least one length-delimited chunk');
}
const [exampleBackupInfoChunk, ...exampleFrameChunks] = exampleBackupChunks;
// The exporter's start() takes raw BackupInfo bytes, so drop the varint prefix
// from the header chunk only; frames stay length-delimited.
const exampleBackupInfo = stripLengthPrefix(exampleBackupInfoChunk);
const exampleFrames = exampleFrameChunks;
238+
239+ function concatFrames ( chunks : ReadonlyArray < Uint8Array > ) : Uint8Array {
240+ if ( chunks . length === 0 ) {
241+ return new Uint8Array ( ) ;
242+ }
243+ if ( chunks . length === 1 ) {
244+ return new Uint8Array ( chunks [ 0 ] ) ;
245+ }
246+ return Buffer . concat ( chunks . map ( ( chunk ) => Buffer . from ( chunk ) ) ) ;
247+ }
248+
193249describe ( 'ComparableBackup' , ( ) => {
194250 describe ( 'exampleBackup' , ( ) => {
195251 it ( 'stringifies to the expected value' , async ( ) => {
@@ -211,6 +267,121 @@ describe('ComparableBackup', () => {
211267 } ) ;
212268} ) ;
213269
270+ describe ( 'BackupJsonExporter' , ( ) => {
271+ it ( 'streams pretty JSON for a canonical backup' , ( ) => {
272+ const backupInfo = exampleBackupInfo ;
273+ const frames = exampleFrames . slice ( ) ;
274+
275+ const { exporter, chunk : initialChunk } =
276+ MessageBackup . BackupJsonExporter . start ( backupInfo ) ;
277+
278+ // Stream the frames across multiple chunks to mirror the real exporter usage.
279+ const chunkGroups = [ frames . slice ( 0 , 2 ) , frames . slice ( 2 ) ] . filter (
280+ ( group ) => group . length > 0
281+ ) ;
282+ const exportedFrameChunks = chunkGroups . map ( ( group ) =>
283+ exporter . exportFrames ( concatFrames ( group ) )
284+ ) ;
285+
286+ const jsonText = [
287+ initialChunk ,
288+ ...exportedFrameChunks ,
289+ exporter . finish ( ) ,
290+ ] . join ( '' ) ;
291+ assert . isTrue ( jsonText . startsWith ( '[\n {' ) ) ;
292+ assert . isTrue ( jsonText . endsWith ( ']\n' ) ) ;
293+
294+ const parsed = JSON . parse ( jsonText ) as unknown ;
295+ assert . isArray ( parsed ) ;
296+
297+ const parsedArray = parsed as Array < unknown > ;
298+ assert . lengthOf ( parsedArray , frames . length + 1 ) ;
299+
300+ const [ backupInfoJson , firstFrame ] = parsedArray ;
301+ assert . isObject ( backupInfoJson ) ;
302+ assert . containsAllKeys ( backupInfoJson as Record < string , unknown > , [
303+ 'version' ,
304+ 'mediaRootBackupKey' ,
305+ ] ) ;
306+ assert . isObject ( firstFrame ) ;
307+ const firstFrameRecord = firstFrame as Record < string , unknown > ;
308+ assert . property ( firstFrameRecord , 'account' ) ;
309+ const accountValue = firstFrameRecord . account ;
310+ assert . isObject ( accountValue ) ;
311+ const accountRecord = accountValue as Record < string , unknown > ;
312+ assert . containsAllKeys ( accountRecord , [
313+ 'profileKey' ,
314+ 'username' ,
315+ 'accountSettings' ,
316+ ] ) ;
317+ } ) ;
318+
319+ it ( 'returns an empty chunk when no frames are provided' , ( ) => {
320+ const backupInfo = exampleBackupInfo ;
321+ const { exporter } = MessageBackup . BackupJsonExporter . start ( backupInfo ) ;
322+ assert . equal ( exporter . exportFrames ( new Uint8Array ( ) ) , '' ) ;
323+ } ) ;
324+
325+ it ( 'validates frames when requested' , ( ) => {
326+ const backupInfo = exampleBackupInfo ;
327+ const frames = exampleFrames . slice ( ) ;
328+ const { exporter } = MessageBackup . BackupJsonExporter . start ( backupInfo , {
329+ validate : true ,
330+ } ) ;
331+
332+ const groupedFrames = [ frames . slice ( 0 , 1 ) , frames . slice ( 1 ) ] ;
333+ for ( const group of groupedFrames ) {
334+ if ( group . length === 0 ) {
335+ continue ;
336+ }
337+ exporter . exportFrames ( concatFrames ( group ) ) ;
338+ }
339+
340+ exporter . finish ( ) ;
341+ } ) ;
342+
343+ it ( 'throws when validation fails' , ( ) => {
344+ const backupInfo = exampleBackupInfo ;
345+ const frames = exampleFrames . slice ( ) ;
346+ const { exporter, chunk } = MessageBackup . BackupJsonExporter . start (
347+ backupInfo ,
348+ {
349+ validate : true ,
350+ }
351+ ) ;
352+
353+ // baseline chunk should still be produced
354+ assert . isTrue ( chunk . startsWith ( '[\n' ) ) ;
355+
356+ const missingAccountChunk = concatFrames ( frames . slice ( 1 ) ) ;
357+ exporter . exportFrames ( missingAccountChunk ) ;
358+
359+ assert . throws ( ( ) => exporter . finish ( ) ) ;
360+ } ) ;
361+
362+ it ( 'can skip validation when explicitly disabled' , ( ) => {
363+ const backupInfo = exampleBackupInfo ;
364+ const frames = exampleFrames . slice ( ) ;
365+ const { exporter } = MessageBackup . BackupJsonExporter . start ( backupInfo , {
366+ validate : false ,
367+ } ) ;
368+
369+ const missingAccountChunk = concatFrames ( frames . slice ( 1 ) ) ;
370+ exporter . exportFrames ( missingAccountChunk ) ;
371+
372+ exporter . finish ( ) ;
373+ } ) ;
374+
375+ it ( 'still rejects malformed data even when validation is disabled' , ( ) => {
376+ const backupInfo = exampleBackupInfo ;
377+ const { exporter } = MessageBackup . BackupJsonExporter . start ( backupInfo , {
378+ validate : false ,
379+ } ) ;
380+
381+ assert . throws ( ( ) => exporter . exportFrames ( Uint8Array . of ( 0x02 , 0x01 ) ) ) ;
382+ } ) ;
383+ } ) ;
384+
214385describe ( 'OnlineBackupValidator' , ( ) => {
215386 it ( 'can read frames from a valid file' , ( ) => {
216387 // `Readable.read` normally returns `any`, because it supports settable encodings.
0 commit comments