@@ -963,132 +963,6 @@ function getSelectorFieldName(selector, i) {
963963 return isStr ( br ) ? br : br ?. fieldName ;
964964}
965965
// Read and process all remaining data clusters from the RNTuple.
// Iterates cluster-by-cluster (the previous implementation recursed per
// cluster, growing an unbounded promise chain on files with many clusters),
// reading, decompressing and deserializing the pages of the selected fields,
// then feeding each entry to the selector.
// @param {Object} rntuple - RNTuple handle ($file, builder, fieldToColumns, _clusterData)
// @param {Object} selector - TSelector-like object (Begin/Process/Terminate protocol)
// @returns {Promise<boolean>} resolves false once all clusters are processed
// @throws {Error} on missing summaries, unknown fields, missing pages or unzip failures
async function readNextCluster(rntuple, selector) {
   const builder = rntuple.builder;

   if (!builder.clusterSummaries)
      throw new Error('No cluster summaries available - possibly incomplete file reading');

   // Collect only selected field names from the selector (stable across clusters)
   const selectedFields = [];
   for (let i = 0; i < selector.numBranches(); ++i)
      selectedFields.push(getSelectorFieldName(selector, i));

   while (true) {
      const clusterIndex = selector.currentCluster,
            clusterSummary = builder.clusterSummaries[clusterIndex];

      // Past the last cluster: normal termination (success if anything was read)
      if (!clusterSummary) {
         selector.Terminate(clusterIndex > 0);
         return false;
      }

      // Gather all pages for this cluster from selected fields only
      const pages = [];
      for (const fieldName of selectedFields) {
         const columns = rntuple.fieldToColumns[fieldName];
         if (!columns)
            throw new Error(`Selected field '${fieldName}' not found in RNTuple`);

         for (const colDesc of columns) {
            const colEntry = builder.pageLocations[clusterIndex]?.[colDesc.index];

            // When the data is missing or broken
            if (!colEntry || !colEntry.pages)
               throw new Error(`No pages for column ${colDesc.index} in cluster ${clusterIndex}`);

            for (const page of colEntry.pages)
               pages.push({ page, colDesc, fieldName });
         }
      }

      selector.currentCluster++;

      // Early exit if no pages to read (i.e., no selected fields matched)
      if (pages.length === 0) {
         selector.Terminate(false);
         return false;
      }

      // Build flat array of [offset, size, offset, size, ...] to read pages
      const dataToRead = pages.flatMap(p => [Number(p.page.locator.offset), Number(p.page.locator.size)]),
            blobsRaw = await rntuple.$file.readBuffer(dataToRead),
            blobs = Array.isArray(blobsRaw) ? blobsRaw : [blobsRaw];

      // Decompress every page in parallel; pages already matching their
      // expected on-disk size are treated as uncompressed
      const unzipBlobs = await Promise.all(blobs.map((blob, idx) => {
         const { page, colDesc } = pages[idx],
               colEntry = builder.pageLocations[clusterIndex][colDesc.index],
               numElements = Number(page.numElements);

         if (colEntry.compression === 0)
            return blob; // Uncompressed: use blob directly

         // Special handling for boolean fields (bits packed 8 per byte)
         if (colDesc.coltype === ENTupleColumnType.kBit) {
            const expectedBoolSize = Math.ceil(numElements / 8);
            if (blob.byteLength === expectedBoolSize)
               return blob;
            return R__unzip(blob, expectedBoolSize).catch(err => {
               throw new Error(`Failed to unzip boolean page ${idx}: ${err.message}`);
            });
         }

         const expectedSize = numElements * (colDesc.bitsOnStorage / 8);

         // If the blob is already the expected size, treat as uncompressed
         if (blob.byteLength === expectedSize)
            return blob;

         return R__unzip(blob, expectedSize)
            .then(result => result || blob) // fall back to the original blob
            .catch(err => {
               throw new Error(`Failed to unzip page ${idx}: ${err.message}`);
            });
      }));

      rntuple._clusterData = {}; // store deserialized data per column index

      for (let i = 0; i < unzipBlobs.length; ++i) {
         const blob = unzipBlobs[i];
         // Ensure blob is a DataView before deserializing
         if (!(blob instanceof DataView))
            throw new Error(`Invalid blob type for page ${i}: ${Object.prototype.toString.call(blob)}`);

         const colDesc = pages[i].colDesc,
               values = builder.deserializePage(blob, colDesc, pages[i].page);

         // Support multiple representations (e.g., string fields with offsets + payload)
         if (!rntuple._clusterData[colDesc.index])
            rntuple._clusterData[colDesc.index] = [];

         rntuple._clusterData[colDesc.index].push(values);
      }

      // Deliver every entry of this cluster to the selector
      const numEntries = clusterSummary.numEntries;
      for (let i = 0; i < numEntries; ++i) {
         for (let b = 0; b < selector.numBranches(); ++b) {
            const fieldName = getSelectorFieldName(selector, b),
                  tgtName = selector.nameOfBranch(b);

            selector.tgtobj[tgtName] = readEntry(rntuple, fieldName, clusterIndex, i);
         }
         selector.Process(selector.currentEntry++);
      }
      // loop continues with the next cluster
   }
}
1091-
1092966class ReaderItem {
1093967
1094968 constructor ( column , name ) {
@@ -1523,21 +1397,6 @@ async function rntupleProcess(rntuple, selector, args) {
15231397}
15241398
15251399
// TODO args can later be used to filter fields, limit entries, etc.
// Legacy driver: read header/footer, then stream clusters through the selector.
// Resolves with the selector once processing finished (or immediately on
// a failed header/footer read, after Terminate(false)).
function rntupleProcessOld(rntuple, selector, args) {
   selector.Begin();

   const startReading = res => {
      if (!res) {
         // Header/footer could not be read: report failure and stop
         selector.Terminate(false);
         return selector;
      }
      selector.currentCluster = 0;
      selector.currentEntry = 0;
      return readNextCluster(rntuple, selector, args);
   };

   return readHeaderFooter(rntuple)
      .then(startReading)
      .then(() => selector);
}
1540-
15411400class TDrawSelectorTuple extends TDrawSelector {
15421401
15431402 /** @summary Return total number of entries
0 commit comments