@@ -558,7 +558,7 @@ class Toolbar {
558558 "schema": "https://example.com/folder",
559559 "version": "x.y.z",
560560 "in_language": "en",
561- "container": {(class table name): [{row 1 object}, {row 2 object}, ...], ...}
561+ "container": {(class table name): [{row 1 object}, {row 2 object}, ...], ...}
562562 }
563563 */
564564 async openJSONDataFile ( file ) {
@@ -621,27 +621,27 @@ class Toolbar {
621621 }
622622
623623 /**
624- * The JSON array of row objects sparse, so only keys with values are
625- * included. Search through a table's row objects to accumulate a full
626- * list of their keys. The col_index provides a ROUGH idea of which
627- * column to claim for a given key, though a sparse array may vary from
628- * row to row depending on required/optional fields.
624+ * JSON rows are sparse objects (only keys with values are included).
625+ * Collect all unique field names across all rows to build the header,
626+ * then convert each row-object to a value-array aligned to that header.
627+ * This makes JSON data compatible with the same tabular loading pipeline.
629628 */
630629 const header = [ ] ;
631- Object . entries ( data ) . forEach ( ( [ row_index , row ] ) => {
632- Object . entries ( row ) . forEach ( ( [ key , col_index ] ) => {
633- if ( ! ( header . includes ( key ) ) ) {
634- header . splice ( col_index , 0 , key ) ;
635- }
630+ data . forEach ( ( row ) => {
631+ Object . keys ( row ) . forEach ( ( key ) => {
632+ if ( ! header . includes ( key ) ) header . push ( key ) ;
636633 } ) ;
637634 } ) ;
638635
636+ // Convert each sparse row-object to a dense array of values.
637+ const data_matrix = data . map ( ( row ) => header . map ( ( field ) => row [ field ] ?? null ) ) ;
638+
639639 // JSON data SHOULD USE slot_names BUT some test files have slot_titles.
640- const matches = header . filter ( ( element , index ) => dh . slot_names . includes ( element ) || dh . slot_titles . includes ( element ) ) . length ;
641- // raw_data only contains templates that match incomming data tables.
640+ const matches = header . filter ( ( element ) => dh . slot_names . includes ( element ) || dh . slot_titles . includes ( element ) ) . length ;
641+ // raw_data only contains templates that match incoming data tables.
642642 raw_data [ template_name ] = {
643643 header : header ,
644- data : data ,
644+ data : data_matrix ,
645645 matches : matches ,
646646 table_name : dh . container_name
647647 }
@@ -752,12 +752,12 @@ class Toolbar {
752752 */
753753 Object . entries ( this . context . dhs ) . forEach ( ( [ template_name , dh ] ) => {
754754 let data = raw_data [ template_name ] ;
755+ if ( ! data ) return ;
755756 // 29, 34 total, 173 fields total.
756757 //console.log("MATCHES",data.matches,data.header.length,dh.slots.length);
757- if ( data && ( data . matches != data . header . length ) && ( data . matches != dh . slots . length ) ) {
758- // Have user try to correct any missing mappings in a class/template.
759- fieldMapper . appendFieldMappingModal ( dh ) ;
760- }
758+ const needs_modal = ( data . matches != data . header . length ) && ( data . matches != dh . slots . length ) ;
759+ // Always compute slot_to_data_col_matches; only build modal HTML when needed.
760+ fieldMapper . appendFieldMappingModal ( dh , needs_modal ) ;
761761 } ) ;
762762
763763 if ( fieldMapper . field_mapping_html ) {
@@ -891,7 +891,7 @@ class Toolbar {
891891 const schema = this . context . template . default . schema ;
892892 const container_table_list = Object . keys ( schema . classes ?. Container ?. attributes || { } ) . join ( ';' ) ;
893893 const xlsx_table = [
894- [ "id" , "name" , "version" , "in_language" , "container" ] ,
894+ [ "id" , "name" , "version" , "in_language" , "container" ] ,
895895 [ schema . id , schema . name , schema . version , schema . in_language , container_table_list ]
896896 ] ;
897897 const ws = XlsxUtils . aoa_to_sheet ( xlsx_table ) ;