@@ -15,6 +15,7 @@ use crate::utils::{find_common_prefix, greatest_lower_bound};
1515
1616use bytes_str:: BytesStr ;
1717use debugid:: DebugId ;
18+ use rustc_hash:: FxHashSet ;
1819
1920/// Controls the `SourceMap::rewrite` behavior
2021///
@@ -940,6 +941,14 @@ impl SourceMap {
940941 Cow :: Borrowed ( & adjustment. tokens ) ,
941942 ) ;
942943 }
944+
    /// Perform a similar operation as [`Self::adjust_mappings`], but by rewriting the last
    /// sourcemap as opposed to the input source map:
    ///
    /// `transform.js.map.adjust_mappings_from_multiple([foo.js.map, bar.js.map])`
    ///
    /// Consumes `self` and the given adjustment maps; delegates to the
    /// free function of the same name in this module.
    pub fn adjust_mappings_from_multiple(self, adjustments: Vec<crate::lazy::SourceMap>) -> Self {
        adjust_mappings_from_multiple(self, adjustments)
    }
943952}
944953
945954pub ( crate ) fn adjust_mappings (
@@ -1076,6 +1085,207 @@ pub(crate) fn adjust_mappings(
10761085 new_tokens
10771086}
10781087
1088+ pub fn adjust_mappings_from_multiple (
1089+ mut this : SourceMap ,
1090+ mut input_maps : Vec < crate :: lazy:: SourceMap > ,
1091+ ) -> SourceMap {
1092+ // Helper struct that makes it easier to compare tokens by the start and end
1093+ // of the range they cover.
1094+ #[ derive( Debug , Clone , Copy ) ]
1095+ struct Range < ' a > {
1096+ start : ( u32 , u32 ) ,
1097+ end : ( u32 , u32 ) ,
1098+ value : & ' a RawToken ,
1099+ map_idx : u32 ,
1100+ }
1101+
1102+ /// Turns a list of tokens into a list of ranges, using the provided `key` function to determine the order of the tokens.
1103+ #[ allow( clippy:: ptr_arg) ]
1104+ fn create_ranges (
1105+ tokens : & mut [ ( u32 , RawToken ) ] ,
1106+ key : fn ( & RawToken ) -> ( u32 , u32 ) ,
1107+ ) -> Vec < Range < ' _ > > {
1108+ tokens. sort_unstable_by_key ( |( _, t) | key ( t) ) ;
1109+
1110+ let mut token_iter = tokens. iter ( ) . peekable ( ) ;
1111+ let mut ranges = Vec :: new ( ) ;
1112+
1113+ while let Some ( ( map_idx, t) ) = token_iter. next ( ) {
1114+ let start = key ( t) ;
1115+ let next_start = token_iter
1116+ . peek ( )
1117+ . map_or ( ( u32:: MAX , u32:: MAX ) , |( _, t) | key ( t) ) ;
1118+ // A token extends either to the start of the next token or the end of the line, whichever comes sooner
1119+ let end = std:: cmp:: min ( next_start, ( start. 0 , u32:: MAX ) ) ;
1120+ ranges. push ( Range {
1121+ start,
1122+ end,
1123+ value : t,
1124+ map_idx : * map_idx,
1125+ } ) ;
1126+ }
1127+
1128+ ranges
1129+ }
1130+
1131+ // Turn `self.tokens` and `adjustment.tokens` into vectors of ranges so we have easy access to
1132+ // both start and end.
1133+ // We want to compare `self` and `adjustment` tokens by line/column numbers in the "original source" file.
1134+ // These line/column numbers are the `dst_line/col` for
1135+ // the `self` tokens and `src_line/col` for the `adjustment` tokens.
1136+ let mut input_tokens = input_maps
1137+ . iter_mut ( )
1138+ . enumerate ( )
1139+ . flat_map ( |( i, map) | {
1140+ std:: mem:: take ( & mut map. tokens )
1141+ . into_iter ( )
1142+ . map ( move |t| ( ( i + 1 ) as u32 , t) )
1143+ } )
1144+ . collect :: < Vec < _ > > ( ) ;
1145+ let input_ranges = create_ranges ( & mut input_tokens[ ..] , |t| ( t. dst_line , t. dst_col ) ) ;
1146+ let mut self_tokens = std:: mem:: take ( & mut this. tokens )
1147+ . into_iter ( )
1148+ . map ( |t| ( 0u32 , t) )
1149+ . collect :: < Vec < _ > > ( ) ;
1150+ let self_ranges = create_ranges ( & mut self_tokens[ ..] , |t| ( t. src_line , t. src_col ) ) ;
1151+
1152+ let mut input_ranges_iter = input_ranges. iter ( ) ;
1153+ let mut input_range = match input_ranges_iter. next ( ) {
1154+ Some ( r) => Some ( r) ,
1155+ None => return this,
1156+ } ;
1157+
1158+ let covered_input_files = input_maps
1159+ . iter_mut ( )
1160+ . flat_map ( |m| m. file ( ) . cloned ( ) )
1161+ . collect :: < FxHashSet < _ > > ( ) ;
1162+
1163+ let mut new_map = SourceMapBuilder :: new ( None ) ;
1164+ let mut add_mapping = |input_maps : & mut Vec < crate :: lazy:: SourceMap < ' _ > > ,
1165+ map_idx : u32 ,
1166+ dst_line : u32 ,
1167+ dst_col : u32 ,
1168+ src_line : u32 ,
1169+ src_col : u32 ,
1170+ src_id : u32 ,
1171+ name_id : u32 ,
1172+ is_range : bool | {
1173+ let ( src_id, name) = if map_idx == 0 {
1174+ let src = this. get_source ( src_id) . cloned ( ) ;
1175+ (
1176+ src. map ( |src| {
1177+ let src_id = new_map. add_source ( src) ;
1178+ new_map. set_source_contents ( src_id, this. get_source_contents ( src_id) . cloned ( ) ) ;
1179+ src_id
1180+ } ) ,
1181+ this. get_name ( name_id) . cloned ( ) ,
1182+ )
1183+ } else {
1184+ let this = & mut input_maps[ ( map_idx - 1 ) as usize ] ;
1185+ let src = this. get_source ( src_id) . cloned ( ) ;
1186+ (
1187+ src. map ( |src| {
1188+ let src_id = new_map. add_source ( src) ;
1189+ new_map. set_source_contents ( src_id, this. get_source_contents ( src_id) . cloned ( ) ) ;
1190+ src_id
1191+ } ) ,
1192+ this. get_name ( name_id) . cloned ( ) ,
1193+ )
1194+ } ;
1195+ let name_id = name. map ( |name| new_map. add_source ( name) ) ;
1196+ new_map. add_raw (
1197+ dst_line, dst_col, src_line, src_col, src_id, name_id, is_range,
1198+ ) ;
1199+ } ;
1200+
1201+ // Iterate over `self_ranges` (sorted by `src_line/col`). For each such range, consider
1202+ // all `self_ranges` which overlap with it.
1203+ for & self_range in & self_ranges {
1204+ // The `self_range` offsets lines and columns by a certain amount. All `input_ranges`
1205+ // it covers will get the same offset.
1206+ let ( line_diff, col_diff) = (
1207+ self_range. value . dst_line as i32 - self_range. value . src_line as i32 ,
1208+ self_range. value . dst_col as i32 - self_range. value . src_col as i32 ,
1209+ ) ;
1210+
1211+ // Skip `input_ranges` that are entirely before the `_range`.
1212+ while input_range. is_some_and ( |input_range| input_range. end <= self_range. start ) {
1213+ input_range = input_ranges_iter. next ( ) ;
1214+ }
1215+ // At this point `self_range.end` > `input_range.start`
1216+
1217+ if input_range. is_none_or ( |input_range| {
1218+ self_range. start >= input_range. end
1219+ || this. get_source ( self_range. value . src_id ) . is_none_or ( |src| {
1220+ Some ( src) != input_maps[ ( input_range. map_idx - 1 ) as usize ] . file ( )
1221+ } )
1222+ } ) {
1223+ // No input range matches this range, keep the mapping though if this file isn't covered
1224+ // by any input sourcemap
1225+ if this
1226+ . get_source ( self_range. value . src_id )
1227+ . is_none_or ( |f| !covered_input_files. contains ( f) )
1228+ {
1229+ add_mapping (
1230+ & mut input_maps,
1231+ 0 ,
1232+ self_range. value . dst_line ,
1233+ self_range. value . dst_col ,
1234+ self_range. value . src_line ,
1235+ self_range. value . src_col ,
1236+ self_range. value . src_id ,
1237+ self_range. value . name_id ,
1238+ self_range. value . is_range ,
1239+ ) ;
1240+ }
1241+ } else {
1242+ let mut input_range_value = input_range. unwrap ( ) ;
1243+ // Iterate over `input_range` that fall at least partially within the `self_ranges`.
1244+ while input_range_value. start < self_range. end {
1245+ // If `input_range` started before `self_range`, cut off the token's start.
1246+ let ( dst_line, dst_col) = std:: cmp:: max ( input_range_value. start , self_range. start ) ;
1247+ add_mapping (
1248+ & mut input_maps,
1249+ input_range_value. map_idx ,
1250+ ( dst_line as i32 + line_diff) as u32 ,
1251+ ( dst_col as i32 + col_diff) as u32 ,
1252+ input_range_value. value . src_line ,
1253+ input_range_value. value . src_col ,
1254+ input_range_value. value . src_id ,
1255+ input_range_value. value . name_id ,
1256+ input_range_value. value . is_range ,
1257+ ) ;
1258+
1259+ if input_range_value. end >= self_range. end {
1260+ // There are surely no more `input_ranges` for this `self_range`.
1261+ // Break the loop without advancing the `input_range`.
1262+ break ;
1263+ } else {
1264+ // Advance the `input_range`.
1265+ match input_ranges_iter. next ( ) {
1266+ Some ( r) => {
1267+ input_range_value = r;
1268+ input_range = Some ( r) ;
1269+ }
1270+ None => {
1271+ input_range = None ;
1272+ break ;
1273+ }
1274+ }
1275+ }
1276+ }
1277+ }
1278+ }
1279+
1280+ let mut new_map = new_map. into_sourcemap ( ) ;
1281+
1282+ new_map
1283+ . tokens
1284+ . sort_unstable_by_key ( |t| ( t. dst_line , t. dst_col ) ) ;
1285+
1286+ new_map
1287+ }
1288+
10791289impl SourceMapIndex {
10801290 /// Creates a sourcemap index from a reader over a JSON stream in UTF-8
10811291 /// format. Optionally a "garbage header" as defined by the
0 commit comments