 // option. This file may not be copied, modified, or distributed
 // except according to those terms.
 extern crate cexpr;
-extern crate clang_sys;

 use std::collections::HashMap;
 use std::io::Write;
 use std::str::{self, FromStr};
-use std::{char, ffi, mem, ptr, slice};
+use std::char;
 use std::num::Wrapping;

 use cexpr::assert_full_parse;
 use cexpr::expr::{fn_macro_declaration, EvalResult, IdentifierParser};
 use cexpr::literal::CChar;
 use cexpr::token::Token;
-use clang_sys::*;
+use clang::{source::SourceRange, token::TokenKind, EntityKind};

 // main testing routine
 fn test_definition(
@@ -179,162 +178,79 @@ fn test_definition(
     }
 }

-// support code for the clang lexer
-unsafe fn clang_str_to_vec(s: CXString) -> Vec<u8> {
-    let vec = ffi::CStr::from_ptr(clang_getCString(s))
-        .to_bytes()
-        .to_owned();
-    clang_disposeString(s);
-    vec
-}
-
-#[allow(non_upper_case_globals)]
-unsafe fn token_clang_to_cexpr(tu: CXTranslationUnit, orig: &CXToken) -> Token {
+fn token_clang_to_cexpr(token: &clang::token::Token) -> Token {
     Token {
-        kind: match clang_getTokenKind(*orig) {
-            CXToken_Comment => cexpr::token::Kind::Comment,
-            CXToken_Identifier => cexpr::token::Kind::Identifier,
-            CXToken_Keyword => cexpr::token::Kind::Keyword,
-            CXToken_Literal => cexpr::token::Kind::Literal,
-            CXToken_Punctuation => cexpr::token::Kind::Punctuation,
-            _ => panic!("invalid token kind: {:?}", *orig),
+        kind: match token.get_kind() {
+            TokenKind::Comment => cexpr::token::Kind::Comment,
+            TokenKind::Identifier => cexpr::token::Kind::Identifier,
+            TokenKind::Keyword => cexpr::token::Kind::Keyword,
+            TokenKind::Literal => cexpr::token::Kind::Literal,
+            TokenKind::Punctuation => cexpr::token::Kind::Punctuation,
         },
-        raw: clang_str_to_vec(clang_getTokenSpelling(tu, *orig)).into_boxed_slice(),
+        raw: token.get_spelling().into_bytes().into_boxed_slice(),
     }
 }

-extern "C" fn visit_children_thunk<F>(
-    cur: CXCursor,
-    parent: CXCursor,
-    closure: CXClientData,
-) -> CXChildVisitResult
-where
-    F: FnMut(CXCursor, CXCursor) -> CXChildVisitResult,
-{
-    unsafe { (&mut *(closure as *mut F))(cur, parent) }
-}
-
-unsafe fn visit_children<F>(cursor: CXCursor, mut f: F)
-where
-    F: FnMut(CXCursor, CXCursor) -> CXChildVisitResult,
-{
-    clang_visitChildren(
-        cursor,
-        visit_children_thunk::<F> as _,
-        &mut f as *mut F as CXClientData,
-    );
+fn location_in_scope(r: &SourceRange) -> bool {
+    let start = r.get_start();
+    let location = start.get_spelling_location();
+    start.is_in_main_file() && !start.is_in_system_header() && location.file.is_some()
 }

-unsafe fn location_in_scope(r: CXSourceRange) -> bool {
-    let start = clang_getRangeStart(r);
-    let mut file = ptr::null_mut();
-    clang_getSpellingLocation(
-        start,
-        &mut file,
-        ptr::null_mut(),
-        ptr::null_mut(),
-        ptr::null_mut(),
-    );
-    clang_Location_isFromMainFile(start) != 0
-        && clang_Location_isInSystemHeader(start) == 0
-        && file != ptr::null_mut()
-}
-
-/// tokenize_range_adjust can be used to work around LLVM bug 9069
-/// https://bugs.llvm.org//show_bug.cgi?id=9069
 fn file_visit_macros<F: FnMut(Vec<u8>, Vec<Token>)>(
     file: &str,
-    tokenize_range_adjust: bool,
     mut visitor: F,
 ) {
-    unsafe {
-        let tu = {
-            let index = clang_createIndex(true as _, false as _);
-            let cfile = ffi::CString::new(file).unwrap();
-            let mut tu = mem::MaybeUninit::uninit();
-            assert!(
-                clang_parseTranslationUnit2(
-                    index,
-                    cfile.as_ptr(),
-                    [b"-std=c11\0".as_ptr() as *const ::std::os::raw::c_char].as_ptr(),
-                    1,
-                    ptr::null_mut(),
-                    0,
-                    CXTranslationUnit_DetailedPreprocessingRecord,
-                    &mut *tu.as_mut_ptr()
-                ) == CXError_Success,
-                "Failure reading test case {}",
-                file
-            );
-            tu.assume_init()
-        };
-        visit_children(clang_getTranslationUnitCursor(tu), |cur, _parent| {
-            if cur.kind == CXCursor_MacroDefinition {
-                let mut range = clang_getCursorExtent(cur);
-                if !location_in_scope(range) {
-                    return CXChildVisit_Continue;
-                }
-                range.end_int_data -= if tokenize_range_adjust { 1 } else { 0 };
-                let mut token_ptr = ptr::null_mut();
-                let mut num = 0;
-                clang_tokenize(tu, range, &mut token_ptr, &mut num);
-                if token_ptr != ptr::null_mut() {
-                    let tokens = slice::from_raw_parts(token_ptr, num as usize);
-                    let tokens: Vec<_> = tokens
-                        .iter()
-                        .filter_map(|t| {
-                            if clang_getTokenKind(*t) != CXToken_Comment {
-                                Some(token_clang_to_cexpr(tu, t))
-                            } else {
-                                None
-                            }
-                        })
-                        .collect();
-                    clang_disposeTokens(tu, token_ptr, num);
-                    visitor(clang_str_to_vec(clang_getCursorSpelling(cur)), tokens)
-                }
+    let clang = clang::Clang::new().unwrap();
+
+    let index = clang::Index::new(&clang, false, true);
+
+    let tu = index
+        .parser(file)
+        .arguments(&["-std=c11"])
+        .detailed_preprocessing_record(true)
+        .skip_function_bodies(true)
+        .parse()
+        .unwrap();
+
+    let entity = tu.get_entity();
+
+    entity.visit_children(|cur, _parent| {
+        if cur.get_kind() == EntityKind::MacroDefinition {
+            let range = cur.get_range().unwrap();
+            if !location_in_scope(&range) {
+                return clang::EntityVisitResult::Continue;
             }
-            CXChildVisit_Continue
-        });
-        clang_disposeTranslationUnit(tu);
-    };
+
+            let tokens: Vec<_> = range
+                .tokenize()
+                .into_iter()
+                .filter_map(|token| {
+                    if token.get_kind() == TokenKind::Comment {
+                        return None;
+                    }
+
+                    Some(token_clang_to_cexpr(&token))
+                })
+                .collect();
+
+            let display_name = cur.get_display_name().unwrap();
+            visitor(display_name.into_bytes(), tokens)
+        }
+
+        clang::EntityVisitResult::Continue
+    });
 }

 fn test_file(file: &str) -> bool {
     let mut idents = HashMap::new();
     let mut all_succeeded = true;
-    file_visit_macros(file, fix_bug_9069(), |ident, tokens| {
+    file_visit_macros(file, |ident, tokens| {
         all_succeeded &= test_definition(ident, &tokens, &mut idents)
     });
     all_succeeded
 }

-fn fix_bug_9069() -> bool {
-    fn check_bug_9069() -> bool {
-        let mut token_sets = vec![];
-        file_visit_macros(
-            "tests/input/test_llvm_bug_9069.h",
-            false,
-            |ident, tokens| {
-                assert_eq!(&ident, b"A");
-                token_sets.push(tokens);
-            },
-        );
-        assert_eq!(token_sets.len(), 2);
-        token_sets[0] != token_sets[1]
-    }
-
-    use std::sync::atomic::{AtomicBool, Ordering};
-    use std::sync::Once;
-
-    static CHECK_FIX: Once = Once::new();
-    static FIX: AtomicBool = AtomicBool::new(false);
-
-    CHECK_FIX.call_once(|| FIX.store(check_bug_9069(), Ordering::SeqCst));
-
-    FIX.load(Ordering::SeqCst)
-}
-
 macro_rules! test_file {
     ($f:ident) => {
         #[test]