@@ -1324,6 +1324,89 @@ def testCoreToken_SpecialFormat(mockGUI):
13241324 ]
13251325
13261326
@pytest.mark.core
def testCoreToken_TextIndent(mockGUI):
    """Test the handling of text indent in the Tokenizer class.

    Verifies that setFirstLineIndent() updates the tokenizer's indent
    state flags, and that A_IND_T is applied only to follow-on
    paragraphs — never to the first paragraph after a heading, unless
    the indent-first option is enabled.
    """
    project = NWProject()
    tokens = BareTokenizer(project)

    # No First Indent
    tokens.setFirstLineIndent(True, 1.0, False)

    assert tokens._noIndent is False
    assert tokens._firstIndent is True
    assert tokens._firstWidth == 1.0
    assert tokens._indentFirst is False

    # Page One
    # Two paragraphs in the same scene: only the second paragraph
    # receives the first-line indent attribute
    tokens._text = (
        "# Title One\n\n"
        "### Scene One\n\n"
        "First paragraph.\n\n"
        "Second paragraph.\n\n"
    )
    tokens.tokenizeText()
    assert tokens._tokens == [
        (Tokenizer.T_HEAD1, 1, "Title One", [], Tokenizer.A_NONE),
        (Tokenizer.T_HEAD3, 2, "Scene One", [], Tokenizer.A_NONE),
        (Tokenizer.T_TEXT, 2, "First paragraph.", [], Tokenizer.A_NONE),
        (Tokenizer.T_TEXT, 2, "Second paragraph.", [], Tokenizer.A_IND_T),
    ]
    assert tokens._noIndent is False

    # Page Two
    # New scene with only a synopsis: the no-indent state must carry
    # over to the next page since no body text followed the heading
    tokens._text = (
        "### Scene Two\n\n"
        "%Synopsis: Stuff happens.\n\n"
    )
    tokens.tokenizeText()
    assert tokens._tokens == [
        (Tokenizer.T_HEAD3, 1, "Scene Two", [], Tokenizer.A_NONE),
        (Tokenizer.T_SYNOPSIS, 1, "Stuff happens.", [], Tokenizer.A_NONE),
    ]
    assert tokens._noIndent is True

    # Page Three
    # Two paragraphs continuing the scene from the previous page: the
    # carried-over no-indent state suppresses the first indent again
    tokens._text = (
        "First paragraph.\n\n"
        "Second paragraph.\n\n"
    )
    tokens.tokenizeText()
    assert tokens._tokens == [
        (Tokenizer.T_TEXT, 0, "First paragraph.", [], Tokenizer.A_NONE),
        (Tokenizer.T_TEXT, 0, "Second paragraph.", [], Tokenizer.A_IND_T),
    ]
    assert tokens._noIndent is False

    # First Indent
    tokens.setFirstLineIndent(True, 1.0, True)

    assert tokens._noIndent is False
    assert tokens._firstIndent is True
    assert tokens._firstWidth == 1.0
    assert tokens._indentFirst is True

    # Page Four
    # Two paragraphs in the same scene: with indent-first enabled, both
    # paragraphs now receive the first-line indent attribute
    tokens._text = (
        "# Title One\n\n"
        "### Scene One\n\n"
        "First paragraph.\n\n"
        "Second paragraph.\n\n"
    )
    tokens.tokenizeText()
    assert tokens._tokens == [
        (Tokenizer.T_HEAD1, 1, "Title One", [], Tokenizer.A_NONE),
        (Tokenizer.T_HEAD3, 2, "Scene One", [], Tokenizer.A_NONE),
        (Tokenizer.T_TEXT, 2, "First paragraph.", [], Tokenizer.A_IND_T),
        (Tokenizer.T_TEXT, 2, "Second paragraph.", [], Tokenizer.A_IND_T),
    ]
    assert tokens._noIndent is False
1409+
13271410@pytest .mark .core
13281411def testCoreToken_ProcessHeaders (mockGUI ):
13291412 """Test the header and page parser of the Tokenizer class."""
0 commit comments