@@ -176,6 +176,7 @@
 end

 local pos = self:GetPosition()
+local parent = self:GetToken()
 self:Parse()
 self:SetPosition(pos)
 local tk = self:ConsumeToken()
@@ -186,6 +187,7 @@
 paren_depth = paren_depth - 1
 end

+tk.parent = parent
 table.insert(tokens, tk)
 end
 end
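The new `parent` field records, for every token collected into a macro argument, the token it originated from before any expansion, so later passes can get back to the call-site text. A minimal sketch of the idea, using hypothetical token tables rather than this parser's real API:

-- Hypothetical sketch: an expanded token keeps a reference to the
-- call-site token it was derived from.
local source   = { type = "letter", value = "A" }                      -- as written at the call site
local expanded = { type = "letter", value = "value", parent = source } -- after macro expansion

-- Anything that needs the original spelling follows the parent link:
print((expanded.parent or expanded).value) --> A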
@@ -207,9 +209,21 @@
 self:NewToken("space", " "),
 }
 end
+
+if tk.parent then
+if tk.parent.whitespace then
+tk.parent.whitespace = {
+self:NewToken("space", " "),
+}
+end
+end
 end

 if i == 1 then if tokens[1] then tokens[1].whitespace = nil end end
+
+if i == 1 then
+if tokens[1] and tokens[1].parent then tokens[1].parent.whitespace = nil end
+end
 end

 return args
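Since those parent tokens are what stringification will later print, the whitespace normalization applied to the expanded argument tokens is mirrored onto them: interior whitespace collapses to a single space token and the leading whitespace of the first token is dropped. Roughly, with hypothetical helper and field names:

-- Sketch: collapse whitespace on argument tokens and on their linked parents,
-- assuming each token may carry a `whitespace` list and a `parent` reference.
local function normalize_whitespace(tokens, new_space_token)
	for i, tk in ipairs(tokens) do
		for _, t in ipairs({ tk, tk.parent }) do
			if t.whitespace then
				if i == 1 then
					t.whitespace = nil -- no space in front of the first argument token
				else
					t.whitespace = { new_space_token("space", " ") }
				end
			end
		end
	end
end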
@@ -357,25 +371,18 @@
 return false
 end

-
 local tk_left = self:GetToken()
 local pos = self:GetPosition()
 self:Advance(3)
-if self:GetDefinition() then
-self:Parse()
-end
+
+if self:GetDefinition() then self:Parse() end
+
 self:SetPosition(pos)
-
-
-self:AddTokens(
-{
-self:NewToken(
-"letter",
-tk_left.value .. self:GetToken(3).value
-),
-}
-)
+self:AddTokens({
+self:NewToken("letter", tk_left.value .. self:GetToken(3).value),
+})
 self:Advance(1)
+
 for i = 1, 4 do
 self:RemoveToken(self:GetPosition())
 end
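For `##`, the rewritten block builds one `letter` token whose value is the left operand's value concatenated with the value of the token three positions ahead, inserts it, and then removes the tokens that made up the paste expression. At the value level the paste is plain string concatenation; an illustration with hypothetical token tables:

-- Illustration only: pasting two identifier tokens by value.
local left   = { type = "letter", value = "pre_" }
local right  = { type = "letter", value = "fix" }
local pasted = { type = "letter", value = left.value .. right.value }
assert(pasted.value == "pre_fix") -- matches the PREFIX(fix) test further down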
@@ -390,8 +397,14 @@

 if not def then return false end

+local original_tokens = {}
+
+for i, v in pairs(def.tokens) do
+original_tokens[i] = v.parent
+end
+
 self:RemoveToken(self:GetPosition())
-local tk = self:NewToken("string", "\"" .. self:ToString(def.tokens) .. "\"")
+local tk = self:NewToken("string", "\"" .. self:ToString(original_tokens) .. "\"")
 self:RemoveToken(self:GetPosition())
 self:AddTokens({tk})
 self:Advance(#def.tokens)
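With the parent links available, `#` stringification serializes the argument as it was written at the call site instead of its expanded replacement, which is what lets `STR(A)` produce `"A"` even when `A` is itself a macro; the changed lines collect `v.parent` for each recorded argument token. A sketch of the approach with hypothetical names:

-- Sketch: stringify from the original call-site tokens rather than
-- the expanded ones, assuming a to_string(tokens) serializer exists.
local function stringify_argument(arg_tokens, to_string)
	local originals = {}
	for i, tk in ipairs(arg_tokens) do
		originals[i] = tk.parent or tk -- fall back if no parent was recorded
	end
	return "\"" .. to_string(originals) .. "\""
end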
@@ -603,18 +616,14 @@ do -- tests
 assert_find("#define STRINGIFY(a) #a \n >STRINGIFY(1)<", "\"1\"")
 assert_find("#define STRINGIFY(a) #a \n >STRINGIFY((a,b,c))<", "\"(a,b,c)\"")
 assert_find("#define STR(x) #x \n >STR(a + b)<", "\"a + b\"")
-
-if false then
-assert_find("#define A value \n #define STR(x) #x \n >STR(A)<", "\"A\"")
-end
+assert_find("#define A value \n #define STR(x) #x \n >STR(A)<", "\"A\"")
 end
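The last assertion above was previously disabled behind `if false then`; with stringification now reading the pre-expansion parent tokens, stringifying an argument that is itself a macro yields its name rather than its expansion, so the test can be enabled.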

 do -- token concatenation (##)
 assert_find(
 "#define PREFIX(x) pre_##x \n #define SUFFIX(x) x##_post \n >PREFIX(fix) SUFFIX(fix)<",
 "pre_fix fix_post"
 )
-
 assert_find("#define F(a, b) a##b \n >F(1,2)<", "12")
 assert_find("#define EMPTY_ARG(a, b) a##b \n >EMPTY_ARG(test, )<", "test")
 assert_find("#define EMPTY_ARG(a, b) a##b \n >EMPTY_ARG(, test)<", "test")