Mirror of https://github.com/byteworksinc/ORCA-C.git (synced 2025-01-04 17:31:49 +00:00)
Generate a string representation of tokens merged with ##.
This is necessary for correct behavior if such tokens are subsequently stringized with #. Previously, only the first half of the token would be produced.

Here is an example demonstrating the issue:

#define mkstr(a) # a
#define in_between(a) mkstr(a)
#define joinstr(a,b) in_between(a ## b)

#include <stdio.h>

int main(void) {
        puts(joinstr(123,456));
        puts(joinstr(abc,def));
        puts(joinstr(dou,ble));
        puts(joinstr(+,=));
        puts(joinstr(:,>));
}
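As a quick sketch of the intended behavior (this trace is not part of the original commit message, and assumes a conforming preprocessor), joinstr(dou,ble) should expand roughly as follows:

joinstr(dou,ble)
  -> in_between(dou ## ble)    /* ## pastes the unexpanded arguments */
  -> in_between(double)        /* the pasted token is rescanned      */
  -> mkstr(double)
  -> "double"                  /* # stringizes the whole token       */

So the program should print 123456, abcdef, double, +=, and :>. Before this fix, # saw only the first half of each pasted token, presumably yielding strings like "dou".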
parent 6cfe8cc886
commit fec7b57ec2
Scanner.pas (37 lines changed)
@@ -4742,6 +4742,42 @@ var
 end; {CharConstant}
 
 
+procedure ConcatenateTokenString(tPtr: tokenListRecordPtr);
+
+{ Concatenate the strings for the current token and the one    }
+{ represented by tPtr, and update tokenStart/tokenEnd to       }
+{ point to the new string.                                     }
+
+var
+   len: longint;                        {length of new token string}
+   srcPtr, destPtr: ptr;                {pointers for data copying}
+
+begin {ConcatenateTokenString}
+len := ord4(tokenEnd)-ord4(tokenStart)
+   +ord4(tPtr^.tokenEnd)-ord4(tPtr^.tokenStart)+1;
+if len <= maxint then begin
+   destPtr := GMalloc(ord(len));
+   srcPtr := tokenStart;
+   tokenStart := destPtr;
+   while srcPtr <> tokenEnd do begin
+      destPtr^ := srcPtr^;
+      destPtr := ptr(ord4(destPtr)+1);
+      srcPtr := ptr(ord4(srcPtr)+1);
+      end; {while}
+   srcPtr := tPtr^.tokenStart;
+   while srcPtr <> tPtr^.tokenEnd do begin
+      destPtr^ := srcPtr^;
+      destPtr := ptr(ord4(destPtr)+1);
+      srcPtr := ptr(ord4(srcPtr)+1);
+      end; {while}
+   destPtr^ := tPtr^.tokenEnd^;
+   tokenEnd := destPtr;
+   end {if}
+else
+   Error(90);
+end; {ConcatenateTokenString}
+
+
 begin {NextToken}
 if ifList = nil then                   {do pending EndInclude calls}
    while includeCount <> 0 do begin
@@ -4807,6 +4843,7 @@ if tokenList <> nil then begin {get a token put back by a macro}
    tPtr := tokenList;
    tToken := token;
    Merge(tToken, tPtr^.token);
+   ConcatenateTokenString(tPtr);
    tokenList := tPtr^.next;
    token := tToken;
    tokenExpandEnabled := true;
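For readers who do not follow ORCA/C's Pascal, here is a minimal C sketch of the technique ConcatenateTokenString applies (the names and signature below are hypothetical, not ORCA/C's): allocate storage large enough for both spellings, copy the first token's characters, then the second's, and keep the combined spelling so a later # can stringize the whole pasted token. The len <= maxint check in the Pascal guards against a combined spelling too long to represent, which is reported via Error(90).

/* Illustrative sketch only -- not ORCA/C source; all names are hypothetical. */
#include <stdio.h>
#include <stdlib.h>
#include <string.h>

/* Build the combined spelling of two pasted tokens in fresh storage.
   Returns NULL on allocation failure (the Pascal reports an error instead). */
static char *paste_spelling(const char *first, size_t first_len,
                            const char *second, size_t second_len)
{
    char *spelling = malloc(first_len + second_len + 1);
    if (spelling == NULL)
        return NULL;
    memcpy(spelling, first, first_len);                 /* first token's characters  */
    memcpy(spelling + first_len, second, second_len);   /* second token's characters */
    spelling[first_len + second_len] = '\0';
    return spelling;
}

int main(void)
{
    char *s = paste_spelling("dou", 3, "ble", 3);
    if (s != NULL) {
        puts(s);                                        /* prints "double" */
        free(s);
    }
    return 0;
}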
cc.notes (2 lines changed)
@@ -1784,6 +1784,8 @@ int foo(int[42]);
 
 182. #pragma path directives were not saved in .sym files. This could cause ORCA/C not to search the proper paths for include files that were not represented in the .sym file (e.g. because they were included after a function).
 
+183. The # preprocessor operator would not work correctly on tokens that had been produced by the ## preprocessor operator.
+
 -- Bugs from C 2.1.0 that have been fixed -----------------------------------
 
 1. In some situations, fread() reread the first 1K or so of the file.