Fix a problem where a conversion to a smaller type could be improperly omitted when followed by a conversion to a larger type.

This also fixes a logic error (the first type in the conversion chain was taken from op^.q rather than op^.left^.q) that may have permitted other conversions to be improperly omitted in some cases.

The following program demonstrates the problem (it should print 211, i.e. (1234+1) mod 256; with the narrowing conversion improperly omitted it would print 1235):

#pragma optimize 1
#include <stdio.h>
int main(void)
{
        unsigned int i = 1234;
        long l = (unsigned char)(i+1);
        printf("%li\n", l);
}
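
The cast to unsigned char must reduce the value modulo 256 before it is widened to long, so (unsigned char)(1234+1) is 211; if the narrowing conversion is dropped, the widening sees 1235 instead. As a stand-alone illustration of the intended semantics (ordinary C, independent of the optimizer; the variable names are only for this example):

#include <stdio.h>
int main(void)
{
        unsigned int i = 1234;
        long narrowed = (unsigned char)(i+1);   /* truncated to low 8 bits: 1235 mod 256 = 211 */
        long widened  = (long)(i+1);            /* no truncation: 1235 */
        printf("%li %li\n", narrowed, widened); /* should print "211 1235" */
}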
Stephen Heumann 2016-10-10 20:02:21 -05:00
parent 0e82755334
commit cc75a9b12b
1 changed file with 4 additions and 1 deletion

@@ -1135,7 +1135,7 @@ case op^.opcode of {check for optimizations of this node}
          end {if}
       else if op^.left^.opcode = pc_cnv then begin
          doit := false;
-         firsttype.i := (op^.q & $00F0) >> 4;
+         firsttype.i := (op^.left^.q & $00F0) >> 4;
          if fromType.optype in [cgReal,cgDouble,cgComp,cgExtended] then begin
             if toType.optype in [cgReal,cgDouble,cgComp,cgExtended] then
                doit := true;
@@ -1152,6 +1152,9 @@ case op^.opcode of {check for optimizations of this node}
             if TypeSize(firstType.optype) = TypeSize(fromType.optype) then
                if TypeSize(firstType.optype) = TypeSize(toType.optype) then
                   doit := true;
+            if TypeSize(fromType.optype) < TypeSize(firstType.optype) then
+               if TypeSize(fromType.optype) < TypeSize(toType.optype) then
+                  doit := false;          {disable optimization in invalid cases}
             end; {else}
          if doit then begin
             op^.q := (op^.left^.q & $00F0) | (op^.q & $000F);
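
Read against the surrounding code (where, as the operand encoding suggests, the pair of pc_cnv nodes forms the chain firstType -> fromType -> toType), the added lines refuse to collapse the two conversions whenever the intermediate type is strictly smaller than both the original and the final type, because the truncation it performs would then be lost. A rough stand-alone model of just that disabling condition (plain C; may_fold and its size parameters are illustrative names, not ORCA/C identifiers):

#include <stdbool.h>
#include <stddef.h>

/* Illustrative model of the newly added check only, not of the full doit
   logic: a chain firstType -> fromType -> toType may not be collapsed into
   a single conversion when the intermediate type is strictly smaller than
   both its neighbours, since its truncation would be lost. */
static bool may_fold(size_t firstSize, size_t fromSize, size_t toSize)
{
        if (fromSize < firstSize && fromSize < toSize)
                return false;   /* e.g. unsigned int -> unsigned char -> long */
        return true;
}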