Quoting Richard Sandiford <[email protected]>:
Joern Rennecke <[email protected]> writes:
When the condition is not fulfilled, we want to keep the length from the previous iteration. Right, that's what I mean. So we need to make sure that the difference between the address of the current instruction and the address of the next instruction also doesn't change. The code as posted was: else { new_length = insn_current_length (insn); insn_current_address += new_length; } #ifdef ADJUST_INSN_LENGTH /* If needed, do any adjustment. */ tmp_length = new_length; ADJUST_INSN_LENGTH (insn, new_length); insn_current_address += (new_length - tmp_length); #endif if (new_length != insn_lengths[uid] && (new_length > insn_lengths[uid] || first_pass)) { insn_lengths[uid] = new_length; something_changed = 1; } which always leaves insn_current_address based off new_length, even if new_length is out of kilter with insn_lengths[uid].
Indeed. I have attached a patch which I believe addresses the issues you raised. Because making the length sticky is problematic when UIDs aren't unique, I have tested this patch on top of that patch: http://gcc.gnu.org/ml/gcc-patches/2012-10/msg01806.html regression tested on i686-pc-linux-gnu X cris-elf and i686-pc-linux-gnu X mipsel-elf.
2012-10-19 J"orn Rennecke <[email protected]> * final.c (shorten_branches): When optimizing, start with small length and increase from there, and don't decrease lengths. Index: final.c =================================================================== --- final.c (revision 192573) +++ final.c (working copy) @@ -1066,7 +1066,15 @@ shorten_branches (rtx first ATTRIBUTE_UN } #endif /* CASE_VECTOR_SHORTEN_MODE */ + /* When optimizing, we start assuming minimum length, and keep increasing + lengths as we find the need for this, till nothing changes. + When not optimizing, we start assuming maximum lengths, and + do a single pass to update the lengths. */ + bool increasing = optimize != 0; + /* Compute initial lengths, addresses, and varying flags for each insn. */ + int (*length_fun) (rtx) = increasing ? insn_min_length : insn_default_length; + for (insn_current_address = 0, insn = first; insn != 0; insn_current_address += insn_lengths[uid], insn = NEXT_INSN (insn)) @@ -1117,6 +1125,8 @@ shorten_branches (rtx first ATTRIBUTE_UN #else const_delay_slots = 0; #endif + int (*inner_length_fun) (rtx) + = const_delay_slots ? length_fun : insn_default_length; /* Inside a delay slot sequence, we do not do any branch shortening if the shortening could change the number of delay slots of the branch. */ @@ -1131,7 +1141,7 @@ shorten_branches (rtx first ATTRIBUTE_UN inner_length = (asm_insn_count (PATTERN (inner_insn)) * insn_default_length (inner_insn)); else - inner_length = insn_default_length (inner_insn); + inner_length = inner_length_fun (inner_insn); insn_lengths[inner_uid] = inner_length; if (const_delay_slots) @@ -1149,7 +1159,7 @@ shorten_branches (rtx first ATTRIBUTE_UN } else if (GET_CODE (body) != USE && GET_CODE (body) != CLOBBER) { - insn_lengths[uid] = insn_default_length (insn); + insn_lengths[uid] = length_fun (insn); varying_length[uid] = insn_variable_length_p (insn); } @@ -1165,6 +1175,7 @@ shorten_branches (rtx first ATTRIBUTE_UN get the current insn length. 
If it has changed, reflect the change. When nothing changes for a full pass, we are done. */ + bool first_pass ATTRIBUTE_UNUSED = true; while (something_changed) { something_changed = 0; @@ -1220,6 +1231,7 @@ shorten_branches (rtx first ATTRIBUTE_UN rtx prev; int rel_align = 0; addr_diff_vec_flags flags; + enum machine_mode vec_mode; /* Avoid automatic aggregate initialization. */ flags = ADDR_DIFF_VEC_FLAGS (body); @@ -1298,9 +1310,12 @@ shorten_branches (rtx first ATTRIBUTE_UN else max_addr += align_fuzz (max_lab, rel_lab, 0, 0); } - PUT_MODE (body, CASE_VECTOR_SHORTEN_MODE (min_addr - rel_addr, - max_addr - rel_addr, - body)); + vec_mode = CASE_VECTOR_SHORTEN_MODE (min_addr - rel_addr, + max_addr - rel_addr, body); + if (first_pass + || (GET_MODE_SIZE (vec_mode) + >= GET_MODE_SIZE (GET_MODE (body)))) + PUT_MODE (body, vec_mode); if (JUMP_TABLES_IN_TEXT_SECTION || readonly_data_section == text_section) { @@ -1362,10 +1377,15 @@ shorten_branches (rtx first ATTRIBUTE_UN if (inner_length != insn_lengths[inner_uid]) { - insn_lengths[inner_uid] = inner_length; - something_changed = 1; + if (!increasing || inner_length > insn_lengths[inner_uid]) + { + insn_lengths[inner_uid] = inner_length; + something_changed = 1; + } + else + inner_length = insn_lengths[inner_uid]; } - insn_current_address += insn_lengths[inner_uid]; + insn_current_address += inner_length; new_length += inner_length; } } @@ -1382,15 +1402,19 @@ shorten_branches (rtx first ATTRIBUTE_UN insn_current_address += (new_length - tmp_length); #endif - if (new_length != insn_lengths[uid]) + if (new_length != insn_lengths[uid] + && (!increasing || new_length > insn_lengths[uid])) { insn_lengths[uid] = new_length; something_changed = 1; } + else + insn_current_address += insn_lengths[uid] - new_length; } /* For a non-optimizing compile, do only a single pass. */ - if (!optimize) + if (!increasing) break; + first_pass = false; } free (varying_length);
