
[10/13] Eliminate last_basic_block macro.

Message ID 1386341499-19603-11-git-send-email-dmalcolm@redhat.com
State New

Commit Message

David Malcolm Dec. 6, 2013, 2:51 p.m. UTC
	* basic-block.h (last_basic_block): Eliminate macro.

	* asan.c (transform_statements): Eliminate use of last_basic_block
	in favor of last_basic_block_for_fn, in order to make use of cfun
	explicit.
	* bb-reorder.c (copy_bb, reorder_basic_blocks): Likewise.
	* bt-load.c (compute_defs_uses_and_gen, compute_kill, compute_out,
	link_btr_uses, build_btr_def_use_webs, migrate_btr_defs): Likewise.
	* cfg.c (compact_blocks): Likewise.
	* cfganal.c (mark_dfs_back_edges,
	control_dependences::control_dependences, post_order_compute,
	pre_and_rev_post_order_compute_fn, dfs_enumerate_from, compute_idf,
	single_pred_before_succ_order): Likewise.
	* cfgbuild.c (make_edges): Likewise.
	* cfgexpand.c (add_scope_conflicts, gimple_expand_cfg): Likewise.
	* cfghooks.c (verify_flow_info): Likewise.
	* cfgloop.c (verify_loop_structure): Likewise.
	* cfgloopanal.c (just_once_each_iteration_p,
	mark_irreducible_loops): Likewise.
	* cfgloopmanip.c (fix_bb_placements, remove_path,
	update_dominators_in_loop): Likewise.
	* cfgrtl.c (create_basic_block_structure, rtl_create_basic_block,
	break_superblocks, rtl_flow_call_edges_add): Likewise.
	* config/epiphany/resolve-sw-modes.c (resolve_sw_modes): Likewise.
	* config/frv/frv.c (frv_optimize_membar): Likewise.
	* config/mips/mips.c (r10k_insert_cache_barriers): Likewise.
	* config/spu/spu.c (spu_machine_dependent_reorg): Likewise.
	* cprop.c (compute_local_properties, find_implicit_sets,
	bypass_conditional_jumps, one_cprop_pass): Likewise.
	* cse.c (cse_main): Likewise.
	* df-core.c (rest_of_handle_df_initialize, df_worklist_dataflow,
	df_analyze, df_grow_bb_info, df_compact_blocks): Likewise.
	* df-problems.c (df_lr_verify_solution_start,
	df_live_verify_solution_start, df_md_local_compute): Likewise.
	* dominance.c (init_dom_info, calc_dfs_tree_nonrec, calc_dfs_tree,
	calc_idoms): Likewise.
	* domwalk.c (dom_walker::walk): Likewise.
	* dse.c (dse_step0, dse_step3): Likewise.
	* function.c (epilogue_done): Likewise.
	* gcse.c (alloc_gcse_mem, compute_local_properties,
	prune_insertions_deletions, compute_pre_data,
	pre_expr_reaches_here_p, one_pre_gcse_pass,
	compute_code_hoist_vbeinout, should_hoist_expr_to_dom, hoist_code,
	one_code_hoisting_pass): Likewise.
	* graph.c (draw_cfg_nodes_no_loops): Likewise.
	* graphite-sese-to-poly.c (build_scop_bbs): Likewise.
	* haifa-sched.c (unlink_bb_notes): Likewise.
	* ipa-split.c (execute_split_functions): Likewise.
	* ira-build.c (create_loop_tree_nodes,
	remove_unnecessary_regions): Likewise.
	* ira-emit.c (ira_emit): Likewise.
	* ira.c (find_moveable_pseudos, ira): Likewise.
	* lcm.c (compute_antinout_edge, compute_laterin,
	compute_insert_delete, pre_edge_lcm, compute_available,
	compute_nearerout, compute_rev_insert_delete,
	pre_edge_rev_lcm): Likewise.
	* loop-unroll.c (opt_info_start_duplication,
	apply_opt_in_copies): Likewise.
	* lower-subreg.c (decompose_multiword_subregs): Likewise.
	* lra-lives.c (lra_create_live_ranges): Likewise.
	* lra.c (lra): Likewise.
	* mode-switching.c (optimize_mode_switching): Likewise.
	* recog.c (split_all_insns): Likewise.
	* regcprop.c (copyprop_hardreg_forward): Likewise.
	* regrename.c (regrename_analyze): Likewise.
	* reload1.c (reload): Likewise.
	* resource.c (init_resource_info): Likewise.
	* sched-rgn.c (haifa_find_rgns, extend_rgns, compute_trg_info,
	realloc_bb_state_array, schedule_region, extend_regions): Likewise.
	* sel-sched-ir.c (sel_extend_global_bb_info, extend_region_bb_info,
	recompute_rev_top_order, sel_init_pipelining,
	make_regions_from_the_rest): Likewise.
	* store-motion.c (remove_reachable_equiv_notes, build_store_vectors):
	Likewise.
	* tracer.c (tail_duplicate): Likewise.
	* trans-mem.c (tm_region_init, get_bb_regions_instrumented): Likewise.
	* tree-cfg.c (create_bb, cleanup_dead_labels, gimple_dump_cfg,
	gimple_flow_call_edges_add): Likewise.
	* tree-cfgcleanup.c (split_bbs_on_noreturn_calls,
	cleanup_tree_cfg_1): Likewise.
	* tree-complex.c (tree_lower_complex): Likewise.
	* tree-inline.c (copy_cfg_body): Likewise.
	* tree-into-ssa.c (mark_phi_for_rewrite, rewrite_into_ssa,
	prepare_def_site_for, update_ssa): Likewise.
	* tree-ssa-dce.c (tree_dce_init, perform_tree_ssa_dce): Likewise.
	* tree-ssa-dom.c (record_edge_info): Likewise.
	* tree-ssa-live.c (new_tree_live_info, live_worklist): Likewise.
	* tree-ssa-loop-im.c (fill_always_executed_in_1): Likewise.
	* tree-ssa-loop-manip.c (copy_phi_node_args,
	gimple_duplicate_loop_to_header_edge): Likewise.
	* tree-ssa-pre.c (compute_antic): Likewise.
	* tree-ssa-propagate.c (ssa_prop_init): Likewise.
	* tree-ssa-reassoc.c (init_reassoc): Likewise.
	* tree-ssa-sccvn.c (init_scc_vn): Likewise.
	* tree-ssa-tail-merge.c (init_worklist): Likewise.
	* tree-ssa-uncprop.c (associate_equivalences_with_edges): Likewise.
	* tree-stdarg.c (reachable_at_most_once): Likewise.
	* tree-vrp.c (find_assert_locations): Likewise.
	* var-tracking.c (vt_find_locations): Likewise.
---
 gcc/asan.c                             |   2 +-
 gcc/basic-block.h                      |   3 -
 gcc/bb-reorder.c                       |   7 ++-
 gcc/bt-load.c                          |  30 +++++-----
 gcc/cfg.c                              |   4 +-
 gcc/cfganal.c                          |  25 ++++----
 gcc/cfgbuild.c                         |   2 +-
 gcc/cfgexpand.c                        |   4 +-
 gcc/cfghooks.c                         |   4 +-
 gcc/cfgloop.c                          |   4 +-
 gcc/cfgloopanal.c                      |   4 +-
 gcc/cfgloopmanip.c                     |   6 +-
 gcc/cfgrtl.c                           |  13 +++--
 gcc/config/epiphany/resolve-sw-modes.c |   4 +-
 gcc/config/frv/frv.c                   |   4 +-
 gcc/config/mips/mips.c                 |   4 +-
 gcc/config/spu/spu.c                   |   2 +-
 gcc/cprop.c                            |  15 ++---
 gcc/cse.c                              |   4 +-
 gcc/df-core.c                          |  26 +++++----
 gcc/df-problems.c                      |  10 ++--
 gcc/dominance.c                        |  15 +++--
 gcc/domwalk.c                          |   2 +-
 gcc/dse.c                              |   4 +-
 gcc/function.c                         |   2 +-
 gcc/gcse.c                             |  32 +++++-----
 gcc/graph.c                            |   2 +-
 gcc/graphite-sese-to-poly.c            |   2 +-
 gcc/haifa-sched.c                      |   2 +-
 gcc/ipa-split.c                        |   2 +-
 gcc/ira-build.c                        |  13 +++--
 gcc/ira-emit.c                         |  10 ++--
 gcc/ira.c                              |  12 ++--
 gcc/lcm.c                              | 103 +++++++++++++++++++--------------
 gcc/loop-unroll.c                      |  10 +++-
 gcc/lower-subreg.c                     |   2 +-
 gcc/lra-lives.c                        |   2 +-
 gcc/lra.c                              |   2 +-
 gcc/mode-switching.c                   |  17 +++---
 gcc/recog.c                            |   2 +-
 gcc/regcprop.c                         |   4 +-
 gcc/regrename.c                        |   2 +-
 gcc/reload1.c                          |   2 +-
 gcc/resource.c                         |   2 +-
 gcc/sched-rgn.c                        |  43 ++++++++------
 gcc/sel-sched-ir.c                     |  17 +++---
 gcc/store-motion.c                     |  32 +++++-----
 gcc/tracer.c                           |   6 +-
 gcc/trans-mem.c                        |   4 +-
 gcc/tree-cfg.c                         |  19 +++---
 gcc/tree-cfgcleanup.c                  |   4 +-
 gcc/tree-complex.c                     |   2 +-
 gcc/tree-inline.c                      |   4 +-
 gcc/tree-into-ssa.c                    |  19 +++---
 gcc/tree-ssa-dce.c                     |   7 ++-
 gcc/tree-ssa-dom.c                     |   2 +-
 gcc/tree-ssa-live.c                    |  10 ++--
 gcc/tree-ssa-loop-im.c                 |   2 +-
 gcc/tree-ssa-loop-manip.c              |   8 +--
 gcc/tree-ssa-pre.c                     |   4 +-
 gcc/tree-ssa-propagate.c               |   4 +-
 gcc/tree-ssa-reassoc.c                 |   2 +-
 gcc/tree-ssa-sccvn.c                   |   2 +-
 gcc/tree-ssa-tail-merge.c              |   2 +-
 gcc/tree-ssa-uncprop.c                 |   2 +-
 gcc/tree-stdarg.c                      |   2 +-
 gcc/tree-vrp.c                         |  10 ++--
 gcc/var-tracking.c                     |   8 +--
 68 files changed, 349 insertions(+), 289 deletions(-)

Comments

Steven Bosscher Dec. 6, 2013, 3:57 p.m. UTC | #1
On Fri, Dec 6, 2013 at 3:51 PM, David Malcolm wrote:
>         * asan.c (transform_statements): Eliminate use of last_basic_block
>         in favor of last_basic_block_for_fn, in order to make use of cfun
>         explicit.

Can we please make all this _for_fn go away?

Ciao!
Steven
Oleg Endo Dec. 6, 2013, 6:56 p.m. UTC | #2
On Fri, 2013-12-06 at 16:57 +0100, Steven Bosscher wrote:
> On Fri, Dec 6, 2013 at 3:51 PM, David Malcolm wrote:
> >         * asan.c (transform_statements): Eliminate use of last_basic_block
> >         in favor of last_basic_block_for_fn, in order to make use of cfun
> >         explicit.
> 
> Can we please make all this _for_fn go away?
> 

Sorry if this has been discussed before... but why not add member
functions to 'function' instead of freestanding macros/functions that
take a function* as a first argument?  This would also make it easier to
eliminate the "_for_fn" (freestanding function/macro name clashes etc) I
think.
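
For illustration, a rough sketch of that shape with stand-in types
(purely hypothetical, nothing from the actual tree):

  struct control_flow_graph { int x_last_basic_block; };

  struct function
  {
    control_flow_graph *cfg;

    /* Member accessor instead of a freestanding last_basic_block_for_fn.  */
    int last_basic_block () const { return cfg->x_last_basic_block; }
  };

  /* Call sites would then read e.g.
       sbitmap visited = sbitmap_alloc (cfun->last_basic_block ());  */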

Cheers,
Oleg
Richard Biener Dec. 6, 2013, 8:27 p.m. UTC | #3
Oleg Endo <oleg.endo@t-online.de> wrote:
>On Fri, 2013-12-06 at 16:57 +0100, Steven Bosscher wrote:
>> On Fri, Dec 6, 2013 at 3:51 PM, David Malcolm wrote:
>> >         * asan.c (transform_statements): Eliminate use of
>last_basic_block
>> >         in favor of last_basic_block_for_fn, in order to make use
>of cfun
>> >         explicit.
>> 
>> Can we please make all this _for_fn go away?
>> 
>
>Sorry if this has been discussed before... but why not add member
>functions to 'function' instead of freestanding macros/functions that
>take a function* as a first argument?  This would also make it easier
>to
>eliminate the "_for_fn" (freestanding function/macro name clashes etc)
>I
>think.

Both can be done, but these patches make cfun uses explicit which was the goal while following existing practice.

Richard.

>Cheers,
>Oleg
David Malcolm Dec. 9, 2013, 9:47 p.m. UTC | #4
On Fri, 2013-12-06 at 21:27 +0100, Richard Biener wrote:
> Oleg Endo <oleg.endo@t-online.de> wrote:
> >On Fri, 2013-12-06 at 16:57 +0100, Steven Bosscher wrote:
> >> On Fri, Dec 6, 2013 at 3:51 PM, David Malcolm wrote:
> >> >         * asan.c (transform_statements): Eliminate use of
> >last_basic_block
> >> >         in favor of last_basic_block_for_fn, in order to make use
> >of cfun
> >> >         explicit.
> >> 
> >> Can we please make all this _for_fn go away?
> >> 
> >
> >Sorry if this has been discussed before... but why not adding member
> >functions to 'function' instead of freestanding macros/functions that
> >take a function* as a first argument?  This would also make it easier
> >to
> >eliminate the "_for_fn" (freestanding function/macro name clashes etc)
> >I
> >think.
> 
> Both can be done, but these patches make cfun uses explicit which was the goal while following existing practice.

Yes, longer-term I'd prefer member functions.  The approach I posted
gives identical results to the status quo after a trip through
the preprocessor, so is somewhat lower-risk than introducing inlinable
member functions. (and in any case, all of the repeated implicit
dereferencing of "cfun->" seems inefficient to me, but not something I
plan to touch in stage3)
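
Concretely, the two spellings preprocess to the same tokens, so the
generated code is unchanged.  A rough sketch of why, using the macro
removed by this patch and the _for_fn definition assumed from earlier
in the series:

  /* Removed by this patch (basic-block.h): the use of cfun is hidden.  */
  #define last_basic_block            (cfun->cfg->x_last_basic_block)

  /* Added earlier in the series (assumed here): the function is explicit.  */
  #define last_basic_block_for_fn(FN) ((FN)->cfg->x_last_basic_block)

  /* After preprocessing, both spellings yield the same expression:
       last_basic_block               ==>  cfun->cfg->x_last_basic_block
       last_basic_block_for_fn (cfun) ==>  cfun->cfg->x_last_basic_block  */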

I've gone ahead and committed the patch series to trunk, test-building
before each commit, and fixing up patches 11 and 12 for the issues noted
by Oleg (the config/sh files had .cc suffixes, and hence didn't show up
in my grepping; I updated my grep accordingly).

There are still 4 macros in function.h that implicitly use cfun, which
it's less clear to me how to remove:
        #define current_function_funcdef_no
        #define current_loops
        #define dom_computed
        #define n_bbs_in_dom_tree

plus various other cfun-using macros elsewhere in headers...

FWIW, here are the svn revisions of what I committed, vs the numbering
of the patches in the emails:
  0001: r205816
  0002: r205817
  0003: r205818
  0004: r205820
  0005: r205821
  0006: r205822
  0007: r205823
  0008: r205824
  0009: r205825
  0010: r205826
  0011: r205828
  0012: r205829
  0013: r205830

Hope this is all sane
Dave
Oleg Endo Dec. 9, 2013, 9:52 p.m. UTC | #5
On Mon, 2013-12-09 at 16:47 -0500, David Malcolm wrote:
> Yes, longer-term I'd prefer member functions.  The approach I posted
> approach gives identical results to the status quo after a trip through
> the preprocessor, so is somewhat lower-risk than introducing inlinable
> member functions. (and in any case, all of the repeated implicit
> dereferencing of "cfun->" seems inefficient to me, but not something I
> plan to touch in stage3)

Understandable.

> I've gone ahead and committed the patch series to trunk, test-building
> before each commit, and fixing up patches 11 and 12 for the issues noted
> by Oleg (the config/sh files had .cc suffixes, and hence didn't show up
> in my grepping; I updated my grep accordingly).

Thanks!

Cheers,
Oleg
Richard Biener Dec. 10, 2013, 9:43 a.m. UTC | #6
On Mon, 9 Dec 2013, David Malcolm wrote:

> On Fri, 2013-12-06 at 21:27 +0100, Richard Biener wrote:
> > Oleg Endo <oleg.endo@t-online.de> wrote:
> > >On Fri, 2013-12-06 at 16:57 +0100, Steven Bosscher wrote:
> > >> On Fri, Dec 6, 2013 at 3:51 PM, David Malcolm wrote:
> > >> >         * asan.c (transform_statements): Eliminate use of
> > >last_basic_block
> > >> >         in favor of last_basic_block_for_fn, in order to make use
> > >of cfun
> > >> >         explicit.
> > >> 
> > >> Can we please make all this _for_fn go away?
> > >> 
> > >
> > >Sorry if this has been discussed before... but why not add member
> > >functions to 'function' instead of freestanding macros/functions that
> > >take a function* as a first argument?  This would also make it easier
> > >to
> > >eliminate the "_for_fn" (freestanding function/macro name clashes etc)
> > >I
> > >think.
> > 
> > Both can be done, but these patches make cfun uses explicit which was the goal while following existing practice.
> 
> Yes, longer-term I'd prefer member functions.  The approach I posted
> gives identical results to the status quo after a trip through
> the preprocessor, so is somewhat lower-risk than introducing inlinable
> member functions. (and in any case, all of the repeated implicit
> dereferencing of "cfun->" seems inefficient to me, but not something I
> plan to touch in stage3)
> 
> I've gone ahead and committed the patch series to trunk, test-building
> before each commit, and fixing up patches 11 and 12 for the issues noted
> by Oleg (the config/sh files had .cc suffixes, and hence didn't show up
> in my grepping; I updated my grep accordingly).
> 
> There are still 4 macros in function.h that implicitly use cfun, which
> it's less clear to me how to remove:
>         #define current_function_funcdef_no

fundef_no_for_fn (cfun)

>         #define current_loops

loops_for_fn (cfun)

>         #define dom_computed

less obvious - we have DOM info computed only for a single function
throughout the compilation (so rooting DOM info from struct function
is somewhat odd).  I wouldn't touch it unless the DOM API gets
a _fn API variant (if that is desired at all).

>         #define n_bbs_in_dom_tree

Likewise.

As for using more C++, I was thinking about providing context by
means of adding accessors to gimple_opt_pass that automagically
provide the 'cfun' argument.  That of course means making
passes really classes derived from gimple_opt_pass.  The idea
is that from being a gimple_opt_pass you know you are working
with a single function (and the pass instance can have a pointer
to it, to get rid of 'cfun') and that there should be a convenient
API to use from such context where the function you work with
is implicit.

Of course that would overload gimple_opt_pass with various API
wrappers (or we'd use multiple inheritance and API objects).
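
For illustration, a rough sketch of that shape (stand-in names only;
it assumes the usual GCC declarations of struct function and
last_basic_block_for_fn from basic-block.h):

  class context_aware_gimple_opt_pass          /* stand-in name */
  {
  public:
    explicit context_aware_gimple_opt_pass (struct function *fn)
      : m_fn (fn) {}

  protected:
    /* Wrappers like this make the function argument implicit, so a
       pass body never has to spell out cfun.  */
    int last_basic_block () const { return last_basic_block_for_fn (m_fn); }

    struct function *m_fn;
  };

  /* A derived pass's execute () could then write
       sbitmap visited = sbitmap_alloc (last_basic_block ());
     without ever naming cfun.  */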

Richard.

> plus various other cfun-using macros elsewhere in headers...
> 
> FWIW, here are the svn revisions of what I committed, vs the numbering
> of the patches in the emails:
>   0001: r205816
>   0002: r205817
>   0003: r205818
>   0004: r205820
>   0005: r205821
>   0006: r205822
>   0007: r205823
>   0008: r205824
>   0009: r205825
>   0010: r205826
>   0011: r205828
>   0012: r205829
>   0013: r205830
> 
> Hope this is all sane
> Dave
> 
> 
>

Patch

diff --git a/gcc/asan.c b/gcc/asan.c
index 74140d6..09c0667 100644
--- a/gcc/asan.c
+++ b/gcc/asan.c
@@ -2041,7 +2041,7 @@  transform_statements (void)
 {
   basic_block bb, last_bb = NULL;
   gimple_stmt_iterator i;
-  int saved_last_basic_block = last_basic_block;
+  int saved_last_basic_block = last_basic_block_for_fn (cfun);
 
   FOR_EACH_BB (bb)
     {
diff --git a/gcc/basic-block.h b/gcc/basic-block.h
index d000a43..174b650 100644
--- a/gcc/basic-block.h
+++ b/gcc/basic-block.h
@@ -326,9 +326,6 @@  struct GTY(()) control_flow_graph {
 #define SET_BASIC_BLOCK_FOR_FN(FN,N,BB) \
   ((*basic_block_info_for_fn (FN))[(N)] = (BB))
 
-/* Defines for textual backward source compatibility.  */
-#define last_basic_block	(cfun->cfg->x_last_basic_block)
-
 /* For iterating over basic blocks.  */
 #define FOR_BB_BETWEEN(BB, FROM, TO, DIR) \
   for (BB = FROM; BB != TO; BB = BB->DIR)
diff --git a/gcc/bb-reorder.c b/gcc/bb-reorder.c
index fc7b5b7..363af2d 100644
--- a/gcc/bb-reorder.c
+++ b/gcc/bb-reorder.c
@@ -826,12 +826,13 @@  copy_bb (basic_block old_bb, edge e, basic_block bb, int trace)
 	     "Duplicated bb %d (created bb %d)\n",
 	     old_bb->index, new_bb->index);
 
-  if (new_bb->index >= array_size || last_basic_block > array_size)
+  if (new_bb->index >= array_size
+      || last_basic_block_for_fn (cfun) > array_size)
     {
       int i;
       int new_size;
 
-      new_size = MAX (last_basic_block, new_bb->index + 1);
+      new_size = MAX (last_basic_block_for_fn (cfun), new_bb->index + 1);
       new_size = GET_ARRAY_SIZE (new_size);
       bbd = XRESIZEVEC (bbro_basic_block_data, bbd, new_size);
       for (i = array_size; i < new_size; i++)
@@ -2234,7 +2235,7 @@  reorder_basic_blocks (void)
     uncond_jump_length = get_uncond_jump_length ();
 
   /* We need to know some information for each basic block.  */
-  array_size = GET_ARRAY_SIZE (last_basic_block);
+  array_size = GET_ARRAY_SIZE (last_basic_block_for_fn (cfun));
   bbd = XNEWVEC (bbro_basic_block_data, array_size);
   for (i = 0; i < array_size; i++)
     {
diff --git a/gcc/bt-load.c b/gcc/bt-load.c
index bbd0dd8..83b3eba 100644
--- a/gcc/bt-load.c
+++ b/gcc/bt-load.c
@@ -457,8 +457,8 @@  compute_defs_uses_and_gen (fibheap_t all_btr_defs, btr_def *def_array,
   btr_def_group all_btr_def_groups = NULL;
   defs_uses_info info;
 
-  bitmap_vector_clear (bb_gen, last_basic_block);
-  for (i = NUM_FIXED_BLOCKS; i < last_basic_block; i++)
+  bitmap_vector_clear (bb_gen, last_basic_block_for_fn (cfun));
+  for (i = NUM_FIXED_BLOCKS; i < last_basic_block_for_fn (cfun); i++)
     {
       basic_block bb = BASIC_BLOCK_FOR_FN (cfun, i);
       int reg;
@@ -618,8 +618,8 @@  compute_kill (sbitmap *bb_kill, sbitmap *btr_defset,
 
   /* For each basic block, form the set BB_KILL - the set
      of definitions that the block kills.  */
-  bitmap_vector_clear (bb_kill, last_basic_block);
-  for (i = NUM_FIXED_BLOCKS; i < last_basic_block; i++)
+  bitmap_vector_clear (bb_kill, last_basic_block_for_fn (cfun));
+  for (i = NUM_FIXED_BLOCKS; i < last_basic_block_for_fn (cfun); i++)
     {
       for (regno = first_btr; regno <= last_btr; regno++)
 	if (TEST_HARD_REG_BIT (all_btrs, regno)
@@ -642,14 +642,14 @@  compute_out (sbitmap *bb_out, sbitmap *bb_gen, sbitmap *bb_kill, int max_uid)
   int changed;
   sbitmap bb_in = sbitmap_alloc (max_uid);
 
-  for (i = NUM_FIXED_BLOCKS; i < last_basic_block; i++)
+  for (i = NUM_FIXED_BLOCKS; i < last_basic_block_for_fn (cfun); i++)
     bitmap_copy (bb_out[i], bb_gen[i]);
 
   changed = 1;
   while (changed)
     {
       changed = 0;
-      for (i = NUM_FIXED_BLOCKS; i < last_basic_block; i++)
+      for (i = NUM_FIXED_BLOCKS; i < last_basic_block_for_fn (cfun); i++)
 	{
 	  bitmap_union_of_preds (bb_in, bb_out, BASIC_BLOCK_FOR_FN (cfun, i));
 	  changed |= bitmap_ior_and_compl (bb_out[i], bb_gen[i],
@@ -668,7 +668,7 @@  link_btr_uses (btr_def *def_array, btr_user *use_array, sbitmap *bb_out,
 
   /* Link uses to the uses lists of all of their reaching defs.
      Count up the number of reaching defs of each use.  */
-  for (i = NUM_FIXED_BLOCKS; i < last_basic_block; i++)
+  for (i = NUM_FIXED_BLOCKS; i < last_basic_block_for_fn (cfun); i++)
     {
       basic_block bb = BASIC_BLOCK_FOR_FN (cfun, i);
       rtx insn;
@@ -780,8 +780,10 @@  build_btr_def_use_webs (fibheap_t all_btr_defs)
   btr_user *use_array   = XCNEWVEC (btr_user, max_uid);
   sbitmap *btr_defset   = sbitmap_vector_alloc (
 			   (last_btr - first_btr) + 1, max_uid);
-  sbitmap *bb_gen      = sbitmap_vector_alloc (last_basic_block, max_uid);
-  HARD_REG_SET *btrs_written = XCNEWVEC (HARD_REG_SET, last_basic_block);
+  sbitmap *bb_gen = sbitmap_vector_alloc (last_basic_block_for_fn (cfun),
+					  max_uid);
+  HARD_REG_SET *btrs_written = XCNEWVEC (HARD_REG_SET,
+					 last_basic_block_for_fn (cfun));
   sbitmap *bb_kill;
   sbitmap *bb_out;
 
@@ -790,11 +792,11 @@  build_btr_def_use_webs (fibheap_t all_btr_defs)
   compute_defs_uses_and_gen (all_btr_defs, def_array, use_array, btr_defset,
 			     bb_gen, btrs_written);
 
-  bb_kill = sbitmap_vector_alloc (last_basic_block, max_uid);
+  bb_kill = sbitmap_vector_alloc (last_basic_block_for_fn (cfun), max_uid);
   compute_kill (bb_kill, btr_defset, btrs_written);
   free (btrs_written);
 
-  bb_out = sbitmap_vector_alloc (last_basic_block, max_uid);
+  bb_out = sbitmap_vector_alloc (last_basic_block_for_fn (cfun), max_uid);
   compute_out (bb_out, bb_gen, bb_kill, max_uid);
 
   sbitmap_vector_free (bb_gen);
@@ -1405,7 +1407,7 @@  migrate_btr_defs (enum reg_class btr_class, int allow_callee_save)
     {
       int i;
 
-      for (i = NUM_FIXED_BLOCKS; i < last_basic_block; i++)
+      for (i = NUM_FIXED_BLOCKS; i < last_basic_block_for_fn (cfun); i++)
 	{
 	  basic_block bb = BASIC_BLOCK_FOR_FN (cfun, i);
 	  fprintf (dump_file,
@@ -1428,8 +1430,8 @@  migrate_btr_defs (enum reg_class btr_class, int allow_callee_save)
 	  first_btr = reg;
       }
 
-  btrs_live = XCNEWVEC (HARD_REG_SET, last_basic_block);
-  btrs_live_at_end = XCNEWVEC (HARD_REG_SET, last_basic_block);
+  btrs_live = XCNEWVEC (HARD_REG_SET, last_basic_block_for_fn (cfun));
+  btrs_live_at_end = XCNEWVEC (HARD_REG_SET, last_basic_block_for_fn (cfun));
 
   build_btr_def_use_webs (all_btr_defs);
 
diff --git a/gcc/cfg.c b/gcc/cfg.c
index 3337372..6c3181d 100644
--- a/gcc/cfg.c
+++ b/gcc/cfg.c
@@ -171,10 +171,10 @@  compact_blocks (void)
 	}
       gcc_assert (i == n_basic_blocks_for_fn (cfun));
 
-      for (; i < last_basic_block; i++)
+      for (; i < last_basic_block_for_fn (cfun); i++)
 	SET_BASIC_BLOCK_FOR_FN (cfun, i, NULL);
     }
-  last_basic_block = n_basic_blocks_for_fn (cfun);
+  last_basic_block_for_fn (cfun) = n_basic_blocks_for_fn (cfun);
 }
 
 /* Remove block B from the basic block array.  */
diff --git a/gcc/cfganal.c b/gcc/cfganal.c
index ad5928a..9900d82 100644
--- a/gcc/cfganal.c
+++ b/gcc/cfganal.c
@@ -72,15 +72,15 @@  mark_dfs_back_edges (void)
   bool found = false;
 
   /* Allocate the preorder and postorder number arrays.  */
-  pre = XCNEWVEC (int, last_basic_block);
-  post = XCNEWVEC (int, last_basic_block);
+  pre = XCNEWVEC (int, last_basic_block_for_fn (cfun));
+  post = XCNEWVEC (int, last_basic_block_for_fn (cfun));
 
   /* Allocate stack for back-tracking up CFG.  */
   stack = XNEWVEC (edge_iterator, n_basic_blocks_for_fn (cfun) + 1);
   sp = 0;
 
   /* Allocate bitmap to track nodes that have been visited.  */
-  visited = sbitmap_alloc (last_basic_block);
+  visited = sbitmap_alloc (last_basic_block_for_fn (cfun));
 
   /* None of the nodes in the CFG have been visited yet.  */
   bitmap_clear (visited);
@@ -428,8 +428,8 @@  control_dependences::control_dependences (struct edge_list *edges)
   : m_el (edges)
 {
   timevar_push (TV_CONTROL_DEPENDENCES);
-  control_dependence_map.create (last_basic_block);
-  for (int i = 0; i < last_basic_block; ++i)
+  control_dependence_map.create (last_basic_block_for_fn (cfun));
+  for (int i = 0; i < last_basic_block_for_fn (cfun); ++i)
     control_dependence_map.quick_push (BITMAP_ALLOC (NULL));
   for (int i = 0; i < NUM_EDGES (m_el); ++i)
     find_control_dependence (i);
@@ -622,7 +622,7 @@  post_order_compute (int *post_order, bool include_entry_exit,
   sp = 0;
 
   /* Allocate bitmap to track nodes that have been visited.  */
-  visited = sbitmap_alloc (last_basic_block);
+  visited = sbitmap_alloc (last_basic_block_for_fn (cfun));
 
   /* None of the nodes in the CFG have been visited yet.  */
   bitmap_clear (visited);
@@ -778,7 +778,7 @@  inverted_post_order_compute (int *post_order)
   sp = 0;
 
   /* Allocate bitmap to track nodes that have been visited.  */
-  visited = sbitmap_alloc (last_basic_block);
+  visited = sbitmap_alloc (last_basic_block_for_fn (cfun));
 
   /* None of the nodes in the CFG have been visited yet.  */
   bitmap_clear (visited);
@@ -931,7 +931,7 @@  pre_and_rev_post_order_compute_fn (struct function *fn,
     rev_post_order_num -= NUM_FIXED_BLOCKS;
 
   /* Allocate bitmap to track nodes that have been visited.  */
-  visited = sbitmap_alloc (last_basic_block);
+  visited = sbitmap_alloc (last_basic_block_for_fn (cfun));
 
   /* None of the nodes in the CFG have been visited yet.  */
   bitmap_clear (visited);
@@ -1062,7 +1062,7 @@  flow_dfs_compute_reverse_init (depth_first_search_ds data)
   data->sp = 0;
 
   /* Allocate bitmap to track nodes that have been visited.  */
-  data->visited_blocks = sbitmap_alloc (last_basic_block);
+  data->visited_blocks = sbitmap_alloc (last_basic_block_for_fn (cfun));
 
   /* None of the nodes in the CFG have been visited yet.  */
   bitmap_clear (data->visited_blocks);
@@ -1147,7 +1147,7 @@  dfs_enumerate_from (basic_block bb, int reverse,
 #define VISITED_P(BB) (bitmap_bit_p (visited, (BB)->index))
 
   /* Resize the VISITED sbitmap if necessary.  */
-  size = last_basic_block;
+  size = last_basic_block_for_fn (cfun);
   if (size < 10)
     size = 10;
 
@@ -1313,7 +1313,8 @@  compute_idf (bitmap def_blocks, bitmap_head *dfs)
 	 form, the basic blocks where new and/or old names are defined
 	 may have disappeared by CFG cleanup calls.  In this case,
 	 we may pull a non-existing block from the work stack.  */
-      gcc_checking_assert (bb_index < (unsigned) last_basic_block);
+      gcc_checking_assert (bb_index
+			   < (unsigned) last_basic_block_for_fn (cfun));
 
       EXECUTE_IF_AND_COMPL_IN_BITMAP (&dfs[bb_index], phi_insertion_points,
 	                              0, i, bi)
@@ -1508,7 +1509,7 @@  single_pred_before_succ_order (void)
   basic_block *order = XNEWVEC (basic_block, n_basic_blocks_for_fn (cfun));
   unsigned n = n_basic_blocks_for_fn (cfun) - NUM_FIXED_BLOCKS;
   unsigned np, i;
-  sbitmap visited = sbitmap_alloc (last_basic_block);
+  sbitmap visited = sbitmap_alloc (last_basic_block_for_fn (cfun));
 
 #define MARK_VISITED(BB) (bitmap_set_bit (visited, (BB)->index))
 #define VISITED_P(BB) (bitmap_bit_p (visited, (BB)->index))
diff --git a/gcc/cfgbuild.c b/gcc/cfgbuild.c
index a0c2c66..f73bbc5 100644
--- a/gcc/cfgbuild.c
+++ b/gcc/cfgbuild.c
@@ -209,7 +209,7 @@  make_edges (basic_block min, basic_block max, int update_p)
      nearly fully-connected CFGs.  In that case we spend a significant
      amount of time searching the edge lists for duplicates.  */
   if (forced_labels || cfun->cfg->max_jumptable_ents > 100)
-    edge_cache = sbitmap_alloc (last_basic_block);
+    edge_cache = sbitmap_alloc (last_basic_block_for_fn (cfun));
 
   /* By nature of the way these get numbered, ENTRY_BLOCK_PTR->next_bb block
      is always the entry.  */
diff --git a/gcc/cfgexpand.c b/gcc/cfgexpand.c
index d98ac5b..014f78b 100644
--- a/gcc/cfgexpand.c
+++ b/gcc/cfgexpand.c
@@ -501,7 +501,7 @@  add_scope_conflicts (void)
   FOR_ALL_BB (bb)
     bb->aux = BITMAP_ALLOC (&stack_var_bitmap_obstack);
 
-  rpo = XNEWVEC (int, last_basic_block);
+  rpo = XNEWVEC (int, last_basic_block_for_fn (cfun));
   n_bbs = pre_and_rev_post_order_compute (NULL, rpo, false);
 
   changed = true;
@@ -5809,7 +5809,7 @@  gimple_expand_cfg (void)
 	}
     }
 
-  blocks = sbitmap_alloc (last_basic_block);
+  blocks = sbitmap_alloc (last_basic_block_for_fn (cfun));
   bitmap_ones (blocks);
   find_many_sub_basic_blocks (blocks);
   sbitmap_free (blocks);
diff --git a/gcc/cfghooks.c b/gcc/cfghooks.c
index ab1c15f..2400965 100644
--- a/gcc/cfghooks.c
+++ b/gcc/cfghooks.c
@@ -98,8 +98,8 @@  verify_flow_info (void)
   basic_block *last_visited;
 
   timevar_push (TV_CFG_VERIFY);
-  last_visited = XCNEWVEC (basic_block, last_basic_block);
-  edge_checksum = XCNEWVEC (size_t, last_basic_block);
+  last_visited = XCNEWVEC (basic_block, last_basic_block_for_fn (cfun));
+  edge_checksum = XCNEWVEC (size_t, last_basic_block_for_fn (cfun));
 
   /* Check bb chain & numbers.  */
   last_bb_seen = ENTRY_BLOCK_PTR_FOR_FN (cfun);
diff --git a/gcc/cfgloop.c b/gcc/cfgloop.c
index 6245605..9d28950 100644
--- a/gcc/cfgloop.c
+++ b/gcc/cfgloop.c
@@ -1364,7 +1364,7 @@  verify_loop_structure (void)
       }
 
   /* Check the recorded loop father and sizes of loops.  */
-  visited = sbitmap_alloc (last_basic_block);
+  visited = sbitmap_alloc (last_basic_block_for_fn (cfun));
   bitmap_clear (visited);
   bbs = XNEWVEC (basic_block, n_basic_blocks_for_fn (cfun));
   FOR_EACH_LOOP (loop, LI_FROM_INNERMOST)
@@ -1478,7 +1478,7 @@  verify_loop_structure (void)
   if (loops_state_satisfies_p (LOOPS_HAVE_MARKED_IRREDUCIBLE_REGIONS))
     {
       /* Record old info.  */
-      irreds = sbitmap_alloc (last_basic_block);
+      irreds = sbitmap_alloc (last_basic_block_for_fn (cfun));
       FOR_EACH_BB (bb)
 	{
 	  edge_iterator ei;
diff --git a/gcc/cfgloopanal.c b/gcc/cfgloopanal.c
index 2260f4b..84b61c1 100644
--- a/gcc/cfgloopanal.c
+++ b/gcc/cfgloopanal.c
@@ -64,7 +64,7 @@  just_once_each_iteration_p (const struct loop *loop, const_basic_block bb)
 
    LOOPS is the loop tree.  */
 
-#define LOOP_REPR(LOOP) ((LOOP)->num + last_basic_block)
+#define LOOP_REPR(LOOP) ((LOOP)->num + last_basic_block_for_fn (cfun))
 #define BB_REPR(BB) ((BB)->index + 1)
 
 bool
@@ -94,7 +94,7 @@  mark_irreducible_loops (void)
     }
 
   /* Create the edge lists.  */
-  g = new_graph (last_basic_block + num);
+  g = new_graph (last_basic_block_for_fn (cfun) + num);
 
   FOR_BB_BETWEEN (act, ENTRY_BLOCK_PTR_FOR_FN (cfun),
 		  EXIT_BLOCK_PTR_FOR_FN (cfun), next_bb)
diff --git a/gcc/cfgloopmanip.c b/gcc/cfgloopmanip.c
index 7a6b201..2bb8b6a 100644
--- a/gcc/cfgloopmanip.c
+++ b/gcc/cfgloopmanip.c
@@ -204,7 +204,7 @@  fix_bb_placements (basic_block from,
       || from == base_loop->header)
     return;
 
-  in_queue = sbitmap_alloc (last_basic_block);
+  in_queue = sbitmap_alloc (last_basic_block_for_fn (cfun));
   bitmap_clear (in_queue);
   bitmap_set_bit (in_queue, from->index);
   /* Prevent us from going out of the base_loop.  */
@@ -348,7 +348,7 @@  remove_path (edge e)
 
   n_bord_bbs = 0;
   bord_bbs = XNEWVEC (basic_block, n_basic_blocks_for_fn (cfun));
-  seen = sbitmap_alloc (last_basic_block);
+  seen = sbitmap_alloc (last_basic_block_for_fn (cfun));
   bitmap_clear (seen);
 
   /* Find "border" hexes -- i.e. those with predecessor in removed path.  */
@@ -623,7 +623,7 @@  update_dominators_in_loop (struct loop *loop)
   basic_block *body;
   unsigned i;
 
-  seen = sbitmap_alloc (last_basic_block);
+  seen = sbitmap_alloc (last_basic_block_for_fn (cfun));
   bitmap_clear (seen);
   body = get_loop_body (loop);
 
diff --git a/gcc/cfgrtl.c b/gcc/cfgrtl.c
index 34fe4f3..5dc52a6 100644
--- a/gcc/cfgrtl.c
+++ b/gcc/cfgrtl.c
@@ -328,7 +328,7 @@  create_basic_block_structure (rtx head, rtx end, rtx bb_note, basic_block after)
 
   BB_HEAD (bb) = head;
   BB_END (bb) = end;
-  bb->index = last_basic_block++;
+  bb->index = last_basic_block_for_fn (cfun)++;
   bb->flags = BB_NEW | BB_RTL;
   link_block (bb, after);
   SET_BASIC_BLOCK_FOR_FN (cfun, bb->index, bb);
@@ -355,9 +355,12 @@  rtl_create_basic_block (void *headp, void *endp, basic_block after)
   basic_block bb;
 
   /* Grow the basic block array if needed.  */
-  if ((size_t) last_basic_block >= basic_block_info_for_fn (cfun)->length ())
+  if ((size_t) last_basic_block_for_fn (cfun)
+      >= basic_block_info_for_fn (cfun)->length ())
     {
-      size_t new_size = last_basic_block + (last_basic_block + 3) / 4;
+      size_t new_size =
+	(last_basic_block_for_fn (cfun)
+	 + (last_basic_block_for_fn (cfun) + 3) / 4);
       vec_safe_grow_cleared (basic_block_info_for_fn (cfun), new_size);
     }
 
@@ -4252,7 +4255,7 @@  break_superblocks (void)
   bool need = false;
   basic_block bb;
 
-  superblocks = sbitmap_alloc (last_basic_block);
+  superblocks = sbitmap_alloc (last_basic_block_for_fn (cfun));
   bitmap_clear (superblocks);
 
   FOR_EACH_BB (bb)
@@ -4778,7 +4781,7 @@  rtl_flow_call_edges_add (sbitmap blocks)
 {
   int i;
   int blocks_split = 0;
-  int last_bb = last_basic_block;
+  int last_bb = last_basic_block_for_fn (cfun);
   bool check_last_block = false;
 
   if (n_basic_blocks_for_fn (cfun) == NUM_FIXED_BLOCKS)
diff --git a/gcc/config/epiphany/resolve-sw-modes.c b/gcc/config/epiphany/resolve-sw-modes.c
index b43b4d9..a780254 100644
--- a/gcc/config/epiphany/resolve-sw-modes.c
+++ b/gcc/config/epiphany/resolve-sw-modes.c
@@ -61,8 +61,8 @@  resolve_sw_modes (void)
   bool need_commit = false;
   bool finalize_fp_sets = (MACHINE_FUNCTION (cfun)->unknown_mode_sets == 0);
 
-  todo.create (last_basic_block);
-  pushed = sbitmap_alloc (last_basic_block);
+  todo.create (last_basic_block_for_fn (cfun));
+  pushed = sbitmap_alloc (last_basic_block_for_fn (cfun));
   bitmap_clear (pushed);
   if (!finalize_fp_sets)
     {
diff --git a/gcc/config/frv/frv.c b/gcc/config/frv/frv.c
index a5eb2c1..a5aeb75 100644
--- a/gcc/config/frv/frv.c
+++ b/gcc/config/frv/frv.c
@@ -8067,8 +8067,8 @@  frv_optimize_membar (void)
   rtx *last_membar;
 
   compute_bb_for_insn ();
-  first_io = XCNEWVEC (struct frv_io, last_basic_block);
-  last_membar = XCNEWVEC (rtx, last_basic_block);
+  first_io = XCNEWVEC (struct frv_io, last_basic_block_for_fn (cfun));
+  last_membar = XCNEWVEC (rtx, last_basic_block_for_fn (cfun));
 
   FOR_EACH_BB (bb)
     frv_optimize_membar_local (bb, &first_io[bb->index],
diff --git a/gcc/config/mips/mips.c b/gcc/config/mips/mips.c
index 7903443..f19478c 100644
--- a/gcc/config/mips/mips.c
+++ b/gcc/config/mips/mips.c
@@ -15071,11 +15071,11 @@  r10k_insert_cache_barriers (void)
 
   /* Bit X of PROTECTED_BBS is set if the last operation in basic block
      X is protected by a cache barrier.  */
-  protected_bbs = sbitmap_alloc (last_basic_block);
+  protected_bbs = sbitmap_alloc (last_basic_block_for_fn (cfun));
   bitmap_clear (protected_bbs);
 
   /* Iterate over the basic blocks in reverse post-order.  */
-  rev_post_order = XNEWVEC (int, last_basic_block);
+  rev_post_order = XNEWVEC (int, last_basic_block_for_fn (cfun));
   n = pre_and_rev_post_order_compute (NULL, rev_post_order, false);
   for (i = 0; i < n; i++)
     {
diff --git a/gcc/config/spu/spu.c b/gcc/config/spu/spu.c
index a658ee6..1a9895e 100644
--- a/gcc/config/spu/spu.c
+++ b/gcc/config/spu/spu.c
@@ -2469,7 +2469,7 @@  spu_machine_dependent_reorg (void)
       return;
     }
 
-  blocks = sbitmap_alloc (last_basic_block);
+  blocks = sbitmap_alloc (last_basic_block_for_fn (cfun));
   bitmap_clear (blocks);
 
   in_spu_reorg = 1;
diff --git a/gcc/cprop.c b/gcc/cprop.c
index 9b8bd1e..600c617 100644
--- a/gcc/cprop.c
+++ b/gcc/cprop.c
@@ -595,8 +595,8 @@  compute_local_properties (sbitmap *kill, sbitmap *comp,
   unsigned int i;
 
   /* Initialize the bitmaps that were passed in.  */
-  bitmap_vector_clear (kill, last_basic_block);
-  bitmap_vector_clear (comp, last_basic_block);
+  bitmap_vector_clear (kill, last_basic_block_for_fn (cfun));
+  bitmap_vector_clear (comp, last_basic_block_for_fn (cfun));
 
   for (i = 0; i < table->size; i++)
     {
@@ -1355,7 +1355,7 @@  find_implicit_sets (void)
   rtx cond, new_rtx;
   unsigned int count = 0;
   bool edges_split = false;
-  size_t implicit_sets_size = last_basic_block + 10;
+  size_t implicit_sets_size = last_basic_block_for_fn (cfun) + 10;
 
   implicit_sets = XCNEWVEC (rtx, implicit_sets_size);
 
@@ -1667,7 +1667,7 @@  bypass_conditional_jumps (void)
   if (ENTRY_BLOCK_PTR_FOR_FN (cfun)->next_bb == EXIT_BLOCK_PTR_FOR_FN (cfun))
     return 0;
 
-  bypass_last_basic_block = last_basic_block;
+  bypass_last_basic_block = last_basic_block_for_fn (cfun);
   mark_dfs_back_edges ();
 
   changed = 0;
@@ -1809,8 +1809,8 @@  one_cprop_pass (void)
     df_analyze ();
 
   /* Initialize implicit_set_indexes array.  */
-  implicit_set_indexes = XNEWVEC (int, last_basic_block);
-  for (i = 0; i < last_basic_block; i++)
+  implicit_set_indexes = XNEWVEC (int, last_basic_block_for_fn (cfun));
+  for (i = 0; i < last_basic_block_for_fn (cfun); i++)
     implicit_set_indexes[i] = -1;
 
   alloc_hash_table (&set_hash_table);
@@ -1827,7 +1827,8 @@  one_cprop_pass (void)
       basic_block bb;
       rtx insn;
 
-      alloc_cprop_mem (last_basic_block, set_hash_table.n_elems);
+      alloc_cprop_mem (last_basic_block_for_fn (cfun),
+		       set_hash_table.n_elems);
       compute_cprop_data ();
 
       free (implicit_set_indexes);
diff --git a/gcc/cse.c b/gcc/cse.c
index 215beb0..74ae8ba 100644
--- a/gcc/cse.c
+++ b/gcc/cse.c
@@ -6522,7 +6522,7 @@  cse_main (rtx f ATTRIBUTE_UNUSED, int nregs)
 {
   struct cse_basic_block_data ebb_data;
   basic_block bb;
-  int *rc_order = XNEWVEC (int, last_basic_block);
+  int *rc_order = XNEWVEC (int, last_basic_block_for_fn (cfun));
   int i, n_blocks;
 
   df_set_flags (DF_LR_RUN_DCE);
@@ -6551,7 +6551,7 @@  cse_main (rtx f ATTRIBUTE_UNUSED, int nregs)
   reg_eqv_table = XNEWVEC (struct reg_eqv_elem, nregs);
 
   /* Set up the table of already visited basic blocks.  */
-  cse_visited_basic_blocks = sbitmap_alloc (last_basic_block);
+  cse_visited_basic_blocks = sbitmap_alloc (last_basic_block_for_fn (cfun));
   bitmap_clear (cse_visited_basic_blocks);
 
   /* Loop over basic blocks in reverse completion order (RPO),
diff --git a/gcc/df-core.c b/gcc/df-core.c
index 87419c2..d41fb72 100644
--- a/gcc/df-core.c
+++ b/gcc/df-core.c
@@ -721,8 +721,8 @@  rest_of_handle_df_initialize (void)
   if (optimize > 1)
     df_live_add_problem ();
 
-  df->postorder = XNEWVEC (int, last_basic_block);
-  df->postorder_inverted = XNEWVEC (int, last_basic_block);
+  df->postorder = XNEWVEC (int, last_basic_block_for_fn (cfun));
+  df->postorder_inverted = XNEWVEC (int, last_basic_block_for_fn (cfun));
   df->n_blocks = post_order_compute (df->postorder, true, true);
   df->n_blocks_inverted = inverted_post_order_compute (df->postorder_inverted);
   gcc_assert (df->n_blocks == df->n_blocks_inverted);
@@ -1115,7 +1115,7 @@  df_worklist_dataflow (struct dataflow *dataflow,
                       int n_blocks)
 {
   bitmap pending = BITMAP_ALLOC (&df_bitmap_obstack);
-  sbitmap considered = sbitmap_alloc (last_basic_block);
+  sbitmap considered = sbitmap_alloc (last_basic_block_for_fn (cfun));
   bitmap_iterator bi;
   unsigned int *bbindex_to_postorder;
   int i;
@@ -1125,11 +1125,12 @@  df_worklist_dataflow (struct dataflow *dataflow,
   gcc_assert (dir != DF_NONE);
 
   /* BBINDEX_TO_POSTORDER maps the bb->index to the reverse postorder.  */
-  bbindex_to_postorder = XNEWVEC (unsigned int, last_basic_block);
+  bbindex_to_postorder = XNEWVEC (unsigned int,
+				  last_basic_block_for_fn (cfun));
 
   /* Initialize the array to an out-of-bound value.  */
-  for (i = 0; i < last_basic_block; i++)
-    bbindex_to_postorder[i] = last_basic_block;
+  for (i = 0; i < last_basic_block_for_fn (cfun); i++)
+    bbindex_to_postorder[i] = last_basic_block_for_fn (cfun);
 
   /* Initialize the considered map.  */
   bitmap_clear (considered);
@@ -1236,8 +1237,8 @@  df_analyze (void)
 
   free (df->postorder);
   free (df->postorder_inverted);
-  df->postorder = XNEWVEC (int, last_basic_block);
-  df->postorder_inverted = XNEWVEC (int, last_basic_block);
+  df->postorder = XNEWVEC (int, last_basic_block_for_fn (cfun));
+  df->postorder_inverted = XNEWVEC (int, last_basic_block_for_fn (cfun));
   df->n_blocks = post_order_compute (df->postorder, true, true);
   df->n_blocks_inverted = inverted_post_order_compute (df->postorder_inverted);
 
@@ -1481,7 +1482,7 @@  df_set_bb_dirty (basic_block bb)
 void
 df_grow_bb_info (struct dataflow *dflow)
 {
-  unsigned int new_size = last_basic_block + 1;
+  unsigned int new_size = last_basic_block_for_fn (cfun) + 1;
   if (dflow->block_info_size < new_size)
     {
       new_size += new_size / 4;
@@ -1553,7 +1554,8 @@  df_compact_blocks (void)
       /* Now shuffle the block info for the problem.  */
       if (dflow->problem->free_bb_fun)
 	{
-	  int size = last_basic_block * dflow->problem->block_info_elt_size;
+	  int size = (last_basic_block_for_fn (cfun)
+		      * dflow->problem->block_info_elt_size);
 	  problem_temps = XNEWVAR (char, size);
 	  df_grow_bb_info (dflow);
 	  memcpy (problem_temps, dflow->block_info, size);
@@ -1571,7 +1573,7 @@  df_compact_blocks (void)
 	    }
 	  memset ((char *)dflow->block_info
 		  + i * dflow->problem->block_info_elt_size, 0,
-		  (last_basic_block - i)
+		  (last_basic_block_for_fn (cfun) - i)
 		  * dflow->problem->block_info_elt_size);
 	  free (problem_temps);
 	}
@@ -1608,7 +1610,7 @@  df_compact_blocks (void)
 
   gcc_assert (i == n_basic_blocks_for_fn (cfun));
 
-  for (; i < last_basic_block; i++)
+  for (; i < last_basic_block_for_fn (cfun); i++)
     SET_BASIC_BLOCK_FOR_FN (cfun, i, NULL);
 
 #ifdef DF_DEBUG_CFG
diff --git a/gcc/df-problems.c b/gcc/df-problems.c
index 2b42b48..ab19372 100644
--- a/gcc/df-problems.c
+++ b/gcc/df-problems.c
@@ -1173,8 +1173,8 @@  df_lr_verify_solution_start (void)
   df_lr->solutions_dirty = true;
 
   problem_data = (struct df_lr_problem_data *)df_lr->problem_data;
-  problem_data->in = XNEWVEC (bitmap_head, last_basic_block);
-  problem_data->out = XNEWVEC (bitmap_head, last_basic_block);
+  problem_data->in = XNEWVEC (bitmap_head, last_basic_block_for_fn (cfun));
+  problem_data->out = XNEWVEC (bitmap_head, last_basic_block_for_fn (cfun));
 
   FOR_ALL_BB (bb)
     {
@@ -1710,8 +1710,8 @@  df_live_verify_solution_start (void)
   df_live->solutions_dirty = true;
 
   problem_data = (struct df_live_problem_data *)df_live->problem_data;
-  problem_data->in = XNEWVEC (bitmap_head, last_basic_block);
-  problem_data->out = XNEWVEC (bitmap_head, last_basic_block);
+  problem_data->in = XNEWVEC (bitmap_head, last_basic_block_for_fn (cfun));
+  problem_data->out = XNEWVEC (bitmap_head, last_basic_block_for_fn (cfun));
 
   FOR_ALL_BB (bb)
     {
@@ -4315,7 +4315,7 @@  df_md_local_compute (bitmap all_blocks)
 
   bitmap_clear (&seen_in_insn);
 
-  frontiers = XNEWVEC (bitmap_head, last_basic_block);
+  frontiers = XNEWVEC (bitmap_head, last_basic_block_for_fn (cfun));
   FOR_ALL_BB (bb)
     bitmap_initialize (&frontiers[bb->index], &bitmap_default_obstack);
 
diff --git a/gcc/dominance.c b/gcc/dominance.c
index e9d2265..af73078 100644
--- a/gcc/dominance.c
+++ b/gcc/dominance.c
@@ -159,7 +159,8 @@  init_dom_info (struct dom_info *di, enum cdi_direction dir)
   init_ar (di->set_size, unsigned int, num, 1);
   init_ar (di->set_child, TBB, num, 0);
 
-  init_ar (di->dfs_order, TBB, (unsigned int) last_basic_block + 1, 0);
+  init_ar (di->dfs_order, TBB,
+	   (unsigned int) last_basic_block_for_fn (cfun) + 1, 0);
   init_ar (di->dfs_to_bb, basic_block, num, 0);
 
   di->dfsnum = 1;
@@ -296,7 +297,7 @@  calc_dfs_tree_nonrec (struct dom_info *di, basic_block bb, bool reverse)
 	  if (bb != en_block)
 	    my_i = di->dfs_order[bb->index];
 	  else
-	    my_i = di->dfs_order[last_basic_block];
+	    my_i = di->dfs_order[last_basic_block_for_fn (cfun)];
 	  child_i = di->dfs_order[bn->index] = di->dfsnum++;
 	  di->dfs_to_bb[child_i] = bn;
 	  di->dfs_parent[child_i] = my_i;
@@ -335,7 +336,7 @@  calc_dfs_tree (struct dom_info *di, bool reverse)
   /* The first block is the ENTRY_BLOCK (or EXIT_BLOCK if REVERSE).  */
   basic_block begin = (reverse
 		       ? EXIT_BLOCK_PTR_FOR_FN (cfun) : ENTRY_BLOCK_PTR_FOR_FN (cfun));
-  di->dfs_order[last_basic_block] = di->dfsnum;
+  di->dfs_order[last_basic_block_for_fn (cfun)] = di->dfsnum;
   di->dfs_to_bb[di->dfsnum] = begin;
   di->dfsnum++;
 
@@ -367,7 +368,8 @@  calc_dfs_tree (struct dom_info *di, bool reverse)
 	  bitmap_set_bit (di->fake_exit_edge, b->index);
 	  di->dfs_order[b->index] = di->dfsnum;
 	  di->dfs_to_bb[di->dfsnum] = b;
-	  di->dfs_parent[di->dfsnum] = di->dfs_order[last_basic_block];
+	  di->dfs_parent[di->dfsnum] =
+	    di->dfs_order[last_basic_block_for_fn (cfun)];
 	  di->dfsnum++;
 	  calc_dfs_tree_nonrec (di, b, reverse);
 	}
@@ -384,7 +386,8 @@  calc_dfs_tree (struct dom_info *di, bool reverse)
 	      bitmap_set_bit (di->fake_exit_edge, b2->index);
 	      di->dfs_order[b2->index] = di->dfsnum;
 	      di->dfs_to_bb[di->dfsnum] = b2;
-	      di->dfs_parent[di->dfsnum] = di->dfs_order[last_basic_block];
+	      di->dfs_parent[di->dfsnum] =
+		di->dfs_order[last_basic_block_for_fn (cfun)];
 	      di->dfsnum++;
 	      calc_dfs_tree_nonrec (di, b2, reverse);
 	      gcc_checking_assert (di->dfs_order[b->index]);
@@ -546,7 +549,7 @@  calc_idoms (struct dom_info *di, bool reverse)
 	  if (b == en_block)
 	    {
 	    do_fake_exit_edge:
-	      k1 = di->dfs_order[last_basic_block];
+	      k1 = di->dfs_order[last_basic_block_for_fn (cfun)];
 	    }
 	  else
 	    k1 = di->dfs_order[b->index];
diff --git a/gcc/domwalk.c b/gcc/domwalk.c
index 3350e4b..e84c8f7 100644
--- a/gcc/domwalk.c
+++ b/gcc/domwalk.c
@@ -159,7 +159,7 @@  dom_walker::walk (basic_block bb)
     {
       postorder = XNEWVEC (int, n_basic_blocks_for_fn (cfun));
       postorder_num = inverted_post_order_compute (postorder);
-      bb_postorder = XNEWVEC (int, last_basic_block);
+      bb_postorder = XNEWVEC (int, last_basic_block_for_fn (cfun));
       for (int i = 0; i < postorder_num; ++i)
 	bb_postorder[postorder[i]] = i;
       free (postorder);
diff --git a/gcc/dse.c b/gcc/dse.c
index 2d8ce1e..a926cb8 100644
--- a/gcc/dse.c
+++ b/gcc/dse.c
@@ -772,7 +772,7 @@  dse_step0 (void)
 
   rtx_group_table.create (11);
 
-  bb_table = XNEWVEC (bb_info_t, last_basic_block);
+  bb_table = XNEWVEC (bb_info_t, last_basic_block_for_fn (cfun));
   rtx_group_next_id = 0;
 
   stores_off_frame_dead_at_return = !cfun->stdarg;
@@ -3283,7 +3283,7 @@  static void
 dse_step3 (bool for_spills)
 {
   basic_block bb;
-  sbitmap unreachable_blocks = sbitmap_alloc (last_basic_block);
+  sbitmap unreachable_blocks = sbitmap_alloc (last_basic_block_for_fn (cfun));
   sbitmap_iterator sbi;
   bitmap all_ones = NULL;
   unsigned int i;
diff --git a/gcc/function.c b/gcc/function.c
index 2c8d781..d257af4 100644
--- a/gcc/function.c
+++ b/gcc/function.c
@@ -6498,7 +6498,7 @@  epilogue_done:
       commit_edge_insertions ();
 
       /* Look for basic blocks within the prologue insns.  */
-      blocks = sbitmap_alloc (last_basic_block);
+      blocks = sbitmap_alloc (last_basic_block_for_fn (cfun));
       bitmap_clear (blocks);
       bitmap_set_bit (blocks, entry_edge->dest->index);
       bitmap_set_bit (blocks, orig_entry_edge->dest->index);
diff --git a/gcc/gcse.c b/gcc/gcse.c
index 8928c85..fa25a46 100644
--- a/gcc/gcse.c
+++ b/gcc/gcse.c
@@ -633,8 +633,9 @@  alloc_gcse_mem (void)
      pre-processor limitation with template types in macro arguments.  */
   typedef vec<rtx> vec_rtx_heap;
   typedef vec<modify_pair> vec_modify_pair_heap;
-  modify_mem_list = GCNEWVEC (vec_rtx_heap, last_basic_block);
-  canon_modify_mem_list = GCNEWVEC (vec_modify_pair_heap, last_basic_block);
+  modify_mem_list = GCNEWVEC (vec_rtx_heap, last_basic_block_for_fn (cfun));
+  canon_modify_mem_list = GCNEWVEC (vec_modify_pair_heap,
+				    last_basic_block_for_fn (cfun));
   modify_mem_list_set = BITMAP_ALLOC (NULL);
   blocks_with_calls = BITMAP_ALLOC (NULL);
 }
@@ -685,13 +686,13 @@  compute_local_properties (sbitmap *transp, sbitmap *comp, sbitmap *antloc,
   /* Initialize any bitmaps that were passed in.  */
   if (transp)
     {
-      bitmap_vector_ones (transp, last_basic_block);
+      bitmap_vector_ones (transp, last_basic_block_for_fn (cfun));
     }
 
   if (comp)
-    bitmap_vector_clear (comp, last_basic_block);
+    bitmap_vector_clear (comp, last_basic_block_for_fn (cfun));
   if (antloc)
-    bitmap_vector_clear (antloc, last_basic_block);
+    bitmap_vector_clear (antloc, last_basic_block_for_fn (cfun));
 
   for (i = 0; i < table->size; i++)
     {
@@ -1972,7 +1973,7 @@  prune_insertions_deletions (int n_elems)
 
   /* Similarly for deletions, but those occur in blocks rather than on
      edges.  */
-  for (i = 0; i < (unsigned) last_basic_block; i++)
+  for (i = 0; i < (unsigned) last_basic_block_for_fn (cfun); i++)
     {
       EXECUTE_IF_SET_IN_BITMAP (pre_delete_map[i], 0, j, sbi)
 	deletions[j]++;
@@ -1993,7 +1994,7 @@  prune_insertions_deletions (int n_elems)
       for (i = 0; i < (unsigned) n_edges_for_fn (cfun); i++)
 	bitmap_clear_bit (pre_insert_map[i], j);
 
-      for (i = 0; i < (unsigned) last_basic_block; i++)
+      for (i = 0; i < (unsigned) last_basic_block_for_fn (cfun); i++)
 	bitmap_clear_bit (pre_delete_map[i], j);
     }
 
@@ -2012,7 +2013,7 @@  compute_pre_data (void)
 
   compute_local_properties (transp, comp, antloc, &expr_hash_table);
   prune_expressions (true);
-  bitmap_vector_clear (ae_kill, last_basic_block);
+  bitmap_vector_clear (ae_kill, last_basic_block_for_fn (cfun));
 
   /* Compute ae_kill for each basic block using:
 
@@ -2103,7 +2104,7 @@  static int
 pre_expr_reaches_here_p (basic_block occr_bb, struct expr *expr, basic_block bb)
 {
   int rval;
-  char *visited = XCNEWVEC (char, last_basic_block);
+  char *visited = XCNEWVEC (char, last_basic_block_for_fn (cfun));
 
   rval = pre_expr_reaches_here_p_work (occr_bb, expr, bb, visited);
 
@@ -2687,7 +2688,7 @@  one_pre_gcse_pass (void)
   if (expr_hash_table.n_elems > 0)
     {
       struct edge_list *edge_list;
-      alloc_pre_mem (last_basic_block, expr_hash_table.n_elems);
+      alloc_pre_mem (last_basic_block_for_fn (cfun), expr_hash_table.n_elems);
       edge_list = compute_pre_data ();
       changed |= pre_gcse (edge_list);
       free_edge_list (edge_list);
@@ -2816,8 +2817,8 @@  compute_code_hoist_vbeinout (void)
   int changed, passes;
   basic_block bb;
 
-  bitmap_vector_clear (hoist_vbeout, last_basic_block);
-  bitmap_vector_clear (hoist_vbein, last_basic_block);
+  bitmap_vector_clear (hoist_vbeout, last_basic_block_for_fn (cfun));
+  bitmap_vector_clear (hoist_vbein, last_basic_block_for_fn (cfun));
 
   passes = 0;
   changed = 1;
@@ -3033,7 +3034,7 @@  should_hoist_expr_to_dom (basic_block expr_bb, struct expr *expr,
   if (visited == NULL)
     {
       visited_allocated_locally = 1;
-      visited = sbitmap_alloc (last_basic_block);
+      visited = sbitmap_alloc (last_basic_block_for_fn (cfun));
       bitmap_clear (visited);
     }
 
@@ -3166,7 +3167,7 @@  hoist_code (void)
      data to restrict distance an expression can travel.  */
 
   to_bb_head = XCNEWVEC (int, get_max_uid ());
-  bb_size = XCNEWVEC (int, last_basic_block);
+  bb_size = XCNEWVEC (int, last_basic_block_for_fn (cfun));
 
   FOR_EACH_BB (bb)
     {
@@ -3622,7 +3623,8 @@  one_code_hoisting_pass (void)
 
   if (expr_hash_table.n_elems > 0)
     {
-      alloc_code_hoist_mem (last_basic_block, expr_hash_table.n_elems);
+      alloc_code_hoist_mem (last_basic_block_for_fn (cfun),
+			    expr_hash_table.n_elems);
       compute_code_hoist_data ();
       changed = hoist_code ();
       free_code_hoist_mem ();
diff --git a/gcc/graph.c b/gcc/graph.c
index 3f02cab..6c405d8 100644
--- a/gcc/graph.c
+++ b/gcc/graph.c
@@ -157,7 +157,7 @@  draw_cfg_nodes_no_loops (pretty_printer *pp, struct function *fun)
   int i, n;
   sbitmap visited;
 
-  visited = sbitmap_alloc (last_basic_block);
+  visited = sbitmap_alloc (last_basic_block_for_fn (cfun));
   bitmap_clear (visited);
 
   n = pre_and_rev_post_order_compute_fn (fun, NULL, rpo, true);
diff --git a/gcc/graphite-sese-to-poly.c b/gcc/graphite-sese-to-poly.c
index 0eebbab..975db63 100644
--- a/gcc/graphite-sese-to-poly.c
+++ b/gcc/graphite-sese-to-poly.c
@@ -423,7 +423,7 @@  build_scop_bbs_1 (scop_p scop, sbitmap visited, basic_block bb)
 static void
 build_scop_bbs (scop_p scop)
 {
-  sbitmap visited = sbitmap_alloc (last_basic_block);
+  sbitmap visited = sbitmap_alloc (last_basic_block_for_fn (cfun));
   sese region = SCOP_REGION (scop);
 
   bitmap_clear (visited);
diff --git a/gcc/haifa-sched.c b/gcc/haifa-sched.c
index 8d47eb9..d5e3309 100644
--- a/gcc/haifa-sched.c
+++ b/gcc/haifa-sched.c
@@ -8075,7 +8075,7 @@  unlink_bb_notes (basic_block first, basic_block last)
   if (first == last)
     return;
 
-  bb_header = XNEWVEC (rtx, last_basic_block);
+  bb_header = XNEWVEC (rtx, last_basic_block_for_fn (cfun));
 
   /* Make a sentinel.  */
   if (last->next_bb != EXIT_BLOCK_PTR_FOR_FN (cfun))
diff --git a/gcc/ipa-split.c b/gcc/ipa-split.c
index f8fa0ee..d5dfb8d 100644
--- a/gcc/ipa-split.c
+++ b/gcc/ipa-split.c
@@ -1593,7 +1593,7 @@  execute_split_functions (void)
   calculate_dominance_info (CDI_DOMINATORS);
 
   /* Compute local info about basic blocks and determine function size/time.  */
-  bb_info_vec.safe_grow_cleared (last_basic_block + 1);
+  bb_info_vec.safe_grow_cleared (last_basic_block_for_fn (cfun) + 1);
   memset (&best_split_point, 0, sizeof (best_split_point));
   FOR_EACH_BB (bb)
     {
diff --git a/gcc/ira-build.c b/gcc/ira-build.c
index 09e22d7..f9258ee 100644
--- a/gcc/ira-build.c
+++ b/gcc/ira-build.c
@@ -138,9 +138,10 @@  create_loop_tree_nodes (void)
 
   ira_bb_nodes
     = ((struct ira_loop_tree_node *)
-       ira_allocate (sizeof (struct ira_loop_tree_node) * last_basic_block));
-  last_basic_block_before_change = last_basic_block;
-  for (i = 0; i < (unsigned int) last_basic_block; i++)
+       ira_allocate (sizeof (struct ira_loop_tree_node)
+		     * last_basic_block_for_fn (cfun)));
+  last_basic_block_before_change = last_basic_block_for_fn (cfun);
+  for (i = 0; i < (unsigned int) last_basic_block_for_fn (cfun); i++)
     {
       ira_bb_nodes[i].regno_allocno_map = NULL;
       memset (ira_bb_nodes[i].reg_pressure, 0,
@@ -2605,8 +2606,10 @@  remove_unnecessary_regions (bool all_p)
     mark_all_loops_for_removal ();
   else
     mark_loops_for_removal ();
-  children_vec.create (last_basic_block + number_of_loops (cfun));
-  removed_loop_vec.create (last_basic_block + number_of_loops (cfun));
+  children_vec.create (last_basic_block_for_fn (cfun)
+		       + number_of_loops (cfun));
+  removed_loop_vec.create (last_basic_block_for_fn (cfun)
+			   + number_of_loops (cfun));
   remove_uneccesary_loop_nodes_from_loop_tree (ira_loop_tree_root);
   children_vec.release ();
   if (all_p)
diff --git a/gcc/ira-emit.c b/gcc/ira-emit.c
index 198fa47..d59461b 100644
--- a/gcc/ira-emit.c
+++ b/gcc/ira-emit.c
@@ -1239,15 +1239,17 @@  ira_emit (bool loops_p)
   edge e;
   ira_allocno_t a;
   ira_allocno_iterator ai;
+  size_t sz;
 
   FOR_EACH_ALLOCNO (a, ai)
     ALLOCNO_EMIT_DATA (a)->reg = regno_reg_rtx[ALLOCNO_REGNO (a)];
   if (! loops_p)
     return;
-  at_bb_start = (move_t *) ira_allocate (sizeof (move_t) * last_basic_block);
-  memset (at_bb_start, 0, sizeof (move_t) * last_basic_block);
-  at_bb_end = (move_t *) ira_allocate (sizeof (move_t) * last_basic_block);
-  memset (at_bb_end, 0, sizeof (move_t) * last_basic_block);
+  sz = sizeof (move_t) * last_basic_block_for_fn (cfun);
+  at_bb_start = (move_t *) ira_allocate (sz);
+  memset (at_bb_start, 0, sz);
+  at_bb_end = (move_t *) ira_allocate (sz);
+  memset (at_bb_end, 0, sz);
   local_allocno_bitmap = ira_allocate_bitmap ();
   used_regno_bitmap = ira_allocate_bitmap ();
   renamed_regno_bitmap = ira_allocate_bitmap ();
diff --git a/gcc/ira.c b/gcc/ira.c
index b3477ae..ae35035 100644
--- a/gcc/ira.c
+++ b/gcc/ira.c
@@ -4507,12 +4507,15 @@  find_moveable_pseudos (void)
   int *uid_luid = XNEWVEC (int, max_uid);
   rtx *closest_uses = XNEWVEC (rtx, max_regs);
   /* A set of registers which are live but not modified throughout a block.  */
-  bitmap_head *bb_transp_live = XNEWVEC (bitmap_head, last_basic_block);
+  bitmap_head *bb_transp_live = XNEWVEC (bitmap_head,
+					 last_basic_block_for_fn (cfun));
   /* A set of registers which only exist in a given basic block.  */
-  bitmap_head *bb_local = XNEWVEC (bitmap_head, last_basic_block);
+  bitmap_head *bb_local = XNEWVEC (bitmap_head,
+				   last_basic_block_for_fn (cfun));
   /* A set of registers which are set once, in an instruction that can be
      moved freely downwards, but are otherwise transparent to a block.  */
-  bitmap_head *bb_moveable_reg_sets = XNEWVEC (bitmap_head, last_basic_block);
+  bitmap_head *bb_moveable_reg_sets = XNEWVEC (bitmap_head,
+					       last_basic_block_for_fn (cfun));
   bitmap_head live, used, set, interesting, unusable_as_input;
   bitmap_iterator bi;
   bitmap_initialize (&interesting, 0);
@@ -5187,7 +5190,8 @@  ira (FILE *f)
      pseudos and 10K blocks or 100K pseudos and 1K blocks), we will
      use simplified and faster algorithms in LRA.  */
   lra_simple_p
-    = (ira_use_lra_p && max_reg_num () >= (1 << 26) / last_basic_block);
+    = (ira_use_lra_p
+       && max_reg_num () >= (1 << 26) / last_basic_block_for_fn (cfun));
   if (lra_simple_p)
     {
       /* It permits to skip live range splitting in LRA.  */
diff --git a/gcc/lcm.c b/gcc/lcm.c
index aa63c72..1129d6c 100644
--- a/gcc/lcm.c
+++ b/gcc/lcm.c
@@ -105,7 +105,7 @@  compute_antinout_edge (sbitmap *antloc, sbitmap *transp, sbitmap *antin,
 
   /* We want a maximal solution, so make an optimistic initialization of
      ANTIN.  */
-  bitmap_vector_ones (antin, last_basic_block);
+  bitmap_vector_ones (antin, last_basic_block_for_fn (cfun));
 
   /* Put every block on the worklist; this is necessary because of the
      optimistic initialization of ANTIN above.  */
@@ -330,10 +330,10 @@  compute_laterin (struct edge_list *edge_list, sbitmap *earliest,
   /* Computation of insertion and deletion points requires computing LATERIN
      for the EXIT block.  We allocated an extra entry in the LATERIN array
      for just this purpose.  */
-  bitmap_ones (laterin[last_basic_block]);
+  bitmap_ones (laterin[last_basic_block_for_fn (cfun)]);
   FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR_FOR_FN (cfun)->preds)
-    bitmap_and (laterin[last_basic_block],
-		     laterin[last_basic_block],
+    bitmap_and (laterin[last_basic_block_for_fn (cfun)],
+		     laterin[last_basic_block_for_fn (cfun)],
 		     later[(size_t) e->aux]);
 
   clear_aux_for_edges ();
@@ -359,7 +359,8 @@  compute_insert_delete (struct edge_list *edge_list, sbitmap *antloc,
       basic_block b = INDEX_EDGE_SUCC_BB (edge_list, x);
 
       if (b == EXIT_BLOCK_PTR_FOR_FN (cfun))
-	bitmap_and_compl (insert[x], later[x], laterin[last_basic_block]);
+	bitmap_and_compl (insert[x], later[x],
+			  laterin[last_basic_block_for_fn (cfun)]);
       else
 	bitmap_and_compl (insert[x], later[x], laterin[b->index]);
     }
@@ -389,29 +390,35 @@  pre_edge_lcm (int n_exprs, sbitmap *transp,
       fprintf (dump_file, "Edge List:\n");
       verify_edge_list (dump_file, edge_list);
       print_edge_list (dump_file, edge_list);
-      dump_bitmap_vector (dump_file, "transp", "", transp, last_basic_block);
-      dump_bitmap_vector (dump_file, "antloc", "", antloc, last_basic_block);
-      dump_bitmap_vector (dump_file, "avloc", "", avloc, last_basic_block);
-      dump_bitmap_vector (dump_file, "kill", "", kill, last_basic_block);
+      dump_bitmap_vector (dump_file, "transp", "", transp,
+			  last_basic_block_for_fn (cfun));
+      dump_bitmap_vector (dump_file, "antloc", "", antloc,
+			  last_basic_block_for_fn (cfun));
+      dump_bitmap_vector (dump_file, "avloc", "", avloc,
+			  last_basic_block_for_fn (cfun));
+      dump_bitmap_vector (dump_file, "kill", "", kill,
+			  last_basic_block_for_fn (cfun));
     }
 #endif
 
   /* Compute global availability.  */
-  avin = sbitmap_vector_alloc (last_basic_block, n_exprs);
-  avout = sbitmap_vector_alloc (last_basic_block, n_exprs);
+  avin = sbitmap_vector_alloc (last_basic_block_for_fn (cfun), n_exprs);
+  avout = sbitmap_vector_alloc (last_basic_block_for_fn (cfun), n_exprs);
   compute_available (avloc, kill, avout, avin);
   sbitmap_vector_free (avin);
 
   /* Compute global anticipatability.  */
-  antin = sbitmap_vector_alloc (last_basic_block, n_exprs);
-  antout = sbitmap_vector_alloc (last_basic_block, n_exprs);
+  antin = sbitmap_vector_alloc (last_basic_block_for_fn (cfun), n_exprs);
+  antout = sbitmap_vector_alloc (last_basic_block_for_fn (cfun), n_exprs);
   compute_antinout_edge (antloc, transp, antin, antout);
 
 #ifdef LCM_DEBUG_INFO
   if (dump_file)
     {
-      dump_bitmap_vector (dump_file, "antin", "", antin, last_basic_block);
-      dump_bitmap_vector (dump_file, "antout", "", antout, last_basic_block);
+      dump_bitmap_vector (dump_file, "antin", "", antin,
+			  last_basic_block_for_fn (cfun));
+      dump_bitmap_vector (dump_file, "antout", "", antout,
+			  last_basic_block_for_fn (cfun));
     }
 #endif
 
@@ -431,13 +438,15 @@  pre_edge_lcm (int n_exprs, sbitmap *transp,
   later = sbitmap_vector_alloc (num_edges, n_exprs);
 
   /* Allocate an extra element for the exit block in the laterin vector.  */
-  laterin = sbitmap_vector_alloc (last_basic_block + 1, n_exprs);
+  laterin = sbitmap_vector_alloc (last_basic_block_for_fn (cfun) + 1,
+				  n_exprs);
   compute_laterin (edge_list, earliest, antloc, later, laterin);
 
 #ifdef LCM_DEBUG_INFO
   if (dump_file)
     {
-      dump_bitmap_vector (dump_file, "laterin", "", laterin, last_basic_block + 1);
+      dump_bitmap_vector (dump_file, "laterin", "", laterin,
+			  last_basic_block_for_fn (cfun) + 1);
       dump_bitmap_vector (dump_file, "later", "", later, num_edges);
     }
 #endif
@@ -445,9 +454,9 @@  pre_edge_lcm (int n_exprs, sbitmap *transp,
   sbitmap_vector_free (earliest);
 
   *insert = sbitmap_vector_alloc (num_edges, n_exprs);
-  *del = sbitmap_vector_alloc (last_basic_block, n_exprs);
+  *del = sbitmap_vector_alloc (last_basic_block_for_fn (cfun), n_exprs);
   bitmap_vector_clear (*insert, num_edges);
-  bitmap_vector_clear (*del, last_basic_block);
+  bitmap_vector_clear (*del, last_basic_block_for_fn (cfun));
   compute_insert_delete (edge_list, antloc, later, laterin, *insert, *del);
 
   sbitmap_vector_free (laterin);
@@ -458,7 +467,7 @@  pre_edge_lcm (int n_exprs, sbitmap *transp,
     {
       dump_bitmap_vector (dump_file, "pre_insert_map", "", *insert, num_edges);
       dump_bitmap_vector (dump_file, "pre_delete_map", "", *del,
-			   last_basic_block);
+			  last_basic_block_for_fn (cfun));
     }
 #endif
 
@@ -484,7 +493,7 @@  compute_available (sbitmap *avloc, sbitmap *kill, sbitmap *avout,
     XNEWVEC (basic_block, n_basic_blocks_for_fn (cfun) - NUM_FIXED_BLOCKS);
 
   /* We want a maximal solution.  */
-  bitmap_vector_ones (avout, last_basic_block);
+  bitmap_vector_ones (avout, last_basic_block_for_fn (cfun));
 
   /* Put every block on the worklist; this is necessary because of the
      optimistic initialization of AVOUT above.  */
@@ -666,10 +675,10 @@  compute_nearerout (struct edge_list *edge_list, sbitmap *farthest,
   /* Computation of insertion and deletion points requires computing NEAREROUT
      for the ENTRY block.  We allocated an extra entry in the NEAREROUT array
      for just this purpose.  */
-  bitmap_ones (nearerout[last_basic_block]);
+  bitmap_ones (nearerout[last_basic_block_for_fn (cfun)]);
   FOR_EACH_EDGE (e, ei, ENTRY_BLOCK_PTR_FOR_FN (cfun)->succs)
-    bitmap_and (nearerout[last_basic_block],
-		     nearerout[last_basic_block],
+    bitmap_and (nearerout[last_basic_block_for_fn (cfun)],
+		     nearerout[last_basic_block_for_fn (cfun)],
 		     nearer[(size_t) e->aux]);
 
   clear_aux_for_edges ();
@@ -694,7 +703,8 @@  compute_rev_insert_delete (struct edge_list *edge_list, sbitmap *st_avloc,
     {
       basic_block b = INDEX_EDGE_PRED_BB (edge_list, x);
       if (b == ENTRY_BLOCK_PTR_FOR_FN (cfun))
-	bitmap_and_compl (insert[x], nearer[x], nearerout[last_basic_block]);
+	bitmap_and_compl (insert[x], nearer[x],
+			  nearerout[last_basic_block_for_fn (cfun)]);
       else
 	bitmap_and_compl (insert[x], nearer[x], nearerout[b->index]);
     }
@@ -719,15 +729,15 @@  pre_edge_rev_lcm (int n_exprs, sbitmap *transp,
   edge_list = create_edge_list ();
   num_edges = NUM_EDGES (edge_list);
 
-  st_antin = sbitmap_vector_alloc (last_basic_block, n_exprs);
-  st_antout = sbitmap_vector_alloc (last_basic_block, n_exprs);
-  bitmap_vector_clear (st_antin, last_basic_block);
-  bitmap_vector_clear (st_antout, last_basic_block);
+  st_antin = sbitmap_vector_alloc (last_basic_block_for_fn (cfun), n_exprs);
+  st_antout = sbitmap_vector_alloc (last_basic_block_for_fn (cfun), n_exprs);
+  bitmap_vector_clear (st_antin, last_basic_block_for_fn (cfun));
+  bitmap_vector_clear (st_antout, last_basic_block_for_fn (cfun));
   compute_antinout_edge (st_antloc, transp, st_antin, st_antout);
 
   /* Compute global anticipatability.  */
-  st_avout = sbitmap_vector_alloc (last_basic_block, n_exprs);
-  st_avin = sbitmap_vector_alloc (last_basic_block, n_exprs);
+  st_avout = sbitmap_vector_alloc (last_basic_block_for_fn (cfun), n_exprs);
+  st_avin = sbitmap_vector_alloc (last_basic_block_for_fn (cfun), n_exprs);
   compute_available (st_avloc, kill, st_avout, st_avin);
 
 #ifdef LCM_DEBUG_INFO
@@ -736,20 +746,28 @@  pre_edge_rev_lcm (int n_exprs, sbitmap *transp,
       fprintf (dump_file, "Edge List:\n");
       verify_edge_list (dump_file, edge_list);
       print_edge_list (dump_file, edge_list);
-      dump_bitmap_vector (dump_file, "transp", "", transp, last_basic_block);
-      dump_bitmap_vector (dump_file, "st_avloc", "", st_avloc, last_basic_block);
-      dump_bitmap_vector (dump_file, "st_antloc", "", st_antloc, last_basic_block);
-      dump_bitmap_vector (dump_file, "st_antin", "", st_antin, last_basic_block);
-      dump_bitmap_vector (dump_file, "st_antout", "", st_antout, last_basic_block);
-      dump_bitmap_vector (dump_file, "st_kill", "", kill, last_basic_block);
+      dump_bitmap_vector (dump_file, "transp", "", transp,
+			  last_basic_block_for_fn (cfun));
+      dump_bitmap_vector (dump_file, "st_avloc", "", st_avloc,
+			  last_basic_block_for_fn (cfun));
+      dump_bitmap_vector (dump_file, "st_antloc", "", st_antloc,
+			  last_basic_block_for_fn (cfun));
+      dump_bitmap_vector (dump_file, "st_antin", "", st_antin,
+			  last_basic_block_for_fn (cfun));
+      dump_bitmap_vector (dump_file, "st_antout", "", st_antout,
+			  last_basic_block_for_fn (cfun));
+      dump_bitmap_vector (dump_file, "st_kill", "", kill,
+			  last_basic_block_for_fn (cfun));
     }
 #endif
 
 #ifdef LCM_DEBUG_INFO
   if (dump_file)
     {
-      dump_bitmap_vector (dump_file, "st_avout", "", st_avout, last_basic_block);
-      dump_bitmap_vector (dump_file, "st_avin", "", st_avin, last_basic_block);
+      dump_bitmap_vector (dump_file, "st_avout", "", st_avout, last_basic_block_for_fn (cfun));
+      dump_bitmap_vector (dump_file, "st_avin", "", st_avin, last_basic_block_for_fn (cfun));
     }
 #endif
 
@@ -772,14 +790,15 @@  pre_edge_rev_lcm (int n_exprs, sbitmap *transp,
   nearer = sbitmap_vector_alloc (num_edges, n_exprs);
 
   /* Allocate an extra element for the entry block.  */
-  nearerout = sbitmap_vector_alloc (last_basic_block + 1, n_exprs);
+  nearerout = sbitmap_vector_alloc (last_basic_block_for_fn (cfun) + 1,
+				    n_exprs);
   compute_nearerout (edge_list, farthest, st_avloc, nearer, nearerout);
 
 #ifdef LCM_DEBUG_INFO
   if (dump_file)
     {
       dump_bitmap_vector (dump_file, "nearerout", "", nearerout,
-			   last_basic_block + 1);
+			   last_basic_block_for_fn (cfun) + 1);
       dump_bitmap_vector (dump_file, "nearer", "", nearer, num_edges);
     }
 #endif
@@ -787,7 +806,7 @@  pre_edge_rev_lcm (int n_exprs, sbitmap *transp,
   sbitmap_vector_free (farthest);
 
   *insert = sbitmap_vector_alloc (num_edges, n_exprs);
-  *del = sbitmap_vector_alloc (last_basic_block, n_exprs);
+  *del = sbitmap_vector_alloc (last_basic_block_for_fn (cfun), n_exprs);
   compute_rev_insert_delete (edge_list, st_avloc, nearer, nearerout,
 			     *insert, *del);
 
@@ -799,7 +818,7 @@  pre_edge_rev_lcm (int n_exprs, sbitmap *transp,
     {
       dump_bitmap_vector (dump_file, "pre_insert_map", "", *insert, num_edges);
       dump_bitmap_vector (dump_file, "pre_delete_map", "", *del,
-			   last_basic_block);
+			   last_basic_block_for_fn (cfun));
     }
 #endif
   return edge_list;
diff --git a/gcc/loop-unroll.c b/gcc/loop-unroll.c
index d1c7b9c..24ed83f 100644
--- a/gcc/loop-unroll.c
+++ b/gcc/loop-unroll.c
@@ -2007,7 +2007,7 @@  static void
 opt_info_start_duplication (struct opt_info *opt_info)
 {
   if (opt_info)
-    opt_info->first_new_block = last_basic_block;
+    opt_info->first_new_block = last_basic_block_for_fn (cfun);
 }
 
 /* Determine the number of iterations between initialization of the base
@@ -2368,7 +2368,9 @@  apply_opt_in_copies (struct opt_info *opt_info,
     for (ivts = opt_info->iv_to_split_head; ivts; ivts = ivts->next)
       allocate_basic_variable (ivts);
 
-  for (i = opt_info->first_new_block; i < (unsigned) last_basic_block; i++)
+  for (i = opt_info->first_new_block;
+       i < (unsigned) last_basic_block_for_fn (cfun);
+       i++)
     {
       bb = BASIC_BLOCK_FOR_FN (cfun, i);
       orig_bb = get_bb_original (bb);
@@ -2444,7 +2446,9 @@  apply_opt_in_copies (struct opt_info *opt_info,
   /* Rewrite also the original loop body.  Find them as originals of the blocks
      in the last copied iteration, i.e. those that have
      get_bb_copy (get_bb_original (bb)) == bb.  */
-  for (i = opt_info->first_new_block; i < (unsigned) last_basic_block; i++)
+  for (i = opt_info->first_new_block;
+       i < (unsigned) last_basic_block_for_fn (cfun);
+       i++)
     {
       bb = BASIC_BLOCK_FOR_FN (cfun, i);
       orig_bb = get_bb_original (bb);
diff --git a/gcc/lower-subreg.c b/gcc/lower-subreg.c
index 6c9d622..60c47b9 100644
--- a/gcc/lower-subreg.c
+++ b/gcc/lower-subreg.c
@@ -1537,7 +1537,7 @@  decompose_multiword_subregs (bool decompose_copies)
 
       propagate_pseudo_copies ();
 
-      sub_blocks = sbitmap_alloc (last_basic_block);
+      sub_blocks = sbitmap_alloc (last_basic_block_for_fn (cfun));
       bitmap_clear (sub_blocks);
 
       EXECUTE_IF_SET_IN_BITMAP (decomposable_context, 0, regno, iter)
diff --git a/gcc/lra-lives.c b/gcc/lra-lives.c
index d2082fe..a677f86 100644
--- a/gcc/lra-lives.c
+++ b/gcc/lra-lives.c
@@ -996,7 +996,7 @@  lra_create_live_ranges (bool all_p)
   curr_point = 0;
   point_freq_vec.create (get_max_uid () * 2);
   lra_point_freq = point_freq_vec.address ();
-  int *post_order_rev_cfg = XNEWVEC (int, last_basic_block);
+  int *post_order_rev_cfg = XNEWVEC (int, last_basic_block_for_fn (cfun));
   int n_blocks_inverted = inverted_post_order_compute (post_order_rev_cfg);
   lra_assert (n_blocks_inverted == n_basic_blocks_for_fn (cfun));
   for (i = n_blocks_inverted - 1; i >= 0; --i)
diff --git a/gcc/lra.c b/gcc/lra.c
index d21d864..50a0786 100644
--- a/gcc/lra.c
+++ b/gcc/lra.c
@@ -2422,7 +2422,7 @@  lra (FILE *f)
   if (cfun->can_throw_non_call_exceptions)
     {
       sbitmap blocks;
-      blocks = sbitmap_alloc (last_basic_block);
+      blocks = sbitmap_alloc (last_basic_block_for_fn (cfun));
       bitmap_ones (blocks);
       find_many_sub_basic_blocks (blocks);
       sbitmap_free (blocks);
diff --git a/gcc/mode-switching.c b/gcc/mode-switching.c
index ed45094..a9e5069 100644
--- a/gcc/mode-switching.c
+++ b/gcc/mode-switching.c
@@ -480,7 +480,8 @@  optimize_mode_switching (void)
 	entry_exit_extra = 3;
 #endif
 	bb_info[n_entities]
-	  = XCNEWVEC (struct bb_info, last_basic_block + entry_exit_extra);
+	  = XCNEWVEC (struct bb_info,
+		      last_basic_block_for_fn (cfun) + entry_exit_extra);
 	entity_map[n_entities++] = e;
 	if (num_modes[e] > max_num_modes)
 	  max_num_modes = num_modes[e];
@@ -500,11 +501,11 @@  optimize_mode_switching (void)
 
   /* Create the bitmap vectors.  */
 
-  antic = sbitmap_vector_alloc (last_basic_block, n_entities);
-  transp = sbitmap_vector_alloc (last_basic_block, n_entities);
-  comp = sbitmap_vector_alloc (last_basic_block, n_entities);
+  antic = sbitmap_vector_alloc (last_basic_block_for_fn (cfun), n_entities);
+  transp = sbitmap_vector_alloc (last_basic_block_for_fn (cfun), n_entities);
+  comp = sbitmap_vector_alloc (last_basic_block_for_fn (cfun), n_entities);
 
-  bitmap_vector_ones (transp, last_basic_block);
+  bitmap_vector_ones (transp, last_basic_block_for_fn (cfun));
 
   for (j = n_entities - 1; j >= 0; j--)
     {
@@ -608,7 +609,7 @@  optimize_mode_switching (void)
 #endif /* NORMAL_MODE */
     }
 
-  kill = sbitmap_vector_alloc (last_basic_block, n_entities);
+  kill = sbitmap_vector_alloc (last_basic_block_for_fn (cfun), n_entities);
   for (i = 0; i < max_num_modes; i++)
     {
       int current_mode[N_ENTITIES];
@@ -616,8 +617,8 @@  optimize_mode_switching (void)
       sbitmap *insert;
 
       /* Set the anticipatable and computing arrays.  */
-      bitmap_vector_clear (antic, last_basic_block);
-      bitmap_vector_clear (comp, last_basic_block);
+      bitmap_vector_clear (antic, last_basic_block_for_fn (cfun));
+      bitmap_vector_clear (comp, last_basic_block_for_fn (cfun));
       for (j = n_entities - 1; j >= 0; j--)
 	{
 	  int m = current_mode[j] = MODE_PRIORITY_TO_MODE (entity_map[j], i);
diff --git a/gcc/recog.c b/gcc/recog.c
index 7f59756..c59aa0e 100644
--- a/gcc/recog.c
+++ b/gcc/recog.c
@@ -2898,7 +2898,7 @@  split_all_insns (void)
   bool changed;
   basic_block bb;
 
-  blocks = sbitmap_alloc (last_basic_block);
+  blocks = sbitmap_alloc (last_basic_block_for_fn (cfun));
   bitmap_clear (blocks);
   changed = false;
 
diff --git a/gcc/regcprop.c b/gcc/regcprop.c
index 9b52a63..0438875 100644
--- a/gcc/regcprop.c
+++ b/gcc/regcprop.c
@@ -1066,9 +1066,9 @@  copyprop_hardreg_forward (void)
   sbitmap visited;
   bool analyze_called = false;
 
-  all_vd = XNEWVEC (struct value_data, last_basic_block);
+  all_vd = XNEWVEC (struct value_data, last_basic_block_for_fn (cfun));
 
-  visited = sbitmap_alloc (last_basic_block);
+  visited = sbitmap_alloc (last_basic_block_for_fn (cfun));
   bitmap_clear (visited);
 
   if (MAY_HAVE_DEBUG_INSNS)
diff --git a/gcc/regrename.c b/gcc/regrename.c
index ac8b0f3..3c242fb 100644
--- a/gcc/regrename.c
+++ b/gcc/regrename.c
@@ -668,7 +668,7 @@  regrename_analyze (bitmap bb_mask)
   int n_bbs;
   int *inverse_postorder;
 
-  inverse_postorder = XNEWVEC (int, last_basic_block);
+  inverse_postorder = XNEWVEC (int, last_basic_block_for_fn (cfun));
   n_bbs = pre_and_rev_post_order_compute (NULL, inverse_postorder, false);
 
   /* Gather some information about the blocks in this function.  */
diff --git a/gcc/reload1.c b/gcc/reload1.c
index 6864ec1..15c6db5 100644
--- a/gcc/reload1.c
+++ b/gcc/reload1.c
@@ -1283,7 +1283,7 @@  reload (rtx first, int global)
   if (cfun->can_throw_non_call_exceptions)
     {
       sbitmap blocks;
-      blocks = sbitmap_alloc (last_basic_block);
+      blocks = sbitmap_alloc (last_basic_block_for_fn (cfun));
       bitmap_ones (blocks);
       find_many_sub_basic_blocks (blocks);
       sbitmap_free (blocks);
diff --git a/gcc/resource.c b/gcc/resource.c
index 3106a09..861d969 100644
--- a/gcc/resource.c
+++ b/gcc/resource.c
@@ -1216,7 +1216,7 @@  init_resource_info (rtx epilogue_insn)
 
   /* Allocate and initialize the tables used by mark_target_live_regs.  */
   target_hash_table = XCNEWVEC (struct target_info *, TARGET_HASH_PRIME);
-  bb_ticks = XCNEWVEC (int, last_basic_block);
+  bb_ticks = XCNEWVEC (int, last_basic_block_for_fn (cfun));
 
   /* Set the BLOCK_FOR_INSN of each label that starts a basic block.  */
   FOR_EACH_BB (bb)
diff --git a/gcc/sched-rgn.c b/gcc/sched-rgn.c
index 2d8b939..a85ee5b 100644
--- a/gcc/sched-rgn.c
+++ b/gcc/sched-rgn.c
@@ -642,23 +642,23 @@  haifa_find_rgns (void)
      STACK, SP and DFS_NR are only used during the first traversal.  */
 
   /* Allocate and initialize variables for the first traversal.  */
-  max_hdr = XNEWVEC (int, last_basic_block);
-  dfs_nr = XCNEWVEC (int, last_basic_block);
+  max_hdr = XNEWVEC (int, last_basic_block_for_fn (cfun));
+  dfs_nr = XCNEWVEC (int, last_basic_block_for_fn (cfun));
   stack = XNEWVEC (edge_iterator, n_edges_for_fn (cfun));
 
-  inner = sbitmap_alloc (last_basic_block);
+  inner = sbitmap_alloc (last_basic_block_for_fn (cfun));
   bitmap_ones (inner);
 
-  header = sbitmap_alloc (last_basic_block);
+  header = sbitmap_alloc (last_basic_block_for_fn (cfun));
   bitmap_clear (header);
 
-  in_queue = sbitmap_alloc (last_basic_block);
+  in_queue = sbitmap_alloc (last_basic_block_for_fn (cfun));
   bitmap_clear (in_queue);
 
-  in_stack = sbitmap_alloc (last_basic_block);
+  in_stack = sbitmap_alloc (last_basic_block_for_fn (cfun));
   bitmap_clear (in_stack);
 
-  for (i = 0; i < last_basic_block; i++)
+  for (i = 0; i < last_basic_block_for_fn (cfun); i++)
     max_hdr[i] = -1;
 
   #define EDGE_PASSED(E) (ei_end_p ((E)) || ei_edge ((E))->aux)
@@ -799,8 +799,9 @@  haifa_find_rgns (void)
       extend_regions_p = PARAM_VALUE (PARAM_MAX_SCHED_EXTEND_REGIONS_ITERS) > 0;
       if (extend_regions_p)
         {
-          degree1 = XNEWVEC (int, last_basic_block);
-          extended_rgn_header = sbitmap_alloc (last_basic_block);
+          degree1 = XNEWVEC (int, last_basic_block_for_fn (cfun));
+          extended_rgn_header =
+	    sbitmap_alloc (last_basic_block_for_fn (cfun));
           bitmap_clear (extended_rgn_header);
 	}
 
@@ -854,7 +855,8 @@  haifa_find_rgns (void)
                 /* We save degree in case when we meet a too_large region
 		   and cancel it.  We need a correct degree later when
                    calling extend_rgns.  */
-                memcpy (degree1, degree, last_basic_block * sizeof (int));
+                memcpy (degree1, degree,
+			last_basic_block_for_fn (cfun) * sizeof (int));
 
 	      /* Decrease degree of all I's successors for topological
 		 ordering.  */
@@ -1161,9 +1163,9 @@  extend_rgns (int *degree, int *idxp, sbitmap header, int *loop_hdr)
 
   max_iter = PARAM_VALUE (PARAM_MAX_SCHED_EXTEND_REGIONS_ITERS);
 
-  max_hdr = XNEWVEC (int, last_basic_block);
+  max_hdr = XNEWVEC (int, last_basic_block_for_fn (cfun));
 
-  order = XNEWVEC (int, last_basic_block);
+  order = XNEWVEC (int, last_basic_block_for_fn (cfun));
   post_order_compute (order, false, false);
 
   for (i = nblocks - 1; i >= 0; i--)
@@ -1514,7 +1516,7 @@  compute_trg_info (int trg)
   sp->is_speculative = 0;
   sp->src_prob = REG_BR_PROB_BASE;
 
-  visited = sbitmap_alloc (last_basic_block);
+  visited = sbitmap_alloc (last_basic_block_for_fn (cfun));
 
   for (i = trg + 1; i < current_nr_blocks; i++)
     {
@@ -2936,11 +2938,11 @@  static void
 realloc_bb_state_array (int saved_last_basic_block)
 {
   char *old_bb_state_array = bb_state_array;
-  size_t lbb = (size_t) last_basic_block;
+  size_t lbb = (size_t) last_basic_block_for_fn (cfun);
   size_t slbb = (size_t) saved_last_basic_block;
 
   /* Nothing to do if nothing changed since the last time this was called.  */
-  if (saved_last_basic_block == last_basic_block)
+  if (saved_last_basic_block == last_basic_block_for_fn (cfun))
     return;
 
   /* The selective scheduler doesn't use the state arrays.  */
@@ -3060,7 +3062,7 @@  schedule_region (int rgn)
       if (dbg_cnt (sched_block))
         {
 	  edge f;
-	  int saved_last_basic_block = last_basic_block;
+	  int saved_last_basic_block = last_basic_block_for_fn (cfun);
 
 	  schedule_block (&curr_bb, bb_state[first_bb->index]);
 	  gcc_assert (EBB_FIRST_BB (bb) == first_bb);
@@ -3430,9 +3432,12 @@  void
 extend_regions (void)
 {
   rgn_table = XRESIZEVEC (region, rgn_table, n_basic_blocks_for_fn (cfun));
-  rgn_bb_table = XRESIZEVEC (int, rgn_bb_table, n_basic_blocks_for_fn (cfun));
-  block_to_bb = XRESIZEVEC (int, block_to_bb, last_basic_block);
-  containing_rgn = XRESIZEVEC (int, containing_rgn, last_basic_block);
+  rgn_bb_table = XRESIZEVEC (int, rgn_bb_table,
+			     n_basic_blocks_for_fn (cfun));
+  block_to_bb = XRESIZEVEC (int, block_to_bb,
+			    last_basic_block_for_fn (cfun));
+  containing_rgn = XRESIZEVEC (int, containing_rgn,
+			       last_basic_block_for_fn (cfun));
 }
 
 void
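
A note on sizing, prompted by the extend_regions hunk above (not part of the patch): rgn_bb_table is sized with n_basic_blocks_for_fn (cfun) while block_to_bb and containing_rgn are sized with last_basic_block_for_fn (cfun).  That split recurs throughout the patch because basic-block indices can contain holes once blocks have been removed, so the highest index in use may exceed the block count, and any array indexed by bb->index has to use the index bound.  A minimal sketch of the idiom, with illustrative names that are not taken from the patch:

  /* Per-block scratch array indexed by bb->index; sized by the index bound,
     not by the block count, because indices can be sparse.  */
  int *per_bb = XCNEWVEC (int, last_basic_block_for_fn (cfun));
  basic_block bb;
  FOR_EACH_BB_FN (bb, cfun)
    per_bb[bb->index]++;
  XDELETEVEC (per_bb);

Every block satisfies bb->index < last_basic_block_for_fn (cfun), so the indexing above stays in bounds.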
diff --git a/gcc/sel-sched-ir.c b/gcc/sel-sched-ir.c
index da84cce..f7cc9ec 100644
--- a/gcc/sel-sched-ir.c
+++ b/gcc/sel-sched-ir.c
@@ -4095,14 +4095,14 @@  get_seqno_by_preds (rtx insn)
 void
 sel_extend_global_bb_info (void)
 {
-  sel_global_bb_info.safe_grow_cleared (last_basic_block);
+  sel_global_bb_info.safe_grow_cleared (last_basic_block_for_fn (cfun));
 }
 
 /* Extend region-scope data structures for basic blocks.  */
 static void
 extend_region_bb_info (void)
 {
-  sel_region_bb_info.safe_grow_cleared (last_basic_block);
+  sel_region_bb_info.safe_grow_cleared (last_basic_block_for_fn (cfun));
 }
 
 /* Extend all data structures to fit for all basic blocks.  */
@@ -4905,9 +4905,10 @@  recompute_rev_top_order (void)
   int *postorder;
   int n_blocks, i;
 
-  if (!rev_top_order_index || rev_top_order_index_len < last_basic_block)
+  if (!rev_top_order_index
+      || rev_top_order_index_len < last_basic_block_for_fn (cfun))
     {
-      rev_top_order_index_len = last_basic_block;
+      rev_top_order_index_len = last_basic_block_for_fn (cfun);
       rev_top_order_index = XRESIZEVEC (int, rev_top_order_index,
                                         rev_top_order_index_len);
     }
@@ -6079,7 +6080,7 @@  sel_init_pipelining (void)
 		       | LOOPS_HAVE_MARKED_IRREDUCIBLE_REGIONS);
   current_loop_nest = NULL;
 
-  bbs_in_loop_rgns = sbitmap_alloc (last_basic_block);
+  bbs_in_loop_rgns = sbitmap_alloc (last_basic_block_for_fn (cfun));
   bitmap_clear (bbs_in_loop_rgns);
 
   recompute_rev_top_order ();
@@ -6145,13 +6146,13 @@  make_regions_from_the_rest (void)
   /* LOOP_HDR[I] == -1 if I-th bb doesn't belong to any loop,
      LOOP_HDR[I] == LOOP_HDR[J] iff basic blocks I and J reside within the same
      loop.  */
-  loop_hdr = XNEWVEC (int, last_basic_block);
-  degree = XCNEWVEC (int, last_basic_block);
+  loop_hdr = XNEWVEC (int, last_basic_block_for_fn (cfun));
+  degree = XCNEWVEC (int, last_basic_block_for_fn (cfun));
 
 
   /* For each basic block that belongs to some loop assign the number
      of innermost loop it belongs to.  */
-  for (i = 0; i < last_basic_block; i++)
+  for (i = 0; i < last_basic_block_for_fn (cfun); i++)
     loop_hdr[i] = -1;
 
   FOR_EACH_BB (bb)
diff --git a/gcc/store-motion.c b/gcc/store-motion.c
index 378d6c7..808b0a7 100644
--- a/gcc/store-motion.c
+++ b/gcc/store-motion.c
@@ -844,7 +844,7 @@  remove_reachable_equiv_notes (basic_block bb, struct st_expr *smexpr)
   edge_iterator *stack, ei;
   int sp;
   edge act;
-  sbitmap visited = sbitmap_alloc (last_basic_block);
+  sbitmap visited = sbitmap_alloc (last_basic_block_for_fn (cfun));
   rtx last, insn, note;
   rtx mem = smexpr->pattern;
 
@@ -1016,11 +1016,13 @@  build_store_vectors (void)
 
   /* Build the gen_vector. This is any store in the table which is not killed
      by aliasing later in its block.  */
-  st_avloc = sbitmap_vector_alloc (last_basic_block, num_stores);
-  bitmap_vector_clear (st_avloc, last_basic_block);
+  st_avloc = sbitmap_vector_alloc (last_basic_block_for_fn (cfun),
+				   num_stores);
+  bitmap_vector_clear (st_avloc, last_basic_block_for_fn (cfun));
 
-  st_antloc = sbitmap_vector_alloc (last_basic_block, num_stores);
-  bitmap_vector_clear (st_antloc, last_basic_block);
+  st_antloc = sbitmap_vector_alloc (last_basic_block_for_fn (cfun),
+				    num_stores);
+  bitmap_vector_clear (st_antloc, last_basic_block_for_fn (cfun));
 
   for (ptr = first_st_expr (); ptr != NULL; ptr = next_st_expr (ptr))
     {
@@ -1052,11 +1054,11 @@  build_store_vectors (void)
 	}
     }
 
-  st_kill = sbitmap_vector_alloc (last_basic_block, num_stores);
-  bitmap_vector_clear (st_kill, last_basic_block);
+  st_kill = sbitmap_vector_alloc (last_basic_block_for_fn (cfun), num_stores);
+  bitmap_vector_clear (st_kill, last_basic_block_for_fn (cfun));
 
-  st_transp = sbitmap_vector_alloc (last_basic_block, num_stores);
-  bitmap_vector_clear (st_transp, last_basic_block);
+  st_transp = sbitmap_vector_alloc (last_basic_block_for_fn (cfun), num_stores);
+  bitmap_vector_clear (st_transp, last_basic_block_for_fn (cfun));
   regs_set_in_block = XNEWVEC (int, max_gcse_regno);
 
   FOR_EACH_BB (bb)
@@ -1095,10 +1097,14 @@  build_store_vectors (void)
 
   if (dump_file)
     {
-      dump_bitmap_vector (dump_file, "st_antloc", "", st_antloc, last_basic_block);
-      dump_bitmap_vector (dump_file, "st_kill", "", st_kill, last_basic_block);
-      dump_bitmap_vector (dump_file, "st_transp", "", st_transp, last_basic_block);
-      dump_bitmap_vector (dump_file, "st_avloc", "", st_avloc, last_basic_block);
+      dump_bitmap_vector (dump_file, "st_antloc", "", st_antloc,
+			  last_basic_block_for_fn (cfun));
+      dump_bitmap_vector (dump_file, "st_kill", "", st_kill,
+			  last_basic_block_for_fn (cfun));
+      dump_bitmap_vector (dump_file, "st_transp", "", st_transp,
+			  last_basic_block_for_fn (cfun));
+      dump_bitmap_vector (dump_file, "st_avloc", "", st_avloc,
+			  last_basic_block_for_fn (cfun));
     }
 }
 
diff --git a/gcc/tracer.c b/gcc/tracer.c
index 99689500..de6877a 100644
--- a/gcc/tracer.c
+++ b/gcc/tracer.c
@@ -230,9 +230,9 @@  find_trace (basic_block bb, basic_block *trace)
 static bool
 tail_duplicate (void)
 {
-  fibnode_t *blocks = XCNEWVEC (fibnode_t, last_basic_block);
+  fibnode_t *blocks = XCNEWVEC (fibnode_t, last_basic_block_for_fn (cfun));
   basic_block *trace = XNEWVEC (basic_block, n_basic_blocks_for_fn (cfun));
-  int *counts = XNEWVEC (int, last_basic_block);
+  int *counts = XNEWVEC (int, last_basic_block_for_fn (cfun));
   int ninsns = 0, nduplicated = 0;
   gcov_type weighted_insns = 0, traced_insns = 0;
   fibheap_t heap = fibheap_new ();
@@ -243,7 +243,7 @@  tail_duplicate (void)
 
   /* Create an oversized sbitmap to reduce the chance that we need to
      resize it.  */
-  bb_seen = sbitmap_alloc (last_basic_block * 2);
+  bb_seen = sbitmap_alloc (last_basic_block_for_fn (cfun) * 2);
   bitmap_clear (bb_seen);
   initialize_original_copy_tables ();
 
diff --git a/gcc/trans-mem.c b/gcc/trans-mem.c
index 39715b8..2a6597d 100644
--- a/gcc/trans-mem.c
+++ b/gcc/trans-mem.c
@@ -1956,7 +1956,7 @@  tm_region_init (struct tm_region *region)
   /* We could store this information in bb->aux, but we may get called
      through get_all_tm_blocks() from another pass that may be already
      using bb->aux.  */
-  bb_regions.safe_grow_cleared (last_basic_block);
+  bb_regions.safe_grow_cleared (last_basic_block_for_fn (cfun));
 
   queue.safe_push (bb);
   bb_regions[bb->index] = region;
@@ -2628,7 +2628,7 @@  static vec<tm_region_p>
 get_bb_regions_instrumented (bool traverse_clones,
 			     bool include_uninstrumented_p)
 {
-  unsigned n = last_basic_block;
+  unsigned n = last_basic_block_for_fn (cfun);
   struct bb2reg_stuff stuff;
   vec<tm_region_p> ret;
 
diff --git a/gcc/tree-cfg.c b/gcc/tree-cfg.c
index 57d6487..ec365b5 100644
--- a/gcc/tree-cfg.c
+++ b/gcc/tree-cfg.c
@@ -597,7 +597,7 @@  create_bb (void *h, void *e, basic_block after)
      not have to clear the newly allocated basic block here.  */
   bb = alloc_block ();
 
-  bb->index = last_basic_block;
+  bb->index = last_basic_block_for_fn (cfun);
   bb->flags = BB_NEW;
   set_bb_seq (bb, h ? (gimple_seq) h : NULL);
 
@@ -605,17 +605,20 @@  create_bb (void *h, void *e, basic_block after)
   link_block (bb, after);
 
   /* Grow the basic block array if needed.  */
-  if ((size_t) last_basic_block == basic_block_info_for_fn (cfun)->length ())
+  if ((size_t) last_basic_block_for_fn (cfun)
+      == basic_block_info_for_fn (cfun)->length ())
     {
-      size_t new_size = last_basic_block + (last_basic_block + 3) / 4;
+      size_t new_size =
+	(last_basic_block_for_fn (cfun)
+	 + (last_basic_block_for_fn (cfun) + 3) / 4);
       vec_safe_grow_cleared (basic_block_info_for_fn (cfun), new_size);
     }
 
   /* Add the newly created block to the array.  */
-  SET_BASIC_BLOCK_FOR_FN (cfun, last_basic_block, bb);
+  SET_BASIC_BLOCK_FOR_FN (cfun, last_basic_block_for_fn (cfun), bb);
 
   n_basic_blocks_for_fn (cfun)++;
-  last_basic_block++;
+  last_basic_block_for_fn (cfun)++;
 
   return bb;
 }
@@ -1228,7 +1231,7 @@  void
 cleanup_dead_labels (void)
 {
   basic_block bb;
-  label_for_bb = XCNEWVEC (struct label_record, last_basic_block);
+  label_for_bb = XCNEWVEC (struct label_record, last_basic_block_for_fn (cfun));
 
   /* Find a suitable label for each block.  We use the first user-defined
      label if there is one, or otherwise just the first label we see.  */
@@ -2116,7 +2119,7 @@  gimple_dump_cfg (FILE *file, int flags)
       dump_function_header (file, current_function_decl, flags);
       fprintf (file, ";; \n%d basic blocks, %d edges, last basic block %d.\n\n",
 	       n_basic_blocks_for_fn (cfun), n_edges_for_fn (cfun),
-	       last_basic_block);
+	       last_basic_block_for_fn (cfun));
 
       brief_dump_cfg (file, flags | TDF_COMMENT);
       fprintf (file, "\n");
@@ -7430,7 +7433,7 @@  gimple_flow_call_edges_add (sbitmap blocks)
 {
   int i;
   int blocks_split = 0;
-  int last_bb = last_basic_block;
+  int last_bb = last_basic_block_for_fn (cfun);
   bool check_last_block = false;
 
   if (n_basic_blocks_for_fn (cfun) == NUM_FIXED_BLOCKS)
diff --git a/gcc/tree-cfgcleanup.c b/gcc/tree-cfgcleanup.c
index 76d9749..50b4a68 100644
--- a/gcc/tree-cfgcleanup.c
+++ b/gcc/tree-cfgcleanup.c
@@ -585,7 +585,7 @@  split_bbs_on_noreturn_calls (void)
 	   BB is present in the cfg.  */
 	if (bb == NULL
 	    || bb->index < NUM_FIXED_BLOCKS
-	    || bb->index >= last_basic_block
+	    || bb->index >= last_basic_block_for_fn (cfun)
 	    || BASIC_BLOCK_FOR_FN (cfun, bb->index) != bb
 	    || !gimple_call_noreturn_p (stmt))
 	  continue;
@@ -642,7 +642,7 @@  cleanup_tree_cfg_1 (void)
 
   /* Start by iterating over all basic blocks.  We cannot use FOR_EACH_BB,
      since the basic blocks may get removed.  */
-  n = last_basic_block;
+  n = last_basic_block_for_fn (cfun);
   for (i = NUM_FIXED_BLOCKS; i < n; i++)
     {
       bb = BASIC_BLOCK_FOR_FN (cfun, i);
diff --git a/gcc/tree-complex.c b/gcc/tree-complex.c
index 80a978e..ff5ccab 100644
--- a/gcc/tree-complex.c
+++ b/gcc/tree-complex.c
@@ -1636,7 +1636,7 @@  tree_lower_complex (void)
   update_parameter_components ();
 
   /* ??? Ideally we'd traverse the blocks in breadth-first order.  */
-  old_last_basic_block = last_basic_block;
+  old_last_basic_block = last_basic_block_for_fn (cfun);
   FOR_EACH_BB (bb)
     {
       if (bb->index >= old_last_basic_block)
diff --git a/gcc/tree-inline.c b/gcc/tree-inline.c
index fd7eedb..ed06cb9 100644
--- a/gcc/tree-inline.c
+++ b/gcc/tree-inline.c
@@ -2488,7 +2488,7 @@  copy_cfg_body (copy_body_data * id, gcov_type count, int frequency_scale,
 	new_bb->loop_father = entry_block_map->loop_father;
       }
 
-  last = last_basic_block;
+  last = last_basic_block_for_fn (cfun);
 
   /* Now that we've duplicated the blocks, duplicate their edges.  */
   bool can_make_abormal_goto
@@ -2544,7 +2544,7 @@  copy_cfg_body (copy_body_data * id, gcov_type count, int frequency_scale,
 
   /* Zero out AUX fields of newly created block during EH edge
      insertion. */
-  for (; last < last_basic_block; last++)
+  for (; last < last_basic_block_for_fn (cfun); last++)
     {
       if (need_debug_cleanup)
 	maybe_move_debug_stmts_to_successors (id,
diff --git a/gcc/tree-into-ssa.c b/gcc/tree-into-ssa.c
index ac10440..b6d3dd7 100644
--- a/gcc/tree-into-ssa.c
+++ b/gcc/tree-into-ssa.c
@@ -964,7 +964,7 @@  mark_phi_for_rewrite (basic_block bb, gimple phi)
 
   bitmap_set_bit (blocks_with_phis_to_rewrite, idx);
 
-  n = (unsigned) last_basic_block + 1;
+  n = (unsigned) last_basic_block_for_fn (cfun) + 1;
   if (phis_to_rewrite.length () < n)
     phis_to_rewrite.safe_grow_cleared (n);
 
@@ -2315,11 +2315,11 @@  rewrite_into_ssa (void)
   /* Initialize the set of interesting blocks.  The callback
      mark_def_sites will add to this set those blocks that the renamer
      should process.  */
-  interesting_blocks = sbitmap_alloc (last_basic_block);
+  interesting_blocks = sbitmap_alloc (last_basic_block_for_fn (cfun));
   bitmap_clear (interesting_blocks);
 
   /* Initialize dominance frontier.  */
-  dfs = XNEWVEC (bitmap_head, last_basic_block);
+  dfs = XNEWVEC (bitmap_head, last_basic_block_for_fn (cfun));
   FOR_EACH_BB (bb)
     bitmap_initialize (&dfs[bb->index], &bitmap_default_obstack);
 
@@ -2635,7 +2635,7 @@  prepare_def_site_for (tree name, bool insert_phi_p)
   bb = gimple_bb (stmt);
   if (bb)
     {
-      gcc_checking_assert (bb->index < last_basic_block);
+      gcc_checking_assert (bb->index < last_basic_block_for_fn (cfun));
       mark_block_for_update (bb);
       mark_def_interesting (name, stmt, bb, insert_phi_p);
     }
@@ -3185,7 +3185,7 @@  update_ssa (unsigned update_flags)
 
   blocks_with_phis_to_rewrite = BITMAP_ALLOC (NULL);
   if (!phis_to_rewrite.exists ())
-    phis_to_rewrite.create (last_basic_block + 1);
+    phis_to_rewrite.create (last_basic_block_for_fn (cfun) + 1);
   blocks_to_update = BITMAP_ALLOC (NULL);
 
   /* Ensure that the dominance information is up-to-date.  */
@@ -3269,7 +3269,7 @@  update_ssa (unsigned update_flags)
 
       /* If the caller requested PHI nodes to be added, compute
 	 dominance frontiers.  */
-      dfs = XNEWVEC (bitmap_head, last_basic_block);
+      dfs = XNEWVEC (bitmap_head, last_basic_block_for_fn (cfun));
       FOR_EACH_BB (bb)
 	bitmap_initialize (&dfs[bb->index], &bitmap_default_obstack);
       compute_dominance_frontiers (dfs);
@@ -3317,7 +3317,7 @@  update_ssa (unsigned update_flags)
     get_var_info (sym)->info.current_def = NULL_TREE;
 
   /* Now start the renaming process at START_BB.  */
-  interesting_blocks = sbitmap_alloc (last_basic_block);
+  interesting_blocks = sbitmap_alloc (last_basic_block_for_fn (cfun));
   bitmap_clear (interesting_blocks);
   EXECUTE_IF_SET_IN_BITMAP (blocks_to_update, 0, i, bi)
     bitmap_set_bit (interesting_blocks, i);
@@ -3340,9 +3340,10 @@  update_ssa (unsigned update_flags)
       c = 0;
       EXECUTE_IF_SET_IN_BITMAP (blocks_to_update, 0, i, bi)
 	c++;
-      fprintf (dump_file, "Number of blocks in CFG: %d\n", last_basic_block);
+      fprintf (dump_file, "Number of blocks in CFG: %d\n",
+	       last_basic_block_for_fn (cfun));
       fprintf (dump_file, "Number of blocks to update: %d (%3.0f%%)\n",
-	       c, PERCENT (c, last_basic_block));
+	       c, PERCENT (c, last_basic_block_for_fn (cfun)));
 
       if (dump_flags & TDF_DETAILS)
 	{
diff --git a/gcc/tree-ssa-dce.c b/gcc/tree-ssa-dce.c
index 8fc6fce..701dd44 100644
--- a/gcc/tree-ssa-dce.c
+++ b/gcc/tree-ssa-dce.c
@@ -1364,9 +1364,9 @@  tree_dce_init (bool aggressive)
 
   if (aggressive)
     {
-      last_stmt_necessary = sbitmap_alloc (last_basic_block);
+      last_stmt_necessary = sbitmap_alloc (last_basic_block_for_fn (cfun));
       bitmap_clear (last_stmt_necessary);
-      bb_contains_live_stmts = sbitmap_alloc (last_basic_block);
+      bb_contains_live_stmts = sbitmap_alloc (last_basic_block_for_fn (cfun));
       bitmap_clear (bb_contains_live_stmts);
     }
 
@@ -1432,7 +1432,8 @@  perform_tree_ssa_dce (bool aggressive)
       calculate_dominance_info (CDI_POST_DOMINATORS);
       cd = new control_dependences (create_edge_list ());
 
-      visited_control_parents = sbitmap_alloc (last_basic_block);
+      visited_control_parents =
+	sbitmap_alloc (last_basic_block_for_fn (cfun));
       bitmap_clear (visited_control_parents);
 
       mark_dfs_back_edges ();
diff --git a/gcc/tree-ssa-dom.c b/gcc/tree-ssa-dom.c
index ebdf511..6cf60be 100644
--- a/gcc/tree-ssa-dom.c
+++ b/gcc/tree-ssa-dom.c
@@ -1793,7 +1793,7 @@  record_edge_info (basic_block bb)
 	    {
 	      int i;
               int n_labels = gimple_switch_num_labels (stmt);
-	      tree *info = XCNEWVEC (tree, last_basic_block);
+	      tree *info = XCNEWVEC (tree, last_basic_block_for_fn (cfun));
 	      edge e;
 	      edge_iterator ei;
 
diff --git a/gcc/tree-ssa-live.c b/gcc/tree-ssa-live.c
index 5d1a3b9..6ccf2fb 100644
--- a/gcc/tree-ssa-live.c
+++ b/gcc/tree-ssa-live.c
@@ -960,17 +960,17 @@  new_tree_live_info (var_map map)
 
   live = XNEW (struct tree_live_info_d);
   live->map = map;
-  live->num_blocks = last_basic_block;
+  live->num_blocks = last_basic_block_for_fn (cfun);
 
-  live->livein = XNEWVEC (bitmap_head, last_basic_block);
+  live->livein = XNEWVEC (bitmap_head, last_basic_block_for_fn (cfun));
   FOR_EACH_BB (bb)
     bitmap_initialize (&live->livein[bb->index], &liveness_bitmap_obstack);
 
-  live->liveout = XNEWVEC (bitmap_head, last_basic_block);
+  live->liveout = XNEWVEC (bitmap_head, last_basic_block_for_fn (cfun));
   FOR_EACH_BB (bb)
     bitmap_initialize (&live->liveout[bb->index], &liveness_bitmap_obstack);
 
-  live->work_stack = XNEWVEC (int, last_basic_block);
+  live->work_stack = XNEWVEC (int, last_basic_block_for_fn (cfun));
   live->stack_top = live->work_stack;
 
   live->global = BITMAP_ALLOC (&liveness_bitmap_obstack);
@@ -1043,7 +1043,7 @@  live_worklist (tree_live_info_p live)
 {
   unsigned b;
   basic_block bb;
-  sbitmap visited = sbitmap_alloc (last_basic_block + 1);
+  sbitmap visited = sbitmap_alloc (last_basic_block_for_fn (cfun) + 1);
   bitmap tmp = BITMAP_ALLOC (&liveness_bitmap_obstack);
 
   bitmap_clear (visited);
diff --git a/gcc/tree-ssa-loop-im.c b/gcc/tree-ssa-loop-im.c
index 6292576..3aaf2b2 100644
--- a/gcc/tree-ssa-loop-im.c
+++ b/gcc/tree-ssa-loop-im.c
@@ -2401,7 +2401,7 @@  fill_always_executed_in_1 (struct loop *loop, sbitmap contains_call)
 static void
 fill_always_executed_in (void)
 {
-  sbitmap contains_call = sbitmap_alloc (last_basic_block);
+  sbitmap contains_call = sbitmap_alloc (last_basic_block_for_fn (cfun));
   basic_block bb;
   struct loop *loop;
 
diff --git a/gcc/tree-ssa-loop-manip.c b/gcc/tree-ssa-loop-manip.c
index de667ad..76d5958 100644
--- a/gcc/tree-ssa-loop-manip.c
+++ b/gcc/tree-ssa-loop-manip.c
@@ -728,13 +728,13 @@  copy_phi_node_args (unsigned first_new_block)
 {
   unsigned i;
 
-  for (i = first_new_block; i < (unsigned) last_basic_block; i++)
+  for (i = first_new_block; i < (unsigned) last_basic_block_for_fn (cfun); i++)
     BASIC_BLOCK_FOR_FN (cfun, i)->flags |= BB_DUPLICATED;
 
-  for (i = first_new_block; i < (unsigned) last_basic_block; i++)
+  for (i = first_new_block; i < (unsigned) last_basic_block_for_fn (cfun); i++)
     add_phi_args_after_copy_bb (BASIC_BLOCK_FOR_FN (cfun, i));
 
-  for (i = first_new_block; i < (unsigned) last_basic_block; i++)
+  for (i = first_new_block; i < (unsigned) last_basic_block_for_fn (cfun); i++)
     BASIC_BLOCK_FOR_FN (cfun, i)->flags &= ~BB_DUPLICATED;
 }
 
@@ -772,7 +772,7 @@  gimple_duplicate_loop_to_header_edge (struct loop *loop, edge e,
     verify_loop_closed_ssa (true);
 #endif
 
-  first_new_block = last_basic_block;
+  first_new_block = last_basic_block_for_fn (cfun);
   if (!duplicate_loop_to_header_edge (loop, e, ndupl, wont_exit,
 				      orig, to_remove, flags))
     return false;
diff --git a/gcc/tree-ssa-pre.c b/gcc/tree-ssa-pre.c
index dcce38a..c1c5b4f 100644
--- a/gcc/tree-ssa-pre.c
+++ b/gcc/tree-ssa-pre.c
@@ -2442,7 +2442,7 @@  compute_antic (void)
 
   /* If any predecessor edges are abnormal, we punt, so antic_in is empty.
      We pre-build the map of blocks with incoming abnormal edges here.  */
-  has_abnormal_preds = sbitmap_alloc (last_basic_block);
+  has_abnormal_preds = sbitmap_alloc (last_basic_block_for_fn (cfun));
   bitmap_clear (has_abnormal_preds);
 
   FOR_ALL_BB (block)
@@ -2471,7 +2471,7 @@  compute_antic (void)
   /* At the exit block we anticipate nothing.  */
   BB_VISITED (EXIT_BLOCK_PTR_FOR_FN (cfun)) = 1;
 
-  changed_blocks = sbitmap_alloc (last_basic_block + 1);
+  changed_blocks = sbitmap_alloc (last_basic_block_for_fn (cfun) + 1);
   bitmap_ones (changed_blocks);
   while (changed)
     {
diff --git a/gcc/tree-ssa-propagate.c b/gcc/tree-ssa-propagate.c
index 783b651..55ae68b 100644
--- a/gcc/tree-ssa-propagate.c
+++ b/gcc/tree-ssa-propagate.c
@@ -495,10 +495,10 @@  ssa_prop_init (void)
   vec_alloc (interesting_ssa_edges, 20);
   vec_alloc (varying_ssa_edges, 20);
 
-  executable_blocks = sbitmap_alloc (last_basic_block);
+  executable_blocks = sbitmap_alloc (last_basic_block_for_fn (cfun));
   bitmap_clear (executable_blocks);
 
-  bb_in_list = sbitmap_alloc (last_basic_block);
+  bb_in_list = sbitmap_alloc (last_basic_block_for_fn (cfun));
   bitmap_clear (bb_in_list);
 
   if (dump_file && (dump_flags & TDF_DETAILS))
diff --git a/gcc/tree-ssa-reassoc.c b/gcc/tree-ssa-reassoc.c
index 9108983..1392879 100644
--- a/gcc/tree-ssa-reassoc.c
+++ b/gcc/tree-ssa-reassoc.c
@@ -4564,7 +4564,7 @@  init_reassoc (void)
   /* Reverse RPO (Reverse Post Order) will give us something where
      deeper loops come later.  */
   pre_and_rev_post_order_compute (NULL, bbs, false);
-  bb_rank = XCNEWVEC (long, last_basic_block);
+  bb_rank = XCNEWVEC (long, last_basic_block_for_fn (cfun));
   operand_rank = pointer_map_create ();
 
   /* Give each default definition a distinct rank.  This includes
diff --git a/gcc/tree-ssa-sccvn.c b/gcc/tree-ssa-sccvn.c
index e98652c..c271778 100644
--- a/gcc/tree-ssa-sccvn.c
+++ b/gcc/tree-ssa-sccvn.c
@@ -3984,7 +3984,7 @@  init_scc_vn (void)
 
   shared_lookup_phiargs.create (0);
   shared_lookup_references.create (0);
-  rpo_numbers = XNEWVEC (int, last_basic_block);
+  rpo_numbers = XNEWVEC (int, last_basic_block_for_fn (cfun));
   rpo_numbers_temp =
     XNEWVEC (int, n_basic_blocks_for_fn (cfun) - NUM_FIXED_BLOCKS);
   pre_and_rev_post_order_compute (NULL, rpo_numbers_temp, false);
diff --git a/gcc/tree-ssa-tail-merge.c b/gcc/tree-ssa-tail-merge.c
index fbcbf78..a0eac67 100644
--- a/gcc/tree-ssa-tail-merge.c
+++ b/gcc/tree-ssa-tail-merge.c
@@ -771,7 +771,7 @@  init_worklist (void)
 {
   alloc_aux_for_blocks (sizeof (struct aux_bb_info));
   same_succ_htab.create (n_basic_blocks_for_fn (cfun));
-  same_succ_edge_flags = XCNEWVEC (int, last_basic_block);
+  same_succ_edge_flags = XCNEWVEC (int, last_basic_block_for_fn (cfun));
   deleted_bbs = BITMAP_ALLOC (NULL);
   deleted_bb_preds = BITMAP_ALLOC (NULL);
   worklist.create (n_basic_blocks_for_fn (cfun));
diff --git a/gcc/tree-ssa-uncprop.c b/gcc/tree-ssa-uncprop.c
index 92652de..d38e0dd 100644
--- a/gcc/tree-ssa-uncprop.c
+++ b/gcc/tree-ssa-uncprop.c
@@ -179,7 +179,7 @@  associate_equivalences_with_edges (void)
 	      && !SSA_NAME_OCCURS_IN_ABNORMAL_PHI (cond))
 	    {
 	      int i, n_labels = gimple_switch_num_labels (stmt);
-	      tree *info = XCNEWVEC (tree, last_basic_block);
+	      tree *info = XCNEWVEC (tree, last_basic_block_for_fn (cfun));
 
 	      /* Walk over the case label vector.  Record blocks
 		 which are reached by a single case label which represents
diff --git a/gcc/tree-stdarg.c b/gcc/tree-stdarg.c
index 5a22cfd..8b168e0 100644
--- a/gcc/tree-stdarg.c
+++ b/gcc/tree-stdarg.c
@@ -72,7 +72,7 @@  reachable_at_most_once (basic_block va_arg_bb, basic_block va_start_bb)
   if (! dominated_by_p (CDI_DOMINATORS, va_arg_bb, va_start_bb))
     return false;
 
-  visited = sbitmap_alloc (last_basic_block);
+  visited = sbitmap_alloc (last_basic_block_for_fn (cfun));
   bitmap_clear (visited);
   ret = true;
 
diff --git a/gcc/tree-vrp.c b/gcc/tree-vrp.c
index 785e72f..06b6259 100644
--- a/gcc/tree-vrp.c
+++ b/gcc/tree-vrp.c
@@ -5934,13 +5934,13 @@  find_assert_locations_1 (basic_block bb, sbitmap live)
 static bool
 find_assert_locations (void)
 {
-  int *rpo = XNEWVEC (int, last_basic_block);
-  int *bb_rpo = XNEWVEC (int, last_basic_block);
-  int *last_rpo = XCNEWVEC (int, last_basic_block);
+  int *rpo = XNEWVEC (int, last_basic_block_for_fn (cfun));
+  int *bb_rpo = XNEWVEC (int, last_basic_block_for_fn (cfun));
+  int *last_rpo = XCNEWVEC (int, last_basic_block_for_fn (cfun));
   int rpo_cnt, i;
   bool need_asserts;
 
-  live = XCNEWVEC (sbitmap, last_basic_block);
+  live = XCNEWVEC (sbitmap, last_basic_block_for_fn (cfun));
   rpo_cnt = pre_and_rev_post_order_compute (NULL, rpo, false);
   for (i = 0; i < rpo_cnt; ++i)
     bb_rpo[rpo[i]] = i;
@@ -6034,7 +6034,7 @@  find_assert_locations (void)
   XDELETEVEC (rpo);
   XDELETEVEC (bb_rpo);
   XDELETEVEC (last_rpo);
-  for (i = 0; i < last_basic_block; ++i)
+  for (i = 0; i < last_basic_block_for_fn (cfun); ++i)
     if (live[i])
       sbitmap_free (live[i]);
   XDELETEVEC (live);
diff --git a/gcc/var-tracking.c b/gcc/var-tracking.c
index 7d4a983..5bd0799 100644
--- a/gcc/var-tracking.c
+++ b/gcc/var-tracking.c
@@ -6928,7 +6928,7 @@  vt_find_locations (void)
   /* Compute reverse completion order of depth first search of the CFG
      so that the data-flow runs faster.  */
   rc_order = XNEWVEC (int, n_basic_blocks_for_fn (cfun) - NUM_FIXED_BLOCKS);
-  bb_order = XNEWVEC (int, last_basic_block);
+  bb_order = XNEWVEC (int, last_basic_block_for_fn (cfun));
   pre_and_rev_post_order_compute (NULL, rc_order, false);
   for (i = 0; i < n_basic_blocks_for_fn (cfun) - NUM_FIXED_BLOCKS; i++)
     bb_order[rc_order[i]] = i;
@@ -6936,9 +6936,9 @@  vt_find_locations (void)
 
   worklist = fibheap_new ();
   pending = fibheap_new ();
-  visited = sbitmap_alloc (last_basic_block);
-  in_worklist = sbitmap_alloc (last_basic_block);
-  in_pending = sbitmap_alloc (last_basic_block);
+  visited = sbitmap_alloc (last_basic_block_for_fn (cfun));
+  in_worklist = sbitmap_alloc (last_basic_block_for_fn (cfun));
+  in_pending = sbitmap_alloc (last_basic_block_for_fn (cfun));
   bitmap_clear (in_worklist);
 
   FOR_EACH_BB (bb)