Lines Matching refs:loop_vinfo (GCC loop vectorizer, gcc/tree-vect-loop.c)

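Most of the hits below share one access pattern: fetch the loop structure and body blocks cached in the loop_vec_info, then map each GIMPLE statement back to its per-statement stmt_vec_info via loop_vinfo->lookup_stmt. The following is a minimal sketch of that pattern, not a line from the listing; it assumes the usual vectorizer context (tree-vectorizer.h and friends), and walk_loop_stmts is a hypothetical name used only for illustration.

    /* Hypothetical helper illustrating the LOOP_VINFO_LOOP / LOOP_VINFO_BBS /
       lookup_stmt pattern that recurs in the hits below.  */
    static void
    walk_loop_stmts (loop_vec_info loop_vinfo)
    {
      /* Loop structure and basic blocks recorded in the loop_vec_info.  */
      class loop *loop = LOOP_VINFO_LOOP (loop_vinfo);
      basic_block *bbs = LOOP_VINFO_BBS (loop_vinfo);

      for (unsigned i = 0; i < loop->num_nodes; i++)
        for (gimple_stmt_iterator si = gsi_start_bb (bbs[i]);
             !gsi_end_p (si); gsi_next (&si))
          {
            /* Map the statement to its vectorizer bookkeeping, as in the
               loop_vinfo->lookup_stmt hits below.  */
            stmt_vec_info stmt_info = loop_vinfo->lookup_stmt (gsi_stmt (si));
            (void) stmt_info;  /* Per-statement analysis would go here.  */
          }
    }
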
280 vect_determine_vectorization_factor (loop_vec_info loop_vinfo)  in vect_determine_vectorization_factor()  argument
282 class loop *loop = LOOP_VINFO_LOOP (loop_vinfo); in vect_determine_vectorization_factor()
283 basic_block *bbs = LOOP_VINFO_BBS (loop_vinfo); in vect_determine_vectorization_factor()
302 stmt_info = loop_vinfo->lookup_stmt (phi); in vect_determine_vectorization_factor()
320 vectype = get_vectype_for_scalar_type (loop_vinfo, scalar_type); in vect_determine_vectorization_factor()
346 stmt_info = loop_vinfo->lookup_stmt (gsi_stmt (si)); in vect_determine_vectorization_factor()
365 LOOP_VINFO_VECT_FACTOR (loop_vinfo) = vectorization_factor; in vect_determine_vectorization_factor()
447 loop_vec_info loop_vinfo = STMT_VINFO_LOOP_VINFO (stmt_info); in vect_inner_phi_in_double_reduction_p() local
451 if (stmt_vec_info def_info = loop_vinfo->lookup_def (USE_FROM_PTR (use_p))) in vect_inner_phi_in_double_reduction_p()
465 vect_analyze_scalar_cycles_1 (loop_vec_info loop_vinfo, class loop *loop) in vect_analyze_scalar_cycles_1() argument
483 stmt_vec_info stmt_vinfo = loop_vinfo->lookup_stmt (phi); in vect_analyze_scalar_cycles_1()
512 || (LOOP_VINFO_LOOP (loop_vinfo) != loop in vect_analyze_scalar_cycles_1()
543 = vect_is_simple_reduction (loop_vinfo, stmt_vinfo, &double_reduc, in vect_analyze_scalar_cycles_1()
560 if (loop != LOOP_VINFO_LOOP (loop_vinfo)) in vect_analyze_scalar_cycles_1()
580 LOOP_VINFO_REDUCTIONS (loop_vinfo).safe_push in vect_analyze_scalar_cycles_1()
615 vect_analyze_scalar_cycles (loop_vec_info loop_vinfo) in vect_analyze_scalar_cycles() argument
617 class loop *loop = LOOP_VINFO_LOOP (loop_vinfo); in vect_analyze_scalar_cycles()
619 vect_analyze_scalar_cycles_1 (loop_vinfo, loop); in vect_analyze_scalar_cycles()
631 vect_analyze_scalar_cycles_1 (loop_vinfo, loop->inner); in vect_analyze_scalar_cycles()
662 vect_fixup_scalar_cycles_with_patterns (loop_vec_info loop_vinfo) in vect_fixup_scalar_cycles_with_patterns() argument
667 FOR_EACH_VEC_ELT (LOOP_VINFO_REDUCTION_CHAINS (loop_vinfo), i, first) in vect_fixup_scalar_cycles_with_patterns()
685 LOOP_VINFO_REDUCTION_CHAINS (loop_vinfo)[i] in vect_fixup_scalar_cycles_with_patterns() local
909 cse_and_gimplify_to_preheader (loop_vec_info loop_vinfo, tree expr) in cse_and_gimplify_to_preheader() argument
915 if (! loop_vinfo->ivexpr_map) in cse_and_gimplify_to_preheader()
916 loop_vinfo->ivexpr_map = new hash_map<tree_operand_hash, tree>; in cse_and_gimplify_to_preheader()
917 tree &cached = loop_vinfo->ivexpr_map->get_or_insert (expr); in cse_and_gimplify_to_preheader()
925 edge e = loop_preheader_edge (LOOP_VINFO_LOOP (loop_vinfo)); in cse_and_gimplify_to_preheader()
936 can_produce_all_loop_masks_p (loop_vec_info loop_vinfo, tree cmp_type) in can_produce_all_loop_masks_p() argument
940 FOR_EACH_VEC_ELT (LOOP_VINFO_MASKS (loop_vinfo), i, rgm) in can_produce_all_loop_masks_p()
953 vect_get_max_nscalars_per_iter (loop_vec_info loop_vinfo) in vect_get_max_nscalars_per_iter() argument
958 FOR_EACH_VEC_ELT (LOOP_VINFO_MASKS (loop_vinfo), i, rgm) in vect_get_max_nscalars_per_iter()
968 vect_verify_full_masking (loop_vec_info loop_vinfo) in vect_verify_full_masking() argument
970 class loop *loop = LOOP_VINFO_LOOP (loop_vinfo); in vect_verify_full_masking()
973 = vect_get_max_nscalars_per_iter (loop_vinfo); in vect_verify_full_masking()
978 if (LOOP_VINFO_MASKS (loop_vinfo).is_empty ()) in vect_verify_full_masking()
983 tree ni_type = TREE_TYPE (LOOP_VINFO_NITERSM1 (loop_vinfo)); in vect_verify_full_masking()
1001 widest_int iv_limit = vect_iv_limit_for_full_masking (loop_vinfo); in vect_verify_full_masking()
1016 && can_produce_all_loop_masks_p (loop_vinfo, this_type)) in vect_verify_full_masking()
1054 LOOP_VINFO_MASK_COMPARE_TYPE (loop_vinfo) = cmp_type; in vect_verify_full_masking()
1055 LOOP_VINFO_MASK_IV_TYPE (loop_vinfo) = iv_type; in vect_verify_full_masking()
1061 vect_compute_single_scalar_iteration_cost (loop_vec_info loop_vinfo) in vect_compute_single_scalar_iteration_cost() argument
1063 class loop *loop = LOOP_VINFO_LOOP (loop_vinfo); in vect_compute_single_scalar_iteration_cost()
1064 basic_block *bbs = LOOP_VINFO_BBS (loop_vinfo); in vect_compute_single_scalar_iteration_cost()
1090 stmt_vec_info stmt_info = loop_vinfo->lookup_stmt (stmt); in vect_compute_single_scalar_iteration_cost()
1116 record_stmt_cost (&LOOP_VINFO_SCALAR_ITERATION_COST (loop_vinfo), in vect_compute_single_scalar_iteration_cost()
1125 FOR_EACH_VEC_ELT (LOOP_VINFO_SCALAR_ITERATION_COST (loop_vinfo), in vect_compute_single_scalar_iteration_cost()
1133 LOOP_VINFO_SINGLE_SCALAR_ITERATION_COST (loop_vinfo) = body_cost; in vect_compute_single_scalar_iteration_cost()
1311 loop_vec_info loop_vinfo = new _loop_vec_info (loop, shared); in vect_analyze_loop_form() local
1312 LOOP_VINFO_NITERSM1 (loop_vinfo) = number_of_iterationsm1; in vect_analyze_loop_form()
1313 LOOP_VINFO_NITERS (loop_vinfo) = number_of_iterations; in vect_analyze_loop_form()
1314 LOOP_VINFO_NITERS_UNCHANGED (loop_vinfo) = number_of_iterations; in vect_analyze_loop_form()
1326 LOOP_VINFO_NITERS_ASSUMPTIONS (loop_vinfo) = assumptions; in vect_analyze_loop_form()
1329 if (!LOOP_VINFO_NITERS_KNOWN_P (loop_vinfo)) in vect_analyze_loop_form()
1340 stmt_vec_info loop_cond_info = loop_vinfo->lookup_stmt (loop_cond); in vect_analyze_loop_form()
1345 = loop_vinfo->lookup_stmt (inner_loop_cond); in vect_analyze_loop_form()
1350 loop->aux = loop_vinfo; in vect_analyze_loop_form()
1351 return opt_loop_vec_info::success (loop_vinfo); in vect_analyze_loop_form()
1360 vect_update_vf_for_slp (loop_vec_info loop_vinfo) in vect_update_vf_for_slp() argument
1362 class loop *loop = LOOP_VINFO_LOOP (loop_vinfo); in vect_update_vf_for_slp()
1363 basic_block *bbs = LOOP_VINFO_BBS (loop_vinfo); in vect_update_vf_for_slp()
1370 vectorization_factor = LOOP_VINFO_VECT_FACTOR (loop_vinfo); in vect_update_vf_for_slp()
1385 stmt_vec_info stmt_info = loop_vinfo->lookup_stmt (si.phi ()); in vect_update_vf_for_slp()
1397 stmt_vec_info stmt_info = loop_vinfo->lookup_stmt (gsi_stmt (si)); in vect_update_vf_for_slp()
1412 vectorization_factor = LOOP_VINFO_SLP_UNROLLING_FACTOR (loop_vinfo); in vect_update_vf_for_slp()
1424 LOOP_VINFO_SLP_UNROLLING_FACTOR (loop_vinfo)); in vect_update_vf_for_slp()
1427 LOOP_VINFO_VECT_FACTOR (loop_vinfo) = vectorization_factor; in vect_update_vf_for_slp()
1468 vect_analyze_loop_operations (loop_vec_info loop_vinfo) in vect_analyze_loop_operations() argument
1470 class loop *loop = LOOP_VINFO_LOOP (loop_vinfo); in vect_analyze_loop_operations()
1471 basic_block *bbs = LOOP_VINFO_BBS (loop_vinfo); in vect_analyze_loop_operations()
1492 stmt_info = loop_vinfo->lookup_stmt (phi); in vect_analyze_loop_operations()
1522 stmt_vec_info op_def_info = loop_vinfo->lookup_def (phi_op); in vect_analyze_loop_operations()
1587 = vect_analyze_stmt (loop_vinfo->lookup_stmt (stmt), in vect_analyze_loop_operations()
1596 add_stmt_costs (loop_vinfo->target_cost_data, &cost_vec); in vect_analyze_loop_operations()
1621 vect_analyze_loop_costing (loop_vec_info loop_vinfo) in vect_analyze_loop_costing() argument
1623 class loop *loop = LOOP_VINFO_LOOP (loop_vinfo); in vect_analyze_loop_costing()
1624 unsigned int assumed_vf = vect_vf_for_cost (loop_vinfo); in vect_analyze_loop_costing()
1628 if (!LOOP_VINFO_FULLY_MASKED_P (loop_vinfo)) in vect_analyze_loop_costing()
1632 if (LOOP_VINFO_NITERS_KNOWN_P (loop_vinfo)) in vect_analyze_loop_costing()
1633 max_niter = LOOP_VINFO_INT_NITERS (loop_vinfo); in vect_analyze_loop_costing()
1649 vect_estimate_min_profitable_iters (loop_vinfo, &min_profitable_iters, in vect_analyze_loop_costing()
1672 LOOP_VINFO_COST_MODEL_THRESHOLD (loop_vinfo) = th; in vect_analyze_loop_costing()
1674 if (LOOP_VINFO_NITERS_KNOWN_P (loop_vinfo) in vect_analyze_loop_costing()
1675 && LOOP_VINFO_INT_NITERS (loop_vinfo) < th) in vect_analyze_loop_costing()
1695 && !LOOP_REQUIRES_VERSIONING (loop_vinfo) in vect_analyze_loop_costing()
1696 && !LOOP_VINFO_PEELING_FOR_NITER (loop_vinfo) in vect_analyze_loop_costing()
1697 && !LOOP_VINFO_PEELING_FOR_ALIGNMENT (loop_vinfo) in vect_analyze_loop_costing()
1698 && !vect_apply_runtime_profitability_check_p (loop_vinfo)) in vect_analyze_loop_costing()
1711 if (LOOP_VINFO_EPILOGUE_P (loop_vinfo)) in vect_analyze_loop_costing()
1713 = vect_vf_for_cost (LOOP_VINFO_ORIG_LOOP_INFO (loop_vinfo)) - 1; in vect_analyze_loop_costing()
1802 vect_dissolve_slp_only_groups (loop_vec_info loop_vinfo) in vect_dissolve_slp_only_groups() argument
1809 vec<data_reference_p> datarefs = loop_vinfo->shared->datarefs; in vect_dissolve_slp_only_groups()
1813 stmt_vec_info stmt_info = loop_vinfo->lookup_stmt (DR_STMT (dr)); in vect_dissolve_slp_only_groups()
1851 determine_peel_for_niter (loop_vec_info loop_vinfo) in determine_peel_for_niter() argument
1853 LOOP_VINFO_PEELING_FOR_NITER (loop_vinfo) = false; in determine_peel_for_niter()
1857 = likely_max_stmt_executions_int (LOOP_VINFO_LOOP (loop_vinfo)); in determine_peel_for_niter()
1859 unsigned th = LOOP_VINFO_COST_MODEL_THRESHOLD (loop_vinfo); in determine_peel_for_niter()
1860 if (!th && LOOP_VINFO_ORIG_LOOP_INFO (loop_vinfo)) in determine_peel_for_niter()
1862 (loop_vinfo)); in determine_peel_for_niter()
1864 if (LOOP_VINFO_FULLY_MASKED_P (loop_vinfo)) in determine_peel_for_niter()
1866 LOOP_VINFO_PEELING_FOR_NITER (loop_vinfo) = false; in determine_peel_for_niter()
1867 else if (LOOP_VINFO_NITERS_KNOWN_P (loop_vinfo) in determine_peel_for_niter()
1868 && LOOP_VINFO_PEELING_FOR_ALIGNMENT (loop_vinfo) >= 0) in determine_peel_for_niter()
1872 unsigned int peel_niter = LOOP_VINFO_PEELING_FOR_ALIGNMENT (loop_vinfo); in determine_peel_for_niter()
1873 if (LOOP_VINFO_PEELING_FOR_GAPS (loop_vinfo)) in determine_peel_for_niter()
1875 if (!multiple_p (LOOP_VINFO_INT_NITERS (loop_vinfo) - peel_niter, in determine_peel_for_niter()
1876 LOOP_VINFO_VECT_FACTOR (loop_vinfo))) in determine_peel_for_niter()
1877 LOOP_VINFO_PEELING_FOR_NITER (loop_vinfo) = true; in determine_peel_for_niter()
1879 else if (LOOP_VINFO_PEELING_FOR_ALIGNMENT (loop_vinfo) in determine_peel_for_niter()
1883 || LOOP_VINFO_PEELING_FOR_GAPS (loop_vinfo) in determine_peel_for_niter()
1884 || !LOOP_VINFO_VECT_FACTOR (loop_vinfo).is_constant (&const_vf) in determine_peel_for_niter()
1885 || ((tree_ctz (LOOP_VINFO_NITERS (loop_vinfo)) in determine_peel_for_niter()
1890 && (!LOOP_REQUIRES_VERSIONING (loop_vinfo) in determine_peel_for_niter()
1893 LOOP_VINFO_PEELING_FOR_NITER (loop_vinfo) = true; in determine_peel_for_niter()
1903 vect_analyze_loop_2 (loop_vec_info loop_vinfo, bool &fatal, unsigned *n_stmts) in vect_analyze_loop_2() argument
1913 if (LOOP_VINFO_EPILOGUE_P (loop_vinfo)) in vect_analyze_loop_2()
1914 orig_loop_vinfo = LOOP_VINFO_ORIG_LOOP_INFO (loop_vinfo); in vect_analyze_loop_2()
1916 orig_loop_vinfo = loop_vinfo; in vect_analyze_loop_2()
1922 if (LOOP_VINFO_SIMD_IF_COND (loop_vinfo) in vect_analyze_loop_2()
1923 && integer_zerop (LOOP_VINFO_SIMD_IF_COND (loop_vinfo))) in vect_analyze_loop_2()
1930 loop_p loop = LOOP_VINFO_LOOP (loop_vinfo); in vect_analyze_loop_2()
1933 if (!LOOP_VINFO_DATAREFS (loop_vinfo).exists ()) in vect_analyze_loop_2()
1936 = vect_get_datarefs_in_loop (loop, LOOP_VINFO_BBS (loop_vinfo), in vect_analyze_loop_2()
1937 &LOOP_VINFO_DATAREFS (loop_vinfo), in vect_analyze_loop_2()
1948 loop_vinfo->shared->save_datarefs (); in vect_analyze_loop_2()
1951 loop_vinfo->shared->check_datarefs (); in vect_analyze_loop_2()
1956 ok = vect_analyze_data_refs (loop_vinfo, &min_vf, &fatal); in vect_analyze_loop_2()
1967 vect_analyze_scalar_cycles (loop_vinfo); in vect_analyze_loop_2()
1969 vect_pattern_recog (loop_vinfo); in vect_analyze_loop_2()
1971 vect_fixup_scalar_cycles_with_patterns (loop_vinfo); in vect_analyze_loop_2()
1976 ok = vect_analyze_data_ref_accesses (loop_vinfo); in vect_analyze_loop_2()
1987 ok = vect_mark_stmts_to_be_vectorized (loop_vinfo, &fatal); in vect_analyze_loop_2()
2004 ok = vect_analyze_data_ref_dependences (loop_vinfo, &max_vf); in vect_analyze_loop_2()
2015 LOOP_VINFO_MAX_VECT_FACTOR (loop_vinfo) = max_vf; in vect_analyze_loop_2()
2017 ok = vect_determine_vectorization_factor (loop_vinfo); in vect_analyze_loop_2()
2026 && maybe_lt (max_vf, LOOP_VINFO_VECT_FACTOR (loop_vinfo))) in vect_analyze_loop_2()
2030 vect_compute_single_scalar_iteration_cost (loop_vinfo); in vect_analyze_loop_2()
2032 poly_uint64 saved_vectorization_factor = LOOP_VINFO_VECT_FACTOR (loop_vinfo); in vect_analyze_loop_2()
2035 ok = vect_analyze_slp (loop_vinfo, *n_stmts); in vect_analyze_loop_2()
2040 bool slp = vect_make_slp_decision (loop_vinfo); in vect_analyze_loop_2()
2044 vect_detect_hybrid_slp (loop_vinfo); in vect_analyze_loop_2()
2047 vect_update_vf_for_slp (loop_vinfo); in vect_analyze_loop_2()
2050 bool saved_can_fully_mask_p = LOOP_VINFO_CAN_FULLY_MASK_P (loop_vinfo); in vect_analyze_loop_2()
2054 gcc_assert (LOOP_VINFO_MASKS (loop_vinfo).is_empty ()); in vect_analyze_loop_2()
2060 poly_uint64 vectorization_factor = LOOP_VINFO_VECT_FACTOR (loop_vinfo); in vect_analyze_loop_2()
2063 if (LOOP_VINFO_NITERS_KNOWN_P (loop_vinfo) && dump_enabled_p ()) in vect_analyze_loop_2()
2069 LOOP_VINFO_INT_NITERS (loop_vinfo)); in vect_analyze_loop_2()
2075 ok = vect_analyze_data_refs_alignment (loop_vinfo); in vect_analyze_loop_2()
2087 ok = vect_prune_runtime_alias_test_list (loop_vinfo); in vect_analyze_loop_2()
2094 if (!LOOP_VINFO_EPILOGUE_P (loop_vinfo)) in vect_analyze_loop_2()
2097 ok = vect_enhance_data_refs_alignment (loop_vinfo); in vect_analyze_loop_2()
2099 ok = vect_verify_datarefs_alignment (loop_vinfo); in vect_analyze_loop_2()
2108 unsigned old_size = LOOP_VINFO_SLP_INSTANCES (loop_vinfo).length (); in vect_analyze_loop_2()
2109 vect_slp_analyze_operations (loop_vinfo); in vect_analyze_loop_2()
2110 if (LOOP_VINFO_SLP_INSTANCES (loop_vinfo).length () != old_size) in vect_analyze_loop_2()
2119 vect_dissolve_slp_only_groups (loop_vinfo); in vect_analyze_loop_2()
2123 ok = vect_analyze_loop_operations (loop_vinfo); in vect_analyze_loop_2()
2134 LOOP_VINFO_FULLY_MASKED_P (loop_vinfo) in vect_analyze_loop_2()
2135 = (LOOP_VINFO_CAN_FULLY_MASK_P (loop_vinfo) in vect_analyze_loop_2()
2136 && vect_verify_full_masking (loop_vinfo)); in vect_analyze_loop_2()
2139 if (LOOP_VINFO_FULLY_MASKED_P (loop_vinfo)) in vect_analyze_loop_2()
2150 if (LOOP_VINFO_PEELING_FOR_GAPS (loop_vinfo) in vect_analyze_loop_2()
2151 && LOOP_VINFO_NITERS_KNOWN_P (loop_vinfo) in vect_analyze_loop_2()
2152 && !LOOP_VINFO_FULLY_MASKED_P (loop_vinfo)) in vect_analyze_loop_2()
2154 poly_uint64 vf = LOOP_VINFO_VECT_FACTOR (loop_vinfo); in vect_analyze_loop_2()
2155 tree scalar_niters = LOOP_VINFO_NITERSM1 (loop_vinfo); in vect_analyze_loop_2()
2165 if (LOOP_VINFO_EPILOGUE_P (loop_vinfo) in vect_analyze_loop_2()
2166 && !LOOP_VINFO_FULLY_MASKED_P (loop_vinfo) in vect_analyze_loop_2()
2167 && maybe_ge (LOOP_VINFO_VECT_FACTOR (loop_vinfo), in vect_analyze_loop_2()
2174 res = vect_analyze_loop_costing (loop_vinfo); in vect_analyze_loop_2()
2185 determine_peel_for_niter (loop_vinfo); in vect_analyze_loop_2()
2187 if (LOOP_VINFO_PEELING_FOR_GAPS (loop_vinfo) in vect_analyze_loop_2()
2188 || LOOP_VINFO_PEELING_FOR_NITER (loop_vinfo)) in vect_analyze_loop_2()
2192 if (!vect_can_advance_ivs_p (loop_vinfo) in vect_analyze_loop_2()
2193 || !slpeel_can_duplicate_loop_p (LOOP_VINFO_LOOP (loop_vinfo), in vect_analyze_loop_2()
2195 (loop_vinfo)))) in vect_analyze_loop_2()
2221 unsigned int th = LOOP_VINFO_COST_MODEL_THRESHOLD (loop_vinfo); in vect_analyze_loop_2()
2223 if (!vect_use_loop_mask_for_alignment_p (loop_vinfo)) in vect_analyze_loop_2()
2226 if (LOOP_VINFO_PEELING_FOR_ALIGNMENT (loop_vinfo) < 0) in vect_analyze_loop_2()
2228 dr_vec_info *dr_info = LOOP_VINFO_UNALIGNED_DR (loop_vinfo); in vect_analyze_loop_2()
2233 niters_th += LOOP_VINFO_PEELING_FOR_ALIGNMENT (loop_vinfo); in vect_analyze_loop_2()
2237 if (!LOOP_VINFO_FULLY_MASKED_P (loop_vinfo)) in vect_analyze_loop_2()
2238 niters_th += LOOP_VINFO_VECT_FACTOR (loop_vinfo); in vect_analyze_loop_2()
2240 if (LOOP_VINFO_PEELING_FOR_GAPS (loop_vinfo)) in vect_analyze_loop_2()
2245 if (vect_apply_runtime_profitability_check_p (loop_vinfo) in vect_analyze_loop_2()
2249 LOOP_VINFO_VERSIONING_THRESHOLD (loop_vinfo) = niters_th; in vect_analyze_loop_2()
2253 LOOP_VINFO_VECT_FACTOR (loop_vinfo))); in vect_analyze_loop_2()
2268 if (! LOOP_VINFO_REDUCTION_CHAINS (loop_vinfo).is_empty ()) in vect_analyze_loop_2()
2276 FOR_EACH_VEC_ELT (LOOP_VINFO_SLP_INSTANCES (loop_vinfo), i, instance) in vect_analyze_loop_2()
2312 LOOP_VINFO_VECT_FACTOR (loop_vinfo) = saved_vectorization_factor; in vect_analyze_loop_2()
2314 FOR_EACH_VEC_ELT (LOOP_VINFO_SLP_INSTANCES (loop_vinfo), j, instance) in vect_analyze_loop_2()
2316 LOOP_VINFO_SLP_INSTANCES (loop_vinfo).release (); in vect_analyze_loop_2()
2318 for (i = 0; i < LOOP_VINFO_LOOP (loop_vinfo)->num_nodes; ++i) in vect_analyze_loop_2()
2320 basic_block bb = LOOP_VINFO_BBS (loop_vinfo)[i]; in vect_analyze_loop_2()
2324 stmt_vec_info stmt_info = loop_vinfo->lookup_stmt (gsi_stmt (si)); in vect_analyze_loop_2()
2341 stmt_vec_info stmt_info = loop_vinfo->lookup_stmt (gsi_stmt (si)); in vect_analyze_loop_2()
2350 STMT_SLP_TYPE (loop_vinfo->lookup_stmt (gsi_stmt (pi))) in vect_analyze_loop_2()
2356 LOOP_VINFO_LOWER_BOUNDS (loop_vinfo).truncate (0); in vect_analyze_loop_2()
2357 LOOP_VINFO_COMP_ALIAS_DDRS (loop_vinfo).release (); in vect_analyze_loop_2()
2358 LOOP_VINFO_CHECK_UNEQUAL_ADDRS (loop_vinfo).release (); in vect_analyze_loop_2()
2360 destroy_cost_data (LOOP_VINFO_TARGET_COST_DATA (loop_vinfo)); in vect_analyze_loop_2()
2361 LOOP_VINFO_TARGET_COST_DATA (loop_vinfo) in vect_analyze_loop_2()
2362 = init_cost (LOOP_VINFO_LOOP (loop_vinfo)); in vect_analyze_loop_2()
2364 release_vec_loop_masks (&LOOP_VINFO_MASKS (loop_vinfo)); in vect_analyze_loop_2()
2366 LOOP_VINFO_PEELING_FOR_NITER (loop_vinfo) = false; in vect_analyze_loop_2()
2367 LOOP_VINFO_PEELING_FOR_GAPS (loop_vinfo) = false; in vect_analyze_loop_2()
2368 LOOP_VINFO_COST_MODEL_THRESHOLD (loop_vinfo) = 0; in vect_analyze_loop_2()
2369 LOOP_VINFO_VERSIONING_THRESHOLD (loop_vinfo) = 0; in vect_analyze_loop_2()
2370 LOOP_VINFO_CAN_FULLY_MASK_P (loop_vinfo) = saved_can_fully_mask_p; in vect_analyze_loop_2()
2483 vect_reanalyze_as_main_loop (loop_vec_info loop_vinfo, unsigned int *n_stmts) in vect_reanalyze_as_main_loop() argument
2485 if (!LOOP_VINFO_EPILOGUE_P (loop_vinfo)) in vect_reanalyze_as_main_loop()
2486 return loop_vinfo; in vect_reanalyze_as_main_loop()
2491 GET_MODE_NAME (loop_vinfo->vector_mode)); in vect_reanalyze_as_main_loop()
2493 struct loop *loop = LOOP_VINFO_LOOP (loop_vinfo); in vect_reanalyze_as_main_loop()
2494 vec_info_shared *shared = loop_vinfo->shared; in vect_reanalyze_as_main_loop()
2498 main_loop_vinfo->vector_mode = loop_vinfo->vector_mode; in vect_reanalyze_as_main_loop()
2509 GET_MODE_NAME (loop_vinfo->vector_mode)); in vect_reanalyze_as_main_loop()
2562 opt_loop_vec_info loop_vinfo = vect_analyze_loop_form (loop, shared); in vect_analyze_loop() local
2563 if (!loop_vinfo) in vect_analyze_loop()
2569 return loop_vinfo; in vect_analyze_loop()
2571 loop_vinfo->vector_mode = next_vector_mode; in vect_analyze_loop()
2604 LOOP_VINFO_ORIG_LOOP_INFO (loop_vinfo) = first_loop_vinfo; in vect_analyze_loop()
2606 res = vect_analyze_loop_2 (loop_vinfo, fatal, &n_stmts); in vect_analyze_loop()
2608 autodetected_vector_mode = loop_vinfo->vector_mode; in vect_analyze_loop()
2614 GET_MODE_NAME (loop_vinfo->vector_mode)); in vect_analyze_loop()
2618 GET_MODE_NAME (loop_vinfo->vector_mode)); in vect_analyze_loop()
2625 && vect_chooses_same_modes_p (loop_vinfo, vector_modes[mode_i])) in vect_analyze_loop()
2637 LOOP_VINFO_VECTORIZABLE_P (loop_vinfo) = 1; in vect_analyze_loop()
2643 && known_eq (LOOP_VINFO_VECT_FACTOR (loop_vinfo), simdlen)) in vect_analyze_loop()
2647 LOOP_VINFO_ORIG_LOOP_INFO (loop_vinfo) = NULL; in vect_analyze_loop()
2656 && vect_joust_loop_vinfos (loop_vinfo, vinfos.last ())) in vect_analyze_loop()
2662 && vect_joust_loop_vinfos (loop_vinfo, first_loop_vinfo)) in vect_analyze_loop()
2665 = vect_reanalyze_as_main_loop (loop_vinfo, &n_stmts); in vect_analyze_loop()
2666 if (main_loop_vinfo == loop_vinfo) in vect_analyze_loop()
2677 delete loop_vinfo; in vect_analyze_loop() local
2678 loop_vinfo in vect_analyze_loop()
2688 first_loop_vinfo = loop_vinfo; in vect_analyze_loop()
2695 first_loop_vinfo->epilogue_vinfos.safe_push (loop_vinfo); in vect_analyze_loop()
2696 poly_uint64 th = LOOP_VINFO_VERSIONING_THRESHOLD (loop_vinfo); in vect_analyze_loop()
2697 gcc_assert (!LOOP_REQUIRES_VERSIONING (loop_vinfo) in vect_analyze_loop()
2705 delete loop_vinfo; in vect_analyze_loop() local
2729 delete loop_vinfo; in vect_analyze_loop() local
3358 vect_get_known_peeling_cost (loop_vec_info loop_vinfo, int peel_iters_prologue, in vect_get_known_peeling_cost() argument
3365 int assumed_vf = vect_vf_for_cost (loop_vinfo); in vect_get_known_peeling_cost()
3367 if (!LOOP_VINFO_NITERS_KNOWN_P (loop_vinfo)) in vect_get_known_peeling_cost()
3384 int niters = LOOP_VINFO_INT_NITERS (loop_vinfo); in vect_get_known_peeling_cost()
3390 if (LOOP_VINFO_PEELING_FOR_GAPS (loop_vinfo) && !*peel_iters_epilogue) in vect_get_known_peeling_cost()
3427 vect_estimate_min_profitable_iters (loop_vec_info loop_vinfo, in vect_estimate_min_profitable_iters() argument
3441 int assumed_vf = vect_vf_for_cost (loop_vinfo); in vect_estimate_min_profitable_iters()
3442 int npeel = LOOP_VINFO_PEELING_FOR_ALIGNMENT (loop_vinfo); in vect_estimate_min_profitable_iters()
3443 void *target_cost_data = LOOP_VINFO_TARGET_COST_DATA (loop_vinfo); in vect_estimate_min_profitable_iters()
3446 if (unlimited_cost_model (LOOP_VINFO_LOOP (loop_vinfo))) in vect_estimate_min_profitable_iters()
3456 if (LOOP_REQUIRES_VERSIONING_FOR_ALIGNMENT (loop_vinfo)) in vect_estimate_min_profitable_iters()
3459 unsigned len = LOOP_VINFO_MAY_MISALIGN_STMTS (loop_vinfo).length (); in vect_estimate_min_profitable_iters()
3469 if (LOOP_REQUIRES_VERSIONING_FOR_ALIAS (loop_vinfo)) in vect_estimate_min_profitable_iters()
3472 unsigned len = LOOP_VINFO_COMP_ALIAS_DDRS (loop_vinfo).length (); in vect_estimate_min_profitable_iters()
3475 len = LOOP_VINFO_CHECK_UNEQUAL_ADDRS (loop_vinfo).length (); in vect_estimate_min_profitable_iters()
3480 len = LOOP_VINFO_LOWER_BOUNDS (loop_vinfo).length (); in vect_estimate_min_profitable_iters()
3487 if (!LOOP_VINFO_LOWER_BOUNDS (loop_vinfo)[i].unsigned_p) in vect_estimate_min_profitable_iters()
3499 if (LOOP_REQUIRES_VERSIONING_FOR_NITERS (loop_vinfo)) in vect_estimate_min_profitable_iters()
3510 if (LOOP_REQUIRES_VERSIONING (loop_vinfo)) in vect_estimate_min_profitable_iters()
3523 = LOOP_VINFO_SINGLE_SCALAR_ITERATION_COST (loop_vinfo); in vect_estimate_min_profitable_iters()
3534 if (LOOP_VINFO_FULLY_MASKED_P (loop_vinfo)) in vect_estimate_min_profitable_iters()
3539 if (LOOP_VINFO_PEELING_FOR_GAPS (loop_vinfo)) in vect_estimate_min_profitable_iters()
3545 FOR_EACH_VEC_ELT (LOOP_VINFO_SCALAR_ITERATION_COST (loop_vinfo), in vect_estimate_min_profitable_iters()
3556 FOR_EACH_VEC_ELT (LOOP_VINFO_MASKS (loop_vinfo), num_vectors_m1, rgm) in vect_estimate_min_profitable_iters()
3607 FOR_EACH_VEC_ELT (LOOP_VINFO_SCALAR_ITERATION_COST (loop_vinfo), j, si) in vect_estimate_min_profitable_iters()
3624 void *data = LOOP_VINFO_TARGET_COST_DATA (loop_vinfo); in vect_estimate_min_profitable_iters()
3630 (void) vect_get_known_peeling_cost (loop_vinfo, peel_iters_prologue, in vect_estimate_min_profitable_iters()
3633 (loop_vinfo), in vect_estimate_min_profitable_iters()
3702 if (!LOOP_VINFO_NITERS_KNOWN_P (loop_vinfo) in vect_estimate_min_profitable_iters()
3703 || LOOP_REQUIRES_VERSIONING (loop_vinfo)) in vect_estimate_min_profitable_iters()
3706 if (LOOP_REQUIRES_VERSIONING (loop_vinfo)) in vect_estimate_min_profitable_iters()
3711 if (LOOP_VINFO_PEELING_FOR_ALIGNMENT (loop_vinfo) < 0) in vect_estimate_min_profitable_iters()
3721 finish_cost (LOOP_VINFO_TARGET_COST_DATA (loop_vinfo), &vec_prologue_cost, in vect_estimate_min_profitable_iters()
3727 loop_vinfo->vec_inside_cost = vec_inside_cost; in vect_estimate_min_profitable_iters()
3728 loop_vinfo->vec_outside_cost = vec_outside_cost; in vect_estimate_min_profitable_iters()
3765 if (LOOP_VINFO_LOOP (loop_vinfo)->force_vectorize) in vect_estimate_min_profitable_iters()
3783 if (LOOP_VINFO_FULLY_MASKED_P (loop_vinfo)) in vect_estimate_min_profitable_iters()
3810 if (LOOP_VINFO_FULLY_MASKED_P (loop_vinfo)) in vect_estimate_min_profitable_iters()
3861 if (!LOOP_VINFO_FULLY_MASKED_P (loop_vinfo) in vect_estimate_min_profitable_iters()
3882 else if (LOOP_VINFO_FULLY_MASKED_P (loop_vinfo)) in vect_estimate_min_profitable_iters()
3894 if (LOOP_VINFO_FULLY_MASKED_P (loop_vinfo)) in vect_estimate_min_profitable_iters()
3982 loop_vec_info loop_vinfo = STMT_VINFO_LOOP_VINFO (stmt_info); in vect_model_reduction_cost() local
3985 if (loop_vinfo) in vect_model_reduction_cost()
3986 loop = LOOP_VINFO_LOOP (loop_vinfo); in vect_model_reduction_cost()
4211 loop_vec_info loop_vinfo = STMT_VINFO_LOOP_VINFO (stmt_vinfo); in get_initial_def_for_reduction() local
4212 class loop *loop = LOOP_VINFO_LOOP (loop_vinfo); in get_initial_def_for_reduction()
4214 tree vectype = get_vectype_for_scalar_type (loop_vinfo, scalar_type); in get_initial_def_for_reduction()
4509 loop_vec_info loop_vinfo = STMT_VINFO_LOOP_VINFO (stmt_info); in vect_create_epilog_for_reduction() local
4520 stmt_info = loop_vinfo->lookup_def (gimple_phi_arg_def in vect_create_epilog_for_reduction()
4531 class loop *loop = LOOP_VINFO_LOOP (loop_vinfo), *outer_loop = NULL; in vect_create_epilog_for_reduction()
4608 phi_info = STMT_VINFO_VEC_STMT (loop_vinfo->lookup_stmt (reduc_def_stmt)); in vect_create_epilog_for_reduction()
4643 = loop_vinfo->lookup_def (gimple_op (cond_info->stmt, in vect_create_epilog_for_reduction()
4687 loop_vinfo->add_stmt (new_phi); in vect_create_epilog_for_reduction()
4715 = loop_vinfo->add_stmt (SSA_NAME_DEF_STMT (new_phi_tree)); in vect_create_epilog_for_reduction()
4769 stmt_vec_info phi_info = loop_vinfo->add_stmt (phi); in vect_create_epilog_for_reduction()
4774 def = vect_get_vec_def_for_stmt_copy (loop_vinfo, def); in vect_create_epilog_for_reduction()
4854 stmt_vec_info next_phi_info = loop_vinfo->lookup_stmt (new_phis[0]); in vect_create_epilog_for_reduction()
5502 stmt_vec_info epilog_stmt_info = loop_vinfo->add_stmt (epilog_stmt); in vect_create_epilog_for_reduction()
5504 = STMT_VINFO_RELATED_STMT (loop_vinfo->lookup_stmt (new_phi)); in vect_create_epilog_for_reduction()
5727 loop_vec_info loop_vinfo = STMT_VINFO_LOOP_VINFO (stmt_info); in vectorize_fold_left_reduction() local
5728 class loop *loop = LOOP_VINFO_LOOP (loop_vinfo); in vectorize_fold_left_reduction()
5737 ncopies = vect_get_num_copies (loop_vinfo, vectype_in); in vectorize_fold_left_reduction()
5780 if (LOOP_VINFO_FULLY_MASKED_P (loop_vinfo)) in vectorize_fold_left_reduction()
5790 if (LOOP_VINFO_FULLY_MASKED_P (loop_vinfo)) in vectorize_fold_left_reduction()
6019 loop_vec_info loop_vinfo = STMT_VINFO_LOOP_VINFO (stmt_info); in vectorizable_reduction() local
6020 class loop *loop = LOOP_VINFO_LOOP (loop_vinfo); in vectorizable_reduction()
6081 phi_info = loop_vinfo->lookup_stmt (use_stmt); in vectorizable_reduction()
6099 stmt_vec_info def = loop_vinfo->lookup_def (reduc_def); in vectorizable_reduction()
6235 if (!vect_is_simple_use (op, loop_vinfo, &dt, &tem, in vectorizable_reduction()
6364 vect_is_simple_use (cond_initial_val, loop_vinfo, &cond_initial_dt); in vectorizable_reduction()
6392 ncopies = vect_get_num_copies (loop_vinfo, vectype_in); in vectorizable_reduction()
6618 && !can_duplicate_and_interleave_p (loop_vinfo, group_size, in vectorizable_reduction()
6727 || !vect_worthwhile_without_simd_p (loop_vinfo, code)) in vectorizable_reduction()
6737 && !vect_worthwhile_without_simd_p (loop_vinfo, code)) in vectorizable_reduction()
6808 else if (loop_vinfo && LOOP_VINFO_CAN_FULLY_MASK_P (loop_vinfo)) in vectorizable_reduction()
6810 vec_loop_masks *masks = &LOOP_VINFO_MASKS (loop_vinfo); in vectorizable_reduction()
6823 LOOP_VINFO_CAN_FULLY_MASK_P (loop_vinfo) = false; in vectorizable_reduction()
6835 LOOP_VINFO_CAN_FULLY_MASK_P (loop_vinfo) = false; in vectorizable_reduction()
6838 vect_record_loop_mask (loop_vinfo, masks, ncopies * vec_num, in vectorizable_reduction()
6852 loop_vec_info loop_vinfo = STMT_VINFO_LOOP_VINFO (stmt_info); in vect_transform_reduction() local
6853 class loop *loop = LOOP_VINFO_LOOP (loop_vinfo); in vect_transform_reduction()
6903 ncopies = vect_get_num_copies (loop_vinfo, vectype_in); in vect_transform_reduction()
6908 vec_loop_masks *masks = &LOOP_VINFO_MASKS (loop_vinfo); in vect_transform_reduction()
6927 bool masked_loop_p = LOOP_VINFO_FULLY_MASKED_P (loop_vinfo); in vect_transform_reduction()
6999 = vect_get_vec_def_for_stmt_copy (loop_vinfo, in vect_transform_reduction()
7005 = vect_get_vec_def_for_stmt_copy (loop_vinfo, in vect_transform_reduction()
7013 = vect_get_vec_def_for_stmt_copy (loop_vinfo, in vect_transform_reduction()
7090 loop_vec_info loop_vinfo = STMT_VINFO_LOOP_VINFO (stmt_info); in vect_transform_cycle_phi() local
7091 class loop *loop = LOOP_VINFO_LOOP (loop_vinfo); in vect_transform_cycle_phi()
7125 (LOOP_VINFO_VECT_FACTOR (loop_vinfo) in vect_transform_cycle_phi()
7132 ncopies = vect_get_num_copies (loop_vinfo, vectype_in); in vect_transform_cycle_phi()
7220 stmt_vec_info new_phi_info = loop_vinfo->add_stmt (new_phi); in vect_transform_cycle_phi()
7224 vec_init_def = vect_get_vec_def_for_stmt_copy (loop_vinfo, in vect_transform_cycle_phi()
7253 loop_vec_info loop_vinfo = STMT_VINFO_LOOP_VINFO (stmt_info); in vectorizable_lc_phi() local
7254 if (!loop_vinfo in vectorizable_lc_phi()
7286 stmt_vec_info new_phi_info = loop_vinfo->add_stmt (new_phi); in vectorizable_lc_phi()
7292 unsigned ncopies = vect_get_num_copies (loop_vinfo, vectype); in vectorizable_lc_phi()
7297 vect_get_vec_defs_for_stmt_copy (loop_vinfo, &vec_oprnds, NULL); in vectorizable_lc_phi()
7301 stmt_vec_info new_phi_info = loop_vinfo->add_stmt (new_phi); in vectorizable_lc_phi()
7348 loop_vec_info loop_vinfo = dyn_cast <loop_vec_info> (vinfo); in vect_worthwhile_without_simd_p() local
7350 return (loop_vinfo in vect_worthwhile_without_simd_p()
7351 && LOOP_VINFO_VECT_FACTOR (loop_vinfo).is_constant (&value) in vect_worthwhile_without_simd_p()
7368 loop_vec_info loop_vinfo = STMT_VINFO_LOOP_VINFO (stmt_info); in vectorizable_induction() local
7369 class loop *loop = LOOP_VINFO_LOOP (loop_vinfo); in vectorizable_induction()
7382 poly_uint64 vf = LOOP_VINFO_VECT_FACTOR (loop_vinfo); in vectorizable_induction()
7410 ncopies = vect_get_num_copies (loop_vinfo, vectype); in vectorizable_induction()
7451 stmt_vec_info exit_phi_vinfo = loop_vinfo->lookup_stmt (exit_phi); in vectorizable_induction()
7518 tree skip_niters = LOOP_VINFO_MASK_SKIP_NITERS (loop_vinfo); in vectorizable_induction()
7601 = loop_vinfo->add_stmt (induction_phi); in vectorizable_induction()
7610 loop_vinfo->add_stmt (SSA_NAME_DEF_STMT (vec_def)); in vectorizable_induction()
7662 (loop_vinfo->add_stmt (SSA_NAME_DEF_STMT (def))); in vectorizable_induction()
7690 loop_vinfo->add_stmt (new_stmt); in vectorizable_induction()
7795 stmt_vec_info induction_phi_info = loop_vinfo->add_stmt (induction_phi); in vectorizable_induction()
7805 stmt_vec_info new_stmt_info = loop_vinfo->add_stmt (new_stmt); in vectorizable_induction()
7862 new_stmt_info = loop_vinfo->add_stmt (new_stmt); in vectorizable_induction()
7887 stmt_vec_info stmt_vinfo = loop_vinfo->lookup_stmt (exit_phi); in vectorizable_induction()
7922 loop_vec_info loop_vinfo = STMT_VINFO_LOOP_VINFO (stmt_info); in vectorizable_live_operation() local
7923 class loop *loop = LOOP_VINFO_LOOP (loop_vinfo); in vectorizable_live_operation()
7978 gcc_assert (is_simple_and_all_uses_invariant (stmt_info, loop_vinfo)); in vectorizable_live_operation()
7989 ncopies = vect_get_num_copies (loop_vinfo, vectype); in vectorizable_live_operation()
8018 if (LOOP_VINFO_CAN_FULLY_MASK_P (loop_vinfo)) in vectorizable_live_operation()
8028 LOOP_VINFO_CAN_FULLY_MASK_P (loop_vinfo) = false; in vectorizable_live_operation()
8036 LOOP_VINFO_CAN_FULLY_MASK_P (loop_vinfo) = false; in vectorizable_live_operation()
8044 LOOP_VINFO_CAN_FULLY_MASK_P (loop_vinfo) = false; in vectorizable_live_operation()
8049 vect_record_loop_mask (loop_vinfo, in vectorizable_live_operation()
8050 &LOOP_VINFO_MASKS (loop_vinfo), in vectorizable_live_operation()
8073 gcc_assert (!LOOP_VINFO_FULLY_MASKED_P (loop_vinfo)); in vectorizable_live_operation()
8091 || !LOOP_VINFO_FULLY_MASKED_P (loop_vinfo)); in vectorizable_live_operation()
8095 vec_lhs = vect_get_vec_def_for_stmt_copy (loop_vinfo, vec_lhs); in vectorizable_live_operation()
8122 if (LOOP_VINFO_FULLY_MASKED_P (loop_vinfo)) in vectorizable_live_operation()
8132 tree mask = vect_get_loop_mask (gsi, &LOOP_VINFO_MASKS (loop_vinfo), 1, in vectorizable_live_operation()
8240 loop_niters_no_overflow (loop_vec_info loop_vinfo) in loop_niters_no_overflow() argument
8243 if (LOOP_VINFO_NITERS_KNOWN_P (loop_vinfo)) in loop_niters_no_overflow()
8245 tree cst_niters = LOOP_VINFO_NITERS (loop_vinfo); in loop_niters_no_overflow()
8246 tree cst_nitersm1 = LOOP_VINFO_NITERSM1 (loop_vinfo); in loop_niters_no_overflow()
8255 class loop *loop = LOOP_VINFO_LOOP (loop_vinfo); in loop_niters_no_overflow()
8259 tree type = TREE_TYPE (LOOP_VINFO_NITERS (loop_vinfo)); in loop_niters_no_overflow()
8294 vect_record_loop_mask (loop_vec_info loop_vinfo, vec_loop_masks *masks, in vect_record_loop_mask() argument
8305 LOOP_VINFO_VECT_FACTOR (loop_vinfo)).to_constant (); in vect_record_loop_mask()
8310 loop_vinfo->scalar_cond_masked_set.add (cond); in vect_record_loop_mask()
8407 maybe_set_vectorized_backedge_value (loop_vec_info loop_vinfo, in maybe_set_vectorized_backedge_value() argument
8419 && (phi_info = loop_vinfo->lookup_stmt (phi)) in maybe_set_vectorized_backedge_value()
8450 vect_transform_loop_stmt (loop_vec_info loop_vinfo, stmt_vec_info stmt_info, in vect_transform_loop_stmt() argument
8453 class loop *loop = LOOP_VINFO_LOOP (loop_vinfo); in vect_transform_loop_stmt()
8454 poly_uint64 vf = LOOP_VINFO_VECT_FACTOR (loop_vinfo); in vect_transform_loop_stmt()
8680 vect_transform_loop (loop_vec_info loop_vinfo, gimple *loop_vectorized_call) in vect_transform_loop() argument
8682 class loop *loop = LOOP_VINFO_LOOP (loop_vinfo); in vect_transform_loop()
8684 basic_block *bbs = LOOP_VINFO_BBS (loop_vinfo); in vect_transform_loop()
8690 poly_uint64 vf = LOOP_VINFO_VECT_FACTOR (loop_vinfo); in vect_transform_loop()
8698 loop_vinfo->shared->check_datarefs (); in vect_transform_loop()
8705 th = LOOP_VINFO_COST_MODEL_THRESHOLD (loop_vinfo); in vect_transform_loop()
8706 if (vect_apply_runtime_profitability_check_p (loop_vinfo)) in vect_transform_loop()
8728 if (LOOP_REQUIRES_VERSIONING (loop_vinfo)) in vect_transform_loop()
8731 = vect_loop_versioning (loop_vinfo, loop_vectorized_call); in vect_transform_loop()
8741 if (LOOP_VINFO_SCALAR_LOOP (loop_vinfo)) in vect_transform_loop()
8743 e = single_exit (LOOP_VINFO_SCALAR_LOOP (loop_vinfo)); in vect_transform_loop()
8752 tree niters = vect_build_loop_niters (loop_vinfo); in vect_transform_loop()
8753 LOOP_VINFO_NITERS_UNCHANGED (loop_vinfo) = niters; in vect_transform_loop()
8754 tree nitersm1 = unshare_expr (LOOP_VINFO_NITERSM1 (loop_vinfo)); in vect_transform_loop()
8755 bool niters_no_overflow = loop_niters_no_overflow (loop_vinfo); in vect_transform_loop()
8759 epilogue = vect_do_peeling (loop_vinfo, niters, nitersm1, &niters_vector, in vect_transform_loop()
8764 if (LOOP_VINFO_SCALAR_LOOP (loop_vinfo) in vect_transform_loop()
8765 && LOOP_VINFO_SCALAR_LOOP_SCALING (loop_vinfo).initialized_p ()) in vect_transform_loop()
8766 scale_loop_frequencies (LOOP_VINFO_SCALAR_LOOP (loop_vinfo), in vect_transform_loop()
8767 LOOP_VINFO_SCALAR_LOOP_SCALING (loop_vinfo)); in vect_transform_loop()
8771 if (LOOP_VINFO_NITERS_KNOWN_P (loop_vinfo) in vect_transform_loop()
8772 && !LOOP_VINFO_FULLY_MASKED_P (loop_vinfo) in vect_transform_loop()
8776 = build_int_cst (TREE_TYPE (LOOP_VINFO_NITERS (loop_vinfo)), in vect_transform_loop()
8777 LOOP_VINFO_INT_NITERS (loop_vinfo) / lowest_vf); in vect_transform_loop()
8781 vect_gen_vector_loop_niters (loop_vinfo, niters, &niters_vector, in vect_transform_loop()
8792 if (LOOP_VINFO_FULLY_MASKED_P (loop_vinfo) in vect_transform_loop()
8793 && vect_use_loop_mask_for_alignment_p (loop_vinfo)) in vect_transform_loop()
8795 vect_prepare_for_masked_peels (loop_vinfo); in vect_transform_loop()
8799 if (!loop_vinfo->slp_instances.is_empty ()) in vect_transform_loop()
8802 vect_schedule_slp (loop_vinfo); in vect_transform_loop()
8822 stmt_info = loop_vinfo->lookup_stmt (phi); in vect_transform_loop()
8856 stmt_info = loop_vinfo->lookup_stmt (phi); in vect_transform_loop()
8870 maybe_set_vectorized_backedge_value (loop_vinfo, stmt_info); in vect_transform_loop()
8886 stmt_info = loop_vinfo->lookup_stmt (stmt); in vect_transform_loop()
8901 = loop_vinfo->lookup_stmt (gsi_stmt (subsi)); in vect_transform_loop()
8902 vect_transform_loop_stmt (loop_vinfo, pat_stmt_info, in vect_transform_loop()
8907 if (vect_transform_loop_stmt (loop_vinfo, pat_stmt_info, in vect_transform_loop()
8909 maybe_set_vectorized_backedge_value (loop_vinfo, in vect_transform_loop()
8914 if (vect_transform_loop_stmt (loop_vinfo, stmt_info, &si, in vect_transform_loop()
8916 maybe_set_vectorized_backedge_value (loop_vinfo, in vect_transform_loop()
8930 loop_vinfo->remove_stmt (stmt_info); in vect_transform_loop()
8973 vect_set_loop_condition (loop, loop_vinfo, niters_vector, step_vector, in vect_transform_loop()
8976 unsigned int assumed_vf = vect_vf_for_cost (loop_vinfo); in vect_transform_loop()
8981 bool final_iter_may_be_partial = LOOP_VINFO_FULLY_MASKED_P (loop_vinfo); in vect_transform_loop()
8985 int min_epilogue_iters = LOOP_VINFO_PEELING_FOR_GAPS (loop_vinfo) ? 1 : 0; in vect_transform_loop()
8991 int alignment_npeels = LOOP_VINFO_PEELING_FOR_ALIGNMENT (loop_vinfo); in vect_transform_loop()
8992 if (alignment_npeels && LOOP_VINFO_FULLY_MASKED_P (loop_vinfo)) in vect_transform_loop()
9027 if (!LOOP_VINFO_EPILOGUE_P (loop_vinfo)) in vect_transform_loop()
9039 GET_MODE_NAME (loop_vinfo->vector_mode)); in vect_transform_loop()
9054 FOR_EACH_VEC_ELT (LOOP_VINFO_SLP_INSTANCES (loop_vinfo), i, instance) in vect_transform_loop()
9056 LOOP_VINFO_SLP_INSTANCES (loop_vinfo).release (); in vect_transform_loop()
9289 vect_iv_limit_for_full_masking (loop_vec_info loop_vinfo) in vect_iv_limit_for_full_masking() argument
9291 tree niters_skip = LOOP_VINFO_MASK_SKIP_NITERS (loop_vinfo); in vect_iv_limit_for_full_masking()
9292 class loop *loop = LOOP_VINFO_LOOP (loop_vinfo); in vect_iv_limit_for_full_masking()
9293 unsigned HOST_WIDE_INT max_vf = vect_max_vf (loop_vinfo); in vect_iv_limit_for_full_masking()
9311 else if (LOOP_VINFO_PEELING_FOR_ALIGNMENT (loop_vinfo)) in vect_iv_limit_for_full_masking()
9318 poly_uint64 vf = LOOP_VINFO_VECT_FACTOR (loop_vinfo); in vect_iv_limit_for_full_masking()