Lines matching refs:loop_vinfo (cross-reference listing for the GCC loop vectorizer). Each entry shows the source line number, the matched line, and the enclosing function; entries additionally tagged "argument" or "local" are the ones the indexer records as declaring loop_vinfo as a function parameter or a local variable.

289 vect_determine_vectorization_factor (loop_vec_info loop_vinfo)  in vect_determine_vectorization_factor()  argument
291 struct loop *loop = LOOP_VINFO_LOOP (loop_vinfo); in vect_determine_vectorization_factor()
292 basic_block *bbs = LOOP_VINFO_BBS (loop_vinfo); in vect_determine_vectorization_factor()
312 stmt_info = loop_vinfo->lookup_stmt (phi); in vect_determine_vectorization_factor()
356 stmt_info = loop_vinfo->lookup_stmt (gsi_stmt (si)); in vect_determine_vectorization_factor()
376 LOOP_VINFO_VECT_FACTOR (loop_vinfo) = vectorization_factor; in vect_determine_vectorization_factor()
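
The vect_determine_vectorization_factor entries above walk every phi and statement in the loop body and end by storing LOOP_VINFO_VECT_FACTOR. As a hedged, standalone sketch of the underlying calculation (illustrative C++, not GCC internals; determine_vf and stmt_lane_counts are made-up names): each statement's vector type fixes a lane count, and the loop's factor must be a common multiple of all of them.

#include <numeric>
#include <vector>

/* Standalone model only: each statement's vector type implies a lane
   count, and the loop's vectorization factor must be a multiple of
   every one of them, i.e. their least common multiple (for
   power-of-two lane counts, simply the maximum).  Lane counts are
   assumed to be >= 1.  */
static unsigned
determine_vf (const std::vector<unsigned> &stmt_lane_counts)
{
  unsigned vf = 1;
  for (unsigned n : stmt_lane_counts)
    vf = std::lcm (vf, n);
  return vf;
}
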
468 loop_vec_info loop_vinfo = STMT_VINFO_LOOP_VINFO (stmt_info); in vect_inner_phi_in_double_reduction_p() local
472 if (stmt_vec_info def_info = loop_vinfo->lookup_def (USE_FROM_PTR (use_p))) in vect_inner_phi_in_double_reduction_p()
486 vect_analyze_scalar_cycles_1 (loop_vec_info loop_vinfo, struct loop *loop) in vect_analyze_scalar_cycles_1() argument
504 stmt_vec_info stmt_vinfo = loop_vinfo->lookup_stmt (phi); in vect_analyze_scalar_cycles_1()
533 || (LOOP_VINFO_LOOP (loop_vinfo) != loop in vect_analyze_scalar_cycles_1()
564 = vect_force_simple_reduction (loop_vinfo, stmt_vinfo, in vect_analyze_scalar_cycles_1()
580 if (loop != LOOP_VINFO_LOOP (loop_vinfo)) in vect_analyze_scalar_cycles_1()
601 LOOP_VINFO_REDUCTIONS (loop_vinfo).safe_push in vect_analyze_scalar_cycles_1()
636 vect_analyze_scalar_cycles (loop_vec_info loop_vinfo) in vect_analyze_scalar_cycles() argument
638 struct loop *loop = LOOP_VINFO_LOOP (loop_vinfo); in vect_analyze_scalar_cycles()
640 vect_analyze_scalar_cycles_1 (loop_vinfo, loop); in vect_analyze_scalar_cycles()
652 vect_analyze_scalar_cycles_1 (loop_vinfo, loop->inner); in vect_analyze_scalar_cycles()
682 vect_fixup_scalar_cycles_with_patterns (loop_vec_info loop_vinfo) in vect_fixup_scalar_cycles_with_patterns() argument
687 FOR_EACH_VEC_ELT (LOOP_VINFO_REDUCTION_CHAINS (loop_vinfo), i, first) in vect_fixup_scalar_cycles_with_patterns()
702 LOOP_VINFO_REDUCTION_CHAINS (loop_vinfo)[i] in vect_fixup_scalar_cycles_with_patterns() local
972 cse_and_gimplify_to_preheader (loop_vec_info loop_vinfo, tree expr) in cse_and_gimplify_to_preheader() argument
978 if (! loop_vinfo->ivexpr_map) in cse_and_gimplify_to_preheader()
979 loop_vinfo->ivexpr_map = new hash_map<tree_operand_hash, tree>; in cse_and_gimplify_to_preheader()
980 tree &cached = loop_vinfo->ivexpr_map->get_or_insert (expr); in cse_and_gimplify_to_preheader()
988 edge e = loop_preheader_edge (LOOP_VINFO_LOOP (loop_vinfo)); in cse_and_gimplify_to_preheader()
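
The cse_and_gimplify_to_preheader entries show the ivexpr_map being created lazily and consulted with get_or_insert, so each distinct IV expression is gimplified onto the preheader edge only once. A minimal standalone model of that cache-or-compute idiom (illustrative names; std::unordered_map stands in for GCC's hash_map):

#include <cstdio>
#include <string>
#include <unordered_map>

/* Standalone model of the ivexpr_map idiom: the first request for an
   expression "materializes" it once on the preheader and caches the
   result; later requests reuse the cached value.  loop_info and
   cse_to_preheader are illustrative names.  */
struct loop_info
{
  std::unordered_map<std::string, int> ivexpr_map;
  int next_id = 0;

  int cse_to_preheader (const std::string &expr)
  {
    auto [it, inserted] = ivexpr_map.try_emplace (expr, 0);
    if (inserted)
      {
        it->second = next_id++;   /* "gimplify" to the preheader once */
        std::printf ("emit %s on preheader edge as t%d\n",
                     expr.c_str (), it->second);
      }
    return it->second;            /* every later lookup hits the cache */
  }
};
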
999 can_produce_all_loop_masks_p (loop_vec_info loop_vinfo, tree cmp_type) in can_produce_all_loop_masks_p() argument
1003 FOR_EACH_VEC_ELT (LOOP_VINFO_MASKS (loop_vinfo), i, rgm) in can_produce_all_loop_masks_p()
1016 vect_get_max_nscalars_per_iter (loop_vec_info loop_vinfo) in vect_get_max_nscalars_per_iter() argument
1021 FOR_EACH_VEC_ELT (LOOP_VINFO_MASKS (loop_vinfo), i, rgm) in vect_get_max_nscalars_per_iter()
1031 vect_verify_full_masking (loop_vec_info loop_vinfo) in vect_verify_full_masking() argument
1033 struct loop *loop = LOOP_VINFO_LOOP (loop_vinfo); in vect_verify_full_masking()
1039 if (LOOP_VINFO_MASKS (loop_vinfo).is_empty ()) in vect_verify_full_masking()
1044 tree ni_type = TREE_TYPE (LOOP_VINFO_NITERSM1 (loop_vinfo)); in vect_verify_full_masking()
1053 max_ni *= vect_get_max_nscalars_per_iter (loop_vinfo); in vect_verify_full_masking()
1069 && can_produce_all_loop_masks_p (loop_vinfo, this_type)) in vect_verify_full_masking()
1085 LOOP_VINFO_MASK_COMPARE_TYPE (loop_vinfo) = cmp_type; in vect_verify_full_masking()
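
The vect_verify_full_masking entries compute an upper bound on the iteration count, scale it by the result of vect_get_max_nscalars_per_iter, and then look for a comparison type wide enough that the loop masks can be produced without overflow. A simplified standalone sketch of that width search (it assumes the target can produce masks for whichever width is chosen, which GCC checks separately via can_produce_all_loop_masks_p):

#include <cstdint>
#include <initializer_list>
#include <optional>

/* Simplified model: pick the narrowest comparison width that can
   represent the largest scalar count the masks will be compared
   against (max iterations times max scalars handled per iteration).
   The multiplication is assumed not to overflow 64 bits here.  */
static std::optional<unsigned>
pick_mask_compare_width (uint64_t max_ni, uint64_t max_nscalars_per_iter)
{
  uint64_t max_value = max_ni * max_nscalars_per_iter;

  for (unsigned bits : {8u, 16u, 32u, 64u})
    if (bits == 64 || max_value <= (uint64_t (1) << bits) - 1)
      return bits;     /* narrowest width that cannot wrap */
  return std::nullopt; /* not reached with the candidate widths above */
}
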
1091 vect_compute_single_scalar_iteration_cost (loop_vec_info loop_vinfo) in vect_compute_single_scalar_iteration_cost() argument
1093 struct loop *loop = LOOP_VINFO_LOOP (loop_vinfo); in vect_compute_single_scalar_iteration_cost()
1094 basic_block *bbs = LOOP_VINFO_BBS (loop_vinfo); in vect_compute_single_scalar_iteration_cost()
1120 stmt_vec_info stmt_info = loop_vinfo->lookup_stmt (stmt); in vect_compute_single_scalar_iteration_cost()
1144 record_stmt_cost (&LOOP_VINFO_SCALAR_ITERATION_COST (loop_vinfo), in vect_compute_single_scalar_iteration_cost()
1153 FOR_EACH_VEC_ELT (LOOP_VINFO_SCALAR_ITERATION_COST (loop_vinfo), in vect_compute_single_scalar_iteration_cost()
1161 LOOP_VINFO_SINGLE_SCALAR_ITERATION_COST (loop_vinfo) = body_cost; in vect_compute_single_scalar_iteration_cost()
1339 loop_vec_info loop_vinfo = new _loop_vec_info (loop, shared); in vect_analyze_loop_form() local
1340 LOOP_VINFO_NITERSM1 (loop_vinfo) = number_of_iterationsm1; in vect_analyze_loop_form()
1341 LOOP_VINFO_NITERS (loop_vinfo) = number_of_iterations; in vect_analyze_loop_form()
1342 LOOP_VINFO_NITERS_UNCHANGED (loop_vinfo) = number_of_iterations; in vect_analyze_loop_form()
1354 LOOP_VINFO_NITERS_ASSUMPTIONS (loop_vinfo) = assumptions; in vect_analyze_loop_form()
1357 if (!LOOP_VINFO_NITERS_KNOWN_P (loop_vinfo)) in vect_analyze_loop_form()
1368 stmt_vec_info loop_cond_info = loop_vinfo->lookup_stmt (loop_cond); in vect_analyze_loop_form()
1373 = loop_vinfo->lookup_stmt (inner_loop_cond); in vect_analyze_loop_form()
1378 loop->aux = loop_vinfo; in vect_analyze_loop_form()
1379 return opt_loop_vec_info::success (loop_vinfo); in vect_analyze_loop_form()
1388 vect_update_vf_for_slp (loop_vec_info loop_vinfo) in vect_update_vf_for_slp() argument
1390 struct loop *loop = LOOP_VINFO_LOOP (loop_vinfo); in vect_update_vf_for_slp()
1391 basic_block *bbs = LOOP_VINFO_BBS (loop_vinfo); in vect_update_vf_for_slp()
1398 vectorization_factor = LOOP_VINFO_VECT_FACTOR (loop_vinfo); in vect_update_vf_for_slp()
1413 stmt_vec_info stmt_info = loop_vinfo->lookup_stmt (gsi_stmt (si)); in vect_update_vf_for_slp()
1428 vectorization_factor = LOOP_VINFO_SLP_UNROLLING_FACTOR (loop_vinfo); in vect_update_vf_for_slp()
1440 LOOP_VINFO_SLP_UNROLLING_FACTOR (loop_vinfo)); in vect_update_vf_for_slp()
1443 LOOP_VINFO_VECT_FACTOR (loop_vinfo) = vectorization_factor; in vect_update_vf_for_slp()
1484 vect_analyze_loop_operations (loop_vec_info loop_vinfo) in vect_analyze_loop_operations() argument
1486 struct loop *loop = LOOP_VINFO_LOOP (loop_vinfo); in vect_analyze_loop_operations()
1487 basic_block *bbs = LOOP_VINFO_BBS (loop_vinfo); in vect_analyze_loop_operations()
1508 stmt_info = loop_vinfo->lookup_stmt (phi); in vect_analyze_loop_operations()
1538 stmt_vec_info op_def_info = loop_vinfo->lookup_def (phi_op); in vect_analyze_loop_operations()
1596 = vect_analyze_stmt (loop_vinfo->lookup_stmt (stmt), in vect_analyze_loop_operations()
1605 add_stmt_costs (loop_vinfo->target_cost_data, &cost_vec); in vect_analyze_loop_operations()
1630 vect_analyze_loop_costing (loop_vec_info loop_vinfo) in vect_analyze_loop_costing() argument
1632 struct loop *loop = LOOP_VINFO_LOOP (loop_vinfo); in vect_analyze_loop_costing()
1633 unsigned int assumed_vf = vect_vf_for_cost (loop_vinfo); in vect_analyze_loop_costing()
1637 if (!LOOP_VINFO_FULLY_MASKED_P (loop_vinfo)) in vect_analyze_loop_costing()
1641 if (LOOP_VINFO_NITERS_KNOWN_P (loop_vinfo)) in vect_analyze_loop_costing()
1642 max_niter = LOOP_VINFO_INT_NITERS (loop_vinfo); in vect_analyze_loop_costing()
1658 vect_estimate_min_profitable_iters (loop_vinfo, &min_profitable_iters, in vect_analyze_loop_costing()
1681 LOOP_VINFO_COST_MODEL_THRESHOLD (loop_vinfo) = th; in vect_analyze_loop_costing()
1683 if (LOOP_VINFO_NITERS_KNOWN_P (loop_vinfo) in vect_analyze_loop_costing()
1684 && LOOP_VINFO_INT_NITERS (loop_vinfo) < th) in vect_analyze_loop_costing()
1785 vect_analyze_loop_2 (loop_vec_info loop_vinfo, bool &fatal, unsigned *n_stmts) in vect_analyze_loop_2() argument
1795 if (LOOP_VINFO_SIMD_IF_COND (loop_vinfo) in vect_analyze_loop_2()
1796 && integer_zerop (LOOP_VINFO_SIMD_IF_COND (loop_vinfo))) in vect_analyze_loop_2()
1803 loop_p loop = LOOP_VINFO_LOOP (loop_vinfo); in vect_analyze_loop_2()
1806 if (!LOOP_VINFO_DATAREFS (loop_vinfo).exists ()) in vect_analyze_loop_2()
1809 = vect_get_datarefs_in_loop (loop, LOOP_VINFO_BBS (loop_vinfo), in vect_analyze_loop_2()
1810 &LOOP_VINFO_DATAREFS (loop_vinfo), in vect_analyze_loop_2()
1821 loop_vinfo->shared->save_datarefs (); in vect_analyze_loop_2()
1824 loop_vinfo->shared->check_datarefs (); in vect_analyze_loop_2()
1829 ok = vect_analyze_data_refs (loop_vinfo, &min_vf); in vect_analyze_loop_2()
1840 vect_analyze_scalar_cycles (loop_vinfo); in vect_analyze_loop_2()
1842 vect_pattern_recog (loop_vinfo); in vect_analyze_loop_2()
1844 vect_fixup_scalar_cycles_with_patterns (loop_vinfo); in vect_analyze_loop_2()
1849 ok = vect_analyze_data_ref_accesses (loop_vinfo); in vect_analyze_loop_2()
1860 ok = vect_mark_stmts_to_be_vectorized (loop_vinfo); in vect_analyze_loop_2()
1877 ok = vect_analyze_data_ref_dependences (loop_vinfo, &max_vf); in vect_analyze_loop_2()
1888 LOOP_VINFO_MAX_VECT_FACTOR (loop_vinfo) = max_vf; in vect_analyze_loop_2()
1890 ok = vect_determine_vectorization_factor (loop_vinfo); in vect_analyze_loop_2()
1899 && maybe_lt (max_vf, LOOP_VINFO_VECT_FACTOR (loop_vinfo))) in vect_analyze_loop_2()
1903 vect_compute_single_scalar_iteration_cost (loop_vinfo); in vect_analyze_loop_2()
1905 poly_uint64 saved_vectorization_factor = LOOP_VINFO_VECT_FACTOR (loop_vinfo); in vect_analyze_loop_2()
1909 ok = vect_analyze_slp (loop_vinfo, *n_stmts); in vect_analyze_loop_2()
1914 bool slp = vect_make_slp_decision (loop_vinfo); in vect_analyze_loop_2()
1918 vect_detect_hybrid_slp (loop_vinfo); in vect_analyze_loop_2()
1921 vect_update_vf_for_slp (loop_vinfo); in vect_analyze_loop_2()
1924 bool saved_can_fully_mask_p = LOOP_VINFO_CAN_FULLY_MASK_P (loop_vinfo); in vect_analyze_loop_2()
1928 gcc_assert (LOOP_VINFO_MASKS (loop_vinfo).is_empty ()); in vect_analyze_loop_2()
1934 poly_uint64 vectorization_factor = LOOP_VINFO_VECT_FACTOR (loop_vinfo); in vect_analyze_loop_2()
1937 if (LOOP_VINFO_NITERS_KNOWN_P (loop_vinfo) && dump_enabled_p ()) in vect_analyze_loop_2()
1943 LOOP_VINFO_INT_NITERS (loop_vinfo)); in vect_analyze_loop_2()
1947 = likely_max_stmt_executions_int (LOOP_VINFO_LOOP (loop_vinfo)); in vect_analyze_loop_2()
1952 ok = vect_analyze_data_refs_alignment (loop_vinfo); in vect_analyze_loop_2()
1964 ok = vect_prune_runtime_alias_test_list (loop_vinfo); in vect_analyze_loop_2()
1971 if (!LOOP_VINFO_EPILOGUE_P (loop_vinfo)) in vect_analyze_loop_2()
1974 ok = vect_enhance_data_refs_alignment (loop_vinfo); in vect_analyze_loop_2()
1976 ok = vect_verify_datarefs_alignment (loop_vinfo); in vect_analyze_loop_2()
1985 unsigned old_size = LOOP_VINFO_SLP_INSTANCES (loop_vinfo).length (); in vect_analyze_loop_2()
1986 vect_slp_analyze_operations (loop_vinfo); in vect_analyze_loop_2()
1987 if (LOOP_VINFO_SLP_INSTANCES (loop_vinfo).length () != old_size) in vect_analyze_loop_2()
1997 ok = vect_analyze_loop_operations (loop_vinfo); in vect_analyze_loop_2()
2008 LOOP_VINFO_FULLY_MASKED_P (loop_vinfo) in vect_analyze_loop_2()
2009 = (LOOP_VINFO_CAN_FULLY_MASK_P (loop_vinfo) in vect_analyze_loop_2()
2010 && vect_verify_full_masking (loop_vinfo)); in vect_analyze_loop_2()
2013 if (LOOP_VINFO_FULLY_MASKED_P (loop_vinfo)) in vect_analyze_loop_2()
2024 if (LOOP_VINFO_PEELING_FOR_GAPS (loop_vinfo) in vect_analyze_loop_2()
2025 && LOOP_VINFO_NITERS_KNOWN_P (loop_vinfo) in vect_analyze_loop_2()
2026 && !LOOP_VINFO_FULLY_MASKED_P (loop_vinfo)) in vect_analyze_loop_2()
2028 poly_uint64 vf = LOOP_VINFO_VECT_FACTOR (loop_vinfo); in vect_analyze_loop_2()
2029 tree scalar_niters = LOOP_VINFO_NITERSM1 (loop_vinfo); in vect_analyze_loop_2()
2038 res = vect_analyze_loop_costing (loop_vinfo); in vect_analyze_loop_2()
2051 th = LOOP_VINFO_COST_MODEL_THRESHOLD (loop_vinfo); in vect_analyze_loop_2()
2054 if (LOOP_VINFO_FULLY_MASKED_P (loop_vinfo)) in vect_analyze_loop_2()
2056 LOOP_VINFO_PEELING_FOR_NITER (loop_vinfo) = false; in vect_analyze_loop_2()
2057 else if (LOOP_VINFO_NITERS_KNOWN_P (loop_vinfo) in vect_analyze_loop_2()
2058 && LOOP_VINFO_PEELING_FOR_ALIGNMENT (loop_vinfo) >= 0) in vect_analyze_loop_2()
2062 unsigned int peel_niter = LOOP_VINFO_PEELING_FOR_ALIGNMENT (loop_vinfo); in vect_analyze_loop_2()
2063 if (LOOP_VINFO_PEELING_FOR_GAPS (loop_vinfo)) in vect_analyze_loop_2()
2065 if (!multiple_p (LOOP_VINFO_INT_NITERS (loop_vinfo) - peel_niter, in vect_analyze_loop_2()
2066 LOOP_VINFO_VECT_FACTOR (loop_vinfo))) in vect_analyze_loop_2()
2067 LOOP_VINFO_PEELING_FOR_NITER (loop_vinfo) = true; in vect_analyze_loop_2()
2069 else if (LOOP_VINFO_PEELING_FOR_ALIGNMENT (loop_vinfo) in vect_analyze_loop_2()
2073 || LOOP_VINFO_PEELING_FOR_GAPS (loop_vinfo) in vect_analyze_loop_2()
2074 || !LOOP_VINFO_VECT_FACTOR (loop_vinfo).is_constant (&const_vf) in vect_analyze_loop_2()
2075 || ((tree_ctz (LOOP_VINFO_NITERS (loop_vinfo)) in vect_analyze_loop_2()
2080 && (!LOOP_REQUIRES_VERSIONING (loop_vinfo) in vect_analyze_loop_2()
2083 LOOP_VINFO_PEELING_FOR_NITER (loop_vinfo) = true; in vect_analyze_loop_2()
2086 if (LOOP_VINFO_PEELING_FOR_GAPS (loop_vinfo) in vect_analyze_loop_2()
2087 || LOOP_VINFO_PEELING_FOR_NITER (loop_vinfo)) in vect_analyze_loop_2()
2091 if (!vect_can_advance_ivs_p (loop_vinfo) in vect_analyze_loop_2()
2092 || !slpeel_can_duplicate_loop_p (LOOP_VINFO_LOOP (loop_vinfo), in vect_analyze_loop_2()
2094 (loop_vinfo)))) in vect_analyze_loop_2()
2107 if (LOOP_REQUIRES_VERSIONING (loop_vinfo)) in vect_analyze_loop_2()
2111 if (!vect_use_loop_mask_for_alignment_p (loop_vinfo)) in vect_analyze_loop_2()
2114 if (LOOP_VINFO_PEELING_FOR_ALIGNMENT (loop_vinfo) < 0) in vect_analyze_loop_2()
2116 dr_vec_info *dr_info = LOOP_VINFO_UNALIGNED_DR (loop_vinfo); in vect_analyze_loop_2()
2121 niters_th += LOOP_VINFO_PEELING_FOR_ALIGNMENT (loop_vinfo); in vect_analyze_loop_2()
2125 if (!LOOP_VINFO_FULLY_MASKED_P (loop_vinfo)) in vect_analyze_loop_2()
2126 niters_th += LOOP_VINFO_VECT_FACTOR (loop_vinfo); in vect_analyze_loop_2()
2128 if (LOOP_VINFO_PEELING_FOR_GAPS (loop_vinfo)) in vect_analyze_loop_2()
2130 LOOP_VINFO_VERSIONING_THRESHOLD (loop_vinfo) = niters_th; in vect_analyze_loop_2()
2134 LOOP_VINFO_VECT_FACTOR (loop_vinfo))); in vect_analyze_loop_2()
2149 if (! LOOP_VINFO_REDUCTION_CHAINS (loop_vinfo).is_empty ()) in vect_analyze_loop_2()
2157 FOR_EACH_VEC_ELT (LOOP_VINFO_SLP_INSTANCES (loop_vinfo), i, instance) in vect_analyze_loop_2()
2193 LOOP_VINFO_VECT_FACTOR (loop_vinfo) = saved_vectorization_factor; in vect_analyze_loop_2()
2195 FOR_EACH_VEC_ELT (LOOP_VINFO_SLP_INSTANCES (loop_vinfo), j, instance) in vect_analyze_loop_2()
2197 LOOP_VINFO_SLP_INSTANCES (loop_vinfo).release (); in vect_analyze_loop_2()
2199 for (i = 0; i < LOOP_VINFO_LOOP (loop_vinfo)->num_nodes; ++i) in vect_analyze_loop_2()
2201 basic_block bb = LOOP_VINFO_BBS (loop_vinfo)[i]; in vect_analyze_loop_2()
2205 stmt_vec_info stmt_info = loop_vinfo->lookup_stmt (gsi_stmt (si)); in vect_analyze_loop_2()
2211 stmt_vec_info stmt_info = loop_vinfo->lookup_stmt (gsi_stmt (si)); in vect_analyze_loop_2()
2220 STMT_SLP_TYPE (loop_vinfo->lookup_stmt (gsi_stmt (pi))) in vect_analyze_loop_2()
2226 LOOP_VINFO_LOWER_BOUNDS (loop_vinfo).truncate (0); in vect_analyze_loop_2()
2227 LOOP_VINFO_COMP_ALIAS_DDRS (loop_vinfo).release (); in vect_analyze_loop_2()
2228 LOOP_VINFO_CHECK_UNEQUAL_ADDRS (loop_vinfo).release (); in vect_analyze_loop_2()
2230 destroy_cost_data (LOOP_VINFO_TARGET_COST_DATA (loop_vinfo)); in vect_analyze_loop_2()
2231 LOOP_VINFO_TARGET_COST_DATA (loop_vinfo) in vect_analyze_loop_2()
2232 = init_cost (LOOP_VINFO_LOOP (loop_vinfo)); in vect_analyze_loop_2()
2234 release_vec_loop_masks (&LOOP_VINFO_MASKS (loop_vinfo)); in vect_analyze_loop_2()
2236 LOOP_VINFO_PEELING_FOR_NITER (loop_vinfo) = false; in vect_analyze_loop_2()
2237 LOOP_VINFO_PEELING_FOR_GAPS (loop_vinfo) = false; in vect_analyze_loop_2()
2238 LOOP_VINFO_COST_MODEL_THRESHOLD (loop_vinfo) = 0; in vect_analyze_loop_2()
2239 LOOP_VINFO_VERSIONING_THRESHOLD (loop_vinfo) = 0; in vect_analyze_loop_2()
2240 LOOP_VINFO_CAN_FULLY_MASK_P (loop_vinfo) = saved_can_fully_mask_p; in vect_analyze_loop_2()
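
The tail of the vect_analyze_loop_2 listing (source lines 2193 onward) is the retry path: when an SLP-specific constraint fails, the SLP instances are released, per-statement SLP state is cleared, and the saved vectorization factor, masking capability, cost data and peeling flags are restored before analysis is redone without SLP. A standalone sketch of that save/rollback/retry shape (the types and the analyze callback are illustrative, not GCC's):

/* Standalone model: try the analysis with the optimistic setting
   first; if it fails for a reason specific to that setting, restore
   the snapshot and retry once with the setting disabled.  */
struct analysis_state
{
  unsigned vectorization_factor;
  bool can_fully_mask;
};

static bool
analyze_with_retry (analysis_state &st,
                    bool (*analyze) (analysis_state &, bool use_slp))
{
  analysis_state saved = st;          /* snapshot before the optimistic try */
  if (analyze (st, /*use_slp=*/true))
    return true;

  st = saved;                         /* roll back everything SLP touched */
  return analyze (st, /*use_slp=*/false);
}
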
2281 opt_loop_vec_info loop_vinfo in vect_analyze_loop() local
2283 if (!loop_vinfo) in vect_analyze_loop()
2288 return loop_vinfo; in vect_analyze_loop()
2294 LOOP_VINFO_ORIG_LOOP_INFO (loop_vinfo) = orig_loop_vinfo; in vect_analyze_loop()
2296 opt_result res = vect_analyze_loop_2 (loop_vinfo, fatal, &n_stmts); in vect_analyze_loop()
2299 LOOP_VINFO_VECTORIZABLE_P (loop_vinfo) = 1; in vect_analyze_loop()
2301 return loop_vinfo; in vect_analyze_loop()
2304 delete loop_vinfo; in vect_analyze_loop() local
3265 vect_get_known_peeling_cost (loop_vec_info loop_vinfo, int peel_iters_prologue, in vect_get_known_peeling_cost() argument
3272 int assumed_vf = vect_vf_for_cost (loop_vinfo); in vect_get_known_peeling_cost()
3274 if (!LOOP_VINFO_NITERS_KNOWN_P (loop_vinfo)) in vect_get_known_peeling_cost()
3291 int niters = LOOP_VINFO_INT_NITERS (loop_vinfo); in vect_get_known_peeling_cost()
3297 if (LOOP_VINFO_PEELING_FOR_GAPS (loop_vinfo) && !*peel_iters_epilogue) in vect_get_known_peeling_cost()
3334 vect_estimate_min_profitable_iters (loop_vec_info loop_vinfo, in vect_estimate_min_profitable_iters() argument
3348 int assumed_vf = vect_vf_for_cost (loop_vinfo); in vect_estimate_min_profitable_iters()
3349 int npeel = LOOP_VINFO_PEELING_FOR_ALIGNMENT (loop_vinfo); in vect_estimate_min_profitable_iters()
3350 void *target_cost_data = LOOP_VINFO_TARGET_COST_DATA (loop_vinfo); in vect_estimate_min_profitable_iters()
3353 if (unlimited_cost_model (LOOP_VINFO_LOOP (loop_vinfo))) in vect_estimate_min_profitable_iters()
3363 if (LOOP_REQUIRES_VERSIONING_FOR_ALIGNMENT (loop_vinfo)) in vect_estimate_min_profitable_iters()
3366 unsigned len = LOOP_VINFO_MAY_MISALIGN_STMTS (loop_vinfo).length (); in vect_estimate_min_profitable_iters()
3376 if (LOOP_REQUIRES_VERSIONING_FOR_ALIAS (loop_vinfo)) in vect_estimate_min_profitable_iters()
3379 unsigned len = LOOP_VINFO_COMP_ALIAS_DDRS (loop_vinfo).length (); in vect_estimate_min_profitable_iters()
3382 len = LOOP_VINFO_CHECK_UNEQUAL_ADDRS (loop_vinfo).length (); in vect_estimate_min_profitable_iters()
3387 len = LOOP_VINFO_LOWER_BOUNDS (loop_vinfo).length (); in vect_estimate_min_profitable_iters()
3394 if (!LOOP_VINFO_LOWER_BOUNDS (loop_vinfo)[i].unsigned_p) in vect_estimate_min_profitable_iters()
3406 if (LOOP_REQUIRES_VERSIONING_FOR_NITERS (loop_vinfo)) in vect_estimate_min_profitable_iters()
3417 if (LOOP_REQUIRES_VERSIONING (loop_vinfo)) in vect_estimate_min_profitable_iters()
3430 = LOOP_VINFO_SINGLE_SCALAR_ITERATION_COST (loop_vinfo); in vect_estimate_min_profitable_iters()
3441 if (LOOP_VINFO_FULLY_MASKED_P (loop_vinfo)) in vect_estimate_min_profitable_iters()
3446 if (LOOP_VINFO_PEELING_FOR_GAPS (loop_vinfo)) in vect_estimate_min_profitable_iters()
3452 FOR_EACH_VEC_ELT (LOOP_VINFO_SCALAR_ITERATION_COST (loop_vinfo), in vect_estimate_min_profitable_iters()
3488 FOR_EACH_VEC_ELT (LOOP_VINFO_SCALAR_ITERATION_COST (loop_vinfo), j, si) in vect_estimate_min_profitable_iters()
3505 void *data = LOOP_VINFO_TARGET_COST_DATA (loop_vinfo); in vect_estimate_min_profitable_iters()
3511 (void) vect_get_known_peeling_cost (loop_vinfo, peel_iters_prologue, in vect_estimate_min_profitable_iters()
3514 (loop_vinfo), in vect_estimate_min_profitable_iters()
3583 if (!LOOP_VINFO_NITERS_KNOWN_P (loop_vinfo) in vect_estimate_min_profitable_iters()
3584 || LOOP_REQUIRES_VERSIONING (loop_vinfo)) in vect_estimate_min_profitable_iters()
3587 if (LOOP_REQUIRES_VERSIONING (loop_vinfo)) in vect_estimate_min_profitable_iters()
3592 if (LOOP_VINFO_PEELING_FOR_ALIGNMENT (loop_vinfo) < 0) in vect_estimate_min_profitable_iters()
3602 finish_cost (LOOP_VINFO_TARGET_COST_DATA (loop_vinfo), &vec_prologue_cost, in vect_estimate_min_profitable_iters()
3642 if (LOOP_VINFO_LOOP (loop_vinfo)->force_vectorize) in vect_estimate_min_profitable_iters()
3660 if (LOOP_VINFO_FULLY_MASKED_P (loop_vinfo)) in vect_estimate_min_profitable_iters()
3687 if (LOOP_VINFO_FULLY_MASKED_P (loop_vinfo)) in vect_estimate_min_profitable_iters()
3738 if (!LOOP_VINFO_FULLY_MASKED_P (loop_vinfo) in vect_estimate_min_profitable_iters()
3759 else if (LOOP_VINFO_FULLY_MASKED_P (loop_vinfo)) in vect_estimate_min_profitable_iters()
3771 if (LOOP_VINFO_FULLY_MASKED_P (loop_vinfo)) in vect_estimate_min_profitable_iters()
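
The vect_estimate_min_profitable_iters entries accumulate versioning, prologue/epilogue and per-iteration body costs and then solve for the break-even iteration count. Ignoring peeling and assuming the one-off costs are already summed, the core arithmetic reduces to the following simplified model (not GCC's exact formula):

/* Break-even point: scalar_single_iter_cost * n is compared against
   vec_outside_cost + vec_inside_cost * n / vf.  */
static long
min_profitable_iters (long scalar_single_iter_cost,
                      long vec_inside_cost,   /* one vector iteration */
                      long vec_outside_cost,  /* setup paid once */
                      long vf)
{
  long denom = scalar_single_iter_cost * vf - vec_inside_cost;
  if (denom <= 0)
    return -1;  /* the vector body is never cheaper per scalar iteration */

  /* Smallest n at which the vector version wins, rounded up.  */
  return (vec_outside_cost * vf + denom - 1) / denom;
}
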
3858 loop_vec_info loop_vinfo = STMT_VINFO_LOOP_VINFO (stmt_info); in vect_model_reduction_cost() local
3861 if (loop_vinfo) in vect_model_reduction_cost()
3862 loop = LOOP_VINFO_LOOP (loop_vinfo); in vect_model_reduction_cost()
4089 loop_vec_info loop_vinfo = STMT_VINFO_LOOP_VINFO (stmt_vinfo); in get_initial_def_for_reduction() local
4090 struct loop *loop = LOOP_VINFO_LOOP (loop_vinfo); in get_initial_def_for_reduction()
4398 loop_vec_info loop_vinfo = STMT_VINFO_LOOP_VINFO (stmt_info); in vect_create_epilog_for_reduction() local
4399 struct loop *loop = LOOP_VINFO_LOOP (loop_vinfo), *outer_loop = NULL; in vect_create_epilog_for_reduction()
4505 vect_is_simple_use (initial_def, loop_vinfo, &initial_def_dt); in vect_create_epilog_for_reduction()
4529 = vect_get_vec_def_for_stmt_copy (loop_vinfo, vec_init_def); in vect_create_epilog_for_reduction()
4556 def = vect_get_vec_def_for_stmt_copy (loop_vinfo, def); in vect_create_epilog_for_reduction()
4616 loop_vinfo->add_stmt (new_phi); in vect_create_epilog_for_reduction()
4641 stmt_vec_info index_vec_info = loop_vinfo->add_stmt (index_condition); in vect_create_epilog_for_reduction()
4689 stmt_vec_info phi_info = loop_vinfo->add_stmt (phi); in vect_create_epilog_for_reduction()
4694 def = vect_get_vec_def_for_stmt_copy (loop_vinfo, def); in vect_create_epilog_for_reduction()
4712 stmt_vec_info phi_info = loop_vinfo->lookup_stmt (phi); in vect_create_epilog_for_reduction()
4717 prev_phi_info = loop_vinfo->add_stmt (outer_phi); in vect_create_epilog_for_reduction()
4727 stmt_vec_info outer_phi_info = loop_vinfo->add_stmt (outer_phi); in vect_create_epilog_for_reduction()
4824 stmt_vec_info next_phi_info = loop_vinfo->lookup_stmt (new_phis[0]); in vect_create_epilog_for_reduction()
5497 stmt_vec_info epilog_stmt_info = loop_vinfo->add_stmt (epilog_stmt); in vect_create_epilog_for_reduction()
5499 = STMT_VINFO_RELATED_STMT (loop_vinfo->lookup_stmt (new_phi)); in vect_create_epilog_for_reduction()
5569 epilog_stmt_info = loop_vinfo->lookup_stmt (new_phis[k / ratio]); in vect_create_epilog_for_reduction()
5602 = loop_vinfo->lookup_stmt (exit_phi); in vect_create_epilog_for_reduction()
5641 use_stmt_vinfo = loop_vinfo->lookup_stmt (use_stmt); in vect_create_epilog_for_reduction()
5815 loop_vec_info loop_vinfo = STMT_VINFO_LOOP_VINFO (stmt_info); in vectorize_fold_left_reduction() local
5816 struct loop *loop = LOOP_VINFO_LOOP (loop_vinfo); in vectorize_fold_left_reduction()
5824 ncopies = vect_get_num_copies (loop_vinfo, vectype_in); in vectorize_fold_left_reduction()
5873 if (LOOP_VINFO_FULLY_MASKED_P (loop_vinfo)) in vectorize_fold_left_reduction()
5883 if (LOOP_VINFO_FULLY_MASKED_P (loop_vinfo)) in vectorize_fold_left_reduction()
6049 loop_vec_info loop_vinfo = STMT_VINFO_LOOP_VINFO (stmt_info); in vectorizable_reduction() local
6050 struct loop *loop = LOOP_VINFO_LOOP (loop_vinfo); in vectorizable_reduction()
6137 bool is_simple_use = vect_is_simple_use (op, loop_vinfo, &dt); in vectorizable_reduction()
6156 ncopies = vect_get_num_copies (loop_vinfo, vectype_in); in vectorizable_reduction()
6161 && (use_stmt_info = loop_vinfo->lookup_single_use (phi_result)) in vectorizable_reduction()
6172 (LOOP_VINFO_VECT_FACTOR (loop_vinfo) in vectorizable_reduction()
6189 stmt_vec_info new_phi_info = loop_vinfo->add_stmt (new_phi); in vectorizable_reduction()
6302 is_simple_use = vect_is_simple_use (ops[i], loop_vinfo, &dts[i], &tem, in vectorizable_reduction()
6494 vect_is_simple_use (cond_initial_val, loop_vinfo, &cond_initial_dt); in vectorizable_reduction()
6532 ncopies = vect_get_num_copies (loop_vinfo, vectype_in); in vectorizable_reduction()
6545 stmt_vec_info def_arg_stmt_info = loop_vinfo->lookup_def (def_arg); in vectorizable_reduction()
6615 || !vect_worthwhile_without_simd_p (loop_vinfo, code)) in vectorizable_reduction()
6624 && !vect_worthwhile_without_simd_p (loop_vinfo, code)) in vectorizable_reduction()
6934 && (use_stmt_info = loop_vinfo->lookup_single_use (reduc_phi_result)) in vectorizable_reduction()
6964 vec_loop_masks *masks = &LOOP_VINFO_MASKS (loop_vinfo); in vectorizable_reduction()
6969 if (loop_vinfo && LOOP_VINFO_CAN_FULLY_MASK_P (loop_vinfo)) in vectorizable_reduction()
6980 LOOP_VINFO_CAN_FULLY_MASK_P (loop_vinfo) = false; in vectorizable_reduction()
6988 LOOP_VINFO_CAN_FULLY_MASK_P (loop_vinfo) = false; in vectorizable_reduction()
6991 vect_record_loop_mask (loop_vinfo, masks, ncopies * vec_num, in vectorizable_reduction()
7011 bool masked_loop_p = LOOP_VINFO_FULLY_MASKED_P (loop_vinfo); in vectorizable_reduction()
7112 = vect_get_vec_def_for_stmt_copy (loop_vinfo, in vectorizable_reduction()
7118 = vect_get_vec_def_for_stmt_copy (loop_vinfo, in vectorizable_reduction()
7126 = vect_get_vec_def_for_stmt_copy (loop_vinfo, in vectorizable_reduction()
7234 loop_vec_info loop_vinfo = dyn_cast <loop_vec_info> (vinfo); in vect_worthwhile_without_simd_p() local
7236 return (loop_vinfo in vect_worthwhile_without_simd_p()
7237 && LOOP_VINFO_VECT_FACTOR (loop_vinfo).is_constant (&value) in vect_worthwhile_without_simd_p()
7254 loop_vec_info loop_vinfo = STMT_VINFO_LOOP_VINFO (stmt_info); in vectorizable_induction() local
7255 struct loop *loop = LOOP_VINFO_LOOP (loop_vinfo); in vectorizable_induction()
7268 poly_uint64 vf = LOOP_VINFO_VECT_FACTOR (loop_vinfo); in vectorizable_induction()
7296 ncopies = vect_get_num_copies (loop_vinfo, vectype); in vectorizable_induction()
7337 stmt_vec_info exit_phi_vinfo = loop_vinfo->lookup_stmt (exit_phi); in vectorizable_induction()
7403 tree skip_niters = LOOP_VINFO_MASK_SKIP_NITERS (loop_vinfo); in vectorizable_induction()
7488 = loop_vinfo->add_stmt (induction_phi); in vectorizable_induction()
7495 loop_vinfo->add_stmt (new_stmt); in vectorizable_induction()
7544 (loop_vinfo->add_stmt (new_stmt)); in vectorizable_induction()
7572 loop_vinfo->add_stmt (new_stmt); in vectorizable_induction()
7676 stmt_vec_info induction_phi_info = loop_vinfo->add_stmt (induction_phi); in vectorizable_induction()
7683 stmt_vec_info new_stmt_info = loop_vinfo->add_stmt (new_stmt); in vectorizable_induction()
7738 new_stmt_info = loop_vinfo->add_stmt (new_stmt); in vectorizable_induction()
7763 stmt_vec_info stmt_vinfo = loop_vinfo->lookup_stmt (exit_phi); in vectorizable_induction()
7798 loop_vec_info loop_vinfo = STMT_VINFO_LOOP_VINFO (stmt_info); in vectorizable_live_operation() local
7799 struct loop *loop = LOOP_VINFO_LOOP (loop_vinfo); in vectorizable_live_operation()
7824 gcc_assert (is_simple_and_all_uses_invariant (stmt_info, loop_vinfo)); in vectorizable_live_operation()
7835 ncopies = vect_get_num_copies (loop_vinfo, vectype); in vectorizable_live_operation()
7864 if (LOOP_VINFO_CAN_FULLY_MASK_P (loop_vinfo)) in vectorizable_live_operation()
7874 LOOP_VINFO_CAN_FULLY_MASK_P (loop_vinfo) = false; in vectorizable_live_operation()
7882 LOOP_VINFO_CAN_FULLY_MASK_P (loop_vinfo) = false; in vectorizable_live_operation()
7890 LOOP_VINFO_CAN_FULLY_MASK_P (loop_vinfo) = false; in vectorizable_live_operation()
7895 vect_record_loop_mask (loop_vinfo, in vectorizable_live_operation()
7896 &LOOP_VINFO_MASKS (loop_vinfo), in vectorizable_live_operation()
7919 gcc_assert (!LOOP_VINFO_FULLY_MASKED_P (loop_vinfo)); in vectorizable_live_operation()
7937 || !LOOP_VINFO_FULLY_MASKED_P (loop_vinfo)); in vectorizable_live_operation()
7941 vec_lhs = vect_get_vec_def_for_stmt_copy (loop_vinfo, vec_lhs); in vectorizable_live_operation()
7968 if (LOOP_VINFO_FULLY_MASKED_P (loop_vinfo)) in vectorizable_live_operation()
7978 tree mask = vect_get_loop_mask (gsi, &LOOP_VINFO_MASKS (loop_vinfo), 1, in vectorizable_live_operation()
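
The vectorizable_reduction and vectorizable_live_operation entries above share a protocol: while LOOP_VINFO_CAN_FULLY_MASK_P is still set, each statement either records the loop masks it will need via vect_record_loop_mask or clears the flag when the target cannot mask it, forcing the unmasked-loop-plus-epilogue fallback. A standalone sketch of that veto pattern (the struct and method names are illustrative):

#include <algorithm>

/* Standalone model: each statement either records its mask
   requirement or vetoes full masking for the whole loop.  */
struct loop_masking
{
  bool can_fully_mask = true;
  unsigned nmasks_needed = 0;

  void require_masks (unsigned nmasks, bool target_can_mask_this_stmt)
  {
    if (!can_fully_mask)
      return;                         /* already vetoed by another stmt */
    if (!target_can_mask_this_stmt)
      can_fully_mask = false;         /* one veto disables full masking */
    else
      nmasks_needed = std::max (nmasks_needed, nmasks);
  }
};
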
8086 loop_niters_no_overflow (loop_vec_info loop_vinfo) in loop_niters_no_overflow() argument
8089 if (LOOP_VINFO_NITERS_KNOWN_P (loop_vinfo)) in loop_niters_no_overflow()
8091 tree cst_niters = LOOP_VINFO_NITERS (loop_vinfo); in loop_niters_no_overflow()
8092 tree cst_nitersm1 = LOOP_VINFO_NITERSM1 (loop_vinfo); in loop_niters_no_overflow()
8101 struct loop *loop = LOOP_VINFO_LOOP (loop_vinfo); in loop_niters_no_overflow()
8105 tree type = TREE_TYPE (LOOP_VINFO_NITERS (loop_vinfo)); in loop_niters_no_overflow()
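
loop_niters_no_overflow compares the recorded iteration count against the count-minus-one (or, when the count is unknown, against the range implied by the type) to make sure the exit test the vectorizer emits cannot be fooled by wraparound. A tiny model of the known-constant case (an assumption about the check's intent, not a copy of GCC's code):

#include <cstdint>

/* Model: the vectorizer keeps both the iteration count and the count
   minus one; if the full count wrapped to zero in its type, nitersm1
   is no longer smaller than niters and the cheap exit test cannot be
   trusted.  */
static bool
niters_no_overflow (uint64_t niters, uint64_t nitersm1)
{
  return nitersm1 < niters;
}
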
8137 vect_record_loop_mask (loop_vec_info loop_vinfo, vec_loop_masks *masks, in vect_record_loop_mask() argument
8148 LOOP_VINFO_VECT_FACTOR (loop_vinfo)).to_constant (); in vect_record_loop_mask()
8244 vect_transform_loop_stmt (loop_vec_info loop_vinfo, stmt_vec_info stmt_info, in vect_transform_loop_stmt() argument
8247 struct loop *loop = LOOP_VINFO_LOOP (loop_vinfo); in vect_transform_loop_stmt()
8248 poly_uint64 vf = LOOP_VINFO_VECT_FACTOR (loop_vinfo); in vect_transform_loop_stmt()
8293 vect_transform_loop (loop_vec_info loop_vinfo) in vect_transform_loop() argument
8295 struct loop *loop = LOOP_VINFO_LOOP (loop_vinfo); in vect_transform_loop()
8297 basic_block *bbs = LOOP_VINFO_BBS (loop_vinfo); in vect_transform_loop()
8303 poly_uint64 vf = LOOP_VINFO_VECT_FACTOR (loop_vinfo); in vect_transform_loop()
8311 loop_vinfo->shared->check_datarefs (); in vect_transform_loop()
8318 th = LOOP_VINFO_COST_MODEL_THRESHOLD (loop_vinfo); in vect_transform_loop()
8319 if (th >= vect_vf_for_cost (loop_vinfo) in vect_transform_loop()
8320 && !LOOP_VINFO_NITERS_KNOWN_P (loop_vinfo)) in vect_transform_loop()
8342 if (LOOP_REQUIRES_VERSIONING (loop_vinfo)) in vect_transform_loop()
8345 = LOOP_VINFO_VERSIONING_THRESHOLD (loop_vinfo); in vect_transform_loop()
8354 = vect_loop_versioning (loop_vinfo, th, check_profitability, in vect_transform_loop()
8365 if (LOOP_VINFO_SCALAR_LOOP (loop_vinfo)) in vect_transform_loop()
8367 e = single_exit (LOOP_VINFO_SCALAR_LOOP (loop_vinfo)); in vect_transform_loop()
8376 tree niters = vect_build_loop_niters (loop_vinfo); in vect_transform_loop()
8377 LOOP_VINFO_NITERS_UNCHANGED (loop_vinfo) = niters; in vect_transform_loop()
8378 tree nitersm1 = unshare_expr (LOOP_VINFO_NITERSM1 (loop_vinfo)); in vect_transform_loop()
8379 bool niters_no_overflow = loop_niters_no_overflow (loop_vinfo); in vect_transform_loop()
8380 epilogue = vect_do_peeling (loop_vinfo, niters, nitersm1, &niters_vector, in vect_transform_loop()
8386 if (LOOP_VINFO_NITERS_KNOWN_P (loop_vinfo) in vect_transform_loop()
8387 && !LOOP_VINFO_FULLY_MASKED_P (loop_vinfo) in vect_transform_loop()
8391 = build_int_cst (TREE_TYPE (LOOP_VINFO_NITERS (loop_vinfo)), in vect_transform_loop()
8392 LOOP_VINFO_INT_NITERS (loop_vinfo) / lowest_vf); in vect_transform_loop()
8396 vect_gen_vector_loop_niters (loop_vinfo, niters, &niters_vector, in vect_transform_loop()
8407 if (LOOP_VINFO_FULLY_MASKED_P (loop_vinfo) in vect_transform_loop()
8408 && vect_use_loop_mask_for_alignment_p (loop_vinfo)) in vect_transform_loop()
8410 vect_prepare_for_masked_peels (loop_vinfo); in vect_transform_loop()
8414 if (!loop_vinfo->slp_instances.is_empty ()) in vect_transform_loop()
8417 vect_schedule_slp (loop_vinfo); in vect_transform_loop()
8437 stmt_info = loop_vinfo->lookup_stmt (phi); in vect_transform_loop()
8478 stmt_info = loop_vinfo->lookup_stmt (stmt); in vect_transform_loop()
8493 = loop_vinfo->lookup_stmt (gsi_stmt (subsi)); in vect_transform_loop()
8494 vect_transform_loop_stmt (loop_vinfo, pat_stmt_info, in vect_transform_loop()
8499 vect_transform_loop_stmt (loop_vinfo, pat_stmt_info, &si, in vect_transform_loop()
8502 vect_transform_loop_stmt (loop_vinfo, stmt_info, &si, in vect_transform_loop()
8515 loop_vinfo->remove_stmt (stmt_info); in vect_transform_loop()
8558 vect_set_loop_condition (loop, loop_vinfo, niters_vector, step_vector, in vect_transform_loop()
8561 unsigned int assumed_vf = vect_vf_for_cost (loop_vinfo); in vect_transform_loop()
8566 bool final_iter_may_be_partial = LOOP_VINFO_FULLY_MASKED_P (loop_vinfo); in vect_transform_loop()
8570 int min_epilogue_iters = LOOP_VINFO_PEELING_FOR_GAPS (loop_vinfo) ? 1 : 0; in vect_transform_loop()
8576 int alignment_npeels = LOOP_VINFO_PEELING_FOR_ALIGNMENT (loop_vinfo); in vect_transform_loop()
8577 if (alignment_npeels && LOOP_VINFO_FULLY_MASKED_P (loop_vinfo)) in vect_transform_loop()
8612 if (!LOOP_VINFO_EPILOGUE_P (loop_vinfo)) in vect_transform_loop()
8642 FOR_EACH_VEC_ELT (LOOP_VINFO_SLP_INSTANCES (loop_vinfo), i, instance) in vect_transform_loop()
8644 LOOP_VINFO_SLP_INSTANCES (loop_vinfo).release (); in vect_transform_loop()
8650 if (LOOP_VINFO_EPILOGUE_P (loop_vinfo)) in vect_transform_loop()
8664 if (LOOP_VINFO_NITERS_KNOWN_P (loop_vinfo) in vect_transform_loop()
8668 = (LOOP_VINFO_INT_NITERS (loop_vinfo) in vect_transform_loop()
8669 - LOOP_VINFO_PEELING_FOR_GAPS (loop_vinfo)); in vect_transform_loop()
8671 = eiters % lowest_vf + LOOP_VINFO_PEELING_FOR_GAPS (loop_vinfo); in vect_transform_loop()
8698 if (LOOP_VINFO_SCALAR_LOOP (loop_vinfo)) in vect_transform_loop()
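
The closing entries of vect_transform_loop (source lines 8664-8673) compute how many scalar iterations the epilogue loop must still execute when the iteration count is known: the remainder after whole vector iterations, plus the single iteration reserved when peeling for gaps. The same arithmetic as a standalone sketch:

/* Standalone model of the epilogue bookkeeping in the entries above.  */
static unsigned long
epilogue_niters (unsigned long niters, unsigned long vf, bool peeling_for_gaps)
{
  unsigned long eiters = niters - (peeling_for_gaps ? 1 : 0);
  return eiters % vf + (peeling_for_gaps ? 1 : 0);
}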