1 /* Schedule GIMPLE vector statements.
2    Copyright (C) 2020-2021 Free Software Foundation, Inc.
3 
4 This file is part of GCC.
5 
6 GCC is free software; you can redistribute it and/or modify it
7 under the terms of the GNU General Public License as published by the
8 Free Software Foundation; either version 3, or (at your option) any
9 later version.
10 
11 GCC is distributed in the hope that it will be useful, but WITHOUT
12 ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
14 for more details.
15 
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3.  If not see
18 <http://www.gnu.org/licenses/>.  */
19 
20 #include "config.h"
21 #include "system.h"
22 #include "coretypes.h"
23 #include "backend.h"
24 #include "rtl.h"
25 #include "tree.h"
26 #include "gimple.h"
27 #include "tree-pass.h"
28 #include "ssa.h"
29 #include "expmed.h"
30 #include "optabs-tree.h"
31 #include "tree-eh.h"
32 #include "gimple-iterator.h"
33 #include "gimplify-me.h"
34 #include "gimplify.h"
35 #include "tree-cfg.h"
36 #include "bitmap.h"
37 #include "tree-ssa-dce.h"
38 #include "memmodel.h"
39 #include "optabs.h"
40 #include "gimple-fold.h"
41 #include "internal-fn.h"
42 
/* Expand an ARRAY_REF(VIEW_CONVERT_EXPR) gimple assignment into a call to
   the IFN_VEC_SET internal function when the target supports a vec_set
   with a variable index for the vector's mode.
   i.e.:
     VIEW_CONVERT_EXPR<int[4]>(u)[_1] = i_4(D);
   =>
     _7 = u;
     _8 = .VEC_SET (_7, i_4(D), _1);
     u = _8;

   GSI points at the candidate statement.  On success the original
   statement is removed (leaving GSI at the statement that followed it)
   and the final "u = _8" assignment is returned; otherwise the statement
   is left untouched and NULL is returned.  */

static gimple *
gimple_expand_vec_set_expr (gimple_stmt_iterator *gsi)
{
  enum tree_code code;
  gcall *new_stmt = NULL;
  gassign *ass_stmt = NULL;

  /* Only consider code == GIMPLE_ASSIGN.  */
  gassign *stmt = dyn_cast<gassign *> (gsi_stmt (*gsi));
  if (!stmt)
    return NULL;

  tree lhs = gimple_assign_lhs (stmt);
  code = TREE_CODE (lhs);
  if (code != ARRAY_REF)
    return NULL;

  tree val = gimple_assign_rhs1 (stmt);
  tree op0 = TREE_OPERAND (lhs, 0);
  /* Match a store into VIEW_CONVERT_EXPR of a vector DECL where the
     array element mode agrees with the vector element mode.  */
  if (TREE_CODE (op0) == VIEW_CONVERT_EXPR && DECL_P (TREE_OPERAND (op0, 0))
      && VECTOR_TYPE_P (TREE_TYPE (TREE_OPERAND (op0, 0)))
      && TYPE_MODE (TREE_TYPE (lhs))
	   == TYPE_MODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (op0, 0)))))
    {
      tree pos = TREE_OPERAND (lhs, 1);
      tree view_op0 = TREE_OPERAND (op0, 0);
      machine_mode outermode = TYPE_MODE (TREE_TYPE (view_op0));
      /* The variable must be local to the function and non-addressable,
	 and the target must handle vec_set with a variable index.  */
      if (auto_var_in_fn_p (view_op0, cfun->decl)
	  && !TREE_ADDRESSABLE (view_op0) && can_vec_set_var_idx_p (outermode))
	{
	  location_t loc = gimple_location (stmt);
	  tree var_src = make_ssa_name (TREE_TYPE (view_op0));
	  tree var_dst = make_ssa_name (TREE_TYPE (view_op0));

	  /* Load the current vector value (_7 = u), carrying over the
	     virtual use of the original store.  */
	  ass_stmt = gimple_build_assign (var_src, view_op0);
	  gimple_set_vuse (ass_stmt, gimple_vuse (stmt));
	  gimple_set_location (ass_stmt, loc);
	  gsi_insert_before (gsi, ass_stmt, GSI_SAME_STMT);

	  /* _8 = .VEC_SET (_7, val, pos);  */
	  new_stmt
	    = gimple_build_call_internal (IFN_VEC_SET, 3, var_src, val, pos);
	  gimple_call_set_lhs (new_stmt, var_dst);
	  gimple_set_location (new_stmt, loc);
	  gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);

	  /* Store back (u = _8) and move the removed statement's virtual
	     operands onto this final store.  */
	  ass_stmt = gimple_build_assign (view_op0, var_dst);
	  gimple_set_location (ass_stmt, loc);
	  gsi_insert_before (gsi, ass_stmt, GSI_SAME_STMT);

	  gimple_move_vops (ass_stmt, stmt);
	  /* Removing the statement leaves GSI at the following one.  */
	  gsi_remove (gsi, true);
	}
    }

  return ass_stmt;
}
108 
/* Expand a VEC_COND_EXPR gimple assignment into a call to the internal
   function best matching the target's capabilities.

   GSI points at the candidate statement.  VEC_COND_SSA_NAME_USES caches,
   per condition SSA name, how many VEC_COND_EXPRs use it, so repeated
   users of the same comparison get a consistent expansion choice.

   Returns NULL if the statement at GSI is not a VEC_COND_EXPR assignment.
   Otherwise returns a replacement statement; except for the paths that
   call gsi_replace themselves, the caller is responsible for setting the
   lhs on the returned statement and swapping it in for the original.  */

static gimple *
gimple_expand_vec_cond_expr (gimple_stmt_iterator *gsi,
			     hash_map<tree, unsigned int> *vec_cond_ssa_name_uses)
{
  tree lhs, op0a = NULL_TREE, op0b = NULL_TREE;
  enum tree_code code;
  enum tree_code tcode;
  machine_mode cmp_op_mode;
  bool unsignedp;
  enum insn_code icode;
  imm_use_iterator imm_iter;

  /* Only consider code == GIMPLE_ASSIGN.  */
  gassign *stmt = dyn_cast<gassign *> (gsi_stmt (*gsi));
  if (!stmt)
    return NULL;

  code = gimple_assign_rhs_code (stmt);
  if (code != VEC_COND_EXPR)
    return NULL;

  tree op0 = gimple_assign_rhs1 (stmt);
  tree op1 = gimple_assign_rhs2 (stmt);
  tree op2 = gimple_assign_rhs3 (stmt);
  lhs = gimple_assign_lhs (stmt);
  machine_mode mode = TYPE_MODE (TREE_TYPE (lhs));

  /* Lower mask typed, non-vector mode VEC_COND_EXPRs to bitwise operations.
     Those can end up generated by folding and at least for integer mode masks
     we cannot expect vcond expanders to exist.  We lower a ? b : c
     to (b & a) | (c & ~a).  */
  if (VECTOR_BOOLEAN_TYPE_P (TREE_TYPE (lhs))
      && !VECTOR_MODE_P (mode))
    {
      gcc_assert (types_compatible_p (TREE_TYPE (op0), TREE_TYPE (op1)));
      gimple_seq stmts = NULL;
      tree type = TREE_TYPE (lhs);
      location_t loc = gimple_location (stmt);
      tree tem0 = gimple_build (&stmts, loc, BIT_AND_EXPR, type, op1, op0);
      tree tem1 = gimple_build (&stmts, loc, BIT_NOT_EXPR, type, op0);
      tree tem2 = gimple_build (&stmts, loc, BIT_AND_EXPR, type, op2, tem1);
      tree tem3 = gimple_build (&stmts, loc, BIT_IOR_EXPR, type, tem0, tem2);
      gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
      return gimple_build_assign (lhs, tem3);
    }

  bool can_compute_op0 = true;
  gcc_assert (!COMPARISON_CLASS_P (op0));
  if (TREE_CODE (op0) == SSA_NAME)
    {
      /* Count (with memoization) the VEC_COND_EXPRs that use OP0 as their
	 condition; several users make it profitable to keep the computed
	 mask and use vcond_mask rather than refold the comparison.  */
      unsigned int used_vec_cond_exprs = 0;
      unsigned int *slot = vec_cond_ssa_name_uses->get (op0);
      if (slot)
	used_vec_cond_exprs = *slot;
      else
	{
	  gimple *use_stmt;
	  FOR_EACH_IMM_USE_STMT (use_stmt, imm_iter, op0)
	    {
	      gassign *assign = dyn_cast<gassign *> (use_stmt);
	      if (assign != NULL
		  && gimple_assign_rhs_code (assign) == VEC_COND_EXPR
		  && gimple_assign_rhs1 (assign) == op0)
		used_vec_cond_exprs++;
	    }
	  vec_cond_ssa_name_uses->put (op0, used_vec_cond_exprs);
	}

      gassign *def_stmt = dyn_cast<gassign *> (SSA_NAME_DEF_STMT (op0));
      if (def_stmt)
	{
	  tcode = gimple_assign_rhs_code (def_stmt);
	  op0a = gimple_assign_rhs1 (def_stmt);
	  op0b = gimple_assign_rhs2 (def_stmt);

	  tree op0_type = TREE_TYPE (op0);
	  tree op0a_type = TREE_TYPE (op0a);
	  /* Can the comparison be expanded standalone in case its result
	     is needed as a mask?  */
	  if (TREE_CODE_CLASS (tcode) == tcc_comparison)
	    can_compute_op0 = expand_vec_cmp_expr_p (op0a_type, op0_type,
						     tcode);

	  /* Try to fold x CMP y ? -1 : 0 to x CMP y.  */

	  if (can_compute_op0
	      && integer_minus_onep (op1)
	      && integer_zerop (op2)
	      && TYPE_MODE (TREE_TYPE (lhs)) == TYPE_MODE (TREE_TYPE (op0)))
	    {
	      tree conv_op = build1 (VIEW_CONVERT_EXPR, TREE_TYPE (lhs), op0);
	      gassign *new_stmt = gimple_build_assign (lhs, conv_op);
	      gsi_replace (gsi, new_stmt, true);
	      return new_stmt;
	    }

	  if (can_compute_op0
	      && used_vec_cond_exprs >= 2
	      && (get_vcond_mask_icode (mode, TYPE_MODE (op0_type))
		  != CODE_FOR_nothing))
	    {
	      /* Keep the SSA name and use vcond_mask.  */
	      tcode = TREE_CODE (op0);
	    }
	}
      else
	tcode = TREE_CODE (op0);
    }
  else
    tcode = TREE_CODE (op0);

  /* At this point TCODE is either a comparison code to fold into the
     selection, or SSA_NAME/constant meaning OP0 is used as a mask.  */
  if (TREE_CODE_CLASS (tcode) != tcc_comparison)
    {
      gcc_assert (VECTOR_BOOLEAN_TYPE_P (TREE_TYPE (op0)));
      if (get_vcond_mask_icode (mode, TYPE_MODE (TREE_TYPE (op0)))
	  != CODE_FOR_nothing)
	return gimple_build_call_internal (IFN_VCOND_MASK, 3, op0, op1, op2);
      /* Fake op0 < 0.  */
      else
	{
	  gcc_assert (GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (op0)))
		      == MODE_VECTOR_INT);
	  op0a = op0;
	  op0b = build_zero_cst (TREE_TYPE (op0));
	  tcode = LT_EXPR;
	}
    }
  cmp_op_mode = TYPE_MODE (TREE_TYPE (op0a));
  unsignedp = TYPE_UNSIGNED (TREE_TYPE (op0a));

  /* Value and comparison vectors must have the same element count.  */
  gcc_assert (known_eq (GET_MODE_NUNITS (mode),
			GET_MODE_NUNITS (cmp_op_mode)));

  icode = get_vcond_icode (mode, cmp_op_mode, unsignedp);
  if (icode == CODE_FOR_nothing)
    {
      if (tcode == LT_EXPR
	  && op0a == op0)
	{
	  /* A VEC_COND_EXPR condition could be folded from EQ_EXPR/NE_EXPR
	     into a constant when only get_vcond_eq_icode is supported.
	     Try changing it to NE_EXPR.  */
	  tcode = NE_EXPR;
	}
      if ((tcode == EQ_EXPR || tcode == NE_EXPR)
	  && direct_internal_fn_supported_p (IFN_VCONDEQ, TREE_TYPE (lhs),
					     TREE_TYPE (op0a),
					     OPTIMIZE_FOR_BOTH))
	{
	  tree tcode_tree = build_int_cst (integer_type_node, tcode);
	  return gimple_build_call_internal (IFN_VCONDEQ, 5, op0a, op0b, op1,
					     op2, tcode_tree);
	}
    }

  /* No vcond/vcondeq pattern: fall back to the already-computed mask and
     vcond_mask, which the assertion requires to be available here.  */
  if (icode == CODE_FOR_nothing)
    {
      gcc_assert (VECTOR_BOOLEAN_TYPE_P (TREE_TYPE (op0))
		  && can_compute_op0
		  && (get_vcond_mask_icode (mode, TYPE_MODE (TREE_TYPE (op0)))
		      != CODE_FOR_nothing));
      return gimple_build_call_internal (IFN_VCOND_MASK, 3, op0, op1, op2);
    }

  tree tcode_tree = build_int_cst (integer_type_node, tcode);
  return gimple_build_call_internal (unsignedp ? IFN_VCONDU : IFN_VCOND,
				     5, op0a, op0b, op1, op2, tcode_tree);
}
278 
279 
280 
/* Iterate over all gimple statements in the function and try to expand
   VEC_COND_EXPR and ARRAY_REF(VIEW_CONVERT_EXPR) assignments, then DCE
   any comparison SSA names whose uses were folded away.  Returns 0
   (no extra TODO flags).  */

static unsigned int
gimple_expand_vec_exprs (void)
{
  gimple_stmt_iterator gsi;
  basic_block bb;
  /* Cache of per-SSA-name VEC_COND_EXPR use counts, shared across all
     calls to gimple_expand_vec_cond_expr.  */
  hash_map<tree, unsigned int> vec_cond_ssa_name_uses;
  auto_bitmap dce_ssa_names;

  FOR_EACH_BB_FN (bb, cfun)
    {
      for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
	{
	  gimple *g = gimple_expand_vec_cond_expr (&gsi,
						   &vec_cond_ssa_name_uses);
	  if (g != NULL)
	    {
	      /* Give the replacement the original lhs and swap it in.  */
	      tree lhs = gimple_assign_lhs (gsi_stmt (gsi));
	      gimple_set_lhs (g, lhs);
	      gsi_replace (&gsi, g, false);
	    }

	  /* May remove the current statement, leaving GSI at its
	     successor (or at the end of the block).
	     NOTE(review): when a statement was removed, the loop
	     header's gsi_next then advances past that successor without
	     scanning it — confirm this is intended.  */
	  gimple_expand_vec_set_expr (&gsi);
	  if (gsi_end_p (gsi))
	    break;
	}
    }

  /* Comparisons whose only consumers were folded into .VCOND* calls may
     now be dead; queue them for simple DCE.  */
  for (hash_map<tree, unsigned int>::iterator it = vec_cond_ssa_name_uses.begin ();
       it != vec_cond_ssa_name_uses.end (); ++it)
    bitmap_set_bit (dce_ssa_names, SSA_NAME_VERSION ((*it).first));

  simple_dce_from_worklist (dce_ssa_names);

  return 0;
}
319 
320 namespace {
321 
/* Metadata for the gimple instruction-selection ("isel") pass.  It has
   no gate condition, is not separately timed, only requires a CFG, and
   requests an SSA update after it runs.  */

const pass_data pass_data_gimple_isel =
{
  GIMPLE_PASS, /* type */
  "isel", /* name */
  OPTGROUP_VEC, /* optinfo_flags */
  TV_NONE, /* tv_id */
  PROP_cfg, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  TODO_update_ssa, /* todo_flags_finish */
};
334 
335 class pass_gimple_isel : public gimple_opt_pass
336 {
337 public:
pass_gimple_isel(gcc::context * ctxt)338   pass_gimple_isel (gcc::context *ctxt)
339     : gimple_opt_pass (pass_data_gimple_isel, ctxt)
340   {}
341 
342   /* opt_pass methods: */
gate(function *)343   virtual bool gate (function *)
344     {
345       return true;
346     }
347 
execute(function *)348   virtual unsigned int execute (function *)
349     {
350       return gimple_expand_vec_exprs ();
351     }
352 
353 }; // class pass_gimple_isel
354 
355 } // anon namespace
356 
357 gimple_opt_pass *
make_pass_gimple_isel(gcc::context * ctxt)358 make_pass_gimple_isel (gcc::context *ctxt)
359 {
360   return new pass_gimple_isel (ctxt);
361 }
362 
363