/* SPDX-License-Identifier: GPL-2.0-only */
/* Atlantic Network Driver
 *
 * Copyright (C) 2014-2019 aQuantia Corporation
 * Copyright (C) 2019-2020 Marvell International Ltd.
 */

/* File hw_atl_llh.h: Declarations of bitfield and register access functions for
 * Atlantic registers.
 */

#ifndef HW_ATL_LLH_H
#define HW_ATL_LLH_H

#include <linux/types.h>

struct aq_hw_s;

/* set temperature sense reset */
void hw_atl_ts_reset_set(struct aq_hw_s *aq_hw, u32 val);

/* set temperature sense power down */
void hw_atl_ts_power_down_set(struct aq_hw_s *aq_hw, u32 val);

/* get temperature sense power down */
u32 hw_atl_ts_power_down_get(struct aq_hw_s *aq_hw);

/* get temperature sense ready */
u32 hw_atl_ts_ready_get(struct aq_hw_s *aq_hw);

/* get temperature sense ready latch high */
u32 hw_atl_ts_ready_latch_high_get(struct aq_hw_s *aq_hw);

/* get temperature sense data */
u32 hw_atl_ts_data_get(struct aq_hw_s *aq_hw);

/* global */

/* set global microprocessor semaphore */
void hw_atl_reg_glb_cpu_sem_set(struct aq_hw_s *aq_hw, u32 glb_cpu_sem,
				u32 semaphore);

/* get global microprocessor semaphore */
u32 hw_atl_reg_glb_cpu_sem_get(struct aq_hw_s *aq_hw, u32 semaphore);
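
/*
 * Illustrative usage sketch, not part of the upstream API: one plausible
 * acquire/release sequence built from the two semaphore accessors above.
 * It assumes the convention used by the driver's firmware helpers (a read
 * value of 1 means the semaphore was granted, and writing 1 releases it);
 * the helper names, the retry budget and the return values below are
 * editorial placeholders only.
 */
static inline int hw_atl_llh_example_cpu_sem_acquire(struct aq_hw_s *aq_hw,
						     u32 semaphore)
{
	u32 retries = 1000U;

	/* Each read is treated as an acquire attempt. */
	while (hw_atl_reg_glb_cpu_sem_get(aq_hw, semaphore) != 1U) {
		if (--retries == 0U)
			return -1;	/* gave up; real code would return -ETIME */
	}

	return 0;
}

static inline void hw_atl_llh_example_cpu_sem_release(struct aq_hw_s *aq_hw,
						      u32 semaphore)
{
	/* Writing 1 back is assumed to release the semaphore. */
	hw_atl_reg_glb_cpu_sem_set(aq_hw, 1U, semaphore);
}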

/* set global register reset disable */
void hw_atl_glb_glb_reg_res_dis_set(struct aq_hw_s *aq_hw, u32 glb_reg_res_dis);

/* set soft reset */
void hw_atl_glb_soft_res_set(struct aq_hw_s *aq_hw, u32 soft_res);

/* get soft reset */
u32 hw_atl_glb_soft_res_get(struct aq_hw_s *aq_hw);

/* stats */
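
/* get rx dma dropped packet counter */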
u32 hw_atl_rpb_rx_dma_drop_pkt_cnt_get(struct aq_hw_s *aq_hw);

/* get rx dma good octet counter */
u64 hw_atl_stats_rx_dma_good_octet_counter_get(struct aq_hw_s *aq_hw);

/* get rx dma good packet counter */
u64 hw_atl_stats_rx_dma_good_pkt_counter_get(struct aq_hw_s *aq_hw);

/* get tx dma good octet counter */
u64 hw_atl_stats_tx_dma_good_octet_counter_get(struct aq_hw_s *aq_hw);

/* get tx dma good packet counter */
u64 hw_atl_stats_tx_dma_good_pkt_counter_get(struct aq_hw_s *aq_hw);

/* get msm rx errors counter register */
u32 hw_atl_reg_mac_msm_rx_errs_cnt_get(struct aq_hw_s *aq_hw);

/* get msm rx unicast frames counter register */
u32 hw_atl_reg_mac_msm_rx_ucst_frm_cnt_get(struct aq_hw_s *aq_hw);

/* get msm rx multicast frames counter register */
u32 hw_atl_reg_mac_msm_rx_mcst_frm_cnt_get(struct aq_hw_s *aq_hw);

/* get msm rx broadcast frames counter register */
u32 hw_atl_reg_mac_msm_rx_bcst_frm_cnt_get(struct aq_hw_s *aq_hw);

/* get msm rx broadcast octets counter register 1 */
u32 hw_atl_reg_mac_msm_rx_bcst_octets_counter1get(struct aq_hw_s *aq_hw);

/* get msm rx unicast octets counter register 0 */
u32 hw_atl_reg_mac_msm_rx_ucst_octets_counter0get(struct aq_hw_s *aq_hw);

/* get msm tx errors counter register */
u32 hw_atl_reg_mac_msm_tx_errs_cnt_get(struct aq_hw_s *aq_hw);

/* get msm tx unicast frames counter register */
u32 hw_atl_reg_mac_msm_tx_ucst_frm_cnt_get(struct aq_hw_s *aq_hw);

/* get msm tx multicast frames counter register */
u32 hw_atl_reg_mac_msm_tx_mcst_frm_cnt_get(struct aq_hw_s *aq_hw);

/* get msm tx broadcast frames counter register */
u32 hw_atl_reg_mac_msm_tx_bcst_frm_cnt_get(struct aq_hw_s *aq_hw);

/* get msm tx multicast octets counter register 1 */
u32 hw_atl_reg_mac_msm_tx_mcst_octets_counter1get(struct aq_hw_s *aq_hw);

/* get msm tx broadcast octets counter register 1 */
u32 hw_atl_reg_mac_msm_tx_bcst_octets_counter1get(struct aq_hw_s *aq_hw);

/* get msm tx unicast octets counter register 0 */
u32 hw_atl_reg_mac_msm_tx_ucst_octets_counter0get(struct aq_hw_s *aq_hw);

/* get global mif identification */
u32 hw_atl_reg_glb_mif_id_get(struct aq_hw_s *aq_hw);

/* interrupt */

/* set interrupt auto mask lsw */
void hw_atl_itr_irq_auto_masklsw_set(struct aq_hw_s *aq_hw,
				     u32 irq_auto_masklsw);

/* set interrupt mapping enable rx */
void hw_atl_itr_irq_map_en_rx_set(struct aq_hw_s *aq_hw, u32 irq_map_en_rx,
				  u32 rx);

/* set interrupt mapping enable tx */
void hw_atl_itr_irq_map_en_tx_set(struct aq_hw_s *aq_hw, u32 irq_map_en_tx,
				  u32 tx);

/* set interrupt mapping rx */
void hw_atl_itr_irq_map_rx_set(struct aq_hw_s *aq_hw, u32 irq_map_rx, u32 rx);

/* set interrupt mapping tx */
void hw_atl_itr_irq_map_tx_set(struct aq_hw_s *aq_hw, u32 irq_map_tx, u32 tx);

/* set interrupt mask clear lsw */
void hw_atl_itr_irq_msk_clearlsw_set(struct aq_hw_s *aq_hw,
				     u32 irq_msk_clearlsw);

/* set interrupt mask set lsw */
void hw_atl_itr_irq_msk_setlsw_set(struct aq_hw_s *aq_hw, u32 irq_msk_setlsw);

/* set interrupt register reset disable */
void hw_atl_itr_irq_reg_res_dis_set(struct aq_hw_s *aq_hw, u32 irq_reg_res_dis);

/* set interrupt status clear lsw */
void hw_atl_itr_irq_status_clearlsw_set(struct aq_hw_s *aq_hw,
					u32 irq_status_clearlsw);

/* get interrupt status lsw */
u32 hw_atl_itr_irq_statuslsw_get(struct aq_hw_s *aq_hw);

/* get reset interrupt */
u32 hw_atl_itr_res_irq_get(struct aq_hw_s *aq_hw);

/* set reset interrupt */
void hw_atl_itr_res_irq_set(struct aq_hw_s *aq_hw, u32 res_irq);

/* set RSC interrupt enable */
void hw_atl_itr_rsc_en_set(struct aq_hw_s *aq_hw, u32 enable);

/* set RSC delay */
void hw_atl_itr_rsc_delay_set(struct aq_hw_s *aq_hw, u32 delay);

/* rdm */

/* set cpu id */
void hw_atl_rdm_cpu_id_set(struct aq_hw_s *aq_hw, u32 cpuid, u32 dca);

/* set rx dca enable */
void hw_atl_rdm_rx_dca_en_set(struct aq_hw_s *aq_hw, u32 rx_dca_en);

/* set rx dca mode */
void hw_atl_rdm_rx_dca_mode_set(struct aq_hw_s *aq_hw, u32 rx_dca_mode);

/* set rx descriptor data buffer size */
void hw_atl_rdm_rx_desc_data_buff_size_set(struct aq_hw_s *aq_hw,
					   u32 rx_desc_data_buff_size,
					   u32 descriptor);

/* set rx descriptor dca enable */
void hw_atl_rdm_rx_desc_dca_en_set(struct aq_hw_s *aq_hw, u32 rx_desc_dca_en,
				   u32 dca);

/* set rx descriptor enable */
void hw_atl_rdm_rx_desc_en_set(struct aq_hw_s *aq_hw, u32 rx_desc_en,
			       u32 descriptor);

/* set rx descriptor header splitting */
void hw_atl_rdm_rx_desc_head_splitting_set(struct aq_hw_s *aq_hw,
					   u32 rx_desc_head_splitting,
					   u32 descriptor);

/* get rx descriptor head pointer */
u32 hw_atl_rdm_rx_desc_head_ptr_get(struct aq_hw_s *aq_hw, u32 descriptor);

/* set rx descriptor length */
void hw_atl_rdm_rx_desc_len_set(struct aq_hw_s *aq_hw, u32 rx_desc_len,
				u32 descriptor);

/* set rx descriptor write-back interrupt enable */
void hw_atl_rdm_rx_desc_wr_wb_irq_en_set(struct aq_hw_s *aq_hw,
					 u32 rx_desc_wr_wb_irq_en);

/* set rx header dca enable */
void hw_atl_rdm_rx_head_dca_en_set(struct aq_hw_s *aq_hw, u32 rx_head_dca_en,
				   u32 dca);

/* set rx payload dca enable */
void hw_atl_rdm_rx_pld_dca_en_set(struct aq_hw_s *aq_hw, u32 rx_pld_dca_en,
				  u32 dca);

/* set rx descriptor header buffer size */
void hw_atl_rdm_rx_desc_head_buff_size_set(struct aq_hw_s *aq_hw,
					   u32 rx_desc_head_buff_size,
					   u32 descriptor);

/* set rx descriptor reset */
void hw_atl_rdm_rx_desc_res_set(struct aq_hw_s *aq_hw, u32 rx_desc_res,
				u32 descriptor);

/* Set RDM Interrupt Moderation Enable */
void hw_atl_rdm_rdm_intr_moder_en_set(struct aq_hw_s *aq_hw,
				      u32 rdm_intr_moder_en);

/* reg */

/* set general interrupt mapping register */
void hw_atl_reg_gen_irq_map_set(struct aq_hw_s *aq_hw, u32 gen_intr_map,
				u32 regidx);

/* get general interrupt status register */
u32 hw_atl_reg_gen_irq_status_get(struct aq_hw_s *aq_hw);

/* set interrupt global control register */
void hw_atl_reg_irq_glb_ctl_set(struct aq_hw_s *aq_hw, u32 intr_glb_ctl);

/* set interrupt throttle register */
void hw_atl_reg_irq_thr_set(struct aq_hw_s *aq_hw, u32 intr_thr, u32 throttle);

/* set rx dma descriptor base address lsw */
void hw_atl_reg_rx_dma_desc_base_addresslswset(struct aq_hw_s *aq_hw,
					       u32 rx_dma_desc_base_addrlsw,
					       u32 descriptor);

/* set rx dma descriptor base address msw */
void hw_atl_reg_rx_dma_desc_base_addressmswset(struct aq_hw_s *aq_hw,
					       u32 rx_dma_desc_base_addrmsw,
					       u32 descriptor);

/* get rx dma descriptor status register */
u32 hw_atl_reg_rx_dma_desc_status_get(struct aq_hw_s *aq_hw, u32 descriptor);

/* set rx dma descriptor tail pointer register */
void hw_atl_reg_rx_dma_desc_tail_ptr_set(struct aq_hw_s *aq_hw,
					 u32 rx_dma_desc_tail_ptr,
					 u32 descriptor);

/* set rx filter multicast filter mask register */
void hw_atl_reg_rx_flr_mcst_flr_msk_set(struct aq_hw_s *aq_hw,
					u32 rx_flr_mcst_flr_msk);

/* set rx filter multicast filter register */
void hw_atl_reg_rx_flr_mcst_flr_set(struct aq_hw_s *aq_hw, u32 rx_flr_mcst_flr,
				    u32 filter);

/* set rx filter rss control register 1 */
void hw_atl_reg_rx_flr_rss_control1set(struct aq_hw_s *aq_hw,
				       u32 rx_flr_rss_control1);

/* Set RX Filter Control Register 2 */
void hw_atl_reg_rx_flr_control2_set(struct aq_hw_s *aq_hw, u32 rx_flr_control2);

/* Set RX Interrupt Moderation Control Register */
void hw_atl_reg_rx_intr_moder_ctrl_set(struct aq_hw_s *aq_hw,
				       u32 rx_intr_moderation_ctl,
				       u32 queue);

/* set tx dma debug control */
void hw_atl_reg_tx_dma_debug_ctl_set(struct aq_hw_s *aq_hw,
				     u32 tx_dma_debug_ctl);

/* set tx dma descriptor base address lsw */
void hw_atl_reg_tx_dma_desc_base_addresslswset(struct aq_hw_s *aq_hw,
					       u32 tx_dma_desc_base_addrlsw,
					       u32 descriptor);

/* set tx dma descriptor base address msw */
void hw_atl_reg_tx_dma_desc_base_addressmswset(struct aq_hw_s *aq_hw,
					       u32 tx_dma_desc_base_addrmsw,
					       u32 descriptor);

/* set tx dma descriptor tail pointer register */
void hw_atl_reg_tx_dma_desc_tail_ptr_set(struct aq_hw_s *aq_hw,
					 u32 tx_dma_desc_tail_ptr,
					 u32 descriptor);

/* Set TX Interrupt Moderation Control Register */
void hw_atl_reg_tx_intr_moder_ctrl_set(struct aq_hw_s *aq_hw,
				       u32 tx_intr_moderation_ctl,
				       u32 queue);

/* set global microprocessor scratch pad */
void hw_atl_reg_glb_cpu_scratch_scp_set(struct aq_hw_s *aq_hw,
					u32 glb_cpu_scratch_scp,
					u32 scratch_scp);

/* rpb */

/* set dma system loopback */
void hw_atl_rpb_dma_sys_lbk_set(struct aq_hw_s *aq_hw, u32 dma_sys_lbk);

/* set dma network loopback */
void hw_atl_rpb_dma_net_lbk_set(struct aq_hw_s *aq_hw, u32 dma_net_lbk);

/* set rx traffic class mode */
void hw_atl_rpb_rpf_rx_traf_class_mode_set(struct aq_hw_s *aq_hw,
					   u32 rx_traf_class_mode);

/* get rx traffic class mode */
u32 hw_atl_rpb_rpf_rx_traf_class_mode_get(struct aq_hw_s *aq_hw);

/* set rx buffer enable */
void hw_atl_rpb_rx_buff_en_set(struct aq_hw_s *aq_hw, u32 rx_buff_en);

/* set rx buffer high threshold (per tc) */
void hw_atl_rpb_rx_buff_hi_threshold_per_tc_set(struct aq_hw_s *aq_hw,
						u32 rx_buff_hi_threshold_per_tc,
						u32 buffer);

/* set rx buffer low threshold (per tc) */
void hw_atl_rpb_rx_buff_lo_threshold_per_tc_set(struct aq_hw_s *aq_hw,
						u32 rx_buff_lo_threshold_per_tc,
						u32 buffer);

/* set rx flow control mode */
void hw_atl_rpb_rx_flow_ctl_mode_set(struct aq_hw_s *aq_hw,
				     u32 rx_flow_ctl_mode);

/* set rx packet buffer size (per tc) */
void hw_atl_rpb_rx_pkt_buff_size_per_tc_set(struct aq_hw_s *aq_hw,
					    u32 rx_pkt_buff_size_per_tc,
					    u32 buffer);

/* toggle rdm rx dma descriptor cache init */
void hw_atl_rdm_rx_dma_desc_cache_init_tgl(struct aq_hw_s *aq_hw);

/* get rdm rx dma descriptor cache init done */
u32 hw_atl_rdm_rx_dma_desc_cache_init_done_get(struct aq_hw_s *aq_hw);

/* set rx xoff enable (per tc) */
void hw_atl_rpb_rx_xoff_en_per_tc_set(struct aq_hw_s *aq_hw,
				      u32 rx_xoff_en_per_tc,
				      u32 buffer);

/* rpf */

/* set l2 broadcast count threshold */
void hw_atl_rpfl2broadcast_count_threshold_set(struct aq_hw_s *aq_hw,
					       u32 l2broadcast_count_threshold);

/* set l2 broadcast enable */
void hw_atl_rpfl2broadcast_en_set(struct aq_hw_s *aq_hw, u32 l2broadcast_en);

/* set l2 broadcast filter action */
void hw_atl_rpfl2broadcast_flr_act_set(struct aq_hw_s *aq_hw,
				       u32 l2broadcast_flr_act);

/* set l2 multicast filter enable */
void hw_atl_rpfl2multicast_flr_en_set(struct aq_hw_s *aq_hw,
				      u32 l2multicast_flr_en,
				      u32 filter);

/* get l2 promiscuous mode enable */
u32 hw_atl_rpfl2promiscuous_mode_en_get(struct aq_hw_s *aq_hw);

/* set l2 promiscuous mode enable */
void hw_atl_rpfl2promiscuous_mode_en_set(struct aq_hw_s *aq_hw,
					 u32 l2promiscuous_mode_en);

/* set l2 unicast filter action */
void hw_atl_rpfl2unicast_flr_act_set(struct aq_hw_s *aq_hw,
				     u32 l2unicast_flr_act,
				     u32 filter);

/* set l2 unicast filter enable */
void hw_atl_rpfl2_uc_flr_en_set(struct aq_hw_s *aq_hw, u32 l2unicast_flr_en,
				u32 filter);

/* set l2 unicast destination address lsw */
void hw_atl_rpfl2unicast_dest_addresslsw_set(struct aq_hw_s *aq_hw,
					     u32 l2unicast_dest_addresslsw,
					     u32 filter);

/* set l2 unicast destination address msw */
void hw_atl_rpfl2unicast_dest_addressmsw_set(struct aq_hw_s *aq_hw,
					     u32 l2unicast_dest_addressmsw,
					     u32 filter);
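
/*
 * Illustrative sketch, not upstream code: how a six-byte MAC address is
 * typically split across the MSW/LSW accessors above (high 16 bits into the
 * MSW register, low 32 bits into the LSW register), with the filter disabled
 * around the update.  The helper name is an editorial placeholder and the
 * byte ordering mirrors the usage in hw_atl_b0.c; verify against that file
 * before relying on it.
 */
static inline void hw_atl_llh_example_uc_flr_write(struct aq_hw_s *aq_hw,
						   const u8 mac[6], u32 filter)
{
	u32 addr_msw = ((u32)mac[0] << 8) | (u32)mac[1];
	u32 addr_lsw = ((u32)mac[2] << 24) | ((u32)mac[3] << 16) |
		       ((u32)mac[4] << 8) | (u32)mac[5];

	hw_atl_rpfl2_uc_flr_en_set(aq_hw, 0U, filter);	/* quiesce the filter */
	hw_atl_rpfl2unicast_dest_addresslsw_set(aq_hw, addr_lsw, filter);
	hw_atl_rpfl2unicast_dest_addressmsw_set(aq_hw, addr_msw, filter);
	hw_atl_rpfl2_uc_flr_en_set(aq_hw, 1U, filter);	/* re-arm the filter */
}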

/* Set L2 Accept all Multicast packets */
void hw_atl_rpfl2_accept_all_mc_packets_set(struct aq_hw_s *aq_hw,
					    u32 l2_accept_all_mc_packets);

/* set user-priority tc mapping */
void hw_atl_rpf_rpb_user_priority_tc_map_set(struct aq_hw_s *aq_hw,
					     u32 user_priority_tc_map, u32 tc);

/* set rss key address */
void hw_atl_rpf_rss_key_addr_set(struct aq_hw_s *aq_hw, u32 rss_key_addr);

/* set rss key write data */
void hw_atl_rpf_rss_key_wr_data_set(struct aq_hw_s *aq_hw, u32 rss_key_wr_data);

/* get rss key write enable */
u32 hw_atl_rpf_rss_key_wr_en_get(struct aq_hw_s *aq_hw);

/* set rss key write enable */
void hw_atl_rpf_rss_key_wr_en_set(struct aq_hw_s *aq_hw, u32 rss_key_wr_en);
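
/*
 * Illustrative sketch, not upstream code: the write handshake implied by the
 * four RSS-key accessors above when programming one 32-bit word of the hash
 * key -- load the data and address, raise the write-enable strobe, then poll
 * it until the hardware clears it.  The helper name and the retry budget are
 * editorial placeholders; the exact sequence used by the driver lives in
 * hw_atl_b0.c.
 */
static inline int hw_atl_llh_example_rss_key_word_write(struct aq_hw_s *aq_hw,
							u32 addr, u32 data)
{
	u32 retries = 1000U;

	hw_atl_rpf_rss_key_wr_data_set(aq_hw, data);
	hw_atl_rpf_rss_key_addr_set(aq_hw, addr);
	hw_atl_rpf_rss_key_wr_en_set(aq_hw, 1U);

	/* The write-enable bit is assumed to self-clear once the write lands. */
	while (hw_atl_rpf_rss_key_wr_en_get(aq_hw) != 0U) {
		if (--retries == 0U)
			return -1;	/* timed out */
	}

	return 0;
}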

/* set rss redirection table address */
void hw_atl_rpf_rss_redir_tbl_addr_set(struct aq_hw_s *aq_hw,
				       u32 rss_redir_tbl_addr);

/* set rss redirection table write data */
void hw_atl_rpf_rss_redir_tbl_wr_data_set(struct aq_hw_s *aq_hw,
					  u32 rss_redir_tbl_wr_data);

/* get rss redirection write enable */
u32 hw_atl_rpf_rss_redir_wr_en_get(struct aq_hw_s *aq_hw);

/* set rss redirection write enable */
void hw_atl_rpf_rss_redir_wr_en_set(struct aq_hw_s *aq_hw, u32 rss_redir_wr_en);

/* set tpo to rpf system loopback */
void hw_atl_rpf_tpo_to_rpf_sys_lbk_set(struct aq_hw_s *aq_hw,
				       u32 tpo_to_rpf_sys_lbk);

/* set vlan inner ethertype */
void hw_atl_rpf_vlan_inner_etht_set(struct aq_hw_s *aq_hw, u32 vlan_inner_etht);

/* set vlan outer ethertype */
void hw_atl_rpf_vlan_outer_etht_set(struct aq_hw_s *aq_hw, u32 vlan_outer_etht);

/* set vlan promiscuous mode enable */
void hw_atl_rpf_vlan_prom_mode_en_set(struct aq_hw_s *aq_hw,
				      u32 vlan_prom_mode_en);

/* Get VLAN promiscuous mode enable */
u32 hw_atl_rpf_vlan_prom_mode_en_get(struct aq_hw_s *aq_hw);

/* Set VLAN untagged action */
void hw_atl_rpf_vlan_untagged_act_set(struct aq_hw_s *aq_hw,
				      u32 vlan_untagged_act);

/* Set VLAN accept untagged packets */
void hw_atl_rpf_vlan_accept_untagged_packets_set(struct aq_hw_s *aq_hw,
						 u32 vlan_acc_untagged_packets);

/* Set VLAN filter enable */
void hw_atl_rpf_vlan_flr_en_set(struct aq_hw_s *aq_hw, u32 vlan_flr_en,
				u32 filter);

/* Set VLAN Filter Action */
void hw_atl_rpf_vlan_flr_act_set(struct aq_hw_s *aq_hw, u32 vlan_filter_act,
				 u32 filter);

/* Set VLAN ID Filter */
void hw_atl_rpf_vlan_id_flr_set(struct aq_hw_s *aq_hw, u32 vlan_id_flr,
				u32 filter);

/* Set VLAN RX queue assignment enable */
void hw_atl_rpf_vlan_rxq_en_flr_set(struct aq_hw_s *aq_hw, u32 vlan_rxq_en,
				    u32 filter);

/* Set VLAN RX queue */
void hw_atl_rpf_vlan_rxq_flr_set(struct aq_hw_s *aq_hw, u32 vlan_rxq,
				 u32 filter);

/* set ethertype filter enable */
void hw_atl_rpf_etht_flr_en_set(struct aq_hw_s *aq_hw, u32 etht_flr_en,
				u32 filter);

/* set ethertype user-priority enable */
void hw_atl_rpf_etht_user_priority_en_set(struct aq_hw_s *aq_hw,
					  u32 etht_user_priority_en,
					  u32 filter);

/* set ethertype rx queue enable */
void hw_atl_rpf_etht_rx_queue_en_set(struct aq_hw_s *aq_hw,
				     u32 etht_rx_queue_en,
				     u32 filter);

/* set ethertype rx queue */
void hw_atl_rpf_etht_rx_queue_set(struct aq_hw_s *aq_hw, u32 etht_rx_queue,
				  u32 filter);

/* set ethertype user-priority */
void hw_atl_rpf_etht_user_priority_set(struct aq_hw_s *aq_hw,
				       u32 etht_user_priority,
				       u32 filter);

/* set ethertype management queue */
void hw_atl_rpf_etht_mgt_queue_set(struct aq_hw_s *aq_hw, u32 etht_mgt_queue,
				   u32 filter);

/* set ethertype filter action */
void hw_atl_rpf_etht_flr_act_set(struct aq_hw_s *aq_hw, u32 etht_flr_act,
				 u32 filter);

/* set ethertype filter */
void hw_atl_rpf_etht_flr_set(struct aq_hw_s *aq_hw, u32 etht_flr, u32 filter);

/* set L4 source port */
void hw_atl_rpf_l4_spd_set(struct aq_hw_s *aq_hw, u32 val, u32 filter);

/* set L4 destination port */
void hw_atl_rpf_l4_dpd_set(struct aq_hw_s *aq_hw, u32 val, u32 filter);

/* rpo */

/* set ipv4 header checksum offload enable */
void hw_atl_rpo_ipv4header_crc_offload_en_set(struct aq_hw_s *aq_hw,
					      u32 ipv4header_crc_offload_en);

/* set rx descriptor vlan stripping */
void hw_atl_rpo_rx_desc_vlan_stripping_set(struct aq_hw_s *aq_hw,
					   u32 rx_desc_vlan_stripping,
					   u32 descriptor);
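
/* set outer vlan tag mode */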
void hw_atl_rpo_outer_vlan_tag_mode_set(void *context,
					u32 outervlantagmode);
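
/* get outer vlan tag mode */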
u32 hw_atl_rpo_outer_vlan_tag_mode_get(void *context);

/* set tcp/udp checksum offload enable */
void hw_atl_rpo_tcp_udp_crc_offload_en_set(struct aq_hw_s *aq_hw,
					   u32 tcp_udp_crc_offload_en);

/* Set LRO Patch Optimization Enable */
void hw_atl_rpo_lro_patch_optimization_en_set(struct aq_hw_s *aq_hw,
					      u32 lro_patch_optimization_en);

/* Set Large Receive Offload Enable */
void hw_atl_rpo_lro_en_set(struct aq_hw_s *aq_hw, u32 lro_en);

/* Set LRO Q Sessions Limit */
void hw_atl_rpo_lro_qsessions_lim_set(struct aq_hw_s *aq_hw,
				      u32 lro_qsessions_lim);

/* Set LRO Total Descriptor Limit */
void hw_atl_rpo_lro_total_desc_lim_set(struct aq_hw_s *aq_hw,
				       u32 lro_total_desc_lim);

/* Set LRO Min Payload of First Packet */
void hw_atl_rpo_lro_min_pay_of_first_pkt_set(struct aq_hw_s *aq_hw,
					     u32 lro_min_pld_of_first_pkt);

/* Set LRO Packet Limit */
void hw_atl_rpo_lro_pkt_lim_set(struct aq_hw_s *aq_hw, u32 lro_packet_lim);

/* Set LRO Max Number of Descriptors */
void hw_atl_rpo_lro_max_num_of_descriptors_set(struct aq_hw_s *aq_hw,
					       u32 lro_max_desc_num, u32 lro);

/* Set LRO Time Base Divider */
void hw_atl_rpo_lro_time_base_divider_set(struct aq_hw_s *aq_hw,
					  u32 lro_time_base_divider);

/* Set LRO Inactive Interval */
void hw_atl_rpo_lro_inactive_interval_set(struct aq_hw_s *aq_hw,
					  u32 lro_inactive_interval);

/* Set LRO Max Coalescing Interval */
void hw_atl_rpo_lro_max_coalescing_interval_set(struct aq_hw_s *aq_hw,
						u32 lro_max_coal_interval);

/* rx */

/* set rx register reset disable */
void hw_atl_rx_rx_reg_res_dis_set(struct aq_hw_s *aq_hw, u32 rx_reg_res_dis);

/* tdm */

/* set cpu id */
void hw_atl_tdm_cpu_id_set(struct aq_hw_s *aq_hw, u32 cpuid, u32 dca);

/* set large send offload enable */
void hw_atl_tdm_large_send_offload_en_set(struct aq_hw_s *aq_hw,
					  u32 large_send_offload_en);

/* set tx descriptor enable */
void hw_atl_tdm_tx_desc_en_set(struct aq_hw_s *aq_hw, u32 tx_desc_en,
			       u32 descriptor);

/* set tx dca enable */
void hw_atl_tdm_tx_dca_en_set(struct aq_hw_s *aq_hw, u32 tx_dca_en);

/* set tx dca mode */
void hw_atl_tdm_tx_dca_mode_set(struct aq_hw_s *aq_hw, u32 tx_dca_mode);

/* set tx descriptor dca enable */
void hw_atl_tdm_tx_desc_dca_en_set(struct aq_hw_s *aq_hw, u32 tx_desc_dca_en,
				   u32 dca);

/* get tx descriptor head pointer */
u32 hw_atl_tdm_tx_desc_head_ptr_get(struct aq_hw_s *aq_hw, u32 descriptor);

/* set tx descriptor length */
void hw_atl_tdm_tx_desc_len_set(struct aq_hw_s *aq_hw, u32 tx_desc_len,
				u32 descriptor);

/* set tx descriptor write-back interrupt enable */
void hw_atl_tdm_tx_desc_wr_wb_irq_en_set(struct aq_hw_s *aq_hw,
					 u32 tx_desc_wr_wb_irq_en);

/* set tx descriptor write-back threshold */
void hw_atl_tdm_tx_desc_wr_wb_threshold_set(struct aq_hw_s *aq_hw,
					    u32 tx_desc_wr_wb_threshold,
					    u32 descriptor);

/* Set TDM Interrupt Moderation Enable */
void hw_atl_tdm_tdm_intr_moder_en_set(struct aq_hw_s *aq_hw,
				      u32 tdm_irq_moderation_en);

/* thm */

/* set lso tcp flag of first packet */
void hw_atl_thm_lso_tcp_flag_of_first_pkt_set(struct aq_hw_s *aq_hw,
					      u32 lso_tcp_flag_of_first_pkt);

/* set lso tcp flag of last packet */
void hw_atl_thm_lso_tcp_flag_of_last_pkt_set(struct aq_hw_s *aq_hw,
					     u32 lso_tcp_flag_of_last_pkt);

/* set lso tcp flag of middle packet */
void hw_atl_thm_lso_tcp_flag_of_middle_pkt_set(struct aq_hw_s *aq_hw,
					       u32 lso_tcp_flag_of_middle_pkt);

/* tpb */

/* set TX Traffic Class Mode */
void hw_atl_tpb_tps_tx_tc_mode_set(struct aq_hw_s *aq_hw,
				   u32 tx_traf_class_mode);

/* get TX Traffic Class Mode */
u32 hw_atl_tpb_tps_tx_tc_mode_get(struct aq_hw_s *aq_hw);

/* set tx buffer enable */
void hw_atl_tpb_tx_buff_en_set(struct aq_hw_s *aq_hw, u32 tx_buff_en);

/* set tx buffer high threshold (per tc) */
void hw_atl_tpb_tx_buff_hi_threshold_per_tc_set(struct aq_hw_s *aq_hw,
						u32 tx_buff_hi_threshold_per_tc,
						u32 buffer);

/* set tx buffer low threshold (per tc) */
void hw_atl_tpb_tx_buff_lo_threshold_per_tc_set(struct aq_hw_s *aq_hw,
						u32 tx_buff_lo_threshold_per_tc,
						u32 buffer);

/* set tx dma system loopback enable */
void hw_atl_tpb_tx_dma_sys_lbk_en_set(struct aq_hw_s *aq_hw,
				      u32 tx_dma_sys_lbk_en);

/* set tx dma network loopback enable */
void hw_atl_tpb_tx_dma_net_lbk_en_set(struct aq_hw_s *aq_hw,
				      u32 tx_dma_net_lbk_en);

/* set tx clock gating enable */
void hw_atl_tpb_tx_tx_clk_gate_en_set(struct aq_hw_s *aq_hw,
				      u32 tx_clk_gate_en);

/* set tx packet buffer size (per tc) */
void hw_atl_tpb_tx_pkt_buff_size_per_tc_set(struct aq_hw_s *aq_hw,
					    u32 tx_pkt_buff_size_per_tc,
					    u32 buffer);

/* set tx path pad insert enable */
void hw_atl_tpb_tx_path_scp_ins_en_set(struct aq_hw_s *aq_hw,
				       u32 tx_path_scp_ins_en);

/* tpo */

/* set ipv4 header checksum offload enable */
void hw_atl_tpo_ipv4header_crc_offload_en_set(struct aq_hw_s *aq_hw,
					      u32 ipv4header_crc_offload_en);

/* set tcp/udp checksum offload enable */
void hw_atl_tpo_tcp_udp_crc_offload_en_set(struct aq_hw_s *aq_hw,
					   u32 tcp_udp_crc_offload_en);

/* set tx pkt system loopback enable */
void hw_atl_tpo_tx_pkt_sys_lbk_en_set(struct aq_hw_s *aq_hw,
				      u32 tx_pkt_sys_lbk_en);

/* tps */

/* set tx packet scheduler data arbitration mode */
void hw_atl_tps_tx_pkt_shed_data_arb_mode_set(struct aq_hw_s *aq_hw,
					      u32 tx_pkt_shed_data_arb_mode);

/* set tx packet scheduler descriptor rate current time reset */
void hw_atl_tps_tx_pkt_shed_desc_rate_curr_time_res_set(struct aq_hw_s *aq_hw,
							u32 curr_time_res);

/* set tx packet scheduler descriptor rate limit */
void hw_atl_tps_tx_pkt_shed_desc_rate_lim_set(struct aq_hw_s *aq_hw,
					      u32 tx_pkt_shed_desc_rate_lim);

/* set tx packet scheduler descriptor tc arbitration mode */
void hw_atl_tps_tx_pkt_shed_desc_tc_arb_mode_set(struct aq_hw_s *aq_hw,
						 u32 arb_mode);

/* set tx packet scheduler descriptor tc max credit */
void hw_atl_tps_tx_pkt_shed_desc_tc_max_credit_set(struct aq_hw_s *aq_hw,
						   const u32 tc,
						   const u32 max_credit);

/* set tx packet scheduler descriptor tc weight */
void hw_atl_tps_tx_pkt_shed_desc_tc_weight_set(struct aq_hw_s *aq_hw,
					       const u32 tc,
					       const u32 weight);

/* set tx packet scheduler descriptor vm arbitration mode */
void hw_atl_tps_tx_pkt_shed_desc_vm_arb_mode_set(struct aq_hw_s *aq_hw,
						 u32 arb_mode);

/* set tx packet scheduler tc data max credit */
void hw_atl_tps_tx_pkt_shed_tc_data_max_credit_set(struct aq_hw_s *aq_hw,
						   const u32 tc,
						   const u32 max_credit);

/* set tx packet scheduler tc data weight */
void hw_atl_tps_tx_pkt_shed_tc_data_weight_set(struct aq_hw_s *aq_hw,
					       const u32 tc,
					       const u32 weight);

/* set tx descriptor rate mode */
void hw_atl_tps_tx_desc_rate_mode_set(struct aq_hw_s *aq_hw,
				      const u32 rate_mode);

/* set tx packet scheduler descriptor rate enable */
void hw_atl_tps_tx_desc_rate_en_set(struct aq_hw_s *aq_hw, const u32 desc,
				    const u32 enable);

/* set tx packet scheduler descriptor rate integral value */
void hw_atl_tps_tx_desc_rate_x_set(struct aq_hw_s *aq_hw, const u32 desc,
				   const u32 rate_int);

/* set tx packet scheduler descriptor rate fractional value */
void hw_atl_tps_tx_desc_rate_y_set(struct aq_hw_s *aq_hw, const u32 desc,
				   const u32 rate_frac);

/* tx */

/* set tx register reset disable */
void hw_atl_tx_tx_reg_res_dis_set(struct aq_hw_s *aq_hw, u32 tx_reg_res_dis);

/* msm */

/* get register access status */
u32 hw_atl_msm_reg_access_status_get(struct aq_hw_s *aq_hw);

/* set register address for indirect address */
void hw_atl_msm_reg_addr_for_indirect_addr_set(struct aq_hw_s *aq_hw,
					       u32 reg_addr_for_indirect_addr);

/* set register read strobe */
void hw_atl_msm_reg_rd_strobe_set(struct aq_hw_s *aq_hw, u32 reg_rd_strobe);

/* get register read data */
u32 hw_atl_msm_reg_rd_data_get(struct aq_hw_s *aq_hw);

/* set register write data */
void hw_atl_msm_reg_wr_data_set(struct aq_hw_s *aq_hw, u32 reg_wr_data);

/* set register write strobe */
void hw_atl_msm_reg_wr_strobe_set(struct aq_hw_s *aq_hw, u32 reg_wr_strobe);
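
/*
 * Illustrative sketch, not upstream code: one plausible ordering of the MSM
 * indirect-access accessors above for a single register read -- program the
 * indirect address, pulse the read strobe, wait for the access status to
 * report idle, then fetch the read data.  Whether a non-zero status means
 * "busy" and how the strobe behaves are assumptions here; the authoritative
 * handshake is in the hw_atl implementation and the hardware documentation.
 */
static inline int hw_atl_llh_example_msm_reg_read(struct aq_hw_s *aq_hw,
						  u32 addr, u32 *val)
{
	u32 retries = 1000U;

	hw_atl_msm_reg_addr_for_indirect_addr_set(aq_hw, addr);
	hw_atl_msm_reg_rd_strobe_set(aq_hw, 1U);

	/* A non-zero access status is assumed to mean the read is in flight. */
	while (hw_atl_msm_reg_access_status_get(aq_hw) != 0U) {
		if (--retries == 0U)
			return -1;	/* timed out */
	}

	*val = hw_atl_msm_reg_rd_data_get(aq_hw);

	return 0;
}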

/* pci */

/* set pci register reset disable */
void hw_atl_pci_pci_reg_res_dis_set(struct aq_hw_s *aq_hw, u32 pci_reg_res_dis);

/* pcs */
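/* set ptp clock read enable */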
void hw_atl_pcs_ptp_clock_read_enable(struct aq_hw_s *aq_hw,
				      u32 ptp_clock_read_enable);
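
/* get ptp clock register (by index) */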
u32 hw_atl_pcs_ptp_clock_get(struct aq_hw_s *aq_hw, u32 index);

/* set uP Force Interrupt */
void hw_atl_mcp_up_force_intr_set(struct aq_hw_s *aq_hw, u32 up_force_intr);

/* clear ipv4 filter destination address */
void hw_atl_rpfl3l4_ipv4_dest_addr_clear(struct aq_hw_s *aq_hw, u8 location);

/* clear ipv4 filter source address */
void hw_atl_rpfl3l4_ipv4_src_addr_clear(struct aq_hw_s *aq_hw, u8 location);

/* clear command for filter l3-l4 */
void hw_atl_rpfl3l4_cmd_clear(struct aq_hw_s *aq_hw, u8 location);

/* clear ipv6 filter destination address */
void hw_atl_rpfl3l4_ipv6_dest_addr_clear(struct aq_hw_s *aq_hw, u8 location);

/* clear ipv6 filter source address */
void hw_atl_rpfl3l4_ipv6_src_addr_clear(struct aq_hw_s *aq_hw, u8 location);

/* set ipv4 filter destination address */
void hw_atl_rpfl3l4_ipv4_dest_addr_set(struct aq_hw_s *aq_hw, u8 location,
				       u32 ipv4_dest);

/* set ipv4 filter source address */
void hw_atl_rpfl3l4_ipv4_src_addr_set(struct aq_hw_s *aq_hw, u8 location,
				      u32 ipv4_src);

/* set command for filter l3-l4 */
void hw_atl_rpfl3l4_cmd_set(struct aq_hw_s *aq_hw, u8 location, u32 cmd);

/* set ipv6 filter source address */
void hw_atl_rpfl3l4_ipv6_src_addr_set(struct aq_hw_s *aq_hw, u8 location,
				      u32 *ipv6_src);

/* set ipv6 filter destination address */
void hw_atl_rpfl3l4_ipv6_dest_addr_set(struct aq_hw_s *aq_hw, u8 location,
				       u32 *ipv6_dest);
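
/*
 * Illustrative sketch, not upstream code: a typical ordering when
 * (re)programming an IPv4 L3/L4 filter slot with the accessors above --
 * clear the command word first so the slot is quiesced, load the addresses,
 * then write the command word that re-enables the slot.  The helper name and
 * the meaning of the cmd bits are editorial placeholders; see the callers in
 * the hw_atl code for the real encoding.
 */
static inline void hw_atl_llh_example_l3l4_ipv4_write(struct aq_hw_s *aq_hw,
						      u8 location, u32 ip_dst,
						      u32 ip_src, u32 cmd)
{
	hw_atl_rpfl3l4_cmd_clear(aq_hw, location);	/* quiesce the slot */
	hw_atl_rpfl3l4_ipv4_dest_addr_set(aq_hw, location, ip_dst);
	hw_atl_rpfl3l4_ipv4_src_addr_set(aq_hw, location, ip_src);
	hw_atl_rpfl3l4_cmd_set(aq_hw, location, cmd);	/* apply and enable */
}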

/* set Global MDIO Interface 1 */
void hw_atl_glb_mdio_iface1_set(struct aq_hw_s *hw, u32 value);

/* get Global MDIO Interface 1 */
u32 hw_atl_glb_mdio_iface1_get(struct aq_hw_s *hw);

/* set Global MDIO Interface 2 */
void hw_atl_glb_mdio_iface2_set(struct aq_hw_s *hw, u32 value);

/* get Global MDIO Interface 2 */
u32 hw_atl_glb_mdio_iface2_get(struct aq_hw_s *hw);

/* set Global MDIO Interface 3 */
void hw_atl_glb_mdio_iface3_set(struct aq_hw_s *hw, u32 value);

/* get Global MDIO Interface 3 */
u32 hw_atl_glb_mdio_iface3_get(struct aq_hw_s *hw);

/* set Global MDIO Interface 4 */
void hw_atl_glb_mdio_iface4_set(struct aq_hw_s *hw, u32 value);

/* get Global MDIO Interface 4 */
u32 hw_atl_glb_mdio_iface4_get(struct aq_hw_s *hw);

/* set Global MDIO Interface 5 */
void hw_atl_glb_mdio_iface5_set(struct aq_hw_s *hw, u32 value);

/* get Global MDIO Interface 5 */
u32 hw_atl_glb_mdio_iface5_get(struct aq_hw_s *hw);
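
/* get mdio busy status */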
u32 hw_atl_mdio_busy_get(struct aq_hw_s *aq_hw);

/* get global microprocessor ram semaphore */
u32 hw_atl_sem_ram_get(struct aq_hw_s *self);

/* get global microprocessor mdio semaphore */
u32 hw_atl_sem_mdio_get(struct aq_hw_s *self);
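
/* get global microprocessor reset semaphores */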
u32 hw_atl_sem_reset1_get(struct aq_hw_s *self);
u32 hw_atl_sem_reset2_get(struct aq_hw_s *self);

/* get global microprocessor scratch pad register */
u32 hw_atl_scrpad_get(struct aq_hw_s *aq_hw, u32 scratch_scp);

/* get global microprocessor scratch pad 12 register */
u32 hw_atl_scrpad12_get(struct aq_hw_s *self);

/* get global microprocessor scratch pad 25 register */
u32 hw_atl_scrpad25_get(struct aq_hw_s *self);

#endif /* HW_ATL_LLH_H */