xref: /linux/drivers/clk/at91/clk-master.c (revision 7029db09)
1 // SPDX-License-Identifier: GPL-2.0-or-later
2 /*
3  *  Copyright (C) 2013 Boris BREZILLON <b.brezillon@overkiz.com>
4  */
5 
6 #include <linux/clk-provider.h>
7 #include <linux/clkdev.h>
8 #include <linux/clk.h>
9 #include <linux/clk/at91_pmc.h>
10 #include <linux/of.h>
11 #include <linux/mfd/syscon.h>
12 #include <linux/regmap.h>
13 
14 #include "pmc.h"
15 
/* MCKR PRES field: 3-bit prescaler encoding (1 << PRES, or /3 when supported). */
#define MASTER_PRES_MASK	0x7
#define MASTER_PRES_MAX		MASTER_PRES_MASK
/* MCKR MDIV field: master clock divider position and width. */
#define MASTER_DIV_SHIFT	8
#define MASTER_DIV_MASK		0x7

/* Position of the CSS (clock source selection) field in PMC_MCR_V2 (sama7g5). */
#define PMC_MCR_CSS_SHIFT	(16)

/* Highest valid sama7g5 master clock id accepted at registration (0..4). */
#define MASTER_MAX_ID		4

/* Map a clk_hw pointer back to its enclosing struct clk_master. */
#define to_clk_master(hw) container_of(hw, struct clk_master, hw)
26 
/*
 * Runtime state for one master clock instance (MCK prescaler, MCK divider,
 * or a sama7g5 MCKx clock).
 */
struct clk_master {
	struct clk_hw hw;
	struct regmap *regmap;	/* PMC register map */
	spinlock_t *lock;	/* serializes PMC register accesses */
	const struct clk_master_layout *layout;
	const struct clk_master_characteristics *characteristics;
	struct at91_clk_pms pms;	/* state saved across suspend/resume */
	u32 *mux_table;	/* parent index <-> CSS value mapping (sama7g5) */
	u32 mckr;
	int chg_pid;	/* index of the changeable parent, or negative if none */
	u8 id;		/* master clock id (sama7g5 MCR_V2 indirection) */
	u8 parent;	/* cached CSS field value (sama7g5) */
	u8 div;		/* cached divider/prescaler field value */
	u32 safe_div;	/* divider applied around parent rate changes */
};

/* MCK div reference to be used by notifier. */
static struct clk_master *master_div;
45 
46 static inline bool clk_master_ready(struct clk_master *master)
47 {
48 	unsigned int bit = master->id ? AT91_PMC_MCKXRDY : AT91_PMC_MCKRDY;
49 	unsigned int status;
50 
51 	regmap_read(master->regmap, AT91_PMC_SR, &status);
52 
53 	return !!(status & bit);
54 }
55 
56 static int clk_master_prepare(struct clk_hw *hw)
57 {
58 	struct clk_master *master = to_clk_master(hw);
59 	unsigned long flags;
60 
61 	spin_lock_irqsave(master->lock, flags);
62 
63 	while (!clk_master_ready(master))
64 		cpu_relax();
65 
66 	spin_unlock_irqrestore(master->lock, flags);
67 
68 	return 0;
69 }
70 
71 static int clk_master_is_prepared(struct clk_hw *hw)
72 {
73 	struct clk_master *master = to_clk_master(hw);
74 	unsigned long flags;
75 	bool status;
76 
77 	spin_lock_irqsave(master->lock, flags);
78 	status = clk_master_ready(master);
79 	spin_unlock_irqrestore(master->lock, flags);
80 
81 	return status;
82 }
83 
84 static unsigned long clk_master_div_recalc_rate(struct clk_hw *hw,
85 						unsigned long parent_rate)
86 {
87 	u8 div;
88 	unsigned long flags, rate = parent_rate;
89 	struct clk_master *master = to_clk_master(hw);
90 	const struct clk_master_layout *layout = master->layout;
91 	const struct clk_master_characteristics *characteristics =
92 						master->characteristics;
93 	unsigned int mckr;
94 
95 	spin_lock_irqsave(master->lock, flags);
96 	regmap_read(master->regmap, master->layout->offset, &mckr);
97 	spin_unlock_irqrestore(master->lock, flags);
98 
99 	mckr &= layout->mask;
100 
101 	div = (mckr >> MASTER_DIV_SHIFT) & MASTER_DIV_MASK;
102 
103 	rate /= characteristics->divisors[div];
104 
105 	if (rate < characteristics->output.min)
106 		pr_warn("master clk div is underclocked");
107 	else if (rate > characteristics->output.max)
108 		pr_warn("master clk div is overclocked");
109 
110 	return rate;
111 }
112 
113 static int clk_master_div_save_context(struct clk_hw *hw)
114 {
115 	struct clk_master *master = to_clk_master(hw);
116 	struct clk_hw *parent_hw = clk_hw_get_parent(hw);
117 	unsigned long flags;
118 	unsigned int mckr, div;
119 
120 	spin_lock_irqsave(master->lock, flags);
121 	regmap_read(master->regmap, master->layout->offset, &mckr);
122 	spin_unlock_irqrestore(master->lock, flags);
123 
124 	mckr &= master->layout->mask;
125 	div = (mckr >> MASTER_DIV_SHIFT) & MASTER_DIV_MASK;
126 	div = master->characteristics->divisors[div];
127 
128 	master->pms.parent_rate = clk_hw_get_rate(parent_hw);
129 	master->pms.rate = DIV_ROUND_CLOSEST(master->pms.parent_rate, div);
130 
131 	return 0;
132 }
133 
134 static void clk_master_div_restore_context(struct clk_hw *hw)
135 {
136 	struct clk_master *master = to_clk_master(hw);
137 	unsigned long flags;
138 	unsigned int mckr;
139 	u8 div;
140 
141 	spin_lock_irqsave(master->lock, flags);
142 	regmap_read(master->regmap, master->layout->offset, &mckr);
143 	spin_unlock_irqrestore(master->lock, flags);
144 
145 	mckr &= master->layout->mask;
146 	div = (mckr >> MASTER_DIV_SHIFT) & MASTER_DIV_MASK;
147 	div = master->characteristics->divisors[div];
148 
149 	if (div != DIV_ROUND_CLOSEST(master->pms.parent_rate, master->pms.rate))
150 		pr_warn("MCKR DIV not configured properly by firmware!\n");
151 }
152 
/* MCK div clock with CLK_SET_RATE_GATE: read-only rate, register-backed. */
static const struct clk_ops master_div_ops = {
	.prepare = clk_master_prepare,
	.is_prepared = clk_master_is_prepared,
	.recalc_rate = clk_master_div_recalc_rate,
	.save_context = clk_master_div_save_context,
	.restore_context = clk_master_div_restore_context,
};
160 
161 /* This function must be called with lock acquired. */
162 static int clk_master_div_set(struct clk_master *master,
163 			      unsigned long parent_rate, int div)
164 {
165 	const struct clk_master_characteristics *characteristics =
166 						master->characteristics;
167 	unsigned long rate = parent_rate;
168 	unsigned int max_div = 0, div_index = 0, max_div_index = 0;
169 	unsigned int i, mckr, tmp;
170 	int ret;
171 
172 	for (i = 0; i < ARRAY_SIZE(characteristics->divisors); i++) {
173 		if (!characteristics->divisors[i])
174 			break;
175 
176 		if (div == characteristics->divisors[i])
177 			div_index = i;
178 
179 		if (max_div < characteristics->divisors[i]) {
180 			max_div = characteristics->divisors[i];
181 			max_div_index = i;
182 		}
183 	}
184 
185 	if (div > max_div)
186 		div_index = max_div_index;
187 
188 	ret = regmap_read(master->regmap, master->layout->offset, &mckr);
189 	if (ret)
190 		return ret;
191 
192 	mckr &= master->layout->mask;
193 	tmp = (mckr >> MASTER_DIV_SHIFT) & MASTER_DIV_MASK;
194 	if (tmp == div_index)
195 		return 0;
196 
197 	rate /= characteristics->divisors[div_index];
198 	if (rate < characteristics->output.min)
199 		pr_warn("master clk div is underclocked");
200 	else if (rate > characteristics->output.max)
201 		pr_warn("master clk div is overclocked");
202 
203 	mckr &= ~(MASTER_DIV_MASK << MASTER_DIV_SHIFT);
204 	mckr |= (div_index << MASTER_DIV_SHIFT);
205 	ret = regmap_write(master->regmap, master->layout->offset, mckr);
206 	if (ret)
207 		return ret;
208 
209 	while (!clk_master_ready(master))
210 		cpu_relax();
211 
212 	master->div = characteristics->divisors[div_index];
213 
214 	return 0;
215 }
216 
/*
 * .recalc_rate for the runtime-changeable MCK div clock: uses the divisor
 * cached by clk_master_div_set() instead of reading the register back.
 */
static unsigned long clk_master_div_recalc_rate_chg(struct clk_hw *hw,
						    unsigned long parent_rate)
{
	struct clk_master *master = to_clk_master(hw);

	return DIV_ROUND_CLOSEST_ULL(parent_rate, master->div);
}
224 
225 static void clk_master_div_restore_context_chg(struct clk_hw *hw)
226 {
227 	struct clk_master *master = to_clk_master(hw);
228 	unsigned long flags;
229 	int ret;
230 
231 	spin_lock_irqsave(master->lock, flags);
232 	ret = clk_master_div_set(master, master->pms.parent_rate,
233 				 DIV_ROUND_CLOSEST(master->pms.parent_rate,
234 						   master->pms.rate));
235 	spin_unlock_irqrestore(master->lock, flags);
236 	if (ret)
237 		pr_warn("Failed to restore MCK DIV clock\n");
238 }
239 
/* MCK div clock whose divider may change at runtime (cached-div recalc). */
static const struct clk_ops master_div_ops_chg = {
	.prepare = clk_master_prepare,
	.is_prepared = clk_master_is_prepared,
	.recalc_rate = clk_master_div_recalc_rate_chg,
	.save_context = clk_master_div_save_context,
	.restore_context = clk_master_div_restore_context_chg,
};
247 
/*
 * Rate-change notifier for the MCK div clock (registered in
 * at91_clk_register_master_div() when a safe divider is provided).
 *
 * Before an upstream rate change, it parks MCK DIV on master_div->safe_div
 * to avoid overclocking while the PLL moves; afterwards it picks the
 * largest rate not exceeding characteristics->output.max and reprograms
 * the divider accordingly.
 */
static int clk_master_div_notifier_fn(struct notifier_block *notifier,
				      unsigned long code, void *data)
{
	const struct clk_master_characteristics *characteristics =
						master_div->characteristics;
	struct clk_notifier_data *cnd = data;
	unsigned long flags, new_parent_rate, new_rate;
	unsigned int mckr, div, new_div = 0;
	int ret, i;
	long tmp_diff;
	long best_diff = -1;

	spin_lock_irqsave(master_div->lock, flags);
	switch (code) {
	case PRE_RATE_CHANGE:
		/*
		 * We want to avoid any overclocking of MCK DIV domain. To do
		 * this we set a safe divider (the underclocking is not of
		 * interest as we can go as low as 32KHz). The relation
		 * b/w this clock and its parents are as follows:
		 *
		 * FRAC PLL -> DIV PLL -> MCK DIV
		 *
		 * With the proper safe divider we should be good even with FRAC
		 * PLL at its maximum value.
		 */
		ret = regmap_read(master_div->regmap, master_div->layout->offset,
				  &mckr);
		if (ret) {
			ret = NOTIFY_STOP_MASK;
			goto unlock;
		}

		mckr &= master_div->layout->mask;
		div = (mckr >> MASTER_DIV_SHIFT) & MASTER_DIV_MASK;

		/* Switch to safe divider. */
		clk_master_div_set(master_div,
				   cnd->old_rate * characteristics->divisors[div],
				   master_div->safe_div);
		/* On success ret is 0 here, i.e. NOTIFY_DONE. */
		break;

	case POST_RATE_CHANGE:
		/*
		 * At this point we want to restore MCK DIV domain to its maximum
		 * allowed rate.
		 */
		ret = regmap_read(master_div->regmap, master_div->layout->offset,
				  &mckr);
		if (ret) {
			ret = NOTIFY_STOP_MASK;
			goto unlock;
		}

		mckr &= master_div->layout->mask;
		div = (mckr >> MASTER_DIV_SHIFT) & MASTER_DIV_MASK;
		new_parent_rate = cnd->new_rate * characteristics->divisors[div];

		/* Pick the divisor giving the highest in-range output rate. */
		for (i = 0; i < ARRAY_SIZE(characteristics->divisors); i++) {
			if (!characteristics->divisors[i])
				break;

			new_rate = DIV_ROUND_CLOSEST_ULL(new_parent_rate,
							 characteristics->divisors[i]);

			/* Skip divisors that would overclock the domain. */
			tmp_diff = characteristics->output.max - new_rate;
			if (tmp_diff < 0)
				continue;

			if (best_diff < 0 || best_diff > tmp_diff) {
				new_div = characteristics->divisors[i];
				best_diff = tmp_diff;
			}

			if (!tmp_diff)
				break;
		}

		if (!new_div) {
			ret = NOTIFY_STOP_MASK;
			goto unlock;
		}

		/* Update the div to preserve MCK DIV clock rate. */
		clk_master_div_set(master_div, new_parent_rate,
				   new_div);

		ret = NOTIFY_OK;
		break;

	default:
		ret = NOTIFY_DONE;
		break;
	}

unlock:
	spin_unlock_irqrestore(master_div->lock, flags);

	return ret;
}
348 
/* Registered on the MCK div clock in at91_clk_register_master_div(). */
static struct notifier_block clk_master_div_notifier = {
	.notifier_call = clk_master_div_notifier_fn,
};
352 
/*
 * Helper for determine_rate(): compute the output rate obtained from
 * @parent at PRES encoding @div (rate = parent_rate >> div, except
 * MASTER_PRES_MAX which selects divide-by-3) and adopt it as the best
 * candidate in @req if it is at least as close to the requested rate as
 * the best one seen so far.
 */
static void clk_sama7g5_master_best_diff(struct clk_rate_request *req,
					 struct clk_hw *parent,
					 unsigned long parent_rate,
					 long *best_rate,
					 long *best_diff,
					 u32 div)
{
	unsigned long tmp_rate, tmp_diff;

	if (div == MASTER_PRES_MAX)
		tmp_rate = parent_rate / 3;
	else
		tmp_rate = parent_rate >> div;

	/*
	 * NOTE(review): both operands are unsigned, so when tmp_rate >
	 * req->rate the subtraction wraps and abs() cannot recover the
	 * distance; such candidates get a huge diff and are effectively
	 * never selected — confirm this is the intended behavior.
	 */
	tmp_diff = abs(req->rate - tmp_rate);

	/* ">=" means a later candidate wins ties (higher div encoding). */
	if (*best_diff < 0 || *best_diff >= tmp_diff) {
		*best_rate = tmp_rate;
		*best_diff = tmp_diff;
		req->best_parent_rate = parent_rate;
		req->best_parent_hw = parent;
	}
}
376 
377 static int clk_master_pres_determine_rate(struct clk_hw *hw,
378 					  struct clk_rate_request *req)
379 {
380 	struct clk_master *master = to_clk_master(hw);
381 	struct clk_rate_request req_parent = *req;
382 	const struct clk_master_characteristics *characteristics =
383 							master->characteristics;
384 	struct clk_hw *parent;
385 	long best_rate = LONG_MIN, best_diff = LONG_MIN;
386 	u32 pres;
387 	int i;
388 
389 	if (master->chg_pid < 0)
390 		return -EOPNOTSUPP;
391 
392 	parent = clk_hw_get_parent_by_index(hw, master->chg_pid);
393 	if (!parent)
394 		return -EOPNOTSUPP;
395 
396 	for (i = 0; i <= MASTER_PRES_MAX; i++) {
397 		if (characteristics->have_div3_pres && i == MASTER_PRES_MAX)
398 			pres = 3;
399 		else
400 			pres = 1 << i;
401 
402 		req_parent.rate = req->rate * pres;
403 		if (__clk_determine_rate(parent, &req_parent))
404 			continue;
405 
406 		clk_sama7g5_master_best_diff(req, parent, req_parent.rate,
407 					     &best_diff, &best_rate, pres);
408 		if (!best_diff)
409 			break;
410 	}
411 
412 	return 0;
413 }
414 
/*
 * .set_rate for the changeable MCK prescaler: convert the rounded
 * parent_rate/rate ratio into a PRES field encoding and program it,
 * then wait for the master clock to settle.
 *
 * NOTE(review): pres == 3 is mapped to MASTER_PRES_MAX unconditionally,
 * even when the layout has no div3 prescaler; and non-power-of-two ratios
 * such as 5, 6 or 7 fall through to ffs(pres) - 1, silently rounding the
 * prescaler down — confirm callers can never request those.
 */
static int clk_master_pres_set_rate(struct clk_hw *hw, unsigned long rate,
				    unsigned long parent_rate)
{
	struct clk_master *master = to_clk_master(hw);
	unsigned long flags;
	unsigned int pres, mckr, tmp;
	int ret;

	pres = DIV_ROUND_CLOSEST(parent_rate, rate);
	if (pres > MASTER_PRES_MAX)
		return -EINVAL;

	else if (pres == 3)
		pres = MASTER_PRES_MAX;
	else if (pres)
		pres = ffs(pres) - 1;

	spin_lock_irqsave(master->lock, flags);
	ret = regmap_read(master->regmap, master->layout->offset, &mckr);
	if (ret)
		goto unlock;

	mckr &= master->layout->mask;
	tmp = (mckr >> master->layout->pres_shift) & MASTER_PRES_MASK;
	/* Nothing to do if the register already holds the wanted encoding. */
	if (pres == tmp)
		goto unlock;

	mckr &= ~(MASTER_PRES_MASK << master->layout->pres_shift);
	mckr |= (pres << master->layout->pres_shift);
	ret = regmap_write(master->regmap, master->layout->offset, mckr);
	if (ret)
		goto unlock;

	while (!clk_master_ready(master))
		cpu_relax();
unlock:
	spin_unlock_irqrestore(master->lock, flags);

	return ret;
}
455 
456 static unsigned long clk_master_pres_recalc_rate(struct clk_hw *hw,
457 						 unsigned long parent_rate)
458 {
459 	struct clk_master *master = to_clk_master(hw);
460 	const struct clk_master_characteristics *characteristics =
461 						master->characteristics;
462 	unsigned long flags;
463 	unsigned int val, pres;
464 
465 	spin_lock_irqsave(master->lock, flags);
466 	regmap_read(master->regmap, master->layout->offset, &val);
467 	spin_unlock_irqrestore(master->lock, flags);
468 
469 	val &= master->layout->mask;
470 	pres = (val >> master->layout->pres_shift) & MASTER_PRES_MASK;
471 	if (pres == MASTER_PRES_MAX && characteristics->have_div3_pres)
472 		pres = 3;
473 	else
474 		pres = (1 << pres);
475 
476 	return DIV_ROUND_CLOSEST_ULL(parent_rate, pres);
477 }
478 
479 static u8 clk_master_pres_get_parent(struct clk_hw *hw)
480 {
481 	struct clk_master *master = to_clk_master(hw);
482 	unsigned long flags;
483 	unsigned int mckr;
484 
485 	spin_lock_irqsave(master->lock, flags);
486 	regmap_read(master->regmap, master->layout->offset, &mckr);
487 	spin_unlock_irqrestore(master->lock, flags);
488 
489 	mckr &= master->layout->mask;
490 
491 	return mckr & AT91_PMC_CSS;
492 }
493 
494 static int clk_master_pres_save_context(struct clk_hw *hw)
495 {
496 	struct clk_master *master = to_clk_master(hw);
497 	struct clk_hw *parent_hw = clk_hw_get_parent(hw);
498 	unsigned long flags;
499 	unsigned int val, pres;
500 
501 	spin_lock_irqsave(master->lock, flags);
502 	regmap_read(master->regmap, master->layout->offset, &val);
503 	spin_unlock_irqrestore(master->lock, flags);
504 
505 	val &= master->layout->mask;
506 	pres = (val >> master->layout->pres_shift) & MASTER_PRES_MASK;
507 	if (pres == MASTER_PRES_MAX && master->characteristics->have_div3_pres)
508 		pres = 3;
509 	else
510 		pres = (1 << pres);
511 
512 	master->pms.parent = val & AT91_PMC_CSS;
513 	master->pms.parent_rate = clk_hw_get_rate(parent_hw);
514 	master->pms.rate = DIV_ROUND_CLOSEST_ULL(master->pms.parent_rate, pres);
515 
516 	return 0;
517 }
518 
519 static void clk_master_pres_restore_context(struct clk_hw *hw)
520 {
521 	struct clk_master *master = to_clk_master(hw);
522 	unsigned long flags;
523 	unsigned int val, pres;
524 
525 	spin_lock_irqsave(master->lock, flags);
526 	regmap_read(master->regmap, master->layout->offset, &val);
527 	spin_unlock_irqrestore(master->lock, flags);
528 
529 	val &= master->layout->mask;
530 	pres = (val >> master->layout->pres_shift) & MASTER_PRES_MASK;
531 	if (pres == MASTER_PRES_MAX && master->characteristics->have_div3_pres)
532 		pres = 3;
533 	else
534 		pres = (1 << pres);
535 
536 	if (master->pms.rate !=
537 	    DIV_ROUND_CLOSEST_ULL(master->pms.parent_rate, pres) ||
538 	    (master->pms.parent != (val & AT91_PMC_CSS)))
539 		pr_warn("MCKR PRES was not configured properly by firmware!\n");
540 }
541 
/* Resume path for the changeable prescaler: reprogram the saved rate. */
static void clk_master_pres_restore_context_chg(struct clk_hw *hw)
{
	struct clk_master *master = to_clk_master(hw);

	clk_master_pres_set_rate(hw, master->pms.rate, master->pms.parent_rate);
}
548 
/* MCK prescaler with CLK_SET_RATE_GATE: rate fixed while clock is in use. */
static const struct clk_ops master_pres_ops = {
	.prepare = clk_master_prepare,
	.is_prepared = clk_master_is_prepared,
	.recalc_rate = clk_master_pres_recalc_rate,
	.get_parent = clk_master_pres_get_parent,
	.save_context = clk_master_pres_save_context,
	.restore_context = clk_master_pres_restore_context,
};

/* MCK prescaler whose rate may change at runtime via the chg_pid parent. */
static const struct clk_ops master_pres_ops_chg = {
	.prepare = clk_master_prepare,
	.is_prepared = clk_master_is_prepared,
	.determine_rate = clk_master_pres_determine_rate,
	.recalc_rate = clk_master_pres_recalc_rate,
	.get_parent = clk_master_pres_get_parent,
	.set_rate = clk_master_pres_set_rate,
	.save_context = clk_master_pres_save_context,
	.restore_context = clk_master_pres_restore_context_chg,
};
568 
/*
 * Allocate and register one master clock (prescaler or divider flavour,
 * selected by @ops). Returns the registered clk_hw or an ERR_PTR; the
 * clk_master allocation is freed on registration failure.
 */
static struct clk_hw * __init
at91_clk_register_master_internal(struct regmap *regmap,
		const char *name, int num_parents,
		const char **parent_names,
		const struct clk_master_layout *layout,
		const struct clk_master_characteristics *characteristics,
		const struct clk_ops *ops, spinlock_t *lock, u32 flags,
		int chg_pid)
{
	struct clk_master *master;
	/* Stack-local is fine: clk_hw_register() consumes init during the call. */
	struct clk_init_data init;
	struct clk_hw *hw;
	unsigned int mckr;
	unsigned long irqflags;
	int ret;

	if (!name || !num_parents || !parent_names || !lock)
		return ERR_PTR(-EINVAL);

	master = kzalloc(sizeof(*master), GFP_KERNEL);
	if (!master)
		return ERR_PTR(-ENOMEM);

	init.name = name;
	init.ops = ops;
	init.parent_names = parent_names;
	init.num_parents = num_parents;
	init.flags = flags;

	master->hw.init = &init;
	master->layout = layout;
	master->characteristics = characteristics;
	master->regmap = regmap;
	master->chg_pid = chg_pid;
	master->lock = lock;

	/*
	 * The changeable div ops use a cached divisor (recalc_rate_chg), so
	 * seed it from the current MDIV field before registration.
	 */
	if (ops == &master_div_ops_chg) {
		spin_lock_irqsave(master->lock, irqflags);
		regmap_read(master->regmap, master->layout->offset, &mckr);
		spin_unlock_irqrestore(master->lock, irqflags);

		mckr &= layout->mask;
		mckr = (mckr >> MASTER_DIV_SHIFT) & MASTER_DIV_MASK;
		master->div = characteristics->divisors[mckr];
	}

	hw = &master->hw;
	ret = clk_hw_register(NULL, &master->hw);
	if (ret) {
		kfree(master);
		hw = ERR_PTR(ret);
	}

	return hw;
}
624 
625 struct clk_hw * __init
626 at91_clk_register_master_pres(struct regmap *regmap,
627 		const char *name, int num_parents,
628 		const char **parent_names,
629 		const struct clk_master_layout *layout,
630 		const struct clk_master_characteristics *characteristics,
631 		spinlock_t *lock, u32 flags, int chg_pid)
632 {
633 	const struct clk_ops *ops;
634 
635 	if (flags & CLK_SET_RATE_GATE)
636 		ops = &master_pres_ops;
637 	else
638 		ops = &master_pres_ops_chg;
639 
640 	return at91_clk_register_master_internal(regmap, name, num_parents,
641 						 parent_names, layout,
642 						 characteristics, ops,
643 						 lock, flags, chg_pid);
644 }
645 
646 struct clk_hw * __init
647 at91_clk_register_master_div(struct regmap *regmap,
648 		const char *name, const char *parent_name,
649 		const struct clk_master_layout *layout,
650 		const struct clk_master_characteristics *characteristics,
651 		spinlock_t *lock, u32 flags, u32 safe_div)
652 {
653 	const struct clk_ops *ops;
654 	struct clk_hw *hw;
655 
656 	if (flags & CLK_SET_RATE_GATE)
657 		ops = &master_div_ops;
658 	else
659 		ops = &master_div_ops_chg;
660 
661 	hw = at91_clk_register_master_internal(regmap, name, 1,
662 					       &parent_name, layout,
663 					       characteristics, ops,
664 					       lock, flags, -EINVAL);
665 
666 	if (!IS_ERR(hw) && safe_div) {
667 		master_div = to_clk_master(hw);
668 		master_div->safe_div = safe_div;
669 		clk_notifier_register(hw->clk,
670 				      &clk_master_div_notifier);
671 	}
672 
673 	return hw;
674 }
675 
676 static unsigned long
677 clk_sama7g5_master_recalc_rate(struct clk_hw *hw,
678 			       unsigned long parent_rate)
679 {
680 	struct clk_master *master = to_clk_master(hw);
681 
682 	return DIV_ROUND_CLOSEST_ULL(parent_rate, (1 << master->div));
683 }
684 
/*
 * .determine_rate for sama7g5 MCKx: first scan every fixed-rate parent with
 * every divider encoding; then, if a changeable parent (chg_pid) exists,
 * also ask it for rate * divider candidates. The winning parent/rate pair
 * is recorded in @req by clk_sama7g5_master_best_diff().
 */
static int clk_sama7g5_master_determine_rate(struct clk_hw *hw,
					     struct clk_rate_request *req)
{
	struct clk_master *master = to_clk_master(hw);
	struct clk_rate_request req_parent = *req;
	struct clk_hw *parent;
	long best_rate = LONG_MIN, best_diff = LONG_MIN;
	unsigned long parent_rate;
	unsigned int div, i;

	/* First: check the dividers of MCR. */
	for (i = 0; i < clk_hw_get_num_parents(hw); i++) {
		parent = clk_hw_get_parent_by_index(hw, i);
		if (!parent)
			continue;

		parent_rate = clk_hw_get_rate(parent);
		if (!parent_rate)
			continue;

		/* div is the MCR.DIV encoding; MASTER_PRES_MAX means /3. */
		for (div = 0; div < MASTER_PRES_MAX + 1; div++) {
			clk_sama7g5_master_best_diff(req, parent, parent_rate,
						     &best_rate, &best_diff,
						     div);
			if (!best_diff)
				break;
		}

		if (!best_diff)
			break;
	}

	/* Second: try to request rate form changeable parent. */
	if (master->chg_pid < 0)
		goto end;

	parent = clk_hw_get_parent_by_index(hw, master->chg_pid);
	if (!parent)
		goto end;

	for (div = 0; div < MASTER_PRES_MAX + 1; div++) {
		if (div == MASTER_PRES_MAX)
			req_parent.rate = req->rate * 3;
		else
			req_parent.rate = req->rate << div;

		if (__clk_determine_rate(parent, &req_parent))
			continue;

		clk_sama7g5_master_best_diff(req, parent, req_parent.rate,
					     &best_rate, &best_diff, div);

		if (!best_diff)
			break;
	}

end:
	/*
	 * NOTE(review): this dereferences req->best_parent_hw, which is only
	 * valid if some candidate above was accepted — confirm a non-NULL
	 * best_parent_hw is guaranteed on entry.
	 */
	pr_debug("MCK: %s, best_rate = %ld, parent clk: %s @ %ld\n",
		 __func__, best_rate,
		 __clk_get_name((req->best_parent_hw)->clk),
		req->best_parent_rate);

	if (best_rate < 0)
		return -EINVAL;

	req->rate = best_rate;

	return 0;
}
754 
755 static u8 clk_sama7g5_master_get_parent(struct clk_hw *hw)
756 {
757 	struct clk_master *master = to_clk_master(hw);
758 	unsigned long flags;
759 	u8 index;
760 
761 	spin_lock_irqsave(master->lock, flags);
762 	index = clk_mux_val_to_index(&master->hw, master->mux_table, 0,
763 				     master->parent);
764 	spin_unlock_irqrestore(master->lock, flags);
765 
766 	return index;
767 }
768 
769 static int clk_sama7g5_master_set_parent(struct clk_hw *hw, u8 index)
770 {
771 	struct clk_master *master = to_clk_master(hw);
772 	unsigned long flags;
773 
774 	if (index >= clk_hw_get_num_parents(hw))
775 		return -EINVAL;
776 
777 	spin_lock_irqsave(master->lock, flags);
778 	master->parent = clk_mux_index_to_val(master->mux_table, 0, index);
779 	spin_unlock_irqrestore(master->lock, flags);
780 
781 	return 0;
782 }
783 
/*
 * Program CSS/DIV (and the enable bit when @status is non-zero) for
 * master->id through the indirect PMC_MCR_V2 register, then wait for the
 * clock to settle — but only when the parent actually changed.
 */
static void clk_sama7g5_master_set(struct clk_master *master,
				   unsigned int status)
{
	unsigned long flags;
	unsigned int val, cparent;
	unsigned int enable = status ? AT91_PMC_MCR_V2_EN : 0;
	unsigned int parent = master->parent << PMC_MCR_CSS_SHIFT;
	unsigned int div = master->div << MASTER_DIV_SHIFT;

	spin_lock_irqsave(master->lock, flags);

	/* Select the clock id first, then read its current settings back. */
	regmap_write(master->regmap, AT91_PMC_MCR_V2,
		     AT91_PMC_MCR_V2_ID(master->id));
	regmap_read(master->regmap, AT91_PMC_MCR_V2, &val);
	regmap_update_bits(master->regmap, AT91_PMC_MCR_V2,
			   enable | AT91_PMC_MCR_V2_CSS | AT91_PMC_MCR_V2_DIV |
			   AT91_PMC_MCR_V2_CMD | AT91_PMC_MCR_V2_ID_MSK,
			   enable | parent | div | AT91_PMC_MCR_V2_CMD |
			   AT91_PMC_MCR_V2_ID(master->id));

	cparent = (val & AT91_PMC_MCR_V2_CSS) >> PMC_MCR_CSS_SHIFT;

	/* Wait here only if parent is being changed. */
	while ((cparent != master->parent) && !clk_master_ready(master))
		cpu_relax();

	spin_unlock_irqrestore(master->lock, flags);
}
812 
/* .enable: write the cached CSS/DIV settings with the enable bit set. */
static int clk_sama7g5_master_enable(struct clk_hw *hw)
{
	struct clk_master *master = to_clk_master(hw);

	clk_sama7g5_master_set(master, 1);

	return 0;
}
821 
/*
 * .disable: clear the enable bit for master->id through the indirect
 * PMC_MCR_V2 register (clock id selected by the preceding write).
 */
static void clk_sama7g5_master_disable(struct clk_hw *hw)
{
	struct clk_master *master = to_clk_master(hw);
	unsigned long flags;

	spin_lock_irqsave(master->lock, flags);

	regmap_write(master->regmap, AT91_PMC_MCR_V2, master->id);
	regmap_update_bits(master->regmap, AT91_PMC_MCR_V2,
			   AT91_PMC_MCR_V2_EN | AT91_PMC_MCR_V2_CMD |
			   AT91_PMC_MCR_V2_ID_MSK,
			   AT91_PMC_MCR_V2_CMD |
			   AT91_PMC_MCR_V2_ID(master->id));

	spin_unlock_irqrestore(master->lock, flags);
}
838 
839 static int clk_sama7g5_master_is_enabled(struct clk_hw *hw)
840 {
841 	struct clk_master *master = to_clk_master(hw);
842 	unsigned long flags;
843 	unsigned int val;
844 
845 	spin_lock_irqsave(master->lock, flags);
846 
847 	regmap_write(master->regmap, AT91_PMC_MCR_V2, master->id);
848 	regmap_read(master->regmap, AT91_PMC_MCR_V2, &val);
849 
850 	spin_unlock_irqrestore(master->lock, flags);
851 
852 	return !!(val & AT91_PMC_MCR_V2_EN);
853 }
854 
855 static int clk_sama7g5_master_set_rate(struct clk_hw *hw, unsigned long rate,
856 				       unsigned long parent_rate)
857 {
858 	struct clk_master *master = to_clk_master(hw);
859 	unsigned long div, flags;
860 
861 	div = DIV_ROUND_CLOSEST(parent_rate, rate);
862 	if ((div > (1 << (MASTER_PRES_MAX - 1))) || (div & (div - 1)))
863 		return -EINVAL;
864 
865 	if (div == 3)
866 		div = MASTER_PRES_MAX;
867 	else if (div)
868 		div = ffs(div) - 1;
869 
870 	spin_lock_irqsave(master->lock, flags);
871 	master->div = div;
872 	spin_unlock_irqrestore(master->lock, flags);
873 
874 	return 0;
875 }
876 
/* Save only the enable state; CSS/DIV stay cached in master->parent/div. */
static int clk_sama7g5_master_save_context(struct clk_hw *hw)
{
	struct clk_master *master = to_clk_master(hw);

	master->pms.status = clk_sama7g5_master_is_enabled(hw);

	return 0;
}

/* Reprogram (and re-enable) the clock only if it was on at suspend time. */
static void clk_sama7g5_master_restore_context(struct clk_hw *hw)
{
	struct clk_master *master = to_clk_master(hw);

	if (master->pms.status)
		clk_sama7g5_master_set(master, master->pms.status);
}
893 
/* Full-featured ops for sama7g5 MCKx clocks (gate + mux + divider). */
static const struct clk_ops sama7g5_master_ops = {
	.enable = clk_sama7g5_master_enable,
	.disable = clk_sama7g5_master_disable,
	.is_enabled = clk_sama7g5_master_is_enabled,
	.recalc_rate = clk_sama7g5_master_recalc_rate,
	.determine_rate = clk_sama7g5_master_determine_rate,
	.set_rate = clk_sama7g5_master_set_rate,
	.get_parent = clk_sama7g5_master_get_parent,
	.set_parent = clk_sama7g5_master_set_parent,
	.save_context = clk_sama7g5_master_save_context,
	.restore_context = clk_sama7g5_master_restore_context,
};
906 
/*
 * Allocate and register one sama7g5 master clock (MCKx, id 0..4). The
 * current CSS/DIV settings are read back from PMC_MCR_V2 and cached so
 * the mux/divider callbacks can work without further register reads.
 * Returns the registered clk_hw or an ERR_PTR.
 */
struct clk_hw * __init
at91_clk_sama7g5_register_master(struct regmap *regmap,
				 const char *name, int num_parents,
				 const char **parent_names,
				 u32 *mux_table,
				 spinlock_t *lock, u8 id,
				 bool critical, int chg_pid)
{
	struct clk_master *master;
	struct clk_hw *hw;
	/* Stack-local is fine: clk_hw_register() consumes init during the call. */
	struct clk_init_data init;
	unsigned long flags;
	unsigned int val;
	int ret;

	if (!name || !num_parents || !parent_names || !mux_table ||
	    !lock || id > MASTER_MAX_ID)
		return ERR_PTR(-EINVAL);

	master = kzalloc(sizeof(*master), GFP_KERNEL);
	if (!master)
		return ERR_PTR(-ENOMEM);

	init.name = name;
	init.ops = &sama7g5_master_ops;
	init.parent_names = parent_names;
	init.num_parents = num_parents;
	init.flags = CLK_SET_RATE_GATE | CLK_SET_PARENT_GATE;
	if (chg_pid >= 0)
		init.flags |= CLK_SET_RATE_PARENT;
	if (critical)
		init.flags |= CLK_IS_CRITICAL;

	master->hw.init = &init;
	master->regmap = regmap;
	master->id = id;
	master->chg_pid = chg_pid;
	master->lock = lock;
	master->mux_table = mux_table;

	/* Seed the cached CSS/DIV fields from the hardware. */
	spin_lock_irqsave(master->lock, flags);
	regmap_write(master->regmap, AT91_PMC_MCR_V2, master->id);
	regmap_read(master->regmap, AT91_PMC_MCR_V2, &val);
	master->parent = (val & AT91_PMC_MCR_V2_CSS) >> PMC_MCR_CSS_SHIFT;
	master->div = (val & AT91_PMC_MCR_V2_DIV) >> MASTER_DIV_SHIFT;
	spin_unlock_irqrestore(master->lock, flags);

	hw = &master->hw;
	ret = clk_hw_register(NULL, &master->hw);
	if (ret) {
		kfree(master);
		hw = ERR_PTR(ret);
	}

	return hw;
}
963 
/* MCKR field layout for at91rm9200-class PMCs (PRES field at bit 2). */
const struct clk_master_layout at91rm9200_master_layout = {
	.mask = 0x31F,
	.pres_shift = 2,
	.offset = AT91_PMC_MCKR,
};

/* MCKR field layout for at91sam9x5-class PMCs (PRES field at bit 4). */
const struct clk_master_layout at91sam9x5_master_layout = {
	.mask = 0x373,
	.pres_shift = 4,
	.offset = AT91_PMC_MCKR,
};
975