// SPDX-License-Identifier: GPL-2.0-or-later
/*
 * Copyright (C) 2013 Boris BREZILLON <b.brezillon@overkiz.com>
 */

#include <linux/clk-provider.h>
#include <linux/clkdev.h>
#include <linux/clk/at91_pmc.h>
#include <linux/of.h>
#include <linux/mfd/syscon.h>
#include <linux/regmap.h>

#include "pmc.h"

#define MASTER_PRES_MASK	0x7
#define MASTER_PRES_MAX		MASTER_PRES_MASK
#define MASTER_DIV_SHIFT	8
#define MASTER_DIV_MASK		0x7

#define PMC_MCR_CSS_SHIFT	(16)

#define MASTER_MAX_ID		4

#define to_clk_master(hw) container_of(hw, struct clk_master, hw)

struct clk_master {
	struct clk_hw hw;
	struct regmap *regmap;
	spinlock_t *lock;
	const struct clk_master_layout *layout;
	const struct clk_master_characteristics *characteristics;
	struct at91_clk_pms pms;
	u32 *mux_table;
	u32 mckr;
	int chg_pid;
	u8 id;
	u8 parent;
	u8 div;
};

static inline bool clk_master_ready(struct clk_master *master)
{
	unsigned int bit = master->id ? AT91_PMC_MCKXRDY : AT91_PMC_MCKRDY;
	unsigned int status;

	regmap_read(master->regmap, AT91_PMC_SR, &status);

	return !!(status & bit);
}

static int clk_master_prepare(struct clk_hw *hw)
{
	struct clk_master *master = to_clk_master(hw);
	unsigned long flags;

	spin_lock_irqsave(master->lock, flags);

	while (!clk_master_ready(master))
		cpu_relax();

	spin_unlock_irqrestore(master->lock, flags);

	return 0;
}

static int clk_master_is_prepared(struct clk_hw *hw)
{
	struct clk_master *master = to_clk_master(hw);
	unsigned long flags;
	bool status;

	spin_lock_irqsave(master->lock, flags);
	status = clk_master_ready(master);
	spin_unlock_irqrestore(master->lock, flags);

	return status;
}

static unsigned long clk_master_div_recalc_rate(struct clk_hw *hw,
						unsigned long parent_rate)
{
	u8 div;
	unsigned long flags, rate = parent_rate;
	struct clk_master *master = to_clk_master(hw);
	const struct clk_master_layout *layout = master->layout;
	const struct clk_master_characteristics *characteristics =
						master->characteristics;
	unsigned int mckr;

	spin_lock_irqsave(master->lock, flags);
	regmap_read(master->regmap, master->layout->offset, &mckr);
	spin_unlock_irqrestore(master->lock, flags);

	mckr &= layout->mask;

	div = (mckr >> MASTER_DIV_SHIFT) & MASTER_DIV_MASK;

	rate /= characteristics->divisors[div];

	if (rate < characteristics->output.min)
		pr_warn("master clk div is underclocked");
	else if (rate > characteristics->output.max)
		pr_warn("master clk div is overclocked");

	return rate;
}

static int clk_master_div_save_context(struct clk_hw *hw)
{
	struct clk_master *master = to_clk_master(hw);
	struct clk_hw *parent_hw = clk_hw_get_parent(hw);
	unsigned long flags;
	unsigned int mckr, div;

	spin_lock_irqsave(master->lock, flags);
	regmap_read(master->regmap, master->layout->offset, &mckr);
	spin_unlock_irqrestore(master->lock, flags);

	mckr &= master->layout->mask;
	div = (mckr >> MASTER_DIV_SHIFT) & MASTER_DIV_MASK;
	div = master->characteristics->divisors[div];

	master->pms.parent_rate = clk_hw_get_rate(parent_hw);
	master->pms.rate = DIV_ROUND_CLOSEST(master->pms.parent_rate, div);

	return 0;
}

static void clk_master_div_restore_context(struct clk_hw *hw)
{
	struct clk_master *master = to_clk_master(hw);
	unsigned long flags;
	unsigned int mckr;
	u8 div;

	spin_lock_irqsave(master->lock, flags);
	regmap_read(master->regmap, master->layout->offset, &mckr);
	spin_unlock_irqrestore(master->lock, flags);

	mckr &= master->layout->mask;
	div = (mckr >> MASTER_DIV_SHIFT) & MASTER_DIV_MASK;
	div = master->characteristics->divisors[div];

	if (div != DIV_ROUND_CLOSEST(master->pms.parent_rate, master->pms.rate))
		pr_warn("MCKR DIV not configured properly by firmware!\n");
}

static const struct clk_ops master_div_ops = {
	.prepare = clk_master_prepare,
	.is_prepared = clk_master_is_prepared,
	.recalc_rate = clk_master_div_recalc_rate,
	.save_context = clk_master_div_save_context,
	.restore_context = clk_master_div_restore_context,
};

static int clk_master_div_set_rate(struct clk_hw *hw, unsigned long rate,
				   unsigned long parent_rate)
{
	struct clk_master *master = to_clk_master(hw);
	const struct clk_master_characteristics *characteristics =
						master->characteristics;
	unsigned long flags;
	unsigned int mckr, tmp;
	int div, i;
	int ret;

	div = DIV_ROUND_CLOSEST(parent_rate, rate);
	if (div > ARRAY_SIZE(characteristics->divisors))
		return -EINVAL;

	for (i = 0; i < ARRAY_SIZE(characteristics->divisors); i++) {
		if (!characteristics->divisors[i])
			break;

		if (div == characteristics->divisors[i]) {
			div = i;
			break;
		}
	}

	if (i == ARRAY_SIZE(characteristics->divisors))
		return -EINVAL;

	spin_lock_irqsave(master->lock, flags);
	ret = regmap_read(master->regmap, master->layout->offset, &mckr);
	if (ret)
		goto unlock;

	tmp = mckr & master->layout->mask;
	tmp = (tmp >> MASTER_DIV_SHIFT) & MASTER_DIV_MASK;
	if (tmp == div)
		goto unlock;

	mckr &= ~(MASTER_DIV_MASK << MASTER_DIV_SHIFT);
	mckr |= (div << MASTER_DIV_SHIFT);
	ret = regmap_write(master->regmap, master->layout->offset, mckr);
	if (ret)
		goto unlock;

	while (!clk_master_ready(master))
		cpu_relax();
unlock:
	spin_unlock_irqrestore(master->lock, flags);

	return ret;
}

static int clk_master_div_determine_rate(struct clk_hw *hw,
					 struct clk_rate_request *req)
{
	struct clk_master *master = to_clk_master(hw);
	const struct clk_master_characteristics *characteristics =
						master->characteristics;
	struct clk_hw *parent;
	unsigned long parent_rate, tmp_rate, best_rate = 0;
	int i, best_diff = INT_MIN, tmp_diff;

	parent = clk_hw_get_parent(hw);
	if (!parent)
		return -EINVAL;

	parent_rate = clk_hw_get_rate(parent);
	if (!parent_rate)
		return -EINVAL;

	for (i = 0; i < ARRAY_SIZE(characteristics->divisors); i++) {
		if (!characteristics->divisors[i])
			break;

		tmp_rate = DIV_ROUND_CLOSEST_ULL(parent_rate,
						 characteristics->divisors[i]);
		tmp_diff = abs(tmp_rate - req->rate);

		if (!best_rate || best_diff > tmp_diff) {
			best_diff = tmp_diff;
			best_rate = tmp_rate;
		}

		if (!best_diff)
			break;
	}

	req->best_parent_rate = best_rate;
	req->best_parent_hw = parent;
	req->rate = best_rate;

	return 0;
}

static void clk_master_div_restore_context_chg(struct clk_hw *hw)
{
	struct clk_master *master = to_clk_master(hw);
	int ret;

	ret = clk_master_div_set_rate(hw, master->pms.rate,
				      master->pms.parent_rate);
	if (ret)
		pr_warn("Failed to restore MCK DIV clock\n");
}

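/*
 * Ops used when the master divider is allowed to change rate at run time,
 * i.e. the clock was not registered with CLK_SET_RATE_GATE (see
 * at91_clk_register_master_div()).
 */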
static const struct clk_ops master_div_ops_chg = {
	.prepare = clk_master_prepare,
	.is_prepared = clk_master_is_prepared,
	.recalc_rate = clk_master_div_recalc_rate,
	.determine_rate = clk_master_div_determine_rate,
	.set_rate = clk_master_div_set_rate,
	.save_context = clk_master_div_save_context,
	.restore_context = clk_master_div_restore_context_chg,
};

static void clk_sama7g5_master_best_diff(struct clk_rate_request *req,
					 struct clk_hw *parent,
					 unsigned long parent_rate,
					 long *best_rate,
					 long *best_diff,
					 u32 div)
{
	unsigned long tmp_rate, tmp_diff;

	if (div == MASTER_PRES_MAX)
		tmp_rate = parent_rate / 3;
	else
		tmp_rate = parent_rate >> div;

	tmp_diff = abs(req->rate - tmp_rate);

	if (*best_diff < 0 || *best_diff >= tmp_diff) {
		*best_rate = tmp_rate;
		*best_diff = tmp_diff;
		req->best_parent_rate = parent_rate;
		req->best_parent_hw = parent;
	}
}

static int clk_master_pres_determine_rate(struct clk_hw *hw,
					  struct clk_rate_request *req)
{
	struct clk_master *master = to_clk_master(hw);
	struct clk_rate_request req_parent = *req;
	const struct clk_master_characteristics *characteristics =
						master->characteristics;
	struct clk_hw *parent;
	long best_rate = LONG_MIN, best_diff = LONG_MIN;
	u32 pres;
	int i;

	if (master->chg_pid < 0)
		return -EOPNOTSUPP;

	parent = clk_hw_get_parent_by_index(hw, master->chg_pid);
	if (!parent)
		return -EOPNOTSUPP;

	for (i = 0; i <= MASTER_PRES_MAX; i++) {
		if (characteristics->have_div3_pres && i == MASTER_PRES_MAX)
			pres = 3;
		else
			pres = 1 << i;

		req_parent.rate = req->rate * pres;
		if (__clk_determine_rate(parent, &req_parent))
			continue;

		clk_sama7g5_master_best_diff(req, parent, req_parent.rate,
					     &best_diff, &best_rate, pres);
		if (!best_diff)
			break;
	}

	return 0;
}

static int clk_master_pres_set_rate(struct clk_hw *hw, unsigned long rate,
				    unsigned long parent_rate)
{
	struct clk_master *master = to_clk_master(hw);
	unsigned long flags;
	unsigned int pres, mckr, tmp;
	int ret;

	pres = DIV_ROUND_CLOSEST(parent_rate, rate);
	if (pres > MASTER_PRES_MAX)
		return -EINVAL;
	else if (pres == 3)
		pres = MASTER_PRES_MAX;
	else
		pres = ffs(pres) - 1;

	spin_lock_irqsave(master->lock, flags);
	ret = regmap_read(master->regmap, master->layout->offset, &mckr);
	if (ret)
		goto unlock;

	mckr &= master->layout->mask;
	tmp = (mckr >> master->layout->pres_shift) & MASTER_PRES_MASK;
	if (pres == tmp)
		goto unlock;

	mckr &= ~(MASTER_PRES_MASK << master->layout->pres_shift);
	mckr |= (pres << master->layout->pres_shift);
	ret = regmap_write(master->regmap, master->layout->offset, mckr);
	if (ret)
		goto unlock;

	while (!clk_master_ready(master))
		cpu_relax();
unlock:
	spin_unlock_irqrestore(master->lock, flags);

	return ret;
}

static unsigned long clk_master_pres_recalc_rate(struct clk_hw *hw,
						 unsigned long parent_rate)
{
	struct clk_master *master = to_clk_master(hw);
	const struct clk_master_characteristics *characteristics =
						master->characteristics;
	unsigned long flags;
	unsigned int val, pres;

	spin_lock_irqsave(master->lock, flags);
	regmap_read(master->regmap, master->layout->offset, &val);
	spin_unlock_irqrestore(master->lock, flags);

	pres = (val >> master->layout->pres_shift) & MASTER_PRES_MASK;
	if (pres == 3 && characteristics->have_div3_pres)
		pres = 3;
	else
		pres = (1 << pres);

	return DIV_ROUND_CLOSEST_ULL(parent_rate, pres);
}

static u8 clk_master_pres_get_parent(struct clk_hw *hw)
{
	struct clk_master *master = to_clk_master(hw);
	unsigned long flags;
	unsigned int mckr;

	spin_lock_irqsave(master->lock, flags);
	regmap_read(master->regmap, master->layout->offset, &mckr);
	spin_unlock_irqrestore(master->lock, flags);

	return mckr & AT91_PMC_CSS;
}

static int clk_master_pres_save_context(struct clk_hw *hw)
{
	struct clk_master *master = to_clk_master(hw);
	struct clk_hw *parent_hw = clk_hw_get_parent(hw);
	unsigned long flags;
	unsigned int val, pres;

	spin_lock_irqsave(master->lock, flags);
	regmap_read(master->regmap, master->layout->offset, &val);
	spin_unlock_irqrestore(master->lock, flags);

	val &= master->layout->mask;
	pres = (val >> master->layout->pres_shift) & MASTER_PRES_MASK;
	if (pres == MASTER_PRES_MAX && master->characteristics->have_div3_pres)
		pres = 3;
	else
		pres = (1 << pres);

	master->pms.parent = val & AT91_PMC_CSS;
	master->pms.parent_rate = clk_hw_get_rate(parent_hw);
	master->pms.rate = DIV_ROUND_CLOSEST_ULL(master->pms.parent_rate, pres);

	return 0;
}

static void clk_master_pres_restore_context(struct clk_hw *hw)
{
	struct clk_master *master = to_clk_master(hw);
	unsigned long flags;
	unsigned int val, pres;

	spin_lock_irqsave(master->lock, flags);
	regmap_read(master->regmap, master->layout->offset, &val);
	spin_unlock_irqrestore(master->lock, flags);

	val &= master->layout->mask;
	pres = (val >> master->layout->pres_shift) & MASTER_PRES_MASK;
	if (pres == MASTER_PRES_MAX && master->characteristics->have_div3_pres)
		pres = 3;
	else
		pres = (1 << pres);

	if (master->pms.rate !=
	    DIV_ROUND_CLOSEST_ULL(master->pms.parent_rate, pres) ||
	    (master->pms.parent != (val & AT91_PMC_CSS)))
		pr_warn("MCKR PRES was not configured properly by firmware!\n");
}

static void clk_master_pres_restore_context_chg(struct clk_hw *hw)
{
	struct clk_master *master = to_clk_master(hw);

	clk_master_pres_set_rate(hw, master->pms.rate, master->pms.parent_rate);
}

static const struct clk_ops master_pres_ops = {
	.prepare = clk_master_prepare,
	.is_prepared = clk_master_is_prepared,
	.recalc_rate = clk_master_pres_recalc_rate,
	.get_parent = clk_master_pres_get_parent,
	.save_context = clk_master_pres_save_context,
	.restore_context = clk_master_pres_restore_context,
};

static const struct clk_ops master_pres_ops_chg = {
	.prepare = clk_master_prepare,
	.is_prepared = clk_master_is_prepared,
	.determine_rate = clk_master_pres_determine_rate,
	.recalc_rate = clk_master_pres_recalc_rate,
	.get_parent = clk_master_pres_get_parent,
	.set_rate = clk_master_pres_set_rate,
	.save_context = clk_master_pres_save_context,
	.restore_context = clk_master_pres_restore_context_chg,
};

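/*
 * Common registration helper shared by the master prescaler and master
 * divider clocks below.
 */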
static struct clk_hw * __init
at91_clk_register_master_internal(struct regmap *regmap,
		const char *name, int num_parents,
		const char **parent_names,
		const struct clk_master_layout *layout,
		const struct clk_master_characteristics *characteristics,
		const struct clk_ops *ops, spinlock_t *lock, u32 flags,
		int chg_pid)
{
	struct clk_master *master;
	struct clk_init_data init;
	struct clk_hw *hw;
	int ret;

	if (!name || !num_parents || !parent_names || !lock)
		return ERR_PTR(-EINVAL);

	master = kzalloc(sizeof(*master), GFP_KERNEL);
	if (!master)
		return ERR_PTR(-ENOMEM);

	init.name = name;
	init.ops = ops;
	init.parent_names = parent_names;
	init.num_parents = num_parents;
	init.flags = flags;

	master->hw.init = &init;
	master->layout = layout;
	master->characteristics = characteristics;
	master->regmap = regmap;
	master->chg_pid = chg_pid;
	master->lock = lock;

	hw = &master->hw;
	ret = clk_hw_register(NULL, &master->hw);
	if (ret) {
		kfree(master);
		hw = ERR_PTR(ret);
	}

	return hw;
}

struct clk_hw * __init
at91_clk_register_master_pres(struct regmap *regmap,
		const char *name, int num_parents,
		const char **parent_names,
		const struct clk_master_layout *layout,
		const struct clk_master_characteristics *characteristics,
		spinlock_t *lock, u32 flags, int chg_pid)
{
	const struct clk_ops *ops;

	if (flags & CLK_SET_RATE_GATE)
		ops = &master_pres_ops;
	else
		ops = &master_pres_ops_chg;

	return at91_clk_register_master_internal(regmap, name, num_parents,
						 parent_names, layout,
						 characteristics, ops,
						 lock, flags, chg_pid);
}

struct clk_hw * __init
at91_clk_register_master_div(struct regmap *regmap,
		const char *name, const char *parent_name,
		const struct clk_master_layout *layout,
		const struct clk_master_characteristics *characteristics,
		spinlock_t *lock, u32 flags)
{
	const struct clk_ops *ops;

	if (flags & CLK_SET_RATE_GATE)
		ops = &master_div_ops;
	else
		ops = &master_div_ops_chg;

	return at91_clk_register_master_internal(regmap, name, 1,
						 &parent_name, layout,
						 characteristics, ops,
						 lock, flags, -EINVAL);
}

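/*
 * The ops below handle the SAMA7G5 master clocks, which are programmed
 * per ID through the AT91_PMC_MCR_V2 register.
 */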
static unsigned long clk_sama7g5_master_recalc_rate(struct clk_hw *hw,
						    unsigned long parent_rate)
{
	struct clk_master *master = to_clk_master(hw);

	return DIV_ROUND_CLOSEST_ULL(parent_rate, (1 << master->div));
}

static int clk_sama7g5_master_determine_rate(struct clk_hw *hw,
					     struct clk_rate_request *req)
{
	struct clk_master *master = to_clk_master(hw);
	struct clk_rate_request req_parent = *req;
	struct clk_hw *parent;
	long best_rate = LONG_MIN, best_diff = LONG_MIN;
	unsigned long parent_rate;
	unsigned int div, i;

	/* First: check the dividers of MCR. */
	for (i = 0; i < clk_hw_get_num_parents(hw); i++) {
		parent = clk_hw_get_parent_by_index(hw, i);
		if (!parent)
			continue;

		parent_rate = clk_hw_get_rate(parent);
		if (!parent_rate)
			continue;

		for (div = 0; div < MASTER_PRES_MAX + 1; div++) {
			clk_sama7g5_master_best_diff(req, parent, parent_rate,
						     &best_rate, &best_diff,
						     div);
			if (!best_diff)
				break;
		}

		if (!best_diff)
			break;
	}

	/* Second: try to request rate from changeable parent. */
	if (master->chg_pid < 0)
		goto end;

	parent = clk_hw_get_parent_by_index(hw, master->chg_pid);
	if (!parent)
		goto end;

	for (div = 0; div < MASTER_PRES_MAX + 1; div++) {
		if (div == MASTER_PRES_MAX)
			req_parent.rate = req->rate * 3;
		else
			req_parent.rate = req->rate << div;

		if (__clk_determine_rate(parent, &req_parent))
			continue;

		clk_sama7g5_master_best_diff(req, parent, req_parent.rate,
					     &best_rate, &best_diff, div);

		if (!best_diff)
			break;
	}

end:
	pr_debug("MCK: %s, best_rate = %ld, parent clk: %s @ %ld\n",
		 __func__, best_rate,
		 __clk_get_name((req->best_parent_hw)->clk),
		 req->best_parent_rate);

	if (best_rate < 0)
		return -EINVAL;

	req->rate = best_rate;

	return 0;
}

static u8 clk_sama7g5_master_get_parent(struct clk_hw *hw)
{
	struct clk_master *master = to_clk_master(hw);
	unsigned long flags;
	u8 index;

	spin_lock_irqsave(master->lock, flags);
	index = clk_mux_val_to_index(&master->hw, master->mux_table, 0,
				     master->parent);
	spin_unlock_irqrestore(master->lock, flags);

	return index;
}

static int clk_sama7g5_master_set_parent(struct clk_hw *hw, u8 index)
{
	struct clk_master *master = to_clk_master(hw);
	unsigned long flags;

	if (index >= clk_hw_get_num_parents(hw))
		return -EINVAL;

	spin_lock_irqsave(master->lock, flags);
	master->parent = clk_mux_index_to_val(master->mux_table, 0, index);
	spin_unlock_irqrestore(master->lock, flags);

	return 0;
}

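/*
 * Select the parent (CSS) and divider of the master identified by
 * master->id through AT91_PMC_MCR_V2, optionally enabling it.
 */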
static void clk_sama7g5_master_set(struct clk_master *master,
				   unsigned int status)
{
	unsigned long flags;
	unsigned int val, cparent;
	unsigned int enable = status ? AT91_PMC_MCR_V2_EN : 0;

	spin_lock_irqsave(master->lock, flags);

	regmap_write(master->regmap, AT91_PMC_MCR_V2,
		     AT91_PMC_MCR_V2_ID(master->id));
	regmap_read(master->regmap, AT91_PMC_MCR_V2, &val);
	regmap_update_bits(master->regmap, AT91_PMC_MCR_V2,
			   enable | AT91_PMC_MCR_V2_CSS | AT91_PMC_MCR_V2_DIV |
			   AT91_PMC_MCR_V2_CMD | AT91_PMC_MCR_V2_ID_MSK,
			   enable | (master->parent << PMC_MCR_CSS_SHIFT) |
			   (master->div << MASTER_DIV_SHIFT) |
			   AT91_PMC_MCR_V2_CMD |
			   AT91_PMC_MCR_V2_ID(master->id));

	cparent = (val & AT91_PMC_MCR_V2_CSS) >> PMC_MCR_CSS_SHIFT;

	/* Wait here only if parent is being changed. */
	while ((cparent != master->parent) && !clk_master_ready(master))
		cpu_relax();

	spin_unlock_irqrestore(master->lock, flags);
}

static int clk_sama7g5_master_enable(struct clk_hw *hw)
{
	struct clk_master *master = to_clk_master(hw);

	clk_sama7g5_master_set(master, 1);

	return 0;
}

static void clk_sama7g5_master_disable(struct clk_hw *hw)
{
	struct clk_master *master = to_clk_master(hw);
	unsigned long flags;

	spin_lock_irqsave(master->lock, flags);

	regmap_write(master->regmap, AT91_PMC_MCR_V2, master->id);
	regmap_update_bits(master->regmap, AT91_PMC_MCR_V2,
			   AT91_PMC_MCR_V2_EN | AT91_PMC_MCR_V2_CMD |
			   AT91_PMC_MCR_V2_ID_MSK,
			   AT91_PMC_MCR_V2_CMD |
			   AT91_PMC_MCR_V2_ID(master->id));

	spin_unlock_irqrestore(master->lock, flags);
}

static int clk_sama7g5_master_is_enabled(struct clk_hw *hw)
{
	struct clk_master *master = to_clk_master(hw);
	unsigned long flags;
	unsigned int val;

	spin_lock_irqsave(master->lock, flags);

	regmap_write(master->regmap, AT91_PMC_MCR_V2, master->id);
	regmap_read(master->regmap, AT91_PMC_MCR_V2, &val);

	spin_unlock_irqrestore(master->lock, flags);

	return !!(val & AT91_PMC_MCR_V2_EN);
}

static int clk_sama7g5_master_set_rate(struct clk_hw *hw, unsigned long rate,
				       unsigned long parent_rate)
{
	struct clk_master *master = to_clk_master(hw);
	unsigned long div, flags;

	div = DIV_ROUND_CLOSEST(parent_rate, rate);
	if ((div > (1 << (MASTER_PRES_MAX - 1))) || (div & (div - 1)))
		return -EINVAL;

	if (div == 3)
		div = MASTER_PRES_MAX;
	else
		div = ffs(div) - 1;

	spin_lock_irqsave(master->lock, flags);
	master->div = div;
	spin_unlock_irqrestore(master->lock, flags);

	return 0;
}

static int clk_sama7g5_master_save_context(struct clk_hw *hw)
{
	struct clk_master *master = to_clk_master(hw);

	master->pms.status = clk_sama7g5_master_is_enabled(hw);

	return 0;
}

static void clk_sama7g5_master_restore_context(struct clk_hw *hw)
{
	struct clk_master *master = to_clk_master(hw);

	if (master->pms.status)
		clk_sama7g5_master_set(master, master->pms.status);
}

static const struct clk_ops sama7g5_master_ops = {
	.enable = clk_sama7g5_master_enable,
	.disable = clk_sama7g5_master_disable,
	.is_enabled = clk_sama7g5_master_is_enabled,
	.recalc_rate = clk_sama7g5_master_recalc_rate,
	.determine_rate = clk_sama7g5_master_determine_rate,
	.set_rate = clk_sama7g5_master_set_rate,
	.get_parent = clk_sama7g5_master_get_parent,
	.set_parent = clk_sama7g5_master_set_parent,
	.save_context = clk_sama7g5_master_save_context,
	.restore_context = clk_sama7g5_master_restore_context,
};

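/*
 * Register a SAMA7G5 master clock instance, identified by id and
 * controlled through the AT91_PMC_MCR_V2 register.
 */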
struct clk_hw * __init
at91_clk_sama7g5_register_master(struct regmap *regmap,
				 const char *name, int num_parents,
				 const char **parent_names,
				 u32 *mux_table,
				 spinlock_t *lock, u8 id,
				 bool critical, int chg_pid)
{
	struct clk_master *master;
	struct clk_hw *hw;
	struct clk_init_data init;
	unsigned long flags;
	unsigned int val;
	int ret;

	if (!name || !num_parents || !parent_names || !mux_table ||
	    !lock || id > MASTER_MAX_ID)
		return ERR_PTR(-EINVAL);

	master = kzalloc(sizeof(*master), GFP_KERNEL);
	if (!master)
		return ERR_PTR(-ENOMEM);

	init.name = name;
	init.ops = &sama7g5_master_ops;
	init.parent_names = parent_names;
	init.num_parents = num_parents;
	init.flags = CLK_SET_RATE_GATE | CLK_SET_PARENT_GATE;
	if (chg_pid >= 0)
		init.flags |= CLK_SET_RATE_PARENT;
	if (critical)
		init.flags |= CLK_IS_CRITICAL;

	master->hw.init = &init;
	master->regmap = regmap;
	master->id = id;
	master->chg_pid = chg_pid;
	master->lock = lock;
	master->mux_table = mux_table;

	spin_lock_irqsave(master->lock, flags);
	regmap_write(master->regmap, AT91_PMC_MCR_V2, master->id);
	regmap_read(master->regmap, AT91_PMC_MCR_V2, &val);
	master->parent = (val & AT91_PMC_MCR_V2_CSS) >> PMC_MCR_CSS_SHIFT;
	master->div = (val & AT91_PMC_MCR_V2_DIV) >> MASTER_DIV_SHIFT;
	spin_unlock_irqrestore(master->lock, flags);

	hw = &master->hw;
	ret = clk_hw_register(NULL, &master->hw);
	if (ret) {
		kfree(master);
		hw = ERR_PTR(ret);
	}

	return hw;
}

const struct clk_master_layout at91rm9200_master_layout = {
	.mask = 0x31F,
	.pres_shift = 2,
	.offset = AT91_PMC_MCKR,
};

const struct clk_master_layout at91sam9x5_master_layout = {
	.mask = 0x373,
	.pres_shift = 4,
	.offset = AT91_PMC_MCKR,
};