// SPDX-License-Identifier: GPL-2.0-or-later
/*
 * Copyright (C) 2013 Boris BREZILLON <b.brezillon@overkiz.com>
 */
6 #include <linux/clk-provider.h>
7 #include <linux/clkdev.h>
9 #include <linux/clk/at91_pmc.h>
11 #include <linux/mfd/syscon.h>
12 #include <linux/regmap.h>
/* Width of the PRES field; PRES == MASTER_PRES_MAX can mean divide-by-3 on
 * SoCs with have_div3_pres (see clk_master_pres_recalc_rate()). */
#define MASTER_PRES_MASK 0x7
#define MASTER_PRES_MAX MASTER_PRES_MASK
/* Position and width of the DIV field in MCKR/MCR. */
#define MASTER_DIV_SHIFT 8
#define MASTER_DIV_MASK 0x7

/* Position of the clock-source-selection (CSS) field in the sama7g5 MCR. */
#define PMC_MCR_CSS_SHIFT (16)

/* Highest valid master clock id accepted by the sama7g5 constructor. */
#define MASTER_MAX_ID 4

/* Get the driver-private struct clk_master from its embedded clk_hw. */
#define to_clk_master(hw) container_of(hw, struct clk_master, hw)
	/* PMC register map used for every MCKR/MCR access. */
	struct regmap *regmap;
	/* Per-SoC register layout (offset, mask, pres shift). */
	const struct clk_master_layout *layout;
	/* Per-SoC divisor table and characterised output-rate range. */
	const struct clk_master_characteristics *characteristics;
	/* Parent/rate/enable state saved for suspend/resume. */
	struct at91_clk_pms pms;

/* MCK div reference to be used by notifier. */
static struct clk_master *master_div;
46 static inline bool clk_master_ready(struct clk_master *master)
48 unsigned int bit = master->id ? AT91_PMC_MCKXRDY : AT91_PMC_MCKRDY;
51 regmap_read(master->regmap, AT91_PMC_SR, &status);
53 return !!(status & bit);
/* clk_ops.prepare: busy-wait until the master clock reports ready. */
static int clk_master_prepare(struct clk_hw *hw)
	struct clk_master *master = to_clk_master(hw);

	/* Hold the shared PMC lock so MCKR is not changed while we poll. */
	spin_lock_irqsave(master->lock, flags);

	/* Spin until ready; loop body is elided in this excerpt. */
	while (!clk_master_ready(master))

	spin_unlock_irqrestore(master->lock, flags);
/* clk_ops.is_prepared: sample the ready bit once, under the PMC lock. */
static int clk_master_is_prepared(struct clk_hw *hw)
	struct clk_master *master = to_clk_master(hw);

	spin_lock_irqsave(master->lock, flags);
	status = clk_master_ready(master);
	spin_unlock_irqrestore(master->lock, flags);
/*
 * clk_ops.recalc_rate for the MCK divider: read MCKR under the PMC lock,
 * decode the DIV field via the per-SoC divisor table and divide the parent
 * rate by it. Rates outside the characterised range are only warned about,
 * never rejected.
 */
static unsigned long clk_master_div_recalc_rate(struct clk_hw *hw,
						unsigned long parent_rate)
	unsigned long flags, rate = parent_rate;
	struct clk_master *master = to_clk_master(hw);
	const struct clk_master_layout *layout = master->layout;
	const struct clk_master_characteristics *characteristics =
		master->characteristics;

	/* Register access is serialised by the shared PMC spinlock. */
	spin_lock_irqsave(master->lock, flags);
	regmap_read(master->regmap, master->layout->offset, &mckr);
	spin_unlock_irqrestore(master->lock, flags);

	/* DIV is an index into the per-SoC divisor table. */
	div = (mckr >> MASTER_DIV_SHIFT) & MASTER_DIV_MASK;

	rate /= characteristics->divisors[div];

	if (rate < characteristics->output.min)
		pr_warn("master clk div is underclocked");
	else if (rate > characteristics->output.max)
		pr_warn("master clk div is overclocked");
/*
 * clk_ops.save_context: record the current parent rate and the effective
 * divided rate in master->pms so restore_context() can verify them after
 * resume.
 */
static int clk_master_div_save_context(struct clk_hw *hw)
	struct clk_master *master = to_clk_master(hw);
	struct clk_hw *parent_hw = clk_hw_get_parent(hw);
	unsigned int mckr, div;

	spin_lock_irqsave(master->lock, flags);
	regmap_read(master->regmap, master->layout->offset, &mckr);
	spin_unlock_irqrestore(master->lock, flags);

	/* Translate the DIV field index into the actual divisor value. */
	mckr &= master->layout->mask;
	div = (mckr >> MASTER_DIV_SHIFT) & MASTER_DIV_MASK;
	div = master->characteristics->divisors[div];

	master->pms.parent_rate = clk_hw_get_rate(parent_hw);
	master->pms.rate = DIV_ROUND_CLOSEST(master->pms.parent_rate, div);
/*
 * clk_ops.restore_context: after resume, re-read MCKR and warn if the
 * divider no longer matches what save_context() recorded. The hardware is
 * only checked here, not reprogrammed.
 */
static void clk_master_div_restore_context(struct clk_hw *hw)
	struct clk_master *master = to_clk_master(hw);

	spin_lock_irqsave(master->lock, flags);
	regmap_read(master->regmap, master->layout->offset, &mckr);
	spin_unlock_irqrestore(master->lock, flags);

	mckr &= master->layout->mask;
	div = (mckr >> MASTER_DIV_SHIFT) & MASTER_DIV_MASK;
	div = master->characteristics->divisors[div];

	if (div != DIV_ROUND_CLOSEST(master->pms.parent_rate, master->pms.rate))
		pr_warn("MCKR DIV not configured properly by firmware!\n");
/* Ops used when the divider rate is gated (see at91_clk_register_master_div). */
static const struct clk_ops master_div_ops = {
	.prepare = clk_master_prepare,
	.is_prepared = clk_master_is_prepared,
	.recalc_rate = clk_master_div_recalc_rate,
	.save_context = clk_master_div_save_context,
	.restore_context = clk_master_div_restore_context,
/*
 * Program the MCKR DIV field to the table entry matching @div, falling back
 * to the largest supported divisor when there is no exact match, then wait
 * for the clock to report ready and cache the divisor in master->div.
 *
 * This function must be called with lock acquired.
 */
static int clk_master_div_set(struct clk_master *master,
			      unsigned long parent_rate, int div)
	const struct clk_master_characteristics *characteristics =
		master->characteristics;
	unsigned long rate = parent_rate;
	unsigned int max_div = 0, div_index = 0, max_div_index = 0;
	unsigned int i, mckr, tmp;

	/* Scan for an exact divisor match while tracking the maximum entry. */
	for (i = 0; i < ARRAY_SIZE(characteristics->divisors); i++) {
		if (!characteristics->divisors[i])

		if (div == characteristics->divisors[i])

		if (max_div < characteristics->divisors[i]) {
			max_div = characteristics->divisors[i];

	/* No exact match: fall back to the largest supported divisor. */
	div_index = max_div_index;

	ret = regmap_read(master->regmap, master->layout->offset, &mckr);

	/* Nothing to do when the requested index is already programmed. */
	mckr &= master->layout->mask;
	tmp = (mckr >> MASTER_DIV_SHIFT) & MASTER_DIV_MASK;
	if (tmp == div_index)

	rate /= characteristics->divisors[div_index];
	if (rate < characteristics->output.min)
		pr_warn("master clk div is underclocked");
	else if (rate > characteristics->output.max)
		pr_warn("master clk div is overclocked");

	mckr &= ~(MASTER_DIV_MASK << MASTER_DIV_SHIFT);
	mckr |= (div_index << MASTER_DIV_SHIFT);
	ret = regmap_write(master->regmap, master->layout->offset, mckr);

	while (!clk_master_ready(master))

	/* Cache the divisor actually in effect for recalc_rate_chg(). */
	master->div = characteristics->divisors[div_index];
217 static unsigned long clk_master_div_recalc_rate_chg(struct clk_hw *hw,
218 unsigned long parent_rate)
220 struct clk_master *master = to_clk_master(hw);
222 return DIV_ROUND_CLOSEST_ULL(parent_rate, master->div);
/*
 * clk_ops.restore_context for the changeable divider: reprogram the divider
 * from the saved parent rate and rate via clk_master_div_set() (part of the
 * argument list is elided in this excerpt) and warn on failure.
 */
static void clk_master_div_restore_context_chg(struct clk_hw *hw)
	struct clk_master *master = to_clk_master(hw);

	spin_lock_irqsave(master->lock, flags);
	ret = clk_master_div_set(master, master->pms.parent_rate,
				 DIV_ROUND_CLOSEST(master->pms.parent_rate,
	spin_unlock_irqrestore(master->lock, flags);
	pr_warn("Failed to restore MCK DIV clock\n");
/* Ops used when callers may change the divider rate (no CLK_SET_RATE_GATE). */
static const struct clk_ops master_div_ops_chg = {
	.prepare = clk_master_prepare,
	.is_prepared = clk_master_is_prepared,
	.recalc_rate = clk_master_div_recalc_rate_chg,
	.save_context = clk_master_div_save_context,
	.restore_context = clk_master_div_restore_context_chg,
/*
 * Rate-change notifier for the MCK DIV clock. Before a parent (PLL) rate
 * change it switches to the registered safe divider so the MCK domain
 * cannot be overclocked while the PLL settles; after the change it picks
 * the divisor giving the highest rate within the characterised maximum.
 */
static int clk_master_div_notifier_fn(struct notifier_block *notifier,
				      unsigned long code, void *data)
	const struct clk_master_characteristics *characteristics =
		master_div->characteristics;
	struct clk_notifier_data *cnd = data;
	unsigned long flags, new_parent_rate, new_rate;
	unsigned int mckr, div, new_div = 0;

	spin_lock_irqsave(master_div->lock, flags);
	case PRE_RATE_CHANGE:
		/*
		 * We want to avoid any overclocking of MCK DIV domain. To do
		 * this we set a safe divider (the underclocking is not of
		 * interest as we can go as low as 32KHz). The relation
		 * b/w this clock and its parents are as follows:
		 *
		 * FRAC PLL -> DIV PLL -> MCK DIV
		 *
		 * With the proper safe divider we should be good even with FRAC
		 * PLL at its maximum value.
		 */
		ret = regmap_read(master_div->regmap, master_div->layout->offset,
			ret = NOTIFY_STOP_MASK;

		mckr &= master_div->layout->mask;
		div = (mckr >> MASTER_DIV_SHIFT) & MASTER_DIV_MASK;

		/* Switch to safe divider. */
		clk_master_div_set(master_div,
				   cnd->old_rate * characteristics->divisors[div],
				   master_div->safe_div);
	case POST_RATE_CHANGE:
		/*
		 * At this point we want to restore MCK DIV domain to its maximum
		 * rate within the characterised limits.
		 */
		ret = regmap_read(master_div->regmap, master_div->layout->offset,
			ret = NOTIFY_STOP_MASK;

		/* The safe divider is still applied: recover the parent rate. */
		mckr &= master_div->layout->mask;
		div = (mckr >> MASTER_DIV_SHIFT) & MASTER_DIV_MASK;
		new_parent_rate = cnd->new_rate * characteristics->divisors[div];

		/* Pick the divisor whose rate is closest below output.max. */
		for (i = 0; i < ARRAY_SIZE(characteristics->divisors); i++) {
			if (!characteristics->divisors[i])

			new_rate = DIV_ROUND_CLOSEST_ULL(new_parent_rate,
							 characteristics->divisors[i]);

			tmp_diff = characteristics->output.max - new_rate;

			if (best_diff < 0 || best_diff > tmp_diff) {
				new_div = characteristics->divisors[i];
				best_diff = tmp_diff;

		ret = NOTIFY_STOP_MASK;

		/* Update the div to preserve MCK DIV clock rate. */
		clk_master_div_set(master_div, new_parent_rate,

	spin_unlock_irqrestore(master_div->lock, flags);
/* Registered on the parent clock by at91_clk_register_master_div(). */
static struct notifier_block clk_master_div_notifier = {
	.notifier_call = clk_master_div_notifier_fn,
/*
 * Evaluate one MCR prescaler candidate for determine_rate: compute the rate
 * @parent would give with prescaler @div (MASTER_PRES_MAX encodes a
 * divide-by-3, other values a right shift) and record it in @req when it is
 * at least as close to the requested rate as the best match so far.
 */
static void clk_sama7g5_master_best_diff(struct clk_rate_request *req,
					 struct clk_hw *parent,
					 unsigned long parent_rate,
	unsigned long tmp_rate, tmp_diff;

	if (div == MASTER_PRES_MAX)
		tmp_rate = parent_rate / 3;
		tmp_rate = parent_rate >> div;

	tmp_diff = abs(req->rate - tmp_rate);

	/* ">=" makes later candidates win ties. */
	if (*best_diff < 0 || *best_diff >= tmp_diff) {
		*best_rate = tmp_rate;
		*best_diff = tmp_diff;
		req->best_parent_rate = parent_rate;
		req->best_parent_hw = parent;
/*
 * clk_ops.recalc_rate for the master prescaler: parent rate divided by the
 * decoded PRES value. PRES == MASTER_PRES_MAX is special-cased on SoCs with
 * have_div3_pres (the assignment in that branch is elided in this excerpt).
 */
static unsigned long clk_master_pres_recalc_rate(struct clk_hw *hw,
						 unsigned long parent_rate)
	struct clk_master *master = to_clk_master(hw);
	const struct clk_master_characteristics *characteristics =
		master->characteristics;
	unsigned int val, pres;

	spin_lock_irqsave(master->lock, flags);
	regmap_read(master->regmap, master->layout->offset, &val);
	spin_unlock_irqrestore(master->lock, flags);

	val &= master->layout->mask;
	pres = (val >> master->layout->pres_shift) & MASTER_PRES_MASK;
	if (pres == MASTER_PRES_MAX && characteristics->have_div3_pres)

	return DIV_ROUND_CLOSEST_ULL(parent_rate, pres);
/* clk_ops.get_parent: the CSS field of MCKR selects the parent directly. */
static u8 clk_master_pres_get_parent(struct clk_hw *hw)
	struct clk_master *master = to_clk_master(hw);

	spin_lock_irqsave(master->lock, flags);
	regmap_read(master->regmap, master->layout->offset, &mckr);
	spin_unlock_irqrestore(master->lock, flags);

	mckr &= master->layout->mask;

	return mckr & AT91_PMC_CSS;
/*
 * clk_ops.save_context: record the parent selection (CSS), the parent rate
 * and the resulting prescaled rate for verification after resume.
 */
static int clk_master_pres_save_context(struct clk_hw *hw)
	struct clk_master *master = to_clk_master(hw);
	struct clk_hw *parent_hw = clk_hw_get_parent(hw);
	unsigned int val, pres;

	spin_lock_irqsave(master->lock, flags);
	regmap_read(master->regmap, master->layout->offset, &val);
	spin_unlock_irqrestore(master->lock, flags);

	/* Same PRES decoding as recalc_rate; div3 branch elided here. */
	val &= master->layout->mask;
	pres = (val >> master->layout->pres_shift) & MASTER_PRES_MASK;
	if (pres == MASTER_PRES_MAX && master->characteristics->have_div3_pres)

	master->pms.parent = val & AT91_PMC_CSS;
	master->pms.parent_rate = clk_hw_get_rate(parent_hw);
	master->pms.rate = DIV_ROUND_CLOSEST_ULL(master->pms.parent_rate, pres);
/*
 * clk_ops.restore_context: warn if the PRES/CSS state found after resume no
 * longer produces the rate and parent saved by save_context(). Verification
 * only — the register is not rewritten.
 */
static void clk_master_pres_restore_context(struct clk_hw *hw)
	struct clk_master *master = to_clk_master(hw);
	unsigned int val, pres;

	spin_lock_irqsave(master->lock, flags);
	regmap_read(master->regmap, master->layout->offset, &val);
	spin_unlock_irqrestore(master->lock, flags);

	val &= master->layout->mask;
	pres = (val >> master->layout->pres_shift) & MASTER_PRES_MASK;
	if (pres == MASTER_PRES_MAX && master->characteristics->have_div3_pres)

	if (master->pms.rate !=
	    DIV_ROUND_CLOSEST_ULL(master->pms.parent_rate, pres) ||
	    (master->pms.parent != (val & AT91_PMC_CSS)))
		pr_warn("MCKR PRES was not configured properly by firmware!\n");
/* Ops for the master prescaler clock (read-only rate, muxable parent). */
static const struct clk_ops master_pres_ops = {
	.prepare = clk_master_prepare,
	.is_prepared = clk_master_is_prepared,
	.recalc_rate = clk_master_pres_recalc_rate,
	.get_parent = clk_master_pres_get_parent,
	.save_context = clk_master_pres_save_context,
	.restore_context = clk_master_pres_restore_context,
/*
 * Common registration helper for the master pres/div clocks. Validates
 * arguments, fills clk_init_data (parents may be given either as names or
 * as clk_hw pointers) and, for the changeable divider ops, seeds
 * master->div from the DIV field currently programmed in hardware before
 * registering the clk_hw.
 */
static struct clk_hw * __init
at91_clk_register_master_internal(struct regmap *regmap,
		const char *name, int num_parents,
		const char **parent_names,
		struct clk_hw **parent_hws,
		const struct clk_master_layout *layout,
		const struct clk_master_characteristics *characteristics,
		const struct clk_ops *ops, spinlock_t *lock, u32 flags)
	struct clk_master *master;
	struct clk_init_data init = {};
	unsigned long irqflags;

	/* One of parent_names/parent_hws must be set, and a lock is required. */
	if (!name || !num_parents || !(parent_names || parent_hws) || !lock)
		return ERR_PTR(-EINVAL);

	master = kzalloc(sizeof(*master), GFP_KERNEL);
		return ERR_PTR(-ENOMEM);

	init.parent_hws = (const struct clk_hw **)parent_hws;
	init.parent_names = parent_names;
	init.num_parents = num_parents;

	master->hw.init = &init;
	master->layout = layout;
	master->characteristics = characteristics;
	master->regmap = regmap;

	/* Changeable divider: cache the divisor currently in MCKR. */
	if (ops == &master_div_ops_chg) {
		spin_lock_irqsave(master->lock, irqflags);
		regmap_read(master->regmap, master->layout->offset, &mckr);
		spin_unlock_irqrestore(master->lock, irqflags);

		mckr &= layout->mask;
		mckr = (mckr >> MASTER_DIV_SHIFT) & MASTER_DIV_MASK;
		master->div = characteristics->divisors[mckr];

	ret = clk_hw_register(NULL, &master->hw);
/*
 * Public constructor for the master prescaler clock; thin wrapper around
 * at91_clk_register_master_internal() using CLK_SET_RATE_GATE.
 */
struct clk_hw * __init
at91_clk_register_master_pres(struct regmap *regmap,
		const char *name, int num_parents,
		const char **parent_names,
		struct clk_hw **parent_hws,
		const struct clk_master_layout *layout,
		const struct clk_master_characteristics *characteristics,
	return at91_clk_register_master_internal(regmap, name, num_parents,
			parent_names, parent_hws, layout,
			lock, CLK_SET_RATE_GATE);
/*
 * Public constructor for the MCK divider clock. Chooses gated vs changeable
 * ops from CLK_SET_RATE_GATE in @flags. When @safe_div is non-zero the
 * newly registered clock becomes the global master_div and the safe-divider
 * rate-change notifier is hooked onto it.
 */
struct clk_hw * __init
at91_clk_register_master_div(struct regmap *regmap,
		const char *name, const char *parent_name,
		struct clk_hw *parent_hw, const struct clk_master_layout *layout,
		const struct clk_master_characteristics *characteristics,
		spinlock_t *lock, u32 flags, u32 safe_div)
	const struct clk_ops *ops;

	if (flags & CLK_SET_RATE_GATE)
		ops = &master_div_ops;
		ops = &master_div_ops_chg;

	hw = at91_clk_register_master_internal(regmap, name, 1,
			parent_name ? &parent_name : NULL,
			parent_hw ? &parent_hw : NULL, layout,
			characteristics, ops,

	/* Arm the safe-divider notifier only on successful registration. */
	if (!IS_ERR(hw) && safe_div) {
		master_div = to_clk_master(hw);
		master_div->safe_div = safe_div;
		clk_notifier_register(hw->clk,
				      &clk_master_div_notifier);
578 clk_sama7g5_master_recalc_rate(struct clk_hw *hw,
579 unsigned long parent_rate)
581 struct clk_master *master = to_clk_master(hw);
583 return DIV_ROUND_CLOSEST_ULL(parent_rate, (1 << master->div));
/*
 * clk_ops.determine_rate for the sama7g5 master clock. Two passes: first
 * every parent at its current rate is tried against all MCR prescalers;
 * then, if a changeable parent (chg_pid) exists, that parent is asked via
 * __clk_determine_rate() for rates that would hit the target through each
 * prescaler. The best candidate is recorded in @req.
 */
static int clk_sama7g5_master_determine_rate(struct clk_hw *hw,
					     struct clk_rate_request *req)
	struct clk_master *master = to_clk_master(hw);
	struct clk_hw *parent;
	long best_rate = LONG_MIN, best_diff = LONG_MIN;
	unsigned long parent_rate;

	/* First: check the dividers of MCR. */
	for (i = 0; i < clk_hw_get_num_parents(hw); i++) {
		parent = clk_hw_get_parent_by_index(hw, i);
		parent_rate = clk_hw_get_rate(parent);
		for (div = 0; div < MASTER_PRES_MAX + 1; div++) {
			clk_sama7g5_master_best_diff(req, parent, parent_rate,
						     &best_rate, &best_diff,

	/* Second: try to request rate from changeable parent. */
	if (master->chg_pid < 0)

	parent = clk_hw_get_parent_by_index(hw, master->chg_pid);
	for (div = 0; div < MASTER_PRES_MAX + 1; div++) {
		struct clk_rate_request req_parent;
		unsigned long req_rate;

		/* Invert the prescaler to get the parent rate we would need. */
		if (div == MASTER_PRES_MAX)
			req_rate = req->rate * 3;
			req_rate = req->rate << div;

		clk_hw_forward_rate_request(hw, req, parent, &req_parent, req_rate);
		if (__clk_determine_rate(parent, &req_parent))

		clk_sama7g5_master_best_diff(req, parent, req_parent.rate,
					     &best_rate, &best_diff, div);

	pr_debug("MCK: %s, best_rate = %ld, parent clk: %s @ %ld\n",
		 __clk_get_name((req->best_parent_hw)->clk),
		 req->best_parent_rate);

	req->rate = best_rate;
/* clk_ops.get_parent: map the hardware mux value to an index via mux_table. */
static u8 clk_sama7g5_master_get_parent(struct clk_hw *hw)
	struct clk_master *master = to_clk_master(hw);

	spin_lock_irqsave(master->lock, flags);
	index = clk_mux_val_to_index(&master->hw, master->mux_table, 0,
	spin_unlock_irqrestore(master->lock, flags);
/*
 * clk_ops.set_parent: only caches the mux value in master->parent; the
 * hardware CSS field is written later by clk_sama7g5_master_set().
 */
static int clk_sama7g5_master_set_parent(struct clk_hw *hw, u8 index)
	struct clk_master *master = to_clk_master(hw);

	if (index >= clk_hw_get_num_parents(hw))

	spin_lock_irqsave(master->lock, flags);
	master->parent = clk_mux_index_to_val(master->mux_table, 0, index);
	spin_unlock_irqrestore(master->lock, flags);
/*
 * Commit the cached parent/div/enable state to MCR_V2 for master->id:
 * select the id, read the old value, then update CSS/DIV/EN with the CMD
 * bit set. Waits for the ready bit only when the parent actually changes.
 */
static void clk_sama7g5_master_set(struct clk_master *master,
	unsigned int val, cparent;
	unsigned int enable = status ? AT91_PMC_MCR_V2_EN : 0;
	unsigned int parent = master->parent << PMC_MCR_CSS_SHIFT;
	unsigned int div = master->div << MASTER_DIV_SHIFT;

	spin_lock_irqsave(master->lock, flags);

	/* Select the per-id register before reading/updating it. */
	regmap_write(master->regmap, AT91_PMC_MCR_V2,
		     AT91_PMC_MCR_V2_ID(master->id));
	regmap_read(master->regmap, AT91_PMC_MCR_V2, &val);
	regmap_update_bits(master->regmap, AT91_PMC_MCR_V2,
			   enable | AT91_PMC_MCR_V2_CSS | AT91_PMC_MCR_V2_DIV |
			   AT91_PMC_MCR_V2_CMD | AT91_PMC_MCR_V2_ID_MSK,
			   enable | parent | div | AT91_PMC_MCR_V2_CMD |
			   AT91_PMC_MCR_V2_ID(master->id));

	cparent = (val & AT91_PMC_MCR_V2_CSS) >> PMC_MCR_CSS_SHIFT;

	/* Wait here only if parent is being changed. */
	while ((cparent != master->parent) && !clk_master_ready(master))

	spin_unlock_irqrestore(master->lock, flags);
/* clk_ops.enable: commit cached state with the enable flag set. */
static int clk_sama7g5_master_enable(struct clk_hw *hw)
	struct clk_master *master = to_clk_master(hw);

	clk_sama7g5_master_set(master, 1);
/* clk_ops.disable: clear EN for this master id with a CMD write. */
static void clk_sama7g5_master_disable(struct clk_hw *hw)
	struct clk_master *master = to_clk_master(hw);

	spin_lock_irqsave(master->lock, flags);

	/* Select the id, then clear EN while issuing the command. */
	regmap_write(master->regmap, AT91_PMC_MCR_V2, master->id);
	regmap_update_bits(master->regmap, AT91_PMC_MCR_V2,
			   AT91_PMC_MCR_V2_EN | AT91_PMC_MCR_V2_CMD |
			   AT91_PMC_MCR_V2_ID_MSK,
			   AT91_PMC_MCR_V2_CMD |
			   AT91_PMC_MCR_V2_ID(master->id));

	spin_unlock_irqrestore(master->lock, flags);
/* clk_ops.is_enabled: select the id, read MCR_V2 and test the EN bit. */
static int clk_sama7g5_master_is_enabled(struct clk_hw *hw)
	struct clk_master *master = to_clk_master(hw);

	spin_lock_irqsave(master->lock, flags);

	regmap_write(master->regmap, AT91_PMC_MCR_V2, master->id);
	regmap_read(master->regmap, AT91_PMC_MCR_V2, &val);

	spin_unlock_irqrestore(master->lock, flags);

	return !!(val & AT91_PMC_MCR_V2_EN);
/*
 * clk_ops.set_rate: only power-of-two dividers up to 2^(MASTER_PRES_MAX-1)
 * are representable (MASTER_PRES_MAX itself is a special case). The body of
 * the locked section is elided in this excerpt.
 */
static int clk_sama7g5_master_set_rate(struct clk_hw *hw, unsigned long rate,
				       unsigned long parent_rate)
	struct clk_master *master = to_clk_master(hw);
	unsigned long div, flags;

	div = DIV_ROUND_CLOSEST(parent_rate, rate);
	/* Reject non-power-of-two or out-of-range dividers. */
	if ((div > (1 << (MASTER_PRES_MAX - 1))) || (div & (div - 1)))

		div = MASTER_PRES_MAX;

	spin_lock_irqsave(master->lock, flags);
	spin_unlock_irqrestore(master->lock, flags);
/* clk_ops.save_context: remember whether the clock was enabled. */
static int clk_sama7g5_master_save_context(struct clk_hw *hw)
	struct clk_master *master = to_clk_master(hw);

	master->pms.status = clk_sama7g5_master_is_enabled(hw);
/* clk_ops.restore_context: re-commit state only if it was enabled before. */
static void clk_sama7g5_master_restore_context(struct clk_hw *hw)
	struct clk_master *master = to_clk_master(hw);

	if (master->pms.status)
		clk_sama7g5_master_set(master, master->pms.status);
/* Full ops set for the sama7g5 per-id master clocks (MCR_V2 based). */
static const struct clk_ops sama7g5_master_ops = {
	.enable = clk_sama7g5_master_enable,
	.disable = clk_sama7g5_master_disable,
	.is_enabled = clk_sama7g5_master_is_enabled,
	.recalc_rate = clk_sama7g5_master_recalc_rate,
	.determine_rate = clk_sama7g5_master_determine_rate,
	.set_rate = clk_sama7g5_master_set_rate,
	.get_parent = clk_sama7g5_master_get_parent,
	.set_parent = clk_sama7g5_master_set_parent,
	.save_context = clk_sama7g5_master_save_context,
	.restore_context = clk_sama7g5_master_restore_context,
/*
 * Constructor for one sama7g5 master clock (MCR_V2 based). Validates
 * arguments (including the id bound), fills clk_init_data — the extra flag
 * lines below are applied conditionally in branches elided from this
 * excerpt — seeds the cached parent/div from the current MCR_V2 contents,
 * then registers the clk_hw.
 */
struct clk_hw * __init
at91_clk_sama7g5_register_master(struct regmap *regmap,
		const char *name, int num_parents,
		const char **parent_names,
		struct clk_hw **parent_hws,
		spinlock_t *lock, u8 id,
		bool critical, int chg_pid)
	struct clk_master *master;
	struct clk_init_data init = {};

	if (!name || !num_parents || !(parent_names || parent_hws) || !mux_table ||
	    !lock || id > MASTER_MAX_ID)
		return ERR_PTR(-EINVAL);

	master = kzalloc(sizeof(*master), GFP_KERNEL);
		return ERR_PTR(-ENOMEM);

	init.ops = &sama7g5_master_ops;
	init.parent_hws = (const struct clk_hw **)parent_hws;
	init.parent_names = parent_names;
	init.num_parents = num_parents;
	init.flags = CLK_SET_RATE_GATE | CLK_SET_PARENT_GATE;
	init.flags |= CLK_SET_RATE_PARENT;
	init.flags |= CLK_IS_CRITICAL;

	master->hw.init = &init;
	master->regmap = regmap;
	master->chg_pid = chg_pid;
	master->mux_table = mux_table;

	/* Seed cached parent and divider from the live MCR_V2 register. */
	spin_lock_irqsave(master->lock, flags);
	regmap_write(master->regmap, AT91_PMC_MCR_V2, master->id);
	regmap_read(master->regmap, AT91_PMC_MCR_V2, &val);
	master->parent = (val & AT91_PMC_MCR_V2_CSS) >> PMC_MCR_CSS_SHIFT;
	master->div = (val & AT91_PMC_MCR_V2_DIV) >> MASTER_DIV_SHIFT;
	spin_unlock_irqrestore(master->lock, flags);

	ret = clk_hw_register(NULL, &master->hw);
/* MCKR layout for rm9200-class PMCs (mask/shift initializers elided here). */
const struct clk_master_layout at91rm9200_master_layout = {
	.offset = AT91_PMC_MCKR,

/* MCKR layout for sam9x5-class PMCs. */
const struct clk_master_layout at91sam9x5_master_layout = {
	.offset = AT91_PMC_MCKR,