2 * Copyright (c) 2013, The Linux Foundation. All rights reserved.
4 * This software is licensed under the terms of the GNU General Public
5 * License version 2, as published by the Free Software Foundation, and
6 * may be copied, distributed, and modified under those terms.
8 * This program is distributed in the hope that it will be useful,
9 * but WITHOUT ANY WARRANTY; without even the implied warranty of
10 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 * GNU General Public License for more details.
14 #include <linux/kernel.h>
15 #include <linux/bitops.h>
16 #include <linux/err.h>
17 #include <linux/bug.h>
18 #include <linux/export.h>
19 #include <linux/clk-provider.h>
20 #include <linux/delay.h>
21 #include <linux/regmap.h>
22 #include <linux/math64.h>
24 #include <asm/div64.h>
/*
 * CMD register bits. Writing CMD_UPDATE latches the staged CFG/M/N/D
 * values into the root clock generator; CMD_ROOT_OFF reads back whether
 * the root is gated.
 */
#define CMD_UPDATE		BIT(0)
#define CMD_ROOT_EN		BIT(1)
#define CMD_DIRTY_CFG		BIT(4)
#define CMD_DIRTY_N		BIT(5)
#define CMD_DIRTY_M		BIT(6)
#define CMD_DIRTY_D		BIT(7)
#define CMD_ROOT_OFF		BIT(31)

/*
 * CFG register fields: half-integer source divider, parent (source)
 * select, and the M/N counter mode (dual-edge when M != N).
 */
#define CFG_SRC_DIV_SHIFT	0
#define CFG_SRC_SEL_SHIFT	8
#define CFG_SRC_SEL_MASK	(0x7 << CFG_SRC_SEL_SHIFT)
#define CFG_MODE_SHIFT		12
#define CFG_MODE_MASK		(0x3 << CFG_MODE_SHIFT)
#define CFG_MODE_DUAL_EDGE	(0x2 << CFG_MODE_SHIFT)
50 static int clk_rcg2_is_enabled(struct clk_hw *hw)
52 struct clk_rcg2 *rcg = to_clk_rcg2(hw);
56 ret = regmap_read(rcg->clkr.regmap, rcg->cmd_rcgr + CMD_REG, &cmd);
60 return (cmd & CMD_ROOT_OFF) == 0;
63 static u8 clk_rcg2_get_parent(struct clk_hw *hw)
65 struct clk_rcg2 *rcg = to_clk_rcg2(hw);
66 int num_parents = clk_hw_get_num_parents(hw);
70 ret = regmap_read(rcg->clkr.regmap, rcg->cmd_rcgr + CFG_REG, &cfg);
74 cfg &= CFG_SRC_SEL_MASK;
75 cfg >>= CFG_SRC_SEL_SHIFT;
77 for (i = 0; i < num_parents; i++)
78 if (cfg == rcg->parent_map[i].cfg)
82 pr_debug("%s: Clock %s has invalid parent, using default.\n",
83 __func__, clk_hw_get_name(hw));
87 static int update_config(struct clk_rcg2 *rcg)
91 struct clk_hw *hw = &rcg->clkr.hw;
92 const char *name = clk_hw_get_name(hw);
94 ret = regmap_update_bits(rcg->clkr.regmap, rcg->cmd_rcgr + CMD_REG,
95 CMD_UPDATE, CMD_UPDATE);
99 /* Wait for update to take effect */
100 for (count = 500; count > 0; count--) {
101 ret = regmap_read(rcg->clkr.regmap, rcg->cmd_rcgr + CMD_REG, &cmd);
104 if (!(cmd & CMD_UPDATE))
109 WARN(1, "%s: rcg didn't update its configuration.", name);
113 static int clk_rcg2_set_parent(struct clk_hw *hw, u8 index)
115 struct clk_rcg2 *rcg = to_clk_rcg2(hw);
117 u32 cfg = rcg->parent_map[index].cfg << CFG_SRC_SEL_SHIFT;
119 ret = regmap_update_bits(rcg->clkr.regmap, rcg->cmd_rcgr + CFG_REG,
120 CFG_SRC_SEL_MASK, cfg);
124 return update_config(rcg);
128 * Calculate m/n:d rate
131 * rate = ----------- x ---
135 calc_rate(unsigned long rate, u32 m, u32 n, u32 mode, u32 hid_div)
153 clk_rcg2_recalc_rate(struct clk_hw *hw, unsigned long parent_rate)
155 struct clk_rcg2 *rcg = to_clk_rcg2(hw);
156 u32 cfg, hid_div, m = 0, n = 0, mode = 0, mask;
158 regmap_read(rcg->clkr.regmap, rcg->cmd_rcgr + CFG_REG, &cfg);
160 if (rcg->mnd_width) {
161 mask = BIT(rcg->mnd_width) - 1;
162 regmap_read(rcg->clkr.regmap, rcg->cmd_rcgr + M_REG, &m);
164 regmap_read(rcg->clkr.regmap, rcg->cmd_rcgr + N_REG, &n);
168 mode = cfg & CFG_MODE_MASK;
169 mode >>= CFG_MODE_SHIFT;
172 mask = BIT(rcg->hid_width) - 1;
173 hid_div = cfg >> CFG_SRC_DIV_SHIFT;
176 return calc_rate(parent_rate, m, n, mode, hid_div);
179 static int _freq_tbl_determine_rate(struct clk_hw *hw,
180 const struct freq_tbl *f, struct clk_rate_request *req)
182 unsigned long clk_flags, rate = req->rate;
184 struct clk_rcg2 *rcg = to_clk_rcg2(hw);
187 f = qcom_find_freq(f, rate);
191 index = qcom_find_src_index(hw, rcg->parent_map, f->src);
195 clk_flags = clk_hw_get_flags(hw);
196 p = clk_hw_get_parent_by_index(hw, index);
200 if (clk_flags & CLK_SET_RATE_PARENT) {
205 rate *= f->pre_div + 1;
215 rate = clk_hw_get_rate(p);
217 req->best_parent_hw = p;
218 req->best_parent_rate = rate;
224 static int clk_rcg2_determine_rate(struct clk_hw *hw,
225 struct clk_rate_request *req)
227 struct clk_rcg2 *rcg = to_clk_rcg2(hw);
229 return _freq_tbl_determine_rate(hw, rcg->freq_tbl, req);
232 static int clk_rcg2_configure(struct clk_rcg2 *rcg, const struct freq_tbl *f)
235 struct clk_hw *hw = &rcg->clkr.hw;
236 int ret, index = qcom_find_src_index(hw, rcg->parent_map, f->src);
241 if (rcg->mnd_width && f->n) {
242 mask = BIT(rcg->mnd_width) - 1;
243 ret = regmap_update_bits(rcg->clkr.regmap,
244 rcg->cmd_rcgr + M_REG, mask, f->m);
248 ret = regmap_update_bits(rcg->clkr.regmap,
249 rcg->cmd_rcgr + N_REG, mask, ~(f->n - f->m));
253 ret = regmap_update_bits(rcg->clkr.regmap,
254 rcg->cmd_rcgr + D_REG, mask, ~f->n);
259 mask = BIT(rcg->hid_width) - 1;
260 mask |= CFG_SRC_SEL_MASK | CFG_MODE_MASK;
261 cfg = f->pre_div << CFG_SRC_DIV_SHIFT;
262 cfg |= rcg->parent_map[index].cfg << CFG_SRC_SEL_SHIFT;
263 if (rcg->mnd_width && f->n && (f->m != f->n))
264 cfg |= CFG_MODE_DUAL_EDGE;
265 ret = regmap_update_bits(rcg->clkr.regmap,
266 rcg->cmd_rcgr + CFG_REG, mask, cfg);
270 return update_config(rcg);
273 static int __clk_rcg2_set_rate(struct clk_hw *hw, unsigned long rate)
275 struct clk_rcg2 *rcg = to_clk_rcg2(hw);
276 const struct freq_tbl *f;
278 f = qcom_find_freq(rcg->freq_tbl, rate);
282 return clk_rcg2_configure(rcg, f);
/* set_rate hook: parent rate is unused, the freq table carries it. */
static int clk_rcg2_set_rate(struct clk_hw *hw, unsigned long rate,
			     unsigned long parent_rate)
{
	return __clk_rcg2_set_rate(hw, rate);
}
291 static int clk_rcg2_set_rate_and_parent(struct clk_hw *hw,
292 unsigned long rate, unsigned long parent_rate, u8 index)
294 return __clk_rcg2_set_rate(hw, rate);
297 const struct clk_ops clk_rcg2_ops = {
298 .is_enabled = clk_rcg2_is_enabled,
299 .get_parent = clk_rcg2_get_parent,
300 .set_parent = clk_rcg2_set_parent,
301 .recalc_rate = clk_rcg2_recalc_rate,
302 .determine_rate = clk_rcg2_determine_rate,
303 .set_rate = clk_rcg2_set_rate,
304 .set_rate_and_parent = clk_rcg2_set_rate_and_parent,
306 EXPORT_SYMBOL_GPL(clk_rcg2_ops);
308 static int clk_rcg2_shared_force_enable(struct clk_hw *hw, unsigned long rate)
310 struct clk_rcg2 *rcg = to_clk_rcg2(hw);
311 const char *name = clk_hw_get_name(hw);
314 /* force enable RCG */
315 ret = regmap_update_bits(rcg->clkr.regmap, rcg->cmd_rcgr + CMD_REG,
316 CMD_ROOT_EN, CMD_ROOT_EN);
320 /* wait for RCG to turn ON */
321 for (count = 500; count > 0; count--) {
322 ret = clk_rcg2_is_enabled(hw);
328 pr_err("%s: RCG did not turn on\n", name);
331 ret = __clk_rcg2_set_rate(hw, rate);
335 /* clear force enable RCG */
336 return regmap_update_bits(rcg->clkr.regmap, rcg->cmd_rcgr + CMD_REG,
340 static int clk_rcg2_shared_set_rate(struct clk_hw *hw, unsigned long rate,
341 unsigned long parent_rate)
343 struct clk_rcg2 *rcg = to_clk_rcg2(hw);
346 rcg->current_freq = rate;
348 if (!__clk_is_enabled(hw->clk))
351 return clk_rcg2_shared_force_enable(hw, rcg->current_freq);
355 clk_rcg2_shared_recalc_rate(struct clk_hw *hw, unsigned long parent_rate)
357 struct clk_rcg2 *rcg = to_clk_rcg2(hw);
359 return rcg->current_freq = clk_rcg2_recalc_rate(hw, parent_rate);
362 static int clk_rcg2_shared_enable(struct clk_hw *hw)
364 struct clk_rcg2 *rcg = to_clk_rcg2(hw);
366 return clk_rcg2_shared_force_enable(hw, rcg->current_freq);
369 static void clk_rcg2_shared_disable(struct clk_hw *hw)
371 struct clk_rcg2 *rcg = to_clk_rcg2(hw);
373 /* switch to XO, which is the lowest entry in the freq table */
374 clk_rcg2_shared_set_rate(hw, rcg->freq_tbl[0].freq, 0);
377 const struct clk_ops clk_rcg2_shared_ops = {
378 .enable = clk_rcg2_shared_enable,
379 .disable = clk_rcg2_shared_disable,
380 .get_parent = clk_rcg2_get_parent,
381 .recalc_rate = clk_rcg2_shared_recalc_rate,
382 .determine_rate = clk_rcg2_determine_rate,
383 .set_rate = clk_rcg2_shared_set_rate,
385 EXPORT_SYMBOL_GPL(clk_rcg2_shared_ops);
392 static const struct frac_entry frac_table_675m[] = { /* link rate of 270M */
393 { 52, 295 }, /* 119 M */
394 { 11, 57 }, /* 130.25 M */
395 { 63, 307 }, /* 138.50 M */
396 { 11, 50 }, /* 148.50 M */
397 { 47, 206 }, /* 154 M */
398 { 31, 100 }, /* 205.25 M */
399 { 107, 269 }, /* 268.50 M */
403 static struct frac_entry frac_table_810m[] = { /* Link rate of 162M */
404 { 31, 211 }, /* 119 M */
405 { 32, 199 }, /* 130.25 M */
406 { 63, 307 }, /* 138.50 M */
407 { 11, 60 }, /* 148.50 M */
408 { 50, 263 }, /* 154 M */
409 { 31, 120 }, /* 205.25 M */
410 { 119, 359 }, /* 268.50 M */
414 static int clk_edp_pixel_set_rate(struct clk_hw *hw, unsigned long rate,
415 unsigned long parent_rate)
417 struct clk_rcg2 *rcg = to_clk_rcg2(hw);
418 struct freq_tbl f = *rcg->freq_tbl;
419 const struct frac_entry *frac;
421 s64 src_rate = parent_rate;
423 u32 mask = BIT(rcg->hid_width) - 1;
426 if (src_rate == 810000000)
427 frac = frac_table_810m;
429 frac = frac_table_675m;
431 for (; frac->num; frac++) {
433 request *= frac->den;
434 request = div_s64(request, frac->num);
435 if ((src_rate < (request - delta)) ||
436 (src_rate > (request + delta)))
439 regmap_read(rcg->clkr.regmap, rcg->cmd_rcgr + CFG_REG,
442 f.pre_div >>= CFG_SRC_DIV_SHIFT;
447 return clk_rcg2_configure(rcg, &f);
453 static int clk_edp_pixel_set_rate_and_parent(struct clk_hw *hw,
454 unsigned long rate, unsigned long parent_rate, u8 index)
456 /* Parent index is set statically in frequency table */
457 return clk_edp_pixel_set_rate(hw, rate, parent_rate);
460 static int clk_edp_pixel_determine_rate(struct clk_hw *hw,
461 struct clk_rate_request *req)
463 struct clk_rcg2 *rcg = to_clk_rcg2(hw);
464 const struct freq_tbl *f = rcg->freq_tbl;
465 const struct frac_entry *frac;
468 u32 mask = BIT(rcg->hid_width) - 1;
470 int index = qcom_find_src_index(hw, rcg->parent_map, f->src);
472 /* Force the correct parent */
473 req->best_parent_hw = clk_hw_get_parent_by_index(hw, index);
474 req->best_parent_rate = clk_hw_get_rate(req->best_parent_hw);
476 if (req->best_parent_rate == 810000000)
477 frac = frac_table_810m;
479 frac = frac_table_675m;
481 for (; frac->num; frac++) {
483 request *= frac->den;
484 request = div_s64(request, frac->num);
485 if ((req->best_parent_rate < (request - delta)) ||
486 (req->best_parent_rate > (request + delta)))
489 regmap_read(rcg->clkr.regmap, rcg->cmd_rcgr + CFG_REG,
491 hid_div >>= CFG_SRC_DIV_SHIFT;
494 req->rate = calc_rate(req->best_parent_rate,
495 frac->num, frac->den,
496 !!frac->den, hid_div);
503 const struct clk_ops clk_edp_pixel_ops = {
504 .is_enabled = clk_rcg2_is_enabled,
505 .get_parent = clk_rcg2_get_parent,
506 .set_parent = clk_rcg2_set_parent,
507 .recalc_rate = clk_rcg2_recalc_rate,
508 .set_rate = clk_edp_pixel_set_rate,
509 .set_rate_and_parent = clk_edp_pixel_set_rate_and_parent,
510 .determine_rate = clk_edp_pixel_determine_rate,
512 EXPORT_SYMBOL_GPL(clk_edp_pixel_ops);
514 static int clk_byte_determine_rate(struct clk_hw *hw,
515 struct clk_rate_request *req)
517 struct clk_rcg2 *rcg = to_clk_rcg2(hw);
518 const struct freq_tbl *f = rcg->freq_tbl;
519 int index = qcom_find_src_index(hw, rcg->parent_map, f->src);
520 unsigned long parent_rate, div;
521 u32 mask = BIT(rcg->hid_width) - 1;
527 req->best_parent_hw = p = clk_hw_get_parent_by_index(hw, index);
528 req->best_parent_rate = parent_rate = clk_hw_round_rate(p, req->rate);
530 div = DIV_ROUND_UP((2 * parent_rate), req->rate) - 1;
531 div = min_t(u32, div, mask);
533 req->rate = calc_rate(parent_rate, 0, 0, 0, div);
538 static int clk_byte_set_rate(struct clk_hw *hw, unsigned long rate,
539 unsigned long parent_rate)
541 struct clk_rcg2 *rcg = to_clk_rcg2(hw);
542 struct freq_tbl f = *rcg->freq_tbl;
544 u32 mask = BIT(rcg->hid_width) - 1;
546 div = DIV_ROUND_UP((2 * parent_rate), rate) - 1;
547 div = min_t(u32, div, mask);
551 return clk_rcg2_configure(rcg, &f);
554 static int clk_byte_set_rate_and_parent(struct clk_hw *hw,
555 unsigned long rate, unsigned long parent_rate, u8 index)
557 /* Parent index is set statically in frequency table */
558 return clk_byte_set_rate(hw, rate, parent_rate);
561 const struct clk_ops clk_byte_ops = {
562 .is_enabled = clk_rcg2_is_enabled,
563 .get_parent = clk_rcg2_get_parent,
564 .set_parent = clk_rcg2_set_parent,
565 .recalc_rate = clk_rcg2_recalc_rate,
566 .set_rate = clk_byte_set_rate,
567 .set_rate_and_parent = clk_byte_set_rate_and_parent,
568 .determine_rate = clk_byte_determine_rate,
570 EXPORT_SYMBOL_GPL(clk_byte_ops);
572 static int clk_byte2_determine_rate(struct clk_hw *hw,
573 struct clk_rate_request *req)
575 struct clk_rcg2 *rcg = to_clk_rcg2(hw);
576 unsigned long parent_rate, div;
577 u32 mask = BIT(rcg->hid_width) - 1;
579 unsigned long rate = req->rate;
584 p = req->best_parent_hw;
585 req->best_parent_rate = parent_rate = clk_hw_round_rate(p, rate);
587 div = DIV_ROUND_UP((2 * parent_rate), rate) - 1;
588 div = min_t(u32, div, mask);
590 req->rate = calc_rate(parent_rate, 0, 0, 0, div);
595 static int clk_byte2_set_rate(struct clk_hw *hw, unsigned long rate,
596 unsigned long parent_rate)
598 struct clk_rcg2 *rcg = to_clk_rcg2(hw);
599 struct freq_tbl f = { 0 };
601 int i, num_parents = clk_hw_get_num_parents(hw);
602 u32 mask = BIT(rcg->hid_width) - 1;
605 div = DIV_ROUND_UP((2 * parent_rate), rate) - 1;
606 div = min_t(u32, div, mask);
610 regmap_read(rcg->clkr.regmap, rcg->cmd_rcgr + CFG_REG, &cfg);
611 cfg &= CFG_SRC_SEL_MASK;
612 cfg >>= CFG_SRC_SEL_SHIFT;
614 for (i = 0; i < num_parents; i++) {
615 if (cfg == rcg->parent_map[i].cfg) {
616 f.src = rcg->parent_map[i].src;
617 return clk_rcg2_configure(rcg, &f);
624 static int clk_byte2_set_rate_and_parent(struct clk_hw *hw,
625 unsigned long rate, unsigned long parent_rate, u8 index)
627 /* Read the hardware to determine parent during set_rate */
628 return clk_byte2_set_rate(hw, rate, parent_rate);
631 const struct clk_ops clk_byte2_ops = {
632 .is_enabled = clk_rcg2_is_enabled,
633 .get_parent = clk_rcg2_get_parent,
634 .set_parent = clk_rcg2_set_parent,
635 .recalc_rate = clk_rcg2_recalc_rate,
636 .set_rate = clk_byte2_set_rate,
637 .set_rate_and_parent = clk_byte2_set_rate_and_parent,
638 .determine_rate = clk_byte2_determine_rate,
640 EXPORT_SYMBOL_GPL(clk_byte2_ops);
642 static const struct frac_entry frac_table_pixel[] = {
650 static int clk_pixel_determine_rate(struct clk_hw *hw,
651 struct clk_rate_request *req)
653 unsigned long request, src_rate;
655 const struct frac_entry *frac = frac_table_pixel;
657 for (; frac->num; frac++) {
658 request = (req->rate * frac->den) / frac->num;
660 src_rate = clk_hw_round_rate(req->best_parent_hw, request);
661 if ((src_rate < (request - delta)) ||
662 (src_rate > (request + delta)))
665 req->best_parent_rate = src_rate;
666 req->rate = (src_rate * frac->num) / frac->den;
673 static int clk_pixel_set_rate(struct clk_hw *hw, unsigned long rate,
674 unsigned long parent_rate)
676 struct clk_rcg2 *rcg = to_clk_rcg2(hw);
677 struct freq_tbl f = { 0 };
678 const struct frac_entry *frac = frac_table_pixel;
679 unsigned long request;
681 u32 mask = BIT(rcg->hid_width) - 1;
683 int i, num_parents = clk_hw_get_num_parents(hw);
685 regmap_read(rcg->clkr.regmap, rcg->cmd_rcgr + CFG_REG, &cfg);
686 cfg &= CFG_SRC_SEL_MASK;
687 cfg >>= CFG_SRC_SEL_SHIFT;
689 for (i = 0; i < num_parents; i++)
690 if (cfg == rcg->parent_map[i].cfg) {
691 f.src = rcg->parent_map[i].src;
695 for (; frac->num; frac++) {
696 request = (rate * frac->den) / frac->num;
698 if ((parent_rate < (request - delta)) ||
699 (parent_rate > (request + delta)))
702 regmap_read(rcg->clkr.regmap, rcg->cmd_rcgr + CFG_REG,
705 f.pre_div >>= CFG_SRC_DIV_SHIFT;
710 return clk_rcg2_configure(rcg, &f);
715 static int clk_pixel_set_rate_and_parent(struct clk_hw *hw, unsigned long rate,
716 unsigned long parent_rate, u8 index)
718 return clk_pixel_set_rate(hw, rate, parent_rate);
721 const struct clk_ops clk_pixel_ops = {
722 .is_enabled = clk_rcg2_is_enabled,
723 .get_parent = clk_rcg2_get_parent,
724 .set_parent = clk_rcg2_set_parent,
725 .recalc_rate = clk_rcg2_recalc_rate,
726 .set_rate = clk_pixel_set_rate,
727 .set_rate_and_parent = clk_pixel_set_rate_and_parent,
728 .determine_rate = clk_pixel_determine_rate,
730 EXPORT_SYMBOL_GPL(clk_pixel_ops);
732 static int clk_gfx3d_determine_rate(struct clk_hw *hw,
733 struct clk_rate_request *req)
735 struct clk_rate_request parent_req = { };
736 struct clk_hw *p2, *p8, *p9, *xo;
737 unsigned long p9_rate;
740 xo = clk_hw_get_parent_by_index(hw, 0);
741 if (req->rate == clk_hw_get_rate(xo)) {
742 req->best_parent_hw = xo;
746 p9 = clk_hw_get_parent_by_index(hw, 2);
747 p2 = clk_hw_get_parent_by_index(hw, 3);
748 p8 = clk_hw_get_parent_by_index(hw, 4);
750 /* PLL9 is a fixed rate PLL */
751 p9_rate = clk_hw_get_rate(p9);
753 parent_req.rate = req->rate = min(req->rate, p9_rate);
754 if (req->rate == p9_rate) {
755 req->rate = req->best_parent_rate = p9_rate;
756 req->best_parent_hw = p9;
760 if (req->best_parent_hw == p9) {
761 /* Are we going back to a previously used rate? */
762 if (clk_hw_get_rate(p8) == req->rate)
763 req->best_parent_hw = p8;
765 req->best_parent_hw = p2;
766 } else if (req->best_parent_hw == p8) {
767 req->best_parent_hw = p2;
769 req->best_parent_hw = p8;
772 ret = __clk_determine_rate(req->best_parent_hw, &parent_req);
776 req->rate = req->best_parent_rate = parent_req.rate;
781 static int clk_gfx3d_set_rate_and_parent(struct clk_hw *hw, unsigned long rate,
782 unsigned long parent_rate, u8 index)
784 struct clk_rcg2 *rcg = to_clk_rcg2(hw);
788 /* Just mux it, we don't use the division or m/n hardware */
789 cfg = rcg->parent_map[index].cfg << CFG_SRC_SEL_SHIFT;
790 ret = regmap_write(rcg->clkr.regmap, rcg->cmd_rcgr + CFG_REG, cfg);
794 return update_config(rcg);
static int clk_gfx3d_set_rate(struct clk_hw *hw, unsigned long rate,
			      unsigned long parent_rate)
{
	/*
	 * We should never get here; clk_gfx3d_determine_rate() should always
	 * make us use a different parent than what we're currently using, so
	 * clk_gfx3d_set_rate_and_parent() should always be called.
	 */
	return 0;
}
808 const struct clk_ops clk_gfx3d_ops = {
809 .is_enabled = clk_rcg2_is_enabled,
810 .get_parent = clk_rcg2_get_parent,
811 .set_parent = clk_rcg2_set_parent,
812 .recalc_rate = clk_rcg2_recalc_rate,
813 .set_rate = clk_gfx3d_set_rate,
814 .set_rate_and_parent = clk_gfx3d_set_rate_and_parent,
815 .determine_rate = clk_gfx3d_determine_rate,
817 EXPORT_SYMBOL_GPL(clk_gfx3d_ops);