clk: change clk_ops' ->determine_rate() prototype
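With this change, ->determine_rate() takes a single struct clk_rate_request and returns an int, instead of the old long return with separate rate/min_rate/max_rate/best_parent_rate/best_parent_hw arguments. Below is a minimal sketch of the reworked shape, inferred from how the determine_rate callbacks in this file fill in the request; the min_rate/max_rate members, the exact field layout, and the old signature recalled in the comment are assumptions based on include/linux/clk-provider.h of this kernel and are not part of this file.

/*
 * Hedged sketch of the reworked interface; the real definitions live in
 * include/linux/clk-provider.h.
 */
struct clk_rate_request {
	unsigned long rate;		/* in: requested rate, out: rate the clock will provide */
	unsigned long min_rate;		/* assumed: rate constraints from clk-provider.h */
	unsigned long max_rate;
	unsigned long best_parent_rate;	/* out: rate the chosen parent should run at */
	struct clk_hw *best_parent_hw;	/* out: parent selected by the callback */
};

/*
 * clk_ops member; previously (as recalled from clk-provider.h before this change):
 *	long (*determine_rate)(struct clk_hw *hw, unsigned long rate,
 *			       unsigned long min_rate, unsigned long max_rate,
 *			       unsigned long *best_parent_rate,
 *			       struct clk_hw **best_parent_hw);
 */
int (*determine_rate)(struct clk_hw *hw, struct clk_rate_request *req);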
exynos8895/android_kernel_samsung_universal8895: drivers/clk/qcom/clk-rcg2.c
/*
 * Copyright (c) 2013, The Linux Foundation. All rights reserved.
 *
 * This software is licensed under the terms of the GNU General Public
 * License version 2, as published by the Free Software Foundation, and
 * may be copied, distributed, and modified under those terms.
 *
 * This program is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 * GNU General Public License for more details.
 */

#include <linux/kernel.h>
#include <linux/bitops.h>
#include <linux/err.h>
#include <linux/bug.h>
#include <linux/export.h>
#include <linux/clk-provider.h>
#include <linux/delay.h>
#include <linux/regmap.h>
#include <linux/math64.h>

#include <asm/div64.h>

#include "clk-rcg.h"
#include "common.h"

#define CMD_REG			0x0
#define CMD_UPDATE		BIT(0)
#define CMD_ROOT_EN		BIT(1)
#define CMD_DIRTY_CFG		BIT(4)
#define CMD_DIRTY_N		BIT(5)
#define CMD_DIRTY_M		BIT(6)
#define CMD_DIRTY_D		BIT(7)
#define CMD_ROOT_OFF		BIT(31)

#define CFG_REG			0x4
#define CFG_SRC_DIV_SHIFT	0
#define CFG_SRC_SEL_SHIFT	8
#define CFG_SRC_SEL_MASK	(0x7 << CFG_SRC_SEL_SHIFT)
#define CFG_MODE_SHIFT		12
#define CFG_MODE_MASK		(0x3 << CFG_MODE_SHIFT)
#define CFG_MODE_DUAL_EDGE	(0x2 << CFG_MODE_SHIFT)

#define M_REG			0x8
#define N_REG			0xc
#define D_REG			0x10

static int clk_rcg2_is_enabled(struct clk_hw *hw)
{
	struct clk_rcg2 *rcg = to_clk_rcg2(hw);
	u32 cmd;
	int ret;

	ret = regmap_read(rcg->clkr.regmap, rcg->cmd_rcgr + CMD_REG, &cmd);
	if (ret)
		return ret;

	return (cmd & CMD_ROOT_OFF) == 0;
}

static u8 clk_rcg2_get_parent(struct clk_hw *hw)
{
	struct clk_rcg2 *rcg = to_clk_rcg2(hw);
	int num_parents = __clk_get_num_parents(hw->clk);
	u32 cfg;
	int i, ret;

	ret = regmap_read(rcg->clkr.regmap, rcg->cmd_rcgr + CFG_REG, &cfg);
	if (ret)
		goto err;

	cfg &= CFG_SRC_SEL_MASK;
	cfg >>= CFG_SRC_SEL_SHIFT;

	for (i = 0; i < num_parents; i++)
		if (cfg == rcg->parent_map[i].cfg)
			return i;

err:
	pr_debug("%s: Clock %s has invalid parent, using default.\n",
		 __func__, __clk_get_name(hw->clk));
	return 0;
}

static int update_config(struct clk_rcg2 *rcg)
{
	int count, ret;
	u32 cmd;
	struct clk_hw *hw = &rcg->clkr.hw;
	const char *name = __clk_get_name(hw->clk);

	ret = regmap_update_bits(rcg->clkr.regmap, rcg->cmd_rcgr + CMD_REG,
				 CMD_UPDATE, CMD_UPDATE);
	if (ret)
		return ret;

	/* Wait for update to take effect */
	for (count = 500; count > 0; count--) {
		ret = regmap_read(rcg->clkr.regmap, rcg->cmd_rcgr + CMD_REG, &cmd);
		if (ret)
			return ret;
		if (!(cmd & CMD_UPDATE))
			return 0;
		udelay(1);
	}

	WARN(1, "%s: rcg didn't update its configuration.", name);
	return 0;
}

static int clk_rcg2_set_parent(struct clk_hw *hw, u8 index)
{
	struct clk_rcg2 *rcg = to_clk_rcg2(hw);
	int ret;
	u32 cfg = rcg->parent_map[index].cfg << CFG_SRC_SEL_SHIFT;

	ret = regmap_update_bits(rcg->clkr.regmap, rcg->cmd_rcgr + CFG_REG,
				 CFG_SRC_SEL_MASK, cfg);
	if (ret)
		return ret;

	return update_config(rcg);
}

/*
 * Calculate m/n:d rate
 *
 *          parent_rate     m
 *   rate = ----------- x  ---
 *            hid_div       n
 */
static unsigned long
calc_rate(unsigned long rate, u32 m, u32 n, u32 mode, u32 hid_div)
{
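	/* hid_div encodes the divider as (2 * div) - 1, i.e. div = (hid_div + 1) / 2 */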
	if (hid_div) {
		rate *= 2;
		rate /= hid_div + 1;
	}

	if (mode) {
		u64 tmp = rate;
		tmp *= m;
		do_div(tmp, n);
		rate = tmp;
	}

	return rate;
}

static unsigned long
clk_rcg2_recalc_rate(struct clk_hw *hw, unsigned long parent_rate)
{
	struct clk_rcg2 *rcg = to_clk_rcg2(hw);
	u32 cfg, hid_div, m = 0, n = 0, mode = 0, mask;

	regmap_read(rcg->clkr.regmap, rcg->cmd_rcgr + CFG_REG, &cfg);

	if (rcg->mnd_width) {
		mask = BIT(rcg->mnd_width) - 1;
		regmap_read(rcg->clkr.regmap, rcg->cmd_rcgr + M_REG, &m);
		m &= mask;
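		/* N is programmed as ~(N - M) in clk_rcg2_configure(), recover n */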
		regmap_read(rcg->clkr.regmap, rcg->cmd_rcgr + N_REG, &n);
		n = ~n;
		n &= mask;
		n += m;
		mode = cfg & CFG_MODE_MASK;
		mode >>= CFG_MODE_SHIFT;
	}

	mask = BIT(rcg->hid_width) - 1;
	hid_div = cfg >> CFG_SRC_DIV_SHIFT;
	hid_div &= mask;

	return calc_rate(parent_rate, m, n, mode, hid_div);
}

static int _freq_tbl_determine_rate(struct clk_hw *hw,
		const struct freq_tbl *f, struct clk_rate_request *req)
{
	unsigned long clk_flags, rate = req->rate;
	struct clk *p;
	struct clk_rcg2 *rcg = to_clk_rcg2(hw);
	int index;

	f = qcom_find_freq(f, rate);
	if (!f)
		return -EINVAL;

	index = qcom_find_src_index(hw, rcg->parent_map, f->src);
	if (index < 0)
		return index;

	clk_flags = __clk_get_flags(hw->clk);
	p = clk_get_parent_by_index(hw->clk, index);
	if (clk_flags & CLK_SET_RATE_PARENT) {
		if (f->pre_div) {
			rate /= 2;
			rate *= f->pre_div + 1;
		}

		if (f->n) {
			u64 tmp = rate;
			tmp = tmp * f->n;
			do_div(tmp, f->m);
			rate = tmp;
		}
	} else {
		rate = __clk_get_rate(p);
	}
	req->best_parent_hw = __clk_get_hw(p);
	req->best_parent_rate = rate;
	req->rate = f->freq;

	return 0;
}

static int clk_rcg2_determine_rate(struct clk_hw *hw,
				   struct clk_rate_request *req)
{
	struct clk_rcg2 *rcg = to_clk_rcg2(hw);

	return _freq_tbl_determine_rate(hw, rcg->freq_tbl, req);
}

static int clk_rcg2_configure(struct clk_rcg2 *rcg, const struct freq_tbl *f)
{
	u32 cfg, mask;
	struct clk_hw *hw = &rcg->clkr.hw;
	int ret, index = qcom_find_src_index(hw, rcg->parent_map, f->src);

	if (index < 0)
		return index;

	if (rcg->mnd_width && f->n) {
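		/* M is written as-is, N as ~(N - M) and D as ~N */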
		mask = BIT(rcg->mnd_width) - 1;
		ret = regmap_update_bits(rcg->clkr.regmap,
				rcg->cmd_rcgr + M_REG, mask, f->m);
		if (ret)
			return ret;

		ret = regmap_update_bits(rcg->clkr.regmap,
				rcg->cmd_rcgr + N_REG, mask, ~(f->n - f->m));
		if (ret)
			return ret;

		ret = regmap_update_bits(rcg->clkr.regmap,
				rcg->cmd_rcgr + D_REG, mask, ~f->n);
		if (ret)
			return ret;
	}

	mask = BIT(rcg->hid_width) - 1;
	mask |= CFG_SRC_SEL_MASK | CFG_MODE_MASK;
	cfg = f->pre_div << CFG_SRC_DIV_SHIFT;
	cfg |= rcg->parent_map[index].cfg << CFG_SRC_SEL_SHIFT;
	if (rcg->mnd_width && f->n && (f->m != f->n))
		cfg |= CFG_MODE_DUAL_EDGE;
	ret = regmap_update_bits(rcg->clkr.regmap,
			rcg->cmd_rcgr + CFG_REG, mask, cfg);
	if (ret)
		return ret;

	return update_config(rcg);
}

static int __clk_rcg2_set_rate(struct clk_hw *hw, unsigned long rate)
{
	struct clk_rcg2 *rcg = to_clk_rcg2(hw);
	const struct freq_tbl *f;

	f = qcom_find_freq(rcg->freq_tbl, rate);
	if (!f)
		return -EINVAL;

	return clk_rcg2_configure(rcg, f);
}

static int clk_rcg2_set_rate(struct clk_hw *hw, unsigned long rate,
			     unsigned long parent_rate)
{
	return __clk_rcg2_set_rate(hw, rate);
}

static int clk_rcg2_set_rate_and_parent(struct clk_hw *hw,
		unsigned long rate, unsigned long parent_rate, u8 index)
{
	return __clk_rcg2_set_rate(hw, rate);
}

const struct clk_ops clk_rcg2_ops = {
	.is_enabled = clk_rcg2_is_enabled,
	.get_parent = clk_rcg2_get_parent,
	.set_parent = clk_rcg2_set_parent,
	.recalc_rate = clk_rcg2_recalc_rate,
	.determine_rate = clk_rcg2_determine_rate,
	.set_rate = clk_rcg2_set_rate,
	.set_rate_and_parent = clk_rcg2_set_rate_and_parent,
};
EXPORT_SYMBOL_GPL(clk_rcg2_ops);

struct frac_entry {
	int num;
	int den;
};

static const struct frac_entry frac_table_675m[] = {	/* link rate of 270M */
	{ 52, 295 },	/* 119 M */
	{ 11, 57 },	/* 130.25 M */
	{ 63, 307 },	/* 138.50 M */
	{ 11, 50 },	/* 148.50 M */
	{ 47, 206 },	/* 154 M */
	{ 31, 100 },	/* 205.25 M */
	{ 107, 269 },	/* 268.50 M */
	{ },
};

static struct frac_entry frac_table_810m[] = {	/* Link rate of 162M */
	{ 31, 211 },	/* 119 M */
	{ 32, 199 },	/* 130.25 M */
	{ 63, 307 },	/* 138.50 M */
	{ 11, 60 },	/* 148.50 M */
	{ 50, 263 },	/* 154 M */
	{ 31, 120 },	/* 205.25 M */
	{ 119, 359 },	/* 268.50 M */
	{ },
};

static int clk_edp_pixel_set_rate(struct clk_hw *hw, unsigned long rate,
				  unsigned long parent_rate)
{
	struct clk_rcg2 *rcg = to_clk_rcg2(hw);
	struct freq_tbl f = *rcg->freq_tbl;
	const struct frac_entry *frac;
	int delta = 100000;
	s64 src_rate = parent_rate;
	s64 request;
	u32 mask = BIT(rcg->hid_width) - 1;
	u32 hid_div;

	if (src_rate == 810000000)
		frac = frac_table_810m;
	else
		frac = frac_table_675m;

	for (; frac->num; frac++) {
		request = rate;
		request *= frac->den;
		request = div_s64(request, frac->num);
		if ((src_rate < (request - delta)) ||
		    (src_rate > (request + delta)))
			continue;

		regmap_read(rcg->clkr.regmap, rcg->cmd_rcgr + CFG_REG,
				&hid_div);
		f.pre_div = hid_div;
		f.pre_div >>= CFG_SRC_DIV_SHIFT;
		f.pre_div &= mask;
		f.m = frac->num;
		f.n = frac->den;

		return clk_rcg2_configure(rcg, &f);
	}

	return -EINVAL;
}

static int clk_edp_pixel_set_rate_and_parent(struct clk_hw *hw,
		unsigned long rate, unsigned long parent_rate, u8 index)
{
	/* Parent index is set statically in frequency table */
	return clk_edp_pixel_set_rate(hw, rate, parent_rate);
}

static int clk_edp_pixel_determine_rate(struct clk_hw *hw,
					struct clk_rate_request *req)
{
	struct clk_rcg2 *rcg = to_clk_rcg2(hw);
	const struct freq_tbl *f = rcg->freq_tbl;
	const struct frac_entry *frac;
	int delta = 100000;
	s64 request;
	u32 mask = BIT(rcg->hid_width) - 1;
	u32 hid_div;
	int index = qcom_find_src_index(hw, rcg->parent_map, f->src);
	struct clk *p = clk_get_parent_by_index(hw->clk, index);

	/* Force the correct parent */
	req->best_parent_hw = __clk_get_hw(p);
	req->best_parent_rate = __clk_get_rate(p);

	if (req->best_parent_rate == 810000000)
		frac = frac_table_810m;
	else
		frac = frac_table_675m;

	for (; frac->num; frac++) {
		request = req->rate;
		request *= frac->den;
		request = div_s64(request, frac->num);
		if ((req->best_parent_rate < (request - delta)) ||
		    (req->best_parent_rate > (request + delta)))
			continue;

		regmap_read(rcg->clkr.regmap, rcg->cmd_rcgr + CFG_REG,
				&hid_div);
		hid_div >>= CFG_SRC_DIV_SHIFT;
		hid_div &= mask;

		req->rate = calc_rate(req->best_parent_rate,
				      frac->num, frac->den,
				      !!frac->den, hid_div);
		return 0;
	}

	return -EINVAL;
}

const struct clk_ops clk_edp_pixel_ops = {
	.is_enabled = clk_rcg2_is_enabled,
	.get_parent = clk_rcg2_get_parent,
	.set_parent = clk_rcg2_set_parent,
	.recalc_rate = clk_rcg2_recalc_rate,
	.set_rate = clk_edp_pixel_set_rate,
	.set_rate_and_parent = clk_edp_pixel_set_rate_and_parent,
	.determine_rate = clk_edp_pixel_determine_rate,
};
EXPORT_SYMBOL_GPL(clk_edp_pixel_ops);

static int clk_byte_determine_rate(struct clk_hw *hw,
				   struct clk_rate_request *req)
{
	struct clk_rcg2 *rcg = to_clk_rcg2(hw);
	const struct freq_tbl *f = rcg->freq_tbl;
	int index = qcom_find_src_index(hw, rcg->parent_map, f->src);
	unsigned long parent_rate, div;
	u32 mask = BIT(rcg->hid_width) - 1;
	struct clk *p;

	if (req->rate == 0)
		return -EINVAL;

	p = clk_get_parent_by_index(hw->clk, index);
	req->best_parent_hw = __clk_get_hw(p);
	req->best_parent_rate = parent_rate = __clk_round_rate(p, req->rate);

	div = DIV_ROUND_UP((2 * parent_rate), req->rate) - 1;
	div = min_t(u32, div, mask);

	req->rate = calc_rate(parent_rate, 0, 0, 0, div);

	return 0;
}

static int clk_byte_set_rate(struct clk_hw *hw, unsigned long rate,
			     unsigned long parent_rate)
{
	struct clk_rcg2 *rcg = to_clk_rcg2(hw);
	struct freq_tbl f = *rcg->freq_tbl;
	unsigned long div;
	u32 mask = BIT(rcg->hid_width) - 1;

	div = DIV_ROUND_UP((2 * parent_rate), rate) - 1;
	div = min_t(u32, div, mask);

	f.pre_div = div;

	return clk_rcg2_configure(rcg, &f);
}

static int clk_byte_set_rate_and_parent(struct clk_hw *hw,
		unsigned long rate, unsigned long parent_rate, u8 index)
{
	/* Parent index is set statically in frequency table */
	return clk_byte_set_rate(hw, rate, parent_rate);
}

const struct clk_ops clk_byte_ops = {
	.is_enabled = clk_rcg2_is_enabled,
	.get_parent = clk_rcg2_get_parent,
	.set_parent = clk_rcg2_set_parent,
	.recalc_rate = clk_rcg2_recalc_rate,
	.set_rate = clk_byte_set_rate,
	.set_rate_and_parent = clk_byte_set_rate_and_parent,
	.determine_rate = clk_byte_determine_rate,
};
EXPORT_SYMBOL_GPL(clk_byte_ops);

static const struct frac_entry frac_table_pixel[] = {
	{ 3, 8 },
	{ 2, 9 },
	{ 4, 9 },
	{ 1, 1 },
	{ }
};

static int clk_pixel_determine_rate(struct clk_hw *hw,
				    struct clk_rate_request *req)
{
	struct clk_rcg2 *rcg = to_clk_rcg2(hw);
	unsigned long request, src_rate;
	int delta = 100000;
	const struct freq_tbl *f = rcg->freq_tbl;
	const struct frac_entry *frac = frac_table_pixel;
	int index = qcom_find_src_index(hw, rcg->parent_map, f->src);
	struct clk *parent = clk_get_parent_by_index(hw->clk, index);

	req->best_parent_hw = __clk_get_hw(parent);

	for (; frac->num; frac++) {
		request = (req->rate * frac->den) / frac->num;

		src_rate = __clk_round_rate(parent, request);
		if ((src_rate < (request - delta)) ||
		    (src_rate > (request + delta)))
			continue;

		req->best_parent_rate = src_rate;
		req->rate = (src_rate * frac->num) / frac->den;
		return 0;
	}

	return -EINVAL;
}

static int clk_pixel_set_rate(struct clk_hw *hw, unsigned long rate,
			      unsigned long parent_rate)
{
	struct clk_rcg2 *rcg = to_clk_rcg2(hw);
	struct freq_tbl f = *rcg->freq_tbl;
	const struct frac_entry *frac = frac_table_pixel;
	unsigned long request, src_rate;
	int delta = 100000;
	u32 mask = BIT(rcg->hid_width) - 1;
	u32 hid_div;
	int index = qcom_find_src_index(hw, rcg->parent_map, f.src);
	struct clk *parent = clk_get_parent_by_index(hw->clk, index);

	for (; frac->num; frac++) {
		request = (rate * frac->den) / frac->num;

		src_rate = __clk_round_rate(parent, request);
		if ((src_rate < (request - delta)) ||
		    (src_rate > (request + delta)))
			continue;

		regmap_read(rcg->clkr.regmap, rcg->cmd_rcgr + CFG_REG,
				&hid_div);
		f.pre_div = hid_div;
		f.pre_div >>= CFG_SRC_DIV_SHIFT;
		f.pre_div &= mask;
		f.m = frac->num;
		f.n = frac->den;

		return clk_rcg2_configure(rcg, &f);
	}
	return -EINVAL;
}

static int clk_pixel_set_rate_and_parent(struct clk_hw *hw, unsigned long rate,
		unsigned long parent_rate, u8 index)
{
	/* Parent index is set statically in frequency table */
	return clk_pixel_set_rate(hw, rate, parent_rate);
}

const struct clk_ops clk_pixel_ops = {
	.is_enabled = clk_rcg2_is_enabled,
	.get_parent = clk_rcg2_get_parent,
	.set_parent = clk_rcg2_set_parent,
	.recalc_rate = clk_rcg2_recalc_rate,
	.set_rate = clk_pixel_set_rate,
	.set_rate_and_parent = clk_pixel_set_rate_and_parent,
	.determine_rate = clk_pixel_determine_rate,
};
EXPORT_SYMBOL_GPL(clk_pixel_ops);