1 #include <linux/xhci/xhci-mtk-scheduler.h>
2 #include <linux/kernel.h> /* printk() */
/*
 * Endpoint bookkeeping tables, one slot per schedulable endpoint.
 *
 * NOTE(review): declared as arrays of (struct sch_ep **), yet every user
 * casts them to (struct sch_ep **) and stores plain (struct sch_ep *)
 * elements in them — the extra level of indirection in the declaration
 * looks accidental; confirm against the header before changing it.
 */
static struct sch_ep **ss_out_eps[MAX_EP_NUM];	/* SuperSpeed OUT endpoints */
static struct sch_ep **ss_in_eps[MAX_EP_NUM];	/* SuperSpeed IN endpoints */
static struct sch_ep **hs_eps[MAX_EP_NUM];	/* including tt isoc */
static struct sch_ep **tt_intr_eps[MAX_EP_NUM];	/* LS/FS interrupt eps behind a TT */
9 int mtk_xhci_scheduler_init(void)
13 for (i
= 0; i
< MAX_EP_NUM
; i
++) {
16 for (i
= 0; i
< MAX_EP_NUM
; i
++) {
19 for (i
= 0; i
< MAX_EP_NUM
; i
++) {
22 for (i
= 0; i
< MAX_EP_NUM
; i
++) {
23 tt_intr_eps
[i
] = NULL
;
28 int add_sch_ep(int dev_speed
, int is_in
, int isTT
, int ep_type
, int maxp
, int interval
, int burst
,
29 int mult
, int offset
, int repeat
, int pkts
, int cs_count
, int burst_mode
,
30 int bw_cost
, mtk_u32
*ep
, struct sch_ep
*tmp_ep
)
33 struct sch_ep
**ep_array
;
36 if (is_in
&& dev_speed
== USB_SPEED_SUPER
) {
37 ep_array
= (struct sch_ep
**)ss_in_eps
;
38 } else if (dev_speed
== USB_SPEED_SUPER
) {
39 ep_array
= (struct sch_ep
**)ss_out_eps
;
40 } else if (dev_speed
== USB_SPEED_HIGH
|| (isTT
&& ep_type
== USB_EP_ISOC
)) {
41 ep_array
= (struct sch_ep
**)hs_eps
;
43 ep_array
= (struct sch_ep
**)tt_intr_eps
;
45 for (i
= 0; i
< MAX_EP_NUM
; i
++) {
46 if (ep_array
[i
] == NULL
) {
47 tmp_ep
->dev_speed
= dev_speed
;
49 tmp_ep
->is_in
= is_in
;
50 tmp_ep
->ep_type
= ep_type
;
52 tmp_ep
->interval
= interval
;
53 tmp_ep
->burst
= burst
;
55 tmp_ep
->offset
= offset
;
56 tmp_ep
->repeat
= repeat
;
58 tmp_ep
->cs_count
= cs_count
;
59 tmp_ep
->burst_mode
= burst_mode
;
60 tmp_ep
->bw_cost
= bw_cost
;
69 int count_ss_bw(int is_in
, int ep_type
, int maxp
, int interval
, int burst
, int mult
, int offset
,
70 int repeat
, int td_size
)
74 int final_bw_required
;
75 int bw_required_per_repeat
;
77 struct sch_ep
*cur_sch_ep
;
78 struct sch_ep
**ep_array
;
89 ep_array
= (struct sch_ep
**)ss_in_eps
;
91 ep_array
= (struct sch_ep
**)ss_out_eps
;
99 final_bw_required
= 0;
100 for (i
= 0; i
< MAX_EP_NUM
; i
++) {
101 cur_sch_ep
= ep_array
[i
];
102 if (cur_sch_ep
== NULL
) {
105 ep_interval
= cur_sch_ep
->interval
;
106 ep_offset
= cur_sch_ep
->offset
;
107 if (cur_sch_ep
->repeat
== 0) {
108 if (ep_interval
>= interval
) {
109 tmp_offset
= ep_offset
+ ep_interval
- offset
;
110 tmp_interval
= interval
;
112 tmp_offset
= offset
+ interval
- ep_offset
;
113 tmp_interval
= ep_interval
;
115 if (tmp_offset
% tmp_interval
== 0) {
116 final_bw_required
+= cur_sch_ep
->bw_cost
;
119 ep_repeat
= cur_sch_ep
->repeat
;
120 ep_mult
= cur_sch_ep
->mult
;
121 for (k
= 0; k
<= ep_mult
; k
++) {
122 cur_ep_offset
= ep_offset
+ (k
* ep_mult
);
123 if (ep_interval
>= interval
) {
124 tmp_offset
= cur_ep_offset
+ ep_interval
- offset
;
125 tmp_interval
= interval
;
127 tmp_offset
= offset
+ interval
- cur_ep_offset
;
128 tmp_interval
= ep_interval
;
130 if (tmp_offset
% tmp_interval
== 0) {
131 final_bw_required
+= cur_sch_ep
->bw_cost
;
137 final_bw_required
+= td_size
;
139 bw_required_per_repeat
= maxp
* (burst
+ 1);
140 for (j
= 0; j
<= mult
; j
++) {
142 cur_offset
= offset
+ (j
* repeat
);
143 for (i
= 0; i
< MAX_EP_NUM
; i
++) {
144 cur_sch_ep
= ep_array
[i
];
145 if (cur_sch_ep
== NULL
) {
148 ep_interval
= cur_sch_ep
->interval
;
149 ep_offset
= cur_sch_ep
->offset
;
150 if (cur_sch_ep
->repeat
== 0) {
151 if (ep_interval
>= interval
) {
152 tmp_offset
= ep_offset
+ ep_interval
- cur_offset
;
153 tmp_interval
= interval
;
155 tmp_offset
= cur_offset
+ interval
- ep_offset
;
156 tmp_interval
= ep_interval
;
158 if (tmp_offset
% tmp_interval
== 0) {
159 tmp_bw_required
+= cur_sch_ep
->bw_cost
;
162 ep_repeat
= cur_sch_ep
->repeat
;
163 ep_mult
= cur_sch_ep
->mult
;
164 for (k
= 0; k
<= ep_mult
; k
++) {
165 cur_ep_offset
= ep_offset
+ (k
* ep_repeat
);
166 if (ep_interval
>= interval
) {
168 cur_ep_offset
+ ep_interval
-
170 tmp_interval
= interval
;
173 cur_offset
+ interval
- cur_ep_offset
;
174 tmp_interval
= ep_interval
;
176 if (tmp_offset
% tmp_interval
== 0) {
177 tmp_bw_required
+= cur_sch_ep
->bw_cost
;
183 bw_required
[j
] = tmp_bw_required
;
185 final_bw_required
= SS_BW_BOUND
;
186 for (j
= 0; j
<= mult
; j
++) {
187 if (bw_required
[j
] < final_bw_required
) {
188 final_bw_required
= bw_required
[j
];
191 final_bw_required
+= bw_required_per_repeat
;
193 return final_bw_required
;
196 int count_hs_bw(int ep_type
, int maxp
, int interval
, int offset
, int td_size
)
200 struct sch_ep
*cur_sch_ep
;
205 int cur_tt_isoc_interval
; /* for isoc tt check */
208 for (i
= 0; i
< MAX_EP_NUM
; i
++) {
210 cur_sch_ep
= (struct sch_ep
*)hs_eps
[i
];
211 if (cur_sch_ep
== NULL
) {
214 ep_offset
= cur_sch_ep
->offset
;
215 ep_interval
= cur_sch_ep
->interval
;
217 if (cur_sch_ep
->isTT
&& cur_sch_ep
->ep_type
== USB_EP_ISOC
) {
218 cur_tt_isoc_interval
= ep_interval
<< 3;
219 if (ep_interval
>= interval
) {
220 tmp_offset
= ep_offset
+ cur_tt_isoc_interval
- offset
;
221 tmp_interval
= interval
;
223 tmp_offset
= offset
+ interval
- ep_offset
;
224 tmp_interval
= cur_tt_isoc_interval
;
226 if (cur_sch_ep
->is_in
) {
227 if ((tmp_offset
% tmp_interval
>= 2)
228 && (tmp_offset
% tmp_interval
<= cur_sch_ep
->cs_count
)) {
232 if (tmp_offset
% tmp_interval
<= cur_sch_ep
->cs_count
) {
237 if (ep_interval
>= interval
) {
238 tmp_offset
= ep_offset
+ ep_interval
- offset
;
239 tmp_interval
= interval
;
241 tmp_offset
= offset
+ interval
- ep_offset
;
242 tmp_interval
= ep_interval
;
244 if (tmp_offset
% tmp_interval
== 0) {
245 bw_required
+= cur_sch_ep
->bw_cost
;
249 bw_required
+= td_size
;
/*
 * count_tt_isoc_bw() - worst-case HS bus bandwidth across the
 * microframes a new full-speed isoc endpoint behind a TT would occupy,
 * if scheduled at @offset (microframes) with @interval (frames).
 *
 * NOTE(review): many original lines are missing from this copy of the
 * file (each gap is marked below); the surviving code is left exactly
 * as found — do not compile this block as-is.
 */
int count_tt_isoc_bw(int is_in, int maxp, int interval, int offset, int td_size)
/* [original lines 254-255 missing: opening brace and first declarations] */
	int s_frame, s_mframe, cur_mframe;	/* start frame / start and current microframe */
	int bw_required, max_bw;
	/* [original lines 258-260 missing: more declarations, presumably
	 * ss_cs_count, cs_mframe, ep_offset/ep_interval, tmp_offset/tmp_interval] */
	struct sch_ep *cur_sch_ep;
	/* [original lines 262-263 missing] */
	int tt_isoc_interval;	/* for isoc tt check */
	int cur_tt_isoc_interval;	/* for isoc tt check */
	/* [original lines 266-270 missing] */
	tt_isoc_interval = interval << 3;	/* frame to mframe */
	/* [original lines 272-274 missing] */
	s_frame = offset / 8;
	s_mframe = offset % 8;
	ss_cs_count = (maxp + (188 - 1)) / 188;	/* 188 bytes max per complete-split */
	/* [original line 278 missing] */
	cs_mframe = offset % 8 + 2 + ss_cs_count;	/* last uframe a complete-split could land in */
	/* [original lines 280-281 missing: presumably 'if (cs_mframe <= 6)' and its body] */
	else if (cs_mframe == 7)
	/* [original line 283 missing: body of the '== 7' case] */
	else if (cs_mframe > 8)
	/* [original lines 285-290 missing: body of the '> 8' case and the
	 * initialization of i/max_bw before the loop below] */
	for (cur_mframe = offset + i; i < ss_cs_count; cur_mframe++, i++) {
		/* [original line 292 missing: presumably 'bw_required = 0;'] */
		for (j = 0; j < MAX_EP_NUM; j++) {
			cur_sch_ep = (struct sch_ep *)hs_eps[j];
			if (cur_sch_ep == NULL) {
			/* [original lines 296-297 missing: presumably 'continue;' and '}'] */
			ep_offset = cur_sch_ep->offset;
			ep_interval = cur_sch_ep->interval;
			if (cur_sch_ep->isTT && cur_sch_ep->ep_type == USB_EP_ISOC) {
				/* [original line 301 missing] */
				/* check if mframe offset overlap */
				/* if overlap, add 188 to the bw */
				cur_tt_isoc_interval = ep_interval << 3;
				if (cur_tt_isoc_interval >= tt_isoc_interval) {
					/* [original line 306 missing: presumably 'tmp_offset ='] */
					(ep_offset + cur_tt_isoc_interval) - cur_mframe;
					tmp_interval = tt_isoc_interval;
				/* [original line 309 missing: presumably '} else {'] */
					tmp_offset = (cur_mframe + tt_isoc_interval) - ep_offset;
					tmp_interval = cur_tt_isoc_interval;
				/* [original line 312 missing: presumably '}'] */
				if (cur_sch_ep->is_in) {
					if ((tmp_offset % tmp_interval >= 2)
						&& (tmp_offset % tmp_interval <=
						cur_sch_ep->cs_count)) {
					/* [original lines 317-319 missing: presumably
					 * 'bw_required += 188;' and braces, per the
					 * comment above] */
					if (tmp_offset % tmp_interval <= cur_sch_ep->cs_count) {
					/* [original lines 321-324 missing: presumably
					 * 'bw_required += 188;' and braces] */
			} else if (cur_sch_ep->ep_type == USB_EP_INT
				|| cur_sch_ep->ep_type == USB_EP_ISOC) {
				/* check if mframe */
				if (ep_interval >= tt_isoc_interval) {
					tmp_offset = (ep_offset + ep_interval) - cur_mframe;
					tmp_interval = tt_isoc_interval;
				/* [original line 331 missing: presumably '} else {'] */
					tmp_offset = (cur_mframe + tt_isoc_interval) - ep_offset;
					tmp_interval = ep_interval;
				/* [original line 334 missing: presumably '}'] */
				if (tmp_offset % tmp_interval == 0) {
					bw_required += cur_sch_ep->bw_cost;
				/* [original lines 337-340 missing: closing braces] */
		if (bw_required > max_bw) {
			max_bw = bw_required;
		/* [original lines from 343 on missing: closing braces and the
		 * function's return — presumably max_bw plus td_size] */
348 int count_tt_intr_bw(int interval
, int frame_offset
)
350 /* check all eps in tt_intr_eps */
357 struct sch_ep
*cur_sch_ep
;
361 for (i
= 0; i
< MAX_EP_NUM
; i
++) {
362 cur_sch_ep
= (struct sch_ep
*)tt_intr_eps
[i
];
363 if (cur_sch_ep
== NULL
) {
366 ep_offset
= cur_sch_ep
->offset
;
367 ep_interval
= cur_sch_ep
->interval
;
368 if (ep_interval
>= interval
) {
369 tmp_offset
= ep_offset
+ ep_interval
- frame_offset
;
370 tmp_interval
= interval
;
372 tmp_offset
= frame_offset
+ interval
- ep_offset
;
373 tmp_interval
= ep_interval
;
376 if (tmp_offset
% tmp_interval
== 0) {
383 struct sch_ep
*mtk_xhci_scheduler_remove_ep(int dev_speed
, int is_in
, int isTT
, int ep_type
,
387 struct sch_ep
**ep_array
;
388 struct sch_ep
*cur_ep
;
390 if (is_in
&& dev_speed
== USB_SPEED_SUPER
) {
391 ep_array
= (struct sch_ep
**)ss_in_eps
;
392 } else if (dev_speed
== USB_SPEED_SUPER
) {
393 ep_array
= (struct sch_ep
**)ss_out_eps
;
394 } else if (dev_speed
== USB_SPEED_HIGH
|| (isTT
&& ep_type
== USB_EP_ISOC
)) {
395 ep_array
= (struct sch_ep
**)hs_eps
;
397 ep_array
= (struct sch_ep
**)tt_intr_eps
;
399 for (i
= 0; i
< MAX_EP_NUM
; i
++) {
400 cur_ep
= (struct sch_ep
*)ep_array
[i
];
401 if (cur_ep
!= NULL
&& cur_ep
->ep
== ep
) {
409 int mtk_xhci_scheduler_add_ep(int dev_speed
, int is_in
, int isTT
, int ep_type
, int maxp
,
410 int interval
, int burst
, int mult
, mtk_u32
*ep
, mtk_u32
*ep_ctx
,
411 struct sch_ep
*sch_ep
)
414 mtk_u32 bCsCount
= 0;
419 struct mtk_xhci_ep_ctx
*temp_ep_ctx
;
421 int mframe_idx
, frame_idx
;
423 int cur_bw
, best_bw
, best_bw_idx
, repeat
, max_repeat
, best_bw_repeat
;
424 int cur_offset
, cs_mframe
;
430 "add_ep parameters, dev_speed %d, is_in %d, isTT %d, ep_type %d, maxp %d, interval %d, burst %d, mult %d, ep 0x%p, ep_ctx 0x%p, sch_ep 0x%p\n",
431 dev_speed
, is_in
, isTT
, ep_type
, maxp
, interval
, burst
, mult
, ep
,
433 if (isTT
&& ep_type
== USB_EP_INT
434 && ((dev_speed
== USB_SPEED_LOW
) || (dev_speed
== USB_SPEED_FULL
))) {
435 frame_interval
= interval
>> 3;
436 for (frame_idx
= 0; frame_idx
< frame_interval
; frame_idx
++) {
437 printk(KERN_ERR
"check tt_intr_bw interval %d, frame_idx %d\n",
438 frame_interval
, frame_idx
);
439 if (count_tt_intr_bw(frame_interval
, frame_idx
) == SCH_SUCCESS
) {
440 printk(KERN_ERR
"check OK............\n");
441 bOffset
= frame_idx
<< 3;
447 (dev_speed
, is_in
, isTT
, ep_type
, maxp
, frame_interval
, burst
,
448 mult
, bOffset
, bRepeat
, bPkts
, bCsCount
, bBm
, maxp
, ep
,
449 sch_ep
) == SCH_FAIL
) {
456 } else if (isTT
&& ep_type
== USB_EP_ISOC
) {
457 best_bw
= HS_BW_BOUND
;
462 frame_interval
= interval
>> 3;
463 for (frame_idx
= 0; frame_idx
< frame_interval
&& !break_out
; frame_idx
++) {
464 for (mframe_idx
= 0; mframe_idx
< 8; mframe_idx
++) {
465 cur_offset
= (frame_idx
* 8) + mframe_idx
;
467 count_tt_isoc_bw(is_in
, maxp
, frame_interval
, cur_offset
,
469 if (cur_bw
> 0 && cur_bw
< best_bw
) {
470 best_bw_idx
= cur_offset
;
472 if (cur_bw
== td_size
|| cur_bw
< (HS_BW_BOUND
>> 1)) {
479 if (best_bw_idx
== -1) {
482 bOffset
= best_bw_idx
;
484 bCsCount
= maxp
+ (188 - 1) / 188;
486 cs_mframe
= bOffset
% 8 + 2 + bCsCount
;
489 else if (cs_mframe
== 7)
495 (dev_speed
, is_in
, isTT
, ep_type
, maxp
, interval
, burst
, mult
, bOffset
,
496 bRepeat
, bPkts
, bCsCount
, bBm
, bw_cost
, ep
, sch_ep
) == SCH_FAIL
) {
501 } else if ((dev_speed
== USB_SPEED_FULL
|| dev_speed
== USB_SPEED_LOW
)
502 && ep_type
== USB_EP_INT
) {
505 } else if (dev_speed
== USB_SPEED_FULL
&& ep_type
== USB_EP_ISOC
) {
508 } else if (dev_speed
== USB_SPEED_HIGH
&& (ep_type
== USB_EP_INT
|| ep_type
== USB_EP_ISOC
)) {
509 best_bw
= HS_BW_BOUND
;
512 td_size
= maxp
* (burst
+ 1);
513 for (cur_offset
= 0; cur_offset
< interval
; cur_offset
++) {
514 cur_bw
= count_hs_bw(ep_type
, maxp
, interval
, cur_offset
, td_size
);
515 if (cur_bw
> 0 && cur_bw
< best_bw
) {
516 best_bw_idx
= cur_offset
;
518 if (cur_bw
== td_size
|| cur_bw
< (HS_BW_BOUND
>> 1)) {
523 if (best_bw_idx
== -1) {
526 bOffset
= best_bw_idx
;
532 (dev_speed
, is_in
, isTT
, ep_type
, maxp
, interval
, burst
, mult
, bOffset
,
533 bRepeat
, bPkts
, bCsCount
, bBm
, bw_cost
, ep
, sch_ep
) == SCH_FAIL
) {
538 } else if (dev_speed
== USB_SPEED_SUPER
539 && (ep_type
== USB_EP_INT
|| ep_type
== USB_EP_ISOC
)) {
540 best_bw
= SS_BW_BOUND
;
543 td_size
= maxp
* (mult
+ 1) * (burst
+ 1);
547 max_repeat
= (interval
- 1) / (mult
+ 1);
550 for (frame_idx
= 0; (frame_idx
< interval
) && !break_out
; frame_idx
++) {
551 for (repeat
= max_repeat
; repeat
>= 0; repeat
--) {
553 count_ss_bw(is_in
, ep_type
, maxp
, interval
, burst
, mult
,
554 frame_idx
, repeat
, td_size
);
556 "count_ss_bw, frame_idx %d, repeat %d, td_size %d, result bw %d\n",
557 frame_idx
, repeat
, td_size
, cur_bw
);
558 if (cur_bw
> 0 && cur_bw
< best_bw
) {
559 best_bw_idx
= frame_idx
;
560 best_bw_repeat
= repeat
;
562 if (cur_bw
<= td_size
|| cur_bw
< (HS_BW_BOUND
>> 1)) {
569 printk(KERN_ERR
"final best idx %d, best repeat %d\n", best_bw_idx
, best_bw_repeat
);
570 if (best_bw_idx
== -1) {
573 bOffset
= best_bw_idx
;
575 bRepeat
= best_bw_repeat
;
577 bw_cost
= (burst
+ 1) * (mult
+ 1) * maxp
;
578 bPkts
= (burst
+ 1) * (mult
+ 1);
580 bw_cost
= (burst
+ 1) * maxp
;
584 (dev_speed
, is_in
, isTT
, ep_type
, maxp
, interval
, burst
, mult
, bOffset
,
585 bRepeat
, bPkts
, bCsCount
, bBm
, bw_cost
, ep
, sch_ep
) == SCH_FAIL
) {
594 if (ret
== SCH_SUCCESS
) {
595 temp_ep_ctx
= (struct mtk_xhci_ep_ctx
*)ep_ctx
;
596 temp_ep_ctx
->reserved
[0] |= (BPKTS(bPkts
) | BCSCOUNT(bCsCount
) | BBM(bBm
));
597 temp_ep_ctx
->reserved
[1] |= (BOFFSET(bOffset
) | BREPEAT(bRepeat
));
598 printk(KERN_ERR
"[DBG] BPKTS: %x, BCSCOUNT: %x, BBM: %x\n", (unsigned int)bPkts
,
599 (unsigned int)bCsCount
, (unsigned int)bBm
);
600 printk(KERN_ERR
"[DBG] BOFFSET: %x, BREPEAT: %x\n", (unsigned int)bOffset
,
601 (unsigned int)bRepeat
);