// SPDX-License-Identifier: (GPL-2.0+ OR MIT)
/*
 * Copyright (c) 2018 Microsemi Corporation
 */

#include <log.h>
#include <linux/bitops.h>
#include <linux/delay.h>
#include <linux/io.h>
#include "mscc_xfer.h"

#define QS_XTR_FLUSH_FLUSH		GENMASK(1, 0)
#define QS_INJ_CTRL_GAP_SIZE(x)		((x) << 21)
#define QS_INJ_CTRL_EOF			BIT(19)
#define QS_INJ_CTRL_SOF			BIT(18)
#define QS_INJ_CTRL_VLD_BYTES(x)	((x) << 16)

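/* Control words that can appear in-band in the extraction data stream */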
#define XTR_EOF_0	ntohl(0x80000000u)
#define XTR_EOF_1	ntohl(0x80000001u)
#define XTR_EOF_2	ntohl(0x80000002u)
#define XTR_EOF_3	ntohl(0x80000003u)
#define XTR_PRUNED	ntohl(0x80000004u)
#define XTR_ABORT	ntohl(0x80000005u)
#define XTR_ESCAPE	ntohl(0x80000006u)
#define XTR_NOT_READY	ntohl(0x80000007u)

#define BUF_CELL_SZ		60
#define XTR_VALID_BYTES(x)	(4 - ((x) & 3))

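/*
 * mscc_send - inject one frame through the CPU injection queue
 *
 * Signal start-of-frame, write the IFH (internal frame header) words followed
 * by the frame data, pad the frame up to the minimum buffer cell size, then
 * signal end-of-frame with the number of valid bytes in the last word and
 * append a dummy CRC word.
 */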
int mscc_send(void __iomem *regs, const unsigned long *mscc_qs_offset,
	      u32 *ifh, size_t ifh_len, u32 *buff, size_t buff_len)
{
	int i, count = (buff_len + 3) / 4, last = buff_len % 4;

	writel(QS_INJ_CTRL_GAP_SIZE(1) | QS_INJ_CTRL_SOF,
	       regs + mscc_qs_offset[MSCC_QS_INJ_CTRL]);

	for (i = 0; i < ifh_len; i++)
		writel(ifh[i], regs + mscc_qs_offset[MSCC_QS_INJ_WR]);

	for (i = 0; i < count; i++)
		writel(buff[i], regs + mscc_qs_offset[MSCC_QS_INJ_WR]);

	/* Add padding */
	while (i < (BUF_CELL_SZ / 4)) {
		writel(0, regs + mscc_qs_offset[MSCC_QS_INJ_WR]);
		i++;
	}

	/* Indicate EOF and valid bytes in last word */
	writel(QS_INJ_CTRL_GAP_SIZE(1) |
	       QS_INJ_CTRL_VLD_BYTES(buff_len < BUF_CELL_SZ ? 0 : last) |
	       QS_INJ_CTRL_EOF, regs + mscc_qs_offset[MSCC_QS_INJ_CTRL]);

	/* Add dummy CRC */
	writel(0, regs + mscc_qs_offset[MSCC_QS_INJ_WR]);

	return 0;
}

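/*
 * mscc_recv - extract one frame from CPU extraction group 0
 *
 * Return -EAGAIN when no frame is pending or when the frame was aborted or
 * pruned; otherwise copy the frame data (the IFH words are read and
 * discarded) into @rxbuf and return the number of bytes received.
 */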
int mscc_recv(void __iomem *regs, const unsigned long *mscc_qs_offset,
	      u32 *rxbuf, size_t ifh_len, bool byte_swap)
{
	u8 grp = 0; /* Recv everything on CPU group 0 */
	int i, byte_cnt = 0;
	bool eof_flag = false, pruned_flag = false, abort_flag = false;

	if (!(readl(regs + mscc_qs_offset[MSCC_QS_XTR_DATA_PRESENT]) &
	      BIT(grp)))
		return -EAGAIN;

	/* skip IFH */
	for (i = 0; i < ifh_len; i++)
		readl(regs + mscc_qs_offset[MSCC_QS_XTR_RD]);

	while (!eof_flag) {
		u32 val = readl(regs + mscc_qs_offset[MSCC_QS_XTR_RD]);
		u32 cmp = val;

		if (byte_swap)
			cmp = ntohl(val);

		switch (cmp) {
		case XTR_NOT_READY:
			debug("%d NOT_READY...?\n", byte_cnt);
			break;
		case XTR_ABORT:
			*rxbuf = readl(regs + mscc_qs_offset[MSCC_QS_XTR_RD]);
			abort_flag = true;
			eof_flag = true;
			debug("XTR_ABORT\n");
			break;
		case XTR_EOF_0:
		case XTR_EOF_1:
		case XTR_EOF_2:
		case XTR_EOF_3:
			byte_cnt += XTR_VALID_BYTES(val);
			*rxbuf = readl(regs + mscc_qs_offset[MSCC_QS_XTR_RD]);
			eof_flag = true;
			debug("EOF\n");
			break;
		case XTR_PRUNED:
			/* But get the last 4 bytes as well */
			eof_flag = true;
			pruned_flag = true;
			debug("PRUNED\n");
			/* fallthrough */
		case XTR_ESCAPE:
			*rxbuf = readl(regs + mscc_qs_offset[MSCC_QS_XTR_RD]);
			byte_cnt += 4;
			rxbuf++;
			debug("ESCAPED\n");
			break;
		default:
			*rxbuf = val;
			byte_cnt += 4;
			rxbuf++;
		}
	}

	if (abort_flag || pruned_flag || !eof_flag) {
		debug("Discarded frame: abort:%d pruned:%d eof:%d\n",
		      abort_flag, pruned_flag, eof_flag);
		return -EAGAIN;
	}

	return byte_cnt;
}

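/*
 * mscc_flush - flush all CPU extraction queues
 *
 * Assert the flush bits, wait 1 ms for the queues to drain, then return
 * them to normal operation.
 */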
void mscc_flush(void __iomem *regs, const unsigned long *mscc_qs_offset)
{
	/* All Queues flush */
	setbits_le32(regs + mscc_qs_offset[MSCC_QS_XTR_FLUSH],
		     QS_XTR_FLUSH_FLUSH);

	/* Allow to drain */
	mdelay(1);

	/* All Queues normal */
	clrbits_le32(regs + mscc_qs_offset[MSCC_QS_XTR_FLUSH],
		     QS_XTR_FLUSH_FLUSH);
}