// SPDX-License-Identifier: (GPL-2.0+ OR MIT)
/*
 * Copyright (c) 2018 Microsemi Corporation
 */

#include <log.h>
#include <linux/delay.h>
#include <linux/io.h>
#include "mscc_xfer.h"

#define QS_XTR_FLUSH_FLUSH		GENMASK(1, 0)
#define QS_INJ_CTRL_GAP_SIZE(x)		((x) << 21)
#define QS_INJ_CTRL_EOF			BIT(19)
#define QS_INJ_CTRL_SOF			BIT(18)
#define QS_INJ_CTRL_VLD_BYTES(x)	((x) << 16)

#define XTR_EOF_0     ntohl(0x80000000u)
#define XTR_EOF_1     ntohl(0x80000001u)
#define XTR_EOF_2     ntohl(0x80000002u)
#define XTR_EOF_3     ntohl(0x80000003u)
#define XTR_PRUNED    ntohl(0x80000004u)
#define XTR_ABORT     ntohl(0x80000005u)
#define XTR_ESCAPE    ntohl(0x80000006u)
#define XTR_NOT_READY ntohl(0x80000007u)

#define BUF_CELL_SZ		60
#define XTR_VALID_BYTES(x)	(4 - ((x) & 3))
29int mscc_send(void __iomem *regs, const unsigned long *mscc_qs_offset,
30 u32 *ifh, size_t ifh_len, u32 *buff, size_t buff_len)
31{
32 int i, count = (buff_len + 3) / 4, last = buff_len % 4;
33
34 writel(QS_INJ_CTRL_GAP_SIZE(1) | QS_INJ_CTRL_SOF,
35 regs + mscc_qs_offset[MSCC_QS_INJ_CTRL]);
36
37 for (i = 0; i < ifh_len; i++)
38 writel(ifh[i], regs + mscc_qs_offset[MSCC_QS_INJ_WR]);
39
40 for (i = 0; i < count; i++)
41 writel(buff[i], regs + mscc_qs_offset[MSCC_QS_INJ_WR]);
42
43 /* Add padding */
44 while (i < (BUF_CELL_SZ / 4)) {
45 writel(0, regs + mscc_qs_offset[MSCC_QS_INJ_WR]);
46 i++;
47 }
48
49 /* Indicate EOF and valid bytes in last word */
50 writel(QS_INJ_CTRL_GAP_SIZE(1) |
51 QS_INJ_CTRL_VLD_BYTES(buff_len < BUF_CELL_SZ ? 0 : last) |
52 QS_INJ_CTRL_EOF, regs + mscc_qs_offset[MSCC_QS_INJ_CTRL]);
53
54 /* Add dummy CRC */
55 writel(0, regs + mscc_qs_offset[MSCC_QS_INJ_WR]);
56
57 return 0;
58}
59
/*
 * Pull one frame from the CPU extraction queue (group 0).
 *
 * @regs:           base of the switch register block
 * @mscc_qs_offset: per-SoC table of QS register offsets (MSCC_QS_* indices)
 * @rxbuf:          destination buffer for payload words; caller sizes it
 *                  (no bound is checked here — assumed large enough for a
 *                  full frame, TODO confirm against callers)
 * @ifh_len:        number of 32-bit extraction-header words to discard
 * @byte_swap:      if true, control tokens arrive byte-swapped and are
 *                  normalized with ntohl() before comparison
 *
 * Returns the number of payload bytes received, -EAGAIN when no frame is
 * pending or when the frame was aborted/pruned by the hardware.
 */
int mscc_recv(void __iomem *regs, const unsigned long *mscc_qs_offset,
	      u32 *rxbuf, size_t ifh_len, bool byte_swap)
{
	u8 grp = 0;	/* Recv everything on CPU group 0 */
	int i, byte_cnt = 0;
	bool eof_flag = false, pruned_flag = false, abort_flag = false;

	/* Nothing queued for our extraction group yet */
	if (!(readl(regs + mscc_qs_offset[MSCC_QS_XTR_DATA_PRESENT]) &
	      BIT(grp)))
		return -EAGAIN;

	/* skip IFH */
	for (i = 0; i < ifh_len; i++)
		readl(regs + mscc_qs_offset[MSCC_QS_XTR_RD]);

	/* Drain words until the hardware marks end-of-frame */
	while (!eof_flag) {
		u32 val = readl(regs + mscc_qs_offset[MSCC_QS_XTR_RD]);
		u32 cmp = val;

		/* Control tokens are compared after optional byte swap */
		if (byte_swap)
			cmp = ntohl(val);

		switch (cmp) {
		case XTR_NOT_READY:
			/* FIFO underrun: data not ready yet, just re-read */
			debug("%d NOT_READY...?\n", byte_cnt);
			break;
		case XTR_ABORT:
			/* Frame aborted; consume the trailing word too */
			*rxbuf = readl(regs + mscc_qs_offset[MSCC_QS_XTR_RD]);
			abort_flag = true;
			eof_flag = true;
			debug("XTR_ABORT\n");
			break;
		case XTR_EOF_0:
		case XTR_EOF_1:
		case XTR_EOF_2:
		case XTR_EOF_3:
			/*
			 * EOF token's low bits encode how many bytes of
			 * the final data word (read next) are valid.
			 */
			byte_cnt += XTR_VALID_BYTES(val);
			*rxbuf = readl(regs + mscc_qs_offset[MSCC_QS_XTR_RD]);
			eof_flag = true;
			debug("EOF\n");
			break;
		case XTR_PRUNED:
			/* But get the last 4 bytes as well */
			eof_flag = true;
			pruned_flag = true;
			debug("PRUNED\n");
			/* fallthrough */
		case XTR_ESCAPE:
			/* Escaped token: next word is literal frame data */
			*rxbuf = readl(regs + mscc_qs_offset[MSCC_QS_XTR_RD]);
			byte_cnt += 4;
			rxbuf++;
			debug("ESCAPED\n");
			break;
		default:
			/* Plain payload word */
			*rxbuf = val;
			byte_cnt += 4;
			rxbuf++;
		}
	}

	/* Aborted or pruned frames are dropped; caller may retry */
	if (abort_flag || pruned_flag || !eof_flag) {
		debug("Discarded frame: abort:%d pruned:%d eof:%d\n",
		      abort_flag, pruned_flag, eof_flag);
		return -EAGAIN;
	}

	return byte_cnt;
}
128
129void mscc_flush(void __iomem *regs, const unsigned long *mscc_qs_offset)
130{
131 /* All Queues flush */
132 setbits_le32(regs + mscc_qs_offset[MSCC_QS_XTR_FLUSH],
133 QS_XTR_FLUSH_FLUSH);
134
135 /* Allow to drain */
136 mdelay(1);
137
138 /* All Queues normal */
139 clrbits_le32(regs + mscc_qs_offset[MSCC_QS_XTR_FLUSH],
140 QS_XTR_FLUSH_FLUSH);
141}