/* SPDX-License-Identifier: GPL-2.0+ */
/*
 * sh_eth.h - Driver for Renesas SuperH ethernet controller.
 *
 * Copyright (C) 2008 - 2012 Renesas Solutions Corp.
 * Copyright (c) 2008 - 2012 Nobuhiro Iwamatsu
 * Copyright (c) 2007 Carlos Munoz <carlos@kenati.com>
 */

#include <netdev.h>
#include <asm/types.h>

#define SHETHER_NAME "sh_eth"

#if defined(CONFIG_SH)
/* Malloc returns addresses in the P1 area (cacheable). However, we need to
   use the P2 area (non-cacheable) */
#define ADDR_TO_P2(addr)	((((uintptr_t)(addr) & ~0xe0000000) | 0xa0000000))

/* The ethernet controller needs to use physical addresses */
#define ADDR_TO_PHY(addr)	((uintptr_t)(addr) & ~0xe0000000)
#elif defined(CONFIG_ARM)
#ifndef inl
#define inl	readl
#define outl	writel
#endif
#define ADDR_TO_PHY(addr)	((uintptr_t)(addr))
#define ADDR_TO_P2(addr)	(addr)
#endif /* defined(CONFIG_SH) */
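
/*
 * Illustrative sketch, not part of the original header: on SH the driver
 * would touch a malloc()ed descriptor ring through its uncached P2 alias
 * and hand the corresponding physical address to the controller, e.g.
 *
 *	void *ring = malloc(ring_size);
 *	struct tx_desc_s *tx = (struct tx_desc_s *)ADDR_TO_P2(ring);
 *	u32 phys = ADDR_TO_PHY(ring);
 *
 * Here ring_size is a placeholder; the real allocation and alignment are
 * handled in sh_eth.c.
 */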

/* The base alignment size is 16 bytes */
#ifndef CFG_SH_ETHER_ALIGNE_SIZE
#define CFG_SH_ETHER_ALIGNE_SIZE 16
#endif

/* Number of supported ports */
#define MAX_PORT_NUM	2

/* Buffers must be big enough to hold the largest ethernet frame. Also, rx
   buffers must be a multiple of 32 bytes */
#define MAX_BUF_SIZE	(48 * 32)

/* The number of tx descriptors must be large enough to point to 5 or more
   frames. If each frame used 2 descriptors, at least 10 descriptors would
   be needed. We use one descriptor per frame */
#define NUM_TX_DESC	8

/* The size of the tx descriptor is determined by how much padding is used.
   4, 20, or 52 bytes of padding can be used */
#define TX_DESC_PADDING	(CFG_SH_ETHER_ALIGNE_SIZE - 12)
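
/*
 * Worked example: TX_DESC_PADDING is the alignment size minus the 12 bytes
 * taken by td0/td1/td2, so alignment sizes of 16, 32 and 64 bytes give 4,
 * 20 and 52 bytes of padding respectively.
 */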

/* Tx descriptor. The padding keeps each descriptor aligned to the cache
   line size */
struct tx_desc_s {
	volatile u32 td0;
	u32 td1;
	u32 td2;		/* Buffer start */
	u8 padding[TX_DESC_PADDING];	/* aligned cache line size */
};
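
/*
 * Illustrative check, not present in the original code: the three 32-bit
 * words plus the padding make each descriptor exactly one alignment unit,
 * which could be asserted at compile time as
 *
 *	_Static_assert(sizeof(struct tx_desc_s) == CFG_SH_ETHER_ALIGNE_SIZE,
 *		       "tx descriptor must match the configured alignment");
 */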

/* There is no limit on the number of rx descriptors */
#define NUM_RX_DESC	8

/* The size of the rx descriptor is determined by how much padding is used.
   4, 20, or 52 bytes of padding can be used */
#define RX_DESC_PADDING	(CFG_SH_ETHER_ALIGNE_SIZE - 12)
/* aligned cache line size */
#define RX_BUF_ALIGNE_SIZE	(CFG_SH_ETHER_ALIGNE_SIZE > 32 ? 64 : 32)

/* Rx descriptor. The padding keeps each descriptor aligned to the cache
   line size */
struct rx_desc_s {
	volatile u32 rd0;
	volatile u32 rd1;
	u32 rd2;		/* Buffer start */
	u8 padding[RX_DESC_PADDING];	/* aligned cache line size */
};
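
/*
 * Field summary (descriptive note, not authoritative; see sh_eth.c for the
 * actual use): rd0 holds the RD_* status bits defined further down, rd1
 * typically carries the buffer size in its upper 16 bits and the received
 * frame length in its lower 16 bits, and rd2 holds the physical address of
 * the receive buffer.
 */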

struct sh_eth_info {
	struct tx_desc_s *tx_desc_alloc;
	struct tx_desc_s *tx_desc_base;
	struct tx_desc_s *tx_desc_cur;
	struct rx_desc_s *rx_desc_alloc;
	struct rx_desc_s *rx_desc_base;
	struct rx_desc_s *rx_desc_cur;
	u8 *rx_buf_alloc;
	u8 *rx_buf_base;
	u8 mac_addr[6];
	u8 phy_addr;
	struct eth_device *dev;
	struct phy_device *phydev;
	void __iomem *iobase;
};

struct sh_eth_dev {
	int port;
	struct sh_eth_info port_info[MAX_PORT_NUM];
};
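
/*
 * Minimal usage sketch (hypothetical helper, not part of this header):
 * driver code usually operates on the state of the currently selected
 * port, e.g.
 *
 *	static inline struct sh_eth_info *sh_eth_cur_port(struct sh_eth_dev *eth)
 *	{
 *		return &eth->port_info[eth->port];
 *	}
 */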

/* from linux/drivers/net/ethernet/renesas/sh_eth.h */
enum {
	/* E-DMAC registers */
	EDSR = 0,
	EDMR,
	EDTRR,
	EDRRR,
	EESR,
	EESIPR,
	TDLAR,
	TDFAR,
	TDFXR,
	TDFFR,
	RDLAR,
	RDFAR,
	RDFXR,
	RDFFR,
	TRSCER,
	RMFCR,
	TFTR,
	FDR,
	RMCR,
	EDOCR,
	TFUCR,
	RFOCR,
	FCFTR,
	RPADIR,
	TRIMD,
	RBWAR,
	TBRAR,

	/* Ether registers */
	ECMR,
	ECSR,
	ECSIPR,
	PIR,
	PSR,
	RDMLR,
	PIPR,
	RFLR,
	IPGR,
	APR,
	MPR,
	PFTCR,
	PFRCR,
	RFCR,
	RFCF,
	TPAUSER,
	TPAUSECR,
	BCFR,
	BCFRR,
	GECMR,
	BCULR,
	MAHR,
	MALR,
	TROCR,
	CDCR,
	LCCR,
	CNDCR,
	CEFCR,
	FRECR,
	TSFRCR,
	TLFRCR,
	CERCR,
	CEECR,
	RMIIMR, /* R8A7790 */
	MAFCR,
	RTRATE,
	CSMR,
	RMII_MII,

	/* This value must come last. */
	SH_ETH_MAX_REGISTER_OFFSET,
};

static const u16 sh_eth_offset_gigabit[SH_ETH_MAX_REGISTER_OFFSET] = {
	[EDSR] = 0x0000,
	[EDMR] = 0x0400,
	[EDTRR] = 0x0408,
	[EDRRR] = 0x0410,
	[EESR] = 0x0428,
	[EESIPR] = 0x0430,
	[TDLAR] = 0x0010,
	[TDFAR] = 0x0014,
	[TDFXR] = 0x0018,
	[TDFFR] = 0x001c,
	[RDLAR] = 0x0030,
	[RDFAR] = 0x0034,
	[RDFXR] = 0x0038,
	[RDFFR] = 0x003c,
	[TRSCER] = 0x0438,
	[RMFCR] = 0x0440,
	[TFTR] = 0x0448,
	[FDR] = 0x0450,
	[RMCR] = 0x0458,
	[RPADIR] = 0x0460,
	[FCFTR] = 0x0468,
	[CSMR] = 0x04E4,

	[ECMR] = 0x0500,
	[ECSR] = 0x0510,
	[ECSIPR] = 0x0518,
	[PIR] = 0x0520,
	[PSR] = 0x0528,
	[PIPR] = 0x052c,
	[RFLR] = 0x0508,
	[APR] = 0x0554,
	[MPR] = 0x0558,
	[PFTCR] = 0x055c,
	[PFRCR] = 0x0560,
	[TPAUSER] = 0x0564,
	[GECMR] = 0x05b0,
	[BCULR] = 0x05b4,
	[MAHR] = 0x05c0,
	[MALR] = 0x05c8,
	[TROCR] = 0x0700,
	[CDCR] = 0x0708,
	[LCCR] = 0x0710,
	[CEFCR] = 0x0740,
	[FRECR] = 0x0748,
	[TSFRCR] = 0x0750,
	[TLFRCR] = 0x0758,
	[RFCR] = 0x0760,
	[CERCR] = 0x0768,
	[CEECR] = 0x0770,
	[MAFCR] = 0x0778,
	[RMII_MII] = 0x0790,
};

static const u16 sh_eth_offset_rz[SH_ETH_MAX_REGISTER_OFFSET] = {
	[EDSR] = 0x0000,
	[EDMR] = 0x0400,
	[EDTRR] = 0x0408,
	[EDRRR] = 0x0410,
	[EESR] = 0x0428,
	[EESIPR] = 0x0430,
	[TDLAR] = 0x0010,
	[TDFAR] = 0x0014,
	[TDFXR] = 0x0018,
	[TDFFR] = 0x001c,
	[RDLAR] = 0x0030,
	[RDFAR] = 0x0034,
	[RDFXR] = 0x0038,
	[RDFFR] = 0x003c,
	[TRSCER] = 0x0438,
	[RMFCR] = 0x0440,
	[TFTR] = 0x0448,
	[FDR] = 0x0450,
	[RMCR] = 0x0458,
	[RPADIR] = 0x0460,
	[FCFTR] = 0x0468,
	[CSMR] = 0x04E4,

	[ECMR] = 0x0500,
	[ECSR] = 0x0510,
	[ECSIPR] = 0x0518,
	[PIR] = 0x0520,
	[PSR] = 0x0528,
	[PIPR] = 0x052c,
	[RFLR] = 0x0508,
	[APR] = 0x0554,
	[MPR] = 0x0558,
	[PFTCR] = 0x055c,
	[PFRCR] = 0x0560,
	[TPAUSER] = 0x0564,
	[GECMR] = 0x05b0,
	[BCULR] = 0x05b4,
	[MAHR] = 0x05c0,
	[MALR] = 0x05c8,
	[TROCR] = 0x0700,
	[CDCR] = 0x0708,
	[LCCR] = 0x0710,
	[CEFCR] = 0x0740,
	[FRECR] = 0x0748,
	[TSFRCR] = 0x0750,
	[TLFRCR] = 0x0758,
	[RFCR] = 0x0760,
	[CERCR] = 0x0768,
	[CEECR] = 0x0770,
	[MAFCR] = 0x0778,
	[RMII_MII] = 0x0790,
};

static const u16 sh_eth_offset_fast_sh4[SH_ETH_MAX_REGISTER_OFFSET] = {
	[ECMR] = 0x0100,
	[RFLR] = 0x0108,
	[ECSR] = 0x0110,
	[ECSIPR] = 0x0118,
	[PIR] = 0x0120,
	[PSR] = 0x0128,
	[RDMLR] = 0x0140,
	[IPGR] = 0x0150,
	[APR] = 0x0154,
	[MPR] = 0x0158,
	[TPAUSER] = 0x0164,
	[RFCF] = 0x0160,
	[TPAUSECR] = 0x0168,
	[BCFRR] = 0x016c,
	[MAHR] = 0x01c0,
	[MALR] = 0x01c8,
	[TROCR] = 0x01d0,
	[CDCR] = 0x01d4,
	[LCCR] = 0x01d8,
	[CNDCR] = 0x01dc,
	[CEFCR] = 0x01e4,
	[FRECR] = 0x01e8,
	[TSFRCR] = 0x01ec,
	[TLFRCR] = 0x01f0,
	[RFCR] = 0x01f4,
	[MAFCR] = 0x01f8,
	[RTRATE] = 0x01fc,

	[EDMR] = 0x0000,
	[EDTRR] = 0x0008,
	[EDRRR] = 0x0010,
	[TDLAR] = 0x0018,
	[RDLAR] = 0x0020,
	[EESR] = 0x0028,
	[EESIPR] = 0x0030,
	[TRSCER] = 0x0038,
	[RMFCR] = 0x0040,
	[TFTR] = 0x0048,
	[FDR] = 0x0050,
	[RMCR] = 0x0058,
	[TFUCR] = 0x0064,
	[RFOCR] = 0x0068,
	[RMIIMR] = 0x006C,
	[FCFTR] = 0x0070,
	[RPADIR] = 0x0078,
	[TRIMD] = 0x007c,
	[RBWAR] = 0x00c8,
	[RDFAR] = 0x00cc,
	[TBRAR] = 0x00d4,
	[TDFAR] = 0x00d8,
};

/* Register Address */
#if defined(CONFIG_CPU_SH7763) || defined(CONFIG_CPU_SH7734)
#define SH_ETH_TYPE_GETHER
#define BASE_IO_ADDR	0xfee00000
#elif defined(CONFIG_CPU_SH7757) || \
	defined(CONFIG_CPU_SH7752) || \
	defined(CONFIG_CPU_SH7753)
#if defined(CONFIG_SH_ETHER_USE_GETHER)
#define SH_ETH_TYPE_GETHER
#define BASE_IO_ADDR	0xfee00000
#else
#define SH_ETH_TYPE_ETHER
#define BASE_IO_ADDR	0xfef00000
#endif
#elif defined(CONFIG_R8A7740)
#define SH_ETH_TYPE_GETHER
#define BASE_IO_ADDR	0xE9A00000
#elif defined(CONFIG_RCAR_GEN2)
#define SH_ETH_TYPE_ETHER
#define BASE_IO_ADDR	0xEE700200
#elif defined(CONFIG_R7S72100)
#define SH_ETH_TYPE_RZ
#define BASE_IO_ADDR	0xE8203000
#elif defined(CONFIG_R8A77980)
#define SH_ETH_TYPE_GETHER
#define BASE_IO_ADDR	0xE7400000
#endif

/*
 * Register bits
 * Copied from the Linux driver source code
 */
#if defined(SH_ETH_TYPE_GETHER) || defined(SH_ETH_TYPE_RZ)
/* EDSR */
enum EDSR_BIT {
	EDSR_ENT = 0x01, EDSR_ENR = 0x02,
};
#define EDSR_ENALL (EDSR_ENT|EDSR_ENR)
#endif

/* EDMR */
enum DMAC_M_BIT {
	EDMR_NBST = 0x80, /* DMA transfer burst mode */
	EDMR_DL1 = 0x20, EDMR_DL0 = 0x10,
#if defined(SH_ETH_TYPE_GETHER) || defined(SH_ETH_TYPE_RZ)
	EDMR_SRST = 0x03, /* Receive/Send reset */
	EMDR_DESC_R = 0x30, /* Descriptor reserve size */
	EDMR_EL = 0x40, /* Little endian */
#elif defined(SH_ETH_TYPE_ETHER)
	EDMR_SRST = 0x01,
	EMDR_DESC_R = 0x30, /* Descriptor reserve size */
	EDMR_EL = 0x40, /* Little endian */
#else
	EDMR_SRST = 0x01,
#endif
};

#if CFG_SH_ETHER_ALIGNE_SIZE == 64
# define EMDR_DESC EDMR_DL1
#elif CFG_SH_ETHER_ALIGNE_SIZE == 32
# define EMDR_DESC EDMR_DL0
#elif CFG_SH_ETHER_ALIGNE_SIZE == 16 /* Default */
# define EMDR_DESC 0
#endif
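
/*
 * Hedged note: EMDR_DESC selects the EDMR descriptor-length bits that match
 * CFG_SH_ETHER_ALIGNE_SIZE. A sketch of how the driver would apply it when
 * programming the E-DMAC (the exact init sequence lives in sh_eth.c):
 *
 *	sh_eth_write(port_info, EMDR_DESC, EDMR);
 */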

/* RFLR */
#define RFLR_RFL_MIN 0x05EE /* Recv Frame length 1518 bytes */

/* EDTRR */
enum DMAC_T_BIT {
#if defined(SH_ETH_TYPE_GETHER) || defined(SH_ETH_TYPE_RZ)
	EDTRR_TRNS = 0x03,
#else
	EDTRR_TRNS = 0x01,
#endif
};

/* GECMR */
enum GECMR_BIT {
#if defined(CONFIG_CPU_SH7757) || \
	defined(CONFIG_CPU_SH7752) || \
	defined(CONFIG_CPU_SH7753)
	GECMR_1000B = 0x20, GECMR_100B = 0x01, GECMR_10B = 0x00,
#else
	GECMR_1000B = 0x01, GECMR_100B = 0x04, GECMR_10B = 0x00,
#endif
};

/* EDRRR */
enum EDRRR_R_BIT {
	EDRRR_R = 0x01,
};

/* TPAUSER */
enum TPAUSER_BIT {
	TPAUSER_TPAUSE = 0x0000ffff,
	TPAUSER_UNLIMITED = 0,
};

/* BCFR */
enum BCFR_BIT {
	BCFR_RPAUSE = 0x0000ffff,
	BCFR_UNLIMITED = 0,
};

/* PIR */
enum PIR_BIT {
	PIR_MDI = 0x08, PIR_MDO = 0x04, PIR_MMD = 0x02, PIR_MDC = 0x01,
};

/* PSR */
enum PHY_STATUS_BIT { PHY_ST_LINK = 0x01, };

/* EESR */
enum EESR_BIT {
#if defined(SH_ETH_TYPE_ETHER)
	EESR_TWB = 0x40000000,
#else
	EESR_TWB = 0xC0000000,
	EESR_TC1 = 0x20000000,
	EESR_TUC = 0x10000000,
	EESR_ROC = 0x80000000,
#endif
	EESR_TABT = 0x04000000,
	EESR_RABT = 0x02000000, EESR_RFRMER = 0x01000000,
#if defined(SH_ETH_TYPE_ETHER)
	EESR_ADE = 0x00800000,
#endif
	EESR_ECI = 0x00400000,
	EESR_FTC = 0x00200000, EESR_TDE = 0x00100000,
	EESR_TFE = 0x00080000, EESR_FRC = 0x00040000,
	EESR_RDE = 0x00020000, EESR_RFE = 0x00010000,
#if defined(SH_ETH_TYPE_ETHER)
	EESR_CND = 0x00000800,
#endif
	EESR_DLC = 0x00000400,
	EESR_CD = 0x00000200, EESR_RTO = 0x00000100,
	EESR_RMAF = 0x00000080, EESR_CEEF = 0x00000040,
	EESR_CELF = 0x00000020, EESR_RRF = 0x00000010,
	EESR_RTLF = 0x00000008, EESR_RTSF = 0x00000004,
	EESR_PRE = 0x00000002, EESR_CERF = 0x00000001,
};

#if defined(SH_ETH_TYPE_GETHER) || defined(SH_ETH_TYPE_RZ)
# define TX_CHECK (EESR_TC1 | EESR_FTC)
# define EESR_ERR_CHECK	(EESR_TWB | EESR_TABT | EESR_RABT | EESR_RDE \
		| EESR_RFRMER | EESR_TFE | EESR_TDE | EESR_ECI)
# define TX_ERROR_CEHCK (EESR_TWB | EESR_TABT | EESR_TDE | EESR_TFE)

#else
# define TX_CHECK (EESR_FTC | EESR_CND | EESR_DLC | EESR_CD | EESR_RTO)
# define EESR_ERR_CHECK	(EESR_TWB | EESR_TABT | EESR_RABT | EESR_RDE \
		| EESR_RFRMER | EESR_ADE | EESR_TFE | EESR_TDE | EESR_ECI)
# define TX_ERROR_CEHCK (EESR_TWB | EESR_TABT | EESR_ADE | EESR_TDE | EESR_TFE)
#endif
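
/*
 * Hedged example (the real handling is in sh_eth.c): after a transfer the
 * accumulated EESR status can be tested against the per-variant masks, e.g.
 *
 *	if (sh_eth_read(port_info, EESR) & TX_ERROR_CEHCK)
 *		handle_tx_error();
 *
 * handle_tx_error() is a placeholder, not a function provided by the driver.
 */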

/* EESIPR */
enum DMAC_IM_BIT {
	DMAC_M_TWB = 0x40000000, DMAC_M_TABT = 0x04000000,
	DMAC_M_RABT = 0x02000000,
	DMAC_M_RFRMER = 0x01000000, DMAC_M_ADF = 0x00800000,
	DMAC_M_ECI = 0x00400000, DMAC_M_FTC = 0x00200000,
	DMAC_M_TDE = 0x00100000, DMAC_M_TFE = 0x00080000,
	DMAC_M_FRC = 0x00040000, DMAC_M_RDE = 0x00020000,
	DMAC_M_RFE = 0x00010000, DMAC_M_TINT4 = 0x00000800,
	DMAC_M_TINT3 = 0x00000400, DMAC_M_TINT2 = 0x00000200,
	DMAC_M_TINT1 = 0x00000100, DMAC_M_RINT8 = 0x00000080,
	DMAC_M_RINT5 = 0x00000010, DMAC_M_RINT4 = 0x00000008,
	DMAC_M_RINT3 = 0x00000004, DMAC_M_RINT2 = 0x00000002,
	DMAC_M_RINT1 = 0x00000001,
};

/* Receive descriptor bit */
enum RD_STS_BIT {
	RD_RACT = 0x80000000, RD_RDLE = 0x40000000,
	RD_RFP1 = 0x20000000, RD_RFP0 = 0x10000000,
	RD_RFE = 0x08000000, RD_RFS10 = 0x00000200,
	RD_RFS9 = 0x00000100, RD_RFS8 = 0x00000080,
	RD_RFS7 = 0x00000040, RD_RFS6 = 0x00000020,
	RD_RFS5 = 0x00000010, RD_RFS4 = 0x00000008,
	RD_RFS3 = 0x00000004, RD_RFS2 = 0x00000002,
	RD_RFS1 = 0x00000001,
};
#define RDF1ST	RD_RFP1
#define RDFEND	RD_RFP0
#define RD_RFP	(RD_RFP1|RD_RFP0)
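
/*
 * Receive-side sketch (assumption about typical use, not a definition from
 * this header): the E-DMAC clears RD_RACT once a descriptor holds a frame,
 * so a polling receive loop can look roughly like
 *
 *	while (!(rx_desc_cur->rd0 & RD_RACT)) {
 *		len = rx_desc_cur->rd1 & 0xffff;
 *		... pass the buffer up, then set RD_RACT again to recycle
 *		    the descriptor and advance rx_desc_cur ...
 *	}
 */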

/* RDFFR */
enum RDFFR_BIT {
	RDFFR_RDLF = 0x01,
};

/* FCFTR */
enum FCFTR_BIT {
	FCFTR_RFF2 = 0x00040000, FCFTR_RFF1 = 0x00020000,
	FCFTR_RFF0 = 0x00010000, FCFTR_RFD2 = 0x00000004,
	FCFTR_RFD1 = 0x00000002, FCFTR_RFD0 = 0x00000001,
};
#define FIFO_F_D_RFF	(FCFTR_RFF2|FCFTR_RFF1|FCFTR_RFF0)
#define FIFO_F_D_RFD	(FCFTR_RFD2|FCFTR_RFD1|FCFTR_RFD0)

/* Transfer descriptor bit */
enum TD_STS_BIT {
#if defined(SH_ETH_TYPE_GETHER) || defined(SH_ETH_TYPE_ETHER) || \
	defined(SH_ETH_TYPE_RZ)
	TD_TACT = 0x80000000,
#else
	TD_TACT = 0x7fffffff,
#endif
	TD_TDLE = 0x40000000, TD_TFP1 = 0x20000000,
	TD_TFP0 = 0x10000000,
};
#define TDF1ST	TD_TFP1
#define TDFEND	TD_TFP0
#define TD_TFP	(TD_TFP1|TD_TFP0)
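
/*
 * Transmit-side sketch (assumption about typical use): a frame that fits in
 * a single descriptor is marked both first and last, then handed back to
 * the E-DMAC by setting the active bit:
 *
 *	tx_desc_cur->td2 = ADDR_TO_PHY(packet);
 *	tx_desc_cur->td1 = len << 16;
 *	tx_desc_cur->td0 = TD_TACT | TDF1ST | TDFEND;
 *
 * The last descriptor of the ring would additionally keep TD_TDLE set.
 */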

/* RMCR */
enum RECV_RST_BIT { RMCR_RST = 0x01, };
/* ECMR */
enum FELIC_MODE_BIT {
#if defined(SH_ETH_TYPE_GETHER) || defined(SH_ETH_TYPE_RZ)
	ECMR_TRCCM = 0x04000000, ECMR_RCSC = 0x00800000,
	ECMR_DPAD = 0x00200000, ECMR_RZPF = 0x00100000,
#endif
	ECMR_ZPF = 0x00080000, ECMR_PFR = 0x00040000, ECMR_RXF = 0x00020000,
	ECMR_TXF = 0x00010000, ECMR_MCT = 0x00002000, ECMR_PRCEF = 0x00001000,
	ECMR_PMDE = 0x00000200, ECMR_RE = 0x00000040, ECMR_TE = 0x00000020,
	ECMR_ILB = 0x00000008, ECMR_ELB = 0x00000004, ECMR_DM = 0x00000002,
	ECMR_PRM = 0x00000001,
#ifdef CONFIG_CPU_SH7724
	ECMR_RTM = 0x00000010,
#elif defined(CONFIG_RCAR_GEN2) || defined(CONFIG_R8A77980)
	ECMR_RTM = 0x00000004,
#endif
};

#if defined(SH_ETH_TYPE_GETHER) || defined(SH_ETH_TYPE_RZ)
#define ECMR_CHG_DM	(ECMR_TRCCM | ECMR_RZPF | ECMR_ZPF | ECMR_PFR | \
			ECMR_RXF | ECMR_TXF | ECMR_MCT)
#elif defined(SH_ETH_TYPE_ETHER)
#define ECMR_CHG_DM	(ECMR_ZPF | ECMR_PFR | ECMR_RXF | ECMR_TXF)
#else
#define ECMR_CHG_DM	(ECMR_ZPF | ECMR_PFR | ECMR_RXF | ECMR_TXF | ECMR_MCT)
#endif

/* ECSR */
enum ECSR_STATUS_BIT {
#if defined(SH_ETH_TYPE_ETHER)
	ECSR_BRCRX = 0x20, ECSR_PSRTO = 0x10,
#endif
	ECSR_LCHNG = 0x04,
	ECSR_MPD = 0x02, ECSR_ICD = 0x01,
};

#if defined(SH_ETH_TYPE_GETHER) || defined(SH_ETH_TYPE_RZ)
# define ECSR_INIT (ECSR_ICD | ECSIPR_MPDIP)
#else
# define ECSR_INIT (ECSR_BRCRX | ECSR_PSRTO | \
		ECSR_LCHNG | ECSR_ICD | ECSIPR_MPDIP)
#endif

/* ECSIPR */
enum ECSIPR_STATUS_MASK_BIT {
#if defined(SH_ETH_TYPE_ETHER)
	ECSIPR_BRCRXIP = 0x20,
	ECSIPR_PSRTOIP = 0x10,
#elif defined(SH_ETH_TYPE_GETHER)
	ECSIPR_PSRTOIP = 0x10,
	ECSIPR_PHYIP = 0x08,
#endif
	ECSIPR_LCHNGIP = 0x04,
	ECSIPR_MPDIP = 0x02,
	ECSIPR_ICDIP = 0x01,
};

#if defined(SH_ETH_TYPE_GETHER) || defined(SH_ETH_TYPE_RZ)
# define ECSIPR_INIT (ECSIPR_LCHNGIP | ECSIPR_ICDIP | ECSIPR_MPDIP)
#else
# define ECSIPR_INIT (ECSIPR_BRCRXIP | ECSIPR_PSRTOIP | ECSIPR_LCHNGIP | \
		ECSIPR_ICDIP | ECSIPR_MPDIP)
#endif

/* APR */
enum APR_BIT {
	APR_AP = 0x00000004,
};

/* MPR */
enum MPR_BIT {
	MPR_MP = 0x00000006,
};

/* TRSCER */
enum DESC_I_BIT {
	DESC_I_TINT4 = 0x0800, DESC_I_TINT3 = 0x0400, DESC_I_TINT2 = 0x0200,
	DESC_I_TINT1 = 0x0100, DESC_I_RINT8 = 0x0080, DESC_I_RINT5 = 0x0010,
	DESC_I_RINT4 = 0x0008, DESC_I_RINT3 = 0x0004, DESC_I_RINT2 = 0x0002,
	DESC_I_RINT1 = 0x0001,
};

/* RPADIR */
enum RPADIR_BIT {
	RPADIR_PADS1 = 0x20000, RPADIR_PADS0 = 0x10000,
	RPADIR_PADR = 0x0003f,
};

#if defined(SH_ETH_TYPE_GETHER) || defined(SH_ETH_TYPE_RZ)
# define RPADIR_INIT (0x00)
#else
# define RPADIR_INIT (RPADIR_PADS1)
#endif

/* FDR */
enum FIFO_SIZE_BIT {
	FIFO_SIZE_T = 0x00000700, FIFO_SIZE_R = 0x00000007,
};

static inline unsigned long sh_eth_reg_addr(struct sh_eth_info *port,
					    int enum_index)
{
#if defined(SH_ETH_TYPE_GETHER)
	const u16 *reg_offset = sh_eth_offset_gigabit;
#elif defined(SH_ETH_TYPE_ETHER)
	const u16 *reg_offset = sh_eth_offset_fast_sh4;
#elif defined(SH_ETH_TYPE_RZ)
	const u16 *reg_offset = sh_eth_offset_rz;
#else
#error
#endif
	return (unsigned long)port->iobase + reg_offset[enum_index];
}

static inline void sh_eth_write(struct sh_eth_info *port, unsigned long data,
				int enum_index)
{
	outl(data, sh_eth_reg_addr(port, enum_index));
}

static inline unsigned long sh_eth_read(struct sh_eth_info *port,
					int enum_index)
{
	return inl(sh_eth_reg_addr(port, enum_index));
}
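
/*
 * Usage sketch for the accessors above (illustrative only; it assumes
 * port_info->iobase has already been set up): a read-modify-write of ECMR
 * that enables the receiver and transmitter would look like
 *
 *	unsigned long ecmr = sh_eth_read(port_info, ECMR);
 *	sh_eth_write(port_info, ecmr | ECMR_RE | ECMR_TE, ECMR);
 */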