 #define AXI_DMAC_REG_START_TRANSFER 0x408
 #define AXI_DMAC_REG_FLAGS 0x40c
 #define AXI_DMAC_REG_DEST_ADDRESS 0x410
+#define AXI_DMAC_REG_DEST_ADDRESS_HIGH 0x490
 #define AXI_DMAC_REG_SRC_ADDRESS 0x414
+#define AXI_DMAC_REG_SRC_ADDRESS_HIGH 0x494
 #define AXI_DMAC_REG_X_LENGTH 0x418
 #define AXI_DMAC_REG_Y_LENGTH 0x41c
 #define AXI_DMAC_REG_DEST_STRIDE 0x420
@@ -274,11 +276,14 @@ static void axi_dmac_start_transfer(struct axi_dmac_chan *chan)
 	if (!chan->hw_sg) {
 		if (axi_dmac_dest_is_mem(chan)) {
 			axi_dmac_write(dmac, AXI_DMAC_REG_DEST_ADDRESS, sg->hw->dest_addr);
+			axi_dmac_write(dmac, AXI_DMAC_REG_DEST_ADDRESS_HIGH,
+				       sg->hw->dest_addr >> 32);
 			axi_dmac_write(dmac, AXI_DMAC_REG_DEST_STRIDE, sg->hw->dst_stride);
 		}

 		if (axi_dmac_src_is_mem(chan)) {
 			axi_dmac_write(dmac, AXI_DMAC_REG_SRC_ADDRESS, sg->hw->src_addr);
+			axi_dmac_write(dmac, AXI_DMAC_REG_SRC_ADDRESS_HIGH, sg->hw->src_addr >> 32);
 			axi_dmac_write(dmac, AXI_DMAC_REG_SRC_STRIDE, sg->hw->src_stride);
 		}
 	}
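
For readers following the transfer-setup hunk above: the descriptor carries a 64-bit bus address, and the patch splits it so the low word still goes to the legacy address register while the high word is programmed into the new *_ADDRESS_HIGH register. Below is a minimal user-space sketch of that split; `write_reg()`, the example address, and the local register defines are illustration-only stand-ins, not the driver's `axi_dmac_write()` API.

```c
#include <stdint.h>
#include <stdio.h>

/* Hypothetical offsets mirroring the defines added in this patch. */
#define REG_DEST_ADDRESS      0x410
#define REG_DEST_ADDRESS_HIGH 0x490

/* Stand-in for axi_dmac_write(): just shows what would be programmed. */
static void write_reg(uint32_t reg, uint32_t val)
{
	printf("reg 0x%03x <= 0x%08x\n", (unsigned int)reg, (unsigned int)val);
}

int main(void)
{
	uint64_t dest_addr = 0x000000012345f000ULL; /* example 64-bit bus address */

	/* Low 32 bits go to the existing register, high 32 bits to the new
	 * HIGH register; a core without extended addressing ignores the
	 * upper word (its HIGH register reads back as 0). */
	write_reg(REG_DEST_ADDRESS, (uint32_t)dest_addr);
	write_reg(REG_DEST_ADDRESS_HIGH, (uint32_t)(dest_addr >> 32));
	return 0;
}
```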
@@ -1016,6 +1021,9 @@ static int axi_dmac_read_chan_config(struct device *dev, struct axi_dmac *dmac)
 static int axi_dmac_detect_caps(struct axi_dmac *dmac, unsigned int version)
 {
 	struct axi_dmac_chan *chan = &dmac->chan;
+	struct device *dev = dmac->dma_dev.dev;
+	u32 mask;
+	int ret;

 	axi_dmac_write(dmac, AXI_DMAC_REG_FLAGS, AXI_DMAC_FLAG_CYCLIC);
 	if (axi_dmac_read(dmac, AXI_DMAC_REG_FLAGS) == AXI_DMAC_FLAG_CYCLIC)
@@ -1050,6 +1058,22 @@ static int axi_dmac_detect_caps(struct axi_dmac *dmac, unsigned int version)
 		return -ENODEV;
 	}

+	if (axi_dmac_dest_is_mem(chan)) {
+		axi_dmac_write(dmac, AXI_DMAC_REG_DEST_ADDRESS_HIGH, 0xffffffff);
+		mask = axi_dmac_read(dmac, AXI_DMAC_REG_DEST_ADDRESS_HIGH);
+	} else {
+		axi_dmac_write(dmac, AXI_DMAC_REG_SRC_ADDRESS_HIGH, 0xffffffff);
+		mask = axi_dmac_read(dmac, AXI_DMAC_REG_SRC_ADDRESS_HIGH);
+	}
+
+	mask = 32 + fls(mask);
+
+	ret = dma_set_mask_and_coherent(dev, DMA_BIT_MASK(mask));
+	if (ret) {
+		dev_err(dev, "DMA mask set error %d\n", ret);
+		return ret;
+	}
+
 	if (version >= ADI_AXI_PCORE_VER(4, 2, 'a'))
 		chan->hw_partial_xfer = true;

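On the capability probe in the last hunk: the driver writes all-ones to the HIGH address register and reads it back; only the bits the synthesized core actually implements stick, so `fls()` on the readback gives the number of usable address bits above the base 32, and `32 + fls(mask)` is the width passed to `dma_set_mask_and_coherent()`. Here is a minimal sketch of that arithmetic, assuming a few hypothetical readback values; `fls_u32()` is a local helper written out only to mirror the kernel `fls()` semantics (1-based index of the highest set bit, 0 when no bit is set).

```c
#include <stddef.h>
#include <stdint.h>
#include <stdio.h>

/* Same semantics as the kernel's fls(): 1-based position of the most
 * significant set bit, or 0 if the value is 0. */
static int fls_u32(uint32_t x)
{
	int r = 0;

	while (x) {
		x >>= 1;
		r++;
	}
	return r;
}

int main(void)
{
	/* Hypothetical readbacks of a *_ADDRESS_HIGH register after writing
	 * 0xffffffff: only implemented bits survive the write. */
	uint32_t readbacks[] = { 0x00000000, 0x0000000f, 0xffffffff };

	for (size_t i = 0; i < sizeof(readbacks) / sizeof(readbacks[0]); i++) {
		unsigned int width = 32 + fls_u32(readbacks[i]);

		/* 0 -> 32-bit only, 0xf -> 36-bit, 0xffffffff -> 64-bit */
		printf("HIGH readback 0x%08x -> %u-bit DMA mask\n",
		       (unsigned int)readbacks[i], width);
	}
	return 0;
}
```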