Commit 293d3f6a70704691c3539bc3630ba1acbabc5c43

Authored by Jingoo Han
Committed by Dave Airlie
1 parent e2a75c446e

drm/exynos: dsi: use IS_ERR() to check devm_ioremap_resource() results

devm_ioremap_resource() returns an error pointer, not NULL. Thus,
the result should be checked with IS_ERR().

Signed-off-by: Jingoo Han <jg1.han@samsung.com>
Signed-off-by: Inki Dae <inki.dae@samsung.com>
Signed-off-by: Dave Airlie <airlied@redhat.com>
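
For reference, the pattern the commit message describes looks roughly like the sketch below. This is a minimal, hypothetical probe path (local names invented for illustration), not the actual hunk from this commit; it only shows the IS_ERR()/PTR_ERR() check that devm_ioremap_resource() requires.

/* Illustrative sketch only -- not the literal change from this commit. */
#include <linux/err.h>
#include <linux/io.h>
#include <linux/platform_device.h>

static int example_probe(struct platform_device *pdev)
{
	struct resource *res;
	void __iomem *reg_base;

	res = platform_get_resource(pdev, IORESOURCE_MEM, 0);

	/*
	 * devm_ioremap_resource() never returns NULL; on failure it
	 * returns an ERR_PTR() value, so it must be checked with
	 * IS_ERR() and propagated with PTR_ERR(), not compared to NULL.
	 */
	reg_base = devm_ioremap_resource(&pdev->dev, res);
	if (IS_ERR(reg_base))
		return PTR_ERR(reg_base);

	return 0;
}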

Showing 1 changed file with 2 additions and 2 deletions

drivers/gpu/drm/exynos/exynos_drm_dsi.c
/*
 * Samsung SoC MIPI DSI Master driver.
 *
 * Copyright (c) 2014 Samsung Electronics Co., Ltd
 *
 * Contacts: Tomasz Figa <t.figa@samsung.com>
 *
 * This program is free software; you can redistribute it and/or modify
 * it under the terms of the GNU General Public License version 2 as
 * published by the Free Software Foundation.
 */

#include <drm/drmP.h>
#include <drm/drm_crtc_helper.h>
#include <drm/drm_mipi_dsi.h>
#include <drm/drm_panel.h>

#include <linux/clk.h>
#include <linux/irq.h>
#include <linux/phy/phy.h>
#include <linux/regulator/consumer.h>

#include <video/mipi_display.h>
#include <video/videomode.h>

#include "exynos_drm_drv.h"

/* returns true iff both arguments logically differs */
#define NEQV(a, b) (!(a) ^ !(b))

#define DSIM_STATUS_REG		0x0	/* Status register */
#define DSIM_SWRST_REG		0x4	/* Software reset register */
#define DSIM_CLKCTRL_REG	0x8	/* Clock control register */
#define DSIM_TIMEOUT_REG	0xc	/* Time out register */
#define DSIM_CONFIG_REG		0x10	/* Configuration register */
#define DSIM_ESCMODE_REG	0x14	/* Escape mode register */

/* Main display image resolution register */
#define DSIM_MDRESOL_REG	0x18
#define DSIM_MVPORCH_REG	0x1c	/* Main display Vporch register */
#define DSIM_MHPORCH_REG	0x20	/* Main display Hporch register */
#define DSIM_MSYNC_REG		0x24	/* Main display sync area register */

/* Sub display image resolution register */
#define DSIM_SDRESOL_REG	0x28
#define DSIM_INTSRC_REG		0x2c	/* Interrupt source register */
#define DSIM_INTMSK_REG		0x30	/* Interrupt mask register */
#define DSIM_PKTHDR_REG		0x34	/* Packet Header FIFO register */
#define DSIM_PAYLOAD_REG	0x38	/* Payload FIFO register */
#define DSIM_RXFIFO_REG		0x3c	/* Read FIFO register */
#define DSIM_FIFOTHLD_REG	0x40	/* FIFO threshold level register */
#define DSIM_FIFOCTRL_REG	0x44	/* FIFO status and control register */

/* FIFO memory AC characteristic register */
#define DSIM_PLLCTRL_REG	0x4c	/* PLL control register */
#define DSIM_PLLTMR_REG		0x50	/* PLL timer register */
#define DSIM_PHYACCHR_REG	0x54	/* D-PHY AC characteristic register */
#define DSIM_PHYACCHR1_REG	0x58	/* D-PHY AC characteristic register1 */

/* DSIM_STATUS */
#define DSIM_STOP_STATE_DAT(x)		(((x) & 0xf) << 0)
#define DSIM_STOP_STATE_CLK		(1 << 8)
#define DSIM_TX_READY_HS_CLK		(1 << 10)
#define DSIM_PLL_STABLE			(1 << 31)

/* DSIM_SWRST */
#define DSIM_FUNCRST			(1 << 16)
#define DSIM_SWRST			(1 << 0)

/* DSIM_TIMEOUT */
#define DSIM_LPDR_TIMEOUT(x)		((x) << 0)
#define DSIM_BTA_TIMEOUT(x)		((x) << 16)

/* DSIM_CLKCTRL */
#define DSIM_ESC_PRESCALER(x)		(((x) & 0xffff) << 0)
#define DSIM_ESC_PRESCALER_MASK		(0xffff << 0)
#define DSIM_LANE_ESC_CLK_EN_CLK	(1 << 19)
#define DSIM_LANE_ESC_CLK_EN_DATA(x)	(((x) & 0xf) << 20)
#define DSIM_LANE_ESC_CLK_EN_DATA_MASK	(0xf << 20)
#define DSIM_BYTE_CLKEN			(1 << 24)
#define DSIM_BYTE_CLK_SRC(x)		(((x) & 0x3) << 25)
#define DSIM_BYTE_CLK_SRC_MASK		(0x3 << 25)
#define DSIM_PLL_BYPASS			(1 << 27)
#define DSIM_ESC_CLKEN			(1 << 28)
#define DSIM_TX_REQUEST_HSCLK		(1 << 31)

/* DSIM_CONFIG */
#define DSIM_LANE_EN_CLK		(1 << 0)
#define DSIM_LANE_EN(x)			(((x) & 0xf) << 1)
#define DSIM_NUM_OF_DATA_LANE(x)	(((x) & 0x3) << 5)
#define DSIM_SUB_PIX_FORMAT(x)		(((x) & 0x7) << 8)
#define DSIM_MAIN_PIX_FORMAT_MASK	(0x7 << 12)
#define DSIM_MAIN_PIX_FORMAT_RGB888	(0x7 << 12)
#define DSIM_MAIN_PIX_FORMAT_RGB666	(0x6 << 12)
#define DSIM_MAIN_PIX_FORMAT_RGB666_P	(0x5 << 12)
#define DSIM_MAIN_PIX_FORMAT_RGB565	(0x4 << 12)
#define DSIM_SUB_VC			(((x) & 0x3) << 16)
#define DSIM_MAIN_VC			(((x) & 0x3) << 18)
#define DSIM_HSA_MODE			(1 << 20)
#define DSIM_HBP_MODE			(1 << 21)
#define DSIM_HFP_MODE			(1 << 22)
#define DSIM_HSE_MODE			(1 << 23)
#define DSIM_AUTO_MODE			(1 << 24)
#define DSIM_VIDEO_MODE			(1 << 25)
#define DSIM_BURST_MODE			(1 << 26)
#define DSIM_SYNC_INFORM		(1 << 27)
#define DSIM_EOT_DISABLE		(1 << 28)
#define DSIM_MFLUSH_VS			(1 << 29)

/* DSIM_ESCMODE */
#define DSIM_TX_TRIGGER_RST		(1 << 4)
#define DSIM_TX_LPDT_LP			(1 << 6)
#define DSIM_CMD_LPDT_LP		(1 << 7)
#define DSIM_FORCE_BTA			(1 << 16)
#define DSIM_FORCE_STOP_STATE		(1 << 20)
#define DSIM_STOP_STATE_CNT(x)		(((x) & 0x7ff) << 21)
#define DSIM_STOP_STATE_CNT_MASK	(0x7ff << 21)

/* DSIM_MDRESOL */
#define DSIM_MAIN_STAND_BY		(1 << 31)
#define DSIM_MAIN_VRESOL(x)		(((x) & 0x7ff) << 16)
#define DSIM_MAIN_HRESOL(x)		(((x) & 0X7ff) << 0)

/* DSIM_MVPORCH */
#define DSIM_CMD_ALLOW(x)		((x) << 28)
#define DSIM_STABLE_VFP(x)		((x) << 16)
#define DSIM_MAIN_VBP(x)		((x) << 0)
#define DSIM_CMD_ALLOW_MASK		(0xf << 28)
#define DSIM_STABLE_VFP_MASK		(0x7ff << 16)
#define DSIM_MAIN_VBP_MASK		(0x7ff << 0)

/* DSIM_MHPORCH */
#define DSIM_MAIN_HFP(x)		((x) << 16)
#define DSIM_MAIN_HBP(x)		((x) << 0)
#define DSIM_MAIN_HFP_MASK		((0xffff) << 16)
#define DSIM_MAIN_HBP_MASK		((0xffff) << 0)

/* DSIM_MSYNC */
#define DSIM_MAIN_VSA(x)		((x) << 22)
#define DSIM_MAIN_HSA(x)		((x) << 0)
#define DSIM_MAIN_VSA_MASK		((0x3ff) << 22)
#define DSIM_MAIN_HSA_MASK		((0xffff) << 0)

/* DSIM_SDRESOL */
#define DSIM_SUB_STANDY(x)		((x) << 31)
#define DSIM_SUB_VRESOL(x)		((x) << 16)
#define DSIM_SUB_HRESOL(x)		((x) << 0)
#define DSIM_SUB_STANDY_MASK		((0x1) << 31)
#define DSIM_SUB_VRESOL_MASK		((0x7ff) << 16)
#define DSIM_SUB_HRESOL_MASK		((0x7ff) << 0)

/* DSIM_INTSRC */
#define DSIM_INT_PLL_STABLE		(1 << 31)
#define DSIM_INT_SW_RST_RELEASE		(1 << 30)
#define DSIM_INT_SFR_FIFO_EMPTY		(1 << 29)
#define DSIM_INT_BTA			(1 << 25)
#define DSIM_INT_FRAME_DONE		(1 << 24)
#define DSIM_INT_RX_TIMEOUT		(1 << 21)
#define DSIM_INT_BTA_TIMEOUT		(1 << 20)
#define DSIM_INT_RX_DONE		(1 << 18)
#define DSIM_INT_RX_TE			(1 << 17)
#define DSIM_INT_RX_ACK			(1 << 16)
#define DSIM_INT_RX_ECC_ERR		(1 << 15)
#define DSIM_INT_RX_CRC_ERR		(1 << 14)

/* DSIM_FIFOCTRL */
#define DSIM_RX_DATA_FULL		(1 << 25)
#define DSIM_RX_DATA_EMPTY		(1 << 24)
#define DSIM_SFR_HEADER_FULL		(1 << 23)
#define DSIM_SFR_HEADER_EMPTY		(1 << 22)
#define DSIM_SFR_PAYLOAD_FULL		(1 << 21)
#define DSIM_SFR_PAYLOAD_EMPTY		(1 << 20)
#define DSIM_I80_HEADER_FULL		(1 << 19)
#define DSIM_I80_HEADER_EMPTY		(1 << 18)
#define DSIM_I80_PAYLOAD_FULL		(1 << 17)
#define DSIM_I80_PAYLOAD_EMPTY		(1 << 16)
#define DSIM_SD_HEADER_FULL		(1 << 15)
#define DSIM_SD_HEADER_EMPTY		(1 << 14)
#define DSIM_SD_PAYLOAD_FULL		(1 << 13)
#define DSIM_SD_PAYLOAD_EMPTY		(1 << 12)
#define DSIM_MD_HEADER_FULL		(1 << 11)
#define DSIM_MD_HEADER_EMPTY		(1 << 10)
#define DSIM_MD_PAYLOAD_FULL		(1 << 9)
#define DSIM_MD_PAYLOAD_EMPTY		(1 << 8)
#define DSIM_RX_FIFO			(1 << 4)
#define DSIM_SFR_FIFO			(1 << 3)
#define DSIM_I80_FIFO			(1 << 2)
#define DSIM_SD_FIFO			(1 << 1)
#define DSIM_MD_FIFO			(1 << 0)

/* DSIM_PHYACCHR */
#define DSIM_AFC_EN			(1 << 14)
#define DSIM_AFC_CTL(x)			(((x) & 0x7) << 5)

/* DSIM_PLLCTRL */
#define DSIM_FREQ_BAND(x)		((x) << 24)
#define DSIM_PLL_EN			(1 << 23)
#define DSIM_PLL_P(x)			((x) << 13)
#define DSIM_PLL_M(x)			((x) << 4)
#define DSIM_PLL_S(x)			((x) << 1)

#define DSI_MAX_BUS_WIDTH		4
#define DSI_NUM_VIRTUAL_CHANNELS	4
#define DSI_TX_FIFO_SIZE		2048
#define DSI_RX_FIFO_SIZE		256
#define DSI_XFER_TIMEOUT_MS		100
#define DSI_RX_FIFO_EMPTY		0x30800002

enum exynos_dsi_transfer_type {
	EXYNOS_DSI_TX,
	EXYNOS_DSI_RX,
};

struct exynos_dsi_transfer {
	struct list_head list;
	struct completion completed;
	int result;
	u8 data_id;
	u8 data[2];
	u16 flags;

	const u8 *tx_payload;
	u16 tx_len;
	u16 tx_done;

	u8 *rx_payload;
	u16 rx_len;
	u16 rx_done;
};

#define DSIM_STATE_ENABLED		BIT(0)
#define DSIM_STATE_INITIALIZED		BIT(1)
#define DSIM_STATE_CMD_LPM		BIT(2)

struct exynos_dsi {
	struct mipi_dsi_host dsi_host;
	struct drm_connector connector;
	struct drm_encoder *encoder;
	struct device_node *panel_node;
	struct drm_panel *panel;
	struct device *dev;

	void __iomem *reg_base;
	struct phy *phy;
	struct clk *pll_clk;
	struct clk *bus_clk;
	struct regulator_bulk_data supplies[2];
	int irq;

	u32 pll_clk_rate;
	u32 burst_clk_rate;
	u32 esc_clk_rate;
	u32 lanes;
	u32 mode_flags;
	u32 format;
	struct videomode vm;

	int state;
	struct drm_property *brightness;
	struct completion completed;

	spinlock_t transfer_lock; /* protects transfer_list */
	struct list_head transfer_list;
};

#define host_to_dsi(host) container_of(host, struct exynos_dsi, dsi_host)
#define connector_to_dsi(c) container_of(c, struct exynos_dsi, connector)

static void exynos_dsi_wait_for_reset(struct exynos_dsi *dsi)
{
	if (wait_for_completion_timeout(&dsi->completed, msecs_to_jiffies(300)))
		return;

	dev_err(dsi->dev, "timeout waiting for reset\n");
}

static void exynos_dsi_reset(struct exynos_dsi *dsi)
{
	reinit_completion(&dsi->completed);
	writel(DSIM_SWRST, dsi->reg_base + DSIM_SWRST_REG);
}

#ifndef MHZ
#define MHZ	(1000*1000)
#endif

static unsigned long exynos_dsi_pll_find_pms(struct exynos_dsi *dsi,
		unsigned long fin, unsigned long fout, u8 *p, u16 *m, u8 *s)
{
	unsigned long best_freq = 0;
	u32 min_delta = 0xffffffff;
	u8 p_min, p_max;
	u8 _p, uninitialized_var(best_p);
	u16 _m, uninitialized_var(best_m);
	u8 _s, uninitialized_var(best_s);

	p_min = DIV_ROUND_UP(fin, (12 * MHZ));
	p_max = fin / (6 * MHZ);

	for (_p = p_min; _p <= p_max; ++_p) {
		for (_s = 0; _s <= 5; ++_s) {
			u64 tmp;
			u32 delta;

			tmp = (u64)fout * (_p << _s);
			do_div(tmp, fin);
			_m = tmp;
			if (_m < 41 || _m > 125)
				continue;

			tmp = (u64)_m * fin;
			do_div(tmp, _p);
			if (tmp < 500 * MHZ || tmp > 1000 * MHZ)
				continue;

			tmp = (u64)_m * fin;
			do_div(tmp, _p << _s);

			delta = abs(fout - tmp);
			if (delta < min_delta) {
				best_p = _p;
				best_m = _m;
				best_s = _s;
				min_delta = delta;
				best_freq = tmp;
			}
		}
	}

	if (best_freq) {
		*p = best_p;
		*m = best_m;
		*s = best_s;
	}

	return best_freq;
}

static unsigned long exynos_dsi_set_pll(struct exynos_dsi *dsi,
					unsigned long freq)
{
	static const unsigned long freq_bands[] = {
		100 * MHZ, 120 * MHZ, 160 * MHZ, 200 * MHZ,
		270 * MHZ, 320 * MHZ, 390 * MHZ, 450 * MHZ,
		510 * MHZ, 560 * MHZ, 640 * MHZ, 690 * MHZ,
		770 * MHZ, 870 * MHZ, 950 * MHZ,
	};
	unsigned long fin, fout;
	int timeout, band;
	u8 p, s;
	u16 m;
	u32 reg;

	clk_set_rate(dsi->pll_clk, dsi->pll_clk_rate);

	fin = clk_get_rate(dsi->pll_clk);
	if (!fin) {
		dev_err(dsi->dev, "failed to get PLL clock frequency\n");
		return 0;
	}

	dev_dbg(dsi->dev, "PLL input frequency: %lu\n", fin);

	fout = exynos_dsi_pll_find_pms(dsi, fin, freq, &p, &m, &s);
	if (!fout) {
		dev_err(dsi->dev,
			"failed to find PLL PMS for requested frequency\n");
		return -EFAULT;
	}

	for (band = 0; band < ARRAY_SIZE(freq_bands); ++band)
		if (fout < freq_bands[band])
			break;

	dev_dbg(dsi->dev, "PLL freq %lu, (p %d, m %d, s %d), band %d\n", fout,
		p, m, s, band);

	writel(500, dsi->reg_base + DSIM_PLLTMR_REG);

	reg = DSIM_FREQ_BAND(band) | DSIM_PLL_EN
			| DSIM_PLL_P(p) | DSIM_PLL_M(m) | DSIM_PLL_S(s);
	writel(reg, dsi->reg_base + DSIM_PLLCTRL_REG);

	timeout = 1000;
	do {
		if (timeout-- == 0) {
			dev_err(dsi->dev, "PLL failed to stabilize\n");
			return -EFAULT;
		}
		reg = readl(dsi->reg_base + DSIM_STATUS_REG);
	} while ((reg & DSIM_PLL_STABLE) == 0);

	return fout;
}

static int exynos_dsi_enable_clock(struct exynos_dsi *dsi)
{
	unsigned long hs_clk, byte_clk, esc_clk;
	unsigned long esc_div;
	u32 reg;

	hs_clk = exynos_dsi_set_pll(dsi, dsi->burst_clk_rate);
	if (!hs_clk) {
		dev_err(dsi->dev, "failed to configure DSI PLL\n");
		return -EFAULT;
	}

	byte_clk = hs_clk / 8;
	esc_div = DIV_ROUND_UP(byte_clk, dsi->esc_clk_rate);
	esc_clk = byte_clk / esc_div;

	if (esc_clk > 20 * MHZ) {
		++esc_div;
		esc_clk = byte_clk / esc_div;
	}

	dev_dbg(dsi->dev, "hs_clk = %lu, byte_clk = %lu, esc_clk = %lu\n",
		hs_clk, byte_clk, esc_clk);

	reg = readl(dsi->reg_base + DSIM_CLKCTRL_REG);
	reg &= ~(DSIM_ESC_PRESCALER_MASK | DSIM_LANE_ESC_CLK_EN_CLK
			| DSIM_LANE_ESC_CLK_EN_DATA_MASK | DSIM_PLL_BYPASS
			| DSIM_BYTE_CLK_SRC_MASK);
	reg |= DSIM_ESC_CLKEN | DSIM_BYTE_CLKEN
			| DSIM_ESC_PRESCALER(esc_div)
			| DSIM_LANE_ESC_CLK_EN_CLK
			| DSIM_LANE_ESC_CLK_EN_DATA(BIT(dsi->lanes) - 1)
			| DSIM_BYTE_CLK_SRC(0)
			| DSIM_TX_REQUEST_HSCLK;
	writel(reg, dsi->reg_base + DSIM_CLKCTRL_REG);

	return 0;
}

static void exynos_dsi_disable_clock(struct exynos_dsi *dsi)
{
	u32 reg;

	reg = readl(dsi->reg_base + DSIM_CLKCTRL_REG);
	reg &= ~(DSIM_LANE_ESC_CLK_EN_CLK | DSIM_LANE_ESC_CLK_EN_DATA_MASK
			| DSIM_ESC_CLKEN | DSIM_BYTE_CLKEN);
	writel(reg, dsi->reg_base + DSIM_CLKCTRL_REG);

	reg = readl(dsi->reg_base + DSIM_PLLCTRL_REG);
	reg &= ~DSIM_PLL_EN;
	writel(reg, dsi->reg_base + DSIM_PLLCTRL_REG);
}

static int exynos_dsi_init_link(struct exynos_dsi *dsi)
{
	int timeout;
	u32 reg;
	u32 lanes_mask;

	/* Initialize FIFO pointers */
	reg = readl(dsi->reg_base + DSIM_FIFOCTRL_REG);
	reg &= ~0x1f;
	writel(reg, dsi->reg_base + DSIM_FIFOCTRL_REG);

	usleep_range(9000, 11000);

	reg |= 0x1f;
	writel(reg, dsi->reg_base + DSIM_FIFOCTRL_REG);

	usleep_range(9000, 11000);

	/* DSI configuration */
	reg = 0;

	if (dsi->mode_flags & MIPI_DSI_MODE_VIDEO) {
		reg |= DSIM_VIDEO_MODE;

		if (!(dsi->mode_flags & MIPI_DSI_MODE_VSYNC_FLUSH))
			reg |= DSIM_MFLUSH_VS;
		if (!(dsi->mode_flags & MIPI_DSI_MODE_EOT_PACKET))
			reg |= DSIM_EOT_DISABLE;
		if (dsi->mode_flags & MIPI_DSI_MODE_VIDEO_SYNC_PULSE)
			reg |= DSIM_SYNC_INFORM;
		if (dsi->mode_flags & MIPI_DSI_MODE_VIDEO_BURST)
			reg |= DSIM_BURST_MODE;
		if (dsi->mode_flags & MIPI_DSI_MODE_VIDEO_AUTO_VERT)
			reg |= DSIM_AUTO_MODE;
		if (dsi->mode_flags & MIPI_DSI_MODE_VIDEO_HSE)
			reg |= DSIM_HSE_MODE;
		if (!(dsi->mode_flags & MIPI_DSI_MODE_VIDEO_HFP))
			reg |= DSIM_HFP_MODE;
		if (!(dsi->mode_flags & MIPI_DSI_MODE_VIDEO_HBP))
			reg |= DSIM_HBP_MODE;
		if (!(dsi->mode_flags & MIPI_DSI_MODE_VIDEO_HSA))
			reg |= DSIM_HSA_MODE;
	}

	switch (dsi->format) {
	case MIPI_DSI_FMT_RGB888:
		reg |= DSIM_MAIN_PIX_FORMAT_RGB888;
		break;
	case MIPI_DSI_FMT_RGB666:
		reg |= DSIM_MAIN_PIX_FORMAT_RGB666;
		break;
	case MIPI_DSI_FMT_RGB666_PACKED:
		reg |= DSIM_MAIN_PIX_FORMAT_RGB666_P;
		break;
	case MIPI_DSI_FMT_RGB565:
		reg |= DSIM_MAIN_PIX_FORMAT_RGB565;
		break;
	default:
		dev_err(dsi->dev, "invalid pixel format\n");
		return -EINVAL;
	}

	reg |= DSIM_NUM_OF_DATA_LANE(dsi->lanes - 1);

	writel(reg, dsi->reg_base + DSIM_CONFIG_REG);

	reg |= DSIM_LANE_EN_CLK;
	writel(reg, dsi->reg_base + DSIM_CONFIG_REG);

	lanes_mask = BIT(dsi->lanes) - 1;
	reg |= DSIM_LANE_EN(lanes_mask);
	writel(reg, dsi->reg_base + DSIM_CONFIG_REG);

	/* Check clock and data lane state are stop state */
	timeout = 100;
	do {
		if (timeout-- == 0) {
			dev_err(dsi->dev, "waiting for bus lanes timed out\n");
			return -EFAULT;
		}

		reg = readl(dsi->reg_base + DSIM_STATUS_REG);
		if ((reg & DSIM_STOP_STATE_DAT(lanes_mask))
		    != DSIM_STOP_STATE_DAT(lanes_mask))
			continue;
	} while (!(reg & (DSIM_STOP_STATE_CLK | DSIM_TX_READY_HS_CLK)));

	reg = readl(dsi->reg_base + DSIM_ESCMODE_REG);
	reg &= ~DSIM_STOP_STATE_CNT_MASK;
	reg |= DSIM_STOP_STATE_CNT(0xf);
	writel(reg, dsi->reg_base + DSIM_ESCMODE_REG);

	reg = DSIM_BTA_TIMEOUT(0xff) | DSIM_LPDR_TIMEOUT(0xffff);
	writel(reg, dsi->reg_base + DSIM_TIMEOUT_REG);

	return 0;
}

static void exynos_dsi_set_display_mode(struct exynos_dsi *dsi)
{
	struct videomode *vm = &dsi->vm;
	u32 reg;

	if (dsi->mode_flags & MIPI_DSI_MODE_VIDEO) {
		reg = DSIM_CMD_ALLOW(0xf)
			| DSIM_STABLE_VFP(vm->vfront_porch)
			| DSIM_MAIN_VBP(vm->vback_porch);
		writel(reg, dsi->reg_base + DSIM_MVPORCH_REG);

		reg = DSIM_MAIN_HFP(vm->hfront_porch)
			| DSIM_MAIN_HBP(vm->hback_porch);
		writel(reg, dsi->reg_base + DSIM_MHPORCH_REG);

		reg = DSIM_MAIN_VSA(vm->vsync_len)
			| DSIM_MAIN_HSA(vm->hsync_len);
		writel(reg, dsi->reg_base + DSIM_MSYNC_REG);
	}

	reg = DSIM_MAIN_HRESOL(vm->hactive) | DSIM_MAIN_VRESOL(vm->vactive);
	writel(reg, dsi->reg_base + DSIM_MDRESOL_REG);

	dev_dbg(dsi->dev, "LCD size = %dx%d\n", vm->hactive, vm->vactive);
}

static void exynos_dsi_set_display_enable(struct exynos_dsi *dsi, bool enable)
{
	u32 reg;

	reg = readl(dsi->reg_base + DSIM_MDRESOL_REG);
	if (enable)
		reg |= DSIM_MAIN_STAND_BY;
	else
		reg &= ~DSIM_MAIN_STAND_BY;
	writel(reg, dsi->reg_base + DSIM_MDRESOL_REG);
}

static int exynos_dsi_wait_for_hdr_fifo(struct exynos_dsi *dsi)
{
	int timeout = 2000;

	do {
		u32 reg = readl(dsi->reg_base + DSIM_FIFOCTRL_REG);

		if (!(reg & DSIM_SFR_HEADER_FULL))
			return 0;

		if (!cond_resched())
			usleep_range(950, 1050);
	} while (--timeout);

	return -ETIMEDOUT;
}

static void exynos_dsi_set_cmd_lpm(struct exynos_dsi *dsi, bool lpm)
{
	u32 v = readl(dsi->reg_base + DSIM_ESCMODE_REG);

	if (lpm)
		v |= DSIM_CMD_LPDT_LP;
	else
		v &= ~DSIM_CMD_LPDT_LP;

	writel(v, dsi->reg_base + DSIM_ESCMODE_REG);
}

static void exynos_dsi_force_bta(struct exynos_dsi *dsi)
{
	u32 v = readl(dsi->reg_base + DSIM_ESCMODE_REG);

	v |= DSIM_FORCE_BTA;
	writel(v, dsi->reg_base + DSIM_ESCMODE_REG);
}

static void exynos_dsi_send_to_fifo(struct exynos_dsi *dsi,
					struct exynos_dsi_transfer *xfer)
{
	struct device *dev = dsi->dev;
	const u8 *payload = xfer->tx_payload + xfer->tx_done;
	u16 length = xfer->tx_len - xfer->tx_done;
	bool first = !xfer->tx_done;
	u32 reg;

	dev_dbg(dev, "< xfer %p: tx len %u, done %u, rx len %u, done %u\n",
		xfer, xfer->tx_len, xfer->tx_done, xfer->rx_len, xfer->rx_done);

	if (length > DSI_TX_FIFO_SIZE)
		length = DSI_TX_FIFO_SIZE;

	xfer->tx_done += length;

	/* Send payload */
	while (length >= 4) {
		reg = (payload[3] << 24) | (payload[2] << 16)
			| (payload[1] << 8) | payload[0];
		writel(reg, dsi->reg_base + DSIM_PAYLOAD_REG);
		payload += 4;
		length -= 4;
	}

	reg = 0;
	switch (length) {
	case 3:
		reg |= payload[2] << 16;
		/* Fall through */
	case 2:
		reg |= payload[1] << 8;
		/* Fall through */
	case 1:
		reg |= payload[0];
		writel(reg, dsi->reg_base + DSIM_PAYLOAD_REG);
		break;
	case 0:
		/* Do nothing */
		break;
	}

	/* Send packet header */
	if (!first)
		return;

	reg = (xfer->data[1] << 16) | (xfer->data[0] << 8) | xfer->data_id;
	if (exynos_dsi_wait_for_hdr_fifo(dsi)) {
		dev_err(dev, "waiting for header FIFO timed out\n");
		return;
	}

	if (NEQV(xfer->flags & MIPI_DSI_MSG_USE_LPM,
		 dsi->state & DSIM_STATE_CMD_LPM)) {
		exynos_dsi_set_cmd_lpm(dsi, xfer->flags & MIPI_DSI_MSG_USE_LPM);
		dsi->state ^= DSIM_STATE_CMD_LPM;
	}

	writel(reg, dsi->reg_base + DSIM_PKTHDR_REG);

	if (xfer->flags & MIPI_DSI_MSG_REQ_ACK)
		exynos_dsi_force_bta(dsi);
}

static void exynos_dsi_read_from_fifo(struct exynos_dsi *dsi,
					struct exynos_dsi_transfer *xfer)
{
	u8 *payload = xfer->rx_payload + xfer->rx_done;
	bool first = !xfer->rx_done;
	struct device *dev = dsi->dev;
	u16 length;
	u32 reg;

	if (first) {
		reg = readl(dsi->reg_base + DSIM_RXFIFO_REG);

		switch (reg & 0x3f) {
		case MIPI_DSI_RX_GENERIC_SHORT_READ_RESPONSE_2BYTE:
		case MIPI_DSI_RX_DCS_SHORT_READ_RESPONSE_2BYTE:
			if (xfer->rx_len >= 2) {
				payload[1] = reg >> 16;
				++xfer->rx_done;
			}
			/* Fall through */
		case MIPI_DSI_RX_GENERIC_SHORT_READ_RESPONSE_1BYTE:
		case MIPI_DSI_RX_DCS_SHORT_READ_RESPONSE_1BYTE:
			payload[0] = reg >> 8;
			++xfer->rx_done;
			xfer->rx_len = xfer->rx_done;
			xfer->result = 0;
			goto clear_fifo;
		case MIPI_DSI_RX_ACKNOWLEDGE_AND_ERROR_REPORT:
			dev_err(dev, "DSI Error Report: 0x%04x\n",
				(reg >> 8) & 0xffff);
			xfer->result = 0;
			goto clear_fifo;
		}

		length = (reg >> 8) & 0xffff;
		if (length > xfer->rx_len) {
			dev_err(dev,
				"response too long (%u > %u bytes), stripping\n",
				xfer->rx_len, length);
			length = xfer->rx_len;
		} else if (length < xfer->rx_len)
			xfer->rx_len = length;
	}

	length = xfer->rx_len - xfer->rx_done;
	xfer->rx_done += length;

	/* Receive payload */
	while (length >= 4) {
		reg = readl(dsi->reg_base + DSIM_RXFIFO_REG);
		payload[0] = (reg >> 0) & 0xff;
		payload[1] = (reg >> 8) & 0xff;
		payload[2] = (reg >> 16) & 0xff;
		payload[3] = (reg >> 24) & 0xff;
		payload += 4;
		length -= 4;
	}

	if (length) {
		reg = readl(dsi->reg_base + DSIM_RXFIFO_REG);
		switch (length) {
		case 3:
			payload[2] = (reg >> 16) & 0xff;
			/* Fall through */
		case 2:
			payload[1] = (reg >> 8) & 0xff;
			/* Fall through */
		case 1:
			payload[0] = reg & 0xff;
		}
	}

	if (xfer->rx_done == xfer->rx_len)
		xfer->result = 0;

clear_fifo:
	length = DSI_RX_FIFO_SIZE / 4;
	do {
		reg = readl(dsi->reg_base + DSIM_RXFIFO_REG);
		if (reg == DSI_RX_FIFO_EMPTY)
			break;
	} while (--length);
}

static void exynos_dsi_transfer_start(struct exynos_dsi *dsi)
{
	unsigned long flags;
	struct exynos_dsi_transfer *xfer;
	bool start = false;

again:
	spin_lock_irqsave(&dsi->transfer_lock, flags);

	if (list_empty(&dsi->transfer_list)) {
		spin_unlock_irqrestore(&dsi->transfer_lock, flags);
		return;
	}

	xfer = list_first_entry(&dsi->transfer_list,
					struct exynos_dsi_transfer, list);

	spin_unlock_irqrestore(&dsi->transfer_lock, flags);

	if (xfer->tx_len && xfer->tx_done == xfer->tx_len)
		/* waiting for RX */
		return;

	exynos_dsi_send_to_fifo(dsi, xfer);

	if (xfer->tx_len || xfer->rx_len)
		return;

	xfer->result = 0;
	complete(&xfer->completed);

	spin_lock_irqsave(&dsi->transfer_lock, flags);

	list_del_init(&xfer->list);
	start = !list_empty(&dsi->transfer_list);

	spin_unlock_irqrestore(&dsi->transfer_lock, flags);

	if (start)
		goto again;
}

static bool exynos_dsi_transfer_finish(struct exynos_dsi *dsi)
{
	struct exynos_dsi_transfer *xfer;
	unsigned long flags;
	bool start = true;

	spin_lock_irqsave(&dsi->transfer_lock, flags);

	if (list_empty(&dsi->transfer_list)) {
		spin_unlock_irqrestore(&dsi->transfer_lock, flags);
		return false;
	}

	xfer = list_first_entry(&dsi->transfer_list,
					struct exynos_dsi_transfer, list);

	spin_unlock_irqrestore(&dsi->transfer_lock, flags);

	dev_dbg(dsi->dev,
		"> xfer %p, tx_len %u, tx_done %u, rx_len %u, rx_done %u\n",
		xfer, xfer->tx_len, xfer->tx_done, xfer->rx_len, xfer->rx_done);

	if (xfer->tx_done != xfer->tx_len)
		return true;

	if (xfer->rx_done != xfer->rx_len)
		exynos_dsi_read_from_fifo(dsi, xfer);

	if (xfer->rx_done != xfer->rx_len)
		return true;

	spin_lock_irqsave(&dsi->transfer_lock, flags);

	list_del_init(&xfer->list);
	start = !list_empty(&dsi->transfer_list);

	spin_unlock_irqrestore(&dsi->transfer_lock, flags);

	if (!xfer->rx_len)
		xfer->result = 0;
	complete(&xfer->completed);

	return start;
}

static void exynos_dsi_remove_transfer(struct exynos_dsi *dsi,
					struct exynos_dsi_transfer *xfer)
{
	unsigned long flags;
	bool start;

	spin_lock_irqsave(&dsi->transfer_lock, flags);

	if (!list_empty(&dsi->transfer_list) &&
	    xfer == list_first_entry(&dsi->transfer_list,
				     struct exynos_dsi_transfer, list)) {
		list_del_init(&xfer->list);
		start = !list_empty(&dsi->transfer_list);
		spin_unlock_irqrestore(&dsi->transfer_lock, flags);
		if (start)
			exynos_dsi_transfer_start(dsi);
		return;
	}

	list_del_init(&xfer->list);

	spin_unlock_irqrestore(&dsi->transfer_lock, flags);
}

static int exynos_dsi_transfer(struct exynos_dsi *dsi,
					struct exynos_dsi_transfer *xfer)
{
	unsigned long flags;
	bool stopped;

	xfer->tx_done = 0;
	xfer->rx_done = 0;
	xfer->result = -ETIMEDOUT;
	init_completion(&xfer->completed);

	spin_lock_irqsave(&dsi->transfer_lock, flags);

	stopped = list_empty(&dsi->transfer_list);
	list_add_tail(&xfer->list, &dsi->transfer_list);

	spin_unlock_irqrestore(&dsi->transfer_lock, flags);

	if (stopped)
		exynos_dsi_transfer_start(dsi);

	wait_for_completion_timeout(&xfer->completed,
				    msecs_to_jiffies(DSI_XFER_TIMEOUT_MS));
	if (xfer->result == -ETIMEDOUT) {
		exynos_dsi_remove_transfer(dsi, xfer);
		dev_err(dsi->dev, "xfer timed out: %*ph %*ph\n", 2, xfer->data,
			xfer->tx_len, xfer->tx_payload);
		return -ETIMEDOUT;
	}

	/* Also covers hardware timeout condition */
	return xfer->result;
}

static irqreturn_t exynos_dsi_irq(int irq, void *dev_id)
{
	struct exynos_dsi *dsi = dev_id;
	u32 status;

	status = readl(dsi->reg_base + DSIM_INTSRC_REG);
	if (!status) {
		static unsigned long int j;
		if (printk_timed_ratelimit(&j, 500))
			dev_warn(dsi->dev, "spurious interrupt\n");
		return IRQ_HANDLED;
	}
	writel(status, dsi->reg_base + DSIM_INTSRC_REG);

	if (status & DSIM_INT_SW_RST_RELEASE) {
		u32 mask = ~(DSIM_INT_RX_DONE | DSIM_INT_SFR_FIFO_EMPTY);
		writel(mask, dsi->reg_base + DSIM_INTMSK_REG);
		complete(&dsi->completed);
		return IRQ_HANDLED;
	}

	if (!(status & (DSIM_INT_RX_DONE | DSIM_INT_SFR_FIFO_EMPTY)))
		return IRQ_HANDLED;

	if (exynos_dsi_transfer_finish(dsi))
		exynos_dsi_transfer_start(dsi);

	return IRQ_HANDLED;
}

static int exynos_dsi_init(struct exynos_dsi *dsi)
{
	exynos_dsi_enable_clock(dsi);
	exynos_dsi_reset(dsi);
	enable_irq(dsi->irq);
	exynos_dsi_wait_for_reset(dsi);
	exynos_dsi_init_link(dsi);

	return 0;
}

static int exynos_dsi_host_attach(struct mipi_dsi_host *host,
				  struct mipi_dsi_device *device)
{
	struct exynos_dsi *dsi = host_to_dsi(host);

	dsi->lanes = device->lanes;
	dsi->format = device->format;
	dsi->mode_flags = device->mode_flags;
	dsi->panel_node = device->dev.of_node;

	if (dsi->connector.dev)
		drm_helper_hpd_irq_event(dsi->connector.dev);

	return 0;
}

static int exynos_dsi_host_detach(struct mipi_dsi_host *host,
				  struct mipi_dsi_device *device)
{
	struct exynos_dsi *dsi = host_to_dsi(host);

	dsi->panel_node = NULL;

	if (dsi->connector.dev)
		drm_helper_hpd_irq_event(dsi->connector.dev);

	return 0;
}

/* distinguish between short and long DSI packet types */
static bool exynos_dsi_is_short_dsi_type(u8 type)
{
	return (type & 0x0f) <= 8;
}

static ssize_t exynos_dsi_host_transfer(struct mipi_dsi_host *host,
					struct mipi_dsi_msg *msg)
{
	struct exynos_dsi *dsi = host_to_dsi(host);
	struct exynos_dsi_transfer xfer;
	int ret;

	if (!(dsi->state & DSIM_STATE_INITIALIZED)) {
		ret = exynos_dsi_init(dsi);
		if (ret)
			return ret;
		dsi->state |= DSIM_STATE_INITIALIZED;
	}

	if (msg->tx_len == 0)
		return -EINVAL;

	xfer.data_id = msg->type | (msg->channel << 6);

	if (exynos_dsi_is_short_dsi_type(msg->type)) {
		const char *tx_buf = msg->tx_buf;

		if (msg->tx_len > 2)
			return -EINVAL;
		xfer.tx_len = 0;
		xfer.data[0] = tx_buf[0];
		xfer.data[1] = (msg->tx_len == 2) ? tx_buf[1] : 0;
	} else {
		xfer.tx_len = msg->tx_len;
		xfer.data[0] = msg->tx_len & 0xff;
		xfer.data[1] = msg->tx_len >> 8;
		xfer.tx_payload = msg->tx_buf;
	}

	xfer.rx_len = msg->rx_len;
	xfer.rx_payload = msg->rx_buf;
	xfer.flags = msg->flags;

	ret = exynos_dsi_transfer(dsi, &xfer);
	return (ret < 0) ? ret : xfer.rx_done;
}

static const struct mipi_dsi_host_ops exynos_dsi_ops = {
	.attach = exynos_dsi_host_attach,
	.detach = exynos_dsi_host_detach,
	.transfer = exynos_dsi_host_transfer,
};

static int exynos_dsi_poweron(struct exynos_dsi *dsi)
{
	int ret;

	ret = regulator_bulk_enable(ARRAY_SIZE(dsi->supplies), dsi->supplies);
	if (ret < 0) {
		dev_err(dsi->dev, "cannot enable regulators %d\n", ret);
		return ret;
	}

	ret = clk_prepare_enable(dsi->bus_clk);
	if (ret < 0) {
		dev_err(dsi->dev, "cannot enable bus clock %d\n", ret);
		goto err_bus_clk;
	}

	ret = clk_prepare_enable(dsi->pll_clk);
	if (ret < 0) {
		dev_err(dsi->dev, "cannot enable pll clock %d\n", ret);
1059 goto err_pll_clk; 1059 goto err_pll_clk;
1060 } 1060 }
1061 1061
1062 ret = phy_power_on(dsi->phy); 1062 ret = phy_power_on(dsi->phy);
1063 if (ret < 0) { 1063 if (ret < 0) {
1064 dev_err(dsi->dev, "cannot enable phy %d\n", ret); 1064 dev_err(dsi->dev, "cannot enable phy %d\n", ret);
1065 goto err_phy; 1065 goto err_phy;
1066 } 1066 }
1067 1067
1068 return 0; 1068 return 0;
1069 1069
1070 err_phy: 1070 err_phy:
1071 clk_disable_unprepare(dsi->pll_clk); 1071 clk_disable_unprepare(dsi->pll_clk);
1072 err_pll_clk: 1072 err_pll_clk:
1073 clk_disable_unprepare(dsi->bus_clk); 1073 clk_disable_unprepare(dsi->bus_clk);
1074 err_bus_clk: 1074 err_bus_clk:
1075 regulator_bulk_disable(ARRAY_SIZE(dsi->supplies), dsi->supplies); 1075 regulator_bulk_disable(ARRAY_SIZE(dsi->supplies), dsi->supplies);
1076 1076
1077 return ret; 1077 return ret;
1078 } 1078 }
1079 1079
1080 static void exynos_dsi_poweroff(struct exynos_dsi *dsi) 1080 static void exynos_dsi_poweroff(struct exynos_dsi *dsi)
1081 { 1081 {
1082 int ret; 1082 int ret;
1083 1083
1084 usleep_range(10000, 20000); 1084 usleep_range(10000, 20000);
1085 1085
1086 if (dsi->state & DSIM_STATE_INITIALIZED) { 1086 if (dsi->state & DSIM_STATE_INITIALIZED) {
1087 dsi->state &= ~DSIM_STATE_INITIALIZED; 1087 dsi->state &= ~DSIM_STATE_INITIALIZED;
1088 1088
1089 exynos_dsi_disable_clock(dsi); 1089 exynos_dsi_disable_clock(dsi);
1090 1090
1091 disable_irq(dsi->irq); 1091 disable_irq(dsi->irq);
1092 } 1092 }
1093 1093
1094 dsi->state &= ~DSIM_STATE_CMD_LPM; 1094 dsi->state &= ~DSIM_STATE_CMD_LPM;
1095 1095
1096 phy_power_off(dsi->phy); 1096 phy_power_off(dsi->phy);
1097 1097
1098 clk_disable_unprepare(dsi->pll_clk); 1098 clk_disable_unprepare(dsi->pll_clk);
1099 clk_disable_unprepare(dsi->bus_clk); 1099 clk_disable_unprepare(dsi->bus_clk);
1100 1100
1101 ret = regulator_bulk_disable(ARRAY_SIZE(dsi->supplies), dsi->supplies); 1101 ret = regulator_bulk_disable(ARRAY_SIZE(dsi->supplies), dsi->supplies);
1102 if (ret < 0) 1102 if (ret < 0)
1103 dev_err(dsi->dev, "cannot disable regulators %d\n", ret); 1103 dev_err(dsi->dev, "cannot disable regulators %d\n", ret);
1104 } 1104 }
1105 1105
1106 static int exynos_dsi_enable(struct exynos_dsi *dsi) 1106 static int exynos_dsi_enable(struct exynos_dsi *dsi)
1107 { 1107 {
1108 int ret; 1108 int ret;
1109 1109
1110 if (dsi->state & DSIM_STATE_ENABLED) 1110 if (dsi->state & DSIM_STATE_ENABLED)
1111 return 0; 1111 return 0;
1112 1112
1113 ret = exynos_dsi_poweron(dsi); 1113 ret = exynos_dsi_poweron(dsi);
1114 if (ret < 0) 1114 if (ret < 0)
1115 return ret; 1115 return ret;
1116 1116
1117 ret = drm_panel_enable(dsi->panel); 1117 ret = drm_panel_enable(dsi->panel);
1118 if (ret < 0) { 1118 if (ret < 0) {
1119 exynos_dsi_poweroff(dsi); 1119 exynos_dsi_poweroff(dsi);
1120 return ret; 1120 return ret;
1121 } 1121 }
1122 1122
1123 exynos_dsi_set_display_mode(dsi); 1123 exynos_dsi_set_display_mode(dsi);
1124 exynos_dsi_set_display_enable(dsi, true); 1124 exynos_dsi_set_display_enable(dsi, true);
1125 1125
1126 dsi->state |= DSIM_STATE_ENABLED; 1126 dsi->state |= DSIM_STATE_ENABLED;
1127 1127
1128 return 0; 1128 return 0;
1129 } 1129 }
1130 1130
1131 static void exynos_dsi_disable(struct exynos_dsi *dsi) 1131 static void exynos_dsi_disable(struct exynos_dsi *dsi)
1132 { 1132 {
1133 if (!(dsi->state & DSIM_STATE_ENABLED)) 1133 if (!(dsi->state & DSIM_STATE_ENABLED))
1134 return; 1134 return;
1135 1135
1136 exynos_dsi_set_display_enable(dsi, false); 1136 exynos_dsi_set_display_enable(dsi, false);
1137 drm_panel_disable(dsi->panel); 1137 drm_panel_disable(dsi->panel);
1138 exynos_dsi_poweroff(dsi); 1138 exynos_dsi_poweroff(dsi);
1139 1139
1140 dsi->state &= ~DSIM_STATE_ENABLED; 1140 dsi->state &= ~DSIM_STATE_ENABLED;
1141 } 1141 }
1142 1142
1143 static void exynos_dsi_dpms(struct exynos_drm_display *display, int mode) 1143 static void exynos_dsi_dpms(struct exynos_drm_display *display, int mode)
1144 { 1144 {
1145 struct exynos_dsi *dsi = display->ctx; 1145 struct exynos_dsi *dsi = display->ctx;
1146 1146
1147 if (dsi->panel) { 1147 if (dsi->panel) {
1148 switch (mode) { 1148 switch (mode) {
1149 case DRM_MODE_DPMS_ON: 1149 case DRM_MODE_DPMS_ON:
1150 exynos_dsi_enable(dsi); 1150 exynos_dsi_enable(dsi);
1151 break; 1151 break;
1152 case DRM_MODE_DPMS_STANDBY: 1152 case DRM_MODE_DPMS_STANDBY:
1153 case DRM_MODE_DPMS_SUSPEND: 1153 case DRM_MODE_DPMS_SUSPEND:
1154 case DRM_MODE_DPMS_OFF: 1154 case DRM_MODE_DPMS_OFF:
1155 exynos_dsi_disable(dsi); 1155 exynos_dsi_disable(dsi);
1156 break; 1156 break;
1157 default: 1157 default:
1158 break; 1158 break;
1159 } 1159 }
1160 } 1160 }
1161 } 1161 }
1162 1162
1163 static enum drm_connector_status 1163 static enum drm_connector_status
1164 exynos_dsi_detect(struct drm_connector *connector, bool force) 1164 exynos_dsi_detect(struct drm_connector *connector, bool force)
1165 { 1165 {
1166 struct exynos_dsi *dsi = connector_to_dsi(connector); 1166 struct exynos_dsi *dsi = connector_to_dsi(connector);
1167 1167
1168 if (!dsi->panel) { 1168 if (!dsi->panel) {
1169 dsi->panel = of_drm_find_panel(dsi->panel_node); 1169 dsi->panel = of_drm_find_panel(dsi->panel_node);
1170 if (dsi->panel) 1170 if (dsi->panel)
1171 drm_panel_attach(dsi->panel, &dsi->connector); 1171 drm_panel_attach(dsi->panel, &dsi->connector);
1172 } else if (!dsi->panel_node) { 1172 } else if (!dsi->panel_node) {
1173 struct exynos_drm_display *display; 1173 struct exynos_drm_display *display;
1174 1174
1175 display = platform_get_drvdata(to_platform_device(dsi->dev)); 1175 display = platform_get_drvdata(to_platform_device(dsi->dev));
1176 exynos_dsi_dpms(display, DRM_MODE_DPMS_OFF); 1176 exynos_dsi_dpms(display, DRM_MODE_DPMS_OFF);
1177 drm_panel_detach(dsi->panel); 1177 drm_panel_detach(dsi->panel);
1178 dsi->panel = NULL; 1178 dsi->panel = NULL;
1179 } 1179 }
1180 1180
1181 if (dsi->panel) 1181 if (dsi->panel)
1182 return connector_status_connected; 1182 return connector_status_connected;
1183 1183
1184 return connector_status_disconnected; 1184 return connector_status_disconnected;
1185 } 1185 }
1186 1186
1187 static void exynos_dsi_connector_destroy(struct drm_connector *connector) 1187 static void exynos_dsi_connector_destroy(struct drm_connector *connector)
1188 { 1188 {
1189 } 1189 }
1190 1190
1191 static struct drm_connector_funcs exynos_dsi_connector_funcs = { 1191 static struct drm_connector_funcs exynos_dsi_connector_funcs = {
1192 .dpms = drm_helper_connector_dpms, 1192 .dpms = drm_helper_connector_dpms,
1193 .detect = exynos_dsi_detect, 1193 .detect = exynos_dsi_detect,
1194 .fill_modes = drm_helper_probe_single_connector_modes, 1194 .fill_modes = drm_helper_probe_single_connector_modes,
1195 .destroy = exynos_dsi_connector_destroy, 1195 .destroy = exynos_dsi_connector_destroy,
1196 }; 1196 };
1197 1197
1198 static int exynos_dsi_get_modes(struct drm_connector *connector) 1198 static int exynos_dsi_get_modes(struct drm_connector *connector)
1199 { 1199 {
1200 struct exynos_dsi *dsi = connector_to_dsi(connector); 1200 struct exynos_dsi *dsi = connector_to_dsi(connector);
1201 1201
1202 if (dsi->panel) 1202 if (dsi->panel)
1203 return dsi->panel->funcs->get_modes(dsi->panel); 1203 return dsi->panel->funcs->get_modes(dsi->panel);
1204 1204
1205 return 0; 1205 return 0;
1206 } 1206 }
1207 1207
1208 static int exynos_dsi_mode_valid(struct drm_connector *connector, 1208 static int exynos_dsi_mode_valid(struct drm_connector *connector,
1209 struct drm_display_mode *mode) 1209 struct drm_display_mode *mode)
1210 { 1210 {
1211 return MODE_OK; 1211 return MODE_OK;
1212 } 1212 }
1213 1213
1214 static struct drm_encoder * 1214 static struct drm_encoder *
1215 exynos_dsi_best_encoder(struct drm_connector *connector) 1215 exynos_dsi_best_encoder(struct drm_connector *connector)
1216 { 1216 {
1217 struct exynos_dsi *dsi = connector_to_dsi(connector); 1217 struct exynos_dsi *dsi = connector_to_dsi(connector);
1218 1218
1219 return dsi->encoder; 1219 return dsi->encoder;
1220 } 1220 }
1221 1221
1222 static struct drm_connector_helper_funcs exynos_dsi_connector_helper_funcs = { 1222 static struct drm_connector_helper_funcs exynos_dsi_connector_helper_funcs = {
1223 .get_modes = exynos_dsi_get_modes, 1223 .get_modes = exynos_dsi_get_modes,
1224 .mode_valid = exynos_dsi_mode_valid, 1224 .mode_valid = exynos_dsi_mode_valid,
1225 .best_encoder = exynos_dsi_best_encoder, 1225 .best_encoder = exynos_dsi_best_encoder,
1226 }; 1226 };
1227 1227
1228 static int exynos_dsi_create_connector(struct exynos_drm_display *display, 1228 static int exynos_dsi_create_connector(struct exynos_drm_display *display,
1229 struct drm_encoder *encoder) 1229 struct drm_encoder *encoder)
1230 { 1230 {
1231 struct exynos_dsi *dsi = display->ctx; 1231 struct exynos_dsi *dsi = display->ctx;
1232 struct drm_connector *connector = &dsi->connector; 1232 struct drm_connector *connector = &dsi->connector;
1233 int ret; 1233 int ret;
1234 1234
1235 dsi->encoder = encoder; 1235 dsi->encoder = encoder;
1236 1236
1237 connector->polled = DRM_CONNECTOR_POLL_HPD; 1237 connector->polled = DRM_CONNECTOR_POLL_HPD;
1238 1238
1239 ret = drm_connector_init(encoder->dev, connector, 1239 ret = drm_connector_init(encoder->dev, connector,
1240 &exynos_dsi_connector_funcs, 1240 &exynos_dsi_connector_funcs,
1241 DRM_MODE_CONNECTOR_DSI); 1241 DRM_MODE_CONNECTOR_DSI);
1242 if (ret) { 1242 if (ret) {
1243 DRM_ERROR("Failed to initialize connector with drm\n"); 1243 DRM_ERROR("Failed to initialize connector with drm\n");
1244 return ret; 1244 return ret;
1245 } 1245 }
1246 1246
1247 drm_connector_helper_add(connector, &exynos_dsi_connector_helper_funcs); 1247 drm_connector_helper_add(connector, &exynos_dsi_connector_helper_funcs);
1248 drm_sysfs_connector_add(connector); 1248 drm_sysfs_connector_add(connector);
1249 drm_mode_connector_attach_encoder(connector, encoder); 1249 drm_mode_connector_attach_encoder(connector, encoder);
1250 1250
1251 return 0; 1251 return 0;
1252 } 1252 }
1253 1253
1254 static void exynos_dsi_mode_set(struct exynos_drm_display *display, 1254 static void exynos_dsi_mode_set(struct exynos_drm_display *display,
1255 struct drm_display_mode *mode) 1255 struct drm_display_mode *mode)
1256 { 1256 {
1257 struct exynos_dsi *dsi = display->ctx; 1257 struct exynos_dsi *dsi = display->ctx;
1258 struct videomode *vm = &dsi->vm; 1258 struct videomode *vm = &dsi->vm;
1259 1259
1260 vm->hactive = mode->hdisplay; 1260 vm->hactive = mode->hdisplay;
1261 vm->vactive = mode->vdisplay; 1261 vm->vactive = mode->vdisplay;
1262 vm->vfront_porch = mode->vsync_start - mode->vdisplay; 1262 vm->vfront_porch = mode->vsync_start - mode->vdisplay;
1263 vm->vback_porch = mode->vtotal - mode->vsync_end; 1263 vm->vback_porch = mode->vtotal - mode->vsync_end;
1264 vm->vsync_len = mode->vsync_end - mode->vsync_start; 1264 vm->vsync_len = mode->vsync_end - mode->vsync_start;
1265 vm->hfront_porch = mode->hsync_start - mode->hdisplay; 1265 vm->hfront_porch = mode->hsync_start - mode->hdisplay;
1266 vm->hback_porch = mode->htotal - mode->hsync_end; 1266 vm->hback_porch = mode->htotal - mode->hsync_end;
1267 vm->hsync_len = mode->hsync_end - mode->hsync_start; 1267 vm->hsync_len = mode->hsync_end - mode->hsync_start;
1268 } 1268 }
1269 1269
1270 static struct exynos_drm_display_ops exynos_dsi_display_ops = { 1270 static struct exynos_drm_display_ops exynos_dsi_display_ops = {
1271 .create_connector = exynos_dsi_create_connector, 1271 .create_connector = exynos_dsi_create_connector,
1272 .mode_set = exynos_dsi_mode_set, 1272 .mode_set = exynos_dsi_mode_set,
1273 .dpms = exynos_dsi_dpms 1273 .dpms = exynos_dsi_dpms
1274 }; 1274 };
1275 1275
1276 static struct exynos_drm_display exynos_dsi_display = { 1276 static struct exynos_drm_display exynos_dsi_display = {
1277 .type = EXYNOS_DISPLAY_TYPE_LCD, 1277 .type = EXYNOS_DISPLAY_TYPE_LCD,
1278 .ops = &exynos_dsi_display_ops, 1278 .ops = &exynos_dsi_display_ops,
1279 }; 1279 };
1280 1280
1281 /* of_* functions will be removed after merge of of_graph patches */ 1281 /* of_* functions will be removed after merge of of_graph patches */
1282 static struct device_node * 1282 static struct device_node *
1283 of_get_child_by_name_reg(struct device_node *parent, const char *name, u32 reg) 1283 of_get_child_by_name_reg(struct device_node *parent, const char *name, u32 reg)
1284 { 1284 {
1285 struct device_node *np; 1285 struct device_node *np;
1286 1286
1287 for_each_child_of_node(parent, np) { 1287 for_each_child_of_node(parent, np) {
1288 u32 r; 1288 u32 r;
1289 1289
1290 if (!np->name || of_node_cmp(np->name, name)) 1290 if (!np->name || of_node_cmp(np->name, name))
1291 continue; 1291 continue;
1292 1292
1293 if (of_property_read_u32(np, "reg", &r) < 0) 1293 if (of_property_read_u32(np, "reg", &r) < 0)
1294 r = 0; 1294 r = 0;
1295 1295
1296 if (reg == r) 1296 if (reg == r)
1297 break; 1297 break;
1298 } 1298 }
1299 1299
1300 return np; 1300 return np;
1301 } 1301 }
1302 1302
1303 static struct device_node *of_graph_get_port_by_reg(struct device_node *parent, 1303 static struct device_node *of_graph_get_port_by_reg(struct device_node *parent,
1304 u32 reg) 1304 u32 reg)
1305 { 1305 {
1306 struct device_node *ports, *port; 1306 struct device_node *ports, *port;
1307 1307
1308 ports = of_get_child_by_name(parent, "ports"); 1308 ports = of_get_child_by_name(parent, "ports");
1309 if (ports) 1309 if (ports)
1310 parent = ports; 1310 parent = ports;
1311 1311
1312 port = of_get_child_by_name_reg(parent, "port", reg); 1312 port = of_get_child_by_name_reg(parent, "port", reg);
1313 1313
1314 of_node_put(ports); 1314 of_node_put(ports);
1315 1315
1316 return port; 1316 return port;
1317 } 1317 }
1318 1318
1319 static struct device_node * 1319 static struct device_node *
1320 of_graph_get_endpoint_by_reg(struct device_node *port, u32 reg) 1320 of_graph_get_endpoint_by_reg(struct device_node *port, u32 reg)
1321 { 1321 {
1322 return of_get_child_by_name_reg(port, "endpoint", reg); 1322 return of_get_child_by_name_reg(port, "endpoint", reg);
1323 } 1323 }
1324 1324
1325 static int exynos_dsi_of_read_u32(const struct device_node *np, 1325 static int exynos_dsi_of_read_u32(const struct device_node *np,
1326 const char *propname, u32 *out_value) 1326 const char *propname, u32 *out_value)
1327 { 1327 {
1328 int ret = of_property_read_u32(np, propname, out_value); 1328 int ret = of_property_read_u32(np, propname, out_value);
1329 1329
1330 if (ret < 0) 1330 if (ret < 0)
1331 pr_err("%s: failed to get '%s' property\n", np->full_name, 1331 pr_err("%s: failed to get '%s' property\n", np->full_name,
1332 propname); 1332 propname);
1333 1333
1334 return ret; 1334 return ret;
1335 } 1335 }
1336 1336
1337 enum { 1337 enum {
1338 DSI_PORT_IN, 1338 DSI_PORT_IN,
1339 DSI_PORT_OUT 1339 DSI_PORT_OUT
1340 }; 1340 };
1341 1341
1342 static int exynos_dsi_parse_dt(struct exynos_dsi *dsi) 1342 static int exynos_dsi_parse_dt(struct exynos_dsi *dsi)
1343 { 1343 {
1344 struct device *dev = dsi->dev; 1344 struct device *dev = dsi->dev;
1345 struct device_node *node = dev->of_node; 1345 struct device_node *node = dev->of_node;
1346 struct device_node *port, *ep; 1346 struct device_node *port, *ep;
1347 int ret; 1347 int ret;
1348 1348
1349 ret = exynos_dsi_of_read_u32(node, "samsung,pll-clock-frequency", 1349 ret = exynos_dsi_of_read_u32(node, "samsung,pll-clock-frequency",
1350 &dsi->pll_clk_rate); 1350 &dsi->pll_clk_rate);
1351 if (ret < 0) 1351 if (ret < 0)
1352 return ret; 1352 return ret;
1353 1353
1354 port = of_graph_get_port_by_reg(node, DSI_PORT_OUT); 1354 port = of_graph_get_port_by_reg(node, DSI_PORT_OUT);
1355 if (!port) { 1355 if (!port) {
1356 dev_err(dev, "no output port specified\n"); 1356 dev_err(dev, "no output port specified\n");
1357 return -EINVAL; 1357 return -EINVAL;
1358 } 1358 }
1359 1359
1360 ep = of_graph_get_endpoint_by_reg(port, 0); 1360 ep = of_graph_get_endpoint_by_reg(port, 0);
1361 of_node_put(port); 1361 of_node_put(port);
1362 if (!ep) { 1362 if (!ep) {
1363 dev_err(dev, "no endpoint specified in output port\n"); 1363 dev_err(dev, "no endpoint specified in output port\n");
1364 return -EINVAL; 1364 return -EINVAL;
1365 } 1365 }
1366 1366
1367 ret = exynos_dsi_of_read_u32(ep, "samsung,burst-clock-frequency", 1367 ret = exynos_dsi_of_read_u32(ep, "samsung,burst-clock-frequency",
1368 &dsi->burst_clk_rate); 1368 &dsi->burst_clk_rate);
1369 if (ret < 0) 1369 if (ret < 0)
1370 goto end; 1370 goto end;
1371 1371
1372 ret = exynos_dsi_of_read_u32(ep, "samsung,esc-clock-frequency", 1372 ret = exynos_dsi_of_read_u32(ep, "samsung,esc-clock-frequency",
1373 &dsi->esc_clk_rate); 1373 &dsi->esc_clk_rate);
1374 1374
1375 end: 1375 end:
1376 of_node_put(ep); 1376 of_node_put(ep);
1377 1377
1378 return ret; 1378 return ret;
1379 } 1379 }
1380 1380
1381 static int exynos_dsi_probe(struct platform_device *pdev) 1381 static int exynos_dsi_probe(struct platform_device *pdev)
1382 { 1382 {
1383 struct resource *res; 1383 struct resource *res;
1384 struct exynos_dsi *dsi; 1384 struct exynos_dsi *dsi;
1385 int ret; 1385 int ret;
1386 1386
1387 dsi = devm_kzalloc(&pdev->dev, sizeof(*dsi), GFP_KERNEL); 1387 dsi = devm_kzalloc(&pdev->dev, sizeof(*dsi), GFP_KERNEL);
1388 if (!dsi) { 1388 if (!dsi) {
1389 dev_err(&pdev->dev, "failed to allocate dsi object.\n"); 1389 dev_err(&pdev->dev, "failed to allocate dsi object.\n");
1390 return -ENOMEM; 1390 return -ENOMEM;
1391 } 1391 }
1392 1392
1393 init_completion(&dsi->completed); 1393 init_completion(&dsi->completed);
1394 spin_lock_init(&dsi->transfer_lock); 1394 spin_lock_init(&dsi->transfer_lock);
1395 INIT_LIST_HEAD(&dsi->transfer_list); 1395 INIT_LIST_HEAD(&dsi->transfer_list);
1396 1396
1397 dsi->dsi_host.ops = &exynos_dsi_ops; 1397 dsi->dsi_host.ops = &exynos_dsi_ops;
1398 dsi->dsi_host.dev = &pdev->dev; 1398 dsi->dsi_host.dev = &pdev->dev;
1399 1399
1400 dsi->dev = &pdev->dev; 1400 dsi->dev = &pdev->dev;
1401 1401
1402 ret = exynos_dsi_parse_dt(dsi); 1402 ret = exynos_dsi_parse_dt(dsi);
1403 if (ret) 1403 if (ret)
1404 return ret; 1404 return ret;
1405 1405
1406 dsi->supplies[0].supply = "vddcore"; 1406 dsi->supplies[0].supply = "vddcore";
1407 dsi->supplies[1].supply = "vddio"; 1407 dsi->supplies[1].supply = "vddio";
1408 ret = devm_regulator_bulk_get(&pdev->dev, ARRAY_SIZE(dsi->supplies), 1408 ret = devm_regulator_bulk_get(&pdev->dev, ARRAY_SIZE(dsi->supplies),
1409 dsi->supplies); 1409 dsi->supplies);
1410 if (ret) { 1410 if (ret) {
1411 dev_info(&pdev->dev, "failed to get regulators: %d\n", ret); 1411 dev_info(&pdev->dev, "failed to get regulators: %d\n", ret);
1412 return -EPROBE_DEFER; 1412 return -EPROBE_DEFER;
1413 } 1413 }
1414 1414
1415 dsi->pll_clk = devm_clk_get(&pdev->dev, "pll_clk"); 1415 dsi->pll_clk = devm_clk_get(&pdev->dev, "pll_clk");
1416 if (IS_ERR(dsi->pll_clk)) { 1416 if (IS_ERR(dsi->pll_clk)) {
1417 dev_info(&pdev->dev, "failed to get dsi pll input clock\n"); 1417 dev_info(&pdev->dev, "failed to get dsi pll input clock\n");
1418 return -EPROBE_DEFER; 1418 return -EPROBE_DEFER;
1419 } 1419 }
1420 1420
1421 dsi->bus_clk = devm_clk_get(&pdev->dev, "bus_clk"); 1421 dsi->bus_clk = devm_clk_get(&pdev->dev, "bus_clk");
1422 if (IS_ERR(dsi->bus_clk)) { 1422 if (IS_ERR(dsi->bus_clk)) {
1423 dev_info(&pdev->dev, "failed to get dsi bus clock\n"); 1423 dev_info(&pdev->dev, "failed to get dsi bus clock\n");
1424 return -EPROBE_DEFER; 1424 return -EPROBE_DEFER;
1425 } 1425 }
1426 1426
1427 res = platform_get_resource(pdev, IORESOURCE_MEM, 0); 1427 res = platform_get_resource(pdev, IORESOURCE_MEM, 0);
1428 dsi->reg_base = devm_ioremap_resource(&pdev->dev, res); 1428 dsi->reg_base = devm_ioremap_resource(&pdev->dev, res);
1429 if (!dsi->reg_base) { 1429 if (IS_ERR(dsi->reg_base)) {
1430 dev_err(&pdev->dev, "failed to remap io region\n"); 1430 dev_err(&pdev->dev, "failed to remap io region\n");
1431 return -EADDRNOTAVAIL; 1431 return PTR_ERR(dsi->reg_base);
1432 } 1432 }
1433 1433
1434 dsi->phy = devm_phy_get(&pdev->dev, "dsim"); 1434 dsi->phy = devm_phy_get(&pdev->dev, "dsim");
1435 if (IS_ERR(dsi->phy)) { 1435 if (IS_ERR(dsi->phy)) {
1436 dev_info(&pdev->dev, "failed to get dsim phy\n"); 1436 dev_info(&pdev->dev, "failed to get dsim phy\n");
1437 return -EPROBE_DEFER; 1437 return -EPROBE_DEFER;
1438 } 1438 }
1439 1439
1440 dsi->irq = platform_get_irq(pdev, 0); 1440 dsi->irq = platform_get_irq(pdev, 0);
1441 if (dsi->irq < 0) { 1441 if (dsi->irq < 0) {
1442 dev_err(&pdev->dev, "failed to request dsi irq resource\n"); 1442 dev_err(&pdev->dev, "failed to request dsi irq resource\n");
1443 return dsi->irq; 1443 return dsi->irq;
1444 } 1444 }
1445 1445
1446 irq_set_status_flags(dsi->irq, IRQ_NOAUTOEN); 1446 irq_set_status_flags(dsi->irq, IRQ_NOAUTOEN);
1447 ret = devm_request_threaded_irq(&pdev->dev, dsi->irq, NULL, 1447 ret = devm_request_threaded_irq(&pdev->dev, dsi->irq, NULL,
1448 exynos_dsi_irq, IRQF_ONESHOT, 1448 exynos_dsi_irq, IRQF_ONESHOT,
1449 dev_name(&pdev->dev), dsi); 1449 dev_name(&pdev->dev), dsi);
1450 if (ret) { 1450 if (ret) {
1451 dev_err(&pdev->dev, "failed to request dsi irq\n"); 1451 dev_err(&pdev->dev, "failed to request dsi irq\n");
1452 return ret; 1452 return ret;
1453 } 1453 }
1454 1454
1455 exynos_dsi_display.ctx = dsi; 1455 exynos_dsi_display.ctx = dsi;
1456 1456
1457 platform_set_drvdata(pdev, &exynos_dsi_display); 1457 platform_set_drvdata(pdev, &exynos_dsi_display);
1458 exynos_drm_display_register(&exynos_dsi_display); 1458 exynos_drm_display_register(&exynos_dsi_display);
1459 1459
1460 return mipi_dsi_host_register(&dsi->dsi_host); 1460 return mipi_dsi_host_register(&dsi->dsi_host);
1461 } 1461 }
1462 1462
1463 static int exynos_dsi_remove(struct platform_device *pdev) 1463 static int exynos_dsi_remove(struct platform_device *pdev)
1464 { 1464 {
1465 struct exynos_dsi *dsi = exynos_dsi_display.ctx; 1465 struct exynos_dsi *dsi = exynos_dsi_display.ctx;
1466 1466
1467 exynos_dsi_dpms(&exynos_dsi_display, DRM_MODE_DPMS_OFF); 1467 exynos_dsi_dpms(&exynos_dsi_display, DRM_MODE_DPMS_OFF);
1468 1468
1469 exynos_drm_display_unregister(&exynos_dsi_display); 1469 exynos_drm_display_unregister(&exynos_dsi_display);
1470 mipi_dsi_host_unregister(&dsi->dsi_host); 1470 mipi_dsi_host_unregister(&dsi->dsi_host);
1471 1471
1472 return 0; 1472 return 0;
1473 } 1473 }
1474 1474
1475 #ifdef CONFIG_PM_SLEEP 1475 #ifdef CONFIG_PM_SLEEP
1476 static int exynos_dsi_resume(struct device *dev) 1476 static int exynos_dsi_resume(struct device *dev)
1477 { 1477 {
1478 struct exynos_dsi *dsi = exynos_dsi_display.ctx; 1478 struct exynos_dsi *dsi = exynos_dsi_display.ctx;
1479 1479
1480 if (dsi->state & DSIM_STATE_ENABLED) { 1480 if (dsi->state & DSIM_STATE_ENABLED) {
1481 dsi->state &= ~DSIM_STATE_ENABLED; 1481 dsi->state &= ~DSIM_STATE_ENABLED;
1482 exynos_dsi_enable(dsi); 1482 exynos_dsi_enable(dsi);
1483 } 1483 }
1484 1484
1485 return 0; 1485 return 0;
1486 } 1486 }
1487 1487
1488 static int exynos_dsi_suspend(struct device *dev) 1488 static int exynos_dsi_suspend(struct device *dev)
1489 { 1489 {
1490 struct exynos_dsi *dsi = exynos_dsi_display.ctx; 1490 struct exynos_dsi *dsi = exynos_dsi_display.ctx;
1491 1491
1492 if (dsi->state & DSIM_STATE_ENABLED) { 1492 if (dsi->state & DSIM_STATE_ENABLED) {
1493 exynos_dsi_disable(dsi); 1493 exynos_dsi_disable(dsi);
1494 dsi->state |= DSIM_STATE_ENABLED; 1494 dsi->state |= DSIM_STATE_ENABLED;
1495 } 1495 }
1496 1496
1497 return 0; 1497 return 0;
1498 } 1498 }
1499 #endif 1499 #endif
1500 1500
1501 static const struct dev_pm_ops exynos_dsi_pm_ops = { 1501 static const struct dev_pm_ops exynos_dsi_pm_ops = {
1502 SET_SYSTEM_SLEEP_PM_OPS(exynos_dsi_suspend, exynos_dsi_resume) 1502 SET_SYSTEM_SLEEP_PM_OPS(exynos_dsi_suspend, exynos_dsi_resume)
1503 }; 1503 };
1504 1504
1505 static struct of_device_id exynos_dsi_of_match[] = { 1505 static struct of_device_id exynos_dsi_of_match[] = {
1506 { .compatible = "samsung,exynos4210-mipi-dsi" }, 1506 { .compatible = "samsung,exynos4210-mipi-dsi" },
1507 { } 1507 { }
1508 }; 1508 };
1509 1509
1510 struct platform_driver dsi_driver = { 1510 struct platform_driver dsi_driver = {
1511 .probe = exynos_dsi_probe, 1511 .probe = exynos_dsi_probe,
1512 .remove = exynos_dsi_remove, 1512 .remove = exynos_dsi_remove,
1513 .driver = { 1513 .driver = {
1514 .name = "exynos-dsi", 1514 .name = "exynos-dsi",
1515 .owner = THIS_MODULE, 1515 .owner = THIS_MODULE,
1516 .pm = &exynos_dsi_pm_ops, 1516 .pm = &exynos_dsi_pm_ops,
1517 .of_match_table = exynos_dsi_of_match, 1517 .of_match_table = exynos_dsi_of_match,
1518 }, 1518 },
1519 }; 1519 };
1520 1520
1521 MODULE_AUTHOR("Tomasz Figa <t.figa@samsung.com>"); 1521 MODULE_AUTHOR("Tomasz Figa <t.figa@samsung.com>");
1522 MODULE_AUTHOR("Andrzej Hajda <a.hajda@samsung.com>"); 1522 MODULE_AUTHOR("Andrzej Hajda <a.hajda@samsung.com>");
1523 MODULE_DESCRIPTION("Samsung SoC MIPI DSI Master"); 1523 MODULE_DESCRIPTION("Samsung SoC MIPI DSI Master");
1524 MODULE_LICENSE("GPL v2"); 1524 MODULE_LICENSE("GPL v2");
1525 1525
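
For context, here is a minimal, self-contained sketch (not part of the commit) of the probe-time error-handling pattern applied to dsi->reg_base in the hunk above. The foo_probe() function and the surrounding driver are hypothetical; only the handling of the devm_ioremap_resource() result with IS_ERR()/PTR_ERR() reflects the change shown in the diff.

    #include <linux/err.h>
    #include <linux/io.h>
    #include <linux/platform_device.h>

    static int foo_probe(struct platform_device *pdev)
    {
            struct resource *res;
            void __iomem *regs;

            /* look up the first MEM resource and map it with a managed helper */
            res = platform_get_resource(pdev, IORESOURCE_MEM, 0);
            regs = devm_ioremap_resource(&pdev->dev, res);
            if (IS_ERR(regs))
                    /* propagate the encoded errno rather than a hard-coded one */
                    return PTR_ERR(regs);

            /* ... remainder of probe would use 'regs' here ... */
            return 0;
    }

Returning PTR_ERR() preserves the specific error code from the mapping helper (for example -EBUSY or -ENOMEM) instead of collapsing every failure into -EADDRNOTAVAIL as the old code did.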