Blame view

drivers/gpu/drm/i915/intel_pm.c 207 KB
85208be01   Eugeni Dodonov   drm/i915: move fb...
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
  /*
   * Copyright © 2012 Intel Corporation
   *
   * Permission is hereby granted, free of charge, to any person obtaining a
   * copy of this software and associated documentation files (the "Software"),
   * to deal in the Software without restriction, including without limitation
   * the rights to use, copy, modify, merge, publish, distribute, sublicense,
   * and/or sell copies of the Software, and to permit persons to whom the
   * Software is furnished to do so, subject to the following conditions:
   *
   * The above copyright notice and this permission notice (including the next
   * paragraph) shall be included in all copies or substantial portions of the
   * Software.
   *
   * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
   * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
   * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.  IN NO EVENT SHALL
   * THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
   * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
   * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
   * IN THE SOFTWARE.
   *
   * Authors:
   *    Eugeni Dodonov <eugeni.dodonov@intel.com>
   *
   */
2b4e57bd7   Eugeni Dodonov   drm/i915: move dr...
27
  #include <linux/cpufreq.h>
85208be01   Eugeni Dodonov   drm/i915: move fb...
28
29
  #include "i915_drv.h"
  #include "intel_drv.h"
eb48eb005   Daniel Vetter   drm/i915: move th...
30
31
  #include "../../../platform/x86/intel_ips.h"
  #include <linux/module.h>
f9dcb0dfe   Paulo Zanoni   drm/i915: touch V...
32
  #include <linux/vgaarb.h>
f4db9321a   Damien Lespiau   drm/i915: Fix a c...
33
  #include <drm/i915_powerwell.h>
8a1874559   Paulo Zanoni   drm/i915: add ini...
34
  #include <linux/pm_runtime.h>
85208be01   Eugeni Dodonov   drm/i915: move fb...
35

dc39fff72   Ben Widawsky   drm/i915: Print R...
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
  /**
   * RC6 is a special power stage which allows the GPU to enter a very
   * low-voltage mode when idle, using down to 0V while at this stage.  This
   * stage is entered automatically when the GPU is idle when RC6 support is
   * enabled, and as soon as new workload arises GPU wakes up automatically as well.
   *
   * There are different RC6 modes available in Intel GPU, which differentiate
   * among each other with the latency required to enter and leave RC6 and
   * voltage consumed by the GPU in different states.
   *
   * The combination of the following flags define which states GPU is allowed
   * to enter, while RC6 is the normal RC6 state, RC6p is the deep RC6, and
   * RC6pp is deepest RC6. Their support by hardware varies according to the
   * GPU, BIOS, chipset and platform. RC6 is usually the safest one and the one
   * which brings the most power savings; deeper states save more power, but
   * require higher latency to switch to and wake up.
   */
  #define INTEL_RC6_ENABLE			(1<<0)
  #define INTEL_RC6p_ENABLE			(1<<1)
  #define INTEL_RC6pp_ENABLE			(1<<2)
f6750b3cc   Eugeni Dodonov   drm/i915: fix lin...
56
57
58
  /* FBC, or Frame Buffer Compression, is a technique employed to compress the
   * framebuffer contents in-memory, aiming at reducing the required bandwidth
   * during in-memory transfers and, therefore, reduce the power packet.
85208be01   Eugeni Dodonov   drm/i915: move fb...
59
   *
f6750b3cc   Eugeni Dodonov   drm/i915: fix lin...
60
61
   * The benefits of FBC are mostly visible with solid backgrounds and
   * variation-less patterns.
85208be01   Eugeni Dodonov   drm/i915: move fb...
62
   *
f6750b3cc   Eugeni Dodonov   drm/i915: fix lin...
63
64
   * FBC-related functionality can be enabled by the means of the
   * i915.i915_enable_fbc parameter
85208be01   Eugeni Dodonov   drm/i915: move fb...
65
   */
1fa611065   Eugeni Dodonov   drm/i915: add gen...
66
  static void i8xx_disable_fbc(struct drm_device *dev)
85208be01   Eugeni Dodonov   drm/i915: move fb...
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
  {
  	struct drm_i915_private *dev_priv = dev->dev_private;
  	u32 fbc_ctl;
  
  	/* Disable compression */
  	fbc_ctl = I915_READ(FBC_CONTROL);
  	if ((fbc_ctl & FBC_CTL_EN) == 0)
  		return;
  
  	fbc_ctl &= ~FBC_CTL_EN;
  	I915_WRITE(FBC_CONTROL, fbc_ctl);
  
  	/* Wait for compressing bit to clear */
  	if (wait_for((I915_READ(FBC_STATUS) & FBC_STAT_COMPRESSING) == 0, 10)) {
  		DRM_DEBUG_KMS("FBC idle timed out
  ");
  		return;
  	}
  
  	DRM_DEBUG_KMS("disabled FBC
  ");
  }
993495ae9   Ville Syrjälä   drm/i915: Rework ...
89
  static void i8xx_enable_fbc(struct drm_crtc *crtc)
85208be01   Eugeni Dodonov   drm/i915: move fb...
90
91
92
  {
  	struct drm_device *dev = crtc->dev;
  	struct drm_i915_private *dev_priv = dev->dev_private;
f4510a275   Matt Roper   drm: Replace crtc...
93
  	struct drm_framebuffer *fb = crtc->primary->fb;
2ff8fde1e   Matt Roper   drm/i915: Make us...
94
  	struct drm_i915_gem_object *obj = intel_fb_obj(fb);
85208be01   Eugeni Dodonov   drm/i915: move fb...
95
96
  	struct intel_crtc *intel_crtc = to_intel_crtc(crtc);
  	int cfb_pitch;
7f2cf220b   Ville Syrjälä   drm/i915: Improve...
97
  	int i;
159f98750   Ville Syrjälä   drm/i915: FBC_CON...
98
  	u32 fbc_ctl;
85208be01   Eugeni Dodonov   drm/i915: move fb...
99

5c3fe8b03   Ben Widawsky   drm/i915: Move fb...
100
  	cfb_pitch = dev_priv->fbc.size / FBC_LL_SIZE;
85208be01   Eugeni Dodonov   drm/i915: move fb...
101
102
  	if (fb->pitches[0] < cfb_pitch)
  		cfb_pitch = fb->pitches[0];
42a430f51   Ville Syrjälä   drm/i915: Gen2 FB...
103
104
105
106
107
  	/* FBC_CTL wants 32B or 64B units */
  	if (IS_GEN2(dev))
  		cfb_pitch = (cfb_pitch / 32) - 1;
  	else
  		cfb_pitch = (cfb_pitch / 64) - 1;
85208be01   Eugeni Dodonov   drm/i915: move fb...
108
109
110
111
  
  	/* Clear old tags */
  	for (i = 0; i < (FBC_LL_SIZE / 32) + 1; i++)
  		I915_WRITE(FBC_TAG + (i * 4), 0);
159f98750   Ville Syrjälä   drm/i915: FBC_CON...
112
113
114
115
116
  	if (IS_GEN4(dev)) {
  		u32 fbc_ctl2;
  
  		/* Set it up... */
  		fbc_ctl2 = FBC_CTL_FENCE_DBL | FBC_CTL_IDLE_IMM | FBC_CTL_CPU_FENCE;
7f2cf220b   Ville Syrjälä   drm/i915: Improve...
117
  		fbc_ctl2 |= FBC_CTL_PLANE(intel_crtc->plane);
159f98750   Ville Syrjälä   drm/i915: FBC_CON...
118
119
120
  		I915_WRITE(FBC_CONTROL2, fbc_ctl2);
  		I915_WRITE(FBC_FENCE_OFF, crtc->y);
  	}
85208be01   Eugeni Dodonov   drm/i915: move fb...
121
122
  
  	/* enable it... */
993495ae9   Ville Syrjälä   drm/i915: Rework ...
123
124
125
  	fbc_ctl = I915_READ(FBC_CONTROL);
  	fbc_ctl &= 0x3fff << FBC_CTL_INTERVAL_SHIFT;
  	fbc_ctl |= FBC_CTL_EN | FBC_CTL_PERIODIC;
85208be01   Eugeni Dodonov   drm/i915: move fb...
126
127
128
  	if (IS_I945GM(dev))
  		fbc_ctl |= FBC_CTL_C3_IDLE; /* 945 needs special SR handling */
  	fbc_ctl |= (cfb_pitch & 0xff) << FBC_CTL_STRIDE_SHIFT;
85208be01   Eugeni Dodonov   drm/i915: move fb...
129
130
  	fbc_ctl |= obj->fence_reg;
  	I915_WRITE(FBC_CONTROL, fbc_ctl);
5cd5410e9   Ville Syrjälä   drm/i915: Fix FBC...
131
132
  	DRM_DEBUG_KMS("enabled FBC, pitch %d, yoff %d, plane %c
  ",
84f44ce79   Ville Syrjälä   drm/i915: Print p...
133
  		      cfb_pitch, crtc->y, plane_name(intel_crtc->plane));
85208be01   Eugeni Dodonov   drm/i915: move fb...
134
  }
1fa611065   Eugeni Dodonov   drm/i915: add gen...
135
  static bool i8xx_fbc_enabled(struct drm_device *dev)
85208be01   Eugeni Dodonov   drm/i915: move fb...
136
137
138
139
140
  {
  	struct drm_i915_private *dev_priv = dev->dev_private;
  
  	return I915_READ(FBC_CONTROL) & FBC_CTL_EN;
  }
993495ae9   Ville Syrjälä   drm/i915: Rework ...
141
  static void g4x_enable_fbc(struct drm_crtc *crtc)
85208be01   Eugeni Dodonov   drm/i915: move fb...
142
143
144
  {
  	struct drm_device *dev = crtc->dev;
  	struct drm_i915_private *dev_priv = dev->dev_private;
f4510a275   Matt Roper   drm: Replace crtc...
145
  	struct drm_framebuffer *fb = crtc->primary->fb;
2ff8fde1e   Matt Roper   drm/i915: Make us...
146
  	struct drm_i915_gem_object *obj = intel_fb_obj(fb);
85208be01   Eugeni Dodonov   drm/i915: move fb...
147
  	struct intel_crtc *intel_crtc = to_intel_crtc(crtc);
85208be01   Eugeni Dodonov   drm/i915: move fb...
148
  	u32 dpfc_ctl;
3fa2e0eec   Ville Syrjälä   drm/i915: Use 1/2...
149
150
151
152
153
  	dpfc_ctl = DPFC_CTL_PLANE(intel_crtc->plane) | DPFC_SR_EN;
  	if (drm_format_plane_cpp(fb->pixel_format, 0) == 2)
  		dpfc_ctl |= DPFC_CTL_LIMIT_2X;
  	else
  		dpfc_ctl |= DPFC_CTL_LIMIT_1X;
85208be01   Eugeni Dodonov   drm/i915: move fb...
154
  	dpfc_ctl |= DPFC_CTL_FENCE_EN | obj->fence_reg;
85208be01   Eugeni Dodonov   drm/i915: move fb...
155

85208be01   Eugeni Dodonov   drm/i915: move fb...
156
157
158
  	I915_WRITE(DPFC_FENCE_YOFF, crtc->y);
  
  	/* enable it... */
fe74c1a54   Ville Syrjälä   drm/i915: Actuall...
159
  	I915_WRITE(DPFC_CONTROL, dpfc_ctl | DPFC_CTL_EN);
85208be01   Eugeni Dodonov   drm/i915: move fb...
160

84f44ce79   Ville Syrjälä   drm/i915: Print p...
161
162
  	DRM_DEBUG_KMS("enabled fbc on plane %c
  ", plane_name(intel_crtc->plane));
85208be01   Eugeni Dodonov   drm/i915: move fb...
163
  }
1fa611065   Eugeni Dodonov   drm/i915: add gen...
164
  static void g4x_disable_fbc(struct drm_device *dev)
85208be01   Eugeni Dodonov   drm/i915: move fb...
165
166
167
168
169
170
171
172
173
174
175
176
177
178
  {
  	struct drm_i915_private *dev_priv = dev->dev_private;
  	u32 dpfc_ctl;
  
  	/* Disable compression */
  	dpfc_ctl = I915_READ(DPFC_CONTROL);
  	if (dpfc_ctl & DPFC_CTL_EN) {
  		dpfc_ctl &= ~DPFC_CTL_EN;
  		I915_WRITE(DPFC_CONTROL, dpfc_ctl);
  
  		DRM_DEBUG_KMS("disabled FBC
  ");
  	}
  }
1fa611065   Eugeni Dodonov   drm/i915: add gen...
179
  static bool g4x_fbc_enabled(struct drm_device *dev)
85208be01   Eugeni Dodonov   drm/i915: move fb...
180
181
182
183
184
185
186
187
188
189
190
191
  {
  	struct drm_i915_private *dev_priv = dev->dev_private;
  
  	return I915_READ(DPFC_CONTROL) & DPFC_CTL_EN;
  }
  
  static void sandybridge_blit_fbc_update(struct drm_device *dev)
  {
  	struct drm_i915_private *dev_priv = dev->dev_private;
  	u32 blt_ecoskpd;
  
  	/* Make sure blitter notifies FBC of writes */
940aece47   Deepak S   drm/i915/vlv: Val...
192
193
194
195
  
  	/* Blitter is part of Media powerwell on VLV. No impact of
  	 * his param in other platforms for now */
  	gen6_gt_force_wake_get(dev_priv, FORCEWAKE_MEDIA);
c8d9a5905   Deepak S   drm/i915: Add pow...
196

85208be01   Eugeni Dodonov   drm/i915: move fb...
197
198
199
200
201
202
203
204
205
206
  	blt_ecoskpd = I915_READ(GEN6_BLITTER_ECOSKPD);
  	blt_ecoskpd |= GEN6_BLITTER_FBC_NOTIFY <<
  		GEN6_BLITTER_LOCK_SHIFT;
  	I915_WRITE(GEN6_BLITTER_ECOSKPD, blt_ecoskpd);
  	blt_ecoskpd |= GEN6_BLITTER_FBC_NOTIFY;
  	I915_WRITE(GEN6_BLITTER_ECOSKPD, blt_ecoskpd);
  	blt_ecoskpd &= ~(GEN6_BLITTER_FBC_NOTIFY <<
  			 GEN6_BLITTER_LOCK_SHIFT);
  	I915_WRITE(GEN6_BLITTER_ECOSKPD, blt_ecoskpd);
  	POSTING_READ(GEN6_BLITTER_ECOSKPD);
c8d9a5905   Deepak S   drm/i915: Add pow...
207

940aece47   Deepak S   drm/i915/vlv: Val...
208
  	gen6_gt_force_wake_put(dev_priv, FORCEWAKE_MEDIA);
85208be01   Eugeni Dodonov   drm/i915: move fb...
209
  }
993495ae9   Ville Syrjälä   drm/i915: Rework ...
210
  static void ironlake_enable_fbc(struct drm_crtc *crtc)
85208be01   Eugeni Dodonov   drm/i915: move fb...
211
212
213
  {
  	struct drm_device *dev = crtc->dev;
  	struct drm_i915_private *dev_priv = dev->dev_private;
f4510a275   Matt Roper   drm: Replace crtc...
214
  	struct drm_framebuffer *fb = crtc->primary->fb;
2ff8fde1e   Matt Roper   drm/i915: Make us...
215
  	struct drm_i915_gem_object *obj = intel_fb_obj(fb);
85208be01   Eugeni Dodonov   drm/i915: move fb...
216
  	struct intel_crtc *intel_crtc = to_intel_crtc(crtc);
85208be01   Eugeni Dodonov   drm/i915: move fb...
217
  	u32 dpfc_ctl;
46f3dab92   Ville Syrjälä   drm/i915: Don't p...
218
  	dpfc_ctl = DPFC_CTL_PLANE(intel_crtc->plane);
3fa2e0eec   Ville Syrjälä   drm/i915: Use 1/2...
219
  	if (drm_format_plane_cpp(fb->pixel_format, 0) == 2)
5e59f7175   Ben Widawsky   drm/i915: Try har...
220
221
222
223
224
225
226
227
  		dev_priv->fbc.threshold++;
  
  	switch (dev_priv->fbc.threshold) {
  	case 4:
  	case 3:
  		dpfc_ctl |= DPFC_CTL_LIMIT_4X;
  		break;
  	case 2:
3fa2e0eec   Ville Syrjälä   drm/i915: Use 1/2...
228
  		dpfc_ctl |= DPFC_CTL_LIMIT_2X;
5e59f7175   Ben Widawsky   drm/i915: Try har...
229
230
  		break;
  	case 1:
3fa2e0eec   Ville Syrjälä   drm/i915: Use 1/2...
231
  		dpfc_ctl |= DPFC_CTL_LIMIT_1X;
5e59f7175   Ben Widawsky   drm/i915: Try har...
232
233
  		break;
  	}
d629336b6   Ville Syrjälä   drm/i915: Don't s...
234
235
236
  	dpfc_ctl |= DPFC_CTL_FENCE_EN;
  	if (IS_GEN5(dev))
  		dpfc_ctl |= obj->fence_reg;
85208be01   Eugeni Dodonov   drm/i915: move fb...
237

85208be01   Eugeni Dodonov   drm/i915: move fb...
238
  	I915_WRITE(ILK_DPFC_FENCE_YOFF, crtc->y);
f343c5f64   Ben Widawsky   drm/i915: Getter/...
239
  	I915_WRITE(ILK_FBC_RT_BASE, i915_gem_obj_ggtt_offset(obj) | ILK_FBC_RT_VALID);
85208be01   Eugeni Dodonov   drm/i915: move fb...
240
241
242
243
244
245
246
247
248
  	/* enable it... */
  	I915_WRITE(ILK_DPFC_CONTROL, dpfc_ctl | DPFC_CTL_EN);
  
  	if (IS_GEN6(dev)) {
  		I915_WRITE(SNB_DPFC_CTL_SA,
  			   SNB_CPU_FENCE_ENABLE | obj->fence_reg);
  		I915_WRITE(DPFC_CPU_FENCE_OFFSET, crtc->y);
  		sandybridge_blit_fbc_update(dev);
  	}
84f44ce79   Ville Syrjälä   drm/i915: Print p...
249
250
  	DRM_DEBUG_KMS("enabled fbc on plane %c
  ", plane_name(intel_crtc->plane));
85208be01   Eugeni Dodonov   drm/i915: move fb...
251
  }
1fa611065   Eugeni Dodonov   drm/i915: add gen...
252
  static void ironlake_disable_fbc(struct drm_device *dev)
85208be01   Eugeni Dodonov   drm/i915: move fb...
253
254
255
256
257
258
259
260
261
262
263
264
265
266
  {
  	struct drm_i915_private *dev_priv = dev->dev_private;
  	u32 dpfc_ctl;
  
  	/* Disable compression */
  	dpfc_ctl = I915_READ(ILK_DPFC_CONTROL);
  	if (dpfc_ctl & DPFC_CTL_EN) {
  		dpfc_ctl &= ~DPFC_CTL_EN;
  		I915_WRITE(ILK_DPFC_CONTROL, dpfc_ctl);
  
  		DRM_DEBUG_KMS("disabled FBC
  ");
  	}
  }
1fa611065   Eugeni Dodonov   drm/i915: add gen...
267
  static bool ironlake_fbc_enabled(struct drm_device *dev)
85208be01   Eugeni Dodonov   drm/i915: move fb...
268
269
270
271
272
  {
  	struct drm_i915_private *dev_priv = dev->dev_private;
  
  	return I915_READ(ILK_DPFC_CONTROL) & DPFC_CTL_EN;
  }
993495ae9   Ville Syrjälä   drm/i915: Rework ...
273
  static void gen7_enable_fbc(struct drm_crtc *crtc)
abe959c7e   Rodrigo Vivi   drm/i915: Add sup...
274
275
276
  {
  	struct drm_device *dev = crtc->dev;
  	struct drm_i915_private *dev_priv = dev->dev_private;
f4510a275   Matt Roper   drm: Replace crtc...
277
  	struct drm_framebuffer *fb = crtc->primary->fb;
2ff8fde1e   Matt Roper   drm/i915: Make us...
278
  	struct drm_i915_gem_object *obj = intel_fb_obj(fb);
abe959c7e   Rodrigo Vivi   drm/i915: Add sup...
279
  	struct intel_crtc *intel_crtc = to_intel_crtc(crtc);
3fa2e0eec   Ville Syrjälä   drm/i915: Use 1/2...
280
  	u32 dpfc_ctl;
abe959c7e   Rodrigo Vivi   drm/i915: Add sup...
281

3fa2e0eec   Ville Syrjälä   drm/i915: Use 1/2...
282
283
  	dpfc_ctl = IVB_DPFC_CTL_PLANE(intel_crtc->plane);
  	if (drm_format_plane_cpp(fb->pixel_format, 0) == 2)
5e59f7175   Ben Widawsky   drm/i915: Try har...
284
285
286
287
288
289
290
291
  		dev_priv->fbc.threshold++;
  
  	switch (dev_priv->fbc.threshold) {
  	case 4:
  	case 3:
  		dpfc_ctl |= DPFC_CTL_LIMIT_4X;
  		break;
  	case 2:
3fa2e0eec   Ville Syrjälä   drm/i915: Use 1/2...
292
  		dpfc_ctl |= DPFC_CTL_LIMIT_2X;
5e59f7175   Ben Widawsky   drm/i915: Try har...
293
294
  		break;
  	case 1:
3fa2e0eec   Ville Syrjälä   drm/i915: Use 1/2...
295
  		dpfc_ctl |= DPFC_CTL_LIMIT_1X;
5e59f7175   Ben Widawsky   drm/i915: Try har...
296
297
  		break;
  	}
3fa2e0eec   Ville Syrjälä   drm/i915: Use 1/2...
298
  	dpfc_ctl |= IVB_DPFC_CTL_FENCE_EN;
da46f936b   Rodrigo Vivi   drm/i915: Introdu...
299
300
  	if (dev_priv->fbc.false_color)
  		dpfc_ctl |= FBC_CTL_FALSE_COLOR;
3fa2e0eec   Ville Syrjälä   drm/i915: Use 1/2...
301
  	I915_WRITE(ILK_DPFC_CONTROL, dpfc_ctl | DPFC_CTL_EN);
abe959c7e   Rodrigo Vivi   drm/i915: Add sup...
302

891348b2b   Rodrigo Vivi   drm/i915: Enable ...
303
  	if (IS_IVYBRIDGE(dev)) {
7dd23ba08   Damien Lespiau   drm/i915: Add mis...
304
  		/* WaFbcAsynchFlipDisableFbcQueue:ivb */
2adb6db8d   Ville Syrjälä   drm/i915: Use RMW...
305
306
307
  		I915_WRITE(ILK_DISPLAY_CHICKEN1,
  			   I915_READ(ILK_DISPLAY_CHICKEN1) |
  			   ILK_FBCQ_DIS);
285541647   Rodrigo Vivi   drm/i915: HSW FBC...
308
  	} else {
2adb6db8d   Ville Syrjälä   drm/i915: Use RMW...
309
  		/* WaFbcAsynchFlipDisableFbcQueue:hsw,bdw */
8f670bb15   Ville Syrjälä   drm/i915: Unify C...
310
311
312
  		I915_WRITE(CHICKEN_PIPESL_1(intel_crtc->pipe),
  			   I915_READ(CHICKEN_PIPESL_1(intel_crtc->pipe)) |
  			   HSW_FBCQ_DIS);
891348b2b   Rodrigo Vivi   drm/i915: Enable ...
313
  	}
b74ea102b   Rodrigo Vivi   drm/i915: IVB FBC...
314

abe959c7e   Rodrigo Vivi   drm/i915: Add sup...
315
316
317
318
319
  	I915_WRITE(SNB_DPFC_CTL_SA,
  		   SNB_CPU_FENCE_ENABLE | obj->fence_reg);
  	I915_WRITE(DPFC_CPU_FENCE_OFFSET, crtc->y);
  
  	sandybridge_blit_fbc_update(dev);
b19870ee6   Ville Syrjälä   drm/i915: Use pla...
320
321
  	DRM_DEBUG_KMS("enabled fbc on plane %c
  ", plane_name(intel_crtc->plane));
abe959c7e   Rodrigo Vivi   drm/i915: Add sup...
322
  }
85208be01   Eugeni Dodonov   drm/i915: move fb...
323
324
325
326
327
328
329
330
331
  bool intel_fbc_enabled(struct drm_device *dev)
  {
  	struct drm_i915_private *dev_priv = dev->dev_private;
  
  	if (!dev_priv->display.fbc_enabled)
  		return false;
  
  	return dev_priv->display.fbc_enabled(dev);
  }
c5ad011d7   Rodrigo Vivi   drm/i915: FBC flu...
332
333
334
335
336
337
338
339
340
  void gen8_fbc_sw_flush(struct drm_device *dev, u32 value)
  {
  	struct drm_i915_private *dev_priv = dev->dev_private;
  
  	if (!IS_GEN8(dev))
  		return;
  
  	I915_WRITE(MSG_FBC_REND_STATE, value);
  }
85208be01   Eugeni Dodonov   drm/i915: move fb...
341
342
343
344
345
346
347
348
349
  static void intel_fbc_work_fn(struct work_struct *__work)
  {
  	struct intel_fbc_work *work =
  		container_of(to_delayed_work(__work),
  			     struct intel_fbc_work, work);
  	struct drm_device *dev = work->crtc->dev;
  	struct drm_i915_private *dev_priv = dev->dev_private;
  
  	mutex_lock(&dev->struct_mutex);
5c3fe8b03   Ben Widawsky   drm/i915: Move fb...
350
  	if (work == dev_priv->fbc.fbc_work) {
85208be01   Eugeni Dodonov   drm/i915: move fb...
351
352
353
  		/* Double check that we haven't switched fb without cancelling
  		 * the prior work.
  		 */
f4510a275   Matt Roper   drm: Replace crtc...
354
  		if (work->crtc->primary->fb == work->fb) {
993495ae9   Ville Syrjälä   drm/i915: Rework ...
355
  			dev_priv->display.enable_fbc(work->crtc);
85208be01   Eugeni Dodonov   drm/i915: move fb...
356

5c3fe8b03   Ben Widawsky   drm/i915: Move fb...
357
  			dev_priv->fbc.plane = to_intel_crtc(work->crtc)->plane;
f4510a275   Matt Roper   drm: Replace crtc...
358
  			dev_priv->fbc.fb_id = work->crtc->primary->fb->base.id;
5c3fe8b03   Ben Widawsky   drm/i915: Move fb...
359
  			dev_priv->fbc.y = work->crtc->y;
85208be01   Eugeni Dodonov   drm/i915: move fb...
360
  		}
5c3fe8b03   Ben Widawsky   drm/i915: Move fb...
361
  		dev_priv->fbc.fbc_work = NULL;
85208be01   Eugeni Dodonov   drm/i915: move fb...
362
363
364
365
366
367
368
369
  	}
  	mutex_unlock(&dev->struct_mutex);
  
  	kfree(work);
  }
  
  static void intel_cancel_fbc_work(struct drm_i915_private *dev_priv)
  {
5c3fe8b03   Ben Widawsky   drm/i915: Move fb...
370
  	if (dev_priv->fbc.fbc_work == NULL)
85208be01   Eugeni Dodonov   drm/i915: move fb...
371
372
373
374
375
376
  		return;
  
  	DRM_DEBUG_KMS("cancelling pending FBC enable
  ");
  
  	/* Synchronisation is provided by struct_mutex and checking of
5c3fe8b03   Ben Widawsky   drm/i915: Move fb...
377
  	 * dev_priv->fbc.fbc_work, so we can perform the cancellation
85208be01   Eugeni Dodonov   drm/i915: move fb...
378
379
  	 * entirely asynchronously.
  	 */
5c3fe8b03   Ben Widawsky   drm/i915: Move fb...
380
  	if (cancel_delayed_work(&dev_priv->fbc.fbc_work->work))
85208be01   Eugeni Dodonov   drm/i915: move fb...
381
  		/* tasklet was killed before being run, clean up */
5c3fe8b03   Ben Widawsky   drm/i915: Move fb...
382
  		kfree(dev_priv->fbc.fbc_work);
85208be01   Eugeni Dodonov   drm/i915: move fb...
383
384
385
386
387
388
  
  	/* Mark the work as no longer wanted so that if it does
  	 * wake-up (because the work was already running and waiting
  	 * for our mutex), it will discover that is no longer
  	 * necessary to run.
  	 */
5c3fe8b03   Ben Widawsky   drm/i915: Move fb...
389
  	dev_priv->fbc.fbc_work = NULL;
85208be01   Eugeni Dodonov   drm/i915: move fb...
390
  }
993495ae9   Ville Syrjälä   drm/i915: Rework ...
391
  static void intel_enable_fbc(struct drm_crtc *crtc)
85208be01   Eugeni Dodonov   drm/i915: move fb...
392
393
394
395
396
397
398
399
400
  {
  	struct intel_fbc_work *work;
  	struct drm_device *dev = crtc->dev;
  	struct drm_i915_private *dev_priv = dev->dev_private;
  
  	if (!dev_priv->display.enable_fbc)
  		return;
  
  	intel_cancel_fbc_work(dev_priv);
b14c5679d   Daniel Vetter   drm/i915: use poi...
401
  	work = kzalloc(sizeof(*work), GFP_KERNEL);
85208be01   Eugeni Dodonov   drm/i915: move fb...
402
  	if (work == NULL) {
6cdcb5e73   Paulo Zanoni   drm/i915: invert ...
403
404
  		DRM_ERROR("Failed to allocate FBC work structure
  ");
993495ae9   Ville Syrjälä   drm/i915: Rework ...
405
  		dev_priv->display.enable_fbc(crtc);
85208be01   Eugeni Dodonov   drm/i915: move fb...
406
407
408
409
  		return;
  	}
  
  	work->crtc = crtc;
f4510a275   Matt Roper   drm: Replace crtc...
410
  	work->fb = crtc->primary->fb;
85208be01   Eugeni Dodonov   drm/i915: move fb...
411
  	INIT_DELAYED_WORK(&work->work, intel_fbc_work_fn);
5c3fe8b03   Ben Widawsky   drm/i915: Move fb...
412
  	dev_priv->fbc.fbc_work = work;
85208be01   Eugeni Dodonov   drm/i915: move fb...
413

85208be01   Eugeni Dodonov   drm/i915: move fb...
414
415
416
417
418
419
420
421
422
423
  	/* Delay the actual enabling to let pageflipping cease and the
  	 * display to settle before starting the compression. Note that
  	 * this delay also serves a second purpose: it allows for a
  	 * vblank to pass after disabling the FBC before we attempt
  	 * to modify the control registers.
  	 *
  	 * A more complicated solution would involve tracking vblanks
  	 * following the termination of the page-flipping sequence
  	 * and indeed performing the enable as a co-routine and not
  	 * waiting synchronously upon the vblank.
7457d6174   Damien Lespiau   drm/i915: We impl...
424
425
  	 *
  	 * WaFbcWaitForVBlankBeforeEnable:ilk,snb
85208be01   Eugeni Dodonov   drm/i915: move fb...
426
427
428
429
430
431
432
433
434
435
436
437
438
439
  	 */
  	schedule_delayed_work(&work->work, msecs_to_jiffies(50));
  }
  
  void intel_disable_fbc(struct drm_device *dev)
  {
  	struct drm_i915_private *dev_priv = dev->dev_private;
  
  	intel_cancel_fbc_work(dev_priv);
  
  	if (!dev_priv->display.disable_fbc)
  		return;
  
  	dev_priv->display.disable_fbc(dev);
5c3fe8b03   Ben Widawsky   drm/i915: Move fb...
440
  	dev_priv->fbc.plane = -1;
85208be01   Eugeni Dodonov   drm/i915: move fb...
441
  }
29ebf90f8   Chris Wilson   drm/i915: Squelch...
442
443
444
445
446
447
448
449
450
  static bool set_no_fbc_reason(struct drm_i915_private *dev_priv,
  			      enum no_fbc_reason reason)
  {
  	if (dev_priv->fbc.no_fbc_reason == reason)
  		return false;
  
  	dev_priv->fbc.no_fbc_reason = reason;
  	return true;
  }
85208be01   Eugeni Dodonov   drm/i915: move fb...
451
452
453
454
455
456
457
458
459
460
  /**
   * intel_update_fbc - enable/disable FBC as needed
   * @dev: the drm_device
   *
   * Set up the framebuffer compression hardware at mode set time.  We
   * enable it if possible:
   *   - plane A only (on pre-965)
   *   - no pixel multiply/line duplication
   *   - no alpha buffer discard
   *   - no dual wide
f85da868e   Paulo Zanoni   drm/i915: update ...
461
   *   - framebuffer <= max_hdisplay in width, max_vdisplay in height
85208be01   Eugeni Dodonov   drm/i915: move fb...
462
463
464
465
466
467
468
469
470
471
472
473
474
475
   *
   * We can't assume that any compression will take place (worst case),
   * so the compressed buffer has to be the same size as the uncompressed
   * one.  It also must reside (along with the line length buffer) in
   * stolen memory.
   *
   * We need to enable/disable FBC on a global basis.
   */
  void intel_update_fbc(struct drm_device *dev)
  {
  	struct drm_i915_private *dev_priv = dev->dev_private;
  	struct drm_crtc *crtc = NULL, *tmp_crtc;
  	struct intel_crtc *intel_crtc;
  	struct drm_framebuffer *fb;
85208be01   Eugeni Dodonov   drm/i915: move fb...
476
  	struct drm_i915_gem_object *obj;
ef644fdac   Ville Syrjälä   drm/i915: Use adj...
477
  	const struct drm_display_mode *adjusted_mode;
37327abdf   Ville Syrjälä   drm/i915: Add exp...
478
  	unsigned int max_width, max_height;
85208be01   Eugeni Dodonov   drm/i915: move fb...
479

3a77c4c44   Daniel Vetter   drm/i915: Drop I9...
480
  	if (!HAS_FBC(dev)) {
29ebf90f8   Chris Wilson   drm/i915: Squelch...
481
  		set_no_fbc_reason(dev_priv, FBC_UNSUPPORTED);
85208be01   Eugeni Dodonov   drm/i915: move fb...
482
  		return;
29ebf90f8   Chris Wilson   drm/i915: Squelch...
483
  	}
85208be01   Eugeni Dodonov   drm/i915: move fb...
484

d330a9530   Jani Nikula   drm/i915: move mo...
485
  	if (!i915.powersave) {
29ebf90f8   Chris Wilson   drm/i915: Squelch...
486
487
488
  		if (set_no_fbc_reason(dev_priv, FBC_MODULE_PARAM))
  			DRM_DEBUG_KMS("fbc disabled per module param
  ");
85208be01   Eugeni Dodonov   drm/i915: move fb...
489
  		return;
29ebf90f8   Chris Wilson   drm/i915: Squelch...
490
  	}
85208be01   Eugeni Dodonov   drm/i915: move fb...
491
492
493
494
495
496
497
498
499
500
  
  	/*
  	 * If FBC is already on, we just have to verify that we can
  	 * keep it that way...
  	 * Need to disable if:
  	 *   - more than one pipe is active
  	 *   - changing FBC params (stride, fence, mode)
  	 *   - new fb is too large to fit in compressed buffer
  	 *   - going to an unsupported config (interlace, pixel multiply, etc.)
  	 */
70e1e0ec0   Damien Lespiau   drm/i915: Use for...
501
  	for_each_crtc(dev, tmp_crtc) {
3490ea5de   Chris Wilson   drm/i915: Treat c...
502
  		if (intel_crtc_active(tmp_crtc) &&
4c445e0eb   Ville Syrjälä   drm/i915: Rename ...
503
  		    to_intel_crtc(tmp_crtc)->primary_enabled) {
85208be01   Eugeni Dodonov   drm/i915: move fb...
504
  			if (crtc) {
29ebf90f8   Chris Wilson   drm/i915: Squelch...
505
506
507
  				if (set_no_fbc_reason(dev_priv, FBC_MULTIPLE_PIPES))
  					DRM_DEBUG_KMS("more than one pipe active, disabling compression
  ");
85208be01   Eugeni Dodonov   drm/i915: move fb...
508
509
510
511
512
  				goto out_disable;
  			}
  			crtc = tmp_crtc;
  		}
  	}
f4510a275   Matt Roper   drm: Replace crtc...
513
  	if (!crtc || crtc->primary->fb == NULL) {
29ebf90f8   Chris Wilson   drm/i915: Squelch...
514
515
516
  		if (set_no_fbc_reason(dev_priv, FBC_NO_OUTPUT))
  			DRM_DEBUG_KMS("no output, disabling
  ");
85208be01   Eugeni Dodonov   drm/i915: move fb...
517
518
519
520
  		goto out_disable;
  	}
  
  	intel_crtc = to_intel_crtc(crtc);
f4510a275   Matt Roper   drm: Replace crtc...
521
  	fb = crtc->primary->fb;
2ff8fde1e   Matt Roper   drm/i915: Make us...
522
  	obj = intel_fb_obj(fb);
ef644fdac   Ville Syrjälä   drm/i915: Use adj...
523
  	adjusted_mode = &intel_crtc->config.adjusted_mode;
85208be01   Eugeni Dodonov   drm/i915: move fb...
524

0368920e5   Chris Wilson   drm/i915: Disable...
525
  	if (i915.enable_fbc < 0) {
29ebf90f8   Chris Wilson   drm/i915: Squelch...
526
527
528
  		if (set_no_fbc_reason(dev_priv, FBC_CHIP_DEFAULT))
  			DRM_DEBUG_KMS("disabled per chip default
  ");
8a5729a37   Damien Lespiau   drm/i915: Fix rea...
529
  		goto out_disable;
85208be01   Eugeni Dodonov   drm/i915: move fb...
530
  	}
d330a9530   Jani Nikula   drm/i915: move mo...
531
  	if (!i915.enable_fbc) {
29ebf90f8   Chris Wilson   drm/i915: Squelch...
532
533
534
  		if (set_no_fbc_reason(dev_priv, FBC_MODULE_PARAM))
  			DRM_DEBUG_KMS("fbc disabled per module param
  ");
85208be01   Eugeni Dodonov   drm/i915: move fb...
535
536
  		goto out_disable;
  	}
ef644fdac   Ville Syrjälä   drm/i915: Use adj...
537
538
  	if ((adjusted_mode->flags & DRM_MODE_FLAG_INTERLACE) ||
  	    (adjusted_mode->flags & DRM_MODE_FLAG_DBLSCAN)) {
29ebf90f8   Chris Wilson   drm/i915: Squelch...
539
540
541
542
  		if (set_no_fbc_reason(dev_priv, FBC_UNSUPPORTED_MODE))
  			DRM_DEBUG_KMS("mode incompatible with compression, "
  				      "disabling
  ");
85208be01   Eugeni Dodonov   drm/i915: move fb...
543
544
  		goto out_disable;
  	}
f85da868e   Paulo Zanoni   drm/i915: update ...
545

032843a5a   Daisy Sun   drm/i915: Broaden...
546
547
548
549
  	if (INTEL_INFO(dev)->gen >= 8 || IS_HASWELL(dev)) {
  		max_width = 4096;
  		max_height = 4096;
  	} else if (IS_G4X(dev) || INTEL_INFO(dev)->gen >= 5) {
37327abdf   Ville Syrjälä   drm/i915: Add exp...
550
551
  		max_width = 4096;
  		max_height = 2048;
f85da868e   Paulo Zanoni   drm/i915: update ...
552
  	} else {
37327abdf   Ville Syrjälä   drm/i915: Add exp...
553
554
  		max_width = 2048;
  		max_height = 1536;
f85da868e   Paulo Zanoni   drm/i915: update ...
555
  	}
37327abdf   Ville Syrjälä   drm/i915: Add exp...
556
557
  	if (intel_crtc->config.pipe_src_w > max_width ||
  	    intel_crtc->config.pipe_src_h > max_height) {
29ebf90f8   Chris Wilson   drm/i915: Squelch...
558
559
560
  		if (set_no_fbc_reason(dev_priv, FBC_MODE_TOO_LARGE))
  			DRM_DEBUG_KMS("mode too large for compression, disabling
  ");
85208be01   Eugeni Dodonov   drm/i915: move fb...
561
562
  		goto out_disable;
  	}
8f94d24b7   Ben Widawsky   drm/i915/bdw: Add...
563
  	if ((INTEL_INFO(dev)->gen < 4 || HAS_DDI(dev)) &&
c5a44aa01   Ville Syrjälä   drm/i915: Fix FBC...
564
  	    intel_crtc->plane != PLANE_A) {
29ebf90f8   Chris Wilson   drm/i915: Squelch...
565
  		if (set_no_fbc_reason(dev_priv, FBC_BAD_PLANE))
c5a44aa01   Ville Syrjälä   drm/i915: Fix FBC...
566
567
  			DRM_DEBUG_KMS("plane not A, disabling compression
  ");
85208be01   Eugeni Dodonov   drm/i915: move fb...
568
569
570
571
572
573
574
575
  		goto out_disable;
  	}
  
  	/* The use of a CPU fence is mandatory in order to detect writes
  	 * by the CPU to the scanout and trigger updates to the FBC.
  	 */
  	if (obj->tiling_mode != I915_TILING_X ||
  	    obj->fence_reg == I915_FENCE_REG_NONE) {
29ebf90f8   Chris Wilson   drm/i915: Squelch...
576
577
578
  		if (set_no_fbc_reason(dev_priv, FBC_NOT_TILED))
  			DRM_DEBUG_KMS("framebuffer not tiled or fenced, disabling compression
  ");
85208be01   Eugeni Dodonov   drm/i915: move fb...
579
580
  		goto out_disable;
  	}
48404c1e5   Sonika Jindal   drm/i915: Add 180...
581
582
583
584
585
586
587
  	if (INTEL_INFO(dev)->gen <= 4 && !IS_G4X(dev) &&
  	    to_intel_plane(crtc->primary)->rotation != BIT(DRM_ROTATE_0)) {
  		if (set_no_fbc_reason(dev_priv, FBC_UNSUPPORTED_MODE))
  			DRM_DEBUG_KMS("Rotation unsupported, disabling
  ");
  		goto out_disable;
  	}
85208be01   Eugeni Dodonov   drm/i915: move fb...
588
589
590
591
  
  	/* If the kernel debugger is active, always disable compression */
  	if (in_dbg_master())
  		goto out_disable;
2ff8fde1e   Matt Roper   drm/i915: Make us...
592
  	if (i915_gem_stolen_setup_compression(dev, obj->base.size,
5e59f7175   Ben Widawsky   drm/i915: Try har...
593
  					      drm_format_plane_cpp(fb->pixel_format, 0))) {
29ebf90f8   Chris Wilson   drm/i915: Squelch...
594
595
596
  		if (set_no_fbc_reason(dev_priv, FBC_STOLEN_TOO_SMALL))
  			DRM_DEBUG_KMS("framebuffer too large, disabling compression
  ");
11be49eb4   Chris Wilson   drm/i915: Delay a...
597
598
  		goto out_disable;
  	}
85208be01   Eugeni Dodonov   drm/i915: move fb...
599
600
601
602
603
  	/* If the scanout has not changed, don't modify the FBC settings.
  	 * Note that we make the fundamental assumption that the fb->obj
  	 * cannot be unpinned (and have its GTT offset and fence revoked)
  	 * without first being decoupled from the scanout and FBC disabled.
  	 */
5c3fe8b03   Ben Widawsky   drm/i915: Move fb...
604
605
606
  	if (dev_priv->fbc.plane == intel_crtc->plane &&
  	    dev_priv->fbc.fb_id == fb->base.id &&
  	    dev_priv->fbc.y == crtc->y)
85208be01   Eugeni Dodonov   drm/i915: move fb...
607
608
609
610
611
612
613
614
615
616
617
618
619
620
621
622
623
624
625
626
627
628
629
630
631
632
633
634
635
636
  		return;
  
  	if (intel_fbc_enabled(dev)) {
  		/* We update FBC along two paths, after changing fb/crtc
  		 * configuration (modeswitching) and after page-flipping
  		 * finishes. For the latter, we know that not only did
  		 * we disable the FBC at the start of the page-flip
  		 * sequence, but also more than one vblank has passed.
  		 *
  		 * For the former case of modeswitching, it is possible
  		 * to switch between two FBC valid configurations
  		 * instantaneously so we do need to disable the FBC
  		 * before we can modify its control registers. We also
  		 * have to wait for the next vblank for that to take
  		 * effect. However, since we delay enabling FBC we can
  		 * assume that a vblank has passed since disabling and
  		 * that we can safely alter the registers in the deferred
  		 * callback.
  		 *
  		 * In the scenario that we go from a valid to invalid
  		 * and then back to valid FBC configuration we have
  		 * no strict enforcement that a vblank occurred since
  		 * disabling the FBC. However, along all current pipe
  		 * disabling paths we do need to wait for a vblank at
  		 * some point. And we wait before enabling FBC anyway.
  		 */
  		DRM_DEBUG_KMS("disabling active FBC for update
  ");
  		intel_disable_fbc(dev);
  	}
993495ae9   Ville Syrjälä   drm/i915: Rework ...
637
  	intel_enable_fbc(crtc);
29ebf90f8   Chris Wilson   drm/i915: Squelch...
638
  	dev_priv->fbc.no_fbc_reason = FBC_OK;
85208be01   Eugeni Dodonov   drm/i915: move fb...
639
640
641
642
643
644
645
646
647
  	return;
  
  out_disable:
  	/* Multiple disables should be harmless */
  	if (intel_fbc_enabled(dev)) {
  		DRM_DEBUG_KMS("unsupported config, disabling FBC
  ");
  		intel_disable_fbc(dev);
  	}
11be49eb4   Chris Wilson   drm/i915: Delay a...
648
  	i915_gem_stolen_cleanup_compression(dev);
85208be01   Eugeni Dodonov   drm/i915: move fb...
649
  }
c921aba84   Daniel Vetter   drm/i915: move pn...
650
651
  static void i915_pineview_get_mem_freq(struct drm_device *dev)
  {
50227e1ca   Jani Nikula   drm/i915: prefer ...
652
  	struct drm_i915_private *dev_priv = dev->dev_private;
c921aba84   Daniel Vetter   drm/i915: move pn...
653
654
655
656
657
658
659
660
661
662
663
664
665
666
667
668
669
670
671
672
673
674
675
676
677
678
679
680
681
682
683
684
685
686
687
688
689
690
  	u32 tmp;
  
  	tmp = I915_READ(CLKCFG);
  
  	switch (tmp & CLKCFG_FSB_MASK) {
  	case CLKCFG_FSB_533:
  		dev_priv->fsb_freq = 533; /* 133*4 */
  		break;
  	case CLKCFG_FSB_800:
  		dev_priv->fsb_freq = 800; /* 200*4 */
  		break;
  	case CLKCFG_FSB_667:
  		dev_priv->fsb_freq =  667; /* 167*4 */
  		break;
  	case CLKCFG_FSB_400:
  		dev_priv->fsb_freq = 400; /* 100*4 */
  		break;
  	}
  
  	switch (tmp & CLKCFG_MEM_MASK) {
  	case CLKCFG_MEM_533:
  		dev_priv->mem_freq = 533;
  		break;
  	case CLKCFG_MEM_667:
  		dev_priv->mem_freq = 667;
  		break;
  	case CLKCFG_MEM_800:
  		dev_priv->mem_freq = 800;
  		break;
  	}
  
  	/* detect pineview DDR3 setting */
  	tmp = I915_READ(CSHRDDR3CTL);
  	dev_priv->is_ddr3 = (tmp & CSHRDDR3CTL_DDR3) ? 1 : 0;
  }
  
  static void i915_ironlake_get_mem_freq(struct drm_device *dev)
  {
50227e1ca   Jani Nikula   drm/i915: prefer ...
691
  	struct drm_i915_private *dev_priv = dev->dev_private;
c921aba84   Daniel Vetter   drm/i915: move pn...
692
693
694
695
696
697
698
699
700
701
702
703
704
705
706
707
708
709
710
711
712
713
714
715
716
  	u16 ddrpll, csipll;
  
  	ddrpll = I915_READ16(DDRMPLL1);
  	csipll = I915_READ16(CSIPLL0);
  
  	switch (ddrpll & 0xff) {
  	case 0xc:
  		dev_priv->mem_freq = 800;
  		break;
  	case 0x10:
  		dev_priv->mem_freq = 1066;
  		break;
  	case 0x14:
  		dev_priv->mem_freq = 1333;
  		break;
  	case 0x18:
  		dev_priv->mem_freq = 1600;
  		break;
  	default:
  		DRM_DEBUG_DRIVER("unknown memory frequency 0x%02x
  ",
  				 ddrpll & 0xff);
  		dev_priv->mem_freq = 0;
  		break;
  	}
20e4d407f   Daniel Vetter   drm/ips: move drp...
717
  	dev_priv->ips.r_t = dev_priv->mem_freq;
c921aba84   Daniel Vetter   drm/i915: move pn...
718
719
720
721
722
723
724
725
726
727
728
729
730
731
732
733
734
735
736
737
738
739
740
741
742
743
744
745
746
747
748
749
  
  	switch (csipll & 0x3ff) {
  	case 0x00c:
  		dev_priv->fsb_freq = 3200;
  		break;
  	case 0x00e:
  		dev_priv->fsb_freq = 3733;
  		break;
  	case 0x010:
  		dev_priv->fsb_freq = 4266;
  		break;
  	case 0x012:
  		dev_priv->fsb_freq = 4800;
  		break;
  	case 0x014:
  		dev_priv->fsb_freq = 5333;
  		break;
  	case 0x016:
  		dev_priv->fsb_freq = 5866;
  		break;
  	case 0x018:
  		dev_priv->fsb_freq = 6400;
  		break;
  	default:
  		DRM_DEBUG_DRIVER("unknown fsb frequency 0x%04x
  ",
  				 csipll & 0x3ff);
  		dev_priv->fsb_freq = 0;
  		break;
  	}
  
  	if (dev_priv->fsb_freq == 3200) {
20e4d407f   Daniel Vetter   drm/ips: move drp...
750
  		dev_priv->ips.c_m = 0;
c921aba84   Daniel Vetter   drm/i915: move pn...
751
  	} else if (dev_priv->fsb_freq > 3200 && dev_priv->fsb_freq <= 4800) {
20e4d407f   Daniel Vetter   drm/ips: move drp...
752
  		dev_priv->ips.c_m = 1;
c921aba84   Daniel Vetter   drm/i915: move pn...
753
  	} else {
20e4d407f   Daniel Vetter   drm/ips: move drp...
754
  		dev_priv->ips.c_m = 2;
c921aba84   Daniel Vetter   drm/i915: move pn...
755
756
  	}
  }
b445e3b01   Eugeni Dodonov   drm/i915: move wa...
757
758
759
760
761
762
763
764
765
766
767
768
769
770
771
772
773
774
775
776
777
778
779
780
781
782
783
784
785
786
787
788
789
790
791
792
793
/*
 * Self-refresh (CxSR) latency lookup table.  intel_get_cxsr_latency()
 * matches on the first four columns: (is_desktop, is_ddr3, fsb_freq,
 * mem_freq).  The remaining four columns are latencies in ns; presumably
 * (display_sr, display_hpll_disable, cursor_sr, cursor_hpll_disable) in
 * struct cxsr_latency field order -- TODO confirm against the struct
 * definition in the header.
 *
 * NOTE(review): the {0, 0, 667, 400, ...} row ends in 34106 where every
 * other row pairs a value x with x + 30000 (34103 expected here).  Looks
 * like a typo inherited from the original latency tables; verify against
 * the hardware documentation before "fixing".
 */
static const struct cxsr_latency cxsr_latency_table[] = {
	{1, 0, 800, 400, 3382, 33382, 3983, 33983},    /* DDR2-400 SC */
	{1, 0, 800, 667, 3354, 33354, 3807, 33807},    /* DDR2-667 SC */
	{1, 0, 800, 800, 3347, 33347, 3763, 33763},    /* DDR2-800 SC */
	{1, 1, 800, 667, 6420, 36420, 6873, 36873},    /* DDR3-667 SC */
	{1, 1, 800, 800, 5902, 35902, 6318, 36318},    /* DDR3-800 SC */

	{1, 0, 667, 400, 3400, 33400, 4021, 34021},    /* DDR2-400 SC */
	{1, 0, 667, 667, 3372, 33372, 3845, 33845},    /* DDR2-667 SC */
	{1, 0, 667, 800, 3386, 33386, 3822, 33822},    /* DDR2-800 SC */
	{1, 1, 667, 667, 6438, 36438, 6911, 36911},    /* DDR3-667 SC */
	{1, 1, 667, 800, 5941, 35941, 6377, 36377},    /* DDR3-800 SC */

	{1, 0, 400, 400, 3472, 33472, 4173, 34173},    /* DDR2-400 SC */
	{1, 0, 400, 667, 3443, 33443, 3996, 33996},    /* DDR2-667 SC */
	{1, 0, 400, 800, 3430, 33430, 3946, 33946},    /* DDR2-800 SC */
	{1, 1, 400, 667, 6509, 36509, 7062, 37062},    /* DDR3-667 SC */
	{1, 1, 400, 800, 5985, 35985, 6501, 36501},    /* DDR3-800 SC */

	{0, 0, 800, 400, 3438, 33438, 4065, 34065},    /* DDR2-400 SC */
	{0, 0, 800, 667, 3410, 33410, 3889, 33889},    /* DDR2-667 SC */
	{0, 0, 800, 800, 3403, 33403, 3845, 33845},    /* DDR2-800 SC */
	{0, 1, 800, 667, 6476, 36476, 6955, 36955},    /* DDR3-667 SC */
	{0, 1, 800, 800, 5958, 35958, 6400, 36400},    /* DDR3-800 SC */

	{0, 0, 667, 400, 3456, 33456, 4103, 34106},    /* DDR2-400 SC */
	{0, 0, 667, 667, 3428, 33428, 3927, 33927},    /* DDR2-667 SC */
	{0, 0, 667, 800, 3443, 33443, 3905, 33905},    /* DDR2-800 SC */
	{0, 1, 667, 667, 6494, 36494, 6993, 36993},    /* DDR3-667 SC */
	{0, 1, 667, 800, 5998, 35998, 6460, 36460},    /* DDR3-800 SC */

	{0, 0, 400, 400, 3528, 33528, 4255, 34255},    /* DDR2-400 SC */
	{0, 0, 400, 667, 3500, 33500, 4079, 34079},    /* DDR2-667 SC */
	{0, 0, 400, 800, 3487, 33487, 4029, 34029},    /* DDR2-800 SC */
	{0, 1, 400, 667, 6566, 36566, 7145, 37145},    /* DDR3-667 SC */
	{0, 1, 400, 800, 6042, 36042, 6584, 36584},    /* DDR3-800 SC */
};
63c62275e   Daniel Vetter   drm/i915: re-add ...
794
  static const struct cxsr_latency *intel_get_cxsr_latency(int is_desktop,
b445e3b01   Eugeni Dodonov   drm/i915: move wa...
795
796
797
798
799
800
801
802
803
804
805
806
807
808
809
810
811
812
813
814
815
816
817
  							 int is_ddr3,
  							 int fsb,
  							 int mem)
  {
  	const struct cxsr_latency *latency;
  	int i;
  
  	if (fsb == 0 || mem == 0)
  		return NULL;
  
  	for (i = 0; i < ARRAY_SIZE(cxsr_latency_table); i++) {
  		latency = &cxsr_latency_table[i];
  		if (is_desktop == latency->is_desktop &&
  		    is_ddr3 == latency->is_ddr3 &&
  		    fsb == latency->fsb_freq && mem == latency->mem_freq)
  			return latency;
  	}
  
  	DRM_DEBUG_KMS("Unknown FSB/MEM found, disable CxSR
  ");
  
  	return NULL;
  }
5209b1f4c   Imre Deak   drm/i915: gmch: f...
818
  void intel_set_memory_cxsr(struct drm_i915_private *dev_priv, bool enable)
b445e3b01   Eugeni Dodonov   drm/i915: move wa...
819
  {
5209b1f4c   Imre Deak   drm/i915: gmch: f...
820
821
  	struct drm_device *dev = dev_priv->dev;
  	u32 val;
b445e3b01   Eugeni Dodonov   drm/i915: move wa...
822

5209b1f4c   Imre Deak   drm/i915: gmch: f...
823
824
825
826
827
828
829
830
831
832
833
834
835
836
837
838
839
840
841
  	if (IS_VALLEYVIEW(dev)) {
  		I915_WRITE(FW_BLC_SELF_VLV, enable ? FW_CSPWRDWNEN : 0);
  	} else if (IS_G4X(dev) || IS_CRESTLINE(dev)) {
  		I915_WRITE(FW_BLC_SELF, enable ? FW_BLC_SELF_EN : 0);
  	} else if (IS_PINEVIEW(dev)) {
  		val = I915_READ(DSPFW3) & ~PINEVIEW_SELF_REFRESH_EN;
  		val |= enable ? PINEVIEW_SELF_REFRESH_EN : 0;
  		I915_WRITE(DSPFW3, val);
  	} else if (IS_I945G(dev) || IS_I945GM(dev)) {
  		val = enable ? _MASKED_BIT_ENABLE(FW_BLC_SELF_EN) :
  			       _MASKED_BIT_DISABLE(FW_BLC_SELF_EN);
  		I915_WRITE(FW_BLC_SELF, val);
  	} else if (IS_I915GM(dev)) {
  		val = enable ? _MASKED_BIT_ENABLE(INSTPM_SELF_EN) :
  			       _MASKED_BIT_DISABLE(INSTPM_SELF_EN);
  		I915_WRITE(INSTPM, val);
  	} else {
  		return;
  	}
b445e3b01   Eugeni Dodonov   drm/i915: move wa...
842

5209b1f4c   Imre Deak   drm/i915: gmch: f...
843
844
845
  	DRM_DEBUG_KMS("memory self-refresh is %s
  ",
  		      enable ? "enabled" : "disabled");
b445e3b01   Eugeni Dodonov   drm/i915: move wa...
846
847
848
849
850
851
852
853
854
855
856
857
858
859
860
861
  }
  
  /*
   * Latency for FIFO fetches is dependent on several factors:
   *   - memory configuration (speed, channels)
   *   - chipset
   *   - current MCH state
   * It can be fairly high in some situations, so here we assume a fairly
   * pessimal value.  It's a tradeoff between extra memory fetches (if we
   * set this value too high, the FIFO will fetch frequently to stay full)
   * and power consumption (set it too low to save power and we might see
   * FIFO underruns and display "flicker").
   *
   * A value of 5us seems to be a good balance; safe for very low end
   * platforms but not overly aggressive on lower latency configs.
   */
5aef60032   Chris Wilson   drm/i915: Rename ...
862
  static const int pessimal_latency_ns = 5000;
b445e3b01   Eugeni Dodonov   drm/i915: move wa...
863

1fa611065   Eugeni Dodonov   drm/i915: add gen...
864
  static int i9xx_get_fifo_size(struct drm_device *dev, int plane)
b445e3b01   Eugeni Dodonov   drm/i915: move wa...
865
866
867
868
869
870
871
872
873
874
875
876
877
878
879
  {
  	struct drm_i915_private *dev_priv = dev->dev_private;
  	uint32_t dsparb = I915_READ(DSPARB);
  	int size;
  
  	size = dsparb & 0x7f;
  	if (plane)
  		size = ((dsparb >> DSPARB_CSTART_SHIFT) & 0x7f) - size;
  
  	DRM_DEBUG_KMS("FIFO size - (0x%08x) %s: %d
  ", dsparb,
  		      plane ? "B" : "A", size);
  
  	return size;
  }
feb56b934   Daniel Vetter   drm/i915: i830M h...
880
  static int i830_get_fifo_size(struct drm_device *dev, int plane)
b445e3b01   Eugeni Dodonov   drm/i915: move wa...
881
882
883
884
885
886
887
888
889
890
891
892
893
894
895
896
  {
  	struct drm_i915_private *dev_priv = dev->dev_private;
  	uint32_t dsparb = I915_READ(DSPARB);
  	int size;
  
  	size = dsparb & 0x1ff;
  	if (plane)
  		size = ((dsparb >> DSPARB_BEND_SHIFT) & 0x1ff) - size;
  	size >>= 1; /* Convert to cachelines */
  
  	DRM_DEBUG_KMS("FIFO size - (0x%08x) %s: %d
  ", dsparb,
  		      plane ? "B" : "A", size);
  
  	return size;
  }
1fa611065   Eugeni Dodonov   drm/i915: add gen...
897
  static int i845_get_fifo_size(struct drm_device *dev, int plane)
b445e3b01   Eugeni Dodonov   drm/i915: move wa...
898
899
900
901
902
903
904
905
906
907
908
909
910
911
912
  {
  	struct drm_i915_private *dev_priv = dev->dev_private;
  	uint32_t dsparb = I915_READ(DSPARB);
  	int size;
  
  	size = dsparb & 0x7f;
  	size >>= 2; /* Convert to cachelines */
  
  	DRM_DEBUG_KMS("FIFO size - (0x%08x) %s: %d
  ", dsparb,
  		      plane ? "B" : "A",
  		      size);
  
  	return size;
  }
b445e3b01   Eugeni Dodonov   drm/i915: move wa...
913
914
/* Pineview has different values for various configs */
/* Normal display watermark parameters (see pineview_update_wm()). */
static const struct intel_watermark_params pineview_display_wm = {
	.fifo_size = PINEVIEW_DISPLAY_FIFO,
	.max_wm = PINEVIEW_MAX_WM,
	.default_wm = PINEVIEW_DFT_WM,
	.guard_size = PINEVIEW_GUARD_WM,
	.cacheline_size = PINEVIEW_FIFO_LINE_SIZE,
};
/* Display parameters used with latency->display_hpll_disable. */
static const struct intel_watermark_params pineview_display_hplloff_wm = {
	.fifo_size = PINEVIEW_DISPLAY_FIFO,
	.max_wm = PINEVIEW_MAX_WM,
	.default_wm = PINEVIEW_DFT_HPLLOFF_WM,
	.guard_size = PINEVIEW_GUARD_WM,
	.cacheline_size = PINEVIEW_FIFO_LINE_SIZE,
};
/* Cursor parameters used with latency->cursor_sr. */
static const struct intel_watermark_params pineview_cursor_wm = {
	.fifo_size = PINEVIEW_CURSOR_FIFO,
	.max_wm = PINEVIEW_CURSOR_MAX_WM,
	.default_wm = PINEVIEW_CURSOR_DFT_WM,
	.guard_size = PINEVIEW_CURSOR_GUARD_WM,
	.cacheline_size = PINEVIEW_FIFO_LINE_SIZE,
};
/* Cursor parameters used with latency->cursor_hpll_disable. */
static const struct intel_watermark_params pineview_cursor_hplloff_wm = {
	.fifo_size = PINEVIEW_CURSOR_FIFO,
	.max_wm = PINEVIEW_CURSOR_MAX_WM,
	.default_wm = PINEVIEW_CURSOR_DFT_WM,
	.guard_size = PINEVIEW_CURSOR_GUARD_WM,
	.cacheline_size = PINEVIEW_FIFO_LINE_SIZE,
};
/* G4X plane watermark parameters (used by g4x_compute_wm0/srwm). */
static const struct intel_watermark_params g4x_wm_info = {
	.fifo_size = G4X_FIFO_SIZE,
	.max_wm = G4X_MAX_WM,
	.default_wm = G4X_MAX_WM,
	.guard_size = 2,
	.cacheline_size = G4X_FIFO_LINE_SIZE,
};
/* G4X cursor parameters; FIFO/WM limits shared with i965 cursors. */
static const struct intel_watermark_params g4x_cursor_wm_info = {
	.fifo_size = I965_CURSOR_FIFO,
	.max_wm = I965_CURSOR_MAX_WM,
	.default_wm = I965_CURSOR_DFT_WM,
	.guard_size = 2,
	.cacheline_size = G4X_FIFO_LINE_SIZE,
};
/* Valleyview plane watermark parameters. */
static const struct intel_watermark_params valleyview_wm_info = {
	.fifo_size = VALLEYVIEW_FIFO_SIZE,
	.max_wm = VALLEYVIEW_MAX_WM,
	.default_wm = VALLEYVIEW_MAX_WM,
	.guard_size = 2,
	.cacheline_size = G4X_FIFO_LINE_SIZE,
};
/* Valleyview cursor parameters. */
static const struct intel_watermark_params valleyview_cursor_wm_info = {
	.fifo_size = I965_CURSOR_FIFO,
	.max_wm = VALLEYVIEW_CURSOR_MAX_WM,
	.default_wm = I965_CURSOR_DFT_WM,
	.guard_size = 2,
	.cacheline_size = G4X_FIFO_LINE_SIZE,
};
/* i965 cursor parameters. */
static const struct intel_watermark_params i965_cursor_wm_info = {
	.fifo_size = I965_CURSOR_FIFO,
	.max_wm = I965_CURSOR_MAX_WM,
	.default_wm = I965_CURSOR_DFT_WM,
	.guard_size = 2,
	.cacheline_size = I915_FIFO_LINE_SIZE,
};
/* i945 plane watermark parameters. */
static const struct intel_watermark_params i945_wm_info = {
	.fifo_size = I945_FIFO_SIZE,
	.max_wm = I915_MAX_WM,
	.default_wm = 1,
	.guard_size = 2,
	.cacheline_size = I915_FIFO_LINE_SIZE,
};
/* i915 plane watermark parameters. */
static const struct intel_watermark_params i915_wm_info = {
	.fifo_size = I915_FIFO_SIZE,
	.max_wm = I915_MAX_WM,
	.default_wm = 1,
	.guard_size = 2,
	.cacheline_size = I915_FIFO_LINE_SIZE,
};
/* i830/i855 plane A parameters (single-pipe case gets the full WM). */
static const struct intel_watermark_params i830_a_wm_info = {
	.fifo_size = I855GM_FIFO_SIZE,
	.max_wm = I915_MAX_WM,
	.default_wm = 1,
	.guard_size = 2,
	.cacheline_size = I830_FIFO_LINE_SIZE,
};
/* i830/i855 parameters when both pipes share the FIFO (halved max WM). */
static const struct intel_watermark_params i830_bc_wm_info = {
	.fifo_size = I855GM_FIFO_SIZE,
	.max_wm = I915_MAX_WM/2,
	.default_wm = 1,
	.guard_size = 2,
	.cacheline_size = I830_FIFO_LINE_SIZE,
};
/* i845 plane watermark parameters. */
static const struct intel_watermark_params i845_wm_info = {
	.fifo_size = I830_FIFO_SIZE,
	.max_wm = I915_MAX_WM,
	.default_wm = 1,
	.guard_size = 2,
	.cacheline_size = I830_FIFO_LINE_SIZE,
};
b445e3b01   Eugeni Dodonov   drm/i915: move wa...
1012
1013
1014
1015
1016
1017
1018
1019
1020
1021
1022
1023
1024
1025
1026
1027
1028
1029
1030
1031
1032
1033
1034
1035
1036
1037
1038
1039
1040
1041
1042
1043
1044
1045
1046
1047
1048
1049
1050
1051
1052
1053
1054
1055
1056
1057
1058
1059
1060
  /**
   * intel_calculate_wm - calculate watermark level
   * @clock_in_khz: pixel clock
   * @wm: chip FIFO params
   * @pixel_size: display pixel size
   * @latency_ns: memory latency for the platform
   *
   * Calculate the watermark level (the level at which the display plane will
   * start fetching from memory again).  Each chip has a different display
   * FIFO size and allocation, so the caller needs to figure that out and pass
   * in the correct intel_watermark_params structure.
   *
   * As the pixel clock runs, the FIFO will be drained at a rate that depends
   * on the pixel size.  When it reaches the watermark level, it'll start
   * fetching FIFO line sized based chunks from memory until the FIFO fills
   * past the watermark point.  If the FIFO drains completely, a FIFO underrun
   * will occur, and a display engine hang could result.
   */
  static unsigned long intel_calculate_wm(unsigned long clock_in_khz,
  					const struct intel_watermark_params *wm,
  					int fifo_size,
  					int pixel_size,
  					unsigned long latency_ns)
  {
  	long entries_required, wm_size;
  
  	/*
  	 * Note: we need to make sure we don't overflow for various clock &
  	 * latency values.
  	 * clocks go from a few thousand to several hundred thousand.
  	 * latency is usually a few thousand
  	 */
  	entries_required = ((clock_in_khz / 1000) * pixel_size * latency_ns) /
  		1000;
  	entries_required = DIV_ROUND_UP(entries_required, wm->cacheline_size);
  
  	DRM_DEBUG_KMS("FIFO entries required for mode: %ld
  ", entries_required);
  
  	wm_size = fifo_size - (entries_required + wm->guard_size);
  
  	DRM_DEBUG_KMS("FIFO watermark level: %ld
  ", wm_size);
  
  	/* Don't promote wm_size to unsigned... */
  	if (wm_size > (long)wm->max_wm)
  		wm_size = wm->max_wm;
  	if (wm_size <= 0)
  		wm_size = wm->default_wm;
d6feb1962   Ville Syrjälä   drm/i915: Limit t...
1061
1062
1063
1064
1065
1066
1067
1068
1069
1070
  
  	/*
  	 * Bspec seems to indicate that the value shouldn't be lower than
  	 * 'burst size + 1'. Certainly 830 is quite unhappy with low values.
  	 * Lets go for 8 which is the burst size since certain platforms
  	 * already use a hardcoded 8 (which is what the spec says should be
  	 * done).
  	 */
  	if (wm_size <= 8)
  		wm_size = 8;
b445e3b01   Eugeni Dodonov   drm/i915: move wa...
1071
1072
1073
1074
1075
1076
  	return wm_size;
  }
  
  static struct drm_crtc *single_enabled_crtc(struct drm_device *dev)
  {
  	struct drm_crtc *crtc, *enabled = NULL;
70e1e0ec0   Damien Lespiau   drm/i915: Use for...
1077
  	for_each_crtc(dev, crtc) {
3490ea5de   Chris Wilson   drm/i915: Treat c...
1078
  		if (intel_crtc_active(crtc)) {
b445e3b01   Eugeni Dodonov   drm/i915: move wa...
1079
1080
1081
1082
1083
1084
1085
1086
  			if (enabled)
  				return NULL;
  			enabled = crtc;
  		}
  	}
  
  	return enabled;
  }
46ba614c0   Ville Syrjälä   drm/i915: Pass cr...
1087
/*
 * Program the Pineview DSPFW1/DSPFW3 watermarks and enable memory
 * self-refresh when exactly one CRTC is active; otherwise (or when the
 * memory config has no latency table entry) disable self-refresh.
 */
static void pineview_update_wm(struct drm_crtc *unused_crtc)
{
	struct drm_device *dev = unused_crtc->dev;
	struct drm_i915_private *dev_priv = dev->dev_private;
	struct drm_crtc *crtc;
	const struct cxsr_latency *latency;
	u32 reg;
	unsigned long wm;

	/* Latencies come from the table keyed on the detected memory config. */
	latency = intel_get_cxsr_latency(IS_PINEVIEW_G(dev), dev_priv->is_ddr3,
					 dev_priv->fsb_freq, dev_priv->mem_freq);
	if (!latency) {
		DRM_DEBUG_KMS("Unknown FSB/MEM found, disable CxSR\n");
		intel_set_memory_cxsr(dev_priv, false);
		return;
	}

	/* Self-refresh watermarks only make sense with a single active pipe. */
	crtc = single_enabled_crtc(dev);
	if (crtc) {
		const struct drm_display_mode *adjusted_mode;
		int pixel_size = crtc->primary->fb->bits_per_pixel / 8;
		int clock;

		adjusted_mode = &to_intel_crtc(crtc)->config.adjusted_mode;
		clock = adjusted_mode->crtc_clock;

		/* Display SR */
		wm = intel_calculate_wm(clock, &pineview_display_wm,
					pineview_display_wm.fifo_size,
					pixel_size, latency->display_sr);
		reg = I915_READ(DSPFW1);
		reg &= ~DSPFW_SR_MASK;
		reg |= wm << DSPFW_SR_SHIFT;
		I915_WRITE(DSPFW1, reg);
		DRM_DEBUG_KMS("DSPFW1 register is %x\n", reg);

		/* cursor SR */
		/*
		 * NOTE(review): this passes pineview_display_wm.fifo_size
		 * (not pineview_cursor_wm's) as the FIFO size, unlike the
		 * HPLL-off cursor case below which also uses the display
		 * FIFO size -- intentional per original code, but worth
		 * confirming against the Pineview docs.
		 */
		wm = intel_calculate_wm(clock, &pineview_cursor_wm,
					pineview_display_wm.fifo_size,
					pixel_size, latency->cursor_sr);
		reg = I915_READ(DSPFW3);
		reg &= ~DSPFW_CURSOR_SR_MASK;
		reg |= (wm & 0x3f) << DSPFW_CURSOR_SR_SHIFT;
		I915_WRITE(DSPFW3, reg);

		/* Display HPLL off SR */
		wm = intel_calculate_wm(clock, &pineview_display_hplloff_wm,
					pineview_display_hplloff_wm.fifo_size,
					pixel_size, latency->display_hpll_disable);
		reg = I915_READ(DSPFW3);
		reg &= ~DSPFW_HPLL_SR_MASK;
		/* HPLL SR field sits at bit 0, so no shift is applied here. */
		reg |= wm & DSPFW_HPLL_SR_MASK;
		I915_WRITE(DSPFW3, reg);

		/* cursor HPLL off SR */
		wm = intel_calculate_wm(clock, &pineview_cursor_hplloff_wm,
					pineview_display_hplloff_wm.fifo_size,
					pixel_size, latency->cursor_hpll_disable);
		reg = I915_READ(DSPFW3);
		reg &= ~DSPFW_HPLL_CURSOR_MASK;
		reg |= (wm & 0x3f) << DSPFW_HPLL_CURSOR_SHIFT;
		I915_WRITE(DSPFW3, reg);
		DRM_DEBUG_KMS("DSPFW3 register is %x\n", reg);

		intel_set_memory_cxsr(dev_priv, true);
	} else {
		intel_set_memory_cxsr(dev_priv, false);
	}
}
  
/*
 * Compute the level-0 (normal operation) plane and cursor watermarks for
 * one pipe on G4X-class hardware.
 *
 * @plane_wm/@cursor_wm receive the computed watermark values, clamped to
 * the per-platform maxima.  Returns false (with guard-size defaults
 * stored) when the CRTC for @plane is not active, true otherwise.
 */
static bool g4x_compute_wm0(struct drm_device *dev,
			    int plane,
			    const struct intel_watermark_params *display,
			    int display_latency_ns,
			    const struct intel_watermark_params *cursor,
			    int cursor_latency_ns,
			    int *plane_wm,
			    int *cursor_wm)
{
	struct drm_crtc *crtc;
	const struct drm_display_mode *adjusted_mode;
	int htotal, hdisplay, clock, pixel_size;
	int line_time_us, line_count;
	int entries, tlb_miss;

	crtc = intel_get_crtc_for_plane(dev, plane);
	if (!intel_crtc_active(crtc)) {
		/* Inactive pipe: fall back to the minimal guard values. */
		*cursor_wm = cursor->guard_size;
		*plane_wm = display->guard_size;
		return false;
	}

	adjusted_mode = &to_intel_crtc(crtc)->config.adjusted_mode;
	clock = adjusted_mode->crtc_clock;
	htotal = adjusted_mode->crtc_htotal;
	hdisplay = to_intel_crtc(crtc)->config.pipe_src_w;
	pixel_size = crtc->primary->fb->bits_per_pixel / 8;

	/* Use the small buffer method to calculate plane watermark */
	/* entries = bytes fetched during the latency window */
	entries = ((clock * pixel_size / 1000) * display_latency_ns) / 1000;
	/* Extra entries to cover a possible TLB miss when the line is wide. */
	tlb_miss = display->fifo_size*display->cacheline_size - hdisplay * 8;
	if (tlb_miss > 0)
		entries += tlb_miss;
	entries = DIV_ROUND_UP(entries, display->cacheline_size);
	*plane_wm = entries + display->guard_size;
	if (*plane_wm > (int)display->max_wm)
		*plane_wm = display->max_wm;

	/* Use the large buffer method to calculate cursor watermark */
	/* max() guards against a division blow-up for very fast clocks. */
	line_time_us = max(htotal * 1000 / clock, 1);
	line_count = (cursor_latency_ns / line_time_us + 1000) / 1000;
	entries = line_count * to_intel_crtc(crtc)->cursor_width * pixel_size;
	tlb_miss = cursor->fifo_size*cursor->cacheline_size - hdisplay * 8;
	if (tlb_miss > 0)
		entries += tlb_miss;
	entries = DIV_ROUND_UP(entries, cursor->cacheline_size);
	*cursor_wm = entries + cursor->guard_size;
	if (*cursor_wm > (int)cursor->max_wm)
		*cursor_wm = (int)cursor->max_wm;

	return true;
}
  
  /*
   * Check the wm result.
   *
   * If any calculated watermark values is larger than the maximum value that
   * can be programmed into the associated watermark register, that watermark
   * must be disabled.
   */
  static bool g4x_check_srwm(struct drm_device *dev,
  			   int display_wm, int cursor_wm,
  			   const struct intel_watermark_params *display,
  			   const struct intel_watermark_params *cursor)
  {
  	DRM_DEBUG_KMS("SR watermark: display plane %d, cursor %d
  ",
  		      display_wm, cursor_wm);
  
  	if (display_wm > display->max_wm) {
  		DRM_DEBUG_KMS("display watermark is too large(%d/%ld), disabling
  ",
  			      display_wm, display->max_wm);
  		return false;
  	}
  
  	if (cursor_wm > cursor->max_wm) {
  		DRM_DEBUG_KMS("cursor watermark is too large(%d/%ld), disabling
  ",
  			      cursor_wm, cursor->max_wm);
  		return false;
  	}
  
  	if (!(display_wm || cursor_wm)) {
  		DRM_DEBUG_KMS("SR latency is 0, disabling
  ");
  		return false;
  	}
  
  	return true;
  }
  
/*
 * Compute the self-refresh plane and cursor watermarks for @plane on
 * G4X-class hardware and validate them via g4x_check_srwm().
 *
 * A zero @latency_ns means SR is unusable: both outputs are zeroed and
 * false is returned.  NOTE(review): unlike g4x_compute_wm0(), the CRTC is
 * not checked for being active here -- presumably the caller guarantees
 * it; confirm against the call sites.
 */
static bool g4x_compute_srwm(struct drm_device *dev,
			     int plane,
			     int latency_ns,
			     const struct intel_watermark_params *display,
			     const struct intel_watermark_params *cursor,
			     int *display_wm, int *cursor_wm)
{
	struct drm_crtc *crtc;
	const struct drm_display_mode *adjusted_mode;
	int hdisplay, htotal, pixel_size, clock;
	unsigned long line_time_us;
	int line_count, line_size;
	int small, large;
	int entries;

	if (!latency_ns) {
		*display_wm = *cursor_wm = 0;
		return false;
	}

	crtc = intel_get_crtc_for_plane(dev, plane);
	adjusted_mode = &to_intel_crtc(crtc)->config.adjusted_mode;
	clock = adjusted_mode->crtc_clock;
	htotal = adjusted_mode->crtc_htotal;
	hdisplay = to_intel_crtc(crtc)->config.pipe_src_w;
	pixel_size = crtc->primary->fb->bits_per_pixel / 8;

	/* max() guards against a division blow-up for very fast clocks. */
	line_time_us = max(htotal * 1000 / clock, 1);
	line_count = (latency_ns / line_time_us + 1000) / 1000;
	line_size = hdisplay * pixel_size;

	/* Use the minimum of the small and large buffer method for primary */
	small = ((clock * pixel_size / 1000) * latency_ns) / 1000;
	large = line_count * line_size;

	entries = DIV_ROUND_UP(min(small, large), display->cacheline_size);
	*display_wm = entries + display->guard_size;

	/* calculate the self-refresh watermark for display cursor */
	entries = line_count * pixel_size * to_intel_crtc(crtc)->cursor_width;
	entries = DIV_ROUND_UP(entries, cursor->cacheline_size);
	*cursor_wm = entries + cursor->guard_size;

	return g4x_check_srwm(dev,
			      *display_wm, *cursor_wm,
			      display, cursor);
}
0948c2651   Gajanan Bhat   drm/i915: General...
1297
1298
1299
1300
/*
 * Compute the drain-latency precision multiplier and drain latency for a
 * plane on the given CRTC (Valleyview memory arbiter programming).
 *
 * Returns false (with a WARN) when the pixel clock or pixel size is zero,
 * which would otherwise divide by zero below; true on success with the
 * results stored in @prec_mult and @drain_latency.
 */
static bool vlv_compute_drain_latency(struct drm_crtc *crtc,
				      int pixel_size,
				      int *prec_mult,
				      int *drain_latency)
{
	int entries;
	int clock = to_intel_crtc(crtc)->config.adjusted_mode.crtc_clock;

	if (WARN(clock == 0, "Pixel clock is zero!\n"))
		return false;

	if (WARN(pixel_size == 0, "Pixel size is zero!\n"))
		return false;

	/* Bytes consumed per microsecond of pixel clock (rounded up). */
	entries = DIV_ROUND_UP(clock, 1000) * pixel_size;
	/* Coarser precision once the consumption rate exceeds 128 B/us. */
	*prec_mult = (entries > 128) ? DRAIN_LATENCY_PRECISION_64 :
				       DRAIN_LATENCY_PRECISION_32;
	/*
	 * NOTE(review): the 64 and 4 factors here come from the VLV drain
	 * latency formula -- presumably 64-byte FIFO lines scaled by the
	 * precision multiplier; confirm against the VLV display docs.
	 */
	*drain_latency = (64 * (*prec_mult) * 4) / entries;

	/* Clamp to the width of the register field. */
	if (*drain_latency > DRAIN_LATENCY_MASK)
		*drain_latency = DRAIN_LATENCY_MASK;

	return true;
}
  
/*
 * Update drain latency registers of memory arbiter
 *
 * Valleyview SoC has a new memory arbiter and needs drain latency registers
 * to be programmed. Each plane has a drain latency multiplier and a drain
 * latency value.
 *
 * This read-modify-writes VLV_DDL for the pipe: the primary-plane and
 * cursor fields are recomputed, other fields (sprites) are preserved.
 */
static void vlv_update_drain_latency(struct drm_crtc *crtc)
{
	struct drm_i915_private *dev_priv = crtc->dev->dev_private;
	struct intel_crtc *intel_crtc = to_intel_crtc(crtc);
	int pixel_size;
	int drain_latency;
	enum pipe pipe = intel_crtc->pipe;
	int plane_prec, prec_mult, plane_dl;

	/* Current register value with the primary and cursor
	 * precision/latency fields cleared. */
	plane_dl = I915_READ(VLV_DDL(pipe)) & ~(DDL_PLANE_PRECISION_64 |
		   DRAIN_LATENCY_MASK | DDL_CURSOR_PRECISION_64 |
		   (DRAIN_LATENCY_MASK << DDL_CURSOR_SHIFT));

	/* Inactive pipe: just write back the cleared fields. */
	if (!intel_crtc_active(crtc)) {
		I915_WRITE(VLV_DDL(pipe), plane_dl);
		return;
	}

	/* Primary plane Drain Latency */
	pixel_size = crtc->primary->fb->bits_per_pixel / 8;	/* BPP */
	if (vlv_compute_drain_latency(crtc, pixel_size, &prec_mult, &drain_latency)) {
		plane_prec = (prec_mult == DRAIN_LATENCY_PRECISION_64) ?
					   DDL_PLANE_PRECISION_64 :
					   DDL_PLANE_PRECISION_32;
		plane_dl |= plane_prec | drain_latency;
	}

	/* Cursor Drain Latency
	 * BPP is always 4 for cursor
	 */
	pixel_size = 4;

	/* Program cursor DL only if it is enabled */
	if (intel_crtc->cursor_base &&
	    vlv_compute_drain_latency(crtc, pixel_size, &prec_mult, &drain_latency)) {
		plane_prec = (prec_mult == DRAIN_LATENCY_PRECISION_64) ?
					   DDL_CURSOR_PRECISION_64 :
					   DDL_CURSOR_PRECISION_32;
		plane_dl |= plane_prec | (drain_latency << DDL_CURSOR_SHIFT);
	}

	I915_WRITE(VLV_DDL(pipe), plane_dl);
}
  
  #define single_plane_enabled(mask) is_power_of_2(mask)
46ba614c0   Ville Syrjälä   drm/i915: Pass cr...
1374
/*
 * Compute and program FIFO watermarks for Valleyview.
 *
 * Per-pipe WM0 values are computed for pipes A and B; the self-refresh
 * (cxsr) watermarks are only valid with a single active plane, and use
 * double latency for the cursor (second g4x_compute_srwm() call).
 * cxsr is disabled before touching the registers and only re-enabled at
 * the end if the SR watermarks are usable.
 */
static void valleyview_update_wm(struct drm_crtc *crtc)
{
	struct drm_device *dev = crtc->dev;
	static const int sr_latency_ns = 12000;
	struct drm_i915_private *dev_priv = dev->dev_private;
	int planea_wm, planeb_wm, cursora_wm, cursorb_wm;
	int plane_sr, cursor_sr;
	int ignore_plane_sr, ignore_cursor_sr;
	unsigned int enabled = 0;
	bool cxsr_enabled;

	vlv_update_drain_latency(crtc);

	if (g4x_compute_wm0(dev, PIPE_A,
			    &valleyview_wm_info, pessimal_latency_ns,
			    &valleyview_cursor_wm_info, pessimal_latency_ns,
			    &planea_wm, &cursora_wm))
		enabled |= 1 << PIPE_A;

	if (g4x_compute_wm0(dev, PIPE_B,
			    &valleyview_wm_info, pessimal_latency_ns,
			    &valleyview_cursor_wm_info, pessimal_latency_ns,
			    &planeb_wm, &cursorb_wm))
		enabled |= 1 << PIPE_B;

	/* SR watermarks only apply when exactly one plane is enabled;
	 * the cursor SR is computed with 2x latency. */
	if (single_plane_enabled(enabled) &&
	    g4x_compute_srwm(dev, ffs(enabled) - 1,
			     sr_latency_ns,
			     &valleyview_wm_info,
			     &valleyview_cursor_wm_info,
			     &plane_sr, &ignore_cursor_sr) &&
	    g4x_compute_srwm(dev, ffs(enabled) - 1,
			     2*sr_latency_ns,
			     &valleyview_wm_info,
			     &valleyview_cursor_wm_info,
			     &ignore_plane_sr, &cursor_sr)) {
		cxsr_enabled = true;
	} else {
		cxsr_enabled = false;
		intel_set_memory_cxsr(dev_priv, false);
		plane_sr = cursor_sr = 0;
	}

	DRM_DEBUG_KMS("Setting FIFO watermarks - A: plane=%d, cursor=%d, "
		      "B: plane=%d, cursor=%d, SR: plane=%d, cursor=%d\n",
		      planea_wm, cursora_wm,
		      planeb_wm, cursorb_wm,
		      plane_sr, cursor_sr);

	I915_WRITE(DSPFW1,
		   (plane_sr << DSPFW_SR_SHIFT) |
		   (cursorb_wm << DSPFW_CURSORB_SHIFT) |
		   (planeb_wm << DSPFW_PLANEB_SHIFT) |
		   (planea_wm << DSPFW_PLANEA_SHIFT));
	I915_WRITE(DSPFW2,
		   (I915_READ(DSPFW2) & ~DSPFW_CURSORA_MASK) |
		   (cursora_wm << DSPFW_CURSORA_SHIFT));
	I915_WRITE(DSPFW3,
		   (I915_READ(DSPFW3) & ~DSPFW_CURSOR_SR_MASK) |
		   (cursor_sr << DSPFW_CURSOR_SR_SHIFT));

	if (cxsr_enabled)
		intel_set_memory_cxsr(dev_priv, true);
}
3c2777fd2   Ville Syrjälä   drm/i915: Add che...
1439
1440
1441
1442
1443
1444
1445
1446
1447
1448
1449
1450
1451
1452
1453
/*
 * Compute and program FIFO watermarks for Cherryview.
 *
 * Same scheme as valleyview_update_wm() (and it intentionally reuses the
 * valleyview wm_info tables), extended with a third pipe whose plane C /
 * cursor C watermarks live in DSPFW9_CHV.
 */
static void cherryview_update_wm(struct drm_crtc *crtc)
{
	struct drm_device *dev = crtc->dev;
	static const int sr_latency_ns = 12000;
	struct drm_i915_private *dev_priv = dev->dev_private;
	int planea_wm, planeb_wm, planec_wm;
	int cursora_wm, cursorb_wm, cursorc_wm;
	int plane_sr, cursor_sr;
	int ignore_plane_sr, ignore_cursor_sr;
	unsigned int enabled = 0;
	bool cxsr_enabled;

	vlv_update_drain_latency(crtc);

	if (g4x_compute_wm0(dev, PIPE_A,
			    &valleyview_wm_info, pessimal_latency_ns,
			    &valleyview_cursor_wm_info, pessimal_latency_ns,
			    &planea_wm, &cursora_wm))
		enabled |= 1 << PIPE_A;

	if (g4x_compute_wm0(dev, PIPE_B,
			    &valleyview_wm_info, pessimal_latency_ns,
			    &valleyview_cursor_wm_info, pessimal_latency_ns,
			    &planeb_wm, &cursorb_wm))
		enabled |= 1 << PIPE_B;

	if (g4x_compute_wm0(dev, PIPE_C,
			    &valleyview_wm_info, pessimal_latency_ns,
			    &valleyview_cursor_wm_info, pessimal_latency_ns,
			    &planec_wm, &cursorc_wm))
		enabled |= 1 << PIPE_C;

	/* SR watermarks only apply when exactly one plane is enabled;
	 * the cursor SR is computed with 2x latency. */
	if (single_plane_enabled(enabled) &&
	    g4x_compute_srwm(dev, ffs(enabled) - 1,
			     sr_latency_ns,
			     &valleyview_wm_info,
			     &valleyview_cursor_wm_info,
			     &plane_sr, &ignore_cursor_sr) &&
	    g4x_compute_srwm(dev, ffs(enabled) - 1,
			     2*sr_latency_ns,
			     &valleyview_wm_info,
			     &valleyview_cursor_wm_info,
			     &ignore_plane_sr, &cursor_sr)) {
		cxsr_enabled = true;
	} else {
		cxsr_enabled = false;
		intel_set_memory_cxsr(dev_priv, false);
		plane_sr = cursor_sr = 0;
	}

	DRM_DEBUG_KMS("Setting FIFO watermarks - A: plane=%d, cursor=%d, "
		      "B: plane=%d, cursor=%d, C: plane=%d, cursor=%d, "
		      "SR: plane=%d, cursor=%d\n",
		      planea_wm, cursora_wm,
		      planeb_wm, cursorb_wm,
		      planec_wm, cursorc_wm,
		      plane_sr, cursor_sr);

	I915_WRITE(DSPFW1,
		   (plane_sr << DSPFW_SR_SHIFT) |
		   (cursorb_wm << DSPFW_CURSORB_SHIFT) |
		   (planeb_wm << DSPFW_PLANEB_SHIFT) |
		   (planea_wm << DSPFW_PLANEA_SHIFT));
	I915_WRITE(DSPFW2,
		   (I915_READ(DSPFW2) & ~DSPFW_CURSORA_MASK) |
		   (cursora_wm << DSPFW_CURSORA_SHIFT));
	I915_WRITE(DSPFW3,
		   (I915_READ(DSPFW3) & ~DSPFW_CURSOR_SR_MASK) |
		   (cursor_sr << DSPFW_CURSOR_SR_SHIFT));
	/* Pipe C watermarks live in their own CHV-only register. */
	I915_WRITE(DSPFW9_CHV,
		   (I915_READ(DSPFW9_CHV) & ~(DSPFW_PLANEC_MASK |
					      DSPFW_CURSORC_MASK)) |
		   (planec_wm << DSPFW_PLANEC_SHIFT) |
		   (cursorc_wm << DSPFW_CURSORC_SHIFT));

	if (cxsr_enabled)
		intel_set_memory_cxsr(dev_priv, true);
}
01e184cc8   Gajanan Bhat   drm/i915: Add spr...
1518
1519
1520
1521
1522
1523
1524
1525
1526
1527
1528
1529
1530
1531
1532
1533
1534
1535
1536
1537
1538
1539
1540
1541
1542
1543
1544
1545
1546
1547
/*
 * Program the drain latency for one sprite plane on VLV/CHV.
 *
 * Matches the update_sprite_wm hook signature; sprite_width/height and
 * scaled are unused here because only the drain latency (derived from
 * pixel size and pixel clock) is programmed.  The sprite's fields in
 * VLV_DDL are cleared first and only refilled while the plane is enabled.
 */
static void valleyview_update_sprite_wm(struct drm_plane *plane,
					struct drm_crtc *crtc,
					uint32_t sprite_width,
					uint32_t sprite_height,
					int pixel_size,
					bool enabled, bool scaled)
{
	struct drm_device *dev = crtc->dev;
	struct drm_i915_private *dev_priv = dev->dev_private;
	int pipe = to_intel_plane(plane)->pipe;
	int sprite = to_intel_plane(plane)->plane;
	int drain_latency;
	int plane_prec;
	int sprite_dl;
	int prec_mult;

	/* RMW: clear just this sprite's precision + latency fields */
	sprite_dl = I915_READ(VLV_DDL(pipe)) & ~(DDL_SPRITE_PRECISION_64(sprite) |
		    (DRAIN_LATENCY_MASK << DDL_SPRITE_SHIFT(sprite)));

	if (enabled && vlv_compute_drain_latency(crtc, pixel_size, &prec_mult,
						 &drain_latency)) {
		plane_prec = (prec_mult == DRAIN_LATENCY_PRECISION_64) ?
					   DDL_SPRITE_PRECISION_64(sprite) :
					   DDL_SPRITE_PRECISION_32(sprite);
		sprite_dl |= plane_prec |
			     (drain_latency << DDL_SPRITE_SHIFT(sprite));
	}

	I915_WRITE(VLV_DDL(pipe), sprite_dl);
}
46ba614c0   Ville Syrjälä   drm/i915: Pass cr...
1548
/*
 * Compute and program FIFO watermarks for G4x.
 *
 * WM0 is computed per pipe; self-refresh watermarks require a single
 * enabled plane.  Unlike VLV, one g4x_compute_srwm() call yields both
 * the plane and cursor SR values.  HPLL-off-in-SR is explicitly kept
 * disabled in DSPFW3 (known problematic on G4x).
 */
static void g4x_update_wm(struct drm_crtc *crtc)
{
	struct drm_device *dev = crtc->dev;
	static const int sr_latency_ns = 12000;
	struct drm_i915_private *dev_priv = dev->dev_private;
	int planea_wm, planeb_wm, cursora_wm, cursorb_wm;
	int plane_sr, cursor_sr;
	unsigned int enabled = 0;
	bool cxsr_enabled;

	if (g4x_compute_wm0(dev, PIPE_A,
			    &g4x_wm_info, pessimal_latency_ns,
			    &g4x_cursor_wm_info, pessimal_latency_ns,
			    &planea_wm, &cursora_wm))
		enabled |= 1 << PIPE_A;

	if (g4x_compute_wm0(dev, PIPE_B,
			    &g4x_wm_info, pessimal_latency_ns,
			    &g4x_cursor_wm_info, pessimal_latency_ns,
			    &planeb_wm, &cursorb_wm))
		enabled |= 1 << PIPE_B;

	if (single_plane_enabled(enabled) &&
	    g4x_compute_srwm(dev, ffs(enabled) - 1,
			     sr_latency_ns,
			     &g4x_wm_info,
			     &g4x_cursor_wm_info,
			     &plane_sr, &cursor_sr)) {
		cxsr_enabled = true;
	} else {
		cxsr_enabled = false;
		intel_set_memory_cxsr(dev_priv, false);
		plane_sr = cursor_sr = 0;
	}

	DRM_DEBUG_KMS("Setting FIFO watermarks - A: plane=%d, cursor=%d, "
		      "B: plane=%d, cursor=%d, SR: plane=%d, cursor=%d\n",
		      planea_wm, cursora_wm,
		      planeb_wm, cursorb_wm,
		      plane_sr, cursor_sr);

	I915_WRITE(DSPFW1,
		   (plane_sr << DSPFW_SR_SHIFT) |
		   (cursorb_wm << DSPFW_CURSORB_SHIFT) |
		   (planeb_wm << DSPFW_PLANEB_SHIFT) |
		   (planea_wm << DSPFW_PLANEA_SHIFT));
	I915_WRITE(DSPFW2,
		   (I915_READ(DSPFW2) & ~DSPFW_CURSORA_MASK) |
		   (cursora_wm << DSPFW_CURSORA_SHIFT));
	/* HPLL off in SR has some issues on G4x... disable it */
	I915_WRITE(DSPFW3,
		   (I915_READ(DSPFW3) & ~(DSPFW_HPLL_SR_EN | DSPFW_CURSOR_SR_MASK)) |
		   (cursor_sr << DSPFW_CURSOR_SR_SHIFT));

	if (cxsr_enabled)
		intel_set_memory_cxsr(dev_priv, true);
}
46ba614c0   Ville Syrjälä   drm/i915: Pass cr...
1606
/*
 * Compute and program FIFO watermarks for i965 (gen4).
 *
 * Per-plane WM0 is fixed at 8 entries; only the self-refresh watermarks
 * are computed, and only for a single-pipe configuration.  When more
 * than one pipe is active, cxsr is turned off entirely.
 */
static void i965_update_wm(struct drm_crtc *unused_crtc)
{
	struct drm_device *dev = unused_crtc->dev;
	struct drm_i915_private *dev_priv = dev->dev_private;
	struct drm_crtc *crtc;
	int srwm = 1;
	int cursor_sr = 16;
	bool cxsr_enabled;

	/* Calc sr entries for one plane configs */
	crtc = single_enabled_crtc(dev);
	if (crtc) {
		/* self-refresh has much higher latency */
		static const int sr_latency_ns = 12000;
		const struct drm_display_mode *adjusted_mode =
			&to_intel_crtc(crtc)->config.adjusted_mode;
		int clock = adjusted_mode->crtc_clock;
		int htotal = adjusted_mode->crtc_htotal;
		int hdisplay = to_intel_crtc(crtc)->config.pipe_src_w;
		int pixel_size = crtc->primary->fb->bits_per_pixel / 8;
		unsigned long line_time_us;
		int entries;

		/* max() guards against a zero divisor downstream */
		line_time_us = max(htotal * 1000 / clock, 1);

		/* Use ns/us then divide to preserve precision */
		entries = (((sr_latency_ns / line_time_us) + 1000) / 1000) *
			pixel_size * hdisplay;
		entries = DIV_ROUND_UP(entries, I915_FIFO_LINE_SIZE);
		srwm = I965_FIFO_SIZE - entries;
		if (srwm < 0)
			srwm = 1;
		srwm &= 0x1ff;
		DRM_DEBUG_KMS("self-refresh entries: %d, wm: %d\n",
			      entries, srwm);

		/* Same latency-based formula for the cursor fifo */
		entries = (((sr_latency_ns / line_time_us) + 1000) / 1000) *
			pixel_size * to_intel_crtc(crtc)->cursor_width;
		entries = DIV_ROUND_UP(entries,
					  i965_cursor_wm_info.cacheline_size);
		cursor_sr = i965_cursor_wm_info.fifo_size -
			(entries + i965_cursor_wm_info.guard_size);

		if (cursor_sr > i965_cursor_wm_info.max_wm)
			cursor_sr = i965_cursor_wm_info.max_wm;

		DRM_DEBUG_KMS("self-refresh watermark: display plane %d "
			      "cursor %d\n", srwm, cursor_sr);
		cxsr_enabled = true;
	} else {
		cxsr_enabled = false;
		/* Turn off self refresh if both pipes are enabled */
		intel_set_memory_cxsr(dev_priv, false);
	}

	DRM_DEBUG_KMS("Setting FIFO watermarks - A: 8, B: 8, C: 8, SR %d\n",
		      srwm);

	/* 965 has limitations... */
	I915_WRITE(DSPFW1, (srwm << DSPFW_SR_SHIFT) |
		   (8 << DSPFW_CURSORB_SHIFT) |
		   (8 << DSPFW_PLANEB_SHIFT) |
		   (8 << DSPFW_PLANEA_SHIFT));
	I915_WRITE(DSPFW2, (8 << DSPFW_CURSORA_SHIFT) |
		   (8 << DSPFW_PLANEC_SHIFT_OLD));
	/* update cursor SR watermark */
	I915_WRITE(DSPFW3, (cursor_sr << DSPFW_CURSOR_SR_SHIFT));

	if (cxsr_enabled)
		intel_set_memory_cxsr(dev_priv, true);
}
46ba614c0   Ville Syrjälä   drm/i915: Pass cr...
1679
/*
 * Compute and program FIFO watermarks for gen2/gen3 (i8xx/i9xx).
 *
 * Picks the per-platform watermark parameter table, computes plane A/B
 * watermarks from the FIFO split, then (for single-pipe configs on
 * parts with FW_BLC self-refresh) a self-refresh watermark.  cxsr is
 * disabled around the register writes and re-enabled only when a single
 * plane remains eligible.
 */
static void i9xx_update_wm(struct drm_crtc *unused_crtc)
{
	struct drm_device *dev = unused_crtc->dev;
	struct drm_i915_private *dev_priv = dev->dev_private;
	const struct intel_watermark_params *wm_info;
	uint32_t fwater_lo;
	uint32_t fwater_hi;
	int cwm, srwm = 1;
	int fifo_size;
	int planea_wm, planeb_wm;
	struct drm_crtc *crtc, *enabled = NULL;

	if (IS_I945GM(dev))
		wm_info = &i945_wm_info;
	else if (!IS_GEN2(dev))
		wm_info = &i915_wm_info;
	else
		wm_info = &i830_a_wm_info;

	fifo_size = dev_priv->display.get_fifo_size(dev, 0);
	crtc = intel_get_crtc_for_plane(dev, 0);
	if (intel_crtc_active(crtc)) {
		const struct drm_display_mode *adjusted_mode;
		int cpp = crtc->primary->fb->bits_per_pixel / 8;
		/* gen2 always fetches at 4 bytes/pixel */
		if (IS_GEN2(dev))
			cpp = 4;

		adjusted_mode = &to_intel_crtc(crtc)->config.adjusted_mode;
		planea_wm = intel_calculate_wm(adjusted_mode->crtc_clock,
					       wm_info, fifo_size, cpp,
					       pessimal_latency_ns);
		enabled = crtc;
	} else {
		/* inactive plane: park the watermark at fifo minus guard */
		planea_wm = fifo_size - wm_info->guard_size;
		if (planea_wm > (long)wm_info->max_wm)
			planea_wm = wm_info->max_wm;
	}

	/* gen2 plane B/C use a different parameter table than plane A */
	if (IS_GEN2(dev))
		wm_info = &i830_bc_wm_info;

	fifo_size = dev_priv->display.get_fifo_size(dev, 1);
	crtc = intel_get_crtc_for_plane(dev, 1);
	if (intel_crtc_active(crtc)) {
		const struct drm_display_mode *adjusted_mode;
		int cpp = crtc->primary->fb->bits_per_pixel / 8;
		if (IS_GEN2(dev))
			cpp = 4;

		adjusted_mode = &to_intel_crtc(crtc)->config.adjusted_mode;
		planeb_wm = intel_calculate_wm(adjusted_mode->crtc_clock,
					       wm_info, fifo_size, cpp,
					       pessimal_latency_ns);
		/* both pipes active -> no SR candidate */
		if (enabled == NULL)
			enabled = crtc;
		else
			enabled = NULL;
	} else {
		planeb_wm = fifo_size - wm_info->guard_size;
		if (planeb_wm > (long)wm_info->max_wm)
			planeb_wm = wm_info->max_wm;
	}

	DRM_DEBUG_KMS("FIFO watermarks - A: %d, B: %d\n", planea_wm, planeb_wm);

	if (IS_I915GM(dev) && enabled) {
		struct drm_i915_gem_object *obj;

		obj = intel_fb_obj(enabled->primary->fb);

		/* self-refresh seems busted with untiled */
		if (obj->tiling_mode == I915_TILING_NONE)
			enabled = NULL;
	}

	/*
	 * Overlay gets an aggressive default since video jitter is bad.
	 */
	cwm = 2;

	/* Play safe and disable self-refresh before adjusting watermarks. */
	intel_set_memory_cxsr(dev_priv, false);

	/* Calc sr entries for one plane configs */
	if (HAS_FW_BLC(dev) && enabled) {
		/* self-refresh has much higher latency */
		static const int sr_latency_ns = 6000;
		const struct drm_display_mode *adjusted_mode =
			&to_intel_crtc(enabled)->config.adjusted_mode;
		int clock = adjusted_mode->crtc_clock;
		int htotal = adjusted_mode->crtc_htotal;
		int hdisplay = to_intel_crtc(enabled)->config.pipe_src_w;
		int pixel_size = enabled->primary->fb->bits_per_pixel / 8;
		unsigned long line_time_us;
		int entries;

		/* max() guards against a zero divisor downstream */
		line_time_us = max(htotal * 1000 / clock, 1);

		/* Use ns/us then divide to preserve precision */
		entries = (((sr_latency_ns / line_time_us) + 1000) / 1000) *
			pixel_size * hdisplay;
		entries = DIV_ROUND_UP(entries, wm_info->cacheline_size);
		DRM_DEBUG_KMS("self-refresh entries: %d\n", entries);
		srwm = wm_info->fifo_size - entries;
		if (srwm < 0)
			srwm = 1;

		if (IS_I945G(dev) || IS_I945GM(dev))
			I915_WRITE(FW_BLC_SELF,
				   FW_BLC_SELF_FIFO_MASK | (srwm & 0xff));
		else if (IS_I915GM(dev))
			I915_WRITE(FW_BLC_SELF, srwm & 0x3f);
	}

	DRM_DEBUG_KMS("Setting FIFO watermarks - A: %d, B: %d, C: %d, SR %d\n",
		      planea_wm, planeb_wm, cwm, srwm);

	fwater_lo = ((planeb_wm & 0x3f) << 16) | (planea_wm & 0x3f);
	fwater_hi = (cwm & 0x1f);

	/* Set request length to 8 cachelines per fetch */
	fwater_lo = fwater_lo | (1 << 24) | (1 << 8);
	fwater_hi = fwater_hi | (1 << 8);

	I915_WRITE(FW_BLC, fwater_lo);
	I915_WRITE(FW_BLC2, fwater_hi);

	if (enabled)
		intel_set_memory_cxsr(dev_priv, true);
}
feb56b934   Daniel Vetter   drm/i915: i830M h...
1806
/*
 * Compute and program the FIFO watermark for i845 (gen2, single plane).
 *
 * Only plane A exists; the watermark is merged into the low FW_BLC
 * fields, preserving the rest of the register.  Nothing is programmed
 * unless exactly one CRTC is enabled.
 */
static void i845_update_wm(struct drm_crtc *unused_crtc)
{
	struct drm_device *dev = unused_crtc->dev;
	struct drm_i915_private *dev_priv = dev->dev_private;
	struct drm_crtc *crtc;
	const struct drm_display_mode *adjusted_mode;
	uint32_t fwater_lo;
	int planea_wm;

	crtc = single_enabled_crtc(dev);
	if (crtc == NULL)
		return;

	/* cursor/overlay BPP is hardcoded to 4 bytes here */
	adjusted_mode = &to_intel_crtc(crtc)->config.adjusted_mode;
	planea_wm = intel_calculate_wm(adjusted_mode->crtc_clock,
				       &i845_wm_info,
				       dev_priv->display.get_fifo_size(dev, 0),
				       4, pessimal_latency_ns);
	/* RMW: keep everything except the low watermark fields */
	fwater_lo = I915_READ(FW_BLC) & ~0xfff;
	fwater_lo |= (3<<8) | planea_wm;

	DRM_DEBUG_KMS("Setting FIFO watermarks - A: %d\n", planea_wm);

	I915_WRITE(FW_BLC, fwater_lo);
}
3658729a7   Ville Syrjälä   drm/i915: Rename ...
1831
1832
/*
 * Return the effective pixel rate for a pipe on ILK+.
 *
 * Starts from the adjusted-mode clock and, when the PCH panel fitter is
 * enabled, scales it by the pipe-source to panel-fitter area ratio (a
 * downscale increases the effective memory fetch rate).  64-bit math is
 * used to avoid overflow in the area product.
 */
static uint32_t ilk_pipe_pixel_rate(struct drm_device *dev,
				    struct drm_crtc *crtc)
{
	struct intel_crtc *intel_crtc = to_intel_crtc(crtc);
	uint32_t pixel_rate;

	pixel_rate = intel_crtc->config.adjusted_mode.crtc_clock;

	/* We only use IF-ID interlacing. If we ever use PF-ID we'll need to
	 * adjust the pixel_rate here. */
	if (intel_crtc->config.pch_pfit.enabled) {
		uint64_t pipe_w, pipe_h, pfit_w, pfit_h;
		uint32_t pfit_size = intel_crtc->config.pch_pfit.size;

		pipe_w = intel_crtc->config.pipe_src_w;
		pipe_h = intel_crtc->config.pipe_src_h;

		/* pfit.size packs width in the high 16 bits, height low */
		pfit_w = (pfit_size >> 16) & 0xFFFF;
		pfit_h = pfit_size & 0xFFFF;
		if (pipe_w < pfit_w)
			pipe_w = pfit_w;
		if (pipe_h < pfit_h)
			pipe_h = pfit_h;

		pixel_rate = div_u64((uint64_t) pixel_rate * pipe_w * pipe_h,
				     pfit_w * pfit_h);
	}

	return pixel_rate;
}
37126462a   Ville Syrjälä   drm/i915: Add com...
1860
  /* latency must be in 0.1us units. */
23297044a   Ville Syrjälä   drm/i915: Rename ...
1861
  static uint32_t ilk_wm_method1(uint32_t pixel_rate, uint8_t bytes_per_pixel,
801bcfffb   Paulo Zanoni   drm/i915: properl...
1862
1863
1864
  			       uint32_t latency)
  {
  	uint64_t ret;
3312ba65c   Ville Syrjälä   drm/i915: Disable...
1865
1866
1867
  	if (WARN(latency == 0, "Latency value missing
  "))
  		return UINT_MAX;
801bcfffb   Paulo Zanoni   drm/i915: properl...
1868
1869
1870
1871
1872
  	ret = (uint64_t) pixel_rate * bytes_per_pixel * latency;
  	ret = DIV_ROUND_UP_ULL(ret, 64 * 10000) + 2;
  
  	return ret;
  }
37126462a   Ville Syrjälä   drm/i915: Add com...
1873
  /* latency must be in 0.1us units. */
23297044a   Ville Syrjälä   drm/i915: Rename ...
1874
  static uint32_t ilk_wm_method2(uint32_t pixel_rate, uint32_t pipe_htotal,
801bcfffb   Paulo Zanoni   drm/i915: properl...
1875
1876
1877
1878
  			       uint32_t horiz_pixels, uint8_t bytes_per_pixel,
  			       uint32_t latency)
  {
  	uint32_t ret;
3312ba65c   Ville Syrjälä   drm/i915: Disable...
1879
1880
1881
  	if (WARN(latency == 0, "Latency value missing
  "))
  		return UINT_MAX;
801bcfffb   Paulo Zanoni   drm/i915: properl...
1882
1883
1884
1885
1886
  	ret = (latency * pixel_rate) / (pipe_htotal * 10000);
  	ret = (ret + 1) * horiz_pixels * bytes_per_pixel;
  	ret = DIV_ROUND_UP(ret, 64) + 2;
  	return ret;
  }
23297044a   Ville Syrjälä   drm/i915: Rename ...
1887
  static uint32_t ilk_wm_fbc(uint32_t pri_val, uint32_t horiz_pixels,
cca32e9ad   Paulo Zanoni   drm/i915: properl...
1888
1889
1890
1891
  			   uint8_t bytes_per_pixel)
  {
  	return DIV_ROUND_UP(pri_val * 64, horiz_pixels * bytes_per_pixel) + 2;
  }
/* Per-pipe input parameters for the ILK-style watermark computation. */
struct ilk_pipe_wm_parameters {
	bool active;		/* is the pipe active? */
	uint32_t pipe_htotal;	/* horizontal total of the adjusted mode */
	uint32_t pixel_rate;	/* pipe pixel rate (incl. panel fitter adjustment) */
	/* per-plane parameters: primary, sprite and cursor planes */
	struct intel_plane_wm_parameters pri;
	struct intel_plane_wm_parameters spr;
	struct intel_plane_wm_parameters cur;
};
/* Maximum allowed watermark values for one level (config/register limits). */
struct ilk_wm_maximums {
	uint16_t pri;	/* primary plane */
	uint16_t spr;	/* sprite plane */
	uint16_t cur;	/* cursor plane */
	uint16_t fbc;	/* FBC watermark */
};
/* used in computing the new watermarks state */
struct intel_wm_config {
	unsigned int num_pipes_active;	/* how many pipes are currently enabled */
	bool sprites_enabled;		/* any sprite plane enabled? */
	bool sprites_scaled;		/* any sprite plane scaled? */
};
37126462a   Ville Syrjälä   drm/i915: Add com...
1912
1913
1914
1915
  /*
   * For both WM_PIPE and WM_LP.
   * mem_value must be in 0.1us units.
   */
820c19803   Imre Deak   drm/i915: s/haswe...
1916
  static uint32_t ilk_compute_pri_wm(const struct ilk_pipe_wm_parameters *params,
cca32e9ad   Paulo Zanoni   drm/i915: properl...
1917
1918
  				   uint32_t mem_value,
  				   bool is_lp)
801bcfffb   Paulo Zanoni   drm/i915: properl...
1919
  {
cca32e9ad   Paulo Zanoni   drm/i915: properl...
1920
  	uint32_t method1, method2;
c35426d2b   Ville Syrjälä   drm/i915: Split p...
1921
  	if (!params->active || !params->pri.enabled)
801bcfffb   Paulo Zanoni   drm/i915: properl...
1922
  		return 0;
23297044a   Ville Syrjälä   drm/i915: Rename ...
1923
  	method1 = ilk_wm_method1(params->pixel_rate,
c35426d2b   Ville Syrjälä   drm/i915: Split p...
1924
  				 params->pri.bytes_per_pixel,
cca32e9ad   Paulo Zanoni   drm/i915: properl...
1925
1926
1927
1928
  				 mem_value);
  
  	if (!is_lp)
  		return method1;
23297044a   Ville Syrjälä   drm/i915: Rename ...
1929
  	method2 = ilk_wm_method2(params->pixel_rate,
cca32e9ad   Paulo Zanoni   drm/i915: properl...
1930
  				 params->pipe_htotal,
c35426d2b   Ville Syrjälä   drm/i915: Split p...
1931
1932
  				 params->pri.horiz_pixels,
  				 params->pri.bytes_per_pixel,
cca32e9ad   Paulo Zanoni   drm/i915: properl...
1933
1934
1935
  				 mem_value);
  
  	return min(method1, method2);
801bcfffb   Paulo Zanoni   drm/i915: properl...
1936
  }
37126462a   Ville Syrjälä   drm/i915: Add com...
1937
1938
1939
1940
  /*
   * For both WM_PIPE and WM_LP.
   * mem_value must be in 0.1us units.
   */
820c19803   Imre Deak   drm/i915: s/haswe...
1941
  static uint32_t ilk_compute_spr_wm(const struct ilk_pipe_wm_parameters *params,
801bcfffb   Paulo Zanoni   drm/i915: properl...
1942
1943
1944
  				   uint32_t mem_value)
  {
  	uint32_t method1, method2;
c35426d2b   Ville Syrjälä   drm/i915: Split p...
1945
  	if (!params->active || !params->spr.enabled)
801bcfffb   Paulo Zanoni   drm/i915: properl...
1946
  		return 0;
23297044a   Ville Syrjälä   drm/i915: Rename ...
1947
  	method1 = ilk_wm_method1(params->pixel_rate,
c35426d2b   Ville Syrjälä   drm/i915: Split p...
1948
  				 params->spr.bytes_per_pixel,
801bcfffb   Paulo Zanoni   drm/i915: properl...
1949
  				 mem_value);
23297044a   Ville Syrjälä   drm/i915: Rename ...
1950
  	method2 = ilk_wm_method2(params->pixel_rate,
801bcfffb   Paulo Zanoni   drm/i915: properl...
1951
  				 params->pipe_htotal,
c35426d2b   Ville Syrjälä   drm/i915: Split p...
1952
1953
  				 params->spr.horiz_pixels,
  				 params->spr.bytes_per_pixel,
801bcfffb   Paulo Zanoni   drm/i915: properl...
1954
1955
1956
  				 mem_value);
  	return min(method1, method2);
  }
37126462a   Ville Syrjälä   drm/i915: Add com...
1957
1958
1959
1960
  /*
   * For both WM_PIPE and WM_LP.
   * mem_value must be in 0.1us units.
   */
820c19803   Imre Deak   drm/i915: s/haswe...
1961
  static uint32_t ilk_compute_cur_wm(const struct ilk_pipe_wm_parameters *params,
801bcfffb   Paulo Zanoni   drm/i915: properl...
1962
1963
  				   uint32_t mem_value)
  {
c35426d2b   Ville Syrjälä   drm/i915: Split p...
1964
  	if (!params->active || !params->cur.enabled)
801bcfffb   Paulo Zanoni   drm/i915: properl...
1965
  		return 0;
23297044a   Ville Syrjälä   drm/i915: Rename ...
1966
  	return ilk_wm_method2(params->pixel_rate,
801bcfffb   Paulo Zanoni   drm/i915: properl...
1967
  			      params->pipe_htotal,
c35426d2b   Ville Syrjälä   drm/i915: Split p...
1968
1969
  			      params->cur.horiz_pixels,
  			      params->cur.bytes_per_pixel,
801bcfffb   Paulo Zanoni   drm/i915: properl...
1970
1971
  			      mem_value);
  }
cca32e9ad   Paulo Zanoni   drm/i915: properl...
1972
  /* Only for WM_LP. */
820c19803   Imre Deak   drm/i915: s/haswe...
1973
  static uint32_t ilk_compute_fbc_wm(const struct ilk_pipe_wm_parameters *params,
1fda9882c   Ville Syrjälä   drm/i915: Don't p...
1974
  				   uint32_t pri_val)
cca32e9ad   Paulo Zanoni   drm/i915: properl...
1975
  {
c35426d2b   Ville Syrjälä   drm/i915: Split p...
1976
  	if (!params->active || !params->pri.enabled)
cca32e9ad   Paulo Zanoni   drm/i915: properl...
1977
  		return 0;
23297044a   Ville Syrjälä   drm/i915: Rename ...
1978
  	return ilk_wm_fbc(pri_val,
c35426d2b   Ville Syrjälä   drm/i915: Split p...
1979
1980
  			  params->pri.horiz_pixels,
  			  params->pri.bytes_per_pixel);
cca32e9ad   Paulo Zanoni   drm/i915: properl...
1981
  }
158ae64f8   Ville Syrjälä   drm/i915: Calcula...
1982
1983
  static unsigned int ilk_display_fifo_size(const struct drm_device *dev)
  {
416f4727a   Ville Syrjälä   drm/i915/bdw: Add...
1984
1985
1986
  	if (INTEL_INFO(dev)->gen >= 8)
  		return 3072;
  	else if (INTEL_INFO(dev)->gen >= 7)
158ae64f8   Ville Syrjälä   drm/i915: Calcula...
1987
1988
1989
1990
  		return 768;
  	else
  		return 512;
  }
4e9750812   Ville Syrjälä   drm/i916: Refacto...
1991
1992
1993
1994
1995
1996
1997
1998
1999
2000
2001
2002
2003
2004
2005
2006
2007
2008
2009
2010
2011
2012
2013
2014
2015
2016
2017
2018
2019
2020
2021
2022
2023
  static unsigned int ilk_plane_wm_reg_max(const struct drm_device *dev,
  					 int level, bool is_sprite)
  {
  	if (INTEL_INFO(dev)->gen >= 8)
  		/* BDW primary/sprite plane watermarks */
  		return level == 0 ? 255 : 2047;
  	else if (INTEL_INFO(dev)->gen >= 7)
  		/* IVB/HSW primary/sprite plane watermarks */
  		return level == 0 ? 127 : 1023;
  	else if (!is_sprite)
  		/* ILK/SNB primary plane watermarks */
  		return level == 0 ? 127 : 511;
  	else
  		/* ILK/SNB sprite plane watermarks */
  		return level == 0 ? 63 : 255;
  }
  
  static unsigned int ilk_cursor_wm_reg_max(const struct drm_device *dev,
  					  int level)
  {
  	if (INTEL_INFO(dev)->gen >= 7)
  		return level == 0 ? 63 : 255;
  	else
  		return level == 0 ? 31 : 63;
  }
  
  static unsigned int ilk_fbc_wm_reg_max(const struct drm_device *dev)
  {
  	if (INTEL_INFO(dev)->gen >= 8)
  		return 31;
  	else
  		return 15;
  }
/* Calculate the maximum primary/sprite plane watermark */
static unsigned int ilk_plane_wm_max(const struct drm_device *dev,
				     int level,
				     const struct intel_wm_config *config,
				     enum intel_ddb_partitioning ddb_partitioning,
				     bool is_sprite)
{
	/* Start from the whole display FIFO and carve out this plane's share. */
	unsigned int fifo_size = ilk_display_fifo_size(dev);

	/* if sprites aren't enabled, sprites get nothing */
	if (is_sprite && !config->sprites_enabled)
		return 0;

	/* HSW allows LP1+ watermarks even with multiple pipes */
	if (level == 0 || config->num_pipes_active > 1) {
		/* The FIFO is shared equally between the active pipes. */
		fifo_size /= INTEL_INFO(dev)->num_pipes;

		/*
		 * For some reason the non self refresh
		 * FIFO size is only half of the self
		 * refresh FIFO size on ILK/SNB.
		 */
		if (INTEL_INFO(dev)->gen <= 6)
			fifo_size /= 2;
	}

	if (config->sprites_enabled) {
		/* level 0 is always calculated with 1:1 split */
		if (level > 0 && ddb_partitioning == INTEL_DDB_PART_5_6) {
			/* 5/6 of the FIFO to the sprite, 1/6 to the primary */
			if (is_sprite)
				fifo_size *= 5;
			fifo_size /= 6;
		} else {
			/* 1:1 split between primary and sprite */
			fifo_size /= 2;
		}
	}

	/* clamp to max that the registers can hold */
	return min(fifo_size, ilk_plane_wm_reg_max(dev, level, is_sprite));
}
  
  /* Calculate the maximum cursor plane watermark */
  static unsigned int ilk_cursor_wm_max(const struct drm_device *dev,
240264f49   Ville Syrjälä   drm/i915: Pull so...
2066
2067
  				      int level,
  				      const struct intel_wm_config *config)
158ae64f8   Ville Syrjälä   drm/i915: Calcula...
2068
2069
  {
  	/* HSW LP1+ watermarks w/ multiple pipes */
240264f49   Ville Syrjälä   drm/i915: Pull so...
2070
  	if (level > 0 && config->num_pipes_active > 1)
158ae64f8   Ville Syrjälä   drm/i915: Calcula...
2071
2072
2073
  		return 64;
  
  	/* otherwise just report max that registers can hold */
4e9750812   Ville Syrjälä   drm/i916: Refacto...
2074
  	return ilk_cursor_wm_reg_max(dev, level);
158ae64f8   Ville Syrjälä   drm/i915: Calcula...
2075
  }
d34ff9c66   Damien Lespiau   drm/i915: Constif...
2076
  static void ilk_compute_wm_maximums(const struct drm_device *dev,
34982fe13   Ville Syrjälä   drm/i915: Rename ...
2077
2078
2079
  				    int level,
  				    const struct intel_wm_config *config,
  				    enum intel_ddb_partitioning ddb_partitioning,
820c19803   Imre Deak   drm/i915: s/haswe...
2080
  				    struct ilk_wm_maximums *max)
158ae64f8   Ville Syrjälä   drm/i915: Calcula...
2081
  {
240264f49   Ville Syrjälä   drm/i915: Pull so...
2082
2083
2084
  	max->pri = ilk_plane_wm_max(dev, level, config, ddb_partitioning, false);
  	max->spr = ilk_plane_wm_max(dev, level, config, ddb_partitioning, true);
  	max->cur = ilk_cursor_wm_max(dev, level, config);
4e9750812   Ville Syrjälä   drm/i916: Refacto...
2085
  	max->fbc = ilk_fbc_wm_reg_max(dev);
158ae64f8   Ville Syrjälä   drm/i915: Calcula...
2086
  }
a3cb40483   Ville Syrjälä   drm/i915: Make su...
2087
2088
2089
2090
2091
2092
2093
2094
2095
  static void ilk_compute_wm_reg_maximums(struct drm_device *dev,
  					int level,
  					struct ilk_wm_maximums *max)
  {
  	max->pri = ilk_plane_wm_reg_max(dev, level, false);
  	max->spr = ilk_plane_wm_reg_max(dev, level, true);
  	max->cur = ilk_cursor_wm_reg_max(dev, level);
  	max->fbc = ilk_fbc_wm_reg_max(dev);
  }
/*
 * Check a computed watermark level against the given maximums.
 * Returns whether the level was valid as computed; for level 0 the
 * values are additionally clamped into range and force-enabled, since
 * LP0 must always be programmed with something usable.
 */
static bool ilk_validate_wm_level(int level,
				  const struct ilk_wm_maximums *max,
				  struct intel_wm_level *result)
{
	bool ret;

	/* already determined to be invalid? */
	if (!result->enable)
		return false;

	result->enable = result->pri_val <= max->pri &&
			 result->spr_val <= max->spr &&
			 result->cur_val <= max->cur;

	/* remember the pre-clamp verdict; that is what we return */
	ret = result->enable;

	/*
	 * HACK until we can pre-compute everything,
	 * and thus fail gracefully if LP0 watermarks
	 * are exceeded...
	 */
	if (level == 0 && !result->enable) {
		if (result->pri_val > max->pri)
			DRM_DEBUG_KMS("Primary WM%d too large %u (max %u)\n",
				      level, result->pri_val, max->pri);
		if (result->spr_val > max->spr)
			DRM_DEBUG_KMS("Sprite WM%d too large %u (max %u)\n",
				      level, result->spr_val, max->spr);
		if (result->cur_val > max->cur)
			DRM_DEBUG_KMS("Cursor WM%d too large %u (max %u)\n",
				      level, result->cur_val, max->cur);

		/* clamp into range and enable anyway */
		result->pri_val = min_t(uint32_t, result->pri_val, max->pri);
		result->spr_val = min_t(uint32_t, result->spr_val, max->spr);
		result->cur_val = min_t(uint32_t, result->cur_val, max->cur);
		result->enable = true;
	}

	return ret;
}
d34ff9c66   Damien Lespiau   drm/i915: Constif...
2138
  static void ilk_compute_wm_level(const struct drm_i915_private *dev_priv,
6f5ddd170   Ville Syrjälä   drm/i915: Split w...
2139
  				 int level,
820c19803   Imre Deak   drm/i915: s/haswe...
2140
  				 const struct ilk_pipe_wm_parameters *p,
1fd527cc3   Ville Syrjälä   drm/i915: Rename ...
2141
  				 struct intel_wm_level *result)
6f5ddd170   Ville Syrjälä   drm/i915: Split w...
2142
2143
2144
2145
2146
2147
2148
2149
2150
2151
2152
2153
2154
2155
2156
2157
2158
2159
  {
  	uint16_t pri_latency = dev_priv->wm.pri_latency[level];
  	uint16_t spr_latency = dev_priv->wm.spr_latency[level];
  	uint16_t cur_latency = dev_priv->wm.cur_latency[level];
  
  	/* WM1+ latency values stored in 0.5us units */
  	if (level > 0) {
  		pri_latency *= 5;
  		spr_latency *= 5;
  		cur_latency *= 5;
  	}
  
  	result->pri_val = ilk_compute_pri_wm(p, pri_latency, level);
  	result->spr_val = ilk_compute_spr_wm(p, spr_latency);
  	result->cur_val = ilk_compute_cur_wm(p, cur_latency);
  	result->fbc_val = ilk_compute_fbc_wm(p, result->pri_val);
  	result->enable = true;
  }
/*
 * Compute the HSW linetime watermark value for a crtc: the time to scan
 * one line at the pixel clock and at the CD clock (for IPS), packed into
 * the PIPE_WM_LINETIME register layout.
 */
static uint32_t
hsw_compute_linetime_wm(struct drm_device *dev, struct drm_crtc *crtc)
{
	struct drm_i915_private *dev_priv = dev->dev_private;
	struct intel_crtc *intel_crtc = to_intel_crtc(crtc);
	struct drm_display_mode *mode = &intel_crtc->config.adjusted_mode;
	u32 linetime, ips_linetime;

	/* An inactive crtc contributes no linetime. */
	if (!intel_crtc_active(crtc))
		return 0;

	/* The WM are computed with base on how long it takes to fill a single
	 * row at the given clock rate, multiplied by 8.
	 * */
	linetime = DIV_ROUND_CLOSEST(mode->crtc_htotal * 1000 * 8,
				     mode->crtc_clock);
	/* the IPS linetime uses the CD clock instead of the pixel clock */
	ips_linetime = DIV_ROUND_CLOSEST(mode->crtc_htotal * 1000 * 8,
					 intel_ddi_get_cdclk_freq(dev_priv));

	return PIPE_WM_LINETIME_IPS_LINETIME(ips_linetime) |
	       PIPE_WM_LINETIME_TIME(linetime);
}
/*
 * Read the raw memory latency values for watermark levels 0..4 from the
 * hardware. The register layout differs per generation, hence the
 * different shift/mask sets below.
 */
static void intel_read_wm_latency(struct drm_device *dev, uint16_t wm[5])
{
	struct drm_i915_private *dev_priv = dev->dev_private;

	if (IS_HASWELL(dev) || IS_BROADWELL(dev)) {
		/* HSW/BDW pack all five levels into the 64-bit MCH_SSKPD. */
		uint64_t sskpd = I915_READ64(MCH_SSKPD);

		wm[0] = (sskpd >> 56) & 0xFF;
		/* fall back to the alternate WM0 field if the first is zero */
		if (wm[0] == 0)
			wm[0] = sskpd & 0xF;
		wm[1] = (sskpd >> 4) & 0xFF;
		wm[2] = (sskpd >> 12) & 0xFF;
		wm[3] = (sskpd >> 20) & 0x1FF;
		wm[4] = (sskpd >> 32) & 0x1FF;
	} else if (INTEL_INFO(dev)->gen >= 6) {
		/* SNB/IVB: four levels in the 32-bit MCH_SSKPD. */
		uint32_t sskpd = I915_READ(MCH_SSKPD);

		wm[0] = (sskpd >> SSKPD_WM0_SHIFT) & SSKPD_WM_MASK;
		wm[1] = (sskpd >> SSKPD_WM1_SHIFT) & SSKPD_WM_MASK;
		wm[2] = (sskpd >> SSKPD_WM2_SHIFT) & SSKPD_WM_MASK;
		wm[3] = (sskpd >> SSKPD_WM3_SHIFT) & SSKPD_WM_MASK;
	} else if (INTEL_INFO(dev)->gen >= 5) {
		/* ILK: WM1/WM2 come from MLTR_ILK, WM0 is hardcoded. */
		uint32_t mltr = I915_READ(MLTR_ILK);

		/* ILK primary LP0 latency is 700 ns */
		wm[0] = 7;
		wm[1] = (mltr >> MLTR_WM1_SHIFT) & ILK_SRLT_MASK;
		wm[2] = (mltr >> MLTR_WM2_SHIFT) & ILK_SRLT_MASK;
	}
}
53615a5e1   Ville Syrjälä   drm/i915: Store t...
2211
2212
2213
2214
2215
2216
2217
2218
2219
2220
2221
2222
2223
2224
2225
2226
2227
  static void intel_fixup_spr_wm_latency(struct drm_device *dev, uint16_t wm[5])
  {
  	/* ILK sprite LP0 latency is 1300 ns */
  	if (INTEL_INFO(dev)->gen == 5)
  		wm[0] = 13;
  }
  
  static void intel_fixup_cur_wm_latency(struct drm_device *dev, uint16_t wm[5])
  {
  	/* ILK cursor LP0 latency is 1300 ns */
  	if (INTEL_INFO(dev)->gen == 5)
  		wm[0] = 13;
  
  	/* WaDoubleCursorLP3Latency:ivb */
  	if (IS_IVYBRIDGE(dev))
  		wm[3] *= 2;
  }
546c81fd1   Damien Lespiau   drm/i915: Use ilk...
2228
  int ilk_wm_max_level(const struct drm_device *dev)
26ec971e3   Ville Syrjälä   drm/i915: Print t...
2229
  {
26ec971e3   Ville Syrjälä   drm/i915: Print t...
2230
  	/* how many WM levels are we expecting */
a42a57196   Ville Syrjälä   drm/i915: Fix wat...
2231
  	if (IS_HASWELL(dev) || IS_BROADWELL(dev))
ad0d6dc48   Ville Syrjälä   drm/i915: Refacto...
2232
  		return 4;
26ec971e3   Ville Syrjälä   drm/i915: Print t...
2233
  	else if (INTEL_INFO(dev)->gen >= 6)
ad0d6dc48   Ville Syrjälä   drm/i915: Refacto...
2234
  		return 3;
26ec971e3   Ville Syrjälä   drm/i915: Print t...
2235
  	else
ad0d6dc48   Ville Syrjälä   drm/i915: Refacto...
2236
2237
  		return 2;
  }
7526ed79b   Daniel Vetter   Revert "drm/i915/...
2238

ad0d6dc48   Ville Syrjälä   drm/i915: Refacto...
2239
2240
2241
2242
2243
  static void intel_print_wm_latency(struct drm_device *dev,
  				   const char *name,
  				   const uint16_t wm[5])
  {
  	int level, max_level = ilk_wm_max_level(dev);
26ec971e3   Ville Syrjälä   drm/i915: Print t...
2244
2245
2246
2247
2248
2249
2250
2251
2252
2253
2254
2255
2256
2257
2258
2259
2260
2261
2262
2263
2264
  
  	for (level = 0; level <= max_level; level++) {
  		unsigned int latency = wm[level];
  
  		if (latency == 0) {
  			DRM_ERROR("%s WM%d latency not provided
  ",
  				  name, level);
  			continue;
  		}
  
  		/* WM1+ latency values in 0.5us units */
  		if (level > 0)
  			latency *= 5;
  
  		DRM_DEBUG_KMS("%s WM%d latency %u (%u.%u usec)
  ",
  			      name, level, wm[level],
  			      latency / 10, latency % 10);
  	}
  }
e95a2f750   Ville Syrjälä   drm/i915: Increas...
2265
2266
2267
2268
2269
2270
2271
2272
2273
2274
2275
2276
2277
2278
2279
2280
2281
2282
2283
2284
2285
2286
2287
2288
2289
2290
2291
2292
2293
2294
2295
2296
2297
2298
2299
2300
2301
  static bool ilk_increase_wm_latency(struct drm_i915_private *dev_priv,
  				    uint16_t wm[5], uint16_t min)
  {
  	int level, max_level = ilk_wm_max_level(dev_priv->dev);
  
  	if (wm[0] >= min)
  		return false;
  
  	wm[0] = max(wm[0], min);
  	for (level = 1; level <= max_level; level++)
  		wm[level] = max_t(uint16_t, wm[level], DIV_ROUND_UP(min, 5));
  
  	return true;
  }
  
/* Work around too-low BIOS-provided memory latencies on SNB. */
static void snb_wm_latency_quirk(struct drm_device *dev)
{
	struct drm_i915_private *dev_priv = dev->dev_private;
	bool changed;

	/*
	 * The BIOS provided WM memory latency values are often
	 * inadequate for high resolution displays. Adjust them.
	 */
	/* Bitwise | (not ||) on purpose: all three must be attempted. */
	changed = ilk_increase_wm_latency(dev_priv, dev_priv->wm.pri_latency, 12) |
		ilk_increase_wm_latency(dev_priv, dev_priv->wm.spr_latency, 12) |
		ilk_increase_wm_latency(dev_priv, dev_priv->wm.cur_latency, 12);

	if (!changed)
		return;

	DRM_DEBUG_KMS("WM latency values increased to avoid potential underruns\n");
	intel_print_wm_latency(dev, "Primary", dev_priv->wm.pri_latency);
	intel_print_wm_latency(dev, "Sprite", dev_priv->wm.spr_latency);
	intel_print_wm_latency(dev, "Cursor", dev_priv->wm.cur_latency);
}
/*
 * Initialize the per-plane watermark latency tables: read the hardware
 * values, seed sprite/cursor tables from the primary one, then apply
 * platform quirks. Order matters: copy before fixup.
 */
static void ilk_setup_wm_latency(struct drm_device *dev)
{
	struct drm_i915_private *dev_priv = dev->dev_private;

	intel_read_wm_latency(dev, dev_priv->wm.pri_latency);

	/* sprite and cursor latencies start out identical to the primary */
	memcpy(dev_priv->wm.spr_latency, dev_priv->wm.pri_latency,
	       sizeof(dev_priv->wm.pri_latency));
	memcpy(dev_priv->wm.cur_latency, dev_priv->wm.pri_latency,
	       sizeof(dev_priv->wm.pri_latency));

	intel_fixup_spr_wm_latency(dev, dev_priv->wm.spr_latency);
	intel_fixup_cur_wm_latency(dev, dev_priv->wm.cur_latency);

	intel_print_wm_latency(dev, "Primary", dev_priv->wm.pri_latency);
	intel_print_wm_latency(dev, "Sprite", dev_priv->wm.spr_latency);
	intel_print_wm_latency(dev, "Cursor", dev_priv->wm.cur_latency);

	if (IS_GEN6(dev))
		snb_wm_latency_quirk(dev);
}
/* Gather the watermark computation inputs for one crtc into @p. */
static void ilk_compute_wm_parameters(struct drm_crtc *crtc,
				      struct ilk_pipe_wm_parameters *p)
{
	struct drm_device *dev = crtc->dev;
	struct intel_crtc *intel_crtc = to_intel_crtc(crtc);
	enum pipe pipe = intel_crtc->pipe;
	struct drm_plane *plane;

	/* inactive crtcs leave @p untouched (p->active stays false) */
	if (!intel_crtc_active(crtc))
		return;

	p->active = true;
	p->pipe_htotal = intel_crtc->config.adjusted_mode.crtc_htotal;
	p->pixel_rate = ilk_pipe_pixel_rate(dev, crtc);
	p->pri.bytes_per_pixel = crtc->primary->fb->bits_per_pixel / 8;
	p->cur.bytes_per_pixel = 4;	/* cursor is always 32bpp */
	p->pri.horiz_pixels = intel_crtc->config.pipe_src_w;
	p->cur.horiz_pixels = intel_crtc->cursor_width;
	/* TODO: for now, assume primary and cursor planes are always enabled. */
	p->pri.enabled = true;
	p->cur.enabled = true;

	/* pick up the sprite parameters from the plane attached to this pipe */
	drm_for_each_legacy_plane(plane, &dev->mode_config.plane_list) {
		struct intel_plane *intel_plane = to_intel_plane(plane);

		if (intel_plane->pipe == pipe) {
			p->spr = intel_plane->wm;
			break;
		}
	}
}
  
  static void ilk_compute_wm_config(struct drm_device *dev,
  				  struct intel_wm_config *config)
  {
  	struct intel_crtc *intel_crtc;
  
  	/* Compute the currently _active_ config */
d3fcc808b   Damien Lespiau   drm/i915: Use for...
2361
  	for_each_intel_crtc(dev, intel_crtc) {
2a44b76bb   Ville Syrjälä   drm/i915: Add som...
2362
  		const struct intel_pipe_wm *wm = &intel_crtc->wm.active;
cca32e9ad   Paulo Zanoni   drm/i915: properl...
2363

2a44b76bb   Ville Syrjälä   drm/i915: Add som...
2364
2365
  		if (!wm->pipe_enabled)
  			continue;
cca32e9ad   Paulo Zanoni   drm/i915: properl...
2366

2a44b76bb   Ville Syrjälä   drm/i915: Add som...
2367
2368
2369
  		config->sprites_enabled |= wm->sprites_enabled;
  		config->sprites_scaled |= wm->sprites_scaled;
  		config->num_pipes_active++;
cca32e9ad   Paulo Zanoni   drm/i915: properl...
2370
  	}
801bcfffb   Paulo Zanoni   drm/i915: properl...
2371
  }
/* Compute new watermarks for the pipe */
static bool intel_compute_pipe_wm(struct drm_crtc *crtc,
				  const struct ilk_pipe_wm_parameters *params,
				  struct intel_pipe_wm *pipe_wm)
{
	struct drm_device *dev = crtc->dev;
	const struct drm_i915_private *dev_priv = dev->dev_private;
	int level, max_level = ilk_wm_max_level(dev);
	/* LP0 watermark maximums depend on this pipe alone */
	struct intel_wm_config config = {
		.num_pipes_active = 1,
		.sprites_enabled = params->spr.enabled,
		.sprites_scaled = params->spr.scaled,
	};
	struct ilk_wm_maximums max;

	pipe_wm->pipe_enabled = params->active;
	pipe_wm->sprites_enabled = params->spr.enabled;
	pipe_wm->sprites_scaled = params->spr.scaled;

	/* ILK/SNB: LP2+ watermarks only w/o sprites */
	if (INTEL_INFO(dev)->gen <= 6 && params->spr.enabled)
		max_level = 1;

	/* ILK/SNB/IVB: LP1+ watermarks only w/o scaling */
	if (params->spr.scaled)
		max_level = 0;

	/* level 0 (WM_PIPE) is always computed */
	ilk_compute_wm_level(dev_priv, 0, params, &pipe_wm->wm[0]);

	if (IS_HASWELL(dev) || IS_BROADWELL(dev))
		pipe_wm->linetime = hsw_compute_linetime_wm(dev, crtc);

	/* LP0 watermarks always use 1/2 DDB partitioning */
	ilk_compute_wm_maximums(dev, 0, &config, INTEL_DDB_PART_1_2, &max);

	/* At least LP0 must be valid */
	if (!ilk_validate_wm_level(0, &max, &pipe_wm->wm[0]))
		return false;

	/* LP1+ levels are checked against the absolute register limits */
	ilk_compute_wm_reg_maximums(dev, 1, &max);

	for (level = 1; level <= max_level; level++) {
		struct intel_wm_level wm = {};

		ilk_compute_wm_level(dev_priv, level, params, &wm);

		/*
		 * Disable any watermark level that exceeds the
		 * register maximums since such watermarks are
		 * always invalid.
		 */
		if (!ilk_validate_wm_level(level, &max, &wm))
			break;

		pipe_wm->wm[level] = wm;
	}

	return true;
}
  
/*
 * Merge the watermarks from all active pipes for a specific level.
 */
static void ilk_merge_wm_level(struct drm_device *dev,
			       int level,
			       struct intel_wm_level *ret_wm)
{
	const struct intel_crtc *intel_crtc;

	/* start out enabled; any pipe with a disabled level clears this */
	ret_wm->enable = true;

	for_each_intel_crtc(dev, intel_crtc) {
		const struct intel_pipe_wm *active = &intel_crtc->wm.active;
		const struct intel_wm_level *wm = &active->wm[level];

		if (!active->pipe_enabled)
			continue;

		/*
		 * The watermark values may have been used in the past,
		 * so we must maintain them in the registers for some
		 * time even if the level is now disabled.
		 */
		if (!wm->enable)
			ret_wm->enable = false;

		/* merged level takes the worst-case value of each field */
		ret_wm->pri_val = max(ret_wm->pri_val, wm->pri_val);
		ret_wm->spr_val = max(ret_wm->spr_val, wm->spr_val);
		ret_wm->cur_val = max(ret_wm->cur_val, wm->cur_val);
		ret_wm->fbc_val = max(ret_wm->fbc_val, wm->fbc_val);
	}
}
  
/*
 * Merge all low power watermarks for all active pipes.
 */
static void ilk_wm_merge(struct drm_device *dev,
			 const struct intel_wm_config *config,
			 const struct ilk_wm_maximums *max,
			 struct intel_pipe_wm *merged)
{
	int level, max_level = ilk_wm_max_level(dev);
	int last_enabled_level = max_level;

	/* ILK/SNB/IVB: LP1+ watermarks only w/ single pipe */
	if ((INTEL_INFO(dev)->gen <= 6 || IS_IVYBRIDGE(dev)) &&
	    config->num_pipes_active > 1)
		return;

	/* ILK: FBC WM must be disabled always */
	merged->fbc_wm_enabled = INTEL_INFO(dev)->gen >= 6;

	/* merge each WM1+ level */
	for (level = 1; level <= max_level; level++) {
		struct intel_wm_level *wm = &merged->wm[level];

		ilk_merge_wm_level(dev, level, wm);

		/* once one level fails validation, all higher levels stay off */
		if (level > last_enabled_level)
			wm->enable = false;
		else if (!ilk_validate_wm_level(level, max, wm))
			/* make sure all following levels get disabled */
			last_enabled_level = level - 1;

		/*
		 * The spec says it is preferred to disable
		 * FBC WMs instead of disabling a WM level.
		 */
		if (wm->fbc_val > max->fbc) {
			if (wm->enable)
				merged->fbc_wm_enabled = false;
			wm->fbc_val = 0;
		}
	}

	/* ILK: LP2+ must be disabled when FBC WM is disabled but FBC enabled */
	/*
	 * FIXME this is racy. FBC might get enabled later.
	 * What we should check here is whether FBC can be
	 * enabled sometime later.
	 */
	if (IS_GEN5(dev) && !merged->fbc_wm_enabled && intel_fbc_enabled(dev)) {
		for (level = 2; level <= max_level; level++) {
			struct intel_wm_level *wm = &merged->wm[level];

			wm->enable = false;
		}
	}
}
b380ca3ca   Ville Syrjälä   drm/i915: Refacto...
2515
2516
2517
2518
2519
  static int ilk_wm_lp_to_level(int wm_lp, const struct intel_pipe_wm *pipe_wm)
  {
  	/* LP1,LP2,LP3 levels are either 1,2,3 or 1,3,4 */
  	return wm_lp + (wm_lp >= 2 && pipe_wm->wm[4].enable);
  }
a68d68eeb   Ville Syrjälä   drm/i915: Add ILK...
2520
2521
2522
2523
  /* The value we need to program into the WM_LPx latency field */
  static unsigned int ilk_wm_lp_latency(struct drm_device *dev, int level)
  {
  	struct drm_i915_private *dev_priv = dev->dev_private;
a42a57196   Ville Syrjälä   drm/i915: Fix wat...
2524
  	if (IS_HASWELL(dev) || IS_BROADWELL(dev))
a68d68eeb   Ville Syrjälä   drm/i915: Add ILK...
2525
2526
2527
2528
  		return 2 * level;
  	else
  		return dev_priv->wm.pri_latency[level];
  }
820c19803   Imre Deak   drm/i915: s/haswe...
2529
  static void ilk_compute_wm_results(struct drm_device *dev,
0362c7816   Ville Syrjälä   drm/i915: Move LP...
2530
  				   const struct intel_pipe_wm *merged,
609cedef6   Ville Syrjälä   drm/i915: Store c...
2531
  				   enum intel_ddb_partitioning partitioning,
820c19803   Imre Deak   drm/i915: s/haswe...
2532
  				   struct ilk_wm_values *results)
801bcfffb   Paulo Zanoni   drm/i915: properl...
2533
  {
0b2ae6d72   Ville Syrjälä   drm/i915: Add int...
2534
2535
  	struct intel_crtc *intel_crtc;
  	int level, wm_lp;
cca32e9ad   Paulo Zanoni   drm/i915: properl...
2536

0362c7816   Ville Syrjälä   drm/i915: Move LP...
2537
  	results->enable_fbc_wm = merged->fbc_wm_enabled;
609cedef6   Ville Syrjälä   drm/i915: Store c...
2538
  	results->partitioning = partitioning;
cca32e9ad   Paulo Zanoni   drm/i915: properl...
2539

0b2ae6d72   Ville Syrjälä   drm/i915: Add int...
2540
  	/* LP1+ register values */
cca32e9ad   Paulo Zanoni   drm/i915: properl...
2541
  	for (wm_lp = 1; wm_lp <= 3; wm_lp++) {
1fd527cc3   Ville Syrjälä   drm/i915: Rename ...
2542
  		const struct intel_wm_level *r;
801bcfffb   Paulo Zanoni   drm/i915: properl...
2543

b380ca3ca   Ville Syrjälä   drm/i915: Refacto...
2544
  		level = ilk_wm_lp_to_level(wm_lp, merged);
0b2ae6d72   Ville Syrjälä   drm/i915: Add int...
2545

0362c7816   Ville Syrjälä   drm/i915: Move LP...
2546
  		r = &merged->wm[level];
cca32e9ad   Paulo Zanoni   drm/i915: properl...
2547

d52fea5be   Ville Syrjälä   drm/i915: Merge L...
2548
2549
2550
2551
2552
  		/*
  		 * Maintain the watermark values even if the level is
  		 * disabled. Doing otherwise could cause underruns.
  		 */
  		results->wm_lp[wm_lp - 1] =
a68d68eeb   Ville Syrjälä   drm/i915: Add ILK...
2553
  			(ilk_wm_lp_latency(dev, level) << WM1_LP_LATENCY_SHIFT) |
416f4727a   Ville Syrjälä   drm/i915/bdw: Add...
2554
2555
  			(r->pri_val << WM1_LP_SR_SHIFT) |
  			r->cur_val;
d52fea5be   Ville Syrjälä   drm/i915: Merge L...
2556
2557
  		if (r->enable)
  			results->wm_lp[wm_lp - 1] |= WM1_LP_SR_EN;
416f4727a   Ville Syrjälä   drm/i915/bdw: Add...
2558
2559
2560
2561
2562
2563
  		if (INTEL_INFO(dev)->gen >= 8)
  			results->wm_lp[wm_lp - 1] |=
  				r->fbc_val << WM1_LP_FBC_SHIFT_BDW;
  		else
  			results->wm_lp[wm_lp - 1] |=
  				r->fbc_val << WM1_LP_FBC_SHIFT;
d52fea5be   Ville Syrjälä   drm/i915: Merge L...
2564
2565
2566
2567
  		/*
  		 * Always set WM1S_LP_EN when spr_val != 0, even if the
  		 * level is disabled. Doing otherwise could cause underruns.
  		 */
6cef2b8a5   Ville Syrjälä   drm/i915: Fix LP1...
2568
2569
2570
2571
2572
  		if (INTEL_INFO(dev)->gen <= 6 && r->spr_val) {
  			WARN_ON(wm_lp != 1);
  			results->wm_lp_spr[wm_lp - 1] = WM1S_LP_EN | r->spr_val;
  		} else
  			results->wm_lp_spr[wm_lp - 1] = r->spr_val;
cca32e9ad   Paulo Zanoni   drm/i915: properl...
2573
  	}
801bcfffb   Paulo Zanoni   drm/i915: properl...
2574

0b2ae6d72   Ville Syrjälä   drm/i915: Add int...
2575
  	/* LP0 register values */
d3fcc808b   Damien Lespiau   drm/i915: Use for...
2576
  	for_each_intel_crtc(dev, intel_crtc) {
0b2ae6d72   Ville Syrjälä   drm/i915: Add int...
2577
2578
2579
2580
2581
2582
2583
2584
  		enum pipe pipe = intel_crtc->pipe;
  		const struct intel_wm_level *r =
  			&intel_crtc->wm.active.wm[0];
  
  		if (WARN_ON(!r->enable))
  			continue;
  
  		results->wm_linetime[pipe] = intel_crtc->wm.active.linetime;
1011d8c43   Paulo Zanoni   drm/i915: remove ...
2585

0b2ae6d72   Ville Syrjälä   drm/i915: Add int...
2586
2587
2588
2589
  		results->wm_pipe[pipe] =
  			(r->pri_val << WM0_PIPE_PLANE_SHIFT) |
  			(r->spr_val << WM0_PIPE_SPRITE_SHIFT) |
  			r->cur_val;
801bcfffb   Paulo Zanoni   drm/i915: properl...
2590
2591
  	}
  }
861f3389c   Paulo Zanoni   drm/i915: add sup...
2592
2593
  /* Find the result with the highest level enabled. Check for enable_fbc_wm in
   * case both are at the same level. Prefer r1 in case they're the same. */
820c19803   Imre Deak   drm/i915: s/haswe...
2594
  static struct intel_pipe_wm *ilk_find_best_result(struct drm_device *dev,
198a1e9b1   Ville Syrjälä   drm/i915: Use int...
2595
2596
  						  struct intel_pipe_wm *r1,
  						  struct intel_pipe_wm *r2)
861f3389c   Paulo Zanoni   drm/i915: add sup...
2597
  {
198a1e9b1   Ville Syrjälä   drm/i915: Use int...
2598
2599
  	int level, max_level = ilk_wm_max_level(dev);
  	int level1 = 0, level2 = 0;
861f3389c   Paulo Zanoni   drm/i915: add sup...
2600

198a1e9b1   Ville Syrjälä   drm/i915: Use int...
2601
2602
2603
2604
2605
  	for (level = 1; level <= max_level; level++) {
  		if (r1->wm[level].enable)
  			level1 = level;
  		if (r2->wm[level].enable)
  			level2 = level;
861f3389c   Paulo Zanoni   drm/i915: add sup...
2606
  	}
198a1e9b1   Ville Syrjälä   drm/i915: Use int...
2607
2608
  	if (level1 == level2) {
  		if (r2->fbc_wm_enabled && !r1->fbc_wm_enabled)
861f3389c   Paulo Zanoni   drm/i915: add sup...
2609
2610
2611
  			return r2;
  		else
  			return r1;
198a1e9b1   Ville Syrjälä   drm/i915: Use int...
2612
  	} else if (level1 > level2) {
861f3389c   Paulo Zanoni   drm/i915: add sup...
2613
2614
2615
2616
2617
  		return r1;
  	} else {
  		return r2;
  	}
  }
49a687c47   Ville Syrjälä   drm/i915: Improve...
2618
2619
2620
2621
2622
2623
2624
  /* dirty bits used to track which watermarks need changes */
  #define WM_DIRTY_PIPE(pipe) (1 << (pipe))
  #define WM_DIRTY_LINETIME(pipe) (1 << (8 + (pipe)))
  #define WM_DIRTY_LP(wm_lp) (1 << (15 + (wm_lp)))
  #define WM_DIRTY_LP_ALL (WM_DIRTY_LP(1) | WM_DIRTY_LP(2) | WM_DIRTY_LP(3))
  #define WM_DIRTY_FBC (1 << 24)
  #define WM_DIRTY_DDB (1 << 25)
055e393fa   Damien Lespiau   drm/i915: Use dev...
2625
  static unsigned int ilk_compute_wm_dirty(struct drm_i915_private *dev_priv,
820c19803   Imre Deak   drm/i915: s/haswe...
2626
2627
  					 const struct ilk_wm_values *old,
  					 const struct ilk_wm_values *new)
49a687c47   Ville Syrjälä   drm/i915: Improve...
2628
2629
2630
2631
  {
  	unsigned int dirty = 0;
  	enum pipe pipe;
  	int wm_lp;
055e393fa   Damien Lespiau   drm/i915: Use dev...
2632
  	for_each_pipe(dev_priv, pipe) {
49a687c47   Ville Syrjälä   drm/i915: Improve...
2633
2634
2635
2636
2637
2638
2639
2640
2641
2642
2643
2644
2645
2646
2647
2648
2649
2650
2651
2652
2653
2654
2655
2656
2657
2658
2659
2660
2661
2662
2663
2664
2665
2666
2667
2668
2669
2670
2671
2672
2673
2674
  		if (old->wm_linetime[pipe] != new->wm_linetime[pipe]) {
  			dirty |= WM_DIRTY_LINETIME(pipe);
  			/* Must disable LP1+ watermarks too */
  			dirty |= WM_DIRTY_LP_ALL;
  		}
  
  		if (old->wm_pipe[pipe] != new->wm_pipe[pipe]) {
  			dirty |= WM_DIRTY_PIPE(pipe);
  			/* Must disable LP1+ watermarks too */
  			dirty |= WM_DIRTY_LP_ALL;
  		}
  	}
  
  	if (old->enable_fbc_wm != new->enable_fbc_wm) {
  		dirty |= WM_DIRTY_FBC;
  		/* Must disable LP1+ watermarks too */
  		dirty |= WM_DIRTY_LP_ALL;
  	}
  
  	if (old->partitioning != new->partitioning) {
  		dirty |= WM_DIRTY_DDB;
  		/* Must disable LP1+ watermarks too */
  		dirty |= WM_DIRTY_LP_ALL;
  	}
  
  	/* LP1+ watermarks already deemed dirty, no need to continue */
  	if (dirty & WM_DIRTY_LP_ALL)
  		return dirty;
  
  	/* Find the lowest numbered LP1+ watermark in need of an update... */
  	for (wm_lp = 1; wm_lp <= 3; wm_lp++) {
  		if (old->wm_lp[wm_lp - 1] != new->wm_lp[wm_lp - 1] ||
  		    old->wm_lp_spr[wm_lp - 1] != new->wm_lp_spr[wm_lp - 1])
  			break;
  	}
  
  	/* ...and mark it and all higher numbered LP1+ watermarks as dirty */
  	for (; wm_lp <= 3; wm_lp++)
  		dirty |= WM_DIRTY_LP(wm_lp);
  
  	return dirty;
  }
8553c18ea   Ville Syrjälä   drm/i915: Try to ...
2675
2676
  static bool _ilk_disable_lp_wm(struct drm_i915_private *dev_priv,
  			       unsigned int dirty)
801bcfffb   Paulo Zanoni   drm/i915: properl...
2677
  {
820c19803   Imre Deak   drm/i915: s/haswe...
2678
  	struct ilk_wm_values *previous = &dev_priv->wm.hw;
8553c18ea   Ville Syrjälä   drm/i915: Try to ...
2679
  	bool changed = false;
801bcfffb   Paulo Zanoni   drm/i915: properl...
2680

facd619b8   Ville Syrjälä   drm/i915: Fix LP1...
2681
2682
2683
  	if (dirty & WM_DIRTY_LP(3) && previous->wm_lp[2] & WM1_LP_SR_EN) {
  		previous->wm_lp[2] &= ~WM1_LP_SR_EN;
  		I915_WRITE(WM3_LP_ILK, previous->wm_lp[2]);
8553c18ea   Ville Syrjälä   drm/i915: Try to ...
2684
  		changed = true;
facd619b8   Ville Syrjälä   drm/i915: Fix LP1...
2685
2686
2687
2688
  	}
  	if (dirty & WM_DIRTY_LP(2) && previous->wm_lp[1] & WM1_LP_SR_EN) {
  		previous->wm_lp[1] &= ~WM1_LP_SR_EN;
  		I915_WRITE(WM2_LP_ILK, previous->wm_lp[1]);
8553c18ea   Ville Syrjälä   drm/i915: Try to ...
2689
  		changed = true;
facd619b8   Ville Syrjälä   drm/i915: Fix LP1...
2690
2691
2692
2693
  	}
  	if (dirty & WM_DIRTY_LP(1) && previous->wm_lp[0] & WM1_LP_SR_EN) {
  		previous->wm_lp[0] &= ~WM1_LP_SR_EN;
  		I915_WRITE(WM1_LP_ILK, previous->wm_lp[0]);
8553c18ea   Ville Syrjälä   drm/i915: Try to ...
2694
  		changed = true;
facd619b8   Ville Syrjälä   drm/i915: Fix LP1...
2695
  	}
801bcfffb   Paulo Zanoni   drm/i915: properl...
2696

facd619b8   Ville Syrjälä   drm/i915: Fix LP1...
2697
2698
2699
2700
  	/*
  	 * Don't touch WM1S_LP_EN here.
  	 * Doing so could cause underruns.
  	 */
6cef2b8a5   Ville Syrjälä   drm/i915: Fix LP1...
2701

8553c18ea   Ville Syrjälä   drm/i915: Try to ...
2702
2703
2704
2705
2706
2707
2708
  	return changed;
  }
  
  /*
   * The spec says we shouldn't write when we don't need, because every write
   * causes WMs to be re-evaluated, expending some power.
   */
820c19803   Imre Deak   drm/i915: s/haswe...
2709
2710
  static void ilk_write_wm_values(struct drm_i915_private *dev_priv,
  				struct ilk_wm_values *results)
8553c18ea   Ville Syrjälä   drm/i915: Try to ...
2711
2712
  {
  	struct drm_device *dev = dev_priv->dev;
820c19803   Imre Deak   drm/i915: s/haswe...
2713
  	struct ilk_wm_values *previous = &dev_priv->wm.hw;
8553c18ea   Ville Syrjälä   drm/i915: Try to ...
2714
2715
  	unsigned int dirty;
  	uint32_t val;
055e393fa   Damien Lespiau   drm/i915: Use dev...
2716
  	dirty = ilk_compute_wm_dirty(dev_priv, previous, results);
8553c18ea   Ville Syrjälä   drm/i915: Try to ...
2717
2718
2719
2720
  	if (!dirty)
  		return;
  
  	_ilk_disable_lp_wm(dev_priv, dirty);
49a687c47   Ville Syrjälä   drm/i915: Improve...
2721
  	if (dirty & WM_DIRTY_PIPE(PIPE_A))
801bcfffb   Paulo Zanoni   drm/i915: properl...
2722
  		I915_WRITE(WM0_PIPEA_ILK, results->wm_pipe[0]);
49a687c47   Ville Syrjälä   drm/i915: Improve...
2723
  	if (dirty & WM_DIRTY_PIPE(PIPE_B))
801bcfffb   Paulo Zanoni   drm/i915: properl...
2724
  		I915_WRITE(WM0_PIPEB_ILK, results->wm_pipe[1]);
49a687c47   Ville Syrjälä   drm/i915: Improve...
2725
  	if (dirty & WM_DIRTY_PIPE(PIPE_C))
801bcfffb   Paulo Zanoni   drm/i915: properl...
2726
  		I915_WRITE(WM0_PIPEC_IVB, results->wm_pipe[2]);
49a687c47   Ville Syrjälä   drm/i915: Improve...
2727
  	if (dirty & WM_DIRTY_LINETIME(PIPE_A))
801bcfffb   Paulo Zanoni   drm/i915: properl...
2728
  		I915_WRITE(PIPE_WM_LINETIME(PIPE_A), results->wm_linetime[0]);
49a687c47   Ville Syrjälä   drm/i915: Improve...
2729
  	if (dirty & WM_DIRTY_LINETIME(PIPE_B))
801bcfffb   Paulo Zanoni   drm/i915: properl...
2730
  		I915_WRITE(PIPE_WM_LINETIME(PIPE_B), results->wm_linetime[1]);
49a687c47   Ville Syrjälä   drm/i915: Improve...
2731
  	if (dirty & WM_DIRTY_LINETIME(PIPE_C))
801bcfffb   Paulo Zanoni   drm/i915: properl...
2732
  		I915_WRITE(PIPE_WM_LINETIME(PIPE_C), results->wm_linetime[2]);
49a687c47   Ville Syrjälä   drm/i915: Improve...
2733
  	if (dirty & WM_DIRTY_DDB) {
a42a57196   Ville Syrjälä   drm/i915: Fix wat...
2734
  		if (IS_HASWELL(dev) || IS_BROADWELL(dev)) {
ac9545fda   Ville Syrjälä   drm/i915: Add IVB...
2735
2736
2737
2738
2739
2740
2741
2742
2743
2744
2745
2746
2747
2748
  			val = I915_READ(WM_MISC);
  			if (results->partitioning == INTEL_DDB_PART_1_2)
  				val &= ~WM_MISC_DATA_PARTITION_5_6;
  			else
  				val |= WM_MISC_DATA_PARTITION_5_6;
  			I915_WRITE(WM_MISC, val);
  		} else {
  			val = I915_READ(DISP_ARB_CTL2);
  			if (results->partitioning == INTEL_DDB_PART_1_2)
  				val &= ~DISP_DATA_PARTITION_5_6;
  			else
  				val |= DISP_DATA_PARTITION_5_6;
  			I915_WRITE(DISP_ARB_CTL2, val);
  		}
1011d8c43   Paulo Zanoni   drm/i915: remove ...
2749
  	}
49a687c47   Ville Syrjälä   drm/i915: Improve...
2750
  	if (dirty & WM_DIRTY_FBC) {
cca32e9ad   Paulo Zanoni   drm/i915: properl...
2751
2752
2753
2754
2755
2756
2757
  		val = I915_READ(DISP_ARB_CTL);
  		if (results->enable_fbc_wm)
  			val &= ~DISP_FBC_WM_DIS;
  		else
  			val |= DISP_FBC_WM_DIS;
  		I915_WRITE(DISP_ARB_CTL, val);
  	}
954911ebb   Imre Deak   drm/i915: simplif...
2758
2759
2760
2761
2762
  	if (dirty & WM_DIRTY_LP(1) &&
  	    previous->wm_lp_spr[0] != results->wm_lp_spr[0])
  		I915_WRITE(WM1S_LP_ILK, results->wm_lp_spr[0]);
  
  	if (INTEL_INFO(dev)->gen >= 7) {
6cef2b8a5   Ville Syrjälä   drm/i915: Fix LP1...
2763
2764
2765
2766
2767
  		if (dirty & WM_DIRTY_LP(2) && previous->wm_lp_spr[1] != results->wm_lp_spr[1])
  			I915_WRITE(WM2S_LP_IVB, results->wm_lp_spr[1]);
  		if (dirty & WM_DIRTY_LP(3) && previous->wm_lp_spr[2] != results->wm_lp_spr[2])
  			I915_WRITE(WM3S_LP_IVB, results->wm_lp_spr[2]);
  	}
801bcfffb   Paulo Zanoni   drm/i915: properl...
2768

facd619b8   Ville Syrjälä   drm/i915: Fix LP1...
2769
  	if (dirty & WM_DIRTY_LP(1) && previous->wm_lp[0] != results->wm_lp[0])
801bcfffb   Paulo Zanoni   drm/i915: properl...
2770
  		I915_WRITE(WM1_LP_ILK, results->wm_lp[0]);
facd619b8   Ville Syrjälä   drm/i915: Fix LP1...
2771
  	if (dirty & WM_DIRTY_LP(2) && previous->wm_lp[1] != results->wm_lp[1])
801bcfffb   Paulo Zanoni   drm/i915: properl...
2772
  		I915_WRITE(WM2_LP_ILK, results->wm_lp[1]);
facd619b8   Ville Syrjälä   drm/i915: Fix LP1...
2773
  	if (dirty & WM_DIRTY_LP(3) && previous->wm_lp[2] != results->wm_lp[2])
801bcfffb   Paulo Zanoni   drm/i915: properl...
2774
  		I915_WRITE(WM3_LP_ILK, results->wm_lp[2]);
609cedef6   Ville Syrjälä   drm/i915: Store c...
2775
2776
  
  	dev_priv->wm.hw = *results;
801bcfffb   Paulo Zanoni   drm/i915: properl...
2777
  }
8553c18ea   Ville Syrjälä   drm/i915: Try to ...
2778
2779
2780
2781
2782
2783
  static bool ilk_disable_lp_wm(struct drm_device *dev)
  {
  	struct drm_i915_private *dev_priv = dev->dev_private;
  
  	return _ilk_disable_lp_wm(dev_priv, WM_DIRTY_LP_ALL);
  }
820c19803   Imre Deak   drm/i915: s/haswe...
2784
  static void ilk_update_wm(struct drm_crtc *crtc)
801bcfffb   Paulo Zanoni   drm/i915: properl...
2785
  {
7c4a395ff   Ville Syrjälä   drm/i915: Don't r...
2786
  	struct intel_crtc *intel_crtc = to_intel_crtc(crtc);
46ba614c0   Ville Syrjälä   drm/i915: Pass cr...
2787
  	struct drm_device *dev = crtc->dev;
801bcfffb   Paulo Zanoni   drm/i915: properl...
2788
  	struct drm_i915_private *dev_priv = dev->dev_private;
820c19803   Imre Deak   drm/i915: s/haswe...
2789
2790
2791
  	struct ilk_wm_maximums max;
  	struct ilk_pipe_wm_parameters params = {};
  	struct ilk_wm_values results = {};
77c122bcc   Ville Syrjälä   drm/i915: Rename ...
2792
  	enum intel_ddb_partitioning partitioning;
7c4a395ff   Ville Syrjälä   drm/i915: Don't r...
2793
  	struct intel_pipe_wm pipe_wm = {};
198a1e9b1   Ville Syrjälä   drm/i915: Use int...
2794
  	struct intel_pipe_wm lp_wm_1_2 = {}, lp_wm_5_6 = {}, *best_lp_wm;
a485bfb8d   Ville Syrjälä   drm/i915: Move so...
2795
  	struct intel_wm_config config = {};
7c4a395ff   Ville Syrjälä   drm/i915: Don't r...
2796

2a44b76bb   Ville Syrjälä   drm/i915: Add som...
2797
  	ilk_compute_wm_parameters(crtc, &params);
7c4a395ff   Ville Syrjälä   drm/i915: Don't r...
2798
2799
2800
2801
2802
  
  	intel_compute_pipe_wm(crtc, &params, &pipe_wm);
  
  	if (!memcmp(&intel_crtc->wm.active, &pipe_wm, sizeof(pipe_wm)))
  		return;
861f3389c   Paulo Zanoni   drm/i915: add sup...
2803

7c4a395ff   Ville Syrjälä   drm/i915: Don't r...
2804
  	intel_crtc->wm.active = pipe_wm;
861f3389c   Paulo Zanoni   drm/i915: add sup...
2805

2a44b76bb   Ville Syrjälä   drm/i915: Add som...
2806
  	ilk_compute_wm_config(dev, &config);
34982fe13   Ville Syrjälä   drm/i915: Rename ...
2807
  	ilk_compute_wm_maximums(dev, 1, &config, INTEL_DDB_PART_1_2, &max);
0ba22e26f   Ville Syrjälä   drm/i915: Don't m...
2808
  	ilk_wm_merge(dev, &config, &max, &lp_wm_1_2);
a485bfb8d   Ville Syrjälä   drm/i915: Move so...
2809
2810
  
  	/* 5/6 split only in single pipe config on IVB+ */
ec98c8d1f   Ville Syrjälä   drm/i915: Check 5...
2811
2812
  	if (INTEL_INFO(dev)->gen >= 7 &&
  	    config.num_pipes_active == 1 && config.sprites_enabled) {
34982fe13   Ville Syrjälä   drm/i915: Rename ...
2813
  		ilk_compute_wm_maximums(dev, 1, &config, INTEL_DDB_PART_5_6, &max);
0ba22e26f   Ville Syrjälä   drm/i915: Don't m...
2814
  		ilk_wm_merge(dev, &config, &max, &lp_wm_5_6);
0362c7816   Ville Syrjälä   drm/i915: Move LP...
2815

820c19803   Imre Deak   drm/i915: s/haswe...
2816
  		best_lp_wm = ilk_find_best_result(dev, &lp_wm_1_2, &lp_wm_5_6);
861f3389c   Paulo Zanoni   drm/i915: add sup...
2817
  	} else {
198a1e9b1   Ville Syrjälä   drm/i915: Use int...
2818
  		best_lp_wm = &lp_wm_1_2;
861f3389c   Paulo Zanoni   drm/i915: add sup...
2819
  	}
198a1e9b1   Ville Syrjälä   drm/i915: Use int...
2820
  	partitioning = (best_lp_wm == &lp_wm_1_2) ?
77c122bcc   Ville Syrjälä   drm/i915: Rename ...
2821
  		       INTEL_DDB_PART_1_2 : INTEL_DDB_PART_5_6;
801bcfffb   Paulo Zanoni   drm/i915: properl...
2822

820c19803   Imre Deak   drm/i915: s/haswe...
2823
  	ilk_compute_wm_results(dev, best_lp_wm, partitioning, &results);
609cedef6   Ville Syrjälä   drm/i915: Store c...
2824

820c19803   Imre Deak   drm/i915: s/haswe...
2825
  	ilk_write_wm_values(dev_priv, &results);
1011d8c43   Paulo Zanoni   drm/i915: remove ...
2826
  }
ed57cb8a5   Damien Lespiau   drm/i915: Also gi...
2827
2828
2829
2830
2831
  static void
  ilk_update_sprite_wm(struct drm_plane *plane,
  		     struct drm_crtc *crtc,
  		     uint32_t sprite_width, uint32_t sprite_height,
  		     int pixel_size, bool enabled, bool scaled)
526682e9f   Paulo Zanoni   drm/i915: add has...
2832
  {
8553c18ea   Ville Syrjälä   drm/i915: Try to ...
2833
  	struct drm_device *dev = plane->dev;
adf3d35e4   Ville Syrjälä   drm/i915: Pass pl...
2834
  	struct intel_plane *intel_plane = to_intel_plane(plane);
526682e9f   Paulo Zanoni   drm/i915: add has...
2835

adf3d35e4   Ville Syrjälä   drm/i915: Pass pl...
2836
2837
2838
  	intel_plane->wm.enabled = enabled;
  	intel_plane->wm.scaled = scaled;
  	intel_plane->wm.horiz_pixels = sprite_width;
ed57cb8a5   Damien Lespiau   drm/i915: Also gi...
2839
  	intel_plane->wm.vert_pixels = sprite_width;
adf3d35e4   Ville Syrjälä   drm/i915: Pass pl...
2840
  	intel_plane->wm.bytes_per_pixel = pixel_size;
526682e9f   Paulo Zanoni   drm/i915: add has...
2841

8553c18ea   Ville Syrjälä   drm/i915: Try to ...
2842
2843
2844
2845
2846
2847
2848
2849
2850
  	/*
  	 * IVB workaround: must disable low power watermarks for at least
  	 * one frame before enabling scaling.  LP watermarks can be re-enabled
  	 * when scaling is disabled.
  	 *
  	 * WaCxSRDisabledForSpriteScaling:ivb
  	 */
  	if (IS_IVYBRIDGE(dev) && scaled && ilk_disable_lp_wm(dev))
  		intel_wait_for_vblank(dev, intel_plane->pipe);
820c19803   Imre Deak   drm/i915: s/haswe...
2851
  	ilk_update_wm(crtc);
526682e9f   Paulo Zanoni   drm/i915: add has...
2852
  }
243e6a44b   Ville Syrjälä   drm/i915: Init HS...
2853
2854
2855
2856
  static void ilk_pipe_wm_get_hw_state(struct drm_crtc *crtc)
  {
  	struct drm_device *dev = crtc->dev;
  	struct drm_i915_private *dev_priv = dev->dev_private;
820c19803   Imre Deak   drm/i915: s/haswe...
2857
  	struct ilk_wm_values *hw = &dev_priv->wm.hw;
243e6a44b   Ville Syrjälä   drm/i915: Init HS...
2858
2859
2860
2861
2862
2863
2864
2865
2866
2867
  	struct intel_crtc *intel_crtc = to_intel_crtc(crtc);
  	struct intel_pipe_wm *active = &intel_crtc->wm.active;
  	enum pipe pipe = intel_crtc->pipe;
  	static const unsigned int wm0_pipe_reg[] = {
  		[PIPE_A] = WM0_PIPEA_ILK,
  		[PIPE_B] = WM0_PIPEB_ILK,
  		[PIPE_C] = WM0_PIPEC_IVB,
  	};
  
  	hw->wm_pipe[pipe] = I915_READ(wm0_pipe_reg[pipe]);
a42a57196   Ville Syrjälä   drm/i915: Fix wat...
2868
  	if (IS_HASWELL(dev) || IS_BROADWELL(dev))
ce0e0713a   Ville Syrjälä   drm/i915: Linetim...
2869
  		hw->wm_linetime[pipe] = I915_READ(PIPE_WM_LINETIME(pipe));
243e6a44b   Ville Syrjälä   drm/i915: Init HS...
2870

2a44b76bb   Ville Syrjälä   drm/i915: Add som...
2871
2872
2873
  	active->pipe_enabled = intel_crtc_active(crtc);
  
  	if (active->pipe_enabled) {
243e6a44b   Ville Syrjälä   drm/i915: Init HS...
2874
2875
2876
2877
2878
2879
2880
2881
2882
2883
2884
2885
2886
2887
2888
2889
2890
2891
2892
2893
2894
2895
2896
2897
2898
2899
2900
2901
2902
  		u32 tmp = hw->wm_pipe[pipe];
  
  		/*
  		 * For active pipes LP0 watermark is marked as
  		 * enabled, and LP1+ watermaks as disabled since
  		 * we can't really reverse compute them in case
  		 * multiple pipes are active.
  		 */
  		active->wm[0].enable = true;
  		active->wm[0].pri_val = (tmp & WM0_PIPE_PLANE_MASK) >> WM0_PIPE_PLANE_SHIFT;
  		active->wm[0].spr_val = (tmp & WM0_PIPE_SPRITE_MASK) >> WM0_PIPE_SPRITE_SHIFT;
  		active->wm[0].cur_val = tmp & WM0_PIPE_CURSOR_MASK;
  		active->linetime = hw->wm_linetime[pipe];
  	} else {
  		int level, max_level = ilk_wm_max_level(dev);
  
  		/*
  		 * For inactive pipes, all watermark levels
  		 * should be marked as enabled but zeroed,
  		 * which is what we'd compute them to.
  		 */
  		for (level = 0; level <= max_level; level++)
  			active->wm[level].enable = true;
  	}
  }
  
  void ilk_wm_get_hw_state(struct drm_device *dev)
  {
  	struct drm_i915_private *dev_priv = dev->dev_private;
820c19803   Imre Deak   drm/i915: s/haswe...
2903
  	struct ilk_wm_values *hw = &dev_priv->wm.hw;
243e6a44b   Ville Syrjälä   drm/i915: Init HS...
2904
  	struct drm_crtc *crtc;
70e1e0ec0   Damien Lespiau   drm/i915: Use for...
2905
  	for_each_crtc(dev, crtc)
243e6a44b   Ville Syrjälä   drm/i915: Init HS...
2906
2907
2908
2909
2910
2911
2912
  		ilk_pipe_wm_get_hw_state(crtc);
  
  	hw->wm_lp[0] = I915_READ(WM1_LP_ILK);
  	hw->wm_lp[1] = I915_READ(WM2_LP_ILK);
  	hw->wm_lp[2] = I915_READ(WM3_LP_ILK);
  
  	hw->wm_lp_spr[0] = I915_READ(WM1S_LP_ILK);
cfa7698bd   Ville Syrjälä   drm/i915: Don't r...
2913
2914
2915
2916
  	if (INTEL_INFO(dev)->gen >= 7) {
  		hw->wm_lp_spr[1] = I915_READ(WM2S_LP_IVB);
  		hw->wm_lp_spr[2] = I915_READ(WM3S_LP_IVB);
  	}
243e6a44b   Ville Syrjälä   drm/i915: Init HS...
2917

a42a57196   Ville Syrjälä   drm/i915: Fix wat...
2918
  	if (IS_HASWELL(dev) || IS_BROADWELL(dev))
ac9545fda   Ville Syrjälä   drm/i915: Add IVB...
2919
2920
2921
2922
2923
  		hw->partitioning = (I915_READ(WM_MISC) & WM_MISC_DATA_PARTITION_5_6) ?
  			INTEL_DDB_PART_5_6 : INTEL_DDB_PART_1_2;
  	else if (IS_IVYBRIDGE(dev))
  		hw->partitioning = (I915_READ(DISP_ARB_CTL2) & DISP_DATA_PARTITION_5_6) ?
  			INTEL_DDB_PART_5_6 : INTEL_DDB_PART_1_2;
243e6a44b   Ville Syrjälä   drm/i915: Init HS...
2924
2925
2926
2927
  
  	hw->enable_fbc_wm =
  		!(I915_READ(DISP_ARB_CTL) & DISP_FBC_WM_DIS);
  }
b445e3b01   Eugeni Dodonov   drm/i915: move wa...
2928
2929
2930
2931
2932
2933
2934
2935
2936
2937
2938
2939
2940
2941
2942
2943
2944
2945
2946
2947
2948
2949
2950
2951
2952
2953
2954
2955
2956
2957
2958
2959
  /**
   * intel_update_watermarks - update FIFO watermark values based on current modes
   *
   * Calculate watermark values for the various WM regs based on current mode
   * and plane configuration.
   *
   * There are several cases to deal with here:
   *   - normal (i.e. non-self-refresh)
   *   - self-refresh (SR) mode
   *   - lines are large relative to FIFO size (buffer can hold up to 2)
   *   - lines are small relative to FIFO size (buffer can hold more than 2
   *     lines), so need to account for TLB latency
   *
   *   The normal calculation is:
   *     watermark = dotclock * bytes per pixel * latency
   *   where latency is platform & configuration dependent (we assume pessimal
   *   values here).
   *
   *   The SR calculation is:
   *     watermark = (trunc(latency/line time)+1) * surface width *
   *       bytes per pixel
   *   where
   *     line time = htotal / dotclock
   *     surface width = hdisplay for normal plane and 64 for cursor
   *   and latency is assumed to be high, as above.
   *
   * The final value programmed to the register should always be rounded up,
   * and include an extra 2 entries to account for clock crossings.
   *
   * We don't use the sprite, so we can ignore that.  And on Crestline we have
   * to set the non-SR watermarks to 8.
   */
46ba614c0   Ville Syrjälä   drm/i915: Pass cr...
2960
  void intel_update_watermarks(struct drm_crtc *crtc)
b445e3b01   Eugeni Dodonov   drm/i915: move wa...
2961
  {
46ba614c0   Ville Syrjälä   drm/i915: Pass cr...
2962
  	struct drm_i915_private *dev_priv = crtc->dev->dev_private;
b445e3b01   Eugeni Dodonov   drm/i915: move wa...
2963
2964
  
  	if (dev_priv->display.update_wm)
46ba614c0   Ville Syrjälä   drm/i915: Pass cr...
2965
  		dev_priv->display.update_wm(crtc);
b445e3b01   Eugeni Dodonov   drm/i915: move wa...
2966
  }
adf3d35e4   Ville Syrjälä   drm/i915: Pass pl...
2967
2968
  void intel_update_sprite_watermarks(struct drm_plane *plane,
  				    struct drm_crtc *crtc,
ed57cb8a5   Damien Lespiau   drm/i915: Also gi...
2969
2970
2971
  				    uint32_t sprite_width,
  				    uint32_t sprite_height,
  				    int pixel_size,
39db4a4d7   Ville Syrjälä   drm/i915: Use 'en...
2972
  				    bool enabled, bool scaled)
b445e3b01   Eugeni Dodonov   drm/i915: move wa...
2973
  {
adf3d35e4   Ville Syrjälä   drm/i915: Pass pl...
2974
  	struct drm_i915_private *dev_priv = plane->dev->dev_private;
b445e3b01   Eugeni Dodonov   drm/i915: move wa...
2975
2976
  
  	if (dev_priv->display.update_sprite_wm)
ed57cb8a5   Damien Lespiau   drm/i915: Also gi...
2977
2978
  		dev_priv->display.update_sprite_wm(plane, crtc,
  						   sprite_width, sprite_height,
39db4a4d7   Ville Syrjälä   drm/i915: Use 'en...
2979
  						   pixel_size, enabled, scaled);
b445e3b01   Eugeni Dodonov   drm/i915: move wa...
2980
  }
2b4e57bd7   Eugeni Dodonov   drm/i915: move dr...
2981
2982
2983
2984
2985
2986
2987
2988
2989
2990
2991
2992
2993
2994
  static struct drm_i915_gem_object *
  intel_alloc_context_page(struct drm_device *dev)
  {
  	struct drm_i915_gem_object *ctx;
  	int ret;
  
  	WARN_ON(!mutex_is_locked(&dev->struct_mutex));
  
  	ctx = i915_gem_alloc_object(dev, 4096);
  	if (!ctx) {
  		DRM_DEBUG("failed to alloc power context, RC6 disabled
  ");
  		return NULL;
  	}
c69766f2b   Daniel Vetter   drm/i915: Don't a...
2995
  	ret = i915_gem_obj_ggtt_pin(ctx, 4096, 0);
2b4e57bd7   Eugeni Dodonov   drm/i915: move dr...
2996
2997
2998
2999
3000
3001
3002
3003
3004
3005
3006
3007
3008
3009
3010
3011
  	if (ret) {
  		DRM_ERROR("failed to pin power context: %d
  ", ret);
  		goto err_unref;
  	}
  
  	ret = i915_gem_object_set_to_gtt_domain(ctx, 1);
  	if (ret) {
  		DRM_ERROR("failed to set-domain on power context: %d
  ", ret);
  		goto err_unpin;
  	}
  
  	return ctx;
  
  err_unpin:
d7f46fc4e   Ben Widawsky   drm/i915: Make pi...
3012
  	i915_gem_object_ggtt_unpin(ctx);
2b4e57bd7   Eugeni Dodonov   drm/i915: move dr...
3013
3014
  err_unref:
  	drm_gem_object_unreference(&ctx->base);
2b4e57bd7   Eugeni Dodonov   drm/i915: move dr...
3015
3016
  	return NULL;
  }
9270388e1   Daniel Vetter   drm/i915: fix up ...
3017
3018
  /**
   * Lock protecting IPS related data structures
9270388e1   Daniel Vetter   drm/i915: fix up ...
3019
3020
3021
3022
3023
3024
   */
  DEFINE_SPINLOCK(mchdev_lock);
  
  /* Global for IPS driver to get at the current i915 device. Protected by
   * mchdev_lock. */
  static struct drm_i915_private *i915_mch_dev;
2b4e57bd7   Eugeni Dodonov   drm/i915: move dr...
3025
3026
3027
3028
  bool ironlake_set_drps(struct drm_device *dev, u8 val)
  {
  	struct drm_i915_private *dev_priv = dev->dev_private;
  	u16 rgvswctl;
9270388e1   Daniel Vetter   drm/i915: fix up ...
3029
  	assert_spin_locked(&mchdev_lock);
2b4e57bd7   Eugeni Dodonov   drm/i915: move dr...
3030
3031
3032
3033
3034
3035
3036
3037
3038
3039
3040
3041
3042
3043
3044
3045
3046
  	rgvswctl = I915_READ16(MEMSWCTL);
  	if (rgvswctl & MEMCTL_CMD_STS) {
  		DRM_DEBUG("gpu busy, RCS change rejected
  ");
  		return false; /* still busy with another command */
  	}
  
  	rgvswctl = (MEMCTL_CMD_CHFREQ << MEMCTL_CMD_SHIFT) |
  		(val << MEMCTL_FREQ_SHIFT) | MEMCTL_SFCAVM;
  	I915_WRITE16(MEMSWCTL, rgvswctl);
  	POSTING_READ16(MEMSWCTL);
  
  	rgvswctl |= MEMCTL_CMD_STS;
  	I915_WRITE16(MEMSWCTL, rgvswctl);
  
  	return true;
  }
8090c6b9d   Daniel Vetter   drm/i915: wrap up...
3047
  static void ironlake_enable_drps(struct drm_device *dev)
2b4e57bd7   Eugeni Dodonov   drm/i915: move dr...
3048
3049
3050
3051
  {
  	struct drm_i915_private *dev_priv = dev->dev_private;
  	u32 rgvmodectl = I915_READ(MEMMODECTL);
  	u8 fmax, fmin, fstart, vstart;
9270388e1   Daniel Vetter   drm/i915: fix up ...
3052
  	spin_lock_irq(&mchdev_lock);
2b4e57bd7   Eugeni Dodonov   drm/i915: move dr...
3053
3054
3055
3056
3057
3058
3059
3060
3061
3062
3063
3064
3065
3066
3067
3068
3069
3070
3071
3072
3073
3074
  	/* Enable temp reporting */
  	I915_WRITE16(PMMISC, I915_READ(PMMISC) | MCPPCE_EN);
  	I915_WRITE16(TSC1, I915_READ(TSC1) | TSE);
  
  	/* 100ms RC evaluation intervals */
  	I915_WRITE(RCUPEI, 100000);
  	I915_WRITE(RCDNEI, 100000);
  
  	/* Set max/min thresholds to 90ms and 80ms respectively */
  	I915_WRITE(RCBMAXAVG, 90000);
  	I915_WRITE(RCBMINAVG, 80000);
  
  	I915_WRITE(MEMIHYST, 1);
  
  	/* Set up min, max, and cur for interrupt handling */
  	fmax = (rgvmodectl & MEMMODE_FMAX_MASK) >> MEMMODE_FMAX_SHIFT;
  	fmin = (rgvmodectl & MEMMODE_FMIN_MASK);
  	fstart = (rgvmodectl & MEMMODE_FSTART_MASK) >>
  		MEMMODE_FSTART_SHIFT;
  
  	vstart = (I915_READ(PXVFREQ_BASE + (fstart * 4)) & PXVFREQ_PX_MASK) >>
  		PXVFREQ_PX_SHIFT;
20e4d407f   Daniel Vetter   drm/ips: move drp...
3075
3076
  	dev_priv->ips.fmax = fmax; /* IPS callback will increase this */
  	dev_priv->ips.fstart = fstart;
2b4e57bd7   Eugeni Dodonov   drm/i915: move dr...
3077

20e4d407f   Daniel Vetter   drm/ips: move drp...
3078
3079
3080
  	dev_priv->ips.max_delay = fstart;
  	dev_priv->ips.min_delay = fmin;
  	dev_priv->ips.cur_delay = fstart;
2b4e57bd7   Eugeni Dodonov   drm/i915: move dr...
3081
3082
3083
3084
3085
3086
3087
3088
3089
3090
3091
3092
3093
3094
3095
3096
  
  	DRM_DEBUG_DRIVER("fmax: %d, fmin: %d, fstart: %d
  ",
  			 fmax, fmin, fstart);
  
  	I915_WRITE(MEMINTREN, MEMINT_CX_SUPR_EN | MEMINT_EVAL_CHG_EN);
  
  	/*
  	 * Interrupts will be enabled in ironlake_irq_postinstall
  	 */
  
  	I915_WRITE(VIDSTART, vstart);
  	POSTING_READ(VIDSTART);
  
  	rgvmodectl |= MEMMODE_SWMODE_EN;
  	I915_WRITE(MEMMODECTL, rgvmodectl);
9270388e1   Daniel Vetter   drm/i915: fix up ...
3097
  	if (wait_for_atomic((I915_READ(MEMSWCTL) & MEMCTL_CMD_STS) == 0, 10))
2b4e57bd7   Eugeni Dodonov   drm/i915: move dr...
3098
3099
  		DRM_ERROR("stuck trying to change perf mode
  ");
9270388e1   Daniel Vetter   drm/i915: fix up ...
3100
  	mdelay(1);
2b4e57bd7   Eugeni Dodonov   drm/i915: move dr...
3101
3102
  
  	ironlake_set_drps(dev, fstart);
20e4d407f   Daniel Vetter   drm/ips: move drp...
3103
  	dev_priv->ips.last_count1 = I915_READ(0x112e4) + I915_READ(0x112e8) +
2b4e57bd7   Eugeni Dodonov   drm/i915: move dr...
3104
  		I915_READ(0x112e0);
20e4d407f   Daniel Vetter   drm/ips: move drp...
3105
3106
  	dev_priv->ips.last_time1 = jiffies_to_msecs(jiffies);
  	dev_priv->ips.last_count2 = I915_READ(0x112f4);
5ed0bdf21   Thomas Gleixner   drm: i915: Use ns...
3107
  	dev_priv->ips.last_time2 = ktime_get_raw_ns();
9270388e1   Daniel Vetter   drm/i915: fix up ...
3108
3109
  
  	spin_unlock_irq(&mchdev_lock);
2b4e57bd7   Eugeni Dodonov   drm/i915: move dr...
3110
  }
8090c6b9d   Daniel Vetter   drm/i915: wrap up...
3111
  static void ironlake_disable_drps(struct drm_device *dev)
2b4e57bd7   Eugeni Dodonov   drm/i915: move dr...
3112
3113
  {
  	struct drm_i915_private *dev_priv = dev->dev_private;
9270388e1   Daniel Vetter   drm/i915: fix up ...
3114
3115
3116
3117
3118
  	u16 rgvswctl;
  
  	spin_lock_irq(&mchdev_lock);
  
  	rgvswctl = I915_READ16(MEMSWCTL);
2b4e57bd7   Eugeni Dodonov   drm/i915: move dr...
3119
3120
3121
3122
3123
3124
3125
3126
3127
  
  	/* Ack interrupts, disable EFC interrupt */
  	I915_WRITE(MEMINTREN, I915_READ(MEMINTREN) & ~MEMINT_EVAL_CHG_EN);
  	I915_WRITE(MEMINTRSTS, MEMINT_EVAL_CHG);
  	I915_WRITE(DEIER, I915_READ(DEIER) & ~DE_PCU_EVENT);
  	I915_WRITE(DEIIR, DE_PCU_EVENT);
  	I915_WRITE(DEIMR, I915_READ(DEIMR) | DE_PCU_EVENT);
  
  	/* Go back to the starting frequency */
20e4d407f   Daniel Vetter   drm/ips: move drp...
3128
  	ironlake_set_drps(dev, dev_priv->ips.fstart);
9270388e1   Daniel Vetter   drm/i915: fix up ...
3129
  	mdelay(1);
2b4e57bd7   Eugeni Dodonov   drm/i915: move dr...
3130
3131
  	rgvswctl |= MEMCTL_CMD_STS;
  	I915_WRITE(MEMSWCTL, rgvswctl);
9270388e1   Daniel Vetter   drm/i915: fix up ...
3132
  	mdelay(1);
2b4e57bd7   Eugeni Dodonov   drm/i915: move dr...
3133

9270388e1   Daniel Vetter   drm/i915: fix up ...
3134
  	spin_unlock_irq(&mchdev_lock);
2b4e57bd7   Eugeni Dodonov   drm/i915: move dr...
3135
  }
acbe94755   Daniel Vetter   drm/i915: rip out...
3136
3137
3138
3139
3140
  /* There's a funny hw issue where the hw returns all 0 when reading from
   * GEN6_RP_INTERRUPT_LIMITS. Hence we always need to compute the desired value
   * ourselves, instead of doing a rmw cycle (which might result in us clearing
   * all limits and the gpu stuck at whatever frequency it is at atm).
   */
6917c7b9d   Chris Wilson   drm/i915: Initial...
3141
  static u32 gen6_rps_limits(struct drm_i915_private *dev_priv, u8 val)
2b4e57bd7   Eugeni Dodonov   drm/i915: move dr...
3142
  {
7b9e0ae6d   Chris Wilson   drm/i915: Always ...
3143
  	u32 limits;
2b4e57bd7   Eugeni Dodonov   drm/i915: move dr...
3144

20b46e59d   Daniel Vetter   drm/i915: Only se...
3145
3146
3147
3148
3149
3150
  	/* Only set the down limit when we've reached the lowest level to avoid
  	 * getting more interrupts, otherwise leave this clear. This prevents a
  	 * race in the hw when coming out of rc6: There's a tiny window where
  	 * the hw runs at the minimal clock before selecting the desired
  	 * frequency, if the down threshold expires in that window we will not
  	 * receive a down interrupt. */
b39fb2977   Ben Widawsky   drm/i915: Rename ...
3151
3152
3153
  	limits = dev_priv->rps.max_freq_softlimit << 24;
  	if (val <= dev_priv->rps.min_freq_softlimit)
  		limits |= dev_priv->rps.min_freq_softlimit << 16;
20b46e59d   Daniel Vetter   drm/i915: Only se...
3154
3155
3156
  
  	return limits;
  }
dd75fdc8c   Chris Wilson   drm/i915: Tweak R...
3157
3158
3159
3160
3161
3162
3163
  static void gen6_set_rps_thresholds(struct drm_i915_private *dev_priv, u8 val)
  {
  	int new_power;
  
  	new_power = dev_priv->rps.power;
  	switch (dev_priv->rps.power) {
  	case LOW_POWER:
b39fb2977   Ben Widawsky   drm/i915: Rename ...
3164
  		if (val > dev_priv->rps.efficient_freq + 1 && val > dev_priv->rps.cur_freq)
dd75fdc8c   Chris Wilson   drm/i915: Tweak R...
3165
3166
3167
3168
  			new_power = BETWEEN;
  		break;
  
  	case BETWEEN:
b39fb2977   Ben Widawsky   drm/i915: Rename ...
3169
  		if (val <= dev_priv->rps.efficient_freq && val < dev_priv->rps.cur_freq)
dd75fdc8c   Chris Wilson   drm/i915: Tweak R...
3170
  			new_power = LOW_POWER;
b39fb2977   Ben Widawsky   drm/i915: Rename ...
3171
  		else if (val >= dev_priv->rps.rp0_freq && val > dev_priv->rps.cur_freq)
dd75fdc8c   Chris Wilson   drm/i915: Tweak R...
3172
3173
3174
3175
  			new_power = HIGH_POWER;
  		break;
  
  	case HIGH_POWER:
b39fb2977   Ben Widawsky   drm/i915: Rename ...
3176
  		if (val < (dev_priv->rps.rp1_freq + dev_priv->rps.rp0_freq) >> 1 && val < dev_priv->rps.cur_freq)
dd75fdc8c   Chris Wilson   drm/i915: Tweak R...
3177
3178
3179
3180
  			new_power = BETWEEN;
  		break;
  	}
  	/* Max/min bins are special */
b39fb2977   Ben Widawsky   drm/i915: Rename ...
3181
  	if (val == dev_priv->rps.min_freq_softlimit)
dd75fdc8c   Chris Wilson   drm/i915: Tweak R...
3182
  		new_power = LOW_POWER;
b39fb2977   Ben Widawsky   drm/i915: Rename ...
3183
  	if (val == dev_priv->rps.max_freq_softlimit)
dd75fdc8c   Chris Wilson   drm/i915: Tweak R...
3184
3185
3186
3187
3188
3189
3190
3191
3192
3193
3194
3195
3196
3197
3198
3199
3200
3201
3202
3203
3204
3205
3206
3207
3208
3209
3210
3211
3212
3213
3214
3215
3216
3217
3218
3219
3220
3221
3222
3223
3224
3225
3226
3227
3228
3229
3230
3231
3232
3233
3234
3235
3236
3237
3238
3239
3240
3241
3242
3243
3244
3245
3246
3247
  		new_power = HIGH_POWER;
  	if (new_power == dev_priv->rps.power)
  		return;
  
  	/* Note the units here are not exactly 1us, but 1280ns. */
  	switch (new_power) {
  	case LOW_POWER:
  		/* Upclock if more than 95% busy over 16ms */
  		I915_WRITE(GEN6_RP_UP_EI, 12500);
  		I915_WRITE(GEN6_RP_UP_THRESHOLD, 11800);
  
  		/* Downclock if less than 85% busy over 32ms */
  		I915_WRITE(GEN6_RP_DOWN_EI, 25000);
  		I915_WRITE(GEN6_RP_DOWN_THRESHOLD, 21250);
  
  		I915_WRITE(GEN6_RP_CONTROL,
  			   GEN6_RP_MEDIA_TURBO |
  			   GEN6_RP_MEDIA_HW_NORMAL_MODE |
  			   GEN6_RP_MEDIA_IS_GFX |
  			   GEN6_RP_ENABLE |
  			   GEN6_RP_UP_BUSY_AVG |
  			   GEN6_RP_DOWN_IDLE_AVG);
  		break;
  
  	case BETWEEN:
  		/* Upclock if more than 90% busy over 13ms */
  		I915_WRITE(GEN6_RP_UP_EI, 10250);
  		I915_WRITE(GEN6_RP_UP_THRESHOLD, 9225);
  
  		/* Downclock if less than 75% busy over 32ms */
  		I915_WRITE(GEN6_RP_DOWN_EI, 25000);
  		I915_WRITE(GEN6_RP_DOWN_THRESHOLD, 18750);
  
  		I915_WRITE(GEN6_RP_CONTROL,
  			   GEN6_RP_MEDIA_TURBO |
  			   GEN6_RP_MEDIA_HW_NORMAL_MODE |
  			   GEN6_RP_MEDIA_IS_GFX |
  			   GEN6_RP_ENABLE |
  			   GEN6_RP_UP_BUSY_AVG |
  			   GEN6_RP_DOWN_IDLE_AVG);
  		break;
  
  	case HIGH_POWER:
  		/* Upclock if more than 85% busy over 10ms */
  		I915_WRITE(GEN6_RP_UP_EI, 8000);
  		I915_WRITE(GEN6_RP_UP_THRESHOLD, 6800);
  
  		/* Downclock if less than 60% busy over 32ms */
  		I915_WRITE(GEN6_RP_DOWN_EI, 25000);
  		I915_WRITE(GEN6_RP_DOWN_THRESHOLD, 15000);
  
  		I915_WRITE(GEN6_RP_CONTROL,
  			   GEN6_RP_MEDIA_TURBO |
  			   GEN6_RP_MEDIA_HW_NORMAL_MODE |
  			   GEN6_RP_MEDIA_IS_GFX |
  			   GEN6_RP_ENABLE |
  			   GEN6_RP_UP_BUSY_AVG |
  			   GEN6_RP_DOWN_IDLE_AVG);
  		break;
  	}
  
  	dev_priv->rps.power = new_power;
  	dev_priv->rps.last_adj = 0;
  }
2876ce734   Chris Wilson   drm/i915: Mask PM...
3248
3249
3250
3251
3252
3253
3254
3255
  static u32 gen6_rps_pm_mask(struct drm_i915_private *dev_priv, u8 val)
  {
  	u32 mask = 0;
  
  	if (val > dev_priv->rps.min_freq_softlimit)
  		mask |= GEN6_PM_RP_DOWN_THRESHOLD | GEN6_PM_RP_DOWN_TIMEOUT;
  	if (val < dev_priv->rps.max_freq_softlimit)
  		mask |= GEN6_PM_RP_UP_THRESHOLD;
7b3c29f6f   Chris Wilson   drm/i915: Make th...
3256
3257
  	mask |= dev_priv->pm_rps_events & (GEN6_PM_RP_DOWN_EI_EXPIRED | GEN6_PM_RP_UP_EI_EXPIRED);
  	mask &= dev_priv->pm_rps_events;
2876ce734   Chris Wilson   drm/i915: Mask PM...
3258
3259
3260
3261
3262
  	/* IVB and SNB hard hangs on looping batchbuffer
  	 * if GEN6_PM_UP_EI_EXPIRED is masked.
  	 */
  	if (INTEL_INFO(dev_priv->dev)->gen <= 7 && !IS_HASWELL(dev_priv->dev))
  		mask |= GEN6_PM_RP_UP_EI_EXPIRED;
baccd4586   Deepak S   drm/i915: Enable ...
3263
3264
  	if (IS_GEN8(dev_priv->dev))
  		mask |= GEN8_PMINTR_REDIRECT_TO_NON_DISP;
2876ce734   Chris Wilson   drm/i915: Mask PM...
3265
3266
  	return ~mask;
  }
b8a5ff8d7   Jeff McGee   drm/i915: Update ...
3267
3268
3269
  /* gen6_set_rps is called to update the frequency request, but should also be
   * called when the range (min_delay and max_delay) is modified so that we can
   * update the GEN6_RP_INTERRUPT_LIMITS register accordingly. */
20b46e59d   Daniel Vetter   drm/i915: Only se...
3270
3271
3272
  void gen6_set_rps(struct drm_device *dev, u8 val)
  {
  	struct drm_i915_private *dev_priv = dev->dev_private;
7b9e0ae6d   Chris Wilson   drm/i915: Always ...
3273

4fc688ce7   Jesse Barnes   drm/i915: protect...
3274
  	WARN_ON(!mutex_is_locked(&dev_priv->rps.hw_lock));
b39fb2977   Ben Widawsky   drm/i915: Rename ...
3275
3276
  	WARN_ON(val > dev_priv->rps.max_freq_softlimit);
  	WARN_ON(val < dev_priv->rps.min_freq_softlimit);
004777cb2   Daniel Vetter   drm/i915: fixup u...
3277

eb64cad1c   Chris Wilson   drm/i915: Refacto...
3278
3279
3280
3281
3282
  	/* min/max delay may still have been modified so be sure to
  	 * write the limits value.
  	 */
  	if (val != dev_priv->rps.cur_freq) {
  		gen6_set_rps_thresholds(dev_priv, val);
b8a5ff8d7   Jeff McGee   drm/i915: Update ...
3283

50e6a2a74   Ben Widawsky   drm/i915/bdw: RPS...
3284
  		if (IS_HASWELL(dev) || IS_BROADWELL(dev))
eb64cad1c   Chris Wilson   drm/i915: Refacto...
3285
3286
3287
3288
3289
3290
3291
  			I915_WRITE(GEN6_RPNSWREQ,
  				   HSW_FREQUENCY(val));
  		else
  			I915_WRITE(GEN6_RPNSWREQ,
  				   GEN6_FREQUENCY(val) |
  				   GEN6_OFFSET(0) |
  				   GEN6_AGGRESSIVE_TURBO);
b8a5ff8d7   Jeff McGee   drm/i915: Update ...
3292
  	}
7b9e0ae6d   Chris Wilson   drm/i915: Always ...
3293

7b9e0ae6d   Chris Wilson   drm/i915: Always ...
3294
3295
3296
  	/* Make sure we continue to get interrupts
  	 * until we hit the minimum or maximum frequencies.
  	 */
eb64cad1c   Chris Wilson   drm/i915: Refacto...
3297
  	I915_WRITE(GEN6_RP_INTERRUPT_LIMITS, gen6_rps_limits(dev_priv, val));
2876ce734   Chris Wilson   drm/i915: Mask PM...
3298
  	I915_WRITE(GEN6_PMINTRMSK, gen6_rps_pm_mask(dev_priv, val));
7b9e0ae6d   Chris Wilson   drm/i915: Always ...
3299

d5570a724   Ben Widawsky   drm/i915: POSTING...
3300
  	POSTING_READ(GEN6_RPNSWREQ);
b39fb2977   Ben Widawsky   drm/i915: Rename ...
3301
  	dev_priv->rps.cur_freq = val;
be2cde9a6   Daniel Vetter   drm/i915: add a t...
3302
  	trace_intel_gpu_freq_change(val * 50);
2b4e57bd7   Eugeni Dodonov   drm/i915: move dr...
3303
  }
76c3552f9   Deepak S   drm/i915/vlv: WA ...
3304
3305
3306
3307
3308
3309
3310
3311
3312
3313
3314
  /* vlv_set_rps_idle: Set the frequency to Rpn if Gfx clocks are down
   *
   * * If Gfx is Idle, then
   * 1. Mask Turbo interrupts
   * 2. Bring up Gfx clock
   * 3. Change the freq to Rpn and wait till P-Unit updates freq
   * 4. Clear the Force GFX CLK ON bit so that Gfx can down
   * 5. Unmask Turbo interrupts
  */
  static void vlv_set_rps_idle(struct drm_i915_private *dev_priv)
  {
5549d25f6   Deepak S   drm/i915: Drop ea...
3315
3316
3317
3318
3319
3320
3321
  	struct drm_device *dev = dev_priv->dev;
  
  	/* Latest VLV doesn't need to force the gfx clock */
  	if (dev->pdev->revision >= 0xd) {
  		valleyview_set_rps(dev_priv->dev, dev_priv->rps.min_freq_softlimit);
  		return;
  	}
76c3552f9   Deepak S   drm/i915/vlv: WA ...
3322
3323
3324
  	/*
  	 * When we are idle.  Drop to min voltage state.
  	 */
b39fb2977   Ben Widawsky   drm/i915: Rename ...
3325
  	if (dev_priv->rps.cur_freq <= dev_priv->rps.min_freq_softlimit)
76c3552f9   Deepak S   drm/i915/vlv: WA ...
3326
3327
3328
3329
  		return;
  
  	/* Mask turbo interrupt so that they will not come in between */
  	I915_WRITE(GEN6_PMINTRMSK, 0xffffffff);
650ad970a   Imre Deak   drm/i915: vlv: fa...
3330
  	vlv_force_gfx_clock(dev_priv, true);
76c3552f9   Deepak S   drm/i915/vlv: WA ...
3331

b39fb2977   Ben Widawsky   drm/i915: Rename ...
3332
  	dev_priv->rps.cur_freq = dev_priv->rps.min_freq_softlimit;
76c3552f9   Deepak S   drm/i915/vlv: WA ...
3333
3334
  
  	vlv_punit_write(dev_priv, PUNIT_REG_GPU_FREQ_REQ,
b39fb2977   Ben Widawsky   drm/i915: Rename ...
3335
  					dev_priv->rps.min_freq_softlimit);
76c3552f9   Deepak S   drm/i915/vlv: WA ...
3336
3337
3338
3339
3340
  
  	if (wait_for(((vlv_punit_read(dev_priv, PUNIT_REG_GPU_FREQ_STS))
  				& GENFREQSTATUS) == 0, 5))
  		DRM_ERROR("timed out waiting for Punit
  ");
650ad970a   Imre Deak   drm/i915: vlv: fa...
3341
  	vlv_force_gfx_clock(dev_priv, false);
76c3552f9   Deepak S   drm/i915/vlv: WA ...
3342

2876ce734   Chris Wilson   drm/i915: Mask PM...
3343
3344
  	I915_WRITE(GEN6_PMINTRMSK,
  		   gen6_rps_pm_mask(dev_priv, dev_priv->rps.cur_freq));
76c3552f9   Deepak S   drm/i915/vlv: WA ...
3345
  }
b29c19b64   Chris Wilson   drm/i915: Boost R...
3346
3347
  void gen6_rps_idle(struct drm_i915_private *dev_priv)
  {
691bb7175   Damien Lespiau   drm/i915: Use IS_...
3348
  	struct drm_device *dev = dev_priv->dev;
b29c19b64   Chris Wilson   drm/i915: Boost R...
3349
  	mutex_lock(&dev_priv->rps.hw_lock);
c0951f0c9   Chris Wilson   drm/i915: Avoid t...
3350
  	if (dev_priv->rps.enabled) {
34638118f   Deepak S   drm/i915/chv: Dro...
3351
3352
3353
  		if (IS_CHERRYVIEW(dev))
  			valleyview_set_rps(dev_priv->dev, dev_priv->rps.min_freq_softlimit);
  		else if (IS_VALLEYVIEW(dev))
76c3552f9   Deepak S   drm/i915/vlv: WA ...
3354
  			vlv_set_rps_idle(dev_priv);
7526ed79b   Daniel Vetter   Revert "drm/i915/...
3355
  		else
b39fb2977   Ben Widawsky   drm/i915: Rename ...
3356
  			gen6_set_rps(dev_priv->dev, dev_priv->rps.min_freq_softlimit);
c0951f0c9   Chris Wilson   drm/i915: Avoid t...
3357
3358
  		dev_priv->rps.last_adj = 0;
  	}
b29c19b64   Chris Wilson   drm/i915: Boost R...
3359
3360
3361
3362
3363
  	mutex_unlock(&dev_priv->rps.hw_lock);
  }
  
  void gen6_rps_boost(struct drm_i915_private *dev_priv)
  {
691bb7175   Damien Lespiau   drm/i915: Use IS_...
3364
  	struct drm_device *dev = dev_priv->dev;
b29c19b64   Chris Wilson   drm/i915: Boost R...
3365
  	mutex_lock(&dev_priv->rps.hw_lock);
c0951f0c9   Chris Wilson   drm/i915: Avoid t...
3366
  	if (dev_priv->rps.enabled) {
691bb7175   Damien Lespiau   drm/i915: Use IS_...
3367
  		if (IS_VALLEYVIEW(dev))
b39fb2977   Ben Widawsky   drm/i915: Rename ...
3368
  			valleyview_set_rps(dev_priv->dev, dev_priv->rps.max_freq_softlimit);
7526ed79b   Daniel Vetter   Revert "drm/i915/...
3369
  		else
b39fb2977   Ben Widawsky   drm/i915: Rename ...
3370
  			gen6_set_rps(dev_priv->dev, dev_priv->rps.max_freq_softlimit);
c0951f0c9   Chris Wilson   drm/i915: Avoid t...
3371
3372
  		dev_priv->rps.last_adj = 0;
  	}
b29c19b64   Chris Wilson   drm/i915: Boost R...
3373
3374
  	mutex_unlock(&dev_priv->rps.hw_lock);
  }
0a073b843   Jesse Barnes   drm/i915: turbo &...
3375
3376
3377
  void valleyview_set_rps(struct drm_device *dev, u8 val)
  {
  	struct drm_i915_private *dev_priv = dev->dev_private;
7a67092a2   Ville Syrjälä   drm/i915: GEN6_RP...
3378

0a073b843   Jesse Barnes   drm/i915: turbo &...
3379
  	WARN_ON(!mutex_is_locked(&dev_priv->rps.hw_lock));
b39fb2977   Ben Widawsky   drm/i915: Rename ...
3380
3381
  	WARN_ON(val > dev_priv->rps.max_freq_softlimit);
  	WARN_ON(val < dev_priv->rps.min_freq_softlimit);
0a073b843   Jesse Barnes   drm/i915: turbo &...
3382

1c14762d0   Ville Syrjälä   drm/i915: Warn ab...
3383
3384
3385
3386
  	if (WARN_ONCE(IS_CHERRYVIEW(dev) && (val & 1),
  		      "Odd GPU freq value
  "))
  		val &= ~1;
67956867a   Ville Syrjälä   drm/i915: Don't s...
3387
3388
3389
3390
3391
3392
  	if (val != dev_priv->rps.cur_freq) {
  		DRM_DEBUG_DRIVER("GPU freq request from %d MHz (%u) to %d MHz (%u)
  ",
  				 vlv_gpu_freq(dev_priv, dev_priv->rps.cur_freq),
  				 dev_priv->rps.cur_freq,
  				 vlv_gpu_freq(dev_priv, val), val);
2876ce734   Chris Wilson   drm/i915: Mask PM...
3393
  		vlv_punit_write(dev_priv, PUNIT_REG_GPU_FREQ_REQ, val);
67956867a   Ville Syrjälä   drm/i915: Don't s...
3394
  	}
0a073b843   Jesse Barnes   drm/i915: turbo &...
3395

09c87db8b   Imre Deak   drm/i915: vlv: fi...
3396
  	I915_WRITE(GEN6_PMINTRMSK, gen6_rps_pm_mask(dev_priv, val));
0a073b843   Jesse Barnes   drm/i915: turbo &...
3397

b39fb2977   Ben Widawsky   drm/i915: Rename ...
3398
  	dev_priv->rps.cur_freq = val;
2ec3815f2   Ville Syrjälä   drm/i915: Pass de...
3399
  	trace_intel_gpu_freq_change(vlv_gpu_freq(dev_priv, val));
0a073b843   Jesse Barnes   drm/i915: turbo &...
3400
  }
0961021ae   Ben Widawsky   drm/i915/bdw: Imp...
3401
3402
3403
  static void gen8_disable_rps_interrupts(struct drm_device *dev)
  {
  	struct drm_i915_private *dev_priv = dev->dev_private;
7526ed79b   Daniel Vetter   Revert "drm/i915/...
3404
3405
3406
3407
3408
3409
3410
3411
3412
3413
3414
3415
3416
3417
3418
  
  	I915_WRITE(GEN6_PMINTRMSK, ~GEN8_PMINTR_REDIRECT_TO_NON_DISP);
  	I915_WRITE(GEN8_GT_IER(2), I915_READ(GEN8_GT_IER(2)) &
  				   ~dev_priv->pm_rps_events);
  	/* Complete PM interrupt masking here doesn't race with the rps work
  	 * item again unmasking PM interrupts because that is using a different
  	 * register (GEN8_GT_IMR(2)) to mask PM interrupts. The only risk is in
  	 * leaving stale bits in GEN8_GT_IIR(2) and GEN8_GT_IMR(2) which
  	 * gen8_enable_rps will clean up. */
  
  	spin_lock_irq(&dev_priv->irq_lock);
  	dev_priv->rps.pm_iir = 0;
  	spin_unlock_irq(&dev_priv->irq_lock);
  
  	I915_WRITE(GEN8_GT_IIR(2), dev_priv->pm_rps_events);
0961021ae   Ben Widawsky   drm/i915/bdw: Imp...
3419
  }
44fc7d5cf   Daniel Vetter   drm/i915: extract...
3420
  static void gen6_disable_rps_interrupts(struct drm_device *dev)
2b4e57bd7   Eugeni Dodonov   drm/i915: move dr...
3421
3422
  {
  	struct drm_i915_private *dev_priv = dev->dev_private;
2b4e57bd7   Eugeni Dodonov   drm/i915: move dr...
3423
  	I915_WRITE(GEN6_PMINTRMSK, 0xffffffff);
a6706b45a   Deepak S   drm/i915: Track t...
3424
3425
  	I915_WRITE(GEN6_PMIER, I915_READ(GEN6_PMIER) &
  				~dev_priv->pm_rps_events);
2b4e57bd7   Eugeni Dodonov   drm/i915: move dr...
3426
3427
3428
3429
  	/* Complete PM interrupt masking here doesn't race with the rps work
  	 * item again unmasking PM interrupts because that is using a different
  	 * register (PMIMR) to mask PM interrupts. The only risk is in leaving
  	 * stale bits in PMIIR and PMIMR which gen6_enable_rps will clean up. */
59cdb63d5   Daniel Vetter   drm/i915: kill de...
3430
  	spin_lock_irq(&dev_priv->irq_lock);
c6a828d32   Daniel Vetter   drm/i915: move al...
3431
  	dev_priv->rps.pm_iir = 0;
59cdb63d5   Daniel Vetter   drm/i915: kill de...
3432
  	spin_unlock_irq(&dev_priv->irq_lock);
2b4e57bd7   Eugeni Dodonov   drm/i915: move dr...
3433

a6706b45a   Deepak S   drm/i915: Track t...
3434
  	I915_WRITE(GEN6_PMIIR, dev_priv->pm_rps_events);
2b4e57bd7   Eugeni Dodonov   drm/i915: move dr...
3435
  }
44fc7d5cf   Daniel Vetter   drm/i915: extract...
3436
  static void gen6_disable_rps(struct drm_device *dev)
d20d4f0ca   Jesse Barnes   drm/i915: create ...
3437
3438
3439
3440
  {
  	struct drm_i915_private *dev_priv = dev->dev_private;
  
  	I915_WRITE(GEN6_RC_CONTROL, 0);
44fc7d5cf   Daniel Vetter   drm/i915: extract...
3441
  	I915_WRITE(GEN6_RPNSWREQ, 1 << 31);
d20d4f0ca   Jesse Barnes   drm/i915: create ...
3442

0961021ae   Ben Widawsky   drm/i915/bdw: Imp...
3443
3444
3445
3446
  	if (IS_BROADWELL(dev))
  		gen8_disable_rps_interrupts(dev);
  	else
  		gen6_disable_rps_interrupts(dev);
44fc7d5cf   Daniel Vetter   drm/i915: extract...
3447
  }
38807746f   Deepak S   drm/i915/chv: Ena...
3448
3449
3450
3451
3452
  static void cherryview_disable_rps(struct drm_device *dev)
  {
  	struct drm_i915_private *dev_priv = dev->dev_private;
  
  	I915_WRITE(GEN6_RC_CONTROL, 0);
3497a5620   Deepak S   drm/i915/chv: Add...
3453
3454
  
  	gen8_disable_rps_interrupts(dev);
38807746f   Deepak S   drm/i915/chv: Ena...
3455
  }
44fc7d5cf   Daniel Vetter   drm/i915: extract...
3456
3457
3458
  static void valleyview_disable_rps(struct drm_device *dev)
  {
  	struct drm_i915_private *dev_priv = dev->dev_private;
98a2e5f94   Deepak S   drm/i915: Bring U...
3459
3460
3461
  	/* we're doing forcewake before Disabling RC6,
  	 * This what the BIOS expects when going into suspend */
  	gen6_gt_force_wake_get(dev_priv, FORCEWAKE_ALL);
44fc7d5cf   Daniel Vetter   drm/i915: extract...
3462
  	I915_WRITE(GEN6_RC_CONTROL, 0);
d20d4f0ca   Jesse Barnes   drm/i915: create ...
3463

98a2e5f94   Deepak S   drm/i915: Bring U...
3464
  	gen6_gt_force_wake_put(dev_priv, FORCEWAKE_ALL);
44fc7d5cf   Daniel Vetter   drm/i915: extract...
3465
  	gen6_disable_rps_interrupts(dev);
d20d4f0ca   Jesse Barnes   drm/i915: create ...
3466
  }
dc39fff72   Ben Widawsky   drm/i915: Print R...
3467
3468
  static void intel_print_rc6_info(struct drm_device *dev, u32 mode)
  {
91ca689a0   Imre Deak   drm/i915: fix the...
3469
3470
3471
3472
3473
3474
  	if (IS_VALLEYVIEW(dev)) {
  		if (mode & (GEN7_RC_CTL_TO_MODE | GEN6_RC_CTL_EI_MODE(1)))
  			mode = GEN6_RC_CTL_RC6_ENABLE;
  		else
  			mode = 0;
  	}
8dfd1f044   Daniel Vetter   drm/i915: Tune do...
3475
3476
3477
3478
3479
  	DRM_DEBUG_KMS("Enabling RC6 states: RC6 %s, RC6p %s, RC6pp %s
  ",
  		      (mode & GEN6_RC_CTL_RC6_ENABLE) ? "on" : "off",
  		      (mode & GEN6_RC_CTL_RC6p_ENABLE) ? "on" : "off",
  		      (mode & GEN6_RC_CTL_RC6pp_ENABLE) ? "on" : "off");
dc39fff72   Ben Widawsky   drm/i915: Print R...
3480
  }
e6069ca84   Imre Deak   drm/i915: sanitiz...
3481
  static int sanitize_rc6_option(const struct drm_device *dev, int enable_rc6)
2b4e57bd7   Eugeni Dodonov   drm/i915: move dr...
3482
  {
eb4926e4a   Damien Lespiau   drm/i915: Don't t...
3483
3484
3485
  	/* No RC6 before Ironlake */
  	if (INTEL_INFO(dev)->gen < 5)
  		return 0;
e6069ca84   Imre Deak   drm/i915: sanitiz...
3486
3487
3488
  	/* RC6 is only on Ironlake mobile not on desktop */
  	if (INTEL_INFO(dev)->gen == 5 && !IS_IRONLAKE_M(dev))
  		return 0;
456470eb5   Daniel Vetter   drm/i915: enable ...
3489
  	/* Respect the kernel parameter if it is set */
e6069ca84   Imre Deak   drm/i915: sanitiz...
3490
3491
3492
3493
3494
3495
3496
3497
3498
3499
  	if (enable_rc6 >= 0) {
  		int mask;
  
  		if (INTEL_INFO(dev)->gen == 6 || IS_IVYBRIDGE(dev))
  			mask = INTEL_RC6_ENABLE | INTEL_RC6p_ENABLE |
  			       INTEL_RC6pp_ENABLE;
  		else
  			mask = INTEL_RC6_ENABLE;
  
  		if ((enable_rc6 & mask) != enable_rc6)
8dfd1f044   Daniel Vetter   drm/i915: Tune do...
3500
3501
3502
  			DRM_DEBUG_KMS("Adjusting RC6 mask to %d (requested %d, valid %d)
  ",
  				      enable_rc6 & mask, enable_rc6, mask);
e6069ca84   Imre Deak   drm/i915: sanitiz...
3503
3504
3505
  
  		return enable_rc6 & mask;
  	}
2b4e57bd7   Eugeni Dodonov   drm/i915: move dr...
3506

6567d748c   Chris Wilson   Revert "drm/i915:...
3507
3508
3509
  	/* Disable RC6 on Ironlake */
  	if (INTEL_INFO(dev)->gen == 5)
  		return 0;
2b4e57bd7   Eugeni Dodonov   drm/i915: move dr...
3510

8bade1adc   Ben Widawsky   drm/i915: Stop pr...
3511
  	if (IS_IVYBRIDGE(dev))
cca84a1ff   Ben Widawsky   drm/i915: Clarify...
3512
  		return (INTEL_RC6_ENABLE | INTEL_RC6p_ENABLE);
8bade1adc   Ben Widawsky   drm/i915: Stop pr...
3513
3514
  
  	return INTEL_RC6_ENABLE;
2b4e57bd7   Eugeni Dodonov   drm/i915: move dr...
3515
  }
e6069ca84   Imre Deak   drm/i915: sanitiz...
3516
3517
3518
3519
/* Return the current RC6 enable mask taken from the i915.enable_rc6
 * module parameter (the @dev argument is unused here). */
int intel_enable_rc6(const struct drm_device *dev)
{
	return i915.enable_rc6;
}
0961021ae   Ben Widawsky   drm/i915/bdw: Imp...
3520
3521
3522
3523
3524
3525
  static void gen8_enable_rps_interrupts(struct drm_device *dev)
  {
  	struct drm_i915_private *dev_priv = dev->dev_private;
  
  	spin_lock_irq(&dev_priv->irq_lock);
  	WARN_ON(dev_priv->rps.pm_iir);
480c80338   Daniel Vetter   drm/i915: Use gen...
3526
  	gen8_enable_pm_irq(dev_priv, dev_priv->pm_rps_events);
0961021ae   Ben Widawsky   drm/i915/bdw: Imp...
3527
3528
3529
  	I915_WRITE(GEN8_GT_IIR(2), dev_priv->pm_rps_events);
  	spin_unlock_irq(&dev_priv->irq_lock);
  }
44fc7d5cf   Daniel Vetter   drm/i915: extract...
3530
3531
3532
3533
3534
  static void gen6_enable_rps_interrupts(struct drm_device *dev)
  {
  	struct drm_i915_private *dev_priv = dev->dev_private;
  
  	spin_lock_irq(&dev_priv->irq_lock);
a0b3335a2   Daniel Vetter   drm/i915: simplif...
3535
  	WARN_ON(dev_priv->rps.pm_iir);
480c80338   Daniel Vetter   drm/i915: Use gen...
3536
  	gen6_enable_pm_irq(dev_priv, dev_priv->pm_rps_events);
a6706b45a   Deepak S   drm/i915: Track t...
3537
  	I915_WRITE(GEN6_PMIIR, dev_priv->pm_rps_events);
44fc7d5cf   Daniel Vetter   drm/i915: extract...
3538
  	spin_unlock_irq(&dev_priv->irq_lock);
44fc7d5cf   Daniel Vetter   drm/i915: extract...
3539
  }
3280e8b08   Ben Widawsky   drm/i915/bdw: Ext...
3540
3541
3542
3543
3544
3545
3546
3547
3548
3549
3550
3551
3552
3553
3554
3555
3556
3557
3558
3559
  static void parse_rp_state_cap(struct drm_i915_private *dev_priv, u32 rp_state_cap)
  {
  	/* All of these values are in units of 50MHz */
  	dev_priv->rps.cur_freq		= 0;
  	/* static values from HW: RP0 < RPe < RP1 < RPn (min_freq) */
  	dev_priv->rps.rp1_freq		= (rp_state_cap >>  8) & 0xff;
  	dev_priv->rps.rp0_freq		= (rp_state_cap >>  0) & 0xff;
  	dev_priv->rps.min_freq		= (rp_state_cap >> 16) & 0xff;
  	/* XXX: only BYT has a special efficient freq */
  	dev_priv->rps.efficient_freq	= dev_priv->rps.rp1_freq;
  	/* hw_max = RP0 until we check for overclocking */
  	dev_priv->rps.max_freq		= dev_priv->rps.rp0_freq;
  
  	/* Preserve min/max settings in case of re-init */
  	if (dev_priv->rps.max_freq_softlimit == 0)
  		dev_priv->rps.max_freq_softlimit = dev_priv->rps.max_freq;
  
  	if (dev_priv->rps.min_freq_softlimit == 0)
  		dev_priv->rps.min_freq_softlimit = dev_priv->rps.min_freq;
  }
6edee7f3e   Ben Widawsky   drm/i915/bdw: Cre...
3560
3561
3562
  static void gen8_enable_rps(struct drm_device *dev)
  {
  	struct drm_i915_private *dev_priv = dev->dev_private;
a4872ba6d   Oscar Mateo   drm/i915: s/intel...
3563
  	struct intel_engine_cs *ring;
6edee7f3e   Ben Widawsky   drm/i915/bdw: Cre...
3564
3565
3566
3567
3568
3569
3570
3571
  	uint32_t rc6_mask = 0, rp_state_cap;
  	int unused;
  
  	/* 1a: Software RC state - RC0 */
  	I915_WRITE(GEN6_RC_STATE, 0);
  
  	/* 1c & 1d: Get forcewake during program sequence. Although the driver
  	 * hasn't enabled a state yet where we need forcewake, BIOS may have.*/
c8d9a5905   Deepak S   drm/i915: Add pow...
3572
  	gen6_gt_force_wake_get(dev_priv, FORCEWAKE_ALL);
6edee7f3e   Ben Widawsky   drm/i915/bdw: Cre...
3573
3574
3575
3576
3577
  
  	/* 2a: Disable RC states. */
  	I915_WRITE(GEN6_RC_CONTROL, 0);
  
  	rp_state_cap = I915_READ(GEN6_RP_STATE_CAP);
3280e8b08   Ben Widawsky   drm/i915/bdw: Ext...
3578
  	parse_rp_state_cap(dev_priv, rp_state_cap);
6edee7f3e   Ben Widawsky   drm/i915/bdw: Cre...
3579
3580
3581
3582
3583
3584
3585
3586
  
  	/* 2b: Program RC6 thresholds.*/
  	I915_WRITE(GEN6_RC6_WAKE_RATE_LIMIT, 40 << 16);
  	I915_WRITE(GEN6_RC_EVALUATION_INTERVAL, 125000); /* 12500 * 1280ns */
  	I915_WRITE(GEN6_RC_IDLE_HYSTERSIS, 25); /* 25 * 1280ns */
  	for_each_ring(ring, dev_priv, unused)
  		I915_WRITE(RING_MAX_IDLE(ring->mmio_base), 10);
  	I915_WRITE(GEN6_RC_SLEEP, 0);
0d68b25e9   Tom O'Rourke   drm/i915/bdw: Use...
3587
3588
3589
3590
  	if (IS_BROADWELL(dev))
  		I915_WRITE(GEN6_RC6_THRESHOLD, 625); /* 800us/1.28 for TO */
  	else
  		I915_WRITE(GEN6_RC6_THRESHOLD, 50000); /* 50/125ms per EI */
6edee7f3e   Ben Widawsky   drm/i915/bdw: Cre...
3591
3592
3593
3594
  
  	/* 3: Enable RC6 */
  	if (intel_enable_rc6(dev) & INTEL_RC6_ENABLE)
  		rc6_mask = GEN6_RC_CTL_RC6_ENABLE;
abbf9d2c4   Ben Widawsky   drm/i915/bdw: Use...
3595
  	intel_print_rc6_info(dev, rc6_mask);
0d68b25e9   Tom O'Rourke   drm/i915/bdw: Use...
3596
3597
3598
3599
3600
3601
3602
3603
  	if (IS_BROADWELL(dev))
  		I915_WRITE(GEN6_RC_CONTROL, GEN6_RC_CTL_HW_ENABLE |
  				GEN7_RC_CTL_TO_MODE |
  				rc6_mask);
  	else
  		I915_WRITE(GEN6_RC_CONTROL, GEN6_RC_CTL_HW_ENABLE |
  				GEN6_RC_CTL_EI_MODE(1) |
  				rc6_mask);
6edee7f3e   Ben Widawsky   drm/i915/bdw: Cre...
3604
3605
  
  	/* 4 Program defaults and thresholds for RPS*/
f9bdc5855   Ben Widawsky   drm/i915/bdw: Set...
3606
3607
3608
3609
  	I915_WRITE(GEN6_RPNSWREQ,
  		   HSW_FREQUENCY(dev_priv->rps.rp1_freq));
  	I915_WRITE(GEN6_RC_VIDEO_FREQ,
  		   HSW_FREQUENCY(dev_priv->rps.rp1_freq));
7526ed79b   Daniel Vetter   Revert "drm/i915/...
3610
3611
3612
3613
3614
3615
3616
3617
3618
3619
3620
3621
3622
3623
  	/* NB: Docs say 1s, and 1000000 - which aren't equivalent */
  	I915_WRITE(GEN6_RP_DOWN_TIMEOUT, 100000000 / 128); /* 1 second timeout */
  
  	/* Docs recommend 900MHz, and 300 MHz respectively */
  	I915_WRITE(GEN6_RP_INTERRUPT_LIMITS,
  		   dev_priv->rps.max_freq_softlimit << 24 |
  		   dev_priv->rps.min_freq_softlimit << 16);
  
  	I915_WRITE(GEN6_RP_UP_THRESHOLD, 7600000 / 128); /* 76ms busyness per EI, 90% */
  	I915_WRITE(GEN6_RP_DOWN_THRESHOLD, 31300000 / 128); /* 313ms busyness per EI, 70%*/
  	I915_WRITE(GEN6_RP_UP_EI, 66000); /* 84.48ms, XXX: random? */
  	I915_WRITE(GEN6_RP_DOWN_EI, 350000); /* 448ms, XXX: random? */
  
  	I915_WRITE(GEN6_RP_IDLE_HYSTERSIS, 10);
6edee7f3e   Ben Widawsky   drm/i915/bdw: Cre...
3624
3625
  
  	/* 5: Enable RPS */
7526ed79b   Daniel Vetter   Revert "drm/i915/...
3626
3627
3628
3629
3630
3631
3632
3633
3634
  	I915_WRITE(GEN6_RP_CONTROL,
  		   GEN6_RP_MEDIA_TURBO |
  		   GEN6_RP_MEDIA_HW_NORMAL_MODE |
  		   GEN6_RP_MEDIA_IS_GFX |
  		   GEN6_RP_ENABLE |
  		   GEN6_RP_UP_BUSY_AVG |
  		   GEN6_RP_DOWN_IDLE_AVG);
  
  	/* 6: Ring frequency + overclocking (our driver does this later */
6edee7f3e   Ben Widawsky   drm/i915/bdw: Cre...
3635
  	gen6_set_rps(dev, (I915_READ(GEN6_GT_PERF_STATUS) & 0xff00) >> 8);
7526ed79b   Daniel Vetter   Revert "drm/i915/...
3636
3637
  
  	gen8_enable_rps_interrupts(dev);
6edee7f3e   Ben Widawsky   drm/i915/bdw: Cre...
3638

c8d9a5905   Deepak S   drm/i915: Add pow...
3639
  	gen6_gt_force_wake_put(dev_priv, FORCEWAKE_ALL);
6edee7f3e   Ben Widawsky   drm/i915/bdw: Cre...
3640
  }
79f5b2c75   Daniel Vetter   drm/i915: make en...
3641
  static void gen6_enable_rps(struct drm_device *dev)
2b4e57bd7   Eugeni Dodonov   drm/i915: move dr...
3642
  {
79f5b2c75   Daniel Vetter   drm/i915: make en...
3643
  	struct drm_i915_private *dev_priv = dev->dev_private;
a4872ba6d   Oscar Mateo   drm/i915: s/intel...
3644
  	struct intel_engine_cs *ring;
2a5913a86   Ben Widawsky   drm/i915: remove ...
3645
  	u32 rp_state_cap;
d060c1695   Ben Widawsky   drm/i915: Reorgan...
3646
  	u32 rc6vids, pcu_mbox = 0, rc6_mask = 0;
2b4e57bd7   Eugeni Dodonov   drm/i915: move dr...
3647
  	u32 gtfifodbg;
2b4e57bd7   Eugeni Dodonov   drm/i915: move dr...
3648
  	int rc6_mode;
42c0526c9   Ben Widawsky   drm/i915: Extract...
3649
  	int i, ret;
2b4e57bd7   Eugeni Dodonov   drm/i915: move dr...
3650

4fc688ce7   Jesse Barnes   drm/i915: protect...
3651
  	WARN_ON(!mutex_is_locked(&dev_priv->rps.hw_lock));
79f5b2c75   Daniel Vetter   drm/i915: make en...
3652

2b4e57bd7   Eugeni Dodonov   drm/i915: move dr...
3653
3654
3655
3656
3657
3658
3659
  	/* Here begins a magic sequence of register writes to enable
  	 * auto-downclocking.
  	 *
  	 * Perhaps there might be some value in exposing these to
  	 * userspace...
  	 */
  	I915_WRITE(GEN6_RC_STATE, 0);
2b4e57bd7   Eugeni Dodonov   drm/i915: move dr...
3660
3661
3662
3663
3664
3665
3666
  
  	/* Clear the DBG now so we don't confuse earlier errors */
  	if ((gtfifodbg = I915_READ(GTFIFODBG))) {
  		DRM_ERROR("GT fifo had a previous error %x
  ", gtfifodbg);
  		I915_WRITE(GTFIFODBG, gtfifodbg);
  	}
c8d9a5905   Deepak S   drm/i915: Add pow...
3667
  	gen6_gt_force_wake_get(dev_priv, FORCEWAKE_ALL);
2b4e57bd7   Eugeni Dodonov   drm/i915: move dr...
3668

7b9e0ae6d   Chris Wilson   drm/i915: Always ...
3669
  	rp_state_cap = I915_READ(GEN6_RP_STATE_CAP);
7b9e0ae6d   Chris Wilson   drm/i915: Always ...
3670

3280e8b08   Ben Widawsky   drm/i915/bdw: Ext...
3671
  	parse_rp_state_cap(dev_priv, rp_state_cap);
dd0a1aa19   Jeff McGee   drm/i915: Restore...
3672

2b4e57bd7   Eugeni Dodonov   drm/i915: move dr...
3673
3674
3675
3676
3677
3678
3679
3680
  	/* disable the counters and set deterministic thresholds */
  	I915_WRITE(GEN6_RC_CONTROL, 0);
  
  	I915_WRITE(GEN6_RC1_WAKE_RATE_LIMIT, 1000 << 16);
  	I915_WRITE(GEN6_RC6_WAKE_RATE_LIMIT, 40 << 16 | 30);
  	I915_WRITE(GEN6_RC6pp_WAKE_RATE_LIMIT, 30);
  	I915_WRITE(GEN6_RC_EVALUATION_INTERVAL, 125000);
  	I915_WRITE(GEN6_RC_IDLE_HYSTERSIS, 25);
b4519513e   Chris Wilson   drm/i915: Introdu...
3681
3682
  	for_each_ring(ring, dev_priv, i)
  		I915_WRITE(RING_MAX_IDLE(ring->mmio_base), 10);
2b4e57bd7   Eugeni Dodonov   drm/i915: move dr...
3683
3684
3685
  
  	I915_WRITE(GEN6_RC_SLEEP, 0);
  	I915_WRITE(GEN6_RC1e_THRESHOLD, 1000);
29c78f609   Daniel Vetter   Partially revert ...
3686
  	if (IS_IVYBRIDGE(dev))
351aa5666   Stéphane Marchesin   drm/i915: tune th...
3687
3688
3689
  		I915_WRITE(GEN6_RC6_THRESHOLD, 125000);
  	else
  		I915_WRITE(GEN6_RC6_THRESHOLD, 50000);
0920a4871   Stéphane Marchesin   drm/i915: Increas...
3690
  	I915_WRITE(GEN6_RC6p_THRESHOLD, 150000);
2b4e57bd7   Eugeni Dodonov   drm/i915: move dr...
3691
  	I915_WRITE(GEN6_RC6pp_THRESHOLD, 64000); /* unused */
5a7dc92a0   Eugeni Dodonov   drm/i915: add RPS...
3692
  	/* Check if we are enabling RC6 */
2b4e57bd7   Eugeni Dodonov   drm/i915: move dr...
3693
3694
3695
  	rc6_mode = intel_enable_rc6(dev_priv->dev);
  	if (rc6_mode & INTEL_RC6_ENABLE)
  		rc6_mask |= GEN6_RC_CTL_RC6_ENABLE;
5a7dc92a0   Eugeni Dodonov   drm/i915: add RPS...
3696
3697
3698
3699
  	/* We don't use those on Haswell */
  	if (!IS_HASWELL(dev)) {
  		if (rc6_mode & INTEL_RC6p_ENABLE)
  			rc6_mask |= GEN6_RC_CTL_RC6p_ENABLE;
2b4e57bd7   Eugeni Dodonov   drm/i915: move dr...
3700

5a7dc92a0   Eugeni Dodonov   drm/i915: add RPS...
3701
3702
3703
  		if (rc6_mode & INTEL_RC6pp_ENABLE)
  			rc6_mask |= GEN6_RC_CTL_RC6pp_ENABLE;
  	}
2b4e57bd7   Eugeni Dodonov   drm/i915: move dr...
3704

dc39fff72   Ben Widawsky   drm/i915: Print R...
3705
  	intel_print_rc6_info(dev, rc6_mask);
2b4e57bd7   Eugeni Dodonov   drm/i915: move dr...
3706
3707
3708
3709
3710
  
  	I915_WRITE(GEN6_RC_CONTROL,
  		   rc6_mask |
  		   GEN6_RC_CTL_EI_MODE(1) |
  		   GEN6_RC_CTL_HW_ENABLE);
dd75fdc8c   Chris Wilson   drm/i915: Tweak R...
3711
3712
  	/* Power down if completely idle for over 50ms */
  	I915_WRITE(GEN6_RP_DOWN_TIMEOUT, 50000);
2b4e57bd7   Eugeni Dodonov   drm/i915: move dr...
3713
  	I915_WRITE(GEN6_RP_IDLE_HYSTERSIS, 10);
2b4e57bd7   Eugeni Dodonov   drm/i915: move dr...
3714

42c0526c9   Ben Widawsky   drm/i915: Extract...
3715
  	ret = sandybridge_pcode_write(dev_priv, GEN6_PCODE_WRITE_MIN_FREQ_TABLE, 0);
d060c1695   Ben Widawsky   drm/i915: Reorgan...
3716
  	if (ret)
42c0526c9   Ben Widawsky   drm/i915: Extract...
3717
3718
  		DRM_DEBUG_DRIVER("Failed to set the min frequency
  ");
d060c1695   Ben Widawsky   drm/i915: Reorgan...
3719
3720
3721
3722
3723
  
  	ret = sandybridge_pcode_read(dev_priv, GEN6_READ_OC_PARAMS, &pcu_mbox);
  	if (!ret && (pcu_mbox & (1<<31))) { /* OC supported */
  		DRM_DEBUG_DRIVER("Overclocking supported. Max: %dMHz, Overclock max: %dMHz
  ",
b39fb2977   Ben Widawsky   drm/i915: Rename ...
3724
  				 (dev_priv->rps.max_freq_softlimit & 0xff) * 50,
d060c1695   Ben Widawsky   drm/i915: Reorgan...
3725
  				 (pcu_mbox & 0xff) * 50);
b39fb2977   Ben Widawsky   drm/i915: Rename ...
3726
  		dev_priv->rps.max_freq = pcu_mbox & 0xff;
2b4e57bd7   Eugeni Dodonov   drm/i915: move dr...
3727
  	}
dd75fdc8c   Chris Wilson   drm/i915: Tweak R...
3728
  	dev_priv->rps.power = HIGH_POWER; /* force a reset */
b39fb2977   Ben Widawsky   drm/i915: Rename ...
3729
  	gen6_set_rps(dev_priv->dev, dev_priv->rps.min_freq_softlimit);
2b4e57bd7   Eugeni Dodonov   drm/i915: move dr...
3730

44fc7d5cf   Daniel Vetter   drm/i915: extract...
3731
  	gen6_enable_rps_interrupts(dev);
2b4e57bd7   Eugeni Dodonov   drm/i915: move dr...
3732

31643d54a   Ben Widawsky   drm/i915: Workaro...
3733
3734
3735
3736
3737
3738
3739
3740
3741
3742
3743
3744
3745
3746
3747
3748
  	rc6vids = 0;
  	ret = sandybridge_pcode_read(dev_priv, GEN6_PCODE_READ_RC6VIDS, &rc6vids);
  	if (IS_GEN6(dev) && ret) {
  		DRM_DEBUG_DRIVER("Couldn't check for BIOS workaround
  ");
  	} else if (IS_GEN6(dev) && (GEN6_DECODE_RC6_VID(rc6vids & 0xff) < 450)) {
  		DRM_DEBUG_DRIVER("You should update your BIOS. Correcting minimum rc6 voltage (%dmV->%dmV)
  ",
  			  GEN6_DECODE_RC6_VID(rc6vids & 0xff), 450);
  		rc6vids &= 0xffff00;
  		rc6vids |= GEN6_ENCODE_RC6_VID(450);
  		ret = sandybridge_pcode_write(dev_priv, GEN6_PCODE_WRITE_RC6VIDS, rc6vids);
  		if (ret)
  			DRM_ERROR("Couldn't fix incorrect rc6 voltage
  ");
  	}
c8d9a5905   Deepak S   drm/i915: Add pow...
3749
  	gen6_gt_force_wake_put(dev_priv, FORCEWAKE_ALL);
2b4e57bd7   Eugeni Dodonov   drm/i915: move dr...
3750
  }
c2bc2fc54   Imre Deak   drm/i915: factor ...
3751
  static void __gen6_update_ring_freq(struct drm_device *dev)
2b4e57bd7   Eugeni Dodonov   drm/i915: move dr...
3752
  {
79f5b2c75   Daniel Vetter   drm/i915: make en...
3753
  	struct drm_i915_private *dev_priv = dev->dev_private;
2b4e57bd7   Eugeni Dodonov   drm/i915: move dr...
3754
  	int min_freq = 15;
3ebecd07d   Chris Wilson   drm/i915: Scale r...
3755
3756
  	unsigned int gpu_freq;
  	unsigned int max_ia_freq, min_ring_freq;
2b4e57bd7   Eugeni Dodonov   drm/i915: move dr...
3757
  	int scaling_factor = 180;
eda796422   Ben Widawsky   drm/i915: Use the...
3758
  	struct cpufreq_policy *policy;
2b4e57bd7   Eugeni Dodonov   drm/i915: move dr...
3759

4fc688ce7   Jesse Barnes   drm/i915: protect...
3760
  	WARN_ON(!mutex_is_locked(&dev_priv->rps.hw_lock));
79f5b2c75   Daniel Vetter   drm/i915: make en...
3761

eda796422   Ben Widawsky   drm/i915: Use the...
3762
3763
3764
3765
3766
3767
3768
3769
3770
  	policy = cpufreq_cpu_get(0);
  	if (policy) {
  		max_ia_freq = policy->cpuinfo.max_freq;
  		cpufreq_cpu_put(policy);
  	} else {
  		/*
  		 * Default to measured freq if none found, PCU will ensure we
  		 * don't go over
  		 */
2b4e57bd7   Eugeni Dodonov   drm/i915: move dr...
3771
  		max_ia_freq = tsc_khz;
eda796422   Ben Widawsky   drm/i915: Use the...
3772
  	}
2b4e57bd7   Eugeni Dodonov   drm/i915: move dr...
3773
3774
3775
  
  	/* Convert from kHz to MHz */
  	max_ia_freq /= 1000;
153b4b954   Ben Widawsky   drm/i915: Convert...
3776
  	min_ring_freq = I915_READ(DCLK) & 0xf;
f6aca45c0   Ben Widawsky   drm/i915: Clean u...
3777
3778
  	/* convert DDR frequency from units of 266.6MHz to bandwidth */
  	min_ring_freq = mult_frac(min_ring_freq, 8, 3);
3ebecd07d   Chris Wilson   drm/i915: Scale r...
3779

2b4e57bd7   Eugeni Dodonov   drm/i915: move dr...
3780
3781
3782
3783
3784
  	/*
  	 * For each potential GPU frequency, load a ring frequency we'd like
  	 * to use for memory access.  We do this by specifying the IA frequency
  	 * the PCU should use as a reference to determine the ring frequency.
  	 */
b39fb2977   Ben Widawsky   drm/i915: Rename ...
3785
  	for (gpu_freq = dev_priv->rps.max_freq_softlimit; gpu_freq >= dev_priv->rps.min_freq_softlimit;
2b4e57bd7   Eugeni Dodonov   drm/i915: move dr...
3786
  	     gpu_freq--) {
b39fb2977   Ben Widawsky   drm/i915: Rename ...
3787
  		int diff = dev_priv->rps.max_freq_softlimit - gpu_freq;
3ebecd07d   Chris Wilson   drm/i915: Scale r...
3788
  		unsigned int ia_freq = 0, ring_freq = 0;
46c764d41   Ben Widawsky   drm/i915/bdw: Use...
3789
3790
3791
3792
  		if (INTEL_INFO(dev)->gen >= 8) {
  			/* max(2 * GT, DDR). NB: GT is 50MHz units */
  			ring_freq = max(min_ring_freq, gpu_freq);
  		} else if (IS_HASWELL(dev)) {
f6aca45c0   Ben Widawsky   drm/i915: Clean u...
3793
  			ring_freq = mult_frac(gpu_freq, 5, 4);
3ebecd07d   Chris Wilson   drm/i915: Scale r...
3794
3795
3796
3797
3798
3799
3800
3801
3802
3803
3804
3805
3806
3807
3808
3809
  			ring_freq = max(min_ring_freq, ring_freq);
  			/* leave ia_freq as the default, chosen by cpufreq */
  		} else {
  			/* On older processors, there is no separate ring
  			 * clock domain, so in order to boost the bandwidth
  			 * of the ring, we need to upclock the CPU (ia_freq).
  			 *
  			 * For GPU frequencies less than 750MHz,
  			 * just use the lowest ring freq.
  			 */
  			if (gpu_freq < min_freq)
  				ia_freq = 800;
  			else
  				ia_freq = max_ia_freq - ((diff * scaling_factor) / 2);
  			ia_freq = DIV_ROUND_CLOSEST(ia_freq, 100);
  		}
2b4e57bd7   Eugeni Dodonov   drm/i915: move dr...
3810

42c0526c9   Ben Widawsky   drm/i915: Extract...
3811
3812
  		sandybridge_pcode_write(dev_priv,
  					GEN6_PCODE_WRITE_MIN_FREQ_TABLE,
3ebecd07d   Chris Wilson   drm/i915: Scale r...
3813
3814
3815
  					ia_freq << GEN6_PCODE_FREQ_IA_RATIO_SHIFT |
  					ring_freq << GEN6_PCODE_FREQ_RING_RATIO_SHIFT |
  					gpu_freq);
2b4e57bd7   Eugeni Dodonov   drm/i915: move dr...
3816
  	}
2b4e57bd7   Eugeni Dodonov   drm/i915: move dr...
3817
  }
c2bc2fc54   Imre Deak   drm/i915: factor ...
3818
3819
3820
3821
3822
3823
3824
3825
3826
3827
3828
  void gen6_update_ring_freq(struct drm_device *dev)
  {
  	struct drm_i915_private *dev_priv = dev->dev_private;
  
  	if (INTEL_INFO(dev)->gen < 6 || IS_VALLEYVIEW(dev))
  		return;
  
  	mutex_lock(&dev_priv->rps.hw_lock);
  	__gen6_update_ring_freq(dev);
  	mutex_unlock(&dev_priv->rps.hw_lock);
  }
03af20458   Ville Syrjälä   drm/i915: Use the...
3829
  static int cherryview_rps_max_freq(struct drm_i915_private *dev_priv)
2b6b3a099   Deepak S   drm/i915/chv: Ena...
3830
3831
3832
3833
3834
3835
3836
3837
3838
3839
3840
3841
3842
3843
3844
3845
3846
3847
  {
  	u32 val, rp0;
  
  	val = vlv_punit_read(dev_priv, PUNIT_GPU_STATUS_REG);
  	rp0 = (val >> PUNIT_GPU_STATUS_MAX_FREQ_SHIFT) & PUNIT_GPU_STATUS_MAX_FREQ_MASK;
  
  	return rp0;
  }
  
  static int cherryview_rps_rpe_freq(struct drm_i915_private *dev_priv)
  {
  	u32 val, rpe;
  
  	val = vlv_punit_read(dev_priv, PUNIT_GPU_DUTYCYCLE_REG);
  	rpe = (val >> PUNIT_GPU_DUTYCYCLE_RPE_FREQ_SHIFT) & PUNIT_GPU_DUTYCYCLE_RPE_FREQ_MASK;
  
  	return rpe;
  }
7707df4ad   Deepak S   drm/i915: Add RP1...
3848
3849
3850
3851
3852
3853
3854
3855
3856
  static int cherryview_rps_guar_freq(struct drm_i915_private *dev_priv)
  {
  	u32 val, rp1;
  
  	val = vlv_punit_read(dev_priv, PUNIT_REG_GPU_FREQ_STS);
  	rp1 = (val >> PUNIT_GPU_STATUS_MAX_FREQ_SHIFT) & PUNIT_GPU_STATUS_MAX_FREQ_MASK;
  
  	return rp1;
  }
03af20458   Ville Syrjälä   drm/i915: Use the...
3857
  static int cherryview_rps_min_freq(struct drm_i915_private *dev_priv)
2b6b3a099   Deepak S   drm/i915/chv: Ena...
3858
3859
3860
3861
3862
3863
3864
  {
  	u32 val, rpn;
  
  	val = vlv_punit_read(dev_priv, PUNIT_GPU_STATUS_REG);
  	rpn = (val >> PUNIT_GPU_STATIS_GFX_MIN_FREQ_SHIFT) & PUNIT_GPU_STATUS_GFX_MIN_FREQ_MASK;
  	return rpn;
  }
f8f2b001a   Deepak S   drm/i915: Read gu...
3865
3866
3867
3868
3869
3870
3871
3872
3873
3874
  static int valleyview_rps_guar_freq(struct drm_i915_private *dev_priv)
  {
  	u32 val, rp1;
  
  	val = vlv_nc_read(dev_priv, IOSF_NC_FB_GFX_FREQ_FUSE);
  
  	rp1 = (val & FB_GFX_FGUARANTEED_FREQ_FUSE_MASK) >> FB_GFX_FGUARANTEED_FREQ_FUSE_SHIFT;
  
  	return rp1;
  }
03af20458   Ville Syrjälä   drm/i915: Use the...
3875
  static int valleyview_rps_max_freq(struct drm_i915_private *dev_priv)
0a073b843   Jesse Barnes   drm/i915: turbo &...
3876
3877
  {
  	u32 val, rp0;
64936258d   Jani Nikula   drm/i915: change ...
3878
  	val = vlv_nc_read(dev_priv, IOSF_NC_FB_GFX_FREQ_FUSE);
0a073b843   Jesse Barnes   drm/i915: turbo &...
3879
3880
3881
3882
3883
3884
3885
3886
3887
3888
3889
  
  	rp0 = (val & FB_GFX_MAX_FREQ_FUSE_MASK) >> FB_GFX_MAX_FREQ_FUSE_SHIFT;
  	/* Clamp to max */
  	rp0 = min_t(u32, rp0, 0xea);
  
  	return rp0;
  }
  
  static int valleyview_rps_rpe_freq(struct drm_i915_private *dev_priv)
  {
  	u32 val, rpe;
64936258d   Jani Nikula   drm/i915: change ...
3890
  	val = vlv_nc_read(dev_priv, IOSF_NC_FB_GFX_FMAX_FUSE_LO);
0a073b843   Jesse Barnes   drm/i915: turbo &...
3891
  	rpe = (val & FB_FMAX_VMIN_FREQ_LO_MASK) >> FB_FMAX_VMIN_FREQ_LO_SHIFT;
64936258d   Jani Nikula   drm/i915: change ...
3892
  	val = vlv_nc_read(dev_priv, IOSF_NC_FB_GFX_FMAX_FUSE_HI);
0a073b843   Jesse Barnes   drm/i915: turbo &...
3893
3894
3895
3896
  	rpe |= (val & FB_FMAX_VMIN_FREQ_HI_MASK) << 5;
  
  	return rpe;
  }
03af20458   Ville Syrjälä   drm/i915: Use the...
3897
  static int valleyview_rps_min_freq(struct drm_i915_private *dev_priv)
0a073b843   Jesse Barnes   drm/i915: turbo &...
3898
  {
64936258d   Jani Nikula   drm/i915: change ...
3899
  	return vlv_punit_read(dev_priv, PUNIT_REG_GPU_LFM) & 0xff;
0a073b843   Jesse Barnes   drm/i915: turbo &...
3900
  }
ae48434c2   Imre Deak   drm/i915: vlv: re...
3901
3902
3903
3904
3905
3906
3907
3908
  /* Check that the pctx buffer wasn't move under us. */
  static void valleyview_check_pctx(struct drm_i915_private *dev_priv)
  {
  	unsigned long pctx_addr = I915_READ(VLV_PCBR) & ~4095;
  
  	WARN_ON(pctx_addr != dev_priv->mm.stolen_base +
  			     dev_priv->vlv_pctx->stolen->start);
  }
38807746f   Deepak S   drm/i915/chv: Ena...
3909
3910
3911
3912
3913
3914
3915
3916
3917
3918
3919
3920
3921
3922
3923
3924
3925
3926
3927
3928
3929
3930
3931
3932
3933
3934
3935
3936
  
  /* Check that the pcbr address is not empty. */
  static void cherryview_check_pctx(struct drm_i915_private *dev_priv)
  {
  	unsigned long pctx_addr = I915_READ(VLV_PCBR) & ~4095;
  
  	WARN_ON((pctx_addr >> VLV_PCBR_ADDR_SHIFT) == 0);
  }
  
  static void cherryview_setup_pctx(struct drm_device *dev)
  {
  	struct drm_i915_private *dev_priv = dev->dev_private;
  	unsigned long pctx_paddr, paddr;
  	struct i915_gtt *gtt = &dev_priv->gtt;
  	u32 pcbr;
  	int pctx_size = 32*1024;
  
  	WARN_ON(!mutex_is_locked(&dev->struct_mutex));
  
  	pcbr = I915_READ(VLV_PCBR);
  	if ((pcbr >> VLV_PCBR_ADDR_SHIFT) == 0) {
  		paddr = (dev_priv->mm.stolen_base +
  			 (gtt->stolen_size - pctx_size));
  
  		pctx_paddr = (paddr & (~4095));
  		I915_WRITE(VLV_PCBR, pctx_paddr);
  	}
  }
c9cddffc6   Jesse Barnes   drm/i915: BIOS an...
3937
3938
3939
3940
3941
3942
3943
  static void valleyview_setup_pctx(struct drm_device *dev)
  {
  	struct drm_i915_private *dev_priv = dev->dev_private;
  	struct drm_i915_gem_object *pctx;
  	unsigned long pctx_paddr;
  	u32 pcbr;
  	int pctx_size = 24*1024;
17b0c1f78   Imre Deak   drm/i915: vlv: re...
3944
  	WARN_ON(!mutex_is_locked(&dev->struct_mutex));
c9cddffc6   Jesse Barnes   drm/i915: BIOS an...
3945
3946
3947
3948
3949
3950
3951
3952
  	pcbr = I915_READ(VLV_PCBR);
  	if (pcbr) {
  		/* BIOS set it up already, grab the pre-alloc'd space */
  		int pcbr_offset;
  
  		pcbr_offset = (pcbr & (~4095)) - dev_priv->mm.stolen_base;
  		pctx = i915_gem_object_create_stolen_for_preallocated(dev_priv->dev,
  								      pcbr_offset,
190d6cd5c   Daniel Vetter   drm/i915: less ma...
3953
  								      I915_GTT_OFFSET_NONE,
c9cddffc6   Jesse Barnes   drm/i915: BIOS an...
3954
3955
3956
3957
3958
3959
3960
3961
3962
3963
3964
3965
3966
3967
3968
3969
3970
3971
3972
3973
3974
3975
3976
3977
3978
  								      pctx_size);
  		goto out;
  	}
  
  	/*
  	 * From the Gunit register HAS:
  	 * The Gfx driver is expected to program this register and ensure
  	 * proper allocation within Gfx stolen memory.  For example, this
  	 * register should be programmed such than the PCBR range does not
  	 * overlap with other ranges, such as the frame buffer, protected
  	 * memory, or any other relevant ranges.
  	 */
  	pctx = i915_gem_object_create_stolen(dev, pctx_size);
  	if (!pctx) {
  		DRM_DEBUG("not enough stolen space for PCTX, disabling
  ");
  		return;
  	}
  
  	pctx_paddr = dev_priv->mm.stolen_base + pctx->stolen->start;
  	I915_WRITE(VLV_PCBR, pctx_paddr);
  
  out:
  	dev_priv->vlv_pctx = pctx;
  }
ae48434c2   Imre Deak   drm/i915: vlv: re...
3979
3980
3981
3982
3983
3984
3985
3986
3987
3988
  static void valleyview_cleanup_pctx(struct drm_device *dev)
  {
  	struct drm_i915_private *dev_priv = dev->dev_private;
  
  	if (WARN_ON(!dev_priv->vlv_pctx))
  		return;
  
  	drm_gem_object_unreference(&dev_priv->vlv_pctx->base);
  	dev_priv->vlv_pctx = NULL;
  }
4e80519e3   Imre Deak   drm/i915: vlv: se...
3989
3990
3991
  static void valleyview_init_gt_powersave(struct drm_device *dev)
  {
  	struct drm_i915_private *dev_priv = dev->dev_private;
2bb25c17b   Ville Syrjälä   drm/i915: Populat...
3992
  	u32 val;
4e80519e3   Imre Deak   drm/i915: vlv: se...
3993
3994
3995
3996
  
  	valleyview_setup_pctx(dev);
  
  	mutex_lock(&dev_priv->rps.hw_lock);
2bb25c17b   Ville Syrjälä   drm/i915: Populat...
3997
3998
3999
4000
4001
4002
4003
4004
4005
4006
4007
4008
4009
4010
  	val = vlv_punit_read(dev_priv, PUNIT_REG_GPU_FREQ_STS);
  	switch ((val >> 6) & 3) {
  	case 0:
  	case 1:
  		dev_priv->mem_freq = 800;
  		break;
  	case 2:
  		dev_priv->mem_freq = 1066;
  		break;
  	case 3:
  		dev_priv->mem_freq = 1333;
  		break;
  	}
  	DRM_DEBUG_DRIVER("DDR speed: %d MHz", dev_priv->mem_freq);
4e80519e3   Imre Deak   drm/i915: vlv: se...
4011
4012
4013
4014
4015
4016
4017
4018
4019
4020
4021
4022
  	dev_priv->rps.max_freq = valleyview_rps_max_freq(dev_priv);
  	dev_priv->rps.rp0_freq = dev_priv->rps.max_freq;
  	DRM_DEBUG_DRIVER("max GPU freq: %d MHz (%u)
  ",
  			 vlv_gpu_freq(dev_priv, dev_priv->rps.max_freq),
  			 dev_priv->rps.max_freq);
  
  	dev_priv->rps.efficient_freq = valleyview_rps_rpe_freq(dev_priv);
  	DRM_DEBUG_DRIVER("RPe GPU freq: %d MHz (%u)
  ",
  			 vlv_gpu_freq(dev_priv, dev_priv->rps.efficient_freq),
  			 dev_priv->rps.efficient_freq);
f8f2b001a   Deepak S   drm/i915: Read gu...
4023
4024
4025
4026
4027
  	dev_priv->rps.rp1_freq = valleyview_rps_guar_freq(dev_priv);
  	DRM_DEBUG_DRIVER("RP1(Guar Freq) GPU freq: %d MHz (%u)
  ",
  			 vlv_gpu_freq(dev_priv, dev_priv->rps.rp1_freq),
  			 dev_priv->rps.rp1_freq);
4e80519e3   Imre Deak   drm/i915: vlv: se...
4028
4029
4030
4031
4032
4033
4034
4035
4036
4037
4038
4039
4040
4041
4042
  	dev_priv->rps.min_freq = valleyview_rps_min_freq(dev_priv);
  	DRM_DEBUG_DRIVER("min GPU freq: %d MHz (%u)
  ",
  			 vlv_gpu_freq(dev_priv, dev_priv->rps.min_freq),
  			 dev_priv->rps.min_freq);
  
  	/* Preserve min/max settings in case of re-init */
  	if (dev_priv->rps.max_freq_softlimit == 0)
  		dev_priv->rps.max_freq_softlimit = dev_priv->rps.max_freq;
  
  	if (dev_priv->rps.min_freq_softlimit == 0)
  		dev_priv->rps.min_freq_softlimit = dev_priv->rps.min_freq;
  
  	mutex_unlock(&dev_priv->rps.hw_lock);
  }
38807746f   Deepak S   drm/i915/chv: Ena...
4043
4044
  static void cherryview_init_gt_powersave(struct drm_device *dev)
  {
2b6b3a099   Deepak S   drm/i915/chv: Ena...
4045
  	struct drm_i915_private *dev_priv = dev->dev_private;
2bb25c17b   Ville Syrjälä   drm/i915: Populat...
4046
  	u32 val;
2b6b3a099   Deepak S   drm/i915/chv: Ena...
4047

38807746f   Deepak S   drm/i915/chv: Ena...
4048
  	cherryview_setup_pctx(dev);
2b6b3a099   Deepak S   drm/i915/chv: Ena...
4049
4050
  
  	mutex_lock(&dev_priv->rps.hw_lock);
2bb25c17b   Ville Syrjälä   drm/i915: Populat...
4051
4052
4053
4054
4055
4056
4057
4058
4059
4060
4061
4062
4063
4064
4065
4066
4067
4068
4069
4070
4071
4072
4073
4074
4075
  	val = vlv_punit_read(dev_priv, CCK_FUSE_REG);
  	switch ((val >> 2) & 0x7) {
  	case 0:
  	case 1:
  		dev_priv->rps.cz_freq = 200;
  		dev_priv->mem_freq = 1600;
  		break;
  	case 2:
  		dev_priv->rps.cz_freq = 267;
  		dev_priv->mem_freq = 1600;
  		break;
  	case 3:
  		dev_priv->rps.cz_freq = 333;
  		dev_priv->mem_freq = 2000;
  		break;
  	case 4:
  		dev_priv->rps.cz_freq = 320;
  		dev_priv->mem_freq = 1600;
  		break;
  	case 5:
  		dev_priv->rps.cz_freq = 400;
  		dev_priv->mem_freq = 1600;
  		break;
  	}
  	DRM_DEBUG_DRIVER("DDR speed: %d MHz", dev_priv->mem_freq);
2b6b3a099   Deepak S   drm/i915/chv: Ena...
4076
4077
4078
4079
4080
4081
4082
4083
4084
4085
4086
4087
  	dev_priv->rps.max_freq = cherryview_rps_max_freq(dev_priv);
  	dev_priv->rps.rp0_freq = dev_priv->rps.max_freq;
  	DRM_DEBUG_DRIVER("max GPU freq: %d MHz (%u)
  ",
  			 vlv_gpu_freq(dev_priv, dev_priv->rps.max_freq),
  			 dev_priv->rps.max_freq);
  
  	dev_priv->rps.efficient_freq = cherryview_rps_rpe_freq(dev_priv);
  	DRM_DEBUG_DRIVER("RPe GPU freq: %d MHz (%u)
  ",
  			 vlv_gpu_freq(dev_priv, dev_priv->rps.efficient_freq),
  			 dev_priv->rps.efficient_freq);
7707df4ad   Deepak S   drm/i915: Add RP1...
4088
4089
4090
4091
4092
  	dev_priv->rps.rp1_freq = cherryview_rps_guar_freq(dev_priv);
  	DRM_DEBUG_DRIVER("RP1(Guar) GPU freq: %d MHz (%u)
  ",
  			 vlv_gpu_freq(dev_priv, dev_priv->rps.rp1_freq),
  			 dev_priv->rps.rp1_freq);
2b6b3a099   Deepak S   drm/i915/chv: Ena...
4093
4094
4095
4096
4097
  	dev_priv->rps.min_freq = cherryview_rps_min_freq(dev_priv);
  	DRM_DEBUG_DRIVER("min GPU freq: %d MHz (%u)
  ",
  			 vlv_gpu_freq(dev_priv, dev_priv->rps.min_freq),
  			 dev_priv->rps.min_freq);
1c14762d0   Ville Syrjälä   drm/i915: Warn ab...
4098
4099
4100
4101
4102
4103
  	WARN_ONCE((dev_priv->rps.max_freq |
  		   dev_priv->rps.efficient_freq |
  		   dev_priv->rps.rp1_freq |
  		   dev_priv->rps.min_freq) & 1,
  		  "Odd GPU freq values
  ");
2b6b3a099   Deepak S   drm/i915/chv: Ena...
4104
4105
4106
4107
4108
4109
4110
4111
  	/* Preserve min/max settings in case of re-init */
  	if (dev_priv->rps.max_freq_softlimit == 0)
  		dev_priv->rps.max_freq_softlimit = dev_priv->rps.max_freq;
  
  	if (dev_priv->rps.min_freq_softlimit == 0)
  		dev_priv->rps.min_freq_softlimit = dev_priv->rps.min_freq;
  
  	mutex_unlock(&dev_priv->rps.hw_lock);
38807746f   Deepak S   drm/i915/chv: Ena...
4112
  }
4e80519e3   Imre Deak   drm/i915: vlv: se...
4113
4114
4115
4116
  static void valleyview_cleanup_gt_powersave(struct drm_device *dev)
  {
  	valleyview_cleanup_pctx(dev);
  }
38807746f   Deepak S   drm/i915/chv: Ena...
4117
4118
4119
4120
  static void cherryview_enable_rps(struct drm_device *dev)
  {
  	struct drm_i915_private *dev_priv = dev->dev_private;
  	struct intel_engine_cs *ring;
2b6b3a099   Deepak S   drm/i915/chv: Ena...
4121
  	u32 gtfifodbg, val, rc6_mode = 0, pcbr;
38807746f   Deepak S   drm/i915/chv: Ena...
4122
4123
4124
4125
4126
4127
4128
4129
4130
4131
4132
4133
4134
4135
4136
4137
4138
4139
4140
4141
4142
4143
4144
4145
4146
4147
4148
4149
4150
4151
4152
4153
4154
4155
4156
4157
4158
4159
4160
4161
4162
4163
4164
4165
4166
4167
4168
  	int i;
  
  	WARN_ON(!mutex_is_locked(&dev_priv->rps.hw_lock));
  
  	gtfifodbg = I915_READ(GTFIFODBG);
  	if (gtfifodbg) {
  		DRM_DEBUG_DRIVER("GT fifo had a previous error %x
  ",
  				 gtfifodbg);
  		I915_WRITE(GTFIFODBG, gtfifodbg);
  	}
  
  	cherryview_check_pctx(dev_priv);
  
  	/* 1a & 1b: Get forcewake during program sequence. Although the driver
  	 * hasn't enabled a state yet where we need forcewake, BIOS may have.*/
  	gen6_gt_force_wake_get(dev_priv, FORCEWAKE_ALL);
  
  	/* 2a: Program RC6 thresholds.*/
  	I915_WRITE(GEN6_RC6_WAKE_RATE_LIMIT, 40 << 16);
  	I915_WRITE(GEN6_RC_EVALUATION_INTERVAL, 125000); /* 12500 * 1280ns */
  	I915_WRITE(GEN6_RC_IDLE_HYSTERSIS, 25); /* 25 * 1280ns */
  
  	for_each_ring(ring, dev_priv, i)
  		I915_WRITE(RING_MAX_IDLE(ring->mmio_base), 10);
  	I915_WRITE(GEN6_RC_SLEEP, 0);
  
  	I915_WRITE(GEN6_RC6_THRESHOLD, 50000); /* 50/125ms per EI */
  
  	/* allows RC6 residency counter to work */
  	I915_WRITE(VLV_COUNTER_CONTROL,
  		   _MASKED_BIT_ENABLE(VLV_COUNT_RANGE_HIGH |
  				      VLV_MEDIA_RC6_COUNT_EN |
  				      VLV_RENDER_RC6_COUNT_EN));
  
  	/* For now we assume BIOS is allocating and populating the PCBR  */
  	pcbr = I915_READ(VLV_PCBR);
  
  	DRM_DEBUG_DRIVER("PCBR offset : 0x%x
  ", pcbr);
  
  	/* 3: Enable RC6 */
  	if ((intel_enable_rc6(dev) & INTEL_RC6_ENABLE) &&
  						(pcbr >> VLV_PCBR_ADDR_SHIFT))
  		rc6_mode = GEN6_RC_CTL_EI_MODE(1);
  
  	I915_WRITE(GEN6_RC_CONTROL, rc6_mode);
2b6b3a099   Deepak S   drm/i915/chv: Ena...
4169
4170
4171
4172
4173
4174
4175
  	/* 4 Program defaults and thresholds for RPS*/
  	I915_WRITE(GEN6_RP_UP_THRESHOLD, 59400);
  	I915_WRITE(GEN6_RP_DOWN_THRESHOLD, 245000);
  	I915_WRITE(GEN6_RP_UP_EI, 66000);
  	I915_WRITE(GEN6_RP_DOWN_EI, 350000);
  
  	I915_WRITE(GEN6_RP_IDLE_HYSTERSIS, 10);
7405f42c7   Tom O'Rourke   drm/i915/chv: Fix...
4176
4177
4178
  	/* WaDisablePwrmtrEvent:chv (pre-production hw) */
  	I915_WRITE(0xA80C, I915_READ(0xA80C) & 0x00ffffff);
  	I915_WRITE(0xA810, I915_READ(0xA810) & 0xffffff00);
2b6b3a099   Deepak S   drm/i915/chv: Ena...
4179
4180
4181
  	/* 5: Enable RPS */
  	I915_WRITE(GEN6_RP_CONTROL,
  		   GEN6_RP_MEDIA_HW_NORMAL_MODE |
7405f42c7   Tom O'Rourke   drm/i915/chv: Fix...
4182
  		   GEN6_RP_MEDIA_IS_GFX | /* WaSetMaskForGfxBusyness:chv (pre-production hw ?) */
2b6b3a099   Deepak S   drm/i915/chv: Ena...
4183
4184
4185
4186
4187
4188
4189
4190
4191
4192
4193
4194
4195
4196
4197
4198
4199
4200
4201
4202
4203
4204
4205
  		   GEN6_RP_ENABLE |
  		   GEN6_RP_UP_BUSY_AVG |
  		   GEN6_RP_DOWN_IDLE_AVG);
  
  	val = vlv_punit_read(dev_priv, PUNIT_REG_GPU_FREQ_STS);
  
  	DRM_DEBUG_DRIVER("GPLL enabled? %s
  ", val & 0x10 ? "yes" : "no");
  	DRM_DEBUG_DRIVER("GPU status: 0x%08x
  ", val);
  
  	dev_priv->rps.cur_freq = (val >> 8) & 0xff;
  	DRM_DEBUG_DRIVER("current GPU freq: %d MHz (%u)
  ",
  			 vlv_gpu_freq(dev_priv, dev_priv->rps.cur_freq),
  			 dev_priv->rps.cur_freq);
  
  	DRM_DEBUG_DRIVER("setting GPU freq to %d MHz (%u)
  ",
  			 vlv_gpu_freq(dev_priv, dev_priv->rps.efficient_freq),
  			 dev_priv->rps.efficient_freq);
  
  	valleyview_set_rps(dev_priv->dev, dev_priv->rps.efficient_freq);
3497a5620   Deepak S   drm/i915/chv: Add...
4206
  	gen8_enable_rps_interrupts(dev);
38807746f   Deepak S   drm/i915/chv: Ena...
4207
4208
  	gen6_gt_force_wake_put(dev_priv, FORCEWAKE_ALL);
  }
0a073b843   Jesse Barnes   drm/i915: turbo &...
4209
4210
4211
  static void valleyview_enable_rps(struct drm_device *dev)
  {
  	struct drm_i915_private *dev_priv = dev->dev_private;
a4872ba6d   Oscar Mateo   drm/i915: s/intel...
4212
  	struct intel_engine_cs *ring;
2a5913a86   Ben Widawsky   drm/i915: remove ...
4213
  	u32 gtfifodbg, val, rc6_mode = 0;
0a073b843   Jesse Barnes   drm/i915: turbo &...
4214
4215
4216
  	int i;
  
  	WARN_ON(!mutex_is_locked(&dev_priv->rps.hw_lock));
ae48434c2   Imre Deak   drm/i915: vlv: re...
4217
  	valleyview_check_pctx(dev_priv);
0a073b843   Jesse Barnes   drm/i915: turbo &...
4218
  	if ((gtfifodbg = I915_READ(GTFIFODBG))) {
f7d85c1ed   Jesse Barnes   drm/i915/vlv: red...
4219
4220
4221
  		DRM_DEBUG_DRIVER("GT fifo had a previous error %x
  ",
  				 gtfifodbg);
0a073b843   Jesse Barnes   drm/i915: turbo &...
4222
4223
  		I915_WRITE(GTFIFODBG, gtfifodbg);
  	}
c8d9a5905   Deepak S   drm/i915: Add pow...
4224
4225
  	/* If VLV, Forcewake all wells, else re-direct to regular path */
  	gen6_gt_force_wake_get(dev_priv, FORCEWAKE_ALL);
0a073b843   Jesse Barnes   drm/i915: turbo &...
4226
4227
4228
4229
4230
4231
4232
  
  	I915_WRITE(GEN6_RP_UP_THRESHOLD, 59400);
  	I915_WRITE(GEN6_RP_DOWN_THRESHOLD, 245000);
  	I915_WRITE(GEN6_RP_UP_EI, 66000);
  	I915_WRITE(GEN6_RP_DOWN_EI, 350000);
  
  	I915_WRITE(GEN6_RP_IDLE_HYSTERSIS, 10);
31685c258   Deepak S   drm/i915/vlv: WA ...
4233
  	I915_WRITE(GEN6_RP_DOWN_TIMEOUT, 0xf4240);
0a073b843   Jesse Barnes   drm/i915: turbo &...
4234
4235
4236
4237
4238
4239
4240
4241
4242
4243
4244
4245
4246
4247
4248
  
  	I915_WRITE(GEN6_RP_CONTROL,
  		   GEN6_RP_MEDIA_TURBO |
  		   GEN6_RP_MEDIA_HW_NORMAL_MODE |
  		   GEN6_RP_MEDIA_IS_GFX |
  		   GEN6_RP_ENABLE |
  		   GEN6_RP_UP_BUSY_AVG |
  		   GEN6_RP_DOWN_IDLE_CONT);
  
  	I915_WRITE(GEN6_RC6_WAKE_RATE_LIMIT, 0x00280000);
  	I915_WRITE(GEN6_RC_EVALUATION_INTERVAL, 125000);
  	I915_WRITE(GEN6_RC_IDLE_HYSTERSIS, 25);
  
  	for_each_ring(ring, dev_priv, i)
  		I915_WRITE(RING_MAX_IDLE(ring->mmio_base), 10);
2f0aa3042   Jesse Barnes   drm/i915/vlv: use...
4249
  	I915_WRITE(GEN6_RC6_THRESHOLD, 0x557);
0a073b843   Jesse Barnes   drm/i915: turbo &...
4250
4251
  
  	/* allows RC6 residency counter to work */
49798eb2f   Jesse Barnes   drm/i915/vlv: use...
4252
  	I915_WRITE(VLV_COUNTER_CONTROL,
31685c258   Deepak S   drm/i915/vlv: WA ...
4253
4254
  		   _MASKED_BIT_ENABLE(VLV_MEDIA_RC0_COUNT_EN |
  				      VLV_RENDER_RC0_COUNT_EN |
49798eb2f   Jesse Barnes   drm/i915/vlv: use...
4255
4256
  				      VLV_MEDIA_RC6_COUNT_EN |
  				      VLV_RENDER_RC6_COUNT_EN));
31685c258   Deepak S   drm/i915/vlv: WA ...
4257

a2b23fe04   Jesse Barnes   drm/i915/vlv: hon...
4258
  	if (intel_enable_rc6(dev) & INTEL_RC6_ENABLE)
6b88f2956   Jesse Barnes   drm/i915/vlv: use...
4259
  		rc6_mode = GEN7_RC_CTL_TO_MODE | VLV_RC_CTL_CTX_RST_PARALLEL;
dc39fff72   Ben Widawsky   drm/i915: Print R...
4260
4261
  
  	intel_print_rc6_info(dev, rc6_mode);
a2b23fe04   Jesse Barnes   drm/i915/vlv: hon...
4262
  	I915_WRITE(GEN6_RC_CONTROL, rc6_mode);
0a073b843   Jesse Barnes   drm/i915: turbo &...
4263

64936258d   Jani Nikula   drm/i915: change ...
4264
  	val = vlv_punit_read(dev_priv, PUNIT_REG_GPU_FREQ_STS);
0a073b843   Jesse Barnes   drm/i915: turbo &...
4265
4266
4267
4268
4269
  
  	DRM_DEBUG_DRIVER("GPLL enabled? %s
  ", val & 0x10 ? "yes" : "no");
  	DRM_DEBUG_DRIVER("GPU status: 0x%08x
  ", val);
b39fb2977   Ben Widawsky   drm/i915: Rename ...
4270
  	dev_priv->rps.cur_freq = (val >> 8) & 0xff;
73008b989   Ville Syrjälä   drm/i915: Clean u...
4271
4272
  	DRM_DEBUG_DRIVER("current GPU freq: %d MHz (%u)
  ",
b39fb2977   Ben Widawsky   drm/i915: Rename ...
4273
4274
  			 vlv_gpu_freq(dev_priv, dev_priv->rps.cur_freq),
  			 dev_priv->rps.cur_freq);
0a073b843   Jesse Barnes   drm/i915: turbo &...
4275

73008b989   Ville Syrjälä   drm/i915: Clean u...
4276
4277
  	DRM_DEBUG_DRIVER("setting GPU freq to %d MHz (%u)
  ",
b39fb2977   Ben Widawsky   drm/i915: Rename ...
4278
4279
  			 vlv_gpu_freq(dev_priv, dev_priv->rps.efficient_freq),
  			 dev_priv->rps.efficient_freq);
0a073b843   Jesse Barnes   drm/i915: turbo &...
4280

b39fb2977   Ben Widawsky   drm/i915: Rename ...
4281
  	valleyview_set_rps(dev_priv->dev, dev_priv->rps.efficient_freq);
0a073b843   Jesse Barnes   drm/i915: turbo &...
4282

44fc7d5cf   Daniel Vetter   drm/i915: extract...
4283
  	gen6_enable_rps_interrupts(dev);
0a073b843   Jesse Barnes   drm/i915: turbo &...
4284

c8d9a5905   Deepak S   drm/i915: Add pow...
4285
  	gen6_gt_force_wake_put(dev_priv, FORCEWAKE_ALL);
0a073b843   Jesse Barnes   drm/i915: turbo &...
4286
  }
930ebb462   Daniel Vetter   drm/i915: fix up ...
4287
/*
 * Release the RC6 context pages allocated by ironlake_setup_rc6().
 *
 * Unpins and drops the reference on both the render context page and the
 * power context page, clearing the cached pointers so a later setup call
 * can re-allocate them. Safe to call if either (or both) was never
 * allocated.
 */
void ironlake_teardown_rc6(struct drm_device *dev)
{
	struct drm_i915_private *dev_priv = dev->dev_private;

	if (dev_priv->ips.renderctx) {
		/* Unpin before dropping the last reference. */
		i915_gem_object_ggtt_unpin(dev_priv->ips.renderctx);
		drm_gem_object_unreference(&dev_priv->ips.renderctx->base);
		dev_priv->ips.renderctx = NULL;
	}

	if (dev_priv->ips.pwrctx) {
		i915_gem_object_ggtt_unpin(dev_priv->ips.pwrctx);
		drm_gem_object_unreference(&dev_priv->ips.pwrctx->base);
		dev_priv->ips.pwrctx = NULL;
	}
}
930ebb462   Daniel Vetter   drm/i915: fix up ...
4301
/*
 * Disable RC6 on Ironlake.
 *
 * If a power context is installed (PWRCTXA non-zero), force the GPU out of
 * RC6 via RCX_SW_EXIT, wait (up to 50ms) for the render standby state
 * machine to report "on", clear the power context address, and finally
 * restore RSTDBYCTL. The POSTING_READs flush each write before the next
 * step; the ordering of these register accesses is part of the hardware
 * sequence and must not be changed.
 */
static void ironlake_disable_rc6(struct drm_device *dev)
{
	struct drm_i915_private *dev_priv = dev->dev_private;

	if (I915_READ(PWRCTXA)) {
		/* Wake the GPU, prevent RC6, then restore RSTDBYCTL */
		I915_WRITE(RSTDBYCTL, I915_READ(RSTDBYCTL) | RCX_SW_EXIT);
		wait_for(((I915_READ(RSTDBYCTL) & RSX_STATUS_MASK) == RSX_STATUS_ON),
			 50);

		I915_WRITE(PWRCTXA, 0);
		POSTING_READ(PWRCTXA);

		I915_WRITE(RSTDBYCTL, I915_READ(RSTDBYCTL) & ~RCX_SW_EXIT);
		POSTING_READ(RSTDBYCTL);
	}
}
  
/*
 * Allocate the context pages needed for Ironlake RC6.
 *
 * Lazily allocates the render context and power context pages (existing
 * allocations are reused). Returns 0 on success or -ENOMEM on allocation
 * failure; if only the second allocation fails, the first is released via
 * ironlake_teardown_rc6() so no partial state is left behind.
 */
static int ironlake_setup_rc6(struct drm_device *dev)
{
	struct drm_i915_private *dev_priv = dev->dev_private;

	if (dev_priv->ips.renderctx == NULL)
		dev_priv->ips.renderctx = intel_alloc_context_page(dev);
	if (!dev_priv->ips.renderctx)
		return -ENOMEM;

	if (dev_priv->ips.pwrctx == NULL)
		dev_priv->ips.pwrctx = intel_alloc_context_page(dev);
	if (!dev_priv->ips.pwrctx) {
		/* Don't leak the already-allocated render context. */
		ironlake_teardown_rc6(dev);
		return -ENOMEM;
	}

	return 0;
}
930ebb462   Daniel Vetter   drm/i915: fix up ...
4335
/*
 * Enable RC6 render power saving on Ironlake.
 *
 * Sets up the RC6 context pages, then emits a MI_SET_CONTEXT sequence on
 * the render ring pointing at the render context so the GPU has a place to
 * save state, waits for the ring to go idle, and finally installs the power
 * context (PWRCTXA) and clears RCX_SW_EXIT to let the hardware enter RC6.
 *
 * Must be called with dev->struct_mutex held. On any failure the context
 * pages are torn down again and RC6 is left disabled. The exact order of
 * the emitted ring commands and the final register writes is part of the
 * hardware enabling sequence.
 */
static void ironlake_enable_rc6(struct drm_device *dev)
{
	struct drm_i915_private *dev_priv = dev->dev_private;
	struct intel_engine_cs *ring = &dev_priv->ring[RCS];
	bool was_interruptible;
	int ret;

	/* rc6 disabled by default due to repeated reports of hanging during
	 * boot and resume.
	 */
	if (!intel_enable_rc6(dev))
		return;

	WARN_ON(!mutex_is_locked(&dev->struct_mutex));

	ret = ironlake_setup_rc6(dev);
	if (ret)
		return;

	/* Run the ring wait below uninterruptibly; restore the flag after. */
	was_interruptible = dev_priv->mm.interruptible;
	dev_priv->mm.interruptible = false;

	/*
	 * GPU can automatically power down the render unit if given a page
	 * to save state.
	 */
	ret = intel_ring_begin(ring, 6);
	if (ret) {
		ironlake_teardown_rc6(dev);
		dev_priv->mm.interruptible = was_interruptible;
		return;
	}

	intel_ring_emit(ring, MI_SUSPEND_FLUSH | MI_SUSPEND_FLUSH_EN);
	intel_ring_emit(ring, MI_SET_CONTEXT);
	intel_ring_emit(ring, i915_gem_obj_ggtt_offset(dev_priv->ips.renderctx) |
			MI_MM_SPACE_GTT |
			MI_SAVE_EXT_STATE_EN |
			MI_RESTORE_EXT_STATE_EN |
			MI_RESTORE_INHIBIT);
	intel_ring_emit(ring, MI_SUSPEND_FLUSH);
	intel_ring_emit(ring, MI_NOOP);
	intel_ring_emit(ring, MI_FLUSH);
	intel_ring_advance(ring);

	/*
	 * Wait for the command parser to advance past MI_SET_CONTEXT. The HW
	 * does an implicit flush, combined with MI_FLUSH above, it should be
	 * safe to assume that renderctx is valid
	 */
	ret = intel_ring_idle(ring);
	dev_priv->mm.interruptible = was_interruptible;
	if (ret) {
		DRM_ERROR("failed to enable ironlake power savings\n");
		ironlake_teardown_rc6(dev);
		return;
	}

	/* Point the hardware at the power context and allow RC6 entry. */
	I915_WRITE(PWRCTXA, i915_gem_obj_ggtt_offset(dev_priv->ips.pwrctx) | PWRCTX_EN);
	I915_WRITE(RSTDBYCTL, I915_READ(RSTDBYCTL) & ~RCX_SW_EXIT);

	intel_print_rc6_info(dev, GEN6_RC_CTL_RC6_ENABLE);
}
dde18883d   Eugeni Dodonov   drm/i915: move em...
4394
4395
4396
4397
4398
4399
4400
4401
4402
4403
4404
4405
4406
4407
  static unsigned long intel_pxfreq(u32 vidfreq)
  {
  	unsigned long freq;
  	int div = (vidfreq & 0x3f0000) >> 16;
  	int post = (vidfreq & 0x3000) >> 12;
  	int pre = (vidfreq & 0x7);
  
  	if (!pre)
  		return 0;
  
  	freq = ((div * 133333) / ((1<<post) * pre));
  
  	return freq;
  }
eb48eb005   Daniel Vetter   drm/i915: move th...
4408
4409
4410
4411
4412
4413
4414
4415
4416
4417
4418
4419
4420
/*
 * Empirically derived chipset power parameters, matched against the
 * platform's (c_m, r_t) configuration in __i915_chipset_val():
 *   i - selector matched against dev_priv->ips.c_m
 *   t - selector matched against dev_priv->ips.r_t (looks like a memory
 *       frequency in MHz -- unverified)
 *   m, c - slope and intercept used in the power estimate (ret = m*diff + c)
 */
static const struct cparams {
	u16 i;
	u16 t;
	u16 m;
	u16 c;
} cparams[] = {
	{ 1, 1333, 301, 28664 },
	{ 1, 1066, 294, 24460 },
	{ 1, 800, 294, 25192 },
	{ 0, 1333, 276, 27605 },
	{ 0, 1066, 276, 27605 },
	{ 0, 800, 231, 23784 },
};
f531dcb23   Chris Wilson   drm/i915: Wrap ex...
4421
/*
 * Estimate current chipset power draw from the DMI/DDR/CSI energy
 * counters, rate-limited to one fresh sample per 10ms.
 *
 * Caller must hold mchdev_lock (asserted); the function both reads and
 * updates the ips.last_count1/last_time1/chipset_power sample state.
 * Returns the estimate (same value cached in ips.chipset_power).
 */
static unsigned long __i915_chipset_val(struct drm_i915_private *dev_priv)
{
	u64 total_count, diff, ret;
	u32 count1, count2, count3, m = 0, c = 0;
	unsigned long now = jiffies_to_msecs(jiffies), diff1;
	int i;

	assert_spin_locked(&mchdev_lock);

	diff1 = now - dev_priv->ips.last_time1;

	/* Prevent division-by-zero if we are asking too fast.
	 * Also, we don't get interesting results if we are polling
	 * faster than once in 10ms, so just return the saved value
	 * in such cases.
	 */
	if (diff1 <= 10)
		return dev_priv->ips.chipset_power;

	count1 = I915_READ(DMIEC);
	count2 = I915_READ(DDREC);
	count3 = I915_READ(CSIEC);

	total_count = count1 + count2 + count3;

	/* FIXME: handle per-counter overflow */
	if (total_count < dev_priv->ips.last_count1) {
		/* NOTE(review): ~0UL is 64-bit on 64-bit kernels while the
		 * counters are 32-bit; the wrap correction looks suspect
		 * there -- confirm intended width. */
		diff = ~0UL - dev_priv->ips.last_count1;
		diff += total_count;
	} else {
		diff = total_count - dev_priv->ips.last_count1;
	}

	/* Pick the slope/intercept matching this platform's configuration. */
	for (i = 0; i < ARRAY_SIZE(cparams); i++) {
		if (cparams[i].i == dev_priv->ips.c_m &&
		    cparams[i].t == dev_priv->ips.r_t) {
			m = cparams[i].m;
			c = cparams[i].c;
			break;
		}
	}

	/* Linear fit on the per-ms counter delta: (m * rate + c) / 10. */
	diff = div_u64(diff, diff1);
	ret = ((m * diff) + c);
	ret = div_u64(ret, 10);

	dev_priv->ips.last_count1 = total_count;
	dev_priv->ips.last_time1 = now;

	dev_priv->ips.chipset_power = ret;

	return ret;
}
f531dcb23   Chris Wilson   drm/i915: Wrap ex...
4471
4472
  unsigned long i915_chipset_val(struct drm_i915_private *dev_priv)
  {
3d13ef2e2   Damien Lespiau   drm/i915: Always ...
4473
  	struct drm_device *dev = dev_priv->dev;
f531dcb23   Chris Wilson   drm/i915: Wrap ex...
4474
  	unsigned long val;
3d13ef2e2   Damien Lespiau   drm/i915: Always ...
4475
  	if (INTEL_INFO(dev)->gen != 5)
f531dcb23   Chris Wilson   drm/i915: Wrap ex...
4476
4477
4478
4479
4480
4481
4482
4483
4484
4485
  		return 0;
  
  	spin_lock_irq(&mchdev_lock);
  
  	val = __i915_chipset_val(dev_priv);
  
  	spin_unlock_irq(&mchdev_lock);
  
  	return val;
  }
eb48eb005   Daniel Vetter   drm/i915: move th...
4486
4487
4488
4489
4490
4491
4492
4493
4494
4495
4496
4497
4498
4499
4500
4501
4502
  unsigned long i915_mch_val(struct drm_i915_private *dev_priv)
  {
  	unsigned long m, x, b;
  	u32 tsfs;
  
  	tsfs = I915_READ(TSFS);
  
  	m = ((tsfs & TSFS_SLOPE_MASK) >> TSFS_SLOPE_SHIFT);
  	x = I915_READ8(TR1);
  
  	b = tsfs & TSFS_INTR_MASK;
  
  	return ((m * x) / 127) - b;
  }
  
/*
 * Translate a PXVID voltage code into an extended voltage value.
 *
 * The table is indexed by the 7-bit pxvid code (128 entries; the caller
 * in __i915_gfx_val() masks with 0x7f). Mobile parts use the vm column,
 * desktop parts vd. Per the column comments the values are in .1 mil
 * units -- presumably .1 mV, confirm against the PRM.
 */
static u16 pvid_to_extvid(struct drm_i915_private *dev_priv, u8 pxvid)
{
	struct drm_device *dev = dev_priv->dev;
	static const struct v_table {
		u16 vd; /* in .1 mil */
		u16 vm; /* in .1 mil */
	} v_table[] = {
		{ 0, 0, },
		{ 375, 0, },
		{ 500, 0, },
		{ 625, 0, },
		{ 750, 0, },
		{ 875, 0, },
		{ 1000, 0, },
		{ 1125, 0, },
		{ 4125, 3000, },
		{ 4125, 3000, },
		{ 4125, 3000, },
		{ 4125, 3000, },
		{ 4125, 3000, },
		{ 4125, 3000, },
		{ 4125, 3000, },
		{ 4125, 3000, },
		{ 4125, 3000, },
		{ 4125, 3000, },
		{ 4125, 3000, },
		{ 4125, 3000, },
		{ 4125, 3000, },
		{ 4125, 3000, },
		{ 4125, 3000, },
		{ 4125, 3000, },
		{ 4125, 3000, },
		{ 4125, 3000, },
		{ 4125, 3000, },
		{ 4125, 3000, },
		{ 4125, 3000, },
		{ 4125, 3000, },
		{ 4125, 3000, },
		{ 4125, 3000, },
		{ 4250, 3125, },
		{ 4375, 3250, },
		{ 4500, 3375, },
		{ 4625, 3500, },
		{ 4750, 3625, },
		{ 4875, 3750, },
		{ 5000, 3875, },
		{ 5125, 4000, },
		{ 5250, 4125, },
		{ 5375, 4250, },
		{ 5500, 4375, },
		{ 5625, 4500, },
		{ 5750, 4625, },
		{ 5875, 4750, },
		{ 6000, 4875, },
		{ 6125, 5000, },
		{ 6250, 5125, },
		{ 6375, 5250, },
		{ 6500, 5375, },
		{ 6625, 5500, },
		{ 6750, 5625, },
		{ 6875, 5750, },
		{ 7000, 5875, },
		{ 7125, 6000, },
		{ 7250, 6125, },
		{ 7375, 6250, },
		{ 7500, 6375, },
		{ 7625, 6500, },
		{ 7750, 6625, },
		{ 7875, 6750, },
		{ 8000, 6875, },
		{ 8125, 7000, },
		{ 8250, 7125, },
		{ 8375, 7250, },
		{ 8500, 7375, },
		{ 8625, 7500, },
		{ 8750, 7625, },
		{ 8875, 7750, },
		{ 9000, 7875, },
		{ 9125, 8000, },
		{ 9250, 8125, },
		{ 9375, 8250, },
		{ 9500, 8375, },
		{ 9625, 8500, },
		{ 9750, 8625, },
		{ 9875, 8750, },
		{ 10000, 8875, },
		{ 10125, 9000, },
		{ 10250, 9125, },
		{ 10375, 9250, },
		{ 10500, 9375, },
		{ 10625, 9500, },
		{ 10750, 9625, },
		{ 10875, 9750, },
		{ 11000, 9875, },
		{ 11125, 10000, },
		{ 11250, 10125, },
		{ 11375, 10250, },
		{ 11500, 10375, },
		{ 11625, 10500, },
		{ 11750, 10625, },
		{ 11875, 10750, },
		{ 12000, 10875, },
		{ 12125, 11000, },
		{ 12250, 11125, },
		{ 12375, 11250, },
		{ 12500, 11375, },
		{ 12625, 11500, },
		{ 12750, 11625, },
		{ 12875, 11750, },
		{ 13000, 11875, },
		{ 13125, 12000, },
		{ 13250, 12125, },
		{ 13375, 12250, },
		{ 13500, 12375, },
		{ 13625, 12500, },
		{ 13750, 12625, },
		{ 13875, 12750, },
		{ 14000, 12875, },
		{ 14125, 13000, },
		{ 14250, 13125, },
		{ 14375, 13250, },
		{ 14500, 13375, },
		{ 14625, 13500, },
		{ 14750, 13625, },
		{ 14875, 13750, },
		{ 15000, 13875, },
		{ 15125, 14000, },
		{ 15250, 14125, },
		{ 15375, 14250, },
		{ 15500, 14375, },
		{ 15625, 14500, },
		{ 15750, 14625, },
		{ 15875, 14750, },
		{ 16000, 14875, },
		{ 16125, 15000, },
	};
	if (INTEL_INFO(dev)->is_mobile)
		return v_table[pxvid].vm;
	else
		return v_table[pxvid].vd;
}
02d719562   Daniel Vetter   drm/i915: properl...
4642
/*
 * Refresh ips.gfx_power from the GFXEC energy counter.
 *
 * Computes the counter delta since the last sample, scales it into a
 * power figure ("magic constants" below), and stores it together with the
 * new sample baseline. A no-op if less than 1ms has elapsed (avoids a
 * divide by zero). Caller must hold mchdev_lock (asserted).
 */
static void __i915_update_gfx_val(struct drm_i915_private *dev_priv)
{
	u64 now, diff, diffms;
	u32 count;

	assert_spin_locked(&mchdev_lock);

	now = ktime_get_raw_ns();
	diffms = now - dev_priv->ips.last_time2;
	do_div(diffms, NSEC_PER_MSEC);

	/* Don't divide by 0 */
	if (!diffms)
		return;

	count = I915_READ(GFXEC);

	if (count < dev_priv->ips.last_count2) {
		/* NOTE(review): ~0UL is 64-bit on 64-bit kernels while GFXEC
		 * is a 32-bit counter; confirm intended wrap width. */
		diff = ~0UL - dev_priv->ips.last_count2;
		diff += count;
	} else {
		diff = count - dev_priv->ips.last_count2;
	}

	dev_priv->ips.last_count2 = count;
	dev_priv->ips.last_time2 = now;

	/* More magic constants... */
	diff = diff * 1181;
	diff = div_u64(diff, diffms * 10);
	dev_priv->ips.gfx_power = diff;
}
02d719562   Daniel Vetter   drm/i915: properl...
4671
4672
  void i915_update_gfx_val(struct drm_i915_private *dev_priv)
  {
3d13ef2e2   Damien Lespiau   drm/i915: Always ...
4673
4674
4675
  	struct drm_device *dev = dev_priv->dev;
  
  	if (INTEL_INFO(dev)->gen != 5)
02d719562   Daniel Vetter   drm/i915: properl...
4676
  		return;
9270388e1   Daniel Vetter   drm/i915: fix up ...
4677
  	spin_lock_irq(&mchdev_lock);
02d719562   Daniel Vetter   drm/i915: properl...
4678
4679
  
  	__i915_update_gfx_val(dev_priv);
9270388e1   Daniel Vetter   drm/i915: fix up ...
4680
  	spin_unlock_irq(&mchdev_lock);
02d719562   Daniel Vetter   drm/i915: properl...
4681
  }
f531dcb23   Chris Wilson   drm/i915: Wrap ex...
4682
/*
 * Estimate current graphics power draw in mW.
 *
 * Reads the voltage code for the current RPS frequency, applies a set of
 * empirically derived temperature/voltage correction factors, refreshes
 * the averaged counter-based figure via __i915_update_gfx_val(), and
 * returns the sum of both. Caller must hold mchdev_lock (asserted).
 */
static unsigned long __i915_gfx_val(struct drm_i915_private *dev_priv)
{
	unsigned long t, corr, state1, corr2, state2;
	u32 pxvid, ext_v;

	assert_spin_locked(&mchdev_lock);

	/* Voltage code for the currently selected P-state. */
	pxvid = I915_READ(PXVFREQ_BASE + (dev_priv->rps.cur_freq * 4));
	pxvid = (pxvid >> 24) & 0x7f;
	ext_v = pvid_to_extvid(dev_priv, pxvid);

	state1 = ext_v;

	t = i915_mch_val(dev_priv);

	/* Revel in the empirically derived constants */

	/* Correction factor in 1/100000 units */
	if (t > 80)
		corr = ((t * 2349) + 135940);
	else if (t >= 50)
		corr = ((t * 964) + 29317);
	else /* < 50 */
		corr = ((t * 301) + 1004);

	corr = corr * ((150142 * state1) / 10000 - 78642);
	corr /= 100000;
	corr2 = (corr * dev_priv->ips.corr);

	state2 = (corr2 * state1) / 10000;
	state2 /= 100; /* convert to mW */

	__i915_update_gfx_val(dev_priv);

	return dev_priv->ips.gfx_power + state2;
}
f531dcb23   Chris Wilson   drm/i915: Wrap ex...
4715
4716
  unsigned long i915_gfx_val(struct drm_i915_private *dev_priv)
  {
3d13ef2e2   Damien Lespiau   drm/i915: Always ...
4717
  	struct drm_device *dev = dev_priv->dev;
f531dcb23   Chris Wilson   drm/i915: Wrap ex...
4718
  	unsigned long val;
3d13ef2e2   Damien Lespiau   drm/i915: Always ...
4719
  	if (INTEL_INFO(dev)->gen != 5)
f531dcb23   Chris Wilson   drm/i915: Wrap ex...
4720
4721
4722
4723
4724
4725
4726
4727
4728
4729
  		return 0;
  
  	spin_lock_irq(&mchdev_lock);
  
  	val = __i915_gfx_val(dev_priv);
  
  	spin_unlock_irq(&mchdev_lock);
  
  	return val;
  }
eb48eb005   Daniel Vetter   drm/i915: move th...
4730
4731
4732
4733
4734
4735
4736
4737
4738
4739
  /**
   * i915_read_mch_val - return value for IPS use
   *
   * Calculate and return a value for the IPS driver to use when deciding whether
   * we have thermal and power headroom to increase CPU or GPU power budget.
   */
  unsigned long i915_read_mch_val(void)
  {
  	struct drm_i915_private *dev_priv;
  	unsigned long chipset_val, graphics_val, ret = 0;
9270388e1   Daniel Vetter   drm/i915: fix up ...
4740
  	spin_lock_irq(&mchdev_lock);
eb48eb005   Daniel Vetter   drm/i915: move th...
4741
4742
4743
  	if (!i915_mch_dev)
  		goto out_unlock;
  	dev_priv = i915_mch_dev;
f531dcb23   Chris Wilson   drm/i915: Wrap ex...
4744
4745
  	chipset_val = __i915_chipset_val(dev_priv);
  	graphics_val = __i915_gfx_val(dev_priv);
eb48eb005   Daniel Vetter   drm/i915: move th...
4746
4747
4748
4749
  
  	ret = chipset_val + graphics_val;
  
  out_unlock:
9270388e1   Daniel Vetter   drm/i915: fix up ...
4750
  	spin_unlock_irq(&mchdev_lock);
eb48eb005   Daniel Vetter   drm/i915: move th...
4751
4752
4753
4754
4755
4756
4757
4758
4759
4760
4761
4762
4763
4764
  
  	return ret;
  }
  EXPORT_SYMBOL_GPL(i915_read_mch_val);
  
  /**
   * i915_gpu_raise - raise GPU frequency limit
   *
   * Raise the limit; IPS indicates we have thermal headroom.
   */
  bool i915_gpu_raise(void)
  {
  	struct drm_i915_private *dev_priv;
  	bool ret = true;
9270388e1   Daniel Vetter   drm/i915: fix up ...
4765
  	spin_lock_irq(&mchdev_lock);
eb48eb005   Daniel Vetter   drm/i915: move th...
4766
4767
4768
4769
4770
  	if (!i915_mch_dev) {
  		ret = false;
  		goto out_unlock;
  	}
  	dev_priv = i915_mch_dev;
20e4d407f   Daniel Vetter   drm/ips: move drp...
4771
4772
  	if (dev_priv->ips.max_delay > dev_priv->ips.fmax)
  		dev_priv->ips.max_delay--;
eb48eb005   Daniel Vetter   drm/i915: move th...
4773
4774
  
  out_unlock:
9270388e1   Daniel Vetter   drm/i915: fix up ...
4775
  	spin_unlock_irq(&mchdev_lock);
eb48eb005   Daniel Vetter   drm/i915: move th...
4776
4777
4778
4779
4780
4781
4782
4783
4784
4785
4786
4787
4788
4789
4790
  
  	return ret;
  }
  EXPORT_SYMBOL_GPL(i915_gpu_raise);
  
  /**
   * i915_gpu_lower - lower GPU frequency limit
   *
   * IPS indicates we're close to a thermal limit, so throttle back the GPU
   * frequency maximum.
   */
  bool i915_gpu_lower(void)
  {
  	struct drm_i915_private *dev_priv;
  	bool ret = true;
9270388e1   Daniel Vetter   drm/i915: fix up ...
4791
  	spin_lock_irq(&mchdev_lock);
eb48eb005   Daniel Vetter   drm/i915: move th...
4792
4793
4794
4795
4796
  	if (!i915_mch_dev) {
  		ret = false;
  		goto out_unlock;
  	}
  	dev_priv = i915_mch_dev;
20e4d407f   Daniel Vetter   drm/ips: move drp...
4797
4798
  	if (dev_priv->ips.max_delay < dev_priv->ips.min_delay)
  		dev_priv->ips.max_delay++;
eb48eb005   Daniel Vetter   drm/i915: move th...
4799
4800
  
  out_unlock:
9270388e1   Daniel Vetter   drm/i915: fix up ...
4801
  	spin_unlock_irq(&mchdev_lock);
eb48eb005   Daniel Vetter   drm/i915: move th...
4802
4803
4804
4805
4806
4807
4808
4809
4810
4811
4812
4813
4814
  
  	return ret;
  }
  EXPORT_SYMBOL_GPL(i915_gpu_lower);
  
  /**
   * i915_gpu_busy - indicate GPU business to IPS
   *
   * Tell the IPS driver whether or not the GPU is busy.
   */
  bool i915_gpu_busy(void)
  {
  	struct drm_i915_private *dev_priv;
a4872ba6d   Oscar Mateo   drm/i915: s/intel...
4815
  	struct intel_engine_cs *ring;
eb48eb005   Daniel Vetter   drm/i915: move th...
4816
  	bool ret = false;
f047e395d   Chris Wilson   drm/i915: Avoid c...
4817
  	int i;
eb48eb005   Daniel Vetter   drm/i915: move th...
4818

9270388e1   Daniel Vetter   drm/i915: fix up ...
4819
  	spin_lock_irq(&mchdev_lock);
eb48eb005   Daniel Vetter   drm/i915: move th...
4820
4821
4822
  	if (!i915_mch_dev)
  		goto out_unlock;
  	dev_priv = i915_mch_dev;
f047e395d   Chris Wilson   drm/i915: Avoid c...
4823
4824
  	for_each_ring(ring, dev_priv, i)
  		ret |= !list_empty(&ring->request_list);
eb48eb005   Daniel Vetter   drm/i915: move th...
4825
4826
  
  out_unlock:
9270388e1   Daniel Vetter   drm/i915: fix up ...
4827
  	spin_unlock_irq(&mchdev_lock);
eb48eb005   Daniel Vetter   drm/i915: move th...
4828
4829
4830
4831
4832
4833
4834
4835
4836
4837
4838
4839
4840
4841
4842
  
  	return ret;
  }
  EXPORT_SYMBOL_GPL(i915_gpu_busy);
  
  /**
   * i915_gpu_turbo_disable - disable graphics turbo
   *
   * Disable graphics turbo by resetting the max frequency and setting the
   * current frequency to the default.
   */
  bool i915_gpu_turbo_disable(void)
  {
  	struct drm_i915_private *dev_priv;
  	bool ret = true;
9270388e1   Daniel Vetter   drm/i915: fix up ...
4843
  	spin_lock_irq(&mchdev_lock);
eb48eb005   Daniel Vetter   drm/i915: move th...
4844
4845
4846
4847
4848
  	if (!i915_mch_dev) {
  		ret = false;
  		goto out_unlock;
  	}
  	dev_priv = i915_mch_dev;
20e4d407f   Daniel Vetter   drm/ips: move drp...
4849
  	dev_priv->ips.max_delay = dev_priv->ips.fstart;
eb48eb005   Daniel Vetter   drm/i915: move th...
4850

20e4d407f   Daniel Vetter   drm/ips: move drp...
4851
  	if (!ironlake_set_drps(dev_priv->dev, dev_priv->ips.fstart))
eb48eb005   Daniel Vetter   drm/i915: move th...
4852
4853
4854
  		ret = false;
  
  out_unlock:
9270388e1   Daniel Vetter   drm/i915: fix up ...
4855
  	spin_unlock_irq(&mchdev_lock);
eb48eb005   Daniel Vetter   drm/i915: move th...
4856
4857
4858
4859
4860
4861
4862
4863
4864
4865
4866
4867
4868
4869
4870
4871
4872
4873
4874
4875
4876
4877
4878
4879
4880
4881
4882
  
  	return ret;
  }
  EXPORT_SYMBOL_GPL(i915_gpu_turbo_disable);
  
  /**
   * Tells the intel_ips driver that the i915 driver is now loaded, if
   * IPS got loaded first.
   *
   * This awkward dance is so that neither module has to depend on the
   * other in order for IPS to do the appropriate communication of
   * GPU turbo limits to i915.
   */
  static void
  ips_ping_for_i915_load(void)
  {
  	void (*link)(void);
  
  	link = symbol_get(ips_link_to_i915_driver);
  	if (link) {
  		link();
  		symbol_put(ips_link_to_i915_driver);
  	}
  }
  
/*
 * Register this device with the intel-ips cross-module interface and poke
 * intel-ips in case it loaded before us.
 */
void intel_gpu_ips_init(struct drm_i915_private *dev_priv)
{
	/* We only register the i915 ips part with intel-ips once everything is
	 * set up, to avoid intel-ips sneaking in and reading bogus values. */
	spin_lock_irq(&mchdev_lock);
	i915_mch_dev = dev_priv;
	spin_unlock_irq(&mchdev_lock);

	ips_ping_for_i915_load();
}
  
/*
 * Unregister from the intel-ips interface; after this the i915_gpu_*()
 * entry points above report "not loaded" to intel-ips.
 */
void intel_gpu_ips_teardown(void)
{
	spin_lock_irq(&mchdev_lock);
	i915_mch_dev = NULL;
	spin_unlock_irq(&mchdev_lock);
}
76c3552f9   Deepak S   drm/i915/vlv: WA ...
4898

8090c6b9d   Daniel Vetter   drm/i915: wrap up...
4899
/*
 * Program the energy monitoring (EMON) unit.
 *
 * Disables the unit, loads the event energy weights and per-P-state
 * weights (derived from each state's voltage and frequency), sets the
 * remaining configuration registers to empirically determined values, and
 * re-enables the unit. Finally caches the fused correction factor used by
 * __i915_gfx_val(). The constants below are experimental results; the
 * register write order (disable first, enable last) must be kept.
 */
static void intel_init_emon(struct drm_device *dev)
{
	struct drm_i915_private *dev_priv = dev->dev_private;
	u32 lcfuse;
	u8 pxw[16];
	int i;

	/* Disable to program */
	I915_WRITE(ECR, 0);
	POSTING_READ(ECR);

	/* Program energy weights for various events */
	I915_WRITE(SDEW, 0x15040d00);
	I915_WRITE(CSIEW0, 0x007f0000);
	I915_WRITE(CSIEW1, 0x1e220004);
	I915_WRITE(CSIEW2, 0x04000004);

	for (i = 0; i < 5; i++)
		I915_WRITE(PEW + (i * 4), 0);
	for (i = 0; i < 3; i++)
		I915_WRITE(DEW + (i * 4), 0);

	/* Program P-state weights to account for frequency power adjustment */
	for (i = 0; i < 16; i++) {
		u32 pxvidfreq = I915_READ(PXVFREQ_BASE + (i * 4));
		unsigned long freq = intel_pxfreq(pxvidfreq);
		unsigned long vid = (pxvidfreq & PXVFREQ_PX_MASK) >>
			PXVFREQ_PX_SHIFT;
		unsigned long val;

		/* Weight ~ V^2 * f, scaled into a byte. */
		val = vid * vid;
		val *= (freq / 1000);
		val *= 255;
		val /= (127*127*900);
		if (val > 0xff)
			DRM_ERROR("bad pxval: %ld\n", val);
		pxw[i] = val;
	}
	/* Render standby states get 0 weight */
	pxw[14] = 0;
	pxw[15] = 0;

	/* Pack the 16 byte weights into four 32-bit registers. */
	for (i = 0; i < 4; i++) {
		u32 val = (pxw[i*4] << 24) | (pxw[(i*4)+1] << 16) |
			(pxw[(i*4)+2] << 8) | (pxw[(i*4)+3]);
		I915_WRITE(PXW + (i * 4), val);
	}

	/* Adjust magic regs to magic values (more experimental results) */
	I915_WRITE(OGW0, 0);
	I915_WRITE(OGW1, 0);
	I915_WRITE(EG0, 0x00007f00);
	I915_WRITE(EG1, 0x0000000e);
	I915_WRITE(EG2, 0x000e0000);
	I915_WRITE(EG3, 0x68000300);
	I915_WRITE(EG4, 0x42000000);
	I915_WRITE(EG5, 0x00140031);
	I915_WRITE(EG6, 0);
	I915_WRITE(EG7, 0);

	for (i = 0; i < 8; i++)
		I915_WRITE(PXWL + (i * 4), 0);

	/* Enable PMON + select events */
	I915_WRITE(ECR, 0x80000019);

	lcfuse = I915_READ(LCFUSE02);
	dev_priv->ips.corr = (lcfuse & LCFUSE_HIV_MASK);
}
ae48434c2   Imre Deak   drm/i915: vlv: re...
4969
4970
/*
 * One-time GT power-saving initialization: sanitize the rc6 module option,
 * then run the platform-specific init (CHV/VLV only here; other platforms
 * need no early init).
 */
void intel_init_gt_powersave(struct drm_device *dev)
{
	i915.enable_rc6 = sanitize_rc6_option(dev, i915.enable_rc6);

	if (IS_CHERRYVIEW(dev))
		cherryview_init_gt_powersave(dev);
	else if (IS_VALLEYVIEW(dev))
		valleyview_init_gt_powersave(dev);
}
  
/*
 * Counterpart to intel_init_gt_powersave(). Cherryview has nothing to
 * clean up (the early return is deliberate); Valleyview releases what
 * valleyview_init_gt_powersave() set up.
 */
void intel_cleanup_gt_powersave(struct drm_device *dev)
{
	if (IS_CHERRYVIEW(dev))
		return;
	else if (IS_VALLEYVIEW(dev))
		valleyview_cleanup_gt_powersave(dev);
}
156c7ca08   Jesse Barnes   drm/i915: leave r...
4985
4986
4987
4988
4989
4990
4991
4992
4993
4994
4995
4996
4997
/**
 * intel_suspend_gt_powersave - suspend PM work and helper threads
 * @dev: drm device
 *
 * We don't want to disable RC6 or other features here, we just want
 * to make sure any work we've queued has finished and won't bother
 * us while we're suspended.
 */
void intel_suspend_gt_powersave(struct drm_device *dev)
{
	struct drm_i915_private *dev_priv = dev->dev_private;

	/* Interrupts should be disabled already to avoid re-arming. */
	WARN_ON(intel_irqs_enabled(dev_priv));

	/* Let any pending deferred RPS enabling finish before we quiesce. */
	flush_delayed_work(&dev_priv->rps.delayed_resume_work);

	cancel_work_sync(&dev_priv->rps.work);

	/* Force GPU to min freq during suspend */
	gen6_rps_idle(dev_priv);
}
8090c6b9d   Daniel Vetter   drm/i915: wrap up...
5007
5008
/*
 * Disable all GT power-saving features (DRPS/RC6 on Ironlake, RPS on
 * gen6+). On gen6+ this first quiesces the deferred-work machinery via
 * intel_suspend_gt_powersave(), then disables RPS under rps.hw_lock and
 * marks it disabled so a later enable re-initializes it.
 */
void intel_disable_gt_powersave(struct drm_device *dev)
{
	struct drm_i915_private *dev_priv = dev->dev_private;

	/* Interrupts should be disabled already to avoid re-arming. */
	WARN_ON(intel_irqs_enabled(dev_priv));

	if (IS_IRONLAKE_M(dev)) {
		ironlake_disable_drps(dev);
		ironlake_disable_rc6(dev);
	} else if (INTEL_INFO(dev)->gen >= 6) {
		intel_suspend_gt_powersave(dev);

		mutex_lock(&dev_priv->rps.hw_lock);
		if (IS_CHERRYVIEW(dev))
			cherryview_disable_rps(dev);
		else if (IS_VALLEYVIEW(dev))
			valleyview_disable_rps(dev);
		else
			gen6_disable_rps(dev);
		dev_priv->rps.enabled = false;
		mutex_unlock(&dev_priv->rps.hw_lock);
	}
}
1a01ab3b2   Jesse Barnes   drm/i915: put rin...
5030
5031
5032
5033
5034
5035
/*
 * Deferred worker that actually enables RPS/RC6 (scheduled from
 * intel_enable_gt_powersave()).  Done out of line because PCU
 * communication is slow; see the comment at the scheduling site.
 * Releases the runtime-PM reference taken when the work was queued.
 */
static void intel_gen6_powersave_work(struct work_struct *work)
{
	struct drm_i915_private *dev_priv =
		container_of(work, struct drm_i915_private,
			     rps.delayed_resume_work.work);
	struct drm_device *dev = dev_priv->dev;

	mutex_lock(&dev_priv->rps.hw_lock);

	/* Pick the platform-specific RPS enable path; gen8/gen6 also
	 * program the ring frequency table. */
	if (IS_CHERRYVIEW(dev)) {
		cherryview_enable_rps(dev);
	} else if (IS_VALLEYVIEW(dev)) {
		valleyview_enable_rps(dev);
	} else if (IS_BROADWELL(dev)) {
		gen8_enable_rps(dev);
		__gen6_update_ring_freq(dev);
	} else {
		gen6_enable_rps(dev);
		__gen6_update_ring_freq(dev);
	}
	dev_priv->rps.enabled = true;
	mutex_unlock(&dev_priv->rps.hw_lock);

	/* Balances intel_runtime_pm_get_noresume() taken when this work
	 * was successfully scheduled. */
	intel_runtime_pm_put(dev_priv);
}
8090c6b9d   Daniel Vetter   drm/i915: wrap up...
5054
5055
/*
 * Enable GPU power-saving features.  On Ironlake this is done
 * synchronously under struct_mutex; on gen6+ the real work is deferred
 * to intel_gen6_powersave_work() roughly one second out.
 */
void intel_enable_gt_powersave(struct drm_device *dev)
{
	struct drm_i915_private *dev_priv = dev->dev_private;

	if (IS_IRONLAKE_M(dev)) {
		mutex_lock(&dev->struct_mutex);
		ironlake_enable_drps(dev);
		ironlake_enable_rc6(dev);
		intel_init_emon(dev);
		mutex_unlock(&dev->struct_mutex);
	} else if (INTEL_INFO(dev)->gen >= 6) {
		/*
		 * PCU communication is slow and this doesn't need to be
		 * done at any specific time, so do this out of our fast path
		 * to make resume and init faster.
		 *
		 * We depend on the HW RC6 power context save/restore
		 * mechanism when entering D3 through runtime PM suspend. So
		 * disable RPM until RPS/RC6 is properly setup. We can only
		 * get here via the driver load/system resume/runtime resume
		 * paths, so the _noresume version is enough (and in case of
		 * runtime resume it's necessary).
		 */
		/* Only take the RPM reference if the work was actually
		 * queued; the worker drops it when it runs. */
		if (schedule_delayed_work(&dev_priv->rps.delayed_resume_work,
					   round_jiffies_up_relative(HZ)))
			intel_runtime_pm_get_noresume(dev_priv);
	}
}
c6df39b5e   Imre Deak   drm/i915: get a r...
5081
5082
5083
5084
5085
5086
5087
/*
 * Re-initialize GPU power saving after a GPU reset: mark RPS as disabled
 * first so the enable path performs a full (re)setup.
 */
void intel_reset_gt_powersave(struct drm_device *dev)
{
	struct drm_i915_private *dev_priv = dev->dev_private;

	dev_priv->rps.enabled = false;
	intel_enable_gt_powersave(dev);
}
3107bd48b   Daniel Vetter   drm/i915: kill pc...
5088
5089
5090
5091
5092
5093
5094
5095
5096
5097
5098
/* PCH (Ibex Peak) clock-gating setup, called from the CPU-side hooks. */
static void ibx_init_clock_gating(struct drm_device *dev)
{
	struct drm_i915_private *dev_priv = dev->dev_private;

	/*
	 * On Ibex Peak and Cougar Point, we need to disable clock
	 * gating for the panel power sequencer or it will fail to
	 * start up when no ports are active.
	 */
	I915_WRITE(SOUTH_DSPCLK_GATE_D, PCH_DPLSUNIT_CLOCK_GATE_DISABLE);
}
0e088b8f3   Ville Syrjälä   drm/i915: Refacto...
5099
5100
5101
5102
/*
 * Disable display-plane trickle feed on every pipe.  Each plane's
 * control register update is latched by flushing the primary plane.
 */
static void g4x_disable_trickle_feed(struct drm_device *dev)
{
	struct drm_i915_private *dev_priv = dev->dev_private;
	int pipe;

	for_each_pipe(dev_priv, pipe) {
		I915_WRITE(DSPCNTR(pipe),
			   I915_READ(DSPCNTR(pipe)) |
			   DISPPLANE_TRICKLE_FEED_DISABLE);

		/* Trigger a plane update so the new DSPCNTR value takes
		 * effect. */
		intel_flush_primary_plane(dev_priv, pipe);
	}
}
017636cc0   Ville Syrjälä   drm/i915: Disable...
5110
5111
5112
5113
5114
5115
5116
5117
5118
5119
5120
5121
5122
/*
 * Disable the LP1-3 low-power watermark levels; the watermark code
 * re-enables them later once valid values have been computed.
 */
static void ilk_init_lp_watermarks(struct drm_device *dev)
{
	struct drm_i915_private *dev_priv = dev->dev_private;

	I915_WRITE(WM3_LP_ILK, I915_READ(WM3_LP_ILK) & ~WM1_LP_SR_EN);
	I915_WRITE(WM2_LP_ILK, I915_READ(WM2_LP_ILK) & ~WM1_LP_SR_EN);
	I915_WRITE(WM1_LP_ILK, I915_READ(WM1_LP_ILK) & ~WM1_LP_SR_EN);

	/*
	 * Don't touch WM1S_LP_EN here.
	 * Doing so could cause underruns.
	 */
}
1fa611065   Eugeni Dodonov   drm/i915: add gen...
5123
/*
 * Ironlake (gen5) clock-gating and workaround programming.  The write
 * order follows the hardware workaround documentation; do not reorder.
 */
static void ironlake_init_clock_gating(struct drm_device *dev)
{
	struct drm_i915_private *dev_priv = dev->dev_private;
	/* Accumulates ILK_DSPCLK_GATE_D bits; written once further down. */
	uint32_t dspclk_gate = ILK_VRHUNIT_CLOCK_GATE_DISABLE;

	/*
	 * Required for FBC
	 * WaFbcDisableDpfcClockGating:ilk
	 */
	dspclk_gate |= ILK_DPFCRUNIT_CLOCK_GATE_DISABLE |
		   ILK_DPFCUNIT_CLOCK_GATE_DISABLE |
		   ILK_DPFDUNIT_CLOCK_GATE_ENABLE;

	I915_WRITE(PCH_3DCGDIS0,
		   MARIUNIT_CLOCK_GATE_DISABLE |
		   SVSMUNIT_CLOCK_GATE_DISABLE);
	I915_WRITE(PCH_3DCGDIS1,
		   VFMUNIT_CLOCK_GATE_DISABLE);

	/*
	 * According to the spec the following bits should be set in
	 * order to enable memory self-refresh
	 * The bit 22/21 of 0x42004
	 * The bit 5 of 0x42020
	 * The bit 15 of 0x45000
	 */
	I915_WRITE(ILK_DISPLAY_CHICKEN2,
		   (I915_READ(ILK_DISPLAY_CHICKEN2) |
		    ILK_DPARB_GATE | ILK_VSDPFD_FULL));
	dspclk_gate |= ILK_DPARBUNIT_CLOCK_GATE_ENABLE;
	I915_WRITE(DISP_ARB_CTL,
		   (I915_READ(DISP_ARB_CTL) |
		    DISP_FBC_WM_DIS));

	ilk_init_lp_watermarks(dev);

	/*
	 * Based on the document from hardware guys the following bits
	 * should be set unconditionally in order to enable FBC.
	 * The bit 22 of 0x42000
	 * The bit 22 of 0x42004
	 * The bit 7,8,9 of 0x42020.
	 */
	if (IS_IRONLAKE_M(dev)) {
		/* WaFbcAsynchFlipDisableFbcQueue:ilk */
		I915_WRITE(ILK_DISPLAY_CHICKEN1,
			   I915_READ(ILK_DISPLAY_CHICKEN1) |
			   ILK_FBCQ_DIS);
		I915_WRITE(ILK_DISPLAY_CHICKEN2,
			   I915_READ(ILK_DISPLAY_CHICKEN2) |
			   ILK_DPARB_GATE);
	}

	/* Single write of all the dspclk_gate bits collected above. */
	I915_WRITE(ILK_DSPCLK_GATE_D, dspclk_gate);

	I915_WRITE(ILK_DISPLAY_CHICKEN2,
		   I915_READ(ILK_DISPLAY_CHICKEN2) |
		   ILK_ELPIN_409_SELECT);
	I915_WRITE(_3D_CHICKEN2,
		   _3D_CHICKEN2_WM_READ_PIPELINED << 16 |
		   _3D_CHICKEN2_WM_READ_PIPELINED);

	/* WaDisableRenderCachePipelinedFlush:ilk */
	I915_WRITE(CACHE_MODE_0,
		   _MASKED_BIT_ENABLE(CM0_PIPELINED_RENDER_FLUSH_DISABLE));

	/* WaDisable_RenderCache_OperationalFlush:ilk */
	I915_WRITE(CACHE_MODE_0, _MASKED_BIT_DISABLE(RC_OP_FLUSH_ENABLE));

	g4x_disable_trickle_feed(dev);

	/* PCH-side clock gating. */
	ibx_init_clock_gating(dev);
}
  
/*
 * Cougar Point PCH clock-gating and transcoder chicken-bit setup,
 * called from the SNB/IVB CPU-side clock-gating hooks.
 */
static void cpt_init_clock_gating(struct drm_device *dev)
{
	struct drm_i915_private *dev_priv = dev->dev_private;
	int pipe;
	uint32_t val;

	/*
	 * On Ibex Peak and Cougar Point, we need to disable clock
	 * gating for the panel power sequencer or it will fail to
	 * start up when no ports are active.
	 */
	I915_WRITE(SOUTH_DSPCLK_GATE_D, PCH_DPLSUNIT_CLOCK_GATE_DISABLE |
		   PCH_DPLUNIT_CLOCK_GATE_DISABLE |
		   PCH_CPUNIT_CLOCK_GATE_DISABLE);
	I915_WRITE(SOUTH_CHICKEN2, I915_READ(SOUTH_CHICKEN2) |
		   DPLS_EDP_PPS_FIX_DIS);
	/* The below fixes the weird display corruption, a few pixels shifted
	 * downward, on (only) LVDS of some HP laptops with IVY.
	 */
	for_each_pipe(dev_priv, pipe) {
		val = I915_READ(TRANS_CHICKEN2(pipe));
		val |= TRANS_CHICKEN2_TIMING_OVERRIDE;
		/* Default to normal FDI polarity, then honour the VBT
		 * override if the board wires FDI RX inverted. */
		val &= ~TRANS_CHICKEN2_FDI_POLARITY_REVERSED;
		if (dev_priv->vbt.fdi_rx_polarity_inverted)
			val |= TRANS_CHICKEN2_FDI_POLARITY_REVERSED;
		val &= ~TRANS_CHICKEN2_FRAME_START_DELAY_MASK;
		val &= ~TRANS_CHICKEN2_DISABLE_DEEP_COLOR_COUNTER;
		val &= ~TRANS_CHICKEN2_DISABLE_DEEP_COLOR_MODESWITCH;
		I915_WRITE(TRANS_CHICKEN2(pipe), val);
	}
	/* WADP0ClockGatingDisable */
	for_each_pipe(dev_priv, pipe) {
		I915_WRITE(TRANS_CHICKEN1(pipe),
			   TRANS_CHICKEN1_DP0UNIT_GC_DISABLE);
	}
}
1d7aaa0cf   Daniel Vetter   drm/i915: detect ...
5229
5230
5231
5232
5233
5234
/*
 * Sanity-check that the BIOS programmed the expected WM0 latency value
 * in MCH_SSKPD; a wrong value is a known cause of display underruns.
 * Diagnostic only — nothing is corrected here.
 */
static void gen6_check_mch_setup(struct drm_device *dev)
{
	struct drm_i915_private *dev_priv = dev->dev_private;
	uint32_t tmp;

	tmp = I915_READ(MCH_SSKPD);
	if ((tmp & MCH_SSKPD_WM0_MASK) != MCH_SSKPD_WM0_VAL)
		DRM_DEBUG_KMS("Wrong MCH_SSKPD value: 0x%08x This can cause underruns.\n",
			      tmp);
}
1fa611065   Eugeni Dodonov   drm/i915: add gen...
5240
/*
 * Sandybridge (gen6) clock-gating and workaround programming.  The
 * write order follows the workaround documentation; do not reorder.
 */
static void gen6_init_clock_gating(struct drm_device *dev)
{
	struct drm_i915_private *dev_priv = dev->dev_private;
	uint32_t dspclk_gate = ILK_VRHUNIT_CLOCK_GATE_DISABLE;

	I915_WRITE(ILK_DSPCLK_GATE_D, dspclk_gate);

	I915_WRITE(ILK_DISPLAY_CHICKEN2,
		   I915_READ(ILK_DISPLAY_CHICKEN2) |
		   ILK_ELPIN_409_SELECT);

	/* WaDisableHiZPlanesWhenMSAAEnabled:snb */
	I915_WRITE(_3D_CHICKEN,
		   _MASKED_BIT_ENABLE(_3D_CHICKEN_HIZ_PLANE_DISABLE_MSAA_4X_SNB));

	/* WaDisable_RenderCache_OperationalFlush:snb */
	I915_WRITE(CACHE_MODE_0, _MASKED_BIT_DISABLE(RC_OP_FLUSH_ENABLE));

	/*
	 * BSpec recommends 8x4 when MSAA is used,
	 * however in practice 16x4 seems fastest.
	 *
	 * Note that PS/WM thread counts depend on the WIZ hashing
	 * disable bit, which we don't touch here, but it's good
	 * to keep in mind (see 3DSTATE_PS and 3DSTATE_WM).
	 */
	I915_WRITE(GEN6_GT_MODE,
		   GEN6_WIZ_HASHING_MASK | GEN6_WIZ_HASHING_16x4);

	ilk_init_lp_watermarks(dev);

	I915_WRITE(CACHE_MODE_0,
		   _MASKED_BIT_DISABLE(CM0_STC_EVICT_DISABLE_LRA_SNB));

	I915_WRITE(GEN6_UCGCTL1,
		   I915_READ(GEN6_UCGCTL1) |
		   GEN6_BLBUNIT_CLOCK_GATE_DISABLE |
		   GEN6_CSUNIT_CLOCK_GATE_DISABLE);

	/* According to the BSpec vol1g, bit 12 (RCPBUNIT) clock
	 * gating disable must be set.  Failure to set it results in
	 * flickering pixels due to Z write ordering failures after
	 * some amount of runtime in the Mesa "fire" demo, and Unigine
	 * Sanctuary and Tropics, and apparently anything else with
	 * alpha test or pixel discard.
	 *
	 * According to the spec, bit 11 (RCCUNIT) must also be set,
	 * but we didn't debug actual testcases to find it out.
	 *
	 * WaDisableRCCUnitClockGating:snb
	 * WaDisableRCPBUnitClockGating:snb
	 */
	I915_WRITE(GEN6_UCGCTL2,
		   GEN6_RCPBUNIT_CLOCK_GATE_DISABLE |
		   GEN6_RCCUNIT_CLOCK_GATE_DISABLE);

	/* WaStripsFansDisableFastClipPerformanceFix:snb */
	I915_WRITE(_3D_CHICKEN3,
		   _MASKED_BIT_ENABLE(_3D_CHICKEN3_SF_DISABLE_FASTCLIP_CULL));

	/*
	 * Bspec says:
	 * "This bit must be set if 3DSTATE_CLIP clip mode is set to normal and
	 * 3DSTATE_SF number of SF output attributes is more than 16."
	 */
	I915_WRITE(_3D_CHICKEN3,
		   _MASKED_BIT_ENABLE(_3D_CHICKEN3_SF_DISABLE_PIPELINED_ATTR_FETCH));

	/*
	 * According to the spec the following bits should be
	 * set in order to enable memory self-refresh and fbc:
	 * The bit21 and bit22 of 0x42000
	 * The bit21 and bit22 of 0x42004
	 * The bit5 and bit7 of 0x42020
	 * The bit14 of 0x70180
	 * The bit14 of 0x71180
	 *
	 * WaFbcAsynchFlipDisableFbcQueue:snb
	 */
	I915_WRITE(ILK_DISPLAY_CHICKEN1,
		   I915_READ(ILK_DISPLAY_CHICKEN1) |
		   ILK_FBCQ_DIS | ILK_PABSTRETCH_DIS);
	I915_WRITE(ILK_DISPLAY_CHICKEN2,
		   I915_READ(ILK_DISPLAY_CHICKEN2) |
		   ILK_DPARB_GATE | ILK_VSDPFD_FULL);
	I915_WRITE(ILK_DSPCLK_GATE_D,
		   I915_READ(ILK_DSPCLK_GATE_D) |
		   ILK_DPARBUNIT_CLOCK_GATE_ENABLE  |
		   ILK_DPFDUNIT_CLOCK_GATE_ENABLE);

	g4x_disable_trickle_feed(dev);

	/* PCH-side (Cougar Point) clock gating, then verify BIOS setup. */
	cpt_init_clock_gating(dev);

	gen6_check_mch_setup(dev);
}
  
/*
 * Force HW scheduling for the fixed-function thread dispatchers
 * (read-modify-write of GEN7_FF_THREAD_MODE).
 */
static void gen7_setup_fixed_func_scheduler(struct drm_i915_private *dev_priv)
{
	uint32_t reg = I915_READ(GEN7_FF_THREAD_MODE);

	/*
	 * WaVSThreadDispatchOverride:ivb,vlv
	 *
	 * This actually overrides the dispatch
	 * mode for all thread types.
	 */
	reg &= ~GEN7_FF_SCHED_MASK;
	reg |= GEN7_FF_TS_SCHED_HW;
	reg |= GEN7_FF_VS_SCHED_HW;
	reg |= GEN7_FF_DS_SCHED_HW;

	I915_WRITE(GEN7_FF_THREAD_MODE, reg);
}
17a303ec7   Paulo Zanoni   drm/i915: make DP...
5348
5349
5350
5351
5352
5353
5354
5355
5356
5357
5358
5359
/* Lynx Point PCH clock gating, called from the HSW/BDW hooks. */
static void lpt_init_clock_gating(struct drm_device *dev)
{
	struct drm_i915_private *dev_priv = dev->dev_private;

	/*
	 * TODO: this bit should only be enabled when really needed, then
	 * disabled when not needed anymore in order to save power.
	 */
	/* LP variant only; undone again in lpt_suspend_hw(). */
	if (dev_priv->pch_id == INTEL_PCH_LPT_LP_DEVICE_ID_TYPE)
		I915_WRITE(SOUTH_DSPCLK_GATE_D,
			   I915_READ(SOUTH_DSPCLK_GATE_D) |
			   PCH_LP_PARTITION_LEVEL_DISABLE);

	/* WADPOClockGatingDisable:hsw */
	I915_WRITE(_TRANSA_CHICKEN1,
		   I915_READ(_TRANSA_CHICKEN1) |
		   TRANS_CHICKEN1_DP0UNIT_GC_DISABLE);
}
7d708ee40   Imre Deak   drm/i915: HSW: al...
5366
5367
5368
5369
5370
5371
5372
5373
5374
5375
5376
/*
 * Undo the LPT-LP partition-level clock-gating disable applied in
 * lpt_init_clock_gating() so the PCH can power down across suspend.
 */
static void lpt_suspend_hw(struct drm_device *dev)
{
	struct drm_i915_private *dev_priv = dev->dev_private;

	if (dev_priv->pch_id == INTEL_PCH_LPT_LP_DEVICE_ID_TYPE) {
		uint32_t val = I915_READ(SOUTH_DSPCLK_GATE_D);

		val &= ~PCH_LP_PARTITION_LEVEL_DISABLE;
		I915_WRITE(SOUTH_DSPCLK_GATE_D, val);
	}
}
47c2bd97c   Paulo Zanoni   drm/i915: rename ...
5377
/*
 * Broadwell (gen8) clock-gating and workaround programming.  The write
 * order follows the workaround documentation; do not reorder.
 */
static void broadwell_init_clock_gating(struct drm_device *dev)
{
	struct drm_i915_private *dev_priv = dev->dev_private;
	enum pipe pipe;

	/* Clear the LP watermarks; recomputed later by the wm code. */
	I915_WRITE(WM3_LP_ILK, 0);
	I915_WRITE(WM2_LP_ILK, 0);
	I915_WRITE(WM1_LP_ILK, 0);

	/* FIXME(BDW): Check all the w/a, some might only apply to
	 * pre-production hw. */

	I915_WRITE(GAMTARBMODE, _MASKED_BIT_ENABLE(ARB_MODE_BWGTLB_DISABLE));

	I915_WRITE(_3D_CHICKEN3,
		   _MASKED_BIT_ENABLE(_3D_CHICKEN_SDE_LIMIT_FIFO_POLY_DEPTH(2)));

	/* WaSwitchSolVfFArbitrationPriority:bdw */
	I915_WRITE(GAM_ECOCHK, I915_READ(GAM_ECOCHK) | HSW_ECOCHK_ARB_PRIO_SOL);

	/* WaPsrDPAMaskVBlankInSRD:bdw */
	I915_WRITE(CHICKEN_PAR1_1,
		   I915_READ(CHICKEN_PAR1_1) | DPA_MASK_VBLANK_SRD);

	/* WaPsrDPRSUnmaskVBlankInSRD:bdw */
	for_each_pipe(dev_priv, pipe) {
		I915_WRITE(CHICKEN_PIPESL_1(pipe),
			   I915_READ(CHICKEN_PIPESL_1(pipe)) |
			   BDW_DPRS_MASK_VBLANK_SRD);
	}

	/* WaVSRefCountFullforceMissDisable:bdw */
	/* WaDSRefCountFullforceMissDisable:bdw */
	I915_WRITE(GEN7_FF_THREAD_MODE,
		   I915_READ(GEN7_FF_THREAD_MODE) &
		   ~(GEN8_FF_DS_REF_CNT_FFME | GEN7_FF_VS_REF_CNT_FFME));

	I915_WRITE(GEN6_RC_SLEEP_PSMI_CONTROL,
		   _MASKED_BIT_ENABLE(GEN8_RC_SEMA_IDLE_MSG_DISABLE));

	/* WaDisableSDEUnitClockGating:bdw */
	I915_WRITE(GEN8_UCGCTL6, I915_READ(GEN8_UCGCTL6) |
		   GEN8_SDEUNIT_CLOCK_GATE_DISABLE);

	/* PCH-side (Lynx Point) clock gating. */
	lpt_init_clock_gating(dev);
}
cad2a2d77   Eugeni Dodonov   drm/i915: introdu...
5422
5423
5424
/*
 * Haswell (gen7.5) clock-gating and workaround programming.  The write
 * order follows the workaround documentation; do not reorder.
 */
static void haswell_init_clock_gating(struct drm_device *dev)
{
	struct drm_i915_private *dev_priv = dev->dev_private;

	ilk_init_lp_watermarks(dev);

	/* L3 caching of data atomics doesn't work -- disable it. */
	I915_WRITE(HSW_SCRATCH1, HSW_SCRATCH1_L3_DATA_ATOMICS_DISABLE);
	I915_WRITE(HSW_ROW_CHICKEN3,
		   _MASKED_BIT_ENABLE(HSW_ROW_CHICKEN3_L3_GLOBAL_ATOMICS_DISABLE));

	/* This is required by WaCatErrorRejectionIssue:hsw */
	I915_WRITE(GEN7_SQ_CHICKEN_MBCUNIT_CONFIG,
			I915_READ(GEN7_SQ_CHICKEN_MBCUNIT_CONFIG) |
			GEN7_SQ_CHICKEN_MBCUNIT_SQINTMOB);

	/* WaVSRefCountFullforceMissDisable:hsw */
	I915_WRITE(GEN7_FF_THREAD_MODE,
		   I915_READ(GEN7_FF_THREAD_MODE) & ~GEN7_FF_VS_REF_CNT_FFME);

	/* WaDisable_RenderCache_OperationalFlush:hsw */
	I915_WRITE(CACHE_MODE_0_GEN7, _MASKED_BIT_DISABLE(RC_OP_FLUSH_ENABLE));

	/* enable HiZ Raw Stall Optimization */
	I915_WRITE(CACHE_MODE_0_GEN7,
		   _MASKED_BIT_DISABLE(HIZ_RAW_STALL_OPT_DISABLE));

	/* WaDisable4x2SubspanOptimization:hsw */
	I915_WRITE(CACHE_MODE_1,
		   _MASKED_BIT_ENABLE(PIXEL_SUBSPAN_COLLECT_OPT_DISABLE));

	/*
	 * BSpec recommends 8x4 when MSAA is used,
	 * however in practice 16x4 seems fastest.
	 *
	 * Note that PS/WM thread counts depend on the WIZ hashing
	 * disable bit, which we don't touch here, but it's good
	 * to keep in mind (see 3DSTATE_PS and 3DSTATE_WM).
	 */
	I915_WRITE(GEN7_GT_MODE,
		   GEN6_WIZ_HASHING_MASK | GEN6_WIZ_HASHING_16x4);

	/* WaSwitchSolVfFArbitrationPriority:hsw */
	I915_WRITE(GAM_ECOCHK, I915_READ(GAM_ECOCHK) | HSW_ECOCHK_ARB_PRIO_SOL);

	/* WaRsPkgCStateDisplayPMReq:hsw */
	I915_WRITE(CHICKEN_PAR1_1,
		   I915_READ(CHICKEN_PAR1_1) | FORCE_ARB_IDLE_PLANES);

	/* PCH-side (Lynx Point) clock gating. */
	lpt_init_clock_gating(dev);
}
1fa611065   Eugeni Dodonov   drm/i915: add gen...
5467
/*
 * Ivybridge (gen7) clock-gating and workaround programming.  The write
 * order follows the workaround documentation; do not reorder.
 */
static void ivybridge_init_clock_gating(struct drm_device *dev)
{
	struct drm_i915_private *dev_priv = dev->dev_private;
	uint32_t snpcr;

	ilk_init_lp_watermarks(dev);

	I915_WRITE(ILK_DSPCLK_GATE_D, ILK_VRHUNIT_CLOCK_GATE_DISABLE);

	/* WaDisableEarlyCull:ivb */
	I915_WRITE(_3D_CHICKEN3,
		   _MASKED_BIT_ENABLE(_3D_CHICKEN_SF_DISABLE_OBJEND_CULL));

	/* WaDisableBackToBackFlipFix:ivb */
	I915_WRITE(IVB_CHICKEN3,
		   CHICKEN3_DGMG_REQ_OUT_FIX_DISABLE |
		   CHICKEN3_DGMG_DONE_FIX_DISABLE);

	/* WaDisablePSDDualDispatchEnable:ivb */
	if (IS_IVB_GT1(dev))
		I915_WRITE(GEN7_HALF_SLICE_CHICKEN1,
			   _MASKED_BIT_ENABLE(GEN7_PSD_SINGLE_PORT_DISPATCH_ENABLE));

	/* WaDisable_RenderCache_OperationalFlush:ivb */
	I915_WRITE(CACHE_MODE_0_GEN7, _MASKED_BIT_DISABLE(RC_OP_FLUSH_ENABLE));

	/* Apply the WaDisableRHWOOptimizationForRenderHang:ivb workaround. */
	I915_WRITE(GEN7_COMMON_SLICE_CHICKEN1,
		   GEN7_CSC1_RHWO_OPT_DISABLE_IN_RCC);

	/* WaApplyL3ControlAndL3ChickenMode:ivb */
	I915_WRITE(GEN7_L3CNTLREG1,
			GEN7_WA_FOR_GEN7_L3_CONTROL);
	I915_WRITE(GEN7_L3_CHICKEN_MODE_REGISTER,
		   GEN7_WA_L3_CHICKEN_MODE);
	if (IS_IVB_GT1(dev))
		I915_WRITE(GEN7_ROW_CHICKEN2,
			   _MASKED_BIT_ENABLE(DOP_CLOCK_GATING_DISABLE));
	else {
		/* must write both registers */
		I915_WRITE(GEN7_ROW_CHICKEN2,
			   _MASKED_BIT_ENABLE(DOP_CLOCK_GATING_DISABLE));
		I915_WRITE(GEN7_ROW_CHICKEN2_GT2,
			   _MASKED_BIT_ENABLE(DOP_CLOCK_GATING_DISABLE));
	}

	/* WaForceL3Serialization:ivb */
	I915_WRITE(GEN7_L3SQCREG4, I915_READ(GEN7_L3SQCREG4) &
		   ~L3SQ_URB_READ_CAM_MATCH_DISABLE);

	/*
	 * According to the spec, bit 13 (RCZUNIT) must be set on IVB.
	 * This implements the WaDisableRCZUnitClockGating:ivb workaround.
	 */
	I915_WRITE(GEN6_UCGCTL2,
		   GEN6_RCZUNIT_CLOCK_GATE_DISABLE);

	/* This is required by WaCatErrorRejectionIssue:ivb */
	I915_WRITE(GEN7_SQ_CHICKEN_MBCUNIT_CONFIG,
			I915_READ(GEN7_SQ_CHICKEN_MBCUNIT_CONFIG) |
			GEN7_SQ_CHICKEN_MBCUNIT_SQINTMOB);

	g4x_disable_trickle_feed(dev);

	gen7_setup_fixed_func_scheduler(dev_priv);

	/* Deliberately compiled out: see adjacent comment. */
	if (0) { /* causes HiZ corruption on ivb:gt1 */
		/* enable HiZ Raw Stall Optimization */
		I915_WRITE(CACHE_MODE_0_GEN7,
			   _MASKED_BIT_DISABLE(HIZ_RAW_STALL_OPT_DISABLE));
	}

	/* WaDisable4x2SubspanOptimization:ivb */
	I915_WRITE(CACHE_MODE_1,
		   _MASKED_BIT_ENABLE(PIXEL_SUBSPAN_COLLECT_OPT_DISABLE));

	/*
	 * BSpec recommends 8x4 when MSAA is used,
	 * however in practice 16x4 seems fastest.
	 *
	 * Note that PS/WM thread counts depend on the WIZ hashing
	 * disable bit, which we don't touch here, but it's good
	 * to keep in mind (see 3DSTATE_PS and 3DSTATE_WM).
	 */
	I915_WRITE(GEN7_GT_MODE,
		   GEN6_WIZ_HASHING_MASK | GEN6_WIZ_HASHING_16x4);

	/* Set the MBC snoop priority field to "medium". */
	snpcr = I915_READ(GEN6_MBCUNIT_SNPCR);
	snpcr &= ~GEN6_MBC_SNPCR_MASK;
	snpcr |= GEN6_MBC_SNPCR_MED;
	I915_WRITE(GEN6_MBCUNIT_SNPCR, snpcr);

	/* IVB can be paired with a PCH-less board (PCH_NOP). */
	if (!HAS_PCH_NOP(dev))
		cpt_init_clock_gating(dev);

	gen6_check_mch_setup(dev);
}
1fa611065   Eugeni Dodonov   drm/i915: add gen...
5557
  static void valleyview_init_clock_gating(struct drm_device *dev)
6f1d69b04   Eugeni Dodonov   drm/i915: move cl...
5558
5559
  {
  	struct drm_i915_private *dev_priv = dev->dev_private;
6f1d69b04   Eugeni Dodonov   drm/i915: move cl...
5560

d7fe0cc0f   Ville Syrjälä   drm/i915: Fix DSP...
5561
  	I915_WRITE(DSPCLK_GATE_D, VRHUNIT_CLOCK_GATE_DISABLE);
6f1d69b04   Eugeni Dodonov   drm/i915: move cl...
5562

ecdb4eb71   Damien Lespiau   drm/i915: Add pla...
5563
  	/* WaDisableEarlyCull:vlv */
87f8020ec   Jesse Barnes   drm/i915: impleme...
5564
5565
  	I915_WRITE(_3D_CHICKEN3,
  		   _MASKED_BIT_ENABLE(_3D_CHICKEN_SF_DISABLE_OBJEND_CULL));
ecdb4eb71   Damien Lespiau   drm/i915: Add pla...
5566
  	/* WaDisableBackToBackFlipFix:vlv */
6f1d69b04   Eugeni Dodonov   drm/i915: move cl...
5567
5568
5569
  	I915_WRITE(IVB_CHICKEN3,
  		   CHICKEN3_DGMG_REQ_OUT_FIX_DISABLE |
  		   CHICKEN3_DGMG_DONE_FIX_DISABLE);
fad7d36e4   Ville Syrjälä   drm/i915: WaPsdDi...
5570
  	/* WaPsdDispatchEnable:vlv */
ecdb4eb71   Damien Lespiau   drm/i915: Add pla...
5571
  	/* WaDisablePSDDualDispatchEnable:vlv */
12f3382bc   Jesse Barnes   drm/i915: impleme...
5572
  	I915_WRITE(GEN7_HALF_SLICE_CHICKEN1,
d3bc03030   Jesse Barnes   drm/i915: fix WaD...
5573
5574
  		   _MASKED_BIT_ENABLE(GEN7_MAX_PS_THREAD_DEP |
  				      GEN7_PSD_SINGLE_PORT_DISPATCH_ENABLE));
12f3382bc   Jesse Barnes   drm/i915: impleme...
5575

4e04632e8   Akash Goel   drm/i915/vlv:Impl...
5576
5577
  	/* WaDisable_RenderCache_OperationalFlush:vlv */
  	I915_WRITE(CACHE_MODE_0_GEN7, _MASKED_BIT_DISABLE(RC_OP_FLUSH_ENABLE));
ecdb4eb71   Damien Lespiau   drm/i915: Add pla...
5578
  	/* WaForceL3Serialization:vlv */
61939d977   Jesse Barnes   drm/i915: impleme...
5579
5580
  	I915_WRITE(GEN7_L3SQCREG4, I915_READ(GEN7_L3SQCREG4) &
  		   ~L3SQ_URB_READ_CAM_MATCH_DISABLE);
ecdb4eb71   Damien Lespiau   drm/i915: Add pla...
5581
  	/* WaDisableDopClockGating:vlv */
8ab439764   Jesse Barnes   drm/i915: impleme...
5582
5583
  	I915_WRITE(GEN7_ROW_CHICKEN2,
  		   _MASKED_BIT_ENABLE(DOP_CLOCK_GATING_DISABLE));
ecdb4eb71   Damien Lespiau   drm/i915: Add pla...
5584
  	/* This is required by WaCatErrorRejectionIssue:vlv */
6f1d69b04   Eugeni Dodonov   drm/i915: move cl...
5585
5586
5587
  	I915_WRITE(GEN7_SQ_CHICKEN_MBCUNIT_CONFIG,
  		   I915_READ(GEN7_SQ_CHICKEN_MBCUNIT_CONFIG) |
  		   GEN7_SQ_CHICKEN_MBCUNIT_SQINTMOB);
46680e0a4   Ville Syrjälä   drm/i915: VLV wan...
5588
  	gen7_setup_fixed_func_scheduler(dev_priv);
3c0edaebb   Ville Syrjälä   drm/i915: Drop Wa...
5589
  	/*
0f846f81a   Jesse Barnes   drm/i915: disable...
5590
  	 * According to the spec, bit 13 (RCZUNIT) must be set on IVB.
ecdb4eb71   Damien Lespiau   drm/i915: Add pla...
5591
  	 * This implements the WaDisableRCZUnitClockGating:vlv workaround.
0f846f81a   Jesse Barnes   drm/i915: disable...
5592
5593
  	 */
  	I915_WRITE(GEN6_UCGCTL2,
3c0edaebb   Ville Syrjälä   drm/i915: Drop Wa...
5594
  		   GEN6_RCZUNIT_CLOCK_GATE_DISABLE);
0f846f81a   Jesse Barnes   drm/i915: disable...
5595

c98f50628   Akash Goel   drm/i915/vlv: Mod...
5596
5597
5598
5599
5600
  	/* WaDisableL3Bank2xClockGate:vlv
  	 * Disabling L3 clock gating- MMIO 940c[25] = 1
  	 * Set bit 25, to disable L3_BANK_2x_CLK_GATING */
  	I915_WRITE(GEN7_UCGCTL4,
  		   I915_READ(GEN7_UCGCTL4) | GEN7_L3BANK2X_CLOCK_GATE_DISABLE);
e3f33d46f   Jesse Barnes   drm/i915: add L3 ...
5601

e0d8d59b0   Ville Syrjälä   drm/i915: Try har...
5602
  	I915_WRITE(MI_ARB_VLV, MI_ARB_DISPLAY_TRICKLE_FEED_DISABLE);
6f1d69b04   Eugeni Dodonov   drm/i915: move cl...
5603

afd58e79f   Ville Syrjälä   drm/i915: Clarify...
5604
5605
5606
5607
  	/*
  	 * BSpec says this must be set, even though
  	 * WaDisable4x2SubspanOptimization isn't listed for VLV.
  	 */
6b26c86d6   Daniel Vetter   drm/i915: create ...
5608
5609
  	I915_WRITE(CACHE_MODE_1,
  		   _MASKED_BIT_ENABLE(PIXEL_SUBSPAN_COLLECT_OPT_DISABLE));
7983117f0   Jesse Barnes   drm/i915: enable ...
5610
5611
  
  	/*
031994ee8   Ville Syrjälä   drm/i915: Impleme...
5612
5613
5614
5615
5616
5617
  	 * WaIncreaseL3CreditsForVLVB0:vlv
  	 * This is the hardware default actually.
  	 */
  	I915_WRITE(GEN7_L3SQCREG1, VLV_B0_WA_L3SQCREG1_VALUE);
  
  	/*
ecdb4eb71   Damien Lespiau   drm/i915: Add pla...
5618
  	 * WaDisableVLVClockGating_VBIIssue:vlv
2d809570c   Jesse Barnes   drm/i915: impleme...
5619
5620
5621
  	 * Disable clock gating on th GCFG unit to prevent a delay
  	 * in the reporting of vblank events.
  	 */
7a0d1eedd   Ville Syrjälä   Revert "drm/i915:...
5622
  	I915_WRITE(VLV_GUNIT_CLOCK_GATE, GCFG_DIS);
6f1d69b04   Eugeni Dodonov   drm/i915: move cl...
5623
  }
a4565da8a   Ville Syrjälä   drm/i915/chv: Ini...
5624
5625
5626
5627
5628
5629
5630
  static void cherryview_init_clock_gating(struct drm_device *dev)
  {
  	struct drm_i915_private *dev_priv = dev->dev_private;
  
  	I915_WRITE(DSPCLK_GATE_D, VRHUNIT_CLOCK_GATE_DISABLE);
  
  	I915_WRITE(MI_ARB_VLV, MI_ARB_DISPLAY_TRICKLE_FEED_DISABLE);
dd811e70c   Ville Syrjälä   drm/i915/chv: Imp...
5631

232ce3374   Ville Syrjälä   drm/i915/chv: Imp...
5632
5633
5634
5635
5636
  	/* WaVSRefCountFullforceMissDisable:chv */
  	/* WaDSRefCountFullforceMissDisable:chv */
  	I915_WRITE(GEN7_FF_THREAD_MODE,
  		   I915_READ(GEN7_FF_THREAD_MODE) &
  		   ~(GEN8_FF_DS_REF_CNT_FFME | GEN7_FF_VS_REF_CNT_FFME));
acea6f957   Ville Syrjälä   drm/i915/chv: Imp...
5637
5638
5639
5640
  
  	/* WaDisableSemaphoreAndSyncFlipWait:chv */
  	I915_WRITE(GEN6_RC_SLEEP_PSMI_CONTROL,
  		   _MASKED_BIT_ENABLE(GEN8_RC_SEMA_IDLE_MSG_DISABLE));
0846697c6   Ville Syrjälä   drm/i915/chv: Imp...
5641
5642
5643
5644
  
  	/* WaDisableCSUnitClockGating:chv */
  	I915_WRITE(GEN6_UCGCTL1, I915_READ(GEN6_UCGCTL1) |
  		   GEN6_CSUNIT_CLOCK_GATE_DISABLE);
c631780fc   Ville Syrjälä   drm/i915/chv: Imp...
5645
5646
5647
5648
  
  	/* WaDisableSDEUnitClockGating:chv */
  	I915_WRITE(GEN8_UCGCTL6, I915_READ(GEN8_UCGCTL6) |
  		   GEN8_SDEUNIT_CLOCK_GATE_DISABLE);
e0d34ce7d   Rafael Barbalho   drm/i915/chv: Imp...
5649

e4443e459   Ville Syrjälä   drm/i915/chv: Add...
5650
5651
5652
5653
5654
5655
5656
5657
5658
  	/* WaDisableGunitClockGating:chv (pre-production hw) */
  	I915_WRITE(VLV_GUNIT_CLOCK_GATE, I915_READ(VLV_GUNIT_CLOCK_GATE) |
  		   GINT_DIS);
  
  	/* WaDisableFfDopClockGating:chv (pre-production hw) */
  	I915_WRITE(GEN6_RC_SLEEP_PSMI_CONTROL,
  		   _MASKED_BIT_ENABLE(GEN8_FF_DOP_CLOCK_GATE_DISABLE));
  
  	/* WaDisableDopClockGating:chv (pre-production hw) */
e4443e459   Ville Syrjälä   drm/i915/chv: Add...
5659
5660
  	I915_WRITE(GEN6_UCGCTL1, I915_READ(GEN6_UCGCTL1) |
  		   GEN6_EU_TCUNIT_CLOCK_GATE_DISABLE);
a4565da8a   Ville Syrjälä   drm/i915/chv: Ini...
5661
  }
1fa611065   Eugeni Dodonov   drm/i915: add gen...
5662
  static void g4x_init_clock_gating(struct drm_device *dev)
6f1d69b04   Eugeni Dodonov   drm/i915: move cl...
5663
5664
5665
5666
5667
5668
5669
5670
5671
5672
5673
5674
5675
5676
5677
  {
  	struct drm_i915_private *dev_priv = dev->dev_private;
  	uint32_t dspclk_gate;
  
  	I915_WRITE(RENCLK_GATE_D1, 0);
  	I915_WRITE(RENCLK_GATE_D2, VF_UNIT_CLOCK_GATE_DISABLE |
  		   GS_UNIT_CLOCK_GATE_DISABLE |
  		   CL_UNIT_CLOCK_GATE_DISABLE);
  	I915_WRITE(RAMCLK_GATE_D, 0);
  	dspclk_gate = VRHUNIT_CLOCK_GATE_DISABLE |
  		OVRUNIT_CLOCK_GATE_DISABLE |
  		OVCUNIT_CLOCK_GATE_DISABLE;
  	if (IS_GM45(dev))
  		dspclk_gate |= DSSUNIT_CLOCK_GATE_DISABLE;
  	I915_WRITE(DSPCLK_GATE_D, dspclk_gate);
4358a3748   Daniel Vetter   drm/i915: impleme...
5678
5679
5680
5681
  
  	/* WaDisableRenderCachePipelinedFlush */
  	I915_WRITE(CACHE_MODE_0,
  		   _MASKED_BIT_ENABLE(CM0_PIPELINED_RENDER_FLUSH_DISABLE));
de1aa629a   Ville Syrjälä   drm/i915: Disable...
5682

4e04632e8   Akash Goel   drm/i915/vlv:Impl...
5683
5684
  	/* WaDisable_RenderCache_OperationalFlush:g4x */
  	I915_WRITE(CACHE_MODE_0, _MASKED_BIT_DISABLE(RC_OP_FLUSH_ENABLE));
0e088b8f3   Ville Syrjälä   drm/i915: Refacto...
5685
  	g4x_disable_trickle_feed(dev);
6f1d69b04   Eugeni Dodonov   drm/i915: move cl...
5686
  }
1fa611065   Eugeni Dodonov   drm/i915: add gen...
5687
  static void crestline_init_clock_gating(struct drm_device *dev)
6f1d69b04   Eugeni Dodonov   drm/i915: move cl...
5688
5689
5690
5691
5692
5693
5694
5695
  {
  	struct drm_i915_private *dev_priv = dev->dev_private;
  
  	I915_WRITE(RENCLK_GATE_D1, I965_RCC_CLOCK_GATE_DISABLE);
  	I915_WRITE(RENCLK_GATE_D2, 0);
  	I915_WRITE(DSPCLK_GATE_D, 0);
  	I915_WRITE(RAMCLK_GATE_D, 0);
  	I915_WRITE16(DEUC, 0);
20f949670   Ville Syrjälä   drm/i915: Disable...
5696
5697
  	I915_WRITE(MI_ARB_STATE,
  		   _MASKED_BIT_ENABLE(MI_ARB_DISPLAY_TRICKLE_FEED_DISABLE));
4e04632e8   Akash Goel   drm/i915/vlv:Impl...
5698
5699
5700
  
  	/* WaDisable_RenderCache_OperationalFlush:gen4 */
  	I915_WRITE(CACHE_MODE_0, _MASKED_BIT_DISABLE(RC_OP_FLUSH_ENABLE));
6f1d69b04   Eugeni Dodonov   drm/i915: move cl...
5701
  }
1fa611065   Eugeni Dodonov   drm/i915: add gen...
5702
  static void broadwater_init_clock_gating(struct drm_device *dev)
6f1d69b04   Eugeni Dodonov   drm/i915: move cl...
5703
5704
5705
5706
5707
5708
5709
5710
5711
  {
  	struct drm_i915_private *dev_priv = dev->dev_private;
  
  	I915_WRITE(RENCLK_GATE_D1, I965_RCZ_CLOCK_GATE_DISABLE |
  		   I965_RCC_CLOCK_GATE_DISABLE |
  		   I965_RCPB_CLOCK_GATE_DISABLE |
  		   I965_ISC_CLOCK_GATE_DISABLE |
  		   I965_FBC_CLOCK_GATE_DISABLE);
  	I915_WRITE(RENCLK_GATE_D2, 0);
20f949670   Ville Syrjälä   drm/i915: Disable...
5712
5713
  	I915_WRITE(MI_ARB_STATE,
  		   _MASKED_BIT_ENABLE(MI_ARB_DISPLAY_TRICKLE_FEED_DISABLE));
4e04632e8   Akash Goel   drm/i915/vlv:Impl...
5714
5715
5716
  
  	/* WaDisable_RenderCache_OperationalFlush:gen4 */
  	I915_WRITE(CACHE_MODE_0, _MASKED_BIT_DISABLE(RC_OP_FLUSH_ENABLE));
6f1d69b04   Eugeni Dodonov   drm/i915: move cl...
5717
  }
1fa611065   Eugeni Dodonov   drm/i915: add gen...
5718
  static void gen3_init_clock_gating(struct drm_device *dev)
6f1d69b04   Eugeni Dodonov   drm/i915: move cl...
5719
5720
5721
5722
5723
5724
5725
  {
  	struct drm_i915_private *dev_priv = dev->dev_private;
  	u32 dstate = I915_READ(D_STATE);
  
  	dstate |= DSTATE_PLL_D3_OFF | DSTATE_GFX_CLOCK_GATING |
  		DSTATE_DOT_CLOCK_GATING;
  	I915_WRITE(D_STATE, dstate);
13a86b85a   Chris Wilson   drm/i915: CR cloc...
5726
5727
5728
  
  	if (IS_PINEVIEW(dev))
  		I915_WRITE(ECOSKPD, _MASKED_BIT_ENABLE(ECO_GATING_CX_ONLY));
974a3b0f9   Daniel Vetter   drm/i915: set the...
5729
5730
5731
  
  	/* IIR "flip pending" means done if this bit is set */
  	I915_WRITE(ECOSKPD, _MASKED_BIT_DISABLE(ECO_FLIP_DONE));
12fabbcb9   Ville Syrjälä   drm/i915: Set AGP...
5732
5733
  
  	/* interrupts should cause a wake up from C3 */
3299254ff   Ville Syrjälä   drm/i915: Flip th...
5734
  	I915_WRITE(INSTPM, _MASKED_BIT_ENABLE(INSTPM_AGPBUSY_INT_EN));
dbb42748a   Ville Syrjälä   drm/i915: Move th...
5735
5736
5737
  
  	/* On GEN3 we really need to make sure the ARB C3 LP bit is set */
  	I915_WRITE(MI_ARB_STATE, _MASKED_BIT_ENABLE(MI_ARB_C3_LP_WRITE_ENABLE));
1038392b4   Ville Syrjälä   drm/i915: Disable...
5738
5739
5740
  
  	I915_WRITE(MI_ARB_STATE,
  		   _MASKED_BIT_ENABLE(MI_ARB_DISPLAY_TRICKLE_FEED_DISABLE));
6f1d69b04   Eugeni Dodonov   drm/i915: move cl...
5741
  }
1fa611065   Eugeni Dodonov   drm/i915: add gen...
5742
  static void i85x_init_clock_gating(struct drm_device *dev)
6f1d69b04   Eugeni Dodonov   drm/i915: move cl...
5743
5744
5745
5746
  {
  	struct drm_i915_private *dev_priv = dev->dev_private;
  
  	I915_WRITE(RENCLK_GATE_D1, SV_CLOCK_GATE_DISABLE);
54e472ae9   Ville Syrjälä   drm/i915: Enable ...
5747
5748
5749
5750
  
  	/* interrupts should cause a wake up from C3 */
  	I915_WRITE(MI_STATE, _MASKED_BIT_ENABLE(MI_AGPBUSY_INT_EN) |
  		   _MASKED_BIT_DISABLE(MI_AGPBUSY_830_MODE));
1038392b4   Ville Syrjälä   drm/i915: Disable...
5751
5752
5753
  
  	I915_WRITE(MEM_MODE,
  		   _MASKED_BIT_ENABLE(MEM_DISPLAY_TRICKLE_FEED_DISABLE));
6f1d69b04   Eugeni Dodonov   drm/i915: move cl...
5754
  }
1fa611065   Eugeni Dodonov   drm/i915: add gen...
5755
  static void i830_init_clock_gating(struct drm_device *dev)
6f1d69b04   Eugeni Dodonov   drm/i915: move cl...
5756
5757
5758
5759
  {
  	struct drm_i915_private *dev_priv = dev->dev_private;
  
  	I915_WRITE(DSPCLK_GATE_D, OVRUNIT_CLOCK_GATE_DISABLE);
1038392b4   Ville Syrjälä   drm/i915: Disable...
5760
5761
5762
5763
  
  	I915_WRITE(MEM_MODE,
  		   _MASKED_BIT_ENABLE(MEM_DISPLAY_A_TRICKLE_FEED_DISABLE) |
  		   _MASKED_BIT_ENABLE(MEM_DISPLAY_B_TRICKLE_FEED_DISABLE));
6f1d69b04   Eugeni Dodonov   drm/i915: move cl...
5764
  }
6f1d69b04   Eugeni Dodonov   drm/i915: move cl...
5765
5766
5767
5768
5769
  void intel_init_clock_gating(struct drm_device *dev)
  {
  	struct drm_i915_private *dev_priv = dev->dev_private;
  
  	dev_priv->display.init_clock_gating(dev);
6f1d69b04   Eugeni Dodonov   drm/i915: move cl...
5770
  }
7d708ee40   Imre Deak   drm/i915: HSW: al...
5771
5772
5773
5774
5775
  void intel_suspend_hw(struct drm_device *dev)
  {
  	if (HAS_PCH_LPT(dev))
  		lpt_suspend_hw(dev);
  }
c1ca727f8   Imre Deak   drm/i915: support...
5776
5777
5778
5779
5780
5781
5782
5783
5784
5785
5786
5787
  #define for_each_power_well(i, power_well, domain_mask, power_domains)	\
  	for (i = 0;							\
  	     i < (power_domains)->power_well_count &&			\
  		 ((power_well) = &(power_domains)->power_wells[i]);	\
  	     i++)							\
  		if ((power_well)->domains & (domain_mask))
  
  #define for_each_power_well_rev(i, power_well, domain_mask, power_domains) \
  	for (i = (power_domains)->power_well_count - 1;			 \
  	     i >= 0 && ((power_well) = &(power_domains)->power_wells[i]);\
  	     i--)							 \
  		if ((power_well)->domains & (domain_mask))
15d199ea1   Paulo Zanoni   drm/i915: add int...
5788
5789
5790
5791
5792
  /**
   * We should only use the power well if we explicitly asked the hardware to
   * enable it, so check if it's enabled and also check if we've requested it to
   * be enabled.
   */
da7e29bd5   Imre Deak   drm/i915: use drm...
5793
  static bool hsw_power_well_enabled(struct drm_i915_private *dev_priv,
c1ca727f8   Imre Deak   drm/i915: support...
5794
5795
  				   struct i915_power_well *power_well)
  {
c1ca727f8   Imre Deak   drm/i915: support...
5796
5797
5798
  	return I915_READ(HSW_PWR_WELL_DRIVER) ==
  		     (HSW_PWR_WELL_ENABLE_REQUEST | HSW_PWR_WELL_STATE_ENABLED);
  }
bfafe93a1   Imre Deak   drm/i915: cache h...
5799
5800
  bool intel_display_power_enabled_unlocked(struct drm_i915_private *dev_priv,
  					  enum intel_display_power_domain domain)
ddf9c5362   Imre Deak   drm/i915: add int...
5801
  {
ddf9c5362   Imre Deak   drm/i915: add int...
5802
  	struct i915_power_domains *power_domains;
b8c000d9b   Imre Deak   drm/i915: fix dis...
5803
5804
5805
5806
5807
5808
  	struct i915_power_well *power_well;
  	bool is_enabled;
  	int i;
  
  	if (dev_priv->pm.suspended)
  		return false;
ddf9c5362   Imre Deak   drm/i915: add int...
5809
5810
  
  	power_domains = &dev_priv->power_domains;
bfafe93a1   Imre Deak   drm/i915: cache h...
5811

b8c000d9b   Imre Deak   drm/i915: fix dis...
5812
  	is_enabled = true;
bfafe93a1   Imre Deak   drm/i915: cache h...
5813

b8c000d9b   Imre Deak   drm/i915: fix dis...
5814
5815
5816
  	for_each_power_well_rev(i, power_well, BIT(domain), power_domains) {
  		if (power_well->always_on)
  			continue;
ddf9c5362   Imre Deak   drm/i915: add int...
5817

bfafe93a1   Imre Deak   drm/i915: cache h...
5818
  		if (!power_well->hw_enabled) {
b8c000d9b   Imre Deak   drm/i915: fix dis...
5819
5820
5821
5822
  			is_enabled = false;
  			break;
  		}
  	}
bfafe93a1   Imre Deak   drm/i915: cache h...
5823

b8c000d9b   Imre Deak   drm/i915: fix dis...
5824
  	return is_enabled;
ddf9c5362   Imre Deak   drm/i915: add int...
5825
  }
da7e29bd5   Imre Deak   drm/i915: use drm...
5826
  bool intel_display_power_enabled(struct drm_i915_private *dev_priv,
b97186f0d   Paulo Zanoni   drm/i915: add int...
5827
  				 enum intel_display_power_domain domain)
15d199ea1   Paulo Zanoni   drm/i915: add int...
5828
  {
c1ca727f8   Imre Deak   drm/i915: support...
5829
  	struct i915_power_domains *power_domains;
bfafe93a1   Imre Deak   drm/i915: cache h...
5830
  	bool ret;
882244a34   Paulo Zanoni   drm/i915: fix WAR...
5831

c1ca727f8   Imre Deak   drm/i915: support...
5832
  	power_domains = &dev_priv->power_domains;
c1ca727f8   Imre Deak   drm/i915: support...
5833
  	mutex_lock(&power_domains->lock);
bfafe93a1   Imre Deak   drm/i915: cache h...
5834
  	ret = intel_display_power_enabled_unlocked(dev_priv, domain);
c1ca727f8   Imre Deak   drm/i915: support...
5835
  	mutex_unlock(&power_domains->lock);
bfafe93a1   Imre Deak   drm/i915: cache h...
5836
  	return ret;
15d199ea1   Paulo Zanoni   drm/i915: add int...
5837
  }
93c73e8c6   Imre Deak   drm/i915: move hs...
5838
5839
5840
5841
5842
5843
  /*
   * Starting with Haswell, we have a "Power Down Well" that can be turned off
   * when not needed anymore. We have 4 registers that can request the power well
   * to be enabled, and it will only be disabled if none of the registers is
   * requesting it to be enabled.
   */
d5e8fdc8c   Paulo Zanoni   drm/i915: extract...
5844
5845
5846
  static void hsw_power_well_post_enable(struct drm_i915_private *dev_priv)
  {
  	struct drm_device *dev = dev_priv->dev;
d5e8fdc8c   Paulo Zanoni   drm/i915: extract...
5847

f9dcb0dfe   Paulo Zanoni   drm/i915: touch V...
5848
5849
5850
5851
5852
5853
5854
5855
5856
5857
5858
5859
5860
  	/*
  	 * After we re-enable the power well, if we touch VGA register 0x3d5
  	 * we'll get unclaimed register interrupts. This stops after we write
  	 * anything to the VGA MSR register. The vgacon module uses this
  	 * register all the time, so if we unbind our driver and, as a
  	 * consequence, bind vgacon, we'll get stuck in an infinite loop at
  	 * console_unlock(). So make here we touch the VGA MSR register, making
  	 * sure vgacon can keep working normally without triggering interrupts
  	 * and error messages.
  	 */
  	vga_get_uninterruptible(dev->pdev, VGA_RSRC_LEGACY_IO);
  	outb(inb(VGA_MSR_READ), VGA_MSR_WRITE);
  	vga_put(dev->pdev, VGA_RSRC_LEGACY_IO);
d49bdb0e1   Paulo Zanoni   drm/i915: extract...
5861
5862
  	if (IS_BROADWELL(dev))
  		gen8_irq_power_well_post_enable(dev_priv);
d5e8fdc8c   Paulo Zanoni   drm/i915: extract...
5863
  }
da7e29bd5   Imre Deak   drm/i915: use drm...
5864
  static void hsw_set_power_well(struct drm_i915_private *dev_priv,
c1ca727f8   Imre Deak   drm/i915: support...
5865
  			       struct i915_power_well *power_well, bool enable)
d0d3e5136   Eugeni Dodonov   drm/i915: enable ...
5866
  {
fa42e23c1   Paulo Zanoni   drm/i915: fix int...
5867
5868
  	bool is_enabled, enable_requested;
  	uint32_t tmp;
d0d3e5136   Eugeni Dodonov   drm/i915: enable ...
5869

fa42e23c1   Paulo Zanoni   drm/i915: fix int...
5870
  	tmp = I915_READ(HSW_PWR_WELL_DRIVER);
6aedd1f53   Paulo Zanoni   drm/i915: clarify...
5871
5872
  	is_enabled = tmp & HSW_PWR_WELL_STATE_ENABLED;
  	enable_requested = tmp & HSW_PWR_WELL_ENABLE_REQUEST;
d0d3e5136   Eugeni Dodonov   drm/i915: enable ...
5873

fa42e23c1   Paulo Zanoni   drm/i915: fix int...
5874
5875
  	if (enable) {
  		if (!enable_requested)
6aedd1f53   Paulo Zanoni   drm/i915: clarify...
5876
5877
  			I915_WRITE(HSW_PWR_WELL_DRIVER,
  				   HSW_PWR_WELL_ENABLE_REQUEST);
d0d3e5136   Eugeni Dodonov   drm/i915: enable ...
5878

fa42e23c1   Paulo Zanoni   drm/i915: fix int...
5879
5880
5881
5882
  		if (!is_enabled) {
  			DRM_DEBUG_KMS("Enabling power well
  ");
  			if (wait_for((I915_READ(HSW_PWR_WELL_DRIVER) &
6aedd1f53   Paulo Zanoni   drm/i915: clarify...
5883
  				      HSW_PWR_WELL_STATE_ENABLED), 20))
fa42e23c1   Paulo Zanoni   drm/i915: fix int...
5884
5885
5886
  				DRM_ERROR("Timeout enabling power well
  ");
  		}
596cc11e7   Ben Widawsky   drm/i915/bdw: PIP...
5887

d5e8fdc8c   Paulo Zanoni   drm/i915: extract...
5888
  		hsw_power_well_post_enable(dev_priv);
fa42e23c1   Paulo Zanoni   drm/i915: fix int...
5889
5890
5891
  	} else {
  		if (enable_requested) {
  			I915_WRITE(HSW_PWR_WELL_DRIVER, 0);
9dbd8febb   Paulo Zanoni   drm/i915: update ...
5892
  			POSTING_READ(HSW_PWR_WELL_DRIVER);
fa42e23c1   Paulo Zanoni   drm/i915: fix int...
5893
5894
  			DRM_DEBUG_KMS("Requesting to disable the power well
  ");
d0d3e5136   Eugeni Dodonov   drm/i915: enable ...
5895
5896
  		}
  	}
fa42e23c1   Paulo Zanoni   drm/i915: fix int...
5897
  }
d0d3e5136   Eugeni Dodonov   drm/i915: enable ...
5898

c6cb582e6   Imre Deak   drm/i915: split p...
5899
5900
5901
5902
5903
5904
5905
5906
5907
5908
5909
5910
5911
5912
5913
5914
  static void hsw_power_well_sync_hw(struct drm_i915_private *dev_priv,
  				   struct i915_power_well *power_well)
  {
  	hsw_set_power_well(dev_priv, power_well, power_well->count > 0);
  
  	/*
  	 * We're taking over the BIOS, so clear any requests made by it since
  	 * the driver is in charge now.
  	 */
  	if (I915_READ(HSW_PWR_WELL_BIOS) & HSW_PWR_WELL_ENABLE_REQUEST)
  		I915_WRITE(HSW_PWR_WELL_BIOS, 0);
  }
  
  static void hsw_power_well_enable(struct drm_i915_private *dev_priv,
  				  struct i915_power_well *power_well)
  {
c6cb582e6   Imre Deak   drm/i915: split p...
5915
5916
5917
5918
5919
5920
5921
  	hsw_set_power_well(dev_priv, power_well, true);
  }
  
  static void hsw_power_well_disable(struct drm_i915_private *dev_priv,
  				   struct i915_power_well *power_well)
  {
  	hsw_set_power_well(dev_priv, power_well, false);
c6cb582e6   Imre Deak   drm/i915: split p...
5922
  }
a45f4466e   Imre Deak   drm/i915: add noo...
5923
5924
5925
5926
5927
5928
5929
5930
5931
5932
  static void i9xx_always_on_power_well_noop(struct drm_i915_private *dev_priv,
  					   struct i915_power_well *power_well)
  {
  }
  
  static bool i9xx_always_on_power_well_enabled(struct drm_i915_private *dev_priv,
  					     struct i915_power_well *power_well)
  {
  	return true;
  }
d2011dc8d   Ville Syrjälä   drm/i915: Move VL...
5933
5934
  static void vlv_set_power_well(struct drm_i915_private *dev_priv,
  			       struct i915_power_well *power_well, bool enable)
77961eb98   Imre Deak   drm/i915: power d...
5935
  {
d2011dc8d   Ville Syrjälä   drm/i915: Move VL...
5936
  	enum punit_power_well power_well_id = power_well->data;
77961eb98   Imre Deak   drm/i915: power d...
5937
5938
5939
5940
5941
5942
5943
5944
5945
5946
5947
5948
5949
5950
5951
5952
5953
5954
5955
5956
5957
5958
5959
5960
5961
5962
5963
5964
5965
5966
5967
5968
5969
5970
5971
5972
5973
5974
5975
5976
5977
5978
5979
5980
5981
5982
5983
5984
5985
5986
5987
5988
5989
5990
5991
5992
5993
5994
5995
5996
5997
5998
5999
6000
6001
6002
6003
6004
6005
6006
6007
6008
6009
6010
6011
6012
6013
6014
6015
6016
6017
6018
6019
6020
6021
6022
6023
6024
6025
6026
6027
6028
6029
6030
6031
6032
6033
6034
6035
  	u32 mask;
  	u32 state;
  	u32 ctrl;
  
  	mask = PUNIT_PWRGT_MASK(power_well_id);
  	state = enable ? PUNIT_PWRGT_PWR_ON(power_well_id) :
  			 PUNIT_PWRGT_PWR_GATE(power_well_id);
  
  	mutex_lock(&dev_priv->rps.hw_lock);
  
  #define COND \
  	((vlv_punit_read(dev_priv, PUNIT_REG_PWRGT_STATUS) & mask) == state)
  
  	if (COND)
  		goto out;
  
  	ctrl = vlv_punit_read(dev_priv, PUNIT_REG_PWRGT_CTRL);
  	ctrl &= ~mask;
  	ctrl |= state;
  	vlv_punit_write(dev_priv, PUNIT_REG_PWRGT_CTRL, ctrl);
  
  	if (wait_for(COND, 100))
  		DRM_ERROR("timout setting power well state %08x (%08x)
  ",
  			  state,
  			  vlv_punit_read(dev_priv, PUNIT_REG_PWRGT_CTRL));
  
  #undef COND
  
  out:
  	mutex_unlock(&dev_priv->rps.hw_lock);
  }
  
  static void vlv_power_well_sync_hw(struct drm_i915_private *dev_priv,
  				   struct i915_power_well *power_well)
  {
  	vlv_set_power_well(dev_priv, power_well, power_well->count > 0);
  }
  
  static void vlv_power_well_enable(struct drm_i915_private *dev_priv,
  				  struct i915_power_well *power_well)
  {
  	vlv_set_power_well(dev_priv, power_well, true);
  }
  
  static void vlv_power_well_disable(struct drm_i915_private *dev_priv,
  				   struct i915_power_well *power_well)
  {
  	vlv_set_power_well(dev_priv, power_well, false);
  }
  
  static bool vlv_power_well_enabled(struct drm_i915_private *dev_priv,
  				   struct i915_power_well *power_well)
  {
  	int power_well_id = power_well->data;
  	bool enabled = false;
  	u32 mask;
  	u32 state;
  	u32 ctrl;
  
  	mask = PUNIT_PWRGT_MASK(power_well_id);
  	ctrl = PUNIT_PWRGT_PWR_ON(power_well_id);
  
  	mutex_lock(&dev_priv->rps.hw_lock);
  
  	state = vlv_punit_read(dev_priv, PUNIT_REG_PWRGT_STATUS) & mask;
  	/*
  	 * We only ever set the power-on and power-gate states, anything
  	 * else is unexpected.
  	 */
  	WARN_ON(state != PUNIT_PWRGT_PWR_ON(power_well_id) &&
  		state != PUNIT_PWRGT_PWR_GATE(power_well_id));
  	if (state == ctrl)
  		enabled = true;
  
  	/*
  	 * A transient state at this point would mean some unexpected party
  	 * is poking at the power controls too.
  	 */
  	ctrl = vlv_punit_read(dev_priv, PUNIT_REG_PWRGT_CTRL) & mask;
  	WARN_ON(ctrl != state);
  
  	mutex_unlock(&dev_priv->rps.hw_lock);
  
  	return enabled;
  }
  
  static void vlv_display_power_well_enable(struct drm_i915_private *dev_priv,
  					  struct i915_power_well *power_well)
  {
  	WARN_ON_ONCE(power_well->data != PUNIT_POWER_WELL_DISP2D);
  
  	vlv_set_power_well(dev_priv, power_well, true);
  
  	spin_lock_irq(&dev_priv->irq_lock);
  	valleyview_enable_display_irqs(dev_priv);
  	spin_unlock_irq(&dev_priv->irq_lock);
  
  	/*
0d116a29a   Imre Deak   drm/i915: vlv: in...
6036
6037
  	 * During driver initialization/resume we can avoid restoring the
  	 * part of the HW/SW state that will be inited anyway explicitly.
77961eb98   Imre Deak   drm/i915: power d...
6038
  	 */
0d116a29a   Imre Deak   drm/i915: vlv: in...
6039
6040
6041
6042
  	if (dev_priv->power_domains.initializing)
  		return;
  
  	intel_hpd_init(dev_priv->dev);
77961eb98   Imre Deak   drm/i915: power d...
6043
6044
6045
6046
6047
6048
6049
  
  	i915_redisable_vga_power_on(dev_priv->dev);
  }
  
  static void vlv_display_power_well_disable(struct drm_i915_private *dev_priv,
  					   struct i915_power_well *power_well)
  {
77961eb98   Imre Deak   drm/i915: power d...
6050
6051
6052
  	WARN_ON_ONCE(power_well->data != PUNIT_POWER_WELL_DISP2D);
  
  	spin_lock_irq(&dev_priv->irq_lock);
77961eb98   Imre Deak   drm/i915: power d...
6053
6054
  	valleyview_disable_display_irqs(dev_priv);
  	spin_unlock_irq(&dev_priv->irq_lock);
77961eb98   Imre Deak   drm/i915: power d...
6055
  	vlv_set_power_well(dev_priv, power_well, false);
773538e86   Ville Syrjälä   drm/i915: Reset p...
6056
6057
  
  	vlv_power_sequencer_reset(dev_priv);
77961eb98   Imre Deak   drm/i915: power d...
6058
  }
aa519f231   Ville Syrjälä   drm/i915: Pull th...
6059
6060
6061
6062
6063
6064
6065
6066
6067
6068
6069
6070
6071
6072
6073
6074
6075
6076
6077
6078
6079
6080
6081
6082
6083
6084
6085
6086
6087
6088
6089
6090
6091
  static void vlv_dpio_cmn_power_well_enable(struct drm_i915_private *dev_priv,
  					   struct i915_power_well *power_well)
  {
  	WARN_ON_ONCE(power_well->data != PUNIT_POWER_WELL_DPIO_CMN_BC);
  
  	/*
  	 * Enable the CRI clock source so we can get at the
  	 * display and the reference clock for VGA
  	 * hotplug / manual detection.
  	 */
  	I915_WRITE(DPLL(PIPE_B), I915_READ(DPLL(PIPE_B)) |
  		   DPLL_REFA_CLK_ENABLE_VLV | DPLL_INTEGRATED_CRI_CLK_VLV);
  	udelay(1); /* >10ns for cmnreset, >0ns for sidereset */
  
  	vlv_set_power_well(dev_priv, power_well, true);
  
  	/*
  	 * From VLV2A0_DP_eDP_DPIO_driver_vbios_notes_10.docx -
  	 *  6.	De-assert cmn_reset/side_reset. Same as VLV X0.
  	 *   a.	GUnit 0x2110 bit[0] set to 1 (def 0)
  	 *   b.	The other bits such as sfr settings / modesel may all
  	 *	be set to 0.
  	 *
  	 * This should only be done on init and resume from S3 with
  	 * both PLLs disabled, or we risk losing DPIO and PLL
  	 * synchronization.
  	 */
  	I915_WRITE(DPIO_CTL, I915_READ(DPIO_CTL) | DPIO_CMNRST);
  }
  
  static void vlv_dpio_cmn_power_well_disable(struct drm_i915_private *dev_priv,
  					    struct i915_power_well *power_well)
  {
aa519f231   Ville Syrjälä   drm/i915: Pull th...
6092
6093
6094
  	enum pipe pipe;
  
  	WARN_ON_ONCE(power_well->data != PUNIT_POWER_WELL_DPIO_CMN_BC);
055e393fa   Damien Lespiau   drm/i915: Use dev...
6095
  	for_each_pipe(dev_priv, pipe)
aa519f231   Ville Syrjälä   drm/i915: Pull th...
6096
6097
6098
6099
6100
6101
6102
  		assert_pll_disabled(dev_priv, pipe);
  
  	/* Assert common reset */
  	I915_WRITE(DPIO_CTL, I915_READ(DPIO_CTL) & ~DPIO_CMNRST);
  
  	vlv_set_power_well(dev_priv, power_well, false);
  }
5d6f7ea75   Ville Syrjälä   drm/i915: Add chv...
6103
6104
6105
6106
6107
6108
6109
6110
6111
6112
6113
6114
6115
6116
6117
6118
6119
6120
6121
6122
6123
6124
6125
6126
6127
6128
6129
6130
6131
6132
6133
  static void chv_dpio_cmn_power_well_enable(struct drm_i915_private *dev_priv,
  					   struct i915_power_well *power_well)
  {
  	enum dpio_phy phy;
  
  	WARN_ON_ONCE(power_well->data != PUNIT_POWER_WELL_DPIO_CMN_BC &&
  		     power_well->data != PUNIT_POWER_WELL_DPIO_CMN_D);
  
  	/*
  	 * Enable the CRI clock source so we can get at the
  	 * display and the reference clock for VGA
  	 * hotplug / manual detection.
  	 */
  	if (power_well->data == PUNIT_POWER_WELL_DPIO_CMN_BC) {
  		phy = DPIO_PHY0;
  		I915_WRITE(DPLL(PIPE_B), I915_READ(DPLL(PIPE_B)) |
  			   DPLL_REFA_CLK_ENABLE_VLV);
  		I915_WRITE(DPLL(PIPE_B), I915_READ(DPLL(PIPE_B)) |
  			   DPLL_REFA_CLK_ENABLE_VLV | DPLL_INTEGRATED_CRI_CLK_VLV);
  	} else {
  		phy = DPIO_PHY1;
  		I915_WRITE(DPLL(PIPE_C), I915_READ(DPLL(PIPE_C)) |
  			   DPLL_REFA_CLK_ENABLE_VLV | DPLL_INTEGRATED_CRI_CLK_VLV);
  	}
  	udelay(1); /* >10ns for cmnreset, >0ns for sidereset */
  	vlv_set_power_well(dev_priv, power_well, true);
  
  	/* Poll for phypwrgood signal */
  	if (wait_for(I915_READ(DISPLAY_PHY_STATUS) & PHY_POWERGOOD(phy), 1))
  		DRM_ERROR("Display PHY %d is not power up
  ", phy);
efd814b73   Ville Syrjälä   drm/i915: Polish ...
6134
6135
  	I915_WRITE(DISPLAY_PHY_CONTROL, I915_READ(DISPLAY_PHY_CONTROL) |
  		   PHY_COM_LANE_RESET_DEASSERT(phy));
5d6f7ea75   Ville Syrjälä   drm/i915: Add chv...
6136
6137
6138
6139
6140
6141
6142
6143
6144
6145
6146
6147
6148
6149
6150
6151
6152
6153
  }
  
  static void chv_dpio_cmn_power_well_disable(struct drm_i915_private *dev_priv,
  					    struct i915_power_well *power_well)
  {
  	enum dpio_phy phy;
  
  	WARN_ON_ONCE(power_well->data != PUNIT_POWER_WELL_DPIO_CMN_BC &&
  		     power_well->data != PUNIT_POWER_WELL_DPIO_CMN_D);
  
  	if (power_well->data == PUNIT_POWER_WELL_DPIO_CMN_BC) {
  		phy = DPIO_PHY0;
  		assert_pll_disabled(dev_priv, PIPE_A);
  		assert_pll_disabled(dev_priv, PIPE_B);
  	} else {
  		phy = DPIO_PHY1;
  		assert_pll_disabled(dev_priv, PIPE_C);
  	}
efd814b73   Ville Syrjälä   drm/i915: Polish ...
6154
6155
  	I915_WRITE(DISPLAY_PHY_CONTROL, I915_READ(DISPLAY_PHY_CONTROL) &
  		   ~PHY_COM_LANE_RESET_DEASSERT(phy));
5d6f7ea75   Ville Syrjälä   drm/i915: Add chv...
6156
6157
6158
  
  	vlv_set_power_well(dev_priv, power_well, false);
  }
26972b0a8   Ville Syrjälä   drm/i915: Add per...
6159
6160
6161
6162
6163
6164
6165
6166
6167
6168
6169
6170
6171
6172
6173
6174
6175
6176
6177
6178
6179
6180
6181
6182
6183
6184
6185
6186
6187
6188
6189
6190
6191
6192
6193
6194
6195
6196
6197
6198
6199
6200
6201
6202
6203
6204
6205
6206
6207
6208
6209
6210
6211
6212
6213
6214
6215
6216
6217
6218
6219
6220
6221
6222
6223
6224
6225
6226
6227
6228
6229
6230
6231
6232
6233
6234
6235
6236
6237
6238
6239
6240
6241
6242
6243
6244
6245
6246
6247
  static bool chv_pipe_power_well_enabled(struct drm_i915_private *dev_priv,
  					struct i915_power_well *power_well)
  {
  	enum pipe pipe = power_well->data;
  	bool enabled;
  	u32 state, ctrl;
  
  	mutex_lock(&dev_priv->rps.hw_lock);
  
  	state = vlv_punit_read(dev_priv, PUNIT_REG_DSPFREQ) & DP_SSS_MASK(pipe);
  	/*
  	 * We only ever set the power-on and power-gate states, anything
  	 * else is unexpected.
  	 */
  	WARN_ON(state != DP_SSS_PWR_ON(pipe) && state != DP_SSS_PWR_GATE(pipe));
  	enabled = state == DP_SSS_PWR_ON(pipe);
  
  	/*
  	 * A transient state at this point would mean some unexpected party
  	 * is poking at the power controls too.
  	 */
  	ctrl = vlv_punit_read(dev_priv, PUNIT_REG_DSPFREQ) & DP_SSC_MASK(pipe);
  	WARN_ON(ctrl << 16 != state);
  
  	mutex_unlock(&dev_priv->rps.hw_lock);
  
  	return enabled;
  }
  
  static void chv_set_pipe_power_well(struct drm_i915_private *dev_priv,
  				    struct i915_power_well *power_well,
  				    bool enable)
  {
  	enum pipe pipe = power_well->data;
  	u32 state;
  	u32 ctrl;
  
  	state = enable ? DP_SSS_PWR_ON(pipe) : DP_SSS_PWR_GATE(pipe);
  
  	mutex_lock(&dev_priv->rps.hw_lock);
  
  #define COND \
  	((vlv_punit_read(dev_priv, PUNIT_REG_DSPFREQ) & DP_SSS_MASK(pipe)) == state)
  
  	if (COND)
  		goto out;
  
  	ctrl = vlv_punit_read(dev_priv, PUNIT_REG_DSPFREQ);
  	ctrl &= ~DP_SSC_MASK(pipe);
  	ctrl |= enable ? DP_SSC_PWR_ON(pipe) : DP_SSC_PWR_GATE(pipe);
  	vlv_punit_write(dev_priv, PUNIT_REG_DSPFREQ, ctrl);
  
  	if (wait_for(COND, 100))
  		DRM_ERROR("timout setting power well state %08x (%08x)
  ",
  			  state,
  			  vlv_punit_read(dev_priv, PUNIT_REG_DSPFREQ));
  
  #undef COND
  
  out:
  	mutex_unlock(&dev_priv->rps.hw_lock);
  }
  
  static void chv_pipe_power_well_sync_hw(struct drm_i915_private *dev_priv,
  					struct i915_power_well *power_well)
  {
  	chv_set_pipe_power_well(dev_priv, power_well, power_well->count > 0);
  }
  
  static void chv_pipe_power_well_enable(struct drm_i915_private *dev_priv,
  				       struct i915_power_well *power_well)
  {
  	WARN_ON_ONCE(power_well->data != PIPE_A &&
  		     power_well->data != PIPE_B &&
  		     power_well->data != PIPE_C);
  
  	chv_set_pipe_power_well(dev_priv, power_well, true);
  }
  
  static void chv_pipe_power_well_disable(struct drm_i915_private *dev_priv,
  					struct i915_power_well *power_well)
  {
  	WARN_ON_ONCE(power_well->data != PIPE_A &&
  		     power_well->data != PIPE_B &&
  		     power_well->data != PIPE_C);
  
  	chv_set_pipe_power_well(dev_priv, power_well, false);
  }
da7e29bd5   Imre Deak   drm/i915: use drm...
6248
  void intel_display_power_get(struct drm_i915_private *dev_priv,
6765625e0   Ville Syrjälä   drm/i915: Add int...
6249
6250
  			     enum intel_display_power_domain domain)
  {
83c00f553   Imre Deak   drm/i915: prepare...
6251
  	struct i915_power_domains *power_domains;
c1ca727f8   Imre Deak   drm/i915: support...
6252
6253
  	struct i915_power_well *power_well;
  	int i;
6765625e0   Ville Syrjälä   drm/i915: Add int...
6254

9e6ea71a5   Paulo Zanoni   drm/i915: get/put...
6255
  	intel_runtime_pm_get(dev_priv);
83c00f553   Imre Deak   drm/i915: prepare...
6256
6257
6258
  	power_domains = &dev_priv->power_domains;
  
  	mutex_lock(&power_domains->lock);
1da51581b   Imre Deak   drm/i915: add a d...
6259

25eaa003b   Imre Deak   drm/i915: sanity ...
6260
6261
6262
6263
  	for_each_power_well(i, power_well, BIT(domain), power_domains) {
  		if (!power_well->count++) {
  			DRM_DEBUG_KMS("enabling %s
  ", power_well->name);
c6cb582e6   Imre Deak   drm/i915: split p...
6264
  			power_well->ops->enable(dev_priv, power_well);
bfafe93a1   Imre Deak   drm/i915: cache h...
6265
  			power_well->hw_enabled = true;
25eaa003b   Imre Deak   drm/i915: sanity ...
6266
  		}
25eaa003b   Imre Deak   drm/i915: sanity ...
6267
  	}
1da51581b   Imre Deak   drm/i915: add a d...
6268

ddf9c5362   Imre Deak   drm/i915: add int...
6269
  	power_domains->domain_use_count[domain]++;
83c00f553   Imre Deak   drm/i915: prepare...
6270
  	mutex_unlock(&power_domains->lock);
6765625e0   Ville Syrjälä   drm/i915: Add int...
6271
  }
da7e29bd5   Imre Deak   drm/i915: use drm...
6272
  void intel_display_power_put(struct drm_i915_private *dev_priv,
6765625e0   Ville Syrjälä   drm/i915: Add int...
6273
6274
  			     enum intel_display_power_domain domain)
  {
83c00f553   Imre Deak   drm/i915: prepare...
6275
  	struct i915_power_domains *power_domains;
c1ca727f8   Imre Deak   drm/i915: support...
6276
6277
  	struct i915_power_well *power_well;
  	int i;
6765625e0   Ville Syrjälä   drm/i915: Add int...
6278

83c00f553   Imre Deak   drm/i915: prepare...
6279
6280
6281
  	power_domains = &dev_priv->power_domains;
  
  	mutex_lock(&power_domains->lock);
1da51581b   Imre Deak   drm/i915: add a d...
6282

1da51581b   Imre Deak   drm/i915: add a d...
6283
6284
  	WARN_ON(!power_domains->domain_use_count[domain]);
  	power_domains->domain_use_count[domain]--;
ddf9c5362   Imre Deak   drm/i915: add int...
6285

70bf407c8   Imre Deak   drm/i915: fold in...
6286
6287
  	for_each_power_well_rev(i, power_well, BIT(domain), power_domains) {
  		WARN_ON(!power_well->count);
25eaa003b   Imre Deak   drm/i915: sanity ...
6288
6289
6290
  		if (!--power_well->count && i915.disable_power_well) {
  			DRM_DEBUG_KMS("disabling %s
  ", power_well->name);
bfafe93a1   Imre Deak   drm/i915: cache h...
6291
  			power_well->hw_enabled = false;
c6cb582e6   Imre Deak   drm/i915: split p...
6292
  			power_well->ops->disable(dev_priv, power_well);
25eaa003b   Imre Deak   drm/i915: sanity ...
6293
  		}
70bf407c8   Imre Deak   drm/i915: fold in...
6294
  	}
1da51581b   Imre Deak   drm/i915: add a d...
6295

83c00f553   Imre Deak   drm/i915: prepare...
6296
  	mutex_unlock(&power_domains->lock);
9e6ea71a5   Paulo Zanoni   drm/i915: get/put...
6297
6298
  
  	intel_runtime_pm_put(dev_priv);
6765625e0   Ville Syrjälä   drm/i915: Add int...
6299
  }
/*
 * Opaque handle exported to the HDA audio driver (HSW/BDW only); set in
 * intel_power_domains_init(), cleared in intel_power_domains_remove().
 */
static struct i915_power_domains *hsw_pwr;
a38911a3f   Wang Xingchao   i915/drm: Add pri...
6301
6302
  
  /* Display audio driver power well request */
74b0c2d75   Takashi Iwai   drm/i915, HD-audi...
6303
  int i915_request_power_well(void)
a38911a3f   Wang Xingchao   i915/drm: Add pri...
6304
  {
b4ed44844   Imre Deak   drm/i915: remove ...
6305
  	struct drm_i915_private *dev_priv;
74b0c2d75   Takashi Iwai   drm/i915, HD-audi...
6306
6307
  	if (!hsw_pwr)
  		return -ENODEV;
a38911a3f   Wang Xingchao   i915/drm: Add pri...
6308

b4ed44844   Imre Deak   drm/i915: remove ...
6309
6310
  	dev_priv = container_of(hsw_pwr, struct drm_i915_private,
  				power_domains);
da7e29bd5   Imre Deak   drm/i915: use drm...
6311
  	intel_display_power_get(dev_priv, POWER_DOMAIN_AUDIO);
74b0c2d75   Takashi Iwai   drm/i915, HD-audi...
6312
  	return 0;
a38911a3f   Wang Xingchao   i915/drm: Add pri...
6313
6314
6315
6316
  }
  EXPORT_SYMBOL_GPL(i915_request_power_well);
  
  /* Display audio driver power well release */
74b0c2d75   Takashi Iwai   drm/i915, HD-audi...
6317
  int i915_release_power_well(void)
a38911a3f   Wang Xingchao   i915/drm: Add pri...
6318
  {
b4ed44844   Imre Deak   drm/i915: remove ...
6319
  	struct drm_i915_private *dev_priv;
74b0c2d75   Takashi Iwai   drm/i915, HD-audi...
6320
6321
  	if (!hsw_pwr)
  		return -ENODEV;
a38911a3f   Wang Xingchao   i915/drm: Add pri...
6322

b4ed44844   Imre Deak   drm/i915: remove ...
6323
6324
  	dev_priv = container_of(hsw_pwr, struct drm_i915_private,
  				power_domains);
da7e29bd5   Imre Deak   drm/i915: use drm...
6325
  	intel_display_power_put(dev_priv, POWER_DOMAIN_AUDIO);
74b0c2d75   Takashi Iwai   drm/i915, HD-audi...
6326
  	return 0;
a38911a3f   Wang Xingchao   i915/drm: Add pri...
6327
6328
  }
  EXPORT_SYMBOL_GPL(i915_release_power_well);
c149dcb5c   Jani Nikula   drm/i915: provide...
6329
6330
6331
6332
6333
6334
6335
6336
6337
6338
6339
6340
6341
6342
6343
6344
6345
6346
6347
  /*
   * Private interface for the audio driver to get CDCLK in kHz.
   *
   * Caller must request power well using i915_request_power_well() prior to
   * making the call.
   */
  int i915_get_cdclk_freq(void)
  {
  	struct drm_i915_private *dev_priv;
  
  	if (!hsw_pwr)
  		return -ENODEV;
  
  	dev_priv = container_of(hsw_pwr, struct drm_i915_private,
  				power_domains);
  
  	return intel_ddi_get_cdclk_freq(dev_priv);
  }
  EXPORT_SYMBOL_GPL(i915_get_cdclk_freq);
efcad9174   Imre Deak   drm/i915: move po...
6348
6349
6350
6351
  #define POWER_DOMAIN_MASK (BIT(POWER_DOMAIN_NUM) - 1)
  
  #define HSW_ALWAYS_ON_POWER_DOMAINS (			\
  	BIT(POWER_DOMAIN_PIPE_A) |			\
f5938f363   Imre Deak   drm/i915: add ini...
6352
  	BIT(POWER_DOMAIN_TRANSCODER_EDP) |		\
319be8ae8   Imre Deak   drm/i915: add por...
6353
6354
6355
6356
6357
6358
6359
6360
6361
  	BIT(POWER_DOMAIN_PORT_DDI_A_2_LANES) |		\
  	BIT(POWER_DOMAIN_PORT_DDI_A_4_LANES) |		\
  	BIT(POWER_DOMAIN_PORT_DDI_B_2_LANES) |		\
  	BIT(POWER_DOMAIN_PORT_DDI_B_4_LANES) |		\
  	BIT(POWER_DOMAIN_PORT_DDI_C_2_LANES) |		\
  	BIT(POWER_DOMAIN_PORT_DDI_C_4_LANES) |		\
  	BIT(POWER_DOMAIN_PORT_DDI_D_2_LANES) |		\
  	BIT(POWER_DOMAIN_PORT_DDI_D_4_LANES) |		\
  	BIT(POWER_DOMAIN_PORT_CRT) |			\
bd2bb1b9a   Paulo Zanoni   drm/i915: add POW...
6362
  	BIT(POWER_DOMAIN_PLLS) |			\
f5938f363   Imre Deak   drm/i915: add ini...
6363
  	BIT(POWER_DOMAIN_INIT))
efcad9174   Imre Deak   drm/i915: move po...
6364
6365
6366
6367
6368
6369
6370
6371
6372
6373
  #define HSW_DISPLAY_POWER_DOMAINS (				\
  	(POWER_DOMAIN_MASK & ~HSW_ALWAYS_ON_POWER_DOMAINS) |	\
  	BIT(POWER_DOMAIN_INIT))
  
  #define BDW_ALWAYS_ON_POWER_DOMAINS (			\
  	HSW_ALWAYS_ON_POWER_DOMAINS |			\
  	BIT(POWER_DOMAIN_PIPE_A_PANEL_FITTER))
  #define BDW_DISPLAY_POWER_DOMAINS (				\
  	(POWER_DOMAIN_MASK & ~BDW_ALWAYS_ON_POWER_DOMAINS) |	\
  	BIT(POWER_DOMAIN_INIT))
77961eb98   Imre Deak   drm/i915: power d...
6374
6375
6376
6377
6378
6379
6380
6381
6382
6383
6384
6385
6386
6387
6388
6389
6390
6391
6392
6393
6394
6395
6396
6397
6398
6399
6400
6401
  #define VLV_ALWAYS_ON_POWER_DOMAINS	BIT(POWER_DOMAIN_INIT)
  #define VLV_DISPLAY_POWER_DOMAINS	POWER_DOMAIN_MASK
  
  #define VLV_DPIO_CMN_BC_POWER_DOMAINS (		\
  	BIT(POWER_DOMAIN_PORT_DDI_B_2_LANES) |	\
  	BIT(POWER_DOMAIN_PORT_DDI_B_4_LANES) |	\
  	BIT(POWER_DOMAIN_PORT_DDI_C_2_LANES) |	\
  	BIT(POWER_DOMAIN_PORT_DDI_C_4_LANES) |	\
  	BIT(POWER_DOMAIN_PORT_CRT) |		\
  	BIT(POWER_DOMAIN_INIT))
  
  #define VLV_DPIO_TX_B_LANES_01_POWER_DOMAINS (	\
  	BIT(POWER_DOMAIN_PORT_DDI_B_2_LANES) |	\
  	BIT(POWER_DOMAIN_PORT_DDI_B_4_LANES) |	\
  	BIT(POWER_DOMAIN_INIT))
  
  #define VLV_DPIO_TX_B_LANES_23_POWER_DOMAINS (	\
  	BIT(POWER_DOMAIN_PORT_DDI_B_4_LANES) |	\
  	BIT(POWER_DOMAIN_INIT))
  
  #define VLV_DPIO_TX_C_LANES_01_POWER_DOMAINS (	\
  	BIT(POWER_DOMAIN_PORT_DDI_C_2_LANES) |	\
  	BIT(POWER_DOMAIN_PORT_DDI_C_4_LANES) |	\
  	BIT(POWER_DOMAIN_INIT))
  
  #define VLV_DPIO_TX_C_LANES_23_POWER_DOMAINS (	\
  	BIT(POWER_DOMAIN_PORT_DDI_C_4_LANES) |	\
  	BIT(POWER_DOMAIN_INIT))
26972b0a8   Ville Syrjälä   drm/i915: Add per...
6402
6403
6404
6405
6406
6407
6408
6409
6410
6411
6412
  #define CHV_PIPE_A_POWER_DOMAINS (	\
  	BIT(POWER_DOMAIN_PIPE_A) |	\
  	BIT(POWER_DOMAIN_INIT))
  
  #define CHV_PIPE_B_POWER_DOMAINS (	\
  	BIT(POWER_DOMAIN_PIPE_B) |	\
  	BIT(POWER_DOMAIN_INIT))
  
  #define CHV_PIPE_C_POWER_DOMAINS (	\
  	BIT(POWER_DOMAIN_PIPE_C) |	\
  	BIT(POWER_DOMAIN_INIT))
5d6f7ea75   Ville Syrjälä   drm/i915: Add chv...
6413
6414
6415
6416
6417
6418
6419
6420
6421
6422
6423
  #define CHV_DPIO_CMN_BC_POWER_DOMAINS (		\
  	BIT(POWER_DOMAIN_PORT_DDI_B_2_LANES) |	\
  	BIT(POWER_DOMAIN_PORT_DDI_B_4_LANES) |	\
  	BIT(POWER_DOMAIN_PORT_DDI_C_2_LANES) |	\
  	BIT(POWER_DOMAIN_PORT_DDI_C_4_LANES) |	\
  	BIT(POWER_DOMAIN_INIT))
  
  #define CHV_DPIO_CMN_D_POWER_DOMAINS (		\
  	BIT(POWER_DOMAIN_PORT_DDI_D_2_LANES) |	\
  	BIT(POWER_DOMAIN_PORT_DDI_D_4_LANES) |	\
  	BIT(POWER_DOMAIN_INIT))
2ce147f36   Ville Syrjälä   drm/i915: Add chv...
6424
6425
6426
6427
6428
6429
6430
6431
  #define CHV_DPIO_TX_D_LANES_01_POWER_DOMAINS (	\
  	BIT(POWER_DOMAIN_PORT_DDI_D_2_LANES) |	\
  	BIT(POWER_DOMAIN_PORT_DDI_D_4_LANES) |	\
  	BIT(POWER_DOMAIN_INIT))
  
  #define CHV_DPIO_TX_D_LANES_23_POWER_DOMAINS (	\
  	BIT(POWER_DOMAIN_PORT_DDI_D_4_LANES) |	\
  	BIT(POWER_DOMAIN_INIT))
a45f4466e   Imre Deak   drm/i915: add noo...
6432
6433
6434
6435
6436
6437
/* No-op hooks for platforms whose display power is always on. */
static const struct i915_power_well_ops i9xx_always_on_power_well_ops = {
	.sync_hw = i9xx_always_on_power_well_noop,
	.enable = i9xx_always_on_power_well_noop,
	.disable = i9xx_always_on_power_well_noop,
	.is_enabled = i9xx_always_on_power_well_enabled,
};
c6cb582e6   Imre Deak   drm/i915: split p...
6438

26972b0a8   Ville Syrjälä   drm/i915: Add per...
6439
6440
6441
6442
6443
6444
/* Hooks for the CHV per-pipe power wells (Punit SSS/SSC controlled). */
static const struct i915_power_well_ops chv_pipe_power_well_ops = {
	.sync_hw = chv_pipe_power_well_sync_hw,
	.enable = chv_pipe_power_well_enable,
	.disable = chv_pipe_power_well_disable,
	.is_enabled = chv_pipe_power_well_enabled,
};
5d6f7ea75   Ville Syrjälä   drm/i915: Add chv...
6445
6446
6447
6448
6449
6450
/* Hooks for the CHV DPIO common lane wells (PHY cmnreset sequencing). */
static const struct i915_power_well_ops chv_dpio_cmn_power_well_ops = {
	.sync_hw = vlv_power_well_sync_hw,
	.enable = chv_dpio_cmn_power_well_enable,
	.disable = chv_dpio_cmn_power_well_disable,
	.is_enabled = vlv_power_well_enabled,
};
1c2256df2   Imre Deak   drm/i915: add a d...
6451
6452
6453
6454
6455
  static struct i915_power_well i9xx_always_on_power_well[] = {
  	{
  		.name = "always-on",
  		.always_on = 1,
  		.domains = POWER_DOMAIN_MASK,
c6cb582e6   Imre Deak   drm/i915: split p...
6456
  		.ops = &i9xx_always_on_power_well_ops,
1c2256df2   Imre Deak   drm/i915: add a d...
6457
6458
  	},
  };
c6cb582e6   Imre Deak   drm/i915: split p...
6459
6460
6461
6462
6463
6464
/* Hooks for the single HSW/BDW "display" power well. */
static const struct i915_power_well_ops hsw_power_well_ops = {
	.sync_hw = hsw_power_well_sync_hw,
	.enable = hsw_power_well_enable,
	.disable = hsw_power_well_disable,
	.is_enabled = hsw_power_well_enabled,
};
c1ca727f8   Imre Deak   drm/i915: support...
6465
6466
  static struct i915_power_well hsw_power_wells[] = {
  	{
6f3ef5dda   Imre Deak   drm/i915: add alw...
6467
6468
6469
  		.name = "always-on",
  		.always_on = 1,
  		.domains = HSW_ALWAYS_ON_POWER_DOMAINS,
c6cb582e6   Imre Deak   drm/i915: split p...
6470
  		.ops = &i9xx_always_on_power_well_ops,
6f3ef5dda   Imre Deak   drm/i915: add alw...
6471
6472
  	},
  	{
c1ca727f8   Imre Deak   drm/i915: support...
6473
  		.name = "display",
efcad9174   Imre Deak   drm/i915: move po...
6474
  		.domains = HSW_DISPLAY_POWER_DOMAINS,
c6cb582e6   Imre Deak   drm/i915: split p...
6475
  		.ops = &hsw_power_well_ops,
c1ca727f8   Imre Deak   drm/i915: support...
6476
6477
6478
6479
6480
  	},
  };
  
  static struct i915_power_well bdw_power_wells[] = {
  	{
6f3ef5dda   Imre Deak   drm/i915: add alw...
6481
6482
6483
  		.name = "always-on",
  		.always_on = 1,
  		.domains = BDW_ALWAYS_ON_POWER_DOMAINS,
c6cb582e6   Imre Deak   drm/i915: split p...
6484
  		.ops = &i9xx_always_on_power_well_ops,
6f3ef5dda   Imre Deak   drm/i915: add alw...
6485
6486
  	},
  	{
c1ca727f8   Imre Deak   drm/i915: support...
6487
  		.name = "display",
efcad9174   Imre Deak   drm/i915: move po...
6488
  		.domains = BDW_DISPLAY_POWER_DOMAINS,
c6cb582e6   Imre Deak   drm/i915: split p...
6489
  		.ops = &hsw_power_well_ops,
c1ca727f8   Imre Deak   drm/i915: support...
6490
6491
  	},
  };
77961eb98   Imre Deak   drm/i915: power d...
6492
6493
6494
6495
6496
6497
/* Hooks for the VLV DISP2D well; enable/disable also manage display IRQs. */
static const struct i915_power_well_ops vlv_display_power_well_ops = {
	.sync_hw = vlv_power_well_sync_hw,
	.enable = vlv_display_power_well_enable,
	.disable = vlv_display_power_well_disable,
	.is_enabled = vlv_power_well_enabled,
};
aa519f231   Ville Syrjälä   drm/i915: Pull th...
6498
6499
6500
6501
6502
6503
/* Hooks for the VLV DPIO common lane well. */
static const struct i915_power_well_ops vlv_dpio_cmn_power_well_ops = {
	.sync_hw = vlv_power_well_sync_hw,
	.enable = vlv_dpio_cmn_power_well_enable,
	.disable = vlv_dpio_cmn_power_well_disable,
	.is_enabled = vlv_power_well_enabled,
};
77961eb98   Imre Deak   drm/i915: power d...
6504
6505
6506
6507
6508
6509
6510
6511
6512
6513
6514
6515
6516
6517
6518
6519
6520
6521
6522
6523
6524
/* Generic Punit-controlled hooks used by the DPIO TX lane wells. */
static const struct i915_power_well_ops vlv_dpio_power_well_ops = {
	.sync_hw = vlv_power_well_sync_hw,
	.enable = vlv_power_well_enable,
	.disable = vlv_power_well_disable,
	.is_enabled = vlv_power_well_enabled,
};
  
  static struct i915_power_well vlv_power_wells[] = {
  	{
  		.name = "always-on",
  		.always_on = 1,
  		.domains = VLV_ALWAYS_ON_POWER_DOMAINS,
  		.ops = &i9xx_always_on_power_well_ops,
  	},
  	{
  		.name = "display",
  		.domains = VLV_DISPLAY_POWER_DOMAINS,
  		.data = PUNIT_POWER_WELL_DISP2D,
  		.ops = &vlv_display_power_well_ops,
  	},
  	{
77961eb98   Imre Deak   drm/i915: power d...
6525
6526
6527
6528
6529
6530
6531
6532
6533
6534
6535
6536
6537
6538
6539
6540
6541
6542
6543
6544
6545
6546
6547
6548
6549
6550
6551
6552
6553
6554
6555
6556
6557
6558
6559
  		.name = "dpio-tx-b-01",
  		.domains = VLV_DPIO_TX_B_LANES_01_POWER_DOMAINS |
  			   VLV_DPIO_TX_B_LANES_23_POWER_DOMAINS |
  			   VLV_DPIO_TX_C_LANES_01_POWER_DOMAINS |
  			   VLV_DPIO_TX_C_LANES_23_POWER_DOMAINS,
  		.ops = &vlv_dpio_power_well_ops,
  		.data = PUNIT_POWER_WELL_DPIO_TX_B_LANES_01,
  	},
  	{
  		.name = "dpio-tx-b-23",
  		.domains = VLV_DPIO_TX_B_LANES_01_POWER_DOMAINS |
  			   VLV_DPIO_TX_B_LANES_23_POWER_DOMAINS |
  			   VLV_DPIO_TX_C_LANES_01_POWER_DOMAINS |
  			   VLV_DPIO_TX_C_LANES_23_POWER_DOMAINS,
  		.ops = &vlv_dpio_power_well_ops,
  		.data = PUNIT_POWER_WELL_DPIO_TX_B_LANES_23,
  	},
  	{
  		.name = "dpio-tx-c-01",
  		.domains = VLV_DPIO_TX_B_LANES_01_POWER_DOMAINS |
  			   VLV_DPIO_TX_B_LANES_23_POWER_DOMAINS |
  			   VLV_DPIO_TX_C_LANES_01_POWER_DOMAINS |
  			   VLV_DPIO_TX_C_LANES_23_POWER_DOMAINS,
  		.ops = &vlv_dpio_power_well_ops,
  		.data = PUNIT_POWER_WELL_DPIO_TX_C_LANES_01,
  	},
  	{
  		.name = "dpio-tx-c-23",
  		.domains = VLV_DPIO_TX_B_LANES_01_POWER_DOMAINS |
  			   VLV_DPIO_TX_B_LANES_23_POWER_DOMAINS |
  			   VLV_DPIO_TX_C_LANES_01_POWER_DOMAINS |
  			   VLV_DPIO_TX_C_LANES_23_POWER_DOMAINS,
  		.ops = &vlv_dpio_power_well_ops,
  		.data = PUNIT_POWER_WELL_DPIO_TX_C_LANES_23,
  	},
f099a3c60   Jesse Barnes   drm/i915/vlv: re-...
6560
6561
6562
6563
  	{
  		.name = "dpio-common",
  		.domains = VLV_DPIO_CMN_BC_POWER_DOMAINS,
  		.data = PUNIT_POWER_WELL_DPIO_CMN_BC,
aa519f231   Ville Syrjälä   drm/i915: Pull th...
6564
  		.ops = &vlv_dpio_cmn_power_well_ops,
f099a3c60   Jesse Barnes   drm/i915/vlv: re-...
6565
  	},
77961eb98   Imre Deak   drm/i915: power d...
6566
  };
4811ff4f2   Ville Syrjälä   drm/i915: Add chv...
6567
6568
6569
6570
6571
6572
6573
  static struct i915_power_well chv_power_wells[] = {
  	{
  		.name = "always-on",
  		.always_on = 1,
  		.domains = VLV_ALWAYS_ON_POWER_DOMAINS,
  		.ops = &i9xx_always_on_power_well_ops,
  	},
f07057d13   Ville Syrjälä   drm/i915: Add dis...
6574
6575
6576
6577
6578
6579
6580
  #if 0
  	{
  		.name = "display",
  		.domains = VLV_DISPLAY_POWER_DOMAINS,
  		.data = PUNIT_POWER_WELL_DISP2D,
  		.ops = &vlv_display_power_well_ops,
  	},
26972b0a8   Ville Syrjälä   drm/i915: Add per...
6581
6582
6583
6584
6585
6586
6587
6588
6589
6590
6591
6592
6593
6594
6595
6596
6597
6598
  	{
  		.name = "pipe-a",
  		.domains = CHV_PIPE_A_POWER_DOMAINS,
  		.data = PIPE_A,
  		.ops = &chv_pipe_power_well_ops,
  	},
  	{
  		.name = "pipe-b",
  		.domains = CHV_PIPE_B_POWER_DOMAINS,
  		.data = PIPE_B,
  		.ops = &chv_pipe_power_well_ops,
  	},
  	{
  		.name = "pipe-c",
  		.domains = CHV_PIPE_C_POWER_DOMAINS,
  		.data = PIPE_C,
  		.ops = &chv_pipe_power_well_ops,
  	},
f07057d13   Ville Syrjälä   drm/i915: Add dis...
6599
  #endif
5d6f7ea75   Ville Syrjälä   drm/i915: Add chv...
6600
6601
  	{
  		.name = "dpio-common-bc",
3dd7b9745   Ville Syrjälä   drm/i915: Hack to...
6602
6603
6604
6605
6606
6607
  		/*
  		 * XXX: cmnreset for one PHY seems to disturb the other.
  		 * As a workaround keep both powered on at the same
  		 * time for now.
  		 */
  		.domains = CHV_DPIO_CMN_BC_POWER_DOMAINS | CHV_DPIO_CMN_D_POWER_DOMAINS,
5d6f7ea75   Ville Syrjälä   drm/i915: Add chv...
6608
6609
6610
6611
6612
  		.data = PUNIT_POWER_WELL_DPIO_CMN_BC,
  		.ops = &chv_dpio_cmn_power_well_ops,
  	},
  	{
  		.name = "dpio-common-d",
3dd7b9745   Ville Syrjälä   drm/i915: Hack to...
6613
6614
6615
6616
6617
6618
  		/*
  		 * XXX: cmnreset for one PHY seems to disturb the other.
  		 * As a workaround keep both powered on at the same
  		 * time for now.
  		 */
  		.domains = CHV_DPIO_CMN_BC_POWER_DOMAINS | CHV_DPIO_CMN_D_POWER_DOMAINS,
5d6f7ea75   Ville Syrjälä   drm/i915: Add chv...
6619
6620
6621
  		.data = PUNIT_POWER_WELL_DPIO_CMN_D,
  		.ops = &chv_dpio_cmn_power_well_ops,
  	},
825835653   Ville Syrjälä   drm/i915: Add chv...
6622
6623
6624
6625
6626
6627
6628
6629
6630
6631
6632
6633
6634
6635
6636
6637
6638
6639
6640
6641
6642
6643
6644
6645
6646
6647
6648
6649
6650
  #if 0
  	{
  		.name = "dpio-tx-b-01",
  		.domains = VLV_DPIO_TX_B_LANES_01_POWER_DOMAINS |
  			   VLV_DPIO_TX_B_LANES_23_POWER_DOMAINS,
  		.ops = &vlv_dpio_power_well_ops,
  		.data = PUNIT_POWER_WELL_DPIO_TX_B_LANES_01,
  	},
  	{
  		.name = "dpio-tx-b-23",
  		.domains = VLV_DPIO_TX_B_LANES_01_POWER_DOMAINS |
  			   VLV_DPIO_TX_B_LANES_23_POWER_DOMAINS,
  		.ops = &vlv_dpio_power_well_ops,
  		.data = PUNIT_POWER_WELL_DPIO_TX_B_LANES_23,
  	},
  	{
  		.name = "dpio-tx-c-01",
  		.domains = VLV_DPIO_TX_C_LANES_01_POWER_DOMAINS |
  			   VLV_DPIO_TX_C_LANES_23_POWER_DOMAINS,
  		.ops = &vlv_dpio_power_well_ops,
  		.data = PUNIT_POWER_WELL_DPIO_TX_C_LANES_01,
  	},
  	{
  		.name = "dpio-tx-c-23",
  		.domains = VLV_DPIO_TX_C_LANES_01_POWER_DOMAINS |
  			   VLV_DPIO_TX_C_LANES_23_POWER_DOMAINS,
  		.ops = &vlv_dpio_power_well_ops,
  		.data = PUNIT_POWER_WELL_DPIO_TX_C_LANES_23,
  	},
2ce147f36   Ville Syrjälä   drm/i915: Add chv...
6651
6652
6653
6654
6655
6656
6657
6658
6659
6660
6661
6662
6663
6664
  	{
  		.name = "dpio-tx-d-01",
  		.domains = CHV_DPIO_TX_D_LANES_01_POWER_DOMAINS |
  			   CHV_DPIO_TX_D_LANES_23_POWER_DOMAINS,
  		.ops = &vlv_dpio_power_well_ops,
  		.data = PUNIT_POWER_WELL_DPIO_TX_D_LANES_01,
  	},
  	{
  		.name = "dpio-tx-d-23",
  		.domains = CHV_DPIO_TX_D_LANES_01_POWER_DOMAINS |
  			   CHV_DPIO_TX_D_LANES_23_POWER_DOMAINS,
  		.ops = &vlv_dpio_power_well_ops,
  		.data = PUNIT_POWER_WELL_DPIO_TX_D_LANES_23,
  	},
825835653   Ville Syrjälä   drm/i915: Add chv...
6665
  #endif
4811ff4f2   Ville Syrjälä   drm/i915: Add chv...
6666
  };
d2011dc8d   Ville Syrjälä   drm/i915: Move VL...
6667
6668
6669
6670
6671
6672
6673
6674
6675
6676
6677
6678
6679
6680
  static struct i915_power_well *lookup_power_well(struct drm_i915_private *dev_priv,
  						 enum punit_power_well power_well_id)
  {
  	struct i915_power_domains *power_domains = &dev_priv->power_domains;
  	struct i915_power_well *power_well;
  	int i;
  
  	for_each_power_well(i, power_well, POWER_DOMAIN_MASK, power_domains) {
  		if (power_well->data == power_well_id)
  			return power_well;
  	}
  
  	return NULL;
  }
c1ca727f8   Imre Deak   drm/i915: support...
6681
6682
6683
6684
  #define set_power_wells(power_domains, __power_wells) ({		\
  	(power_domains)->power_wells = (__power_wells);			\
  	(power_domains)->power_well_count = ARRAY_SIZE(__power_wells);	\
  })
da7e29bd5   Imre Deak   drm/i915: use drm...
6685
  int intel_power_domains_init(struct drm_i915_private *dev_priv)
a38911a3f   Wang Xingchao   i915/drm: Add pri...
6686
  {
83c00f553   Imre Deak   drm/i915: prepare...
6687
  	struct i915_power_domains *power_domains = &dev_priv->power_domains;
c1ca727f8   Imre Deak   drm/i915: support...
6688

83c00f553   Imre Deak   drm/i915: prepare...
6689
  	mutex_init(&power_domains->lock);
a38911a3f   Wang Xingchao   i915/drm: Add pri...
6690

c1ca727f8   Imre Deak   drm/i915: support...
6691
6692
6693
6694
  	/*
  	 * The enabling order will be from lower to higher indexed wells,
  	 * the disabling order is reversed.
  	 */
da7e29bd5   Imre Deak   drm/i915: use drm...
6695
  	if (IS_HASWELL(dev_priv->dev)) {
c1ca727f8   Imre Deak   drm/i915: support...
6696
6697
  		set_power_wells(power_domains, hsw_power_wells);
  		hsw_pwr = power_domains;
da7e29bd5   Imre Deak   drm/i915: use drm...
6698
  	} else if (IS_BROADWELL(dev_priv->dev)) {
c1ca727f8   Imre Deak   drm/i915: support...
6699
6700
  		set_power_wells(power_domains, bdw_power_wells);
  		hsw_pwr = power_domains;
4811ff4f2   Ville Syrjälä   drm/i915: Add chv...
6701
6702
  	} else if (IS_CHERRYVIEW(dev_priv->dev)) {
  		set_power_wells(power_domains, chv_power_wells);
77961eb98   Imre Deak   drm/i915: power d...
6703
6704
  	} else if (IS_VALLEYVIEW(dev_priv->dev)) {
  		set_power_wells(power_domains, vlv_power_wells);
c1ca727f8   Imre Deak   drm/i915: support...
6705
  	} else {
1c2256df2   Imre Deak   drm/i915: add a d...
6706
  		set_power_wells(power_domains, i9xx_always_on_power_well);
c1ca727f8   Imre Deak   drm/i915: support...
6707
  	}
a38911a3f   Wang Xingchao   i915/drm: Add pri...
6708
6709
6710
  
  	return 0;
  }
da7e29bd5   Imre Deak   drm/i915: use drm...
6711
  void intel_power_domains_remove(struct drm_i915_private *dev_priv)
a38911a3f   Wang Xingchao   i915/drm: Add pri...
6712
6713
6714
  {
  	hsw_pwr = NULL;
  }
da7e29bd5   Imre Deak   drm/i915: use drm...
6715
  static void intel_power_domains_resume(struct drm_i915_private *dev_priv)
9cdb826c1   Ville Syrjälä   drm/i915: Change ...
6716
  {
83c00f553   Imre Deak   drm/i915: prepare...
6717
6718
  	struct i915_power_domains *power_domains = &dev_priv->power_domains;
  	struct i915_power_well *power_well;
c1ca727f8   Imre Deak   drm/i915: support...
6719
  	int i;
9cdb826c1   Ville Syrjälä   drm/i915: Change ...
6720

83c00f553   Imre Deak   drm/i915: prepare...
6721
  	mutex_lock(&power_domains->lock);
bfafe93a1   Imre Deak   drm/i915: cache h...
6722
  	for_each_power_well(i, power_well, POWER_DOMAIN_MASK, power_domains) {
a45f4466e   Imre Deak   drm/i915: add noo...
6723
  		power_well->ops->sync_hw(dev_priv, power_well);
bfafe93a1   Imre Deak   drm/i915: cache h...
6724
6725
6726
  		power_well->hw_enabled = power_well->ops->is_enabled(dev_priv,
  								     power_well);
  	}
83c00f553   Imre Deak   drm/i915: prepare...
6727
  	mutex_unlock(&power_domains->lock);
a38911a3f   Wang Xingchao   i915/drm: Add pri...
6728
  }
d2011dc8d   Ville Syrjälä   drm/i915: Move VL...
6729
6730
6731
6732
6733
6734
6735
6736
6737
6738
6739
6740
6741
6742
6743
6744
6745
6746
6747
6748
6749
6750
6751
6752
6753
6754
6755
6756
6757
6758
6759
  static void vlv_cmnlane_wa(struct drm_i915_private *dev_priv)
  {
  	struct i915_power_well *cmn =
  		lookup_power_well(dev_priv, PUNIT_POWER_WELL_DPIO_CMN_BC);
  	struct i915_power_well *disp2d =
  		lookup_power_well(dev_priv, PUNIT_POWER_WELL_DISP2D);
  
  	/* nothing to do if common lane is already off */
  	if (!cmn->ops->is_enabled(dev_priv, cmn))
  		return;
  
  	/* If the display might be already active skip this */
  	if (disp2d->ops->is_enabled(dev_priv, disp2d) &&
  	    I915_READ(DPIO_CTL) & DPIO_CMNRST)
  		return;
  
  	DRM_DEBUG_KMS("toggling display PHY side reset
  ");
  
  	/* cmnlane needs DPLL registers */
  	disp2d->ops->enable(dev_priv, disp2d);
  
  	/*
  	 * From VLV2A0_DP_eDP_HDMI_DPIO_driver_vbios_notes_11.docx:
  	 * Need to assert and de-assert PHY SB reset by gating the
  	 * common lane power, then un-gating it.
  	 * Simply ungating isn't enough to reset the PHY enough to get
  	 * ports and lanes running.
  	 */
  	cmn->ops->disable(dev_priv, cmn);
  }
da7e29bd5   Imre Deak   drm/i915: use drm...
6760
  void intel_power_domains_init_hw(struct drm_i915_private *dev_priv)
d0d3e5136   Eugeni Dodonov   drm/i915: enable ...
6761
  {
d2011dc8d   Ville Syrjälä   drm/i915: Move VL...
6762
  	struct drm_device *dev = dev_priv->dev;
0d116a29a   Imre Deak   drm/i915: vlv: in...
6763
6764
6765
  	struct i915_power_domains *power_domains = &dev_priv->power_domains;
  
  	power_domains->initializing = true;
d2011dc8d   Ville Syrjälä   drm/i915: Move VL...
6766
6767
6768
6769
6770
6771
  
  	if (IS_VALLEYVIEW(dev) && !IS_CHERRYVIEW(dev)) {
  		mutex_lock(&power_domains->lock);
  		vlv_cmnlane_wa(dev_priv);
  		mutex_unlock(&power_domains->lock);
  	}
fa42e23c1   Paulo Zanoni   drm/i915: fix int...
6772
  	/* For now, we need the power well to be always enabled. */
da7e29bd5   Imre Deak   drm/i915: use drm...
6773
6774
  	intel_display_set_init_power(dev_priv, true);
  	intel_power_domains_resume(dev_priv);
0d116a29a   Imre Deak   drm/i915: vlv: in...
6775
  	power_domains->initializing = false;
d0d3e5136   Eugeni Dodonov   drm/i915: enable ...
6776
  }
c67a470b1   Paulo Zanoni   drm/i915: allow p...
6777
6778
  void intel_aux_display_runtime_get(struct drm_i915_private *dev_priv)
  {
d361ae269   Paulo Zanoni   drm/i915: make in...
6779
  	intel_runtime_pm_get(dev_priv);
c67a470b1   Paulo Zanoni   drm/i915: allow p...
6780
6781
6782
6783
  }
  
  void intel_aux_display_runtime_put(struct drm_i915_private *dev_priv)
  {
d361ae269   Paulo Zanoni   drm/i915: make in...
6784
  	intel_runtime_pm_put(dev_priv);
c67a470b1   Paulo Zanoni   drm/i915: allow p...
6785
  }
8a1874559   Paulo Zanoni   drm/i915: add ini...
6786
6787
6788
6789
6790
6791
6792
6793
6794
6795
6796
6797
  void intel_runtime_pm_get(struct drm_i915_private *dev_priv)
  {
  	struct drm_device *dev = dev_priv->dev;
  	struct device *device = &dev->pdev->dev;
  
  	if (!HAS_RUNTIME_PM(dev))
  		return;
  
  	pm_runtime_get_sync(device);
  	WARN(dev_priv->pm.suspended, "Device still suspended.
  ");
  }
c6df39b5e   Imre Deak   drm/i915: get a r...
6798
6799
6800
6801
6802
6803
6804
6805
6806
6807
6808
6809
  void intel_runtime_pm_get_noresume(struct drm_i915_private *dev_priv)
  {
  	struct drm_device *dev = dev_priv->dev;
  	struct device *device = &dev->pdev->dev;
  
  	if (!HAS_RUNTIME_PM(dev))
  		return;
  
  	WARN(dev_priv->pm.suspended, "Getting nosync-ref while suspended.
  ");
  	pm_runtime_get_noresume(device);
  }
8a1874559   Paulo Zanoni   drm/i915: add ini...
6810
6811
6812
6813
6814
6815
6816
6817
6818
6819
6820
6821
6822
6823
6824
6825
  void intel_runtime_pm_put(struct drm_i915_private *dev_priv)
  {
  	struct drm_device *dev = dev_priv->dev;
  	struct device *device = &dev->pdev->dev;
  
  	if (!HAS_RUNTIME_PM(dev))
  		return;
  
  	pm_runtime_mark_last_busy(device);
  	pm_runtime_put_autosuspend(device);
  }
  
  void intel_init_runtime_pm(struct drm_i915_private *dev_priv)
  {
  	struct drm_device *dev = dev_priv->dev;
  	struct device *device = &dev->pdev->dev;
8a1874559   Paulo Zanoni   drm/i915: add ini...
6826
6827
6828
6829
  	if (!HAS_RUNTIME_PM(dev))
  		return;
  
  	pm_runtime_set_active(device);
aeab0b5af   Imre Deak   drm/i915: disable...
6830
6831
6832
6833
6834
6835
6836
6837
6838
  	/*
  	 * RPM depends on RC6 to save restore the GT HW context, so make RC6 a
  	 * requirement.
  	 */
  	if (!intel_enable_rc6(dev)) {
  		DRM_INFO("RC6 disabled, disabling runtime PM support
  ");
  		return;
  	}
8a1874559   Paulo Zanoni   drm/i915: add ini...
6839
6840
6841
  	pm_runtime_set_autosuspend_delay(device, 10000); /* 10s */
  	pm_runtime_mark_last_busy(device);
  	pm_runtime_use_autosuspend(device);
ba0239e03   Paulo Zanoni   drm/i915: remove ...
6842
6843
  
  	pm_runtime_put_autosuspend(device);
8a1874559   Paulo Zanoni   drm/i915: add ini...
6844
6845
6846
6847
6848
6849
6850
6851
6852
  }
  
  void intel_fini_runtime_pm(struct drm_i915_private *dev_priv)
  {
  	struct drm_device *dev = dev_priv->dev;
  	struct device *device = &dev->pdev->dev;
  
  	if (!HAS_RUNTIME_PM(dev))
  		return;
aeab0b5af   Imre Deak   drm/i915: disable...
6853
6854
  	if (!intel_enable_rc6(dev))
  		return;
8a1874559   Paulo Zanoni   drm/i915: add ini...
6855
6856
6857
6858
  	/* Make sure we're not suspended first. */
  	pm_runtime_get_sync(device);
  	pm_runtime_disable(device);
  }
1fa611065   Eugeni Dodonov   drm/i915: add gen...
6859
6860
6861
6862
/* Set up chip specific power management-related functions */
void intel_init_pm(struct drm_device *dev)
{
	struct drm_i915_private *dev_priv = dev->dev_private;

	/* Wire up the FBC (framebuffer compression) vtable per platform. */
	if (HAS_FBC(dev)) {
		if (INTEL_INFO(dev)->gen >= 7) {
			/* IVB+ share ILK's enabled-query/disable, gen7 enable */
			dev_priv->display.fbc_enabled = ironlake_fbc_enabled;
			dev_priv->display.enable_fbc = gen7_enable_fbc;
			dev_priv->display.disable_fbc = ironlake_disable_fbc;
		} else if (INTEL_INFO(dev)->gen >= 5) {
			dev_priv->display.fbc_enabled = ironlake_fbc_enabled;
			dev_priv->display.enable_fbc = ironlake_enable_fbc;
			dev_priv->display.disable_fbc = ironlake_disable_fbc;
		} else if (IS_GM45(dev)) {
			dev_priv->display.fbc_enabled = g4x_fbc_enabled;
			dev_priv->display.enable_fbc = g4x_enable_fbc;
			dev_priv->display.disable_fbc = g4x_disable_fbc;
		} else {
			dev_priv->display.fbc_enabled = i8xx_fbc_enabled;
			dev_priv->display.enable_fbc = i8xx_enable_fbc;
			dev_priv->display.disable_fbc = i8xx_disable_fbc;

			/* This value was pulled out of someone's hat */
			I915_WRITE(FBC_CONTROL, 500 << FBC_CTL_INTERVAL_SHIFT);
		}
	}

	/* For cxsr */
	if (IS_PINEVIEW(dev))
		i915_pineview_get_mem_freq(dev);
	else if (IS_GEN5(dev))
		i915_ironlake_get_mem_freq(dev);

	/* For FIFO watermark updates */
	if (HAS_PCH_SPLIT(dev)) {
		ilk_setup_wm_latency(dev);

		/*
		 * Only install the ILK watermark functions if the HW latency
		 * values were actually read back; gen5 uses index 1 (index 0
		 * is derived), later gens use index 0.
		 */
		if ((IS_GEN5(dev) && dev_priv->wm.pri_latency[1] &&
		     dev_priv->wm.spr_latency[1] && dev_priv->wm.cur_latency[1]) ||
		    (!IS_GEN5(dev) && dev_priv->wm.pri_latency[0] &&
		     dev_priv->wm.spr_latency[0] && dev_priv->wm.cur_latency[0])) {
			dev_priv->display.update_wm = ilk_update_wm;
			dev_priv->display.update_sprite_wm = ilk_update_sprite_wm;
		} else {
			DRM_DEBUG_KMS("Failed to read display plane latency. "
				      "Disable CxSR\n");
		}

		/* Clock gating setup is per-generation on PCH-split parts. */
		if (IS_GEN5(dev))
			dev_priv->display.init_clock_gating = ironlake_init_clock_gating;
		else if (IS_GEN6(dev))
			dev_priv->display.init_clock_gating = gen6_init_clock_gating;
		else if (IS_IVYBRIDGE(dev))
			dev_priv->display.init_clock_gating = ivybridge_init_clock_gating;
		else if (IS_HASWELL(dev))
			dev_priv->display.init_clock_gating = haswell_init_clock_gating;
		else if (INTEL_INFO(dev)->gen == 8)
			dev_priv->display.init_clock_gating = broadwell_init_clock_gating;
	} else if (IS_CHERRYVIEW(dev)) {
		dev_priv->display.update_wm = cherryview_update_wm;
		/* CHV reuses the VLV sprite watermark path. */
		dev_priv->display.update_sprite_wm = valleyview_update_sprite_wm;
		dev_priv->display.init_clock_gating =
			cherryview_init_clock_gating;
	} else if (IS_VALLEYVIEW(dev)) {
		dev_priv->display.update_wm = valleyview_update_wm;
		dev_priv->display.update_sprite_wm = valleyview_update_sprite_wm;
		dev_priv->display.init_clock_gating =
			valleyview_init_clock_gating;
	} else if (IS_PINEVIEW(dev)) {
		if (!intel_get_cxsr_latency(IS_PINEVIEW_G(dev),
					    dev_priv->is_ddr3,
					    dev_priv->fsb_freq,
					    dev_priv->mem_freq)) {
			DRM_INFO("failed to find known CxSR latency "
				 "(found ddr%s fsb freq %d, mem freq %d), "
				 "disabling CxSR\n",
				 (dev_priv->is_ddr3 == 1) ? "3" : "2",
				 dev_priv->fsb_freq, dev_priv->mem_freq);
			/* Disable CxSR and never update its watermark again */
			intel_set_memory_cxsr(dev_priv, false);
			dev_priv->display.update_wm = NULL;
		} else
			dev_priv->display.update_wm = pineview_update_wm;
		dev_priv->display.init_clock_gating = gen3_init_clock_gating;
	} else if (IS_G4X(dev)) {
		dev_priv->display.update_wm = g4x_update_wm;
		dev_priv->display.init_clock_gating = g4x_init_clock_gating;
	} else if (IS_GEN4(dev)) {
		dev_priv->display.update_wm = i965_update_wm;
		if (IS_CRESTLINE(dev))
			dev_priv->display.init_clock_gating = crestline_init_clock_gating;
		else if (IS_BROADWATER(dev))
			dev_priv->display.init_clock_gating = broadwater_init_clock_gating;
	} else if (IS_GEN3(dev)) {
		dev_priv->display.update_wm = i9xx_update_wm;
		dev_priv->display.get_fifo_size = i9xx_get_fifo_size;
		dev_priv->display.init_clock_gating = gen3_init_clock_gating;
	} else if (IS_GEN2(dev)) {
		/* Single-pipe gen2 parts use the i845 FIFO/WM variants. */
		if (INTEL_INFO(dev)->num_pipes == 1) {
			dev_priv->display.update_wm = i845_update_wm;
			dev_priv->display.get_fifo_size = i845_get_fifo_size;
		} else {
			dev_priv->display.update_wm = i9xx_update_wm;
			dev_priv->display.get_fifo_size = i830_get_fifo_size;
		}

		if (IS_I85X(dev) || IS_I865G(dev))
			dev_priv->display.init_clock_gating = i85x_init_clock_gating;
		else
			dev_priv->display.init_clock_gating = i830_init_clock_gating;
	} else {
		DRM_ERROR("unexpected fall-through in intel_init_pm\n");
	}
}
42c0526c9   Ben Widawsky   drm/i915: Extract...
6974
6975
/*
 * Read a value from the PCU (power control unit) via the SNB+ pcode
 * mailbox.  @mbox selects the command; on entry *val carries the request
 * parameter written to the data register, on success it is overwritten
 * with the PCU's reply.
 *
 * Returns 0 on success, -EAGAIN if the mailbox is busy, -ETIMEDOUT if
 * the PCU does not respond within 500ms.  Caller must hold rps.hw_lock
 * to serialize mailbox accesses.
 */
int sandybridge_pcode_read(struct drm_i915_private *dev_priv, u8 mbox, u32 *val)
{
	WARN_ON(!mutex_is_locked(&dev_priv->rps.hw_lock));

	/* A set READY bit means a previous transaction is still pending. */
	if (I915_READ(GEN6_PCODE_MAILBOX) & GEN6_PCODE_READY) {
		DRM_DEBUG_DRIVER("warning: pcode (read) mailbox access failed\n");
		return -EAGAIN;
	}

	/* Request parameter goes in DATA, then kick off the command. */
	I915_WRITE(GEN6_PCODE_DATA, *val);
	I915_WRITE(GEN6_PCODE_MAILBOX, GEN6_PCODE_READY | mbox);

	/* The PCU clears READY when it has processed the command. */
	if (wait_for((I915_READ(GEN6_PCODE_MAILBOX) & GEN6_PCODE_READY) == 0,
		     500)) {
		DRM_ERROR("timeout waiting for pcode read (%d) to finish\n", mbox);
		return -ETIMEDOUT;
	}

	*val = I915_READ(GEN6_PCODE_DATA);
	I915_WRITE(GEN6_PCODE_DATA, 0);

	return 0;
}
  
/*
 * Write @val to the PCU via the SNB+ pcode mailbox command @mbox.
 *
 * Returns 0 on success, -EAGAIN if the mailbox is busy, -ETIMEDOUT if
 * the PCU does not ack within 500ms.  Caller must hold rps.hw_lock to
 * serialize mailbox accesses.
 */
int sandybridge_pcode_write(struct drm_i915_private *dev_priv, u8 mbox, u32 val)
{
	WARN_ON(!mutex_is_locked(&dev_priv->rps.hw_lock));

	/* A set READY bit means a previous transaction is still pending. */
	if (I915_READ(GEN6_PCODE_MAILBOX) & GEN6_PCODE_READY) {
		DRM_DEBUG_DRIVER("warning: pcode (write) mailbox access failed\n");
		return -EAGAIN;
	}

	I915_WRITE(GEN6_PCODE_DATA, val);
	I915_WRITE(GEN6_PCODE_MAILBOX, GEN6_PCODE_READY | mbox);

	/* The PCU clears READY when it has consumed the command. */
	if (wait_for((I915_READ(GEN6_PCODE_MAILBOX) & GEN6_PCODE_READY) == 0,
		     500)) {
		DRM_ERROR("timeout waiting for pcode write (%d) to finish\n", mbox);
		return -ETIMEDOUT;
	}

	I915_WRITE(GEN6_PCODE_DATA, 0);

	return 0;
}
a0e4e199a   Jesse Barnes   drm/i915: add Pun...
7024

b55dd6472   Fengguang Wu   drm/i915: byt_gpu...
7025
  static int byt_gpu_freq(struct drm_i915_private *dev_priv, int val)
855ba3be1   Jesse Barnes   drm/i915: VLV GPU...
7026
  {
07ab118b3   Ville Syrjälä   drm/i915: Improve...
7027
  	int div;
855ba3be1   Jesse Barnes   drm/i915: VLV GPU...
7028

07ab118b3   Ville Syrjälä   drm/i915: Improve...
7029
  	/* 4 x czclk */
2ec3815f2   Ville Syrjälä   drm/i915: Pass de...
7030
  	switch (dev_priv->mem_freq) {
855ba3be1   Jesse Barnes   drm/i915: VLV GPU...
7031
  	case 800:
07ab118b3   Ville Syrjälä   drm/i915: Improve...
7032
  		div = 10;
855ba3be1   Jesse Barnes   drm/i915: VLV GPU...
7033
7034
  		break;
  	case 1066:
07ab118b3   Ville Syrjälä   drm/i915: Improve...
7035
  		div = 12;
855ba3be1   Jesse Barnes   drm/i915: VLV GPU...
7036
7037
  		break;
  	case 1333:
07ab118b3   Ville Syrjälä   drm/i915: Improve...
7038
  		div = 16;
855ba3be1   Jesse Barnes   drm/i915: VLV GPU...
7039
7040
7041
7042
  		break;
  	default:
  		return -1;
  	}
2ec3815f2   Ville Syrjälä   drm/i915: Pass de...
7043
  	return DIV_ROUND_CLOSEST(dev_priv->mem_freq * (val + 6 - 0xbd), 4 * div);
855ba3be1   Jesse Barnes   drm/i915: VLV GPU...
7044
  }
b55dd6472   Fengguang Wu   drm/i915: byt_gpu...
7045
  static int byt_freq_opcode(struct drm_i915_private *dev_priv, int val)
855ba3be1   Jesse Barnes   drm/i915: VLV GPU...
7046
  {
07ab118b3   Ville Syrjälä   drm/i915: Improve...
7047
  	int mul;
855ba3be1   Jesse Barnes   drm/i915: VLV GPU...
7048

07ab118b3   Ville Syrjälä   drm/i915: Improve...
7049
  	/* 4 x czclk */
2ec3815f2   Ville Syrjälä   drm/i915: Pass de...
7050
  	switch (dev_priv->mem_freq) {
855ba3be1   Jesse Barnes   drm/i915: VLV GPU...
7051
  	case 800:
07ab118b3   Ville Syrjälä   drm/i915: Improve...
7052
  		mul = 10;
855ba3be1   Jesse Barnes   drm/i915: VLV GPU...
7053
7054
  		break;
  	case 1066:
07ab118b3   Ville Syrjälä   drm/i915: Improve...
7055
  		mul = 12;
855ba3be1   Jesse Barnes   drm/i915: VLV GPU...
7056
7057
  		break;
  	case 1333:
07ab118b3   Ville Syrjälä   drm/i915: Improve...
7058
  		mul = 16;
855ba3be1   Jesse Barnes   drm/i915: VLV GPU...
7059
7060
7061
7062
  		break;
  	default:
  		return -1;
  	}
2ec3815f2   Ville Syrjälä   drm/i915: Pass de...
7063
  	return DIV_ROUND_CLOSEST(4 * mul * val, dev_priv->mem_freq) + 0xbd - 6;
855ba3be1   Jesse Barnes   drm/i915: VLV GPU...
7064
  }
b55dd6472   Fengguang Wu   drm/i915: byt_gpu...
7065
  static int chv_gpu_freq(struct drm_i915_private *dev_priv, int val)
22b1b2f86   Deepak S   drm/i915: CHV GPU...
7066
7067
7068
7069
7070
7071
7072
7073
7074
7075
7076
7077
7078
7079
7080
7081
7082
7083
7084
7085
7086
7087
7088
  {
  	int div, freq;
  
  	switch (dev_priv->rps.cz_freq) {
  	case 200:
  		div = 5;
  		break;
  	case 267:
  		div = 6;
  		break;
  	case 320:
  	case 333:
  	case 400:
  		div = 8;
  		break;
  	default:
  		return -1;
  	}
  
  	freq = (DIV_ROUND_CLOSEST((dev_priv->rps.cz_freq * val), 2 * div) / 2);
  
  	return freq;
  }
b55dd6472   Fengguang Wu   drm/i915: byt_gpu...
7089
  static int chv_freq_opcode(struct drm_i915_private *dev_priv, int val)
22b1b2f86   Deepak S   drm/i915: CHV GPU...
7090
7091
7092
7093
7094
7095
7096
7097
7098
7099
7100
7101
7102
7103
7104
7105
7106
7107
  {
  	int mul, opcode;
  
  	switch (dev_priv->rps.cz_freq) {
  	case 200:
  		mul = 5;
  		break;
  	case 267:
  		mul = 6;
  		break;
  	case 320:
  	case 333:
  	case 400:
  		mul = 8;
  		break;
  	default:
  		return -1;
  	}
1c14762d0   Ville Syrjälä   drm/i915: Warn ab...
7108
  	/* CHV needs even values */
22b1b2f86   Deepak S   drm/i915: CHV GPU...
7109
7110
7111
7112
7113
7114
7115
7116
7117
7118
7119
7120
7121
7122
7123
7124
7125
7126
7127
7128
7129
7130
7131
7132
7133
7134
7135
7136
  	opcode = (DIV_ROUND_CLOSEST((val * 2 * mul), dev_priv->rps.cz_freq) * 2);
  
  	return opcode;
  }
  
  int vlv_gpu_freq(struct drm_i915_private *dev_priv, int val)
  {
  	int ret = -1;
  
  	if (IS_CHERRYVIEW(dev_priv->dev))
  		ret = chv_gpu_freq(dev_priv, val);
  	else if (IS_VALLEYVIEW(dev_priv->dev))
  		ret = byt_gpu_freq(dev_priv, val);
  
  	return ret;
  }
  
  int vlv_freq_opcode(struct drm_i915_private *dev_priv, int val)
  {
  	int ret = -1;
  
  	if (IS_CHERRYVIEW(dev_priv->dev))
  		ret = chv_freq_opcode(dev_priv, val);
  	else if (IS_VALLEYVIEW(dev_priv->dev))
  		ret = byt_freq_opcode(dev_priv, val);
  
  	return ret;
  }
f742a5523   Daniel Vetter   drm/i915: fix pm ...
7137
/*
 * Early, software-only PM state initialization: locks, work items and
 * flags that must exist before any of the power-management paths run.
 * No hardware access happens here.
 */
void intel_pm_setup(struct drm_device *dev)
{
	struct drm_i915_private *dev_priv = dev->dev_private;

	mutex_init(&dev_priv->rps.hw_lock);

	/* Deferred worker that re-enables RPS/RC6 after resume. */
	INIT_DELAYED_WORK(&dev_priv->rps.delayed_resume_work,
			  intel_gen6_powersave_work);

	/* We start out active with interrupts enabled. */
	dev_priv->pm.suspended = false;
	dev_priv->pm._irqs_disabled = false;
}