Commit bc9025bdc4e2b591734cca17697093845007b63d
Committed by: Dave Airlie
Parent: c3ae90c099
Exists in master and in 7 other branches
Use drm_gem_object_[handle_]unreference_unlocked where possible
Mostly obvious simplifications. The i915 pread/pwrite ioctls, intel_overlay_put_image and nouveau_gem_new were incorrectly using the locked versions without locking: this is also fixed in this patch.

Signed-off-by: Luca Barbieri <luca@luca-barbieri.com>
Signed-off-by: Dave Airlie <airlied@redhat.com>
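The conversion itself is mechanical; schematically it looks like the sketch below. This is illustrative only and not part of the patch: the example_put_* function names are made up, and dev/obj stand for whatever device and GEM object each call site already holds.

/* Illustrative sketch only -- not taken from the patch. */
#include "drmP.h"

/* Before: every caller open-coded the struct_mutex locking around the put. */
static void example_put_locked(struct drm_device *dev,
                               struct drm_gem_object *obj)
{
        mutex_lock(&dev->struct_mutex);
        drm_gem_object_unreference(obj);
        mutex_unlock(&dev->struct_mutex);
}

/* After: the _unlocked helper handles struct_mutex itself, so the call site
 * shrinks to one line and cannot forget the lock or take it twice. */
static void example_put_unlocked(struct drm_gem_object *obj)
{
        drm_gem_object_unreference_unlocked(obj);
}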
Showing 15 changed files with 47 additions and 121 deletions
- drivers/gpu/drm/drm_gem.c
- drivers/gpu/drm/i915/i915_gem.c
- drivers/gpu/drm/i915/i915_gem_tiling.c
- drivers/gpu/drm/i915/intel_display.c
- drivers/gpu/drm/i915/intel_overlay.c
- drivers/gpu/drm/nouveau/nouveau_display.c
- drivers/gpu/drm/nouveau/nouveau_fbcon.c
- drivers/gpu/drm/nouveau/nouveau_gem.c
- drivers/gpu/drm/nouveau/nouveau_notifier.c
- drivers/gpu/drm/nouveau/nv04_crtc.c
- drivers/gpu/drm/nouveau/nv50_crtc.c
- drivers/gpu/drm/radeon/radeon_cs.c
- drivers/gpu/drm/radeon/radeon_cursor.c
- drivers/gpu/drm/radeon/radeon_display.c
- drivers/gpu/drm/radeon/radeon_gem.c
drivers/gpu/drm/drm_gem.c
@@ -192,9 +192,7 @@
         idr_remove(&filp->object_idr, handle);
         spin_unlock(&filp->table_lock);
 
-        mutex_lock(&dev->struct_mutex);
-        drm_gem_object_handle_unreference(obj);
-        mutex_unlock(&dev->struct_mutex);
+        drm_gem_object_handle_unreference_unlocked(obj);
 
         return 0;
 }
@@ -325,9 +323,7 @@
         }
 
 err:
-        mutex_lock(&dev->struct_mutex);
-        drm_gem_object_unreference(obj);
-        mutex_unlock(&dev->struct_mutex);
+        drm_gem_object_unreference_unlocked(obj);
         return ret;
 }
 
@@ -358,9 +354,7 @@
                 return -ENOENT;
 
         ret = drm_gem_handle_create(file_priv, obj, &handle);
-        mutex_lock(&dev->struct_mutex);
-        drm_gem_object_unreference(obj);
-        mutex_unlock(&dev->struct_mutex);
+        drm_gem_object_unreference_unlocked(obj);
         if (ret)
                 return ret;
 
@@ -390,7 +384,7 @@
 {
         struct drm_gem_object *obj = ptr;
 
-        drm_gem_object_handle_unreference(obj);
+        drm_gem_object_handle_unreference_unlocked(obj);
 
         return 0;
 }
@@ -403,12 +397,10 @@
 void
 drm_gem_release(struct drm_device *dev, struct drm_file *file_private)
 {
-        mutex_lock(&dev->struct_mutex);
         idr_for_each(&file_private->object_idr,
                      &drm_gem_object_release_handle, NULL);
 
         idr_destroy(&file_private->object_idr);
-        mutex_unlock(&dev->struct_mutex);
 }
 
 static void
@@ -516,11 +508,8 @@
 void drm_gem_vm_close(struct vm_area_struct *vma)
 {
         struct drm_gem_object *obj = vma->vm_private_data;
-        struct drm_device *dev = obj->dev;
 
-        mutex_lock(&dev->struct_mutex);
-        drm_gem_object_unreference(obj);
-        mutex_unlock(&dev->struct_mutex);
+        drm_gem_object_unreference_unlocked(obj);
 }
 EXPORT_SYMBOL(drm_gem_vm_close);
 
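For context, the _unlocked helpers used throughout this patch wrap the same reference drop so that struct_mutex is handled inside the DRM core rather than at each call site. The sketch below is only an assumption for illustration (the in-tree helper may instead defer the locking to its kref release path), not the kernel's verbatim definition; the example_ name is hypothetical.

/* Sketch under the assumption that the helper takes struct_mutex itself. */
static inline void
example_gem_object_unreference_unlocked(struct drm_gem_object *obj)
{
        if (obj == NULL)
                return;

        mutex_lock(&obj->dev->struct_mutex);
        kref_put(&obj->refcount, drm_gem_object_free);
        mutex_unlock(&obj->dev->struct_mutex);
}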
drivers/gpu/drm/i915/i915_gem.c
@@ -128,9 +128,7 @@
                 return -ENOMEM;
 
         ret = drm_gem_handle_create(file_priv, obj, &handle);
-        mutex_lock(&dev->struct_mutex);
-        drm_gem_object_handle_unreference(obj);
-        mutex_unlock(&dev->struct_mutex);
+        drm_gem_object_handle_unreference_unlocked(obj);
 
         if (ret)
                 return ret;
@@ -488,7 +486,7 @@
          */
         if (args->offset > obj->size || args->size > obj->size ||
             args->offset + args->size > obj->size) {
-                drm_gem_object_unreference(obj);
+                drm_gem_object_unreference_unlocked(obj);
                 return -EINVAL;
         }
 
@@ -501,7 +499,7 @@
                                            file_priv);
         }
 
-        drm_gem_object_unreference(obj);
+        drm_gem_object_unreference_unlocked(obj);
 
         return ret;
 }
@@ -961,7 +959,7 @@
          */
         if (args->offset > obj->size || args->size > obj->size ||
             args->offset + args->size > obj->size) {
-                drm_gem_object_unreference(obj);
+                drm_gem_object_unreference_unlocked(obj);
                 return -EINVAL;
         }
 
@@ -995,7 +993,7 @@
                 DRM_INFO("pwrite failed %d\n", ret);
 #endif
 
-        drm_gem_object_unreference(obj);
+        drm_gem_object_unreference_unlocked(obj);
 
         return ret;
 }
@@ -1138,9 +1136,7 @@
                        PROT_READ | PROT_WRITE, MAP_SHARED,
                        args->offset);
         up_write(&current->mm->mmap_sem);
-        mutex_lock(&dev->struct_mutex);
-        drm_gem_object_unreference(obj);
-        mutex_unlock(&dev->struct_mutex);
+        drm_gem_object_unreference_unlocked(obj);
         if (IS_ERR((void *)addr))
                 return addr;
 
drivers/gpu/drm/i915/i915_gem_tiling.c
@@ -438,9 +438,7 @@
         obj_priv = obj->driver_private;
 
         if (!i915_tiling_ok(dev, args->stride, obj->size, args->tiling_mode)) {
-                mutex_lock(&dev->struct_mutex);
-                drm_gem_object_unreference(obj);
-                mutex_unlock(&dev->struct_mutex);
+                drm_gem_object_unreference_unlocked(obj);
                 return -EINVAL;
         }
 
drivers/gpu/drm/i915/intel_display.c
@@ -3434,11 +3434,10 @@
         intel_crtc->cursor_bo = bo;
 
         return 0;
-fail:
-        mutex_lock(&dev->struct_mutex);
 fail_locked:
-        drm_gem_object_unreference(bo);
         mutex_unlock(&dev->struct_mutex);
+fail:
+        drm_gem_object_unreference_unlocked(bo);
         return ret;
 }
 
@@ -4351,9 +4350,7 @@
                 intelfb_remove(dev, fb);
 
         drm_framebuffer_cleanup(fb);
-        mutex_lock(&dev->struct_mutex);
-        drm_gem_object_unreference(intel_fb->obj);
-        mutex_unlock(&dev->struct_mutex);
+        drm_gem_object_unreference_unlocked(intel_fb->obj);
 
         kfree(intel_fb);
 }
@@ -4416,9 +4413,7 @@
 
         ret = intel_framebuffer_create(dev, mode_cmd, &fb, obj);
         if (ret) {
-                mutex_lock(&dev->struct_mutex);
-                drm_gem_object_unreference(obj);
-                mutex_unlock(&dev->struct_mutex);
+                drm_gem_object_unreference_unlocked(obj);
                 return NULL;
         }
 
drivers/gpu/drm/i915/intel_overlay.c
drivers/gpu/drm/nouveau/nouveau_display.c
@@ -39,11 +39,8 @@
         if (drm_fb->fbdev)
                 nouveau_fbcon_remove(dev, drm_fb);
 
-        if (fb->nvbo) {
-                mutex_lock(&dev->struct_mutex);
-                drm_gem_object_unreference(fb->nvbo->gem);
-                mutex_unlock(&dev->struct_mutex);
-        }
+        if (fb->nvbo)
+                drm_gem_object_unreference_unlocked(fb->nvbo->gem);
 
         drm_framebuffer_cleanup(drm_fb);
         kfree(fb);
drivers/gpu/drm/nouveau/nouveau_fbcon.c
@@ -401,10 +401,8 @@
 
         unregister_framebuffer(info);
         nouveau_bo_unmap(nouveau_fb->nvbo);
-        mutex_lock(&dev->struct_mutex);
-        drm_gem_object_unreference(nouveau_fb->nvbo->gem);
+        drm_gem_object_unreference_unlocked(nouveau_fb->nvbo->gem);
         nouveau_fb->nvbo = NULL;
-        mutex_unlock(&dev->struct_mutex);
         if (par)
                 drm_fb_helper_free(&par->helper);
         framebuffer_release(info);
drivers/gpu/drm/nouveau/nouveau_gem.c
@@ -167,12 +167,10 @@
 
         ret = drm_gem_handle_create(file_priv, nvbo->gem, &req->info.handle);
 out:
-        mutex_lock(&dev->struct_mutex);
-        drm_gem_object_handle_unreference(nvbo->gem);
-        mutex_unlock(&dev->struct_mutex);
+        drm_gem_object_handle_unreference_unlocked(nvbo->gem);
 
         if (ret)
-                drm_gem_object_unreference(nvbo->gem);
+                drm_gem_object_unreference_unlocked(nvbo->gem);
         return ret;
 }
 
@@ -865,9 +863,7 @@
                 req->domain = NOUVEAU_GEM_DOMAIN_VRAM;
 
 out:
-        mutex_lock(&dev->struct_mutex);
-        drm_gem_object_unreference(gem);
-        mutex_unlock(&dev->struct_mutex);
+        drm_gem_object_unreference_unlocked(gem);
 
         return ret;
 }
@@ -891,9 +887,7 @@
 
         ret = nouveau_bo_unpin(nouveau_gem_object(gem));
 
-        mutex_lock(&dev->struct_mutex);
-        drm_gem_object_unreference(gem);
-        mutex_unlock(&dev->struct_mutex);
+        drm_gem_object_unreference_unlocked(gem);
 
         return ret;
 }
@@ -935,9 +929,7 @@
         }
 
 out:
-        mutex_lock(&dev->struct_mutex);
-        drm_gem_object_unreference(gem);
-        mutex_unlock(&dev->struct_mutex);
+        drm_gem_object_unreference_unlocked(gem);
         return ret;
 }
 
@@ -965,9 +957,7 @@
         ret = 0;
 
 out:
-        mutex_lock(&dev->struct_mutex);
-        drm_gem_object_unreference(gem);
-        mutex_unlock(&dev->struct_mutex);
+        drm_gem_object_unreference_unlocked(gem);
         return ret;
 }
 
@@ -986,9 +976,7 @@
                 return -EINVAL;
 
         ret = nouveau_gem_info(gem, req);
-        mutex_lock(&dev->struct_mutex);
-        drm_gem_object_unreference(gem);
-        mutex_unlock(&dev->struct_mutex);
+        drm_gem_object_unreference_unlocked(gem);
         return ret;
 }
drivers/gpu/drm/nouveau/nouveau_notifier.c
@@ -61,11 +61,8 @@
 
         chan->notifier_bo = ntfy;
 out_err:
-        if (ret) {
-                mutex_lock(&dev->struct_mutex);
-                drm_gem_object_unreference(ntfy->gem);
-                mutex_unlock(&dev->struct_mutex);
-        }
+        if (ret)
+                drm_gem_object_unreference_unlocked(ntfy->gem);
 
         return ret;
 }
@@ -81,8 +78,8 @@
         nouveau_bo_unmap(chan->notifier_bo);
         mutex_lock(&dev->struct_mutex);
         nouveau_bo_unpin(chan->notifier_bo);
-        drm_gem_object_unreference(chan->notifier_bo->gem);
         mutex_unlock(&dev->struct_mutex);
+        drm_gem_object_unreference_unlocked(chan->notifier_bo->gem);
         nouveau_mem_takedown(&chan->notifier_heap);
 }
 
drivers/gpu/drm/nouveau/nv04_crtc.c
@@ -926,9 +926,7 @@
         nv_crtc->cursor.set_offset(nv_crtc, nv_crtc->cursor.offset);
         nv_crtc->cursor.show(nv_crtc, true);
 out:
-        mutex_lock(&dev->struct_mutex);
-        drm_gem_object_unreference(gem);
-        mutex_unlock(&dev->struct_mutex);
+        drm_gem_object_unreference_unlocked(gem);
         return ret;
 }
 
drivers/gpu/drm/nouveau/nv50_crtc.c
drivers/gpu/drm/radeon/radeon_cs.c
@@ -196,11 +196,8 @@
                 radeon_bo_list_unreserve(&parser->validated);
         }
         for (i = 0; i < parser->nrelocs; i++) {
-                if (parser->relocs[i].gobj) {
-                        mutex_lock(&parser->rdev->ddev->struct_mutex);
-                        drm_gem_object_unreference(parser->relocs[i].gobj);
-                        mutex_unlock(&parser->rdev->ddev->struct_mutex);
-                }
+                if (parser->relocs[i].gobj)
+                        drm_gem_object_unreference_unlocked(parser->relocs[i].gobj);
         }
         kfree(parser->track);
         kfree(parser->relocs);
drivers/gpu/drm/radeon/radeon_cursor.c
@@ -169,17 +169,13 @@
 unpin:
         if (radeon_crtc->cursor_bo) {
                 radeon_gem_object_unpin(radeon_crtc->cursor_bo);
-                mutex_lock(&crtc->dev->struct_mutex);
-                drm_gem_object_unreference(radeon_crtc->cursor_bo);
-                mutex_unlock(&crtc->dev->struct_mutex);
+                drm_gem_object_unreference_unlocked(radeon_crtc->cursor_bo);
         }
 
         radeon_crtc->cursor_bo = obj;
         return 0;
 fail:
-        mutex_lock(&crtc->dev->struct_mutex);
-        drm_gem_object_unreference(obj);
-        mutex_unlock(&crtc->dev->struct_mutex);
+        drm_gem_object_unreference_unlocked(obj);
 
         return 0;
 }
drivers/gpu/drm/radeon/radeon_display.c
@@ -679,11 +679,8 @@
         if (fb->fbdev)
                 radeonfb_remove(dev, fb);
 
-        if (radeon_fb->obj) {
-                mutex_lock(&dev->struct_mutex);
-                drm_gem_object_unreference(radeon_fb->obj);
-                mutex_unlock(&dev->struct_mutex);
-        }
+        if (radeon_fb->obj)
+                drm_gem_object_unreference_unlocked(radeon_fb->obj);
         drm_framebuffer_cleanup(fb);
         kfree(radeon_fb);
 }
drivers/gpu/drm/radeon/radeon_gem.c
@@ -69,9 +69,7 @@
                 if (r != -ERESTARTSYS)
                         DRM_ERROR("Failed to allocate GEM object (%d, %d, %u, %d)\n",
                                   size, initial_domain, alignment, r);
-                mutex_lock(&rdev->ddev->struct_mutex);
-                drm_gem_object_unreference(gobj);
-                mutex_unlock(&rdev->ddev->struct_mutex);
+                drm_gem_object_unreference_unlocked(gobj);
                 return r;
         }
         gobj->driver_private = robj;
@@ -202,14 +200,10 @@
         }
         r = drm_gem_handle_create(filp, gobj, &handle);
         if (r) {
-                mutex_lock(&dev->struct_mutex);
-                drm_gem_object_unreference(gobj);
-                mutex_unlock(&dev->struct_mutex);
+                drm_gem_object_unreference_unlocked(gobj);
                 return r;
         }
-        mutex_lock(&dev->struct_mutex);
-        drm_gem_object_handle_unreference(gobj);
-        mutex_unlock(&dev->struct_mutex);
+        drm_gem_object_handle_unreference_unlocked(gobj);
         args->handle = handle;
         return 0;
 }
@@ -236,9 +230,7 @@
 
         r = radeon_gem_set_domain(gobj, args->read_domains, args->write_domain);
 
-        mutex_lock(&dev->struct_mutex);
-        drm_gem_object_unreference(gobj);
-        mutex_unlock(&dev->struct_mutex);
+        drm_gem_object_unreference_unlocked(gobj);
         return r;
 }
 
@@ -255,9 +247,7 @@
         }
         robj = gobj->driver_private;
         args->addr_ptr = radeon_bo_mmap_offset(robj);
-        mutex_lock(&dev->struct_mutex);
-        drm_gem_object_unreference(gobj);
-        mutex_unlock(&dev->struct_mutex);
+        drm_gem_object_unreference_unlocked(gobj);
         return 0;
 }
 
@@ -288,9 +278,7 @@
         default:
                 break;
         }
-        mutex_lock(&dev->struct_mutex);
-        drm_gem_object_unreference(gobj);
-        mutex_unlock(&dev->struct_mutex);
+        drm_gem_object_unreference_unlocked(gobj);
         return r;
 }
 
@@ -311,9 +299,7 @@
         /* callback hw specific functions if any */
         if (robj->rdev->asic->ioctl_wait_idle)
                 robj->rdev->asic->ioctl_wait_idle(robj->rdev, robj);
-        mutex_lock(&dev->struct_mutex);
-        drm_gem_object_unreference(gobj);
-        mutex_unlock(&dev->struct_mutex);
+        drm_gem_object_unreference_unlocked(gobj);
         return r;
 }
 
@@ -331,9 +317,7 @@
                 return -EINVAL;
         robj = gobj->driver_private;
         r = radeon_bo_set_tiling_flags(robj, args->tiling_flags, args->pitch);
-        mutex_lock(&dev->struct_mutex);
-        drm_gem_object_unreference(gobj);
-        mutex_unlock(&dev->struct_mutex);
+        drm_gem_object_unreference_unlocked(gobj);
         return r;
 }
 
@@ -356,9 +340,7 @@
         radeon_bo_get_tiling_flags(rbo, &args->tiling_flags, &args->pitch);
         radeon_bo_unreserve(rbo);
 out:
-        mutex_lock(&dev->struct_mutex);
-        drm_gem_object_unreference(gobj);
-        mutex_unlock(&dev->struct_mutex);
+        drm_gem_object_unreference_unlocked(gobj);
         return r;
 }
363 | 345 | } |