@@ -1605,17 +1605,17 @@ void i915_vma_close(struct i915_vma *vma)
 
 static void __i915_vma_remove_closed(struct i915_vma *vma)
 {
-	struct intel_gt *gt = vma->vm->gt;
-
-	spin_lock_irq(&gt->closed_lock);
 	list_del_init(&vma->closed_link);
-	spin_unlock_irq(&gt->closed_lock);
 }
 
 void i915_vma_reopen(struct i915_vma *vma)
 {
+	struct intel_gt *gt = vma->vm->gt;
+
+	spin_lock_irq(&gt->closed_lock);
 	if (i915_vma_is_closed(vma))
 		__i915_vma_remove_closed(vma);
+	spin_unlock_irq(&gt->closed_lock);
 }
 
 void i915_vma_release(struct kref *ref)
@@ -1641,6 +1641,7 @@ static void force_unbind(struct i915_vma *vma)
 static void release_references(struct i915_vma *vma)
 {
 	struct drm_i915_gem_object *obj = vma->obj;
+	struct intel_gt *gt = vma->vm->gt;
 
 	GEM_BUG_ON(i915_vma_is_active(vma));
 
@@ -1650,7 +1651,9 @@ static void release_references(struct i915_vma *vma)
 	rb_erase(&vma->obj_node, &obj->vma.tree);
 	spin_unlock(&obj->vma.lock);
 
+	spin_lock_irq(&gt->closed_lock);
 	__i915_vma_remove_closed(vma);
+	spin_unlock_irq(&gt->closed_lock);
 
 	__i915_vma_put(vma);
 }
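As far as the hunks themselves show, the change hoists gt->closed_lock out of __i915_vma_remove_closed() and into its callers: i915_vma_reopen() now holds the lock across both the i915_vma_is_closed() check and the list_del_init() of closed_link, and release_references() takes the same lock around its call. The sketch below is a minimal, self-contained illustration of that caller-holds-the-lock pattern using plain pthreads; the names (remove_closed, reopen, closed_lock, node) only mirror the kernel code and are hypothetical, not the actual i915 implementation.

/* Hypothetical sketch: hoisting a lock from a helper into its callers so a
 * check-then-remove pair becomes one critical section.
 * Build with: cc sketch.c -o sketch -lpthread */
#include <pthread.h>
#include <stdbool.h>
#include <stdio.h>

struct node {
        struct node *prev, *next;       /* doubly linked, list_del_init-style */
};

static pthread_mutex_t closed_lock = PTHREAD_MUTEX_INITIALIZER;

static bool node_is_closed(const struct node *n)
{
        return n->next != n;            /* linked into a list: analogous to i915_vma_is_closed() */
}

/* After the change, the helper only unlinks; callers own the locking. */
static void remove_closed(struct node *n)
{
        n->prev->next = n->next;
        n->next->prev = n->prev;
        n->next = n->prev = n;          /* re-initialise, like list_del_init() */
}

/* Caller takes the lock around BOTH the check and the removal, so another
 * thread cannot unlink the node between the two steps. */
static void reopen(struct node *n)
{
        pthread_mutex_lock(&closed_lock);
        if (node_is_closed(n))
                remove_closed(n);
        pthread_mutex_unlock(&closed_lock);
}

int main(void)
{
        struct node head = { &head, &head };
        struct node n = { &n, &n };

        /* Put n on the "closed" list, then reopen it. */
        pthread_mutex_lock(&closed_lock);
        n.next = head.next;
        n.prev = &head;
        head.next->prev = &n;
        head.next = &n;
        pthread_mutex_unlock(&closed_lock);

        reopen(&n);
        printf("closed after reopen: %d\n", node_is_closed(&n));   /* prints 0 */
        return 0;
}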