@@ -258,12 +258,94 @@ gp100_vmm_pd0_unmap(struct nvkm_vmm *vmm,
258
258
VMM_FO128 (pt , vmm , pdei * 0x10 , 0ULL , 0ULL , pdes );
259
259
}
260
260
261
+ static void
262
+ gp100_vmm_pd0_pfn_unmap (struct nvkm_vmm * vmm ,
263
+ struct nvkm_mmu_pt * pt , u32 ptei , u32 ptes )
264
+ {
265
+ struct device * dev = vmm -> mmu -> subdev .device -> dev ;
266
+ dma_addr_t addr ;
267
+
268
+ nvkm_kmap (pt -> memory );
269
+ while (ptes -- ) {
270
+ u32 datalo = nvkm_ro32 (pt -> memory , pt -> base + ptei * 16 + 0 );
271
+ u32 datahi = nvkm_ro32 (pt -> memory , pt -> base + ptei * 16 + 4 );
272
+ u64 data = (u64 )datahi << 32 | datalo ;
273
+
274
+ if ((data & (3ULL << 1 )) != 0 ) {
275
+ addr = (data >> 8 ) << 12 ;
276
+ dma_unmap_page (dev , addr , 1UL << 21 , DMA_BIDIRECTIONAL );
277
+ }
278
+ ptei ++ ;
279
+ }
280
+ nvkm_done (pt -> memory );
281
+ }
282
+
283
+ static bool
284
+ gp100_vmm_pd0_pfn_clear (struct nvkm_vmm * vmm ,
285
+ struct nvkm_mmu_pt * pt , u32 ptei , u32 ptes )
286
+ {
287
+ bool dma = false;
288
+
289
+ nvkm_kmap (pt -> memory );
290
+ while (ptes -- ) {
291
+ u32 datalo = nvkm_ro32 (pt -> memory , pt -> base + ptei * 16 + 0 );
292
+ u32 datahi = nvkm_ro32 (pt -> memory , pt -> base + ptei * 16 + 4 );
293
+ u64 data = (u64 )datahi << 32 | datalo ;
294
+
295
+ if ((data & BIT_ULL (0 )) && (data & (3ULL << 1 )) != 0 ) {
296
+ VMM_WO064 (pt , vmm , ptei * 16 , data & ~BIT_ULL (0 ));
297
+ dma = true;
298
+ }
299
+ ptei ++ ;
300
+ }
301
+ nvkm_done (pt -> memory );
302
+ return dma ;
303
+ }
304
+
305
+ static void
306
+ gp100_vmm_pd0_pfn (struct nvkm_vmm * vmm , struct nvkm_mmu_pt * pt ,
307
+ u32 ptei , u32 ptes , struct nvkm_vmm_map * map )
308
+ {
309
+ struct device * dev = vmm -> mmu -> subdev .device -> dev ;
310
+ dma_addr_t addr ;
311
+
312
+ nvkm_kmap (pt -> memory );
313
+ while (ptes -- ) {
314
+ u64 data = 0 ;
315
+
316
+ if (!(* map -> pfn & NVKM_VMM_PFN_W ))
317
+ data |= BIT_ULL (6 ); /* RO. */
318
+
319
+ if (!(* map -> pfn & NVKM_VMM_PFN_VRAM )) {
320
+ addr = * map -> pfn >> NVKM_VMM_PFN_ADDR_SHIFT ;
321
+ addr = dma_map_page (dev , pfn_to_page (addr ), 0 ,
322
+ 1UL << 21 , DMA_BIDIRECTIONAL );
323
+ if (!WARN_ON (dma_mapping_error (dev , addr ))) {
324
+ data |= addr >> 4 ;
325
+ data |= 2ULL << 1 ; /* SYSTEM_COHERENT_MEMORY. */
326
+ data |= BIT_ULL (3 ); /* VOL. */
327
+ data |= BIT_ULL (0 ); /* VALID. */
328
+ }
329
+ } else {
330
+ data |= (* map -> pfn & NVKM_VMM_PFN_ADDR ) >> 4 ;
331
+ data |= BIT_ULL (0 ); /* VALID. */
332
+ }
333
+
334
+ VMM_WO064 (pt , vmm , ptei ++ * 16 , data );
335
+ map -> pfn ++ ;
336
+ }
337
+ nvkm_done (pt -> memory );
338
+ }
339
+
261
340
/* Page-directory-level-0 descriptor ops for GP100+: in addition to the
 * regular unmap/sparse/pde/mem hooks, huge-page pfn mapping is wired up
 * via the pfn/pfn_clear/pfn_unmap callbacks defined above.
 */
static const struct nvkm_vmm_desc_func
gp100_vmm_desc_pd0 = {
	.unmap = gp100_vmm_pd0_unmap,
	.sparse = gp100_vmm_pd0_sparse,
	.pde = gp100_vmm_pd0_pde,
	.mem = gp100_vmm_pd0_mem,
	.pfn = gp100_vmm_pd0_pfn,
	.pfn_clear = gp100_vmm_pd0_pfn_clear,
	.pfn_unmap = gp100_vmm_pd0_pfn_unmap,
};
268
350
269
351
static void
0 commit comments