@@ -272,3 +272,267 @@ void amdgpu_gfx_rlc_fini(struct amdgpu_device *adev)
 			      &adev->gfx.rlc.cp_table_gpu_addr,
 			      (void **)&adev->gfx.rlc.cp_table_ptr);
 }
+
+static int amdgpu_gfx_rlc_init_microcode_v2_0(struct amdgpu_device *adev)
+{
+	const struct common_firmware_header *common_hdr;
+	const struct rlc_firmware_header_v2_0 *rlc_hdr;
+	struct amdgpu_firmware_info *info;
+	unsigned int *tmp;
+	unsigned int i;
+
+	rlc_hdr = (const struct rlc_firmware_header_v2_0 *)adev->gfx.rlc_fw->data;
+
+	adev->gfx.rlc_fw_version = le32_to_cpu(rlc_hdr->header.ucode_version);
+	adev->gfx.rlc_feature_version = le32_to_cpu(rlc_hdr->ucode_feature_version);
+	adev->gfx.rlc.save_and_restore_offset =
+		le32_to_cpu(rlc_hdr->save_and_restore_offset);
+	adev->gfx.rlc.clear_state_descriptor_offset =
+		le32_to_cpu(rlc_hdr->clear_state_descriptor_offset);
+	adev->gfx.rlc.avail_scratch_ram_locations =
+		le32_to_cpu(rlc_hdr->avail_scratch_ram_locations);
+	adev->gfx.rlc.reg_restore_list_size =
+		le32_to_cpu(rlc_hdr->reg_restore_list_size);
+	adev->gfx.rlc.reg_list_format_start =
+		le32_to_cpu(rlc_hdr->reg_list_format_start);
+	adev->gfx.rlc.reg_list_format_separate_start =
+		le32_to_cpu(rlc_hdr->reg_list_format_separate_start);
+	adev->gfx.rlc.starting_offsets_start =
+		le32_to_cpu(rlc_hdr->starting_offsets_start);
+	adev->gfx.rlc.reg_list_format_size_bytes =
+		le32_to_cpu(rlc_hdr->reg_list_format_size_bytes);
+	adev->gfx.rlc.reg_list_size_bytes =
+		le32_to_cpu(rlc_hdr->reg_list_size_bytes);
+	adev->gfx.rlc.register_list_format =
+		kmalloc(adev->gfx.rlc.reg_list_format_size_bytes +
+			adev->gfx.rlc.reg_list_size_bytes, GFP_KERNEL);
+	if (!adev->gfx.rlc.register_list_format) {
+		dev_err(adev->dev, "failed to allocate memory for rlc register_list_format\n");
+		return -ENOMEM;
+	}
+
+	tmp = (unsigned int *)((uintptr_t)rlc_hdr +
+			       le32_to_cpu(rlc_hdr->reg_list_format_array_offset_bytes));
+	for (i = 0 ; i < (rlc_hdr->reg_list_format_size_bytes >> 2); i++)
+		adev->gfx.rlc.register_list_format[i] = le32_to_cpu(tmp[i]);
+
+	adev->gfx.rlc.register_restore = adev->gfx.rlc.register_list_format + i;
+
+	tmp = (unsigned int *)((uintptr_t)rlc_hdr +
+			       le32_to_cpu(rlc_hdr->reg_list_array_offset_bytes));
+	for (i = 0 ; i < (rlc_hdr->reg_list_size_bytes >> 2); i++)
+		adev->gfx.rlc.register_restore[i] = le32_to_cpu(tmp[i]);
+
+	if (adev->firmware.load_type == AMDGPU_FW_LOAD_PSP) {
+		info = &adev->firmware.ucode[AMDGPU_UCODE_ID_RLC_G];
+		info->ucode_id = AMDGPU_UCODE_ID_RLC_G;
+		info->fw = adev->gfx.rlc_fw;
+		if (info->fw) {
+			common_hdr = (const struct common_firmware_header *)info->fw->data;
+			adev->firmware.fw_size +=
+				ALIGN(le32_to_cpu(common_hdr->ucode_size_bytes), PAGE_SIZE);
+		}
+	}
+
+	return 0;
+}
+
+static void amdgpu_gfx_rlc_init_microcode_v2_1(struct amdgpu_device *adev)
+{
+	const struct rlc_firmware_header_v2_1 *rlc_hdr;
+	struct amdgpu_firmware_info *info;
+
+	rlc_hdr = (const struct rlc_firmware_header_v2_1 *)adev->gfx.rlc_fw->data;
+	adev->gfx.rlc_srlc_fw_version = le32_to_cpu(rlc_hdr->save_restore_list_cntl_ucode_ver);
+	adev->gfx.rlc_srlc_feature_version = le32_to_cpu(rlc_hdr->save_restore_list_cntl_feature_ver);
+	adev->gfx.rlc.save_restore_list_cntl_size_bytes = le32_to_cpu(rlc_hdr->save_restore_list_cntl_size_bytes);
+	adev->gfx.rlc.save_restore_list_cntl = (u8 *)rlc_hdr + le32_to_cpu(rlc_hdr->save_restore_list_cntl_offset_bytes);
+	adev->gfx.rlc_srlg_fw_version = le32_to_cpu(rlc_hdr->save_restore_list_gpm_ucode_ver);
+	adev->gfx.rlc_srlg_feature_version = le32_to_cpu(rlc_hdr->save_restore_list_gpm_feature_ver);
+	adev->gfx.rlc.save_restore_list_gpm_size_bytes = le32_to_cpu(rlc_hdr->save_restore_list_gpm_size_bytes);
+	adev->gfx.rlc.save_restore_list_gpm = (u8 *)rlc_hdr + le32_to_cpu(rlc_hdr->save_restore_list_gpm_offset_bytes);
+	adev->gfx.rlc_srls_fw_version = le32_to_cpu(rlc_hdr->save_restore_list_srm_ucode_ver);
+	adev->gfx.rlc_srls_feature_version = le32_to_cpu(rlc_hdr->save_restore_list_srm_feature_ver);
+	adev->gfx.rlc.save_restore_list_srm_size_bytes = le32_to_cpu(rlc_hdr->save_restore_list_srm_size_bytes);
+	adev->gfx.rlc.save_restore_list_srm = (u8 *)rlc_hdr + le32_to_cpu(rlc_hdr->save_restore_list_srm_offset_bytes);
+	adev->gfx.rlc.reg_list_format_direct_reg_list_length =
+		le32_to_cpu(rlc_hdr->reg_list_format_direct_reg_list_length);
+
+	if (adev->firmware.load_type == AMDGPU_FW_LOAD_PSP) {
+		if (adev->gfx.rlc.save_restore_list_gpm_size_bytes) {
+			info = &adev->firmware.ucode[AMDGPU_UCODE_ID_RLC_RESTORE_LIST_GPM_MEM];
+			info->ucode_id = AMDGPU_UCODE_ID_RLC_RESTORE_LIST_GPM_MEM;
+			info->fw = adev->gfx.rlc_fw;
+			adev->firmware.fw_size +=
+				ALIGN(adev->gfx.rlc.save_restore_list_gpm_size_bytes, PAGE_SIZE);
+		}
+
+		if (adev->gfx.rlc.save_restore_list_srm_size_bytes) {
+			info = &adev->firmware.ucode[AMDGPU_UCODE_ID_RLC_RESTORE_LIST_SRM_MEM];
+			info->ucode_id = AMDGPU_UCODE_ID_RLC_RESTORE_LIST_SRM_MEM;
+			info->fw = adev->gfx.rlc_fw;
+			adev->firmware.fw_size +=
+				ALIGN(adev->gfx.rlc.save_restore_list_srm_size_bytes, PAGE_SIZE);
+		}
+	}
+}
+
+static void amdgpu_gfx_rlc_init_microcode_v2_2(struct amdgpu_device *adev)
+{
+	const struct rlc_firmware_header_v2_2 *rlc_hdr;
+	struct amdgpu_firmware_info *info;
+
+	rlc_hdr = (const struct rlc_firmware_header_v2_2 *)adev->gfx.rlc_fw->data;
+	adev->gfx.rlc.rlc_iram_ucode_size_bytes = le32_to_cpu(rlc_hdr->rlc_iram_ucode_size_bytes);
+	adev->gfx.rlc.rlc_iram_ucode = (u8 *)rlc_hdr + le32_to_cpu(rlc_hdr->rlc_iram_ucode_offset_bytes);
+	adev->gfx.rlc.rlc_dram_ucode_size_bytes = le32_to_cpu(rlc_hdr->rlc_dram_ucode_size_bytes);
+	adev->gfx.rlc.rlc_dram_ucode = (u8 *)rlc_hdr + le32_to_cpu(rlc_hdr->rlc_dram_ucode_offset_bytes);
+
+	if (adev->firmware.load_type == AMDGPU_FW_LOAD_PSP) {
+		if (adev->gfx.rlc.rlc_iram_ucode_size_bytes) {
+			info = &adev->firmware.ucode[AMDGPU_UCODE_ID_RLC_IRAM];
+			info->ucode_id = AMDGPU_UCODE_ID_RLC_IRAM;
+			info->fw = adev->gfx.rlc_fw;
+			adev->firmware.fw_size +=
+				ALIGN(adev->gfx.rlc.rlc_iram_ucode_size_bytes, PAGE_SIZE);
+		}
+
+		if (adev->gfx.rlc.rlc_dram_ucode_size_bytes) {
+			info = &adev->firmware.ucode[AMDGPU_UCODE_ID_RLC_DRAM];
+			info->ucode_id = AMDGPU_UCODE_ID_RLC_DRAM;
+			info->fw = adev->gfx.rlc_fw;
+			adev->firmware.fw_size +=
+				ALIGN(adev->gfx.rlc.rlc_dram_ucode_size_bytes, PAGE_SIZE);
+		}
+	}
+}
+
+static void amdgpu_gfx_rlc_init_microcode_v2_3(struct amdgpu_device *adev)
+{
+	const struct rlc_firmware_header_v2_3 *rlc_hdr;
+	struct amdgpu_firmware_info *info;
+
+	rlc_hdr = (const struct rlc_firmware_header_v2_3 *)adev->gfx.rlc_fw->data;
+	adev->gfx.rlcp_ucode_version = le32_to_cpu(rlc_hdr->rlcp_ucode_version);
+	adev->gfx.rlcp_ucode_feature_version = le32_to_cpu(rlc_hdr->rlcp_ucode_feature_version);
+	adev->gfx.rlc.rlcp_ucode_size_bytes = le32_to_cpu(rlc_hdr->rlcp_ucode_size_bytes);
+	adev->gfx.rlc.rlcp_ucode = (u8 *)rlc_hdr + le32_to_cpu(rlc_hdr->rlcp_ucode_offset_bytes);
+
+	adev->gfx.rlcv_ucode_version = le32_to_cpu(rlc_hdr->rlcv_ucode_version);
+	adev->gfx.rlcv_ucode_feature_version = le32_to_cpu(rlc_hdr->rlcv_ucode_feature_version);
+	adev->gfx.rlc.rlcv_ucode_size_bytes = le32_to_cpu(rlc_hdr->rlcv_ucode_size_bytes);
+	adev->gfx.rlc.rlcv_ucode = (u8 *)rlc_hdr + le32_to_cpu(rlc_hdr->rlcv_ucode_offset_bytes);
+
+	if (adev->firmware.load_type == AMDGPU_FW_LOAD_PSP) {
+		if (adev->gfx.rlc.rlcp_ucode_size_bytes) {
+			info = &adev->firmware.ucode[AMDGPU_UCODE_ID_RLC_P];
+			info->ucode_id = AMDGPU_UCODE_ID_RLC_P;
+			info->fw = adev->gfx.rlc_fw;
+			adev->firmware.fw_size +=
+				ALIGN(adev->gfx.rlc.rlcp_ucode_size_bytes, PAGE_SIZE);
+		}
+
+		if (adev->gfx.rlc.rlcv_ucode_size_bytes) {
+			info = &adev->firmware.ucode[AMDGPU_UCODE_ID_RLC_V];
+			info->ucode_id = AMDGPU_UCODE_ID_RLC_V;
+			info->fw = adev->gfx.rlc_fw;
+			adev->firmware.fw_size +=
+				ALIGN(adev->gfx.rlc.rlcv_ucode_size_bytes, PAGE_SIZE);
+		}
+	}
+}
+
+static void amdgpu_gfx_rlc_init_microcode_v2_4(struct amdgpu_device *adev)
+{
+	const struct rlc_firmware_header_v2_4 *rlc_hdr;
+	struct amdgpu_firmware_info *info;
+
+	rlc_hdr = (const struct rlc_firmware_header_v2_4 *)adev->gfx.rlc_fw->data;
+	adev->gfx.rlc.global_tap_delays_ucode_size_bytes = le32_to_cpu(rlc_hdr->global_tap_delays_ucode_size_bytes);
+	adev->gfx.rlc.global_tap_delays_ucode = (u8 *)rlc_hdr + le32_to_cpu(rlc_hdr->global_tap_delays_ucode_offset_bytes);
+	adev->gfx.rlc.se0_tap_delays_ucode_size_bytes = le32_to_cpu(rlc_hdr->se0_tap_delays_ucode_size_bytes);
+	adev->gfx.rlc.se0_tap_delays_ucode = (u8 *)rlc_hdr + le32_to_cpu(rlc_hdr->se0_tap_delays_ucode_offset_bytes);
+	adev->gfx.rlc.se1_tap_delays_ucode_size_bytes = le32_to_cpu(rlc_hdr->se1_tap_delays_ucode_size_bytes);
+	adev->gfx.rlc.se1_tap_delays_ucode = (u8 *)rlc_hdr + le32_to_cpu(rlc_hdr->se1_tap_delays_ucode_offset_bytes);
+	adev->gfx.rlc.se2_tap_delays_ucode_size_bytes = le32_to_cpu(rlc_hdr->se2_tap_delays_ucode_size_bytes);
+	adev->gfx.rlc.se2_tap_delays_ucode = (u8 *)rlc_hdr + le32_to_cpu(rlc_hdr->se2_tap_delays_ucode_offset_bytes);
+	adev->gfx.rlc.se3_tap_delays_ucode_size_bytes = le32_to_cpu(rlc_hdr->se3_tap_delays_ucode_size_bytes);
+	adev->gfx.rlc.se3_tap_delays_ucode = (u8 *)rlc_hdr + le32_to_cpu(rlc_hdr->se3_tap_delays_ucode_offset_bytes);
+
+	if (adev->firmware.load_type == AMDGPU_FW_LOAD_PSP) {
+		if (adev->gfx.rlc.global_tap_delays_ucode_size_bytes) {
+			info = &adev->firmware.ucode[AMDGPU_UCODE_ID_GLOBAL_TAP_DELAYS];
+			info->ucode_id = AMDGPU_UCODE_ID_GLOBAL_TAP_DELAYS;
+			info->fw = adev->gfx.rlc_fw;
+			adev->firmware.fw_size +=
+				ALIGN(adev->gfx.rlc.global_tap_delays_ucode_size_bytes, PAGE_SIZE);
+		}
+
+		if (adev->gfx.rlc.se0_tap_delays_ucode_size_bytes) {
+			info = &adev->firmware.ucode[AMDGPU_UCODE_ID_SE0_TAP_DELAYS];
+			info->ucode_id = AMDGPU_UCODE_ID_SE0_TAP_DELAYS;
+			info->fw = adev->gfx.rlc_fw;
+			adev->firmware.fw_size +=
+				ALIGN(adev->gfx.rlc.se0_tap_delays_ucode_size_bytes, PAGE_SIZE);
+		}
+
+		if (adev->gfx.rlc.se1_tap_delays_ucode_size_bytes) {
+			info = &adev->firmware.ucode[AMDGPU_UCODE_ID_SE1_TAP_DELAYS];
+			info->ucode_id = AMDGPU_UCODE_ID_SE1_TAP_DELAYS;
+			info->fw = adev->gfx.rlc_fw;
+			adev->firmware.fw_size +=
+				ALIGN(adev->gfx.rlc.se1_tap_delays_ucode_size_bytes, PAGE_SIZE);
+		}
+
+		if (adev->gfx.rlc.se2_tap_delays_ucode_size_bytes) {
+			info = &adev->firmware.ucode[AMDGPU_UCODE_ID_SE2_TAP_DELAYS];
+			info->ucode_id = AMDGPU_UCODE_ID_SE2_TAP_DELAYS;
+			info->fw = adev->gfx.rlc_fw;
+			adev->firmware.fw_size +=
+				ALIGN(adev->gfx.rlc.se2_tap_delays_ucode_size_bytes, PAGE_SIZE);
+		}
+
+		if (adev->gfx.rlc.se3_tap_delays_ucode_size_bytes) {
+			info = &adev->firmware.ucode[AMDGPU_UCODE_ID_SE3_TAP_DELAYS];
+			info->ucode_id = AMDGPU_UCODE_ID_SE3_TAP_DELAYS;
+			info->fw = adev->gfx.rlc_fw;
+			adev->firmware.fw_size +=
+				ALIGN(adev->gfx.rlc.se3_tap_delays_ucode_size_bytes, PAGE_SIZE);
+		}
+	}
+}
+
+int amdgpu_gfx_rlc_init_microcode(struct amdgpu_device *adev,
+				  uint16_t version_major,
+				  uint16_t version_minor)
+{
+	int err;
+
+	if (version_major < 2) {
+		/* only support rlc_hdr v2.x and onwards */
+		dev_err(adev->dev, "unsupported rlc fw hdr\n");
+		return -EINVAL;
+	}
+
+	/* is_rlc_v2_1 is still used in APU code path */
+	if (version_major == 2 && version_minor == 1)
+		adev->gfx.rlc.is_rlc_v2_1 = true;
+
+	if (version_minor >= 0) {
+		err = amdgpu_gfx_rlc_init_microcode_v2_0(adev);
+		if (err) {
+			dev_err(adev->dev, "fail to init rlc v2_0 microcode\n");
+			return err;
+		}
+	}
+	if (version_minor >= 1)
+		amdgpu_gfx_rlc_init_microcode_v2_1(adev);
+	if (version_minor >= 2)
+		amdgpu_gfx_rlc_init_microcode_v2_2(adev);
+	if (version_minor == 3)
+		amdgpu_gfx_rlc_init_microcode_v2_3(adev);
+	if (version_minor == 4)
+		amdgpu_gfx_rlc_init_microcode_v2_4(adev);
+
+	return 0;
+}
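
For context, a minimal caller-side sketch, not part of the patch above: roughly how a GFX IP block would hand its RLC firmware to the new common helper. It assumes the request_firmware()/amdgpu_ucode_validate() flow the gfx blocks used at the time; the function name gfx_vN_0_init_rlc_microcode_example, the firmware file name, and the abbreviated error handling are illustrative assumptions, not code from this commit.

/* Hypothetical usage sketch; only amdgpu_gfx_rlc_init_microcode() comes from the patch. */
static int gfx_vN_0_init_rlc_microcode_example(struct amdgpu_device *adev,
					       const char *chip_name)
{
	const struct rlc_firmware_header_v2_0 *rlc_hdr;
	uint16_t version_major, version_minor;
	char fw_name[40];
	int err;

	/* load and validate the RLC firmware blob (placeholder file name) */
	snprintf(fw_name, sizeof(fw_name), "amdgpu/%s_rlc.bin", chip_name);
	err = request_firmware(&adev->gfx.rlc_fw, fw_name, adev->dev);
	if (err)
		return err;
	err = amdgpu_ucode_validate(adev->gfx.rlc_fw);
	if (err)
		return err;

	/* the header version selects which v2.x sections the helper parses */
	rlc_hdr = (const struct rlc_firmware_header_v2_0 *)adev->gfx.rlc_fw->data;
	version_major = le16_to_cpu(rlc_hdr->header.header_version_major);
	version_minor = le16_to_cpu(rlc_hdr->header.header_version_minor);

	return amdgpu_gfx_rlc_init_microcode(adev, version_major, version_minor);
}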