@@ -642,11 +642,257 @@ static void dwc_pcie_ltssm_debugfs_init(struct dw_pcie *pci, struct dentry *dir)
 				    &dwc_pcie_ltssm_status_ops);
 }
 
+static int dw_pcie_ptm_check_capability(void *drvdata)
+{
+	struct dw_pcie *pci = drvdata;
+
+	pci->ptm_vsec_offset = dw_pcie_find_ptm_capability(pci);
+
+	return pci->ptm_vsec_offset;
+}
+
+static int dw_pcie_ptm_context_update_write(void *drvdata, u8 mode)
+{
+	struct dw_pcie *pci = drvdata;
+	u32 val;
+
+	if (mode == PCIE_PTM_CONTEXT_UPDATE_AUTO) {
+		val = dw_pcie_readl_dbi(pci, pci->ptm_vsec_offset + PTM_RES_REQ_CTRL);
+		val |= PTM_REQ_AUTO_UPDATE_ENABLED;
+		dw_pcie_writel_dbi(pci, pci->ptm_vsec_offset + PTM_RES_REQ_CTRL, val);
+	} else if (mode == PCIE_PTM_CONTEXT_UPDATE_MANUAL) {
+		val = dw_pcie_readl_dbi(pci, pci->ptm_vsec_offset + PTM_RES_REQ_CTRL);
+		val &= ~PTM_REQ_AUTO_UPDATE_ENABLED;
+		val |= PTM_REQ_START_UPDATE;
+		dw_pcie_writel_dbi(pci, pci->ptm_vsec_offset + PTM_RES_REQ_CTRL, val);
+	} else {
+		return -EINVAL;
+	}
+
+	return 0;
+}
+
+static int dw_pcie_ptm_context_update_read(void *drvdata, u8 *mode)
+{
+	struct dw_pcie *pci = drvdata;
+	u32 val;
+
+	val = dw_pcie_readl_dbi(pci, pci->ptm_vsec_offset + PTM_RES_REQ_CTRL);
+	if (FIELD_GET(PTM_REQ_AUTO_UPDATE_ENABLED, val))
+		*mode = PCIE_PTM_CONTEXT_UPDATE_AUTO;
+	else
+		/*
+		 * PTM_REQ_START_UPDATE is a self clearing register bit. So if
+		 * PTM_REQ_AUTO_UPDATE_ENABLED is not set, then it implies that
+		 * manual update is used.
+		 */
+		*mode = PCIE_PTM_CONTEXT_UPDATE_MANUAL;
+
+	return 0;
+}
+
+static int dw_pcie_ptm_context_valid_write(void *drvdata, bool valid)
+{
+	struct dw_pcie *pci = drvdata;
+	u32 val;
+
+	if (valid) {
+		val = dw_pcie_readl_dbi(pci, pci->ptm_vsec_offset + PTM_RES_REQ_CTRL);
+		val |= PTM_RES_CCONTEXT_VALID;
+		dw_pcie_writel_dbi(pci, pci->ptm_vsec_offset + PTM_RES_REQ_CTRL, val);
+	} else {
+		val = dw_pcie_readl_dbi(pci, pci->ptm_vsec_offset + PTM_RES_REQ_CTRL);
+		val &= ~PTM_RES_CCONTEXT_VALID;
+		dw_pcie_writel_dbi(pci, pci->ptm_vsec_offset + PTM_RES_REQ_CTRL, val);
+	}
+
+	return 0;
+}
+
+static int dw_pcie_ptm_context_valid_read(void *drvdata, bool *valid)
+{
+	struct dw_pcie *pci = drvdata;
+	u32 val;
+
+	val = dw_pcie_readl_dbi(pci, pci->ptm_vsec_offset + PTM_RES_REQ_CTRL);
+	*valid = !!FIELD_GET(PTM_RES_CCONTEXT_VALID, val);
+
+	return 0;
+}
+
+static int dw_pcie_ptm_local_clock_read(void *drvdata, u64 *clock)
+{
+	struct dw_pcie *pci = drvdata;
+	u32 msb, lsb;
+
+	/* Re-read MSB and retry if it changed between the two 32-bit reads */
+	do {
+		msb = dw_pcie_readl_dbi(pci, pci->ptm_vsec_offset + PTM_LOCAL_MSB);
+		lsb = dw_pcie_readl_dbi(pci, pci->ptm_vsec_offset + PTM_LOCAL_LSB);
+	} while (msb != dw_pcie_readl_dbi(pci, pci->ptm_vsec_offset + PTM_LOCAL_MSB));
+
+	*clock = ((u64)msb) << 32 | lsb;
+
+	return 0;
+}
+
+static int dw_pcie_ptm_master_clock_read(void *drvdata, u64 *clock)
+{
+	struct dw_pcie *pci = drvdata;
+	u32 msb, lsb;
+
+	do {
+		msb = dw_pcie_readl_dbi(pci, pci->ptm_vsec_offset + PTM_MASTER_MSB);
+		lsb = dw_pcie_readl_dbi(pci, pci->ptm_vsec_offset + PTM_MASTER_LSB);
+	} while (msb != dw_pcie_readl_dbi(pci, pci->ptm_vsec_offset + PTM_MASTER_MSB));
+
+	*clock = ((u64)msb) << 32 | lsb;
+
+	return 0;
+}
+
+static int dw_pcie_ptm_t1_read(void *drvdata, u64 *clock)
+{
+	struct dw_pcie *pci = drvdata;
+	u32 msb, lsb;
+
+	do {
+		msb = dw_pcie_readl_dbi(pci, pci->ptm_vsec_offset + PTM_T1_T2_MSB);
+		lsb = dw_pcie_readl_dbi(pci, pci->ptm_vsec_offset + PTM_T1_T2_LSB);
+	} while (msb != dw_pcie_readl_dbi(pci, pci->ptm_vsec_offset + PTM_T1_T2_MSB));
+
+	*clock = ((u64)msb) << 32 | lsb;
+
+	return 0;
+}
+
+static int dw_pcie_ptm_t2_read(void *drvdata, u64 *clock)
+{
+	struct dw_pcie *pci = drvdata;
+	u32 msb, lsb;
+
+	do {
+		msb = dw_pcie_readl_dbi(pci, pci->ptm_vsec_offset + PTM_T1_T2_MSB);
+		lsb = dw_pcie_readl_dbi(pci, pci->ptm_vsec_offset + PTM_T1_T2_LSB);
+	} while (msb != dw_pcie_readl_dbi(pci, pci->ptm_vsec_offset + PTM_T1_T2_MSB));
+
+	*clock = ((u64)msb) << 32 | lsb;
+
+	return 0;
+}
+
+static int dw_pcie_ptm_t3_read(void *drvdata, u64 *clock)
+{
+	struct dw_pcie *pci = drvdata;
+	u32 msb, lsb;
+
+	do {
+		msb = dw_pcie_readl_dbi(pci, pci->ptm_vsec_offset + PTM_T3_T4_MSB);
+		lsb = dw_pcie_readl_dbi(pci, pci->ptm_vsec_offset + PTM_T3_T4_LSB);
+	} while (msb != dw_pcie_readl_dbi(pci, pci->ptm_vsec_offset + PTM_T3_T4_MSB));
+
+	*clock = ((u64)msb) << 32 | lsb;
+
+	return 0;
+}
+
+static int dw_pcie_ptm_t4_read(void *drvdata, u64 *clock)
+{
+	struct dw_pcie *pci = drvdata;
+	u32 msb, lsb;
+
+	do {
+		msb = dw_pcie_readl_dbi(pci, pci->ptm_vsec_offset + PTM_T3_T4_MSB);
+		lsb = dw_pcie_readl_dbi(pci, pci->ptm_vsec_offset + PTM_T3_T4_LSB);
+	} while (msb != dw_pcie_readl_dbi(pci, pci->ptm_vsec_offset + PTM_T3_T4_MSB));
+
+	*clock = ((u64)msb) << 32 | lsb;
+
+	return 0;
+}
+
+static bool dw_pcie_ptm_context_update_visible(void *drvdata)
+{
+	struct dw_pcie *pci = drvdata;
+
+	return (pci->mode == DW_PCIE_EP_TYPE) ? true : false;
+}
+
+static bool dw_pcie_ptm_context_valid_visible(void *drvdata)
+{
+	struct dw_pcie *pci = drvdata;
+
+	return (pci->mode == DW_PCIE_RC_TYPE) ? true : false;
+}
+
+static bool dw_pcie_ptm_local_clock_visible(void *drvdata)
+{
+	/* PTM local clock is always visible */
+	return true;
+}
+
+static bool dw_pcie_ptm_master_clock_visible(void *drvdata)
+{
+	struct dw_pcie *pci = drvdata;
+
+	return (pci->mode == DW_PCIE_EP_TYPE) ? true : false;
+}
+
+static bool dw_pcie_ptm_t1_visible(void *drvdata)
+{
+	struct dw_pcie *pci = drvdata;
+
+	return (pci->mode == DW_PCIE_EP_TYPE) ? true : false;
+}
+
+static bool dw_pcie_ptm_t2_visible(void *drvdata)
+{
+	struct dw_pcie *pci = drvdata;
+
+	return (pci->mode == DW_PCIE_RC_TYPE) ? true : false;
+}
+
+static bool dw_pcie_ptm_t3_visible(void *drvdata)
+{
+	struct dw_pcie *pci = drvdata;
+
+	return (pci->mode == DW_PCIE_RC_TYPE) ? true : false;
+}
+
+static bool dw_pcie_ptm_t4_visible(void *drvdata)
+{
+	struct dw_pcie *pci = drvdata;
+
+	return (pci->mode == DW_PCIE_EP_TYPE) ? true : false;
+}
+
+const struct pcie_ptm_ops dw_pcie_ptm_ops = {
+	.check_capability = dw_pcie_ptm_check_capability,
+	.context_update_write = dw_pcie_ptm_context_update_write,
+	.context_update_read = dw_pcie_ptm_context_update_read,
+	.context_valid_write = dw_pcie_ptm_context_valid_write,
+	.context_valid_read = dw_pcie_ptm_context_valid_read,
+	.local_clock_read = dw_pcie_ptm_local_clock_read,
+	.master_clock_read = dw_pcie_ptm_master_clock_read,
+	.t1_read = dw_pcie_ptm_t1_read,
+	.t2_read = dw_pcie_ptm_t2_read,
+	.t3_read = dw_pcie_ptm_t3_read,
+	.t4_read = dw_pcie_ptm_t4_read,
+	.context_update_visible = dw_pcie_ptm_context_update_visible,
+	.context_valid_visible = dw_pcie_ptm_context_valid_visible,
+	.local_clock_visible = dw_pcie_ptm_local_clock_visible,
+	.master_clock_visible = dw_pcie_ptm_master_clock_visible,
+	.t1_visible = dw_pcie_ptm_t1_visible,
+	.t2_visible = dw_pcie_ptm_t2_visible,
+	.t3_visible = dw_pcie_ptm_t3_visible,
+	.t4_visible = dw_pcie_ptm_t4_visible,
+};
+
 void dwc_pcie_debugfs_deinit(struct dw_pcie *pci)
 {
 	if (!pci->debugfs)
 		return;
 
+	pcie_ptm_destroy_debugfs(pci->ptm_debugfs);
 	dwc_pcie_rasdes_debugfs_deinit(pci);
 	debugfs_remove_recursive(pci->debugfs->debug_dir);
 }
@@ -676,4 +922,6 @@ void dwc_pcie_debugfs_init(struct dw_pcie *pci, enum dw_pcie_device_mode mode)
 	dwc_pcie_ltssm_debugfs_init(pci, dir);
 
 	pci->mode = mode;
+	pci->ptm_debugfs = pcie_ptm_create_debugfs(pci->dev, pci,
+						   &dw_pcie_ptm_ops);
 }
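
Note (not part of the change above): the dw_pcie_ptm_ops table is handed to the generic PCIe PTM debugfs layer via pcie_ptm_create_debugfs(), which creates the per-device debugfs entries and calls back into these hooks. The fragment below is only a rough sketch of what such a consumer looks like; every ptm_sketch_* name and the ptm_sketch_ctx structure are hypothetical illustrations, not the actual drivers/pci/pcie implementation.

/*
 * Illustrative sketch only: how a consumer of struct pcie_ptm_ops might
 * gate debugfs file creation on check_capability()/ *_visible() and route
 * reads through the controller-specific *_read() hooks.
 */
#include <linux/debugfs.h>
#include <linux/seq_file.h>

struct ptm_sketch_ctx {
	const struct pcie_ptm_ops *ops;	/* e.g. &dw_pcie_ptm_ops */
	void *drvdata;			/* e.g. the struct dw_pcie */
};

static int ptm_sketch_local_clock_show(struct seq_file *s, void *unused)
{
	struct ptm_sketch_ctx *ctx = s->private;
	u64 clock;
	int ret;

	/* Delegate the register access to the controller driver. */
	ret = ctx->ops->local_clock_read(ctx->drvdata, &clock);
	if (ret)
		return ret;

	seq_printf(s, "0x%llx\n", clock);
	return 0;
}
DEFINE_SHOW_ATTRIBUTE(ptm_sketch_local_clock);

static void ptm_sketch_create(struct dentry *dir, struct ptm_sketch_ctx *ctx)
{
	/* Skip everything if the controller reports no PTM capability. */
	if (ctx->ops->check_capability(ctx->drvdata) <= 0)
		return;

	/* Create a file only when the corresponding hook says it is visible. */
	if (ctx->ops->local_clock_visible(ctx->drvdata))
		debugfs_create_file("local_clock", 0444, dir, ctx,
				    &ptm_sketch_local_clock_fops);
}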