2121// OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
2222// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
2323
24- #import "fishhook.h"
24+ #include "fishhook.h"
2525
26- #import <dlfcn.h>
27- #import <stdlib.h>
28- #import <string.h>
29- #import <sys/types.h>
30- #import <mach-o/dyld.h>
31- #import <mach-o/loader.h>
32- #import <mach-o/nlist.h>
26+ #include <dlfcn.h>
27+ #include <stdbool.h>
28+ #include <stdlib.h>
29+ #include <string.h>
30+ #include <sys/mman.h>
31+ #include <sys/types.h>
32+ #include <mach/mach.h>
33+ #include <mach/vm_map.h>
34+ #include <mach/vm_region.h>
35+ #include <mach-o/dyld.h>
36+ #include <mach-o/loader.h>
37+ #include <mach-o/nlist.h>
3338
3439#ifdef __LP64__
3540typedef struct mach_header_64 mach_header_t ;
@@ -76,6 +81,36 @@ static int prepend_rebindings(struct rebindings_entry **rebindings_head,
7681 return 0 ;
7782}
7883
#if 0
/**
 * Looks up the current and maximum Mach VM protection attributes of the
 * memory region containing `addr`, via the vm_region query API.
 *
 * NOTE(review): disabled with #if 0 — kept for reference only. The live
 * rebinding path adds VM_PROT_WRITE unconditionally instead, because
 * vm_region on some iOS/macOS releases reports protection attributes that
 * do not match the region's actual state (see comment in the rebinding
 * loop).
 *
 * Returns 0 on success, storing the protections through `prot` and
 * `max_prot` when those out-pointers are non-NULL; returns -1 when the
 * vm_region query fails.
 */
static int get_protection(void *addr, vm_prot_t *prot, vm_prot_t *max_prot) {
  mach_port_t self_task = mach_task_self();
  vm_address_t region_addr = (vm_address_t)addr;
  vm_size_t region_size = 0;
  memory_object_name_t object_name;
#ifdef __LP64__
  /* 64-bit processes must use the _64 variants of the region query. */
  vm_region_basic_info_data_64_t info;
  mach_msg_type_number_t info_count = VM_REGION_BASIC_INFO_COUNT_64;
  kern_return_t kr = vm_region_64(self_task, &region_addr, &region_size,
                                  VM_REGION_BASIC_INFO_64,
                                  (vm_region_info_64_t)&info,
                                  &info_count, &object_name);
#else
  vm_region_basic_info_data_t info;
  mach_msg_type_number_t info_count = VM_REGION_BASIC_INFO_COUNT;
  kern_return_t kr = vm_region(self_task, &region_addr, &region_size,
                               VM_REGION_BASIC_INFO,
                               (vm_region_info_t)&info,
                               &info_count, &object_name);
#endif
  if (kr != KERN_SUCCESS) {
    return -1;
  }
  if (prot != NULL) {
    *prot = info.protection;
  }
  if (max_prot != NULL) {
    *max_prot = info.max_protection;
  }
  return 0;
}
#endif
113+
79114static void perform_rebinding_with_section (struct rebindings_entry * rebindings ,
80115 section_t * section ,
81116 intptr_t slide ,
@@ -84,6 +119,7 @@ static void perform_rebinding_with_section(struct rebindings_entry *rebindings,
84119 uint32_t * indirect_symtab ) {
85120 uint32_t * indirect_symbol_indices = indirect_symtab + section -> reserved1 ;
86121 void * * indirect_symbol_bindings = (void * * )((uintptr_t )slide + section -> addr );
122+
87123 for (uint i = 0 ; i < section -> size / sizeof (void * ); i ++ ) {
88124 uint32_t symtab_index = indirect_symbol_indices [i ];
89125 if (symtab_index == INDIRECT_SYMBOL_ABS || symtab_index == INDIRECT_SYMBOL_LOCAL ||
@@ -92,18 +128,33 @@ static void perform_rebinding_with_section(struct rebindings_entry *rebindings,
92128 }
93129 uint32_t strtab_offset = symtab [symtab_index ].n_un .n_strx ;
94130 char * symbol_name = strtab + strtab_offset ;
95- if (strnlen (symbol_name , 2 ) < 2 ) {
96- continue ;
97- }
131+ bool symbol_name_longer_than_1 = symbol_name [0 ] && symbol_name [1 ];
98132 struct rebindings_entry * cur = rebindings ;
99133 while (cur ) {
100134 for (uint j = 0 ; j < cur -> rebindings_nel ; j ++ ) {
101- if (strcmp (& symbol_name [1 ], cur -> rebindings [j ].name ) == 0 ) {
102- if (cur -> rebindings [j ].replaced != NULL &&
103- indirect_symbol_bindings [i ] != cur -> rebindings [j ].replacement ) {
135+ if (symbol_name_longer_than_1 && strcmp (& symbol_name [1 ], cur -> rebindings [j ].name ) == 0 ) {
136+ kern_return_t err ;
137+
138+ if (cur -> rebindings [j ].replaced != NULL && indirect_symbol_bindings [i ] != cur -> rebindings [j ].replacement )
104139 * (cur -> rebindings [j ].replaced ) = indirect_symbol_bindings [i ];
140+
141+ /**
142+ * 1. Moved the vm protection modifying codes to here to reduce the
143+ * changing scope.
144+ * 2. Adding VM_PROT_WRITE mode unconditionally because vm_region
145+ * API on some iOS/Mac reports mismatch vm protection attributes.
146+ * -- Lianfu Hao Jun 16th, 2021
147+ **/
148+ err = vm_protect (mach_task_self (), (uintptr_t )indirect_symbol_bindings , section -> size , 0 , VM_PROT_READ | VM_PROT_WRITE | VM_PROT_COPY );
149+ if (err == KERN_SUCCESS ) {
150+ /**
151+ * Once we failed to change the vm protection, we
152+ * MUST NOT continue the following write actions!
153+ * iOS 15 has corrected the const segments prot.
154+ * -- Lionfore Hao Jun 11th, 2021
155+ **/
156+ indirect_symbol_bindings [i ] = cur -> rebindings [j ].replacement ;
105157 }
106- indirect_symbol_bindings [i ] = cur -> rebindings [j ].replacement ;
107158 goto symbol_loop ;
108159 }
109160 }
@@ -187,6 +238,9 @@ int rebind_symbols_image(void *header,
187238 struct rebindings_entry * rebindings_head = NULL ;
188239 int retval = prepend_rebindings (& rebindings_head , rebindings , rebindings_nel );
189240 rebind_symbols_for_image (rebindings_head , (const struct mach_header * ) header , slide );
241+ if (rebindings_head ) {
242+ free (rebindings_head -> rebindings );
243+ }
190244 free (rebindings_head );
191245 return retval ;
192246}
0 commit comments