Skip to content

Commit f458ac0

Browse files
authored
Fix compiler warnings in remote debugging (python#141060)
Example of fixed warnings on 32-bit Windows: (1) Python\remote_debugging.c(24,53): warning C4244: 'function': conversion from 'uint64_t' to 'uintptr_t', possible loss of data; (2) Modules\_remote_debugging_module.c(789,44): warning C4244: 'function': conversion from 'uint64_t' to 'size_t', possible loss of data
1 parent 1d25b75 commit f458ac0

File tree

2 files changed

+36
-36
lines changed

2 files changed

+36
-36
lines changed

Modules/_remote_debugging_module.c

Lines changed: 29 additions & 29 deletions
Original file line numberDiff line numberDiff line change
@@ -503,7 +503,7 @@ iterate_threads(
503503

504504
if (0 > _Py_RemoteDebug_PagedReadRemoteMemory(
505505
&unwinder->handle,
506-
unwinder->interpreter_addr + unwinder->debug_offsets.interpreter_state.threads_main,
506+
unwinder->interpreter_addr + (uintptr_t)unwinder->debug_offsets.interpreter_state.threads_main,
507507
sizeof(void*),
508508
&thread_state_addr))
509509
{
@@ -514,7 +514,7 @@ iterate_threads(
514514
while (thread_state_addr != 0) {
515515
if (0 > _Py_RemoteDebug_PagedReadRemoteMemory(
516516
&unwinder->handle,
517-
thread_state_addr + unwinder->debug_offsets.thread_state.native_thread_id,
517+
thread_state_addr + (uintptr_t)unwinder->debug_offsets.thread_state.native_thread_id,
518518
sizeof(tid),
519519
&tid))
520520
{
@@ -530,7 +530,7 @@ iterate_threads(
530530
// Move to next thread
531531
if (0 > _Py_RemoteDebug_PagedReadRemoteMemory(
532532
&unwinder->handle,
533-
thread_state_addr + unwinder->debug_offsets.thread_state.next,
533+
thread_state_addr + (uintptr_t)unwinder->debug_offsets.thread_state.next,
534534
sizeof(void*),
535535
&thread_state_addr))
536536
{
@@ -686,7 +686,7 @@ read_py_str(
686686
return NULL;
687687
}
688688

689-
size_t offset = unwinder->debug_offsets.unicode_object.asciiobject_size;
689+
size_t offset = (size_t)unwinder->debug_offsets.unicode_object.asciiobject_size;
690690
res = _Py_RemoteDebug_PagedReadRemoteMemory(&unwinder->handle, address + offset, len, buf);
691691
if (res < 0) {
692692
set_exception_cause(unwinder, PyExc_RuntimeError, "Failed to read string data from remote memory");
@@ -748,7 +748,7 @@ read_py_bytes(
748748
return NULL;
749749
}
750750

751-
size_t offset = unwinder->debug_offsets.bytes_object.ob_sval;
751+
size_t offset = (size_t)unwinder->debug_offsets.bytes_object.ob_sval;
752752
res = _Py_RemoteDebug_PagedReadRemoteMemory(&unwinder->handle, address + offset, len, buf);
753753
if (res < 0) {
754754
set_exception_cause(unwinder, PyExc_RuntimeError, "Failed to read bytes data from remote memory");
@@ -786,7 +786,7 @@ read_py_long(
786786
int bytes_read = _Py_RemoteDebug_PagedReadRemoteMemory(
787787
&unwinder->handle,
788788
address,
789-
unwinder->debug_offsets.long_object.size,
789+
(size_t)unwinder->debug_offsets.long_object.size,
790790
long_obj);
791791
if (bytes_read < 0) {
792792
set_exception_cause(unwinder, PyExc_RuntimeError, "Failed to read PyLongObject");
@@ -823,7 +823,7 @@ read_py_long(
823823

824824
bytes_read = _Py_RemoteDebug_PagedReadRemoteMemory(
825825
&unwinder->handle,
826-
address + unwinder->debug_offsets.long_object.ob_digit,
826+
address + (uintptr_t)unwinder->debug_offsets.long_object.ob_digit,
827827
sizeof(digit) * size,
828828
digits
829829
);
@@ -933,7 +933,7 @@ parse_task_name(
933933
int err = _Py_RemoteDebug_PagedReadRemoteMemory(
934934
&unwinder->handle,
935935
task_address,
936-
unwinder->async_debug_offsets.asyncio_task_object.size,
936+
(size_t)unwinder->async_debug_offsets.asyncio_task_object.size,
937937
task_obj);
938938
if (err < 0) {
939939
set_exception_cause(unwinder, PyExc_RuntimeError, "Failed to read task object");
@@ -1040,7 +1040,7 @@ handle_yield_from_frame(
10401040
uintptr_t gi_await_addr_type_addr;
10411041
err = read_ptr(
10421042
unwinder,
1043-
gi_await_addr + unwinder->debug_offsets.pyobject.ob_type,
1043+
gi_await_addr + (uintptr_t)unwinder->debug_offsets.pyobject.ob_type,
10441044
&gi_await_addr_type_addr);
10451045
if (err) {
10461046
set_exception_cause(unwinder, PyExc_RuntimeError, "Failed to read gi_await type address");
@@ -1101,7 +1101,7 @@ parse_coro_chain(
11011101

11021102
// Parse the previous frame using the gi_iframe from local copy
11031103
uintptr_t prev_frame;
1104-
uintptr_t gi_iframe_addr = coro_address + unwinder->debug_offsets.gen_object.gi_iframe;
1104+
uintptr_t gi_iframe_addr = coro_address + (uintptr_t)unwinder->debug_offsets.gen_object.gi_iframe;
11051105
uintptr_t address_of_code_object = 0;
11061106
if (parse_frame_object(unwinder, &name, gi_iframe_addr, &address_of_code_object, &prev_frame) < 0) {
11071107
set_exception_cause(unwinder, PyExc_RuntimeError, "Failed to parse frame object in coro chain");
@@ -1153,7 +1153,7 @@ create_task_result(
11531153

11541154
// Parse coroutine chain
11551155
if (_Py_RemoteDebug_PagedReadRemoteMemory(&unwinder->handle, task_address,
1156-
unwinder->async_debug_offsets.asyncio_task_object.size,
1156+
(size_t)unwinder->async_debug_offsets.asyncio_task_object.size,
11571157
task_obj) < 0) {
11581158
set_exception_cause(unwinder, PyExc_RuntimeError, "Failed to read task object for coro chain");
11591159
goto error;
@@ -1206,7 +1206,7 @@ parse_task(
12061206

12071207
err = read_char(
12081208
unwinder,
1209-
task_address + unwinder->async_debug_offsets.asyncio_task_object.task_is_task,
1209+
task_address + (uintptr_t)unwinder->async_debug_offsets.asyncio_task_object.task_is_task,
12101210
&is_task);
12111211
if (err) {
12121212
set_exception_cause(unwinder, PyExc_RuntimeError, "Failed to read is_task flag");
@@ -1354,7 +1354,7 @@ process_thread_for_awaited_by(
13541354
void *context
13551355
) {
13561356
PyObject *result = (PyObject *)context;
1357-
uintptr_t head_addr = thread_state_addr + unwinder->async_debug_offsets.asyncio_thread_state.asyncio_tasks_head;
1357+
uintptr_t head_addr = thread_state_addr + (uintptr_t)unwinder->async_debug_offsets.asyncio_thread_state.asyncio_tasks_head;
13581358
return append_awaited_by(unwinder, tid, head_addr, result);
13591359
}
13601360

@@ -1369,7 +1369,7 @@ process_task_awaited_by(
13691369
// Read the entire TaskObj at once
13701370
char task_obj[SIZEOF_TASK_OBJ];
13711371
if (_Py_RemoteDebug_PagedReadRemoteMemory(&unwinder->handle, task_address,
1372-
unwinder->async_debug_offsets.asyncio_task_object.size,
1372+
(size_t)unwinder->async_debug_offsets.asyncio_task_object.size,
13731373
task_obj) < 0) {
13741374
set_exception_cause(unwinder, PyExc_RuntimeError, "Failed to read task object");
13751375
return -1;
@@ -1526,7 +1526,7 @@ find_running_task_in_thread(
15261526
uintptr_t address_of_running_loop;
15271527
int bytes_read = read_py_ptr(
15281528
unwinder,
1529-
thread_state_addr + unwinder->async_debug_offsets.asyncio_thread_state.asyncio_running_loop,
1529+
thread_state_addr + (uintptr_t)unwinder->async_debug_offsets.asyncio_thread_state.asyncio_running_loop,
15301530
&address_of_running_loop);
15311531
if (bytes_read == -1) {
15321532
set_exception_cause(unwinder, PyExc_RuntimeError, "Failed to read running loop address");
@@ -1540,7 +1540,7 @@ find_running_task_in_thread(
15401540

15411541
int err = read_ptr(
15421542
unwinder,
1543-
thread_state_addr + unwinder->async_debug_offsets.asyncio_thread_state.asyncio_running_task,
1543+
thread_state_addr + (uintptr_t)unwinder->async_debug_offsets.asyncio_thread_state.asyncio_running_task,
15441544
running_task_addr);
15451545
if (err) {
15461546
set_exception_cause(unwinder, PyExc_RuntimeError, "Failed to read running task address");
@@ -1556,7 +1556,7 @@ get_task_code_object(RemoteUnwinderObject *unwinder, uintptr_t task_addr, uintpt
15561556

15571557
if(read_py_ptr(
15581558
unwinder,
1559-
task_addr + unwinder->async_debug_offsets.asyncio_task_object.task_coro,
1559+
task_addr + (uintptr_t)unwinder->async_debug_offsets.asyncio_task_object.task_coro,
15601560
&running_coro_addr) < 0) {
15611561
set_exception_cause(unwinder, PyExc_RuntimeError, "Running task coro read failed");
15621562
return -1;
@@ -1572,7 +1572,7 @@ get_task_code_object(RemoteUnwinderObject *unwinder, uintptr_t task_addr, uintpt
15721572
// the offset leads directly to its first field: f_executable
15731573
if (read_py_ptr(
15741574
unwinder,
1575-
running_coro_addr + unwinder->debug_offsets.gen_object.gi_iframe, code_obj_addr) < 0) {
1575+
running_coro_addr + (uintptr_t)unwinder->debug_offsets.gen_object.gi_iframe, code_obj_addr) < 0) {
15761576
set_exception_cause(unwinder, PyExc_RuntimeError, "Failed to read running task code object");
15771577
return -1;
15781578
}
@@ -1741,7 +1741,7 @@ static bool
17411741
parse_linetable(const uintptr_t addrq, const char* linetable, int firstlineno, LocationInfo* info)
17421742
{
17431743
const uint8_t* ptr = (const uint8_t*)(linetable);
1744-
uint64_t addr = 0;
1744+
uintptr_t addr = 0;
17451745
info->lineno = firstlineno;
17461746

17471747
while (*ptr != '\0') {
@@ -1870,7 +1870,7 @@ parse_code_object(RemoteUnwinderObject *unwinder,
18701870
meta->file_name = file;
18711871
meta->linetable = linetable;
18721872
meta->first_lineno = GET_MEMBER(int, code_object, unwinder->debug_offsets.code_object.firstlineno);
1873-
meta->addr_code_adaptive = real_address + unwinder->debug_offsets.code_object.co_code_adaptive;
1873+
meta->addr_code_adaptive = real_address + (uintptr_t)unwinder->debug_offsets.code_object.co_code_adaptive;
18741874

18751875
if (unwinder && unwinder->code_object_cache && _Py_hashtable_set(unwinder->code_object_cache, key, meta) < 0) {
18761876
cached_code_metadata_destroy(meta);
@@ -2037,7 +2037,7 @@ copy_stack_chunks(RemoteUnwinderObject *unwinder,
20372037
size_t count = 0;
20382038
size_t max_chunks = 16;
20392039

2040-
if (read_ptr(unwinder, tstate_addr + unwinder->debug_offsets.thread_state.datastack_chunk, &chunk_addr)) {
2040+
if (read_ptr(unwinder, tstate_addr + (uintptr_t)unwinder->debug_offsets.thread_state.datastack_chunk, &chunk_addr)) {
20412041
set_exception_cause(unwinder, PyExc_RuntimeError, "Failed to read initial stack chunk address");
20422042
return -1;
20432043
}
@@ -2146,8 +2146,8 @@ populate_initial_state_data(
21462146
uintptr_t *interpreter_state,
21472147
uintptr_t *tstate
21482148
) {
2149-
uint64_t interpreter_state_list_head =
2150-
unwinder->debug_offsets.runtime_state.interpreters_head;
2149+
uintptr_t interpreter_state_list_head =
2150+
(uintptr_t)unwinder->debug_offsets.runtime_state.interpreters_head;
21512151

21522152
uintptr_t address_of_interpreter_state;
21532153
int bytes_read = _Py_RemoteDebug_PagedReadRemoteMemory(
@@ -2174,7 +2174,7 @@ populate_initial_state_data(
21742174
}
21752175

21762176
uintptr_t address_of_thread = address_of_interpreter_state +
2177-
unwinder->debug_offsets.interpreter_state.threads_main;
2177+
(uintptr_t)unwinder->debug_offsets.interpreter_state.threads_main;
21782178

21792179
if (_Py_RemoteDebug_PagedReadRemoteMemory(
21802180
&unwinder->handle,
@@ -2198,7 +2198,7 @@ find_running_frame(
21982198
if ((void*)address_of_thread != NULL) {
21992199
int err = read_ptr(
22002200
unwinder,
2201-
address_of_thread + unwinder->debug_offsets.thread_state.current_frame,
2201+
address_of_thread + (uintptr_t)unwinder->debug_offsets.thread_state.current_frame,
22022202
frame);
22032203
if (err) {
22042204
set_exception_cause(unwinder, PyExc_RuntimeError, "Failed to read current frame pointer");
@@ -2370,7 +2370,7 @@ append_awaited_by_for_thread(
23702370
}
23712371

23722372
uintptr_t task_addr = (uintptr_t)GET_MEMBER(uintptr_t, task_node, unwinder->debug_offsets.llist_node.next)
2373-
- unwinder->async_debug_offsets.asyncio_task_object.task_node;
2373+
- (uintptr_t)unwinder->async_debug_offsets.asyncio_task_object.task_node;
23742374

23752375
if (process_single_task_node(unwinder, task_addr, NULL, result) < 0) {
23762376
set_exception_cause(unwinder, PyExc_RuntimeError, "Failed to process task node in awaited_by");
@@ -2605,7 +2605,7 @@ get_thread_status(RemoteUnwinderObject *unwinder, uint64_t tid, uint64_t pthread
26052605
}
26062606

26072607
SYSTEM_THREAD_INFORMATION *ti = (SYSTEM_THREAD_INFORMATION *)((char *)pi + sizeof(SYSTEM_PROCESS_INFORMATION));
2608-
for (Py_ssize_t i = 0; i < pi->NumberOfThreads; i++, ti++) {
2608+
for (size_t i = 0; i < pi->NumberOfThreads; i++, ti++) {
26092609
if (ti->ClientId.UniqueThread == (HANDLE)tid) {
26102610
return ti->ThreadState != WIN32_THREADSTATE_RUNNING ? THREAD_STATE_IDLE : THREAD_STATE_RUNNING;
26112611
}
@@ -2642,7 +2642,7 @@ unwind_stack_for_thread(
26422642

26432643
char ts[SIZEOF_THREAD_STATE];
26442644
int bytes_read = _Py_RemoteDebug_PagedReadRemoteMemory(
2645-
&unwinder->handle, *current_tstate, unwinder->debug_offsets.thread_state.size, ts);
2645+
&unwinder->handle, *current_tstate, (size_t)unwinder->debug_offsets.thread_state.size, ts);
26462646
if (bytes_read < 0) {
26472647
set_exception_cause(unwinder, PyExc_RuntimeError, "Failed to read thread state");
26482648
goto error;
@@ -3174,7 +3174,7 @@ _remote_debugging_RemoteUnwinder_get_all_awaited_by_impl(RemoteUnwinderObject *s
31743174
}
31753175

31763176
uintptr_t head_addr = self->interpreter_addr
3177-
+ self->async_debug_offsets.asyncio_interpreter_state.asyncio_tasks_head;
3177+
+ (uintptr_t)self->async_debug_offsets.asyncio_interpreter_state.asyncio_tasks_head;
31783178

31793179
// On top of a per-thread task lists used by default by asyncio to avoid
31803180
// contention, there is also a fallback per-interpreter list of tasks;

Python/remote_debugging.c

Lines changed: 7 additions & 7 deletions
Original file line numberDiff line numberDiff line change
@@ -19,7 +19,7 @@ cleanup_proc_handle(proc_handle_t *handle) {
1919
}
2020

2121
static int
22-
read_memory(proc_handle_t *handle, uint64_t remote_address, size_t len, void* dst)
22+
read_memory(proc_handle_t *handle, uintptr_t remote_address, size_t len, void* dst)
2323
{
2424
return _Py_RemoteDebug_ReadRemoteMemory(handle, remote_address, len, dst);
2525
}
@@ -235,7 +235,7 @@ send_exec_to_proc_handle(proc_handle_t *handle, int tid, const char *debugger_sc
235235
int is_remote_debugging_enabled = 0;
236236
if (0 != read_memory(
237237
handle,
238-
interpreter_state_addr + debug_offsets.debugger_support.remote_debugging_enabled,
238+
interpreter_state_addr + (uintptr_t)debug_offsets.debugger_support.remote_debugging_enabled,
239239
sizeof(int),
240240
&is_remote_debugging_enabled))
241241
{
@@ -255,7 +255,7 @@ send_exec_to_proc_handle(proc_handle_t *handle, int tid, const char *debugger_sc
255255
if (tid != 0) {
256256
if (0 != read_memory(
257257
handle,
258-
interpreter_state_addr + debug_offsets.interpreter_state.threads_head,
258+
interpreter_state_addr + (uintptr_t)debug_offsets.interpreter_state.threads_head,
259259
sizeof(void*),
260260
&thread_state_addr))
261261
{
@@ -264,7 +264,7 @@ send_exec_to_proc_handle(proc_handle_t *handle, int tid, const char *debugger_sc
264264
while (thread_state_addr != 0) {
265265
if (0 != read_memory(
266266
handle,
267-
thread_state_addr + debug_offsets.thread_state.native_thread_id,
267+
thread_state_addr + (uintptr_t)debug_offsets.thread_state.native_thread_id,
268268
sizeof(this_tid),
269269
&this_tid))
270270
{
@@ -277,7 +277,7 @@ send_exec_to_proc_handle(proc_handle_t *handle, int tid, const char *debugger_sc
277277

278278
if (0 != read_memory(
279279
handle,
280-
thread_state_addr + debug_offsets.thread_state.next,
280+
thread_state_addr + (uintptr_t)debug_offsets.thread_state.next,
281281
sizeof(void*),
282282
&thread_state_addr))
283283
{
@@ -294,7 +294,7 @@ send_exec_to_proc_handle(proc_handle_t *handle, int tid, const char *debugger_sc
294294
} else {
295295
if (0 != read_memory(
296296
handle,
297-
interpreter_state_addr + debug_offsets.interpreter_state.threads_main,
297+
interpreter_state_addr + (uintptr_t)debug_offsets.interpreter_state.threads_main,
298298
sizeof(void*),
299299
&thread_state_addr))
300300
{
@@ -346,7 +346,7 @@ send_exec_to_proc_handle(proc_handle_t *handle, int tid, const char *debugger_sc
346346
uintptr_t eval_breaker;
347347
if (0 != read_memory(
348348
handle,
349-
thread_state_addr + debug_offsets.debugger_support.eval_breaker,
349+
thread_state_addr + (uintptr_t)debug_offsets.debugger_support.eval_breaker,
350350
sizeof(uintptr_t),
351351
&eval_breaker))
352352
{

0 commit comments

Comments
 (0)