Skip to content
Open
Show file tree
Hide file tree
Changes from 2 commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
10 changes: 8 additions & 2 deletions docs/en_US/preferences.rst
Original file line number Diff line number Diff line change
Expand Up @@ -444,11 +444,17 @@ Use the fields on the *File Downloads* panel to manage file downloads related pr

* When the *Automatically open downloaded files?* switch is set to *True*
the downloaded file will automatically open in the system's default
application associated with that file type.
application associated with that file type. **Note:** This option is applicable and
visible only in desktop mode.

* When the *Enable binary data download?* switch is set to *True*,
binary data can be downloaded from the result grid. Default is set to *False*
to prevent excessive memory usage on the server.

* When the *Prompt for the download location?* switch is set to *True*
a prompt will appear after clicking the download button, allowing you
to choose the download location.
to choose the download location. **Note:** This option is applicable and
visible only in desktop mode.

**Note:** The *Automatically open downloaded files?* and *Prompt for the download location?* settings are applicable and visible only in desktop mode.

Expand Down
12 changes: 12 additions & 0 deletions web/pgadmin/misc/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -166,6 +166,18 @@ def register_preferences(self):
)
)

self.preference.register(
'file_downloads', 'enable_binary_data_download',
gettext("Enable binary data download?"),
'boolean', False,
category_label=PREF_LABEL_FILE_DOWNLOADS,
help_str=gettext(
'If set to True, binary data can be downloaded '
'from the result grid. The default is False to '
'prevent excessive memory usage on the server.'
)
)

def get_exposed_url_endpoints(self):
"""
Returns:
Expand Down
78 changes: 77 additions & 1 deletion web/pgadmin/tools/sqleditor/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -14,6 +14,7 @@
import secrets
from urllib.parse import unquote
from threading import Lock
from io import BytesIO
import threading
import math

Expand All @@ -23,7 +24,8 @@

from config import PG_DEFAULT_DRIVER, ALLOW_SAVE_PASSWORD
from werkzeug.user_agent import UserAgent
from flask import Response, url_for, render_template, session, current_app
from flask import Response, url_for, render_template, session, current_app, \
send_file
from flask import request
from flask_babel import gettext
from pgadmin.tools.sqleditor.utils.query_tool_connection_check \
Expand Down Expand Up @@ -70,6 +72,8 @@
from pgadmin.browser.server_groups.servers.utils import \
convert_connection_parameter, get_db_disp_restriction
from pgadmin.misc.workspaces import check_and_delete_adhoc_server
from pgadmin.utils.driver.psycopg3.typecast import \
register_binary_data_typecasters

MODULE_NAME = 'sqleditor'
TRANSACTION_STATUS_CHECK_FAILED = gettext("Transaction status check failed.")
Expand Down Expand Up @@ -147,6 +151,7 @@ def get_exposed_url_endpoints(self):
'sqleditor.server_cursor',
'sqleditor.nlq_chat_stream',
'sqleditor.explain_analyze_stream',
'sqleditor.download_binary_data',
]

def on_logout(self):
Expand Down Expand Up @@ -2182,6 +2187,77 @@ def start_query_download_tool(trans_id):
return internal_server_error(errormsg=err_msg)


@blueprint.route(
    '/download_binary_data/<int:trans_id>',
    methods=["POST"], endpoint='download_binary_data'
)
@pga_login_required
def download_binary_data(trans_id):
    """
    Download the binary (bytea) value of a single result-grid cell.

    Expects 'rowpos' and 'colpos' in the request payload (form values or
    JSON) and streams the cell's raw bytes back as an attachment named
    'binary_data'.

    :param trans_id: Unique id of the query-tool transaction.
    :return: Flask response — the binary attachment on success, otherwise
        a JSON error response.
    """
    (status, error_msg, conn, trans_obj,
     session_obj) = check_transaction_status(trans_id)

    if error_msg:
        return internal_server_error(
            errormsg=error_msg
        )

    if not status or conn is None or trans_obj is None or \
            session_obj is None:
        return internal_server_error(
            errormsg=TRANSACTION_STATUS_CHECK_FAILED
        )

    # Reuse the cursor holding the current result set so the requested
    # row can be reached by scrolling.
    cur = conn._Connection__async_cursor
    if cur is None:
        return internal_server_error(
            errormsg=gettext('No active result cursor.')
        )
    # NOTE(review): this mutates the shared cursor's type adapters, so
    # subsequent grid fetches on the same cursor will see raw bytes for
    # bytea columns instead of the placeholder text.  Consider using a
    # dedicated cursor or restoring the original loaders afterwards —
    # TODO confirm with the query-tool fetch path.
    register_binary_data_typecasters(cur)

    # Accept either form-encoded values or a JSON body.
    data = request.values if request.values else request.get_json(silent=True)
    if data is None:
        return make_json_response(
            status=410,
            success=0,
            errormsg=gettext(
                "Could not find the required parameters (rowpos, colpos)."
            )
        )

    try:
        row_pos = int(data['rowpos'])
        col_pos = int(data['colpos'])
        if row_pos < 0 or col_pos < 0:
            raise ValueError
        cur.scroll(row_pos)
        row = cur.fetchone()
        if row is None or col_pos >= len(row):
            return internal_server_error(
                errormsg=gettext('Requested cell is out of range.')
            )
        binary_data = row[col_pos]
    except (KeyError, ValueError, IndexError, TypeError) as e:
        # Missing or non-integer/negative positions, or a scroll past the
        # end of the result set, all land here.
        current_app.logger.error(e)
        return internal_server_error(
            errormsg='Invalid row/column position.'
        )

    return send_file(
        BytesIO(binary_data),
        as_attachment=True,
        download_name='binary_data',
        mimetype='application/octet-stream'
    )


@blueprint.route(
'/status/<int:trans_id>',
methods=["GET"],
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -30,6 +30,7 @@ export const QUERY_TOOL_EVENTS = {
TRIGGER_SELECT_ALL: 'TRIGGER_SELECT_ALL',
TRIGGER_SAVE_QUERY_TOOL_DATA: 'TRIGGER_SAVE_QUERY_TOOL_DATA',
TRIGGER_GET_QUERY_CONTENT: 'TRIGGER_GET_QUERY_CONTENT',
TRIGGER_SAVE_BINARY_DATA: 'TRIGGER_SAVE_BINARY_DATA',

COPY_DATA: 'COPY_DATA',
SET_LIMIT_VALUE: 'SET_LIMIT_VALUE',
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -6,12 +6,17 @@
// This software is released under the PostgreSQL Licence
//
//////////////////////////////////////////////////////////////
import { useContext } from 'react';
import { styled } from '@mui/material/styles';
import _ from 'lodash';
import PropTypes from 'prop-types';
import gettext from 'sources/gettext';
import CustomPropTypes from '../../../../../../static/js/custom_prop_types';
import usePreferences from '../../../../../../preferences/static/js/store';

import GetAppRoundedIcon from '@mui/icons-material/GetAppRounded';
import { PgIconButton } from '../../../../../../static/js/components/Buttons';
import { QUERY_TOOL_EVENTS } from '../QueryToolConstants';
import { QueryToolEventsContext } from '../QueryToolComponent';

const StyledNullAndDefaultFormatter = styled(NullAndDefaultFormatter)(({theme}) => ({
'& .Formatters-disabledCell': {
Expand Down Expand Up @@ -68,12 +73,16 @@ export function NumberFormatter({row, column}) {
}
NumberFormatter.propTypes = FormatterPropTypes;

export function BinaryFormatter({row, column}) {
export function BinaryFormatter({row, column, ...props}) {
let value = row[column.key];

const eventBus = useContext(QueryToolEventsContext);
const downloadBinaryData = usePreferences().getPreferences('misc', 'enable_binary_data_download').value;
return (
<StyledNullAndDefaultFormatter value={value} column={column}>
<span className='Formatters-disabledCell'>[{value}]</span>
<span className='Formatters-disabledCell'>[{value}]</span>&nbsp;&nbsp;
{downloadBinaryData &&
<PgIconButton size="xs" title={gettext('Download binary data')} icon={<GetAppRoundedIcon />}
onClick={()=>eventBus.fireEvent(QUERY_TOOL_EVENTS.TRIGGER_SAVE_BINARY_DATA, props.rowIdx, column.pos)}/>}
</StyledNullAndDefaultFormatter>
);
}
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -493,6 +493,23 @@ export class ResultSetUtils {
}
}

async saveBinaryResultsToFile(fileName, rowPos, colPos, onProgress) {
try {
await DownloadUtils.downloadFileStream({
url: url_for('sqleditor.download_binary_data', {
'trans_id': this.transId,
}),
options: {
method: 'POST',
body: JSON.stringify({filename: fileName, rowpos: rowPos, colpos: colPos})
}}, fileName, 'application/octet-stream', onProgress);
this.eventBus.fireEvent(QUERY_TOOL_EVENTS.TRIGGER_SAVE_RESULTS_END);
} catch (error) {
this.eventBus.fireEvent(QUERY_TOOL_EVENTS.TRIGGER_SAVE_RESULTS_END);
this.eventBus.fireEvent(QUERY_TOOL_EVENTS.HANDLE_API_ERROR, error);
}
}

includeFilter(reqData) {
return this.api.post(
url_for('sqleditor.inclusive_filter', {
Expand Down Expand Up @@ -1038,6 +1055,15 @@ export function ResultSet() {
setLoaderText('');
});

eventBus.registerListener(QUERY_TOOL_EVENTS.TRIGGER_SAVE_BINARY_DATA, async (rowPos, colPos)=>{
let fileName = 'data-' + new Date().getTime();
setLoaderText(gettext('Downloading results...'));
await rsu.current.saveBinaryResultsToFile(fileName, rowPos, colPos, (p)=>{
setLoaderText(gettext('Downloading results(%s)...', p));
});
setLoaderText('');
});

eventBus.registerListener(QUERY_TOOL_EVENTS.TRIGGER_SET_LIMIT, async (limit)=>{
setLoaderText(gettext('Setting the limit on the result...'));
try {
Expand Down
41 changes: 41 additions & 0 deletions web/pgadmin/utils/driver/psycopg3/typecast.py
Original file line number Diff line number Diff line change
Expand Up @@ -212,6 +212,21 @@ def register_array_to_string_typecasters(connection=None):
TextLoaderpgAdmin)


def register_binary_data_typecasters(cur):
    """Make *cur* return raw binary payloads for bytea columns.

    Registers both the text-format and the binary-format loader for the
    bytea OID (17) and the bytea-array OID (1001) on the cursor's adapter
    registry, so the original bytes come back regardless of the wire
    format used for the result.
    """
    bytea_oid = 17
    bytea_array_oid = 1001

    # Text-format loader: decodes the hex representation back to bytes.
    cur.adapters.register_loader(bytea_oid, ByteaDataLoader)
    cur.adapters.register_loader(bytea_array_oid, ByteaDataLoader)

    # Binary-format loader: passes the payload through untouched.
    cur.adapters.register_loader(bytea_oid, ByteaBinaryDataLoader)
    cur.adapters.register_loader(bytea_array_oid, ByteaBinaryDataLoader)


class InetLoader(InetLoader):
def load(self, data):
if isinstance(data, memoryview):
Expand Down Expand Up @@ -240,6 +255,32 @@ def load(self, data):
return 'binary data' if data is not None else None


class ByteaDataLoader(Loader):
    # Text-format loader that recovers the original binary payload for
    # bytea values (which arrive hex-escaped as '\x...').
    def load(self, data):
        """Decode a hex-escaped bytea text value into raw bytes.

        Falsy input (None, empty) and values that cannot be hex-decoded
        are returned unchanged.
        """
        if data:
            if isinstance(data, memoryview):
                data = bytes(data).decode()
            if data.startswith('\\x'):
                data = data[2:]
            try:
                return bytes.fromhex(data)
            except ValueError:
                # In case of error while converting hex to bytes, return
                # original data.
                return data
        # Falsy input is passed through as-is.  (The original trailing
        # `return data if data is not None else None` was unreachable
        # dead code — both branches above already return.)
        return data


class ByteaBinaryDataLoader(Loader):
    # Binary-format loader: the wire payload already is the raw bytes,
    # so pass it through untouched.
    format = _pq_Format.BINARY

    def load(self, data):
        """Return the binary payload unchanged (None stays None)."""
        # `data if data is not None else None` is identically `data`;
        # the redundant conditional has been removed.
        return data


class TextLoaderpgAdmin(TextLoader):
def load(self, data):
postgres_encoding, python_encoding = get_encoding(
Expand Down
Loading