6 changes: 6 additions & 0 deletions .flake8
@@ -0,0 +1,6 @@
[flake8]
ignore = E203, E231, E501, W503, E402, E741
exclude = ./.venv
max-line-length = 99
max-complexity = 18
select = B,C,E,F,W,T4,B9
62 changes: 62 additions & 0 deletions README.md
@@ -153,6 +153,68 @@ if __name__ == '__main__':

```

## S3 upload support
Direct S3 support can be enabled by installing the package with the `s3` option (`python -m pip install dash-uploader[s3]`).
To upload files directly to an S3 bucket, set up an `S3Configuration` object and pass it
to `du.configure_upload`.

Here is a minimal example:

```python
import boto3
import dash_uploader as du
from dash_uploader import s3

# Credentials are fetched from environment variables or the local AWS
# configuration; see
# https://boto3.amazonaws.com/v1/documentation/api/latest/guide/credentials.html
# for how to configure credentials for S3.
session = boto3.Session()
credentials = session.get_credentials().get_frozen_credentials()

s3_config = s3.S3Configuration(
    location=s3.S3Location(
        region_name="eu-central-1",
        endpoint_url="https://s3.eu-central-1.amazonaws.com",
        use_ssl=True,
        bucket="my-bucket",
        prefix="my-prefix",
    ),
    credentials=s3.S3Credentials(
        aws_access_key_id=credentials.access_key,
        aws_secret_access_key=credentials.secret_key,
    ),
)

UPLOAD_FOLDER_ROOT = r"/tmp/Uploads"
du.configure_upload(app=app, folder=UPLOAD_FOLDER_ROOT, s3_config=s3_config)
```

> If `s3_config` is not specified, then a standard upload to local storage will be used.

Files will be uploaded to `<endpoint_url>/<bucket>/<prefix>/[<upload_id>/]` (`<upload_id>/` is used only if `use_upload_id` is set to `True` in `du.configure_upload`). With the configuration above, uploads would land under `https://s3.eu-central-1.amazonaws.com/my-bucket/my-prefix/<upload_id>/`.

> ⚠️ Large files are uploaded to S3 in chunks using the multipart upload functionality.
> `boto3` supports multipart uploads only if each chunk is at least 5 MiB. The chunk size can be set when creating the `du.Upload` component.

> `UPLOAD_FOLDER_ROOT` is still required to store the chunks of large files while they are being uploaded to S3.
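
For instance, assuming `du.Upload`'s `chunk_size` argument (given in megabytes), a component configured for multipart-friendly chunks might look like this sketch:

```python
import dash_uploader as du

# Sketch only: `chunk_size` is given in megabytes; 8 MB chunks stay
# safely above boto3's 5 MiB multipart minimum.
upload = du.Upload(
    id="dash-uploader",
    chunk_size=8,
)
```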


## Passing multiple states to `du.callback`

It is now possible to capture the state of other Dash components in a `du.callback`: simply pass `State` object(s) as you would for a regular Dash callback. A single `State` or a list of them is accepted (see the second example below).

```python
@du.callback(
    output=Output("callback-output", "children"),
    id="dash-uploader",
    state=State("callback-output", "children"),
)
def callback_on_completion(status: du.UploadStatus, state):
    # `state` receives the current value of "callback-output".children
    return html.Ul([html.Li(str(x)) for x in status.uploaded_files])
```
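
Multiple states can be captured by passing a list of `State` objects; their values are forwarded to the callback in the order they are declared. A sketch (the dropdown component here is hypothetical):

```python
@du.callback(
    output=Output("callback-output", "children"),
    id="dash-uploader",
    state=[
        State("callback-output", "children"),
        State("my-dropdown", "value"),  # hypothetical extra component
    ],
)
def callback_on_completion(status: du.UploadStatus, previous_children, dropdown_value):
    # Both state values are available here alongside the upload status.
    return html.Ul([html.Li(str(x)) for x in status.uploaded_files])
```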


## Contributing

40 changes: 34 additions & 6 deletions dash_uploader/callbacks.py
@@ -1,7 +1,9 @@
 from pathlib import Path
+from urllib.parse import urljoin
 
 from dash.exceptions import PreventUpdate
 from dash.dependencies import Input, State
+from dash_uploader.s3 import S3Location
 
 import dash_uploader.settings as settings
 from dash_uploader.uploadstatus import UploadStatus
@@ -21,36 +23,53 @@ def wrapper(
         uploaded_files_size,
         total_files_size,
         upload_id,
+        *args,
+        **kwargs,
     ):
         if not callbackbump:
             raise PreventUpdate()
 
         uploadedfilepaths = []
+        s3_location = None
         if uploaded_filenames is not None:
-            if upload_id:
-                root_folder = Path(settings.UPLOAD_FOLDER_ROOT) / upload_id
+
+            # get config and upload id
+            s3_config = settings.s3_config
+            s3_location: S3Location = s3_config.location if s3_config else None
+            _upload_id = upload_id or ""
+
+            # build root folder
+            if s3_location:
+                _url = urljoin(s3_location.endpoint_url, s3_location.bucket)
+                _url = urljoin(_url, s3_location.prefix, allow_fragments=True)
+                _url = urljoin(_url, _upload_id, allow_fragments=True)
+                root_folder = Path(_url)
             else:
-                root_folder = Path(settings.UPLOAD_FOLDER_ROOT)
+                root_folder = Path(settings.UPLOAD_FOLDER_ROOT) / _upload_id
 
+            # construct full paths to the uploaded files, local or s3
             for filename in uploaded_filenames:
                 file = root_folder / filename
                 uploadedfilepaths.append(str(file))
+
 
         status = UploadStatus(
             uploaded_files=uploadedfilepaths,
             n_total=total_files_count,
             uploaded_size_mb=uploaded_files_size,
             total_size_mb=total_files_size,
             upload_id=upload_id,
+            s3_location=s3_location,
         )
-        return callback(status)
+        return callback(status, *args, **kwargs)
 
     return wrapper
 
 
 def callback(
     output,
     id="dash-uploader",
+    state=None,
 ):
     """
     Add a callback to dash application.
@@ -63,12 +82,15 @@ def callback(
         The output dash component
     id: str
         The id of the du.Upload component.
+    state: dash State(s)
+        The state dash component
 
     Example
     -------
     @du.callback(
         output=Output('callback-output', 'children'),
         id='dash-uploader',
+        state=State('callback-state', 'children'),
     )
     def get_a_list(filenames):
         return html.Ul([html.Li(filenames)])
@@ -108,6 +130,11 @@ def add_callback(function):
             # the `prevent_initial_call` option was added in Dash v.1.12
             kwargs["prevent_initial_call"] = True
 
+        # input states from application
+        extra_states = []
+        if state:
+            extra_states = [state] if isinstance(state, State) else state
+
         # Input: Change in the props will trigger callback.
         # Whenever 'this.props.setProps' is called on the JS side,
         # (dash specific special prop that is passed to every
@@ -126,8 +153,9 @@ def add_callback(function):
                 State(id, "uploadedFilesSize"),
                 State(id, "totalFilesSize"),
                 State(id, "upload_id"),
-            ],
-            **kwargs
+            ]
+            + extra_states,
+            **kwargs,
         )(dash_callback)
 
         return function
30 changes: 25 additions & 5 deletions dash_uploader/configure_upload.py
@@ -1,5 +1,5 @@
 import logging
-
+from dash_uploader import s3
 import dash_uploader.settings as settings
 from dash_uploader.upload import update_upload_api
 from dash_uploader.httprequesthandler import HttpRequestHandler
@@ -9,7 +9,12 @@
 
 
 def configure_upload(
-    app, folder, use_upload_id=True, upload_api=None, http_request_handler=None
+    app,
+    folder,
+    use_upload_id=True,
+    upload_api=None,
+    http_request_handler=None,
+    s3_config: s3.S3Configuration = None,
 ):
     r"""
     Configure the upload APIs for dash app.
@@ -28,8 +33,10 @@ def configure_upload(
     use_upload_id: bool
         Determines if the uploads are put into
         folders defined by a "upload id" (upload_id).
-        If True, uploads will be put into `folder`/<upload_id>/;
-        that is, every user (for example with different
+        If True, uploads will be put into
+        `folder`/<upload_id>/ or `s3_config.location.prefix`/<upload_id>/
+        if s3_config is provided.
+        That is, every user (for example with different
         session id) will use their own folder. If False,
         all files from all sessions are uploaded into
         same folder (not recommended).
@@ -44,9 +51,14 @@
         If you provide a class, use a subclass of HttpRequestHandler.
         See the documentation of dash_uploader.HttpRequestHandler for
         more details.
+    s3_config: None or class
+        Used for uploading files to an S3 bucket. If provided, `folder` is
+        used as a temporary folder for the chunks during multipart uploads.
 
     """
     settings.UPLOAD_FOLDER_ROOT = folder
     settings.app = app
+    settings.s3_config = s3_config
 
     if upload_api is None:
         upload_api = settings.upload_api
@@ -71,6 +83,7 @@
         upload_api,
         http_request_handler=http_request_handler,
         use_upload_id=use_upload_id,
+        s3_config=s3_config,
     )
 
 
@@ -80,6 +93,7 @@ def decorate_server(
     upload_api,
     http_request_handler,
     use_upload_id=True,
+    s3_config: s3.S3Configuration = None,
 ):
     """
     Parameters
@@ -100,10 +114,16 @@
         session id) will use their own folder. If False,
         all files from all sessions are uploaded into
         same folder (not recommended).
+    s3_config: None or class
+        Used for uploading files to an S3 bucket. If provided, `folder` is
+        used as a temporary folder for the chunks during multipart uploads.
     """
 
     handler = http_request_handler(
-        server, upload_folder=temp_base, use_upload_id=use_upload_id
+        server,
+        upload_folder=temp_base,
+        use_upload_id=use_upload_id,
+        s3_config=s3_config,
     )
 
     server.add_url_rule(upload_api, None, handler.get, methods=["GET"])