Skip to content

Commit 324a29b

Browse files
authored
Merge pull request #423 from Azure/dev
Merge dev to main to update Python V2 samples
2 parents 9539fd3 + 0ed7e36 commit 324a29b

File tree

16 files changed

+453
-0
lines changed

16 files changed

+453
-0
lines changed
Lines changed: 5 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,5 @@
1+
.git*
2+
.vscode
3+
local.settings.json
4+
test
5+
.venv
Lines changed: 130 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,130 @@
1+
# Byte-compiled / optimized / DLL files
2+
__pycache__/
3+
*.py[cod]
4+
*$py.class
5+
6+
# C extensions
7+
*.so
8+
9+
# Distribution / packaging
10+
.Python
11+
build/
12+
develop-eggs/
13+
dist/
14+
downloads/
15+
eggs/
16+
.eggs/
17+
lib/
18+
lib64/
19+
parts/
20+
sdist/
21+
var/
22+
wheels/
23+
pip-wheel-metadata/
24+
share/python-wheels/
25+
*.egg-info/
26+
.installed.cfg
27+
*.egg
28+
MANIFEST
29+
30+
# PyInstaller
31+
# Usually these files are written by a python script from a template
32+
# before PyInstaller builds the exe, so as to inject date/other infos into it.
33+
*.manifest
34+
*.spec
35+
36+
# Installer logs
37+
pip-log.txt
38+
pip-delete-this-directory.txt
39+
40+
# Unit test / coverage reports
41+
htmlcov/
42+
.tox/
43+
.nox/
44+
.coverage
45+
.coverage.*
46+
.cache
47+
nosetests.xml
48+
coverage.xml
49+
*.cover
50+
.hypothesis/
51+
.pytest_cache/
52+
53+
# Translations
54+
*.mo
55+
*.pot
56+
57+
# Django stuff:
58+
*.log
59+
local_settings.py
60+
db.sqlite3
61+
62+
# Flask stuff:
63+
instance/
64+
.webassets-cache
65+
66+
# Scrapy stuff:
67+
.scrapy
68+
69+
# Sphinx documentation
70+
docs/_build/
71+
72+
# PyBuilder
73+
target/
74+
75+
# Jupyter Notebook
76+
.ipynb_checkpoints
77+
78+
# IPython
79+
profile_default/
80+
ipython_config.py
81+
82+
# pyenv
83+
.python-version
84+
85+
# pipenv
86+
# According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control.
87+
# However, in case of collaboration, if having platform-specific dependencies or dependencies
88+
# having no cross-platform support, pipenv may install dependencies that don’t work, or not
89+
# install all needed dependencies.
90+
#Pipfile.lock
91+
92+
# celery beat schedule file
93+
celerybeat-schedule
94+
95+
# SageMath parsed files
96+
*.sage.py
97+
98+
# Environments
99+
.env
100+
.venv
101+
env/
102+
venv/
103+
ENV/
104+
env.bak/
105+
venv.bak/
106+
107+
# Spyder project settings
108+
.spyderproject
109+
.spyproject
110+
111+
# Rope project settings
112+
.ropeproject
113+
114+
# mkdocs documentation
115+
/site
116+
117+
# mypy
118+
.mypy_cache/
119+
.dmypy.json
120+
dmypy.json
121+
122+
# Pyre type checker
123+
.pyre/
124+
125+
# Azure Functions artifacts
126+
bin
127+
obj
128+
appsettings.json
129+
local.settings.json
130+
.python_packages
Lines changed: 3 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,3 @@
1+
# Fan-Out Fan-In
2+
3+
This directory contains an executable version of [this](https://docs.microsoft.com/en-us/azure/azure-functions/durable/durable-functions-cloud-backup?tabs=python) tutorial. Please review the link above for instructions on how to run it.
Lines changed: 11 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,11 @@
1+
<Project Sdk="Microsoft.NET.Sdk">
2+
<PropertyGroup>
3+
<TargetFramework>netcoreapp3.1</TargetFramework>
4+
<WarningsAsErrors></WarningsAsErrors>
5+
<DefaultItemExcludes>**</DefaultItemExcludes>
6+
</PropertyGroup>
7+
<ItemGroup>
8+
<PackageReference Include="Microsoft.Azure.WebJobs.Extensions.DurableTask" Version="2.9.1" />
9+
<PackageReference Include="Microsoft.Azure.WebJobs.Script.ExtensionsMetadataGenerator" Version="1.1.3" />
10+
</ItemGroup>
11+
</Project>
Lines changed: 87 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,87 @@
1+
from typing import List
2+
3+
import os
4+
from os.path import dirname
5+
6+
import json
7+
import pathlib
8+
import logging
9+
10+
from azure.storage.blob import BlobServiceClient
11+
from azure.core.exceptions import ResourceExistsError
12+
13+
import azure.functions as func
14+
import azure.durable_functions as df
15+
16+
# Durable Functions app instance; ANONYMOUS auth level means the HTTP
# starter endpoint below can be called without a function key.
myApp = df.DFApp(http_auth_level=func.AuthLevel.ANONYMOUS)
17+
18+
@myApp.route(route="orchestrators/{functionName}")
@myApp.durable_client_input(client_name="client")
async def HttpStart(req: func.HttpRequest, client):
    """HTTP starter: launch the orchestrator named in the route.

    The request body is decoded and parsed as JSON, then handed to the
    orchestrator as its input. Returns the standard Durable Functions
    check-status response (status/terminate/etc. management URLs).

    Raises json.JSONDecodeError if the request body is not valid JSON.
    """
    # json.loads returns the parsed object (dict/list/str/...), not
    # necessarily a str, so no misleading `: str` annotation here.
    payload = json.loads(req.get_body().decode())  # Load JSON post request data
    instance_id = await client.start_new(req.route_params["functionName"], client_input=payload)

    logging.info(f"Started orchestration with ID = '{instance_id}'.")

    return client.create_check_status_response(req, instance_id)
27+
28+
@myApp.orchestration_trigger(context_name="context")
def E2_BackupSiteContent(context: df.DurableOrchestrationContext):
    """Fan-out/fan-in orchestrator: back up files under a directory to blob storage.

    Input (via context.get_input()): the root directory path as a str.
    Fans out one E2_CopyFileToBlob activity per file discovered by
    E2_GetFileList, waits for all of them, and returns the total number
    of bytes uploaded.

    Raises:
        Exception: if no input directory path was supplied.
    """
    root_directory: str = context.get_input()

    if not root_directory:
        raise Exception("A directory path is required as input")

    files = yield context.call_activity("E2_GetFileList", root_directory)

    # Fan out: schedule one copy activity per file, then wait for all of
    # them to complete with task_all.
    tasks = [context.call_activity("E2_CopyFileToBlob", file) for file in files]
    results = yield context.task_all(tasks)

    # Fan in: each activity returned its file's byte count; sum them.
    return sum(results)
43+
44+
# Storage connection string, read once at import time from app settings.
# NOTE(review): assumes the AzureWebJobsStorage setting is present — if it
# is missing this is None and E2_CopyFileToBlob will fail when building
# the BlobServiceClient; confirm local.settings.json / app config.
connect_str = os.getenv('AzureWebJobsStorage')
45+
46+
@myApp.activity_trigger(input_name="rootDirectory")
def E2_GetFileList(rootDirectory):
    """Activity: collect the paths of the sample's own function files.

    Walks *rootDirectory* and gathers the full path of every
    __init__.py / function.json that lives under a directory whose path
    contains "E2_" (i.e. this sample's orchestrator/activity folders).

    Returns:
        list[str]: file paths in os.walk order.
    """
    all_file_paths = []
    for path, _, files in os.walk(rootDirectory):
        # Only back up the code for this sample's "E2_*" functions.
        if "E2_" in path:
            for name in files:
                # Tuple membership instead of a chained ==/or comparison.
                if name in ("__init__.py", "function.json"):
                    all_file_paths.append(os.path.join(path, name))

    return all_file_paths
60+
61+
@myApp.activity_trigger(input_name="filePath")
def E2_CopyFileToBlob(filePath):
    """Activity: upload one local file to the "backups" container.

    Returns:
        int: the size of the uploaded file in bytes.
    """
    # Client for the storage account named by the module-level connect_str.
    service = BlobServiceClient.from_connection_string(connect_str)

    container_name = "backups"

    # Ensure the container exists; a pre-existing container is not an error.
    try:
        service.create_container(container_name)
    except ResourceExistsError:
        pass

    # Blob name is "<parent-dir>_<file-name>", built from the last two
    # components of the input path.
    parent_dir, fname = pathlib.Path(filePath).parts[-2:]
    blob = service.get_blob_client(container=container_name, blob=parent_dir + "_" + fname)

    # Size of the file on disk — this is the activity's return value.
    size_in_bytes = os.path.getsize(filePath)

    # Stream the file contents up to the blob.
    with open(filePath, "rb") as source:
        blob.upload_blob(source)

    return size_in_bytes
Lines changed: 11 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,11 @@
1+
{
2+
"version": "2.0",
3+
"logging": {
4+
"applicationInsights": {
5+
"samplingSettings": {
6+
"isEnabled": true,
7+
"excludedTypes": "Request"
8+
}
9+
}
10+
}
11+
}
Lines changed: 4 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,4 @@
1+
{
2+
"$schema": "http://json.schemastore.org/proxies",
3+
"proxies": {}
4+
}
Lines changed: 7 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,7 @@
1+
# DO NOT include azure-functions-worker in this file
2+
# The Python Worker is managed by Azure Functions platform
3+
# Manually managing azure-functions-worker may cause unexpected issues
4+
5+
azure-functions
6+
azure-functions-durable
7+
azure-storage-blob
Lines changed: 5 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,5 @@
1+
.git*
2+
.vscode
3+
local.settings.json
4+
test
5+
.venv

0 commit comments

Comments
 (0)