
Commit b900ded

Merge branch 'master' into master
2 parents: 374e856 + de7faf9

1,297 files changed: +40,366 / −19,119 lines changed


.actions/assistant.py

Lines changed: 75 additions & 10 deletions
```diff
@@ -12,6 +12,7 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 import glob
+import logging
 import os
 import pathlib
 import re
```

```diff
@@ -25,7 +26,7 @@
 from pathlib import Path
 from typing import Any, Dict, Iterable, Iterator, List, Optional, Sequence, Tuple, Union
 
-from pkg_resources import parse_requirements, Requirement, yield_lines
+from pkg_resources import Requirement, parse_requirements, yield_lines
 
 REQUIREMENT_FILES = {
     "pytorch": (
```

```diff
@@ -35,14 +36,19 @@
         "requirements/pytorch/examples.txt",
     ),
     "app": (
-        "requirements/app/base.txt",
-        "requirements/app/ui.txt",
+        "requirements/app/app.txt",
         "requirements/app/cloud.txt",
+        "requirements/app/ui.txt",
     ),
     "fabric": (
         "requirements/fabric/base.txt",
         "requirements/fabric/strategies.txt",
     ),
+    "data": (
+        "requirements/data/data.txt",
+        "requirements/data/cloud.txt",
+        "requirements/data/examples.txt",
+    ),
 }
 REQUIREMENT_FILES_ALL = list(chain(*REQUIREMENT_FILES.values()))
 
```
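
For context on the `REQUIREMENT_FILES_ALL` line kept above: `chain(*values)` flattens the per-package tuples into one list, so the new `"data"` entry is picked up automatically by every consumer of the aggregate. A minimal, self-contained sketch (with an abridged stand-in mapping):

```python
from itertools import chain

# Abridged stand-in for the REQUIREMENT_FILES mapping in assistant.py.
REQUIREMENT_FILES = {
    "fabric": ("requirements/fabric/base.txt", "requirements/fabric/strategies.txt"),
    "data": ("requirements/data/data.txt", "requirements/data/cloud.txt"),
}

# chain(*...) concatenates the per-package tuples, so adding a package key
# needs no extra wiring downstream.
REQUIREMENT_FILES_ALL = list(chain(*REQUIREMENT_FILES.values()))
print(REQUIREMENT_FILES_ALL)
# ['requirements/fabric/base.txt', 'requirements/fabric/strategies.txt',
#  'requirements/data/data.txt', 'requirements/data/cloud.txt']
```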

```diff
@@ -80,6 +86,7 @@ def adjust(self, unfreeze: str) -> str:
         'arrow>=1.2.0'
         >>> _RequirementWithComment("arrow").adjust("major")
         'arrow'
+
         """
         out = str(self)
         if self.strict:
```

```diff
@@ -109,6 +116,7 @@ def _parse_requirements(strs: Union[str, Iterable[str]]) -> Iterator[_Requiremen
     >>> txt = '\\n'.join(txt)
     >>> [r.adjust('none') for r in _parse_requirements(txt)]
     ['this', 'example', 'foo # strict', 'thing']
+
     """
     lines = yield_lines(strs)
     pip_argument = None
```
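
The doctest above shows the key behavior of `_parse_requirements`: a requirement carrying a `# strict` comment keeps that marker through parsing. A small repro outside the doctest runner (assumes `.actions/assistant.py` is importable; the input list is reconstructed from the doctest output, since the original `txt` definition is elided in this hunk):

```python
# Assumes .actions/ is on sys.path so assistant.py can be imported directly.
from assistant import _parse_requirements

# Input reconstructed from the doctest output; '# strict' marks a pin that
# must survive every unfreeze mode.
txt = "\n".join(["this", "example", "foo # strict", "thing"])
print([r.adjust("none") for r in _parse_requirements(txt)])
# ['this', 'example', 'foo # strict', 'thing']
```
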
```diff
@@ -142,10 +150,14 @@ def load_requirements(path_dir: str, file_name: str = "base.txt", unfreeze: str
 
     >>> path_req = os.path.join(_PROJECT_ROOT, "requirements")
     >>> load_requirements(path_req, "docs.txt", unfreeze="major") # doctest: +ELLIPSIS +NORMALIZE_WHITESPACE
-    ['sphinx<6.0,>=4.0', ...]
+    ['sphinx<...]
+
     """
     assert unfreeze in {"none", "major", "all"}
     path = Path(path_dir) / file_name
+    if not path.exists():
+        logging.warning(f"Folder {path_dir} does not have any base requirements.")
+        return []
     assert path.exists(), (path_dir, file_name, path)
     text = path.read_text()
     return [req.adjust(unfreeze) for req in _parse_requirements(text)]
```
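
This hunk changes the missing-file behavior of `load_requirements` from a hard `AssertionError` to a warning plus an empty result; note that the pre-existing `assert path.exists(...)` on the next line becomes unreachable in the missing-file case. A standalone sketch of the guard (hypothetical `load_requirements_sketch`, not the real helper):

```python
import logging
from pathlib import Path

def load_requirements_sketch(path_dir: str, file_name: str = "base.txt") -> list:
    """Mirror of the new early-return: a missing requirements file now logs a
    warning and yields an empty list instead of raising."""
    path = Path(path_dir) / file_name
    if not path.exists():
        logging.warning(f"Folder {path_dir} does not have any base requirements.")
        return []
    return path.read_text().splitlines()

# A package folder without the requested file no longer aborts the caller:
assert load_requirements_sketch("requirements/does-not-exist") == []
```
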
```diff
@@ -156,6 +168,7 @@ def load_readme_description(path_dir: str, homepage: str, version: str) -> str:
 
     >>> load_readme_description(_PROJECT_ROOT, "", "") # doctest: +ELLIPSIS +NORMALIZE_WHITESPACE
     '...PyTorch Lightning is just organized PyTorch...'
+
     """
     path_readme = os.path.join(path_dir, "README.md")
     with open(path_readme, encoding="utf-8") as fo:
```

```diff
@@ -187,13 +200,12 @@ def load_readme_description(path_dir: str, homepage: str, version: str) -> str:
     skip_begin = r"<!-- following section will be skipped from PyPI description -->"
     skip_end = r"<!-- end skipping PyPI description -->"
     # todo: wrap content as commented description
-    text = re.sub(rf"{skip_begin}.+?{skip_end}", "<!-- -->", text, flags=re.IGNORECASE + re.DOTALL)
+    return re.sub(rf"{skip_begin}.+?{skip_end}", "<!-- -->", text, flags=re.IGNORECASE + re.DOTALL)
 
     # # https://github.com/Borda/pytorch-lightning/releases/download/1.1.0a6/codecov_badge.png
     # github_release_url = os.path.join(homepage, "releases", "download", version)
     # # download badge and replace url with local file
     # text = _parse_for_badge(text, github_release_url)
-    return text
 
 
 def distribute_version(src_folder: str, ver_file: str = "version.info") -> None:
```

```diff
@@ -208,7 +220,7 @@ def distribute_version(src_folder: str, ver_file: str = "version.info") -> None
     shutil.copy2(ver_template, fpath)
 
 
-def _download_frontend(pkg_path: str):
+def _download_frontend(pkg_path: str, version: str = "v0.0.0"):
     """Downloads an archive file for a specific release of the Lightning frontend and extracts it to the correct
     directory."""
 
```

```diff
@@ -218,13 +230,13 @@ def _download_frontend(pkg_path: str):
 
     shutil.rmtree(frontend_dir, ignore_errors=True)
     # TODO: remove this once lightning-ui package is ready as a dependency
-    frontend_release_url = "https://storage.googleapis.com/grid-packages/lightning-ui/v0.0.0/build.tar.gz"
+    frontend_release_url = f"https://lightning-packages.s3.amazonaws.com/ui/{version}.tar.gz"
     response = urllib.request.urlopen(frontend_release_url)
 
     file = tarfile.open(fileobj=response, mode="r|gz")
     file.extractall(path=download_dir)
 
-    shutil.move(os.path.join(download_dir, "build"), frontend_dir)
+    shutil.move(download_dir, frontend_dir)
     print("The Lightning UI has successfully been downloaded!")
 
     # If installing from source without internet connection, we don't want to break the installation
```
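
Two things change here: `_download_frontend` gains a `version` parameter (defaulting to the previously hard-coded `v0.0.0`), and the extracted tree is moved as a whole rather than its `build/` subfolder, which suggests the new S3 archives are rooted differently. A self-contained sketch of the same stream-and-extract pattern (hypothetical `fetch_frontend`, not the repo's function):

```python
import shutil
import tarfile
import tempfile
import urllib.request

def fetch_frontend(url: str, target_dir: str) -> None:
    """Hypothetical standalone version of the download step above."""
    download_dir = tempfile.mkdtemp()
    response = urllib.request.urlopen(url)
    # mode="r|gz" reads the HTTP response as a non-seekable gzip stream,
    # so the archive never has to be saved to disk as a whole file.
    with tarfile.open(fileobj=response, mode="r|gz") as archive:
        archive.extractall(path=download_dir)
    shutil.rmtree(target_dir, ignore_errors=True)
    shutil.move(download_dir, target_dir)
```
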
```diff
@@ -236,11 +248,12 @@ def _load_aggregate_requirements(req_dir: str = "requirements", freeze_requireme
     """Load all base requirements from all particular packages and prune duplicates.
 
     >>> _load_aggregate_requirements(os.path.join(_PROJECT_ROOT, "requirements"))
+
     """
     requires = [
         load_requirements(d, unfreeze="none" if freeze_requirements else "major")
         for d in glob.glob(os.path.join(req_dir, "*"))
-        # skip empty folder as git artefacts, and resolving Will's special issue
+        # skip empty folder (git artifacts), and resolving Will's special issue
        if os.path.isdir(d) and len(glob.glob(os.path.join(d, "*"))) > 0 and not os.path.basename(d).startswith("_")
     ]
     if not requires:
```
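
The comprehension's filter is easy to misread, so here it is in isolation (same logic as the context lines above; `req_dir` value is illustrative):

```python
import glob
import os

req_dir = "requirements"
# Keep only real, non-empty directories whose basename does not start with "_":
# skips stray files, empty git-artifact folders, and private folders.
req_dirs = [
    d
    for d in glob.glob(os.path.join(req_dir, "*"))
    if os.path.isdir(d) and glob.glob(os.path.join(d, "*")) and not os.path.basename(d).startswith("_")
]
print(req_dirs)
```
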
```diff
@@ -292,6 +305,7 @@ def _replace_imports(lines: List[str], mapping: List[Tuple[str, str]], lightnin
     'http://pytorch_lightning.ai', \
     'from lightning_fabric import __version__', \
     '@lightning.ai']
+
     """
     out = lines[:]
     for source_import, target_import in mapping:
```

```diff
@@ -404,6 +418,7 @@ def _replace_min(fname: str) -> None:
     def replace_oldest_ver(requirement_fnames: Sequence[str] = REQUIREMENT_FILES_ALL) -> None:
         """Replace the min package version by fixed one."""
         for fname in requirement_fnames:
+            print(fname)
             AssistantCLI._replace_min(fname)
 
     @staticmethod
```

```diff
@@ -421,6 +436,56 @@ def copy_replace_imports(
             source_dir, source_imports, target_imports, target_dir=target_dir, lightning_by=lightning_by
         )
 
+    @staticmethod
+    def pull_docs_files(
+        gh_user_repo: str,
+        target_dir: str = "docs/source-pytorch/XXX",
+        checkout: str = "refs/tags/1.0.0",
+        source_dir: str = "docs/source",
+        as_orphan: bool = False,
+    ) -> None:
+        """Pull docs pages from external source and append to local docs."""
+        import zipfile
+
+        zip_url = f"https://github.com/{gh_user_repo}/archive/{checkout}.zip"
+
+        with tempfile.TemporaryDirectory() as tmp:
+            zip_file = os.path.join(tmp, "repo.zip")
+            try:
+                urllib.request.urlretrieve(zip_url, zip_file)
+            except urllib.error.HTTPError:
+                raise RuntimeError(f"Requesting file '{zip_url}' does not exist or it is just unavailable.")
+
+            with zipfile.ZipFile(zip_file, "r") as zip_ref:
+                zip_ref.extractall(tmp)
+
+            zip_dirs = [d for d in glob.glob(os.path.join(tmp, "*")) if os.path.isdir(d)]
+            # check that the extracted archive has only repo folder
+            assert len(zip_dirs) == 1
+            repo_dir = zip_dirs[0]
+
+            ls_pages = glob.glob(os.path.join(repo_dir, source_dir, "*.rst"))
+            ls_pages += glob.glob(os.path.join(repo_dir, source_dir, "**", "*.rst"))
+            for rst in ls_pages:
+                rel_rst = rst.replace(os.path.join(repo_dir, source_dir) + os.path.sep, "")
+                rel_dir = os.path.dirname(rel_rst)
+                os.makedirs(os.path.join(_PROJECT_ROOT, target_dir, rel_dir), exist_ok=True)
+                new_rst = os.path.join(_PROJECT_ROOT, target_dir, rel_rst)
+                if os.path.isfile(new_rst):
+                    logging.warning(f"Page {new_rst} already exists in the local tree so it will be skipped.")
+                    continue
+                AssistantCLI._copy_rst(rst, new_rst, as_orphan=as_orphan)
+
+    @staticmethod
+    def _copy_rst(rst_in, rst_out, as_orphan: bool = False):
+        """Copy RST page with optional inserting orphan statement."""
+        with open(rst_in, encoding="utf-8") as fopen:
+            page = fopen.read()
+        if as_orphan and ":orphan:" not in page:
+            page = ":orphan:\n\n" + page
+        with open(rst_out, "w", encoding="utf-8") as fopen:
+            fopen.write(page)
+
 
 if __name__ == "__main__":
     import jsonargparse
```
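
The new `pull_docs_files` static method is reachable, like the other `AssistantCLI` methods, through the jsonargparse entry point at the bottom of the file. A hypothetical direct call (repo name, tag, and target directory are illustrative only, not values from this commit):

```python
# Assumes .actions/ is on sys.path; all argument values are illustrative.
from assistant import AssistantCLI

AssistantCLI.pull_docs_files(
    gh_user_repo="Lightning-AI/some-docs-repo",  # hypothetical source repo
    target_dir="docs/source-pytorch/integrations",  # hypothetical destination
    checkout="refs/tags/1.0.0",
    as_orphan=True,  # prepend :orphan: so Sphinx does not warn about unlinked pages
)
```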

.azure/README.md

Lines changed: 24 additions & 0 deletions
````diff
@@ -44,3 +44,27 @@ done
 ```bash
 ps aux | grep start.sh
 ```
+
+# Machine maintenance
+
+Since most of our jobs/checks are running in a Docker container, the OS/machine can become polluted and fail to run with errors such as:
+
+```
+No space left on device : '/azp/agent-litGPU-21_0,1/_diag/pages/8bb191f4-a8c2-419a-8788-66e3f0522bea_1.log'
+```
+
+In such cases, you need to log in to the machine and run `docker system prune`.
+
+## Automated ways
+
+Let's explore adding a cron job for periodically removing all Docker caches:
+
+1. Open your user's crontab for editing: `crontab -e`
+1. Schedule/add the command with the `--force` flag to force pruning without interactive confirmation:
+   ```bash
+   # every day at 2:00 AM clean docker caches
+   0 2 * * * docker system prune --force
+   ```
+1. Verify the entry: `crontab -l`
+
+Note: You may need to add yourself to the Docker group by running `sudo usermod -aG docker <your_username>` to have permission to execute this command without needing `sudo` and entering the password.
````
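
Before scheduling the prune, it can help to see what is actually consuming space and to confirm the cron user can reach the Docker daemon. A couple of optional checks (standard Docker/Linux commands, not part of this commit):

```bash
# Show how much space images, containers, volumes, and build cache consume
docker system df

# Confirm the current user is in the docker group, so the cron entry
# can run without sudo
id -nG "$USER" | grep -qw docker && echo "ok: in docker group"
```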
