 import logging
 import os
 import re
-import resource
 import shutil
 import subprocess  # nosec
 import sys
 import tempfile
-import textwrap
 import threading
 import time
 import uuid
@@ -531,15 +529,6 @@ def run(
         tmpdir_lock: Optional[threading.Lock] = None,
     ) -> None:
 
-        # attempt to set an "unlimited" (-1) heap size for this process
-        # (& thus commandline size) on any system that supports it
-        # TODO: Do containers inherit the processes's limits?
-        # Can they be configured from outside of the container?
-        try:
-            resource.setrlimit(resource.RLIMIT_DATA, (-1, -1))
-        except Exception:
-            pass
-
         if tmpdir_lock:
             with tmpdir_lock:
                 if not os.path.exists(self.tmpdir):
@@ -600,20 +589,6 @@ def run(
 
 class ContainerCommandLineJob(JobBase, metaclass=ABCMeta):
     """Commandline job using containers."""
-    def __init__(
-        self,
-        builder: Builder,
-        joborder: CWLObjectType,
-        make_path_mapper: Callable[..., PathMapper],
-        requirements: List[CWLObjectType],
-        hints: List[CWLObjectType],
-        name: str,
-    ) -> None:
-        super(JobBase, self).__init__(
-            builder, joborder, make_path_mapper, requirements, hints, name
-        )
-        self.universal_file_bindmount_dir = None
-        self.bindings_map = None
 
     @abstractmethod
     def get_from_requirements(
@@ -706,69 +681,10 @@ def add_volumes(
         any_path_okay: bool = False,
     ) -> None:
         """Append volume mappings to the runtime option list."""
-        container_outdir = self.builder.outdir
-        for key, vol in (itm for itm in pathmapper.items() if itm[1].staged):
-            host_outdir_tgt = None  # type: Optional[str]
-            if vol.target.startswith(container_outdir + "/"):
-                host_outdir_tgt = os.path.join(
-                    self.outdir, vol.target[len(container_outdir) + 1 :]
-                )
-            if not host_outdir_tgt and not any_path_okay:
-                raise WorkflowException(
-                    "No mandatory DockerRequirement, yet path is outside "
-                    "the designated output directory, also know as "
-                    "$(runtime.outdir): {}".format(vol)
-                )
-            if vol.type in ("File", "Directory"):
-                self.add_file_or_directory_volume(runtime, vol, host_outdir_tgt)
-            elif vol.type == "WritableFile":
-                self.add_writable_file_volume(
-                    runtime, vol, host_outdir_tgt, tmpdir_prefix
-                )
-            elif vol.type == "WritableDirectory":
-                self.add_writable_directory_volume(
-                    runtime, vol, host_outdir_tgt, tmpdir_prefix
-                )
-            elif vol.type in ["CreateFile", "CreateWritableFile"]:
-                new_path = self.create_file_and_add_volume(
-                    runtime, vol, host_outdir_tgt, secret_store, tmpdir_prefix
-                )
-                pathmapper.update(key, new_path, vol.target, vol.type, vol.staged)
-
-        # Dir of individual file inputs for the job (all named as uuid4).
-        # This creates the same dir inside of the container as exists outside of it,
-        # Overlayfs must be supported/enabled (which should always be true for CWL).
-        src = dst = self.universal_file_bindmount_dir
-        runtime.append(f"--bind={src}:{dst}:rw")
-
-        # Make a TSV of the file mappings.
-        mapping_tsv = os.path.join(self.universal_file_bindmount_dir, 'mapping.tsv')
-        with open(mapping_tsv, 'w') as f:
-            # 1. Sort by the destination path, which should sort alphabetically
-            # and by shortest path first.
-            # 2. Then, when we go to hardlink the files, we
-            # should then just be able to hardlink them in order.
-            for (src, dst, writable) in sorted(self.bindings_map, key=lambda x: len(x[1])):
-                f.write('\t'.join((src, dst, writable)) + '\n')
-
-        # Make the script that uses the TSV file mappings to hardlink everything
-        # inside of the container to where the job expects to find them.
-        # This script needs to be the first thing run inside of the container.
-        linking_script = os.path.join(self.universal_file_bindmount_dir, 'hard_linking_script.py')
-        # TODO: Write in bash instead. All images might not have python.
-        with open(linking_script, 'w') as f:
-            f.write(textwrap.dedent(f"""
-                import os
-
-                with open('{mapping_tsv}', 'r') as f:
-                    for line in f:
-                        src, dst, writable = line.split('\\t')
-                        os.makedirs(os.path.dirname(dst), exist_ok=True)
-                        os.link(src, dst)
-                        # TODO: set the permissions on the file here after linking
-
-                """[1:]))
-        os.chmod(linking_script, 0o777)
+        staging_dir = tempfile.mkdtemp()
+        pathmapper.reset_stagedir(staging_dir)
+        stage_files(pathmapper, symlink=False)
+        self.append_volume(runtime, staging_dir, staging_dir)
 
     def run(
         self,
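Note on the replacement hunk in add_volumes: instead of writing a per-file mapping TSV plus an in-container hardlinking script, the new code creates one temporary staging directory, re-stages the inputs into it as real copies (symlink=False), and bind-mounts that single directory into the container at the same path. Below is a minimal standalone sketch of that staging pattern; the stage_into_dir helper, its (host_path, relative_target) input format, and the Docker-style --volume flag are illustrative assumptions, not cwltool's actual API.

import os
import shutil
import tempfile
from typing import Iterable, List, Tuple


def stage_into_dir(files: Iterable[Tuple[str, str]]) -> Tuple[str, List[str]]:
    """Copy (host_path, relative_target) pairs into one temporary staging
    directory and return that directory plus a single volume option.

    Illustrative sketch only: cwltool does this via PathMapper.reset_stagedir()
    and stage_files(), not via this helper.
    """
    staging_dir = tempfile.mkdtemp(prefix="stage_")
    for host_path, rel_target in files:
        dst = os.path.join(staging_dir, rel_target)
        os.makedirs(os.path.dirname(dst), exist_ok=True)
        shutil.copy(host_path, dst)  # real copy, mirroring symlink=False
    # One mount covers every staged input, mapped to the same path in the container.
    return staging_dir, [f"--volume={staging_dir}:{staging_dir}:ro"]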