
Commit 6dc463d

Add argument 'job_script_epilogue' (#677)
* Add argument 'job_script_epilogue'

  This allows the user to specify commands that should be run after the worker command has exited. It works similarly to the 'job_script_prologue' argument.

* Mention 'job_script_epilogue' parameter in existing docs
1 parent c891c5a
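
For reference, a minimal usage sketch of the new argument (assuming a SLURM deployment; the cluster class, resources, and commands here are illustrative only, mirroring the tests in this commit):

    from dask_jobqueue import SLURMCluster

    # job_script_epilogue mirrors job_script_prologue: prologue lines run before
    # the worker starts, epilogue lines run after the worker command has exited.
    with SLURMCluster(
        cores=2,
        memory="4GB",
        job_script_prologue=['export LANG="en_US.utf8"'],
        job_script_epilogue=['echo "Job finished"'],
    ) as cluster:
        print(cluster.job_script())  # the epilogue line appears after the worker command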

File tree

10 files changed: +48 -0 lines changed

dask_jobqueue/core.py

Lines changed: 10 additions & 0 deletions
@@ -58,6 +58,8 @@
         Deprecated: use ``job_script_prologue`` instead. This parameter will be removed in a future version.
     job_script_prologue : list
         Other commands to add to script before launching worker.
+    job_script_epilogue : list
+        Commands to add to script which will run after the worker command has exited.
     header_skip : list
         Deprecated: use ``job_directives_skip`` instead. This parameter will be removed in a future version.
     job_directives_skip : list
@@ -147,6 +149,7 @@ class Job(ProcessInterface, abc.ABC):
 %(job_header)s
 %(job_script_prologue)s
 %(worker_command)s
+%(job_script_epilogue)s
 """.lstrip()
 
     # Following class attributes should be overridden by extending classes.
@@ -176,6 +179,7 @@ def __init__(
         job_extra_directives=None,
         env_extra=None,
         job_script_prologue=None,
+        job_script_epilogue=None,
         header_skip=None,
         job_directives_skip=None,
         log_directory=None,
@@ -277,6 +281,10 @@ def __init__(
             job_script_prologue = dask.config.get(
                 "jobqueue.%s.job-script-prologue" % self.config_name
             )
+        if job_script_epilogue is None:
+            job_script_epilogue = dask.config.get(
+                "jobqueue.%s.job-script-epilogue" % self.config_name
+            )
         if env_extra is not None:
             warn = (
                 "env_extra has been renamed to job_script_prologue. "
@@ -344,6 +352,7 @@ def __init__(
         self.shebang = shebang
 
         self._job_script_prologue = job_script_prologue
+        self._job_script_epilogue = job_script_epilogue
 
         # dask-worker command line build
         dask_worker_command = "%(python)s -m %(worker_command)s" % dict(
@@ -396,6 +405,7 @@ def job_script(self):
             "job_header": self.job_header,
             "job_script_prologue": "\n".join(filter(None, self._job_script_prologue)),
             "worker_command": self._command_template,
+            "job_script_epilogue": "\n".join(filter(None, self._job_script_epilogue)),
         }
         return self._script_template % pieces
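
The epilogue is injected through the same %-style template used for the header, prologue and worker command. A standalone sketch of that mechanism (only the template keys come from the code above; the dictionary values below are illustrative):

    script_template = """
    %(job_header)s
    %(job_script_prologue)s
    %(worker_command)s
    %(job_script_epilogue)s
    """.lstrip()

    pieces = {
        "job_header": "#SBATCH -n 1",  # illustrative header
        "job_script_prologue": "\n".join(filter(None, ['export LANG="en_US.utf8"'])),
        "worker_command": "python -m distributed.cli.dask_worker tcp://scheduler:8786",  # illustrative
        "job_script_epilogue": "\n".join(filter(None, ['echo "Job finished"'])),
    }
    print(script_template % pieces)  # prologue lines precede, epilogue lines follow the worker command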

dask_jobqueue/htcondor.py

Lines changed: 6 additions & 0 deletions
@@ -62,6 +62,12 @@ def __init__(
                 self._job_script_prologue + [self._command_template]
             )
 
+        if self._job_script_epilogue is not None:
+            # Overwrite command template: append commands from job_script_epilogue separated by semicolon.
+            self._command_template = "; ".join(
+                [self._command_template] + self._job_script_epilogue
+            )
+
         self.job_header_dict = {
             "MY.DaskWorkerName": '"htcondor--$F(MY.JobId)--"',
             "batch_name": self.name,

dask_jobqueue/jobqueue.yaml

Lines changed: 8 additions & 0 deletions
@@ -23,6 +23,7 @@ jobqueue:
     walltime: '00:30:00'
     env-extra: null
     job-script-prologue: []
+    job-script-epilogue: []
     resource-spec: null
     job-extra: null
     job-extra-directives: []
@@ -57,6 +58,7 @@ jobqueue:
     walltime: '00:30:00'
     env-extra: null
     job-script-prologue: []
+    job-script-epilogue: []
     resource-spec: null
     job-extra: null
     job-extra-directives: []
@@ -90,6 +92,7 @@ jobqueue:
     walltime: '00:30:00'
     env-extra: null
     job-script-prologue: []
+    job-script-epilogue: []
     job-extra: null
     job-extra-directives: []
     job-directives-skip: []
@@ -123,6 +126,7 @@ jobqueue:
     walltime: '00:30:00'
     env-extra: null
     job-script-prologue: []
+    job-script-epilogue: []
     job-cpu: null
     job-mem: null
     job-extra: null
@@ -157,6 +161,7 @@ jobqueue:
     walltime: '00:30:00'
     env-extra: null
     job-script-prologue: []
+    job-script-epilogue: []
     resource-spec: null
     job-extra: null
     job-extra-directives: []
@@ -190,6 +195,7 @@ jobqueue:
     walltime: '00:30'
     env-extra: null
     job-script-prologue: []
+    job-script-epilogue: []
     ncpus: null
     mem: null
     job-extra: null
@@ -223,6 +229,7 @@ jobqueue:
     disk: null # Total amount of disk per job
     env-extra: null
     job-script-prologue: []
+    job-script-epilogue: []
     job-extra: null # Extra submit attributes
     job-extra-directives: {} # Extra submit attributes
     job-directives-skip: []
@@ -252,6 +259,7 @@ jobqueue:
 
     env-extra: null
     job-script-prologue: []
+    job-script-epilogue: []
     job-extra: null
     job-extra-directives: []
     job-directives-skip: []
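
Since the core.py change above reads the default from Dask's configuration, the epilogue can also be set through these config keys instead of the constructor argument. A hedged sketch (the SLURM key is just one example; any of the sections above works the same way):

    import dask
    from dask_jobqueue import SLURMCluster

    # Equivalent to putting job-script-epilogue under jobqueue.slurm in a jobqueue.yaml config file.
    with dask.config.set({"jobqueue.slurm.job-script-epilogue": ['echo "Job finished"']}):
        with SLURMCluster(cores=1, memory="2GB") as cluster:
            assert 'echo "Job finished"' in cluster.job_script()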

dask_jobqueue/tests/test_htcondor.py

Lines changed: 5 additions & 0 deletions
@@ -33,6 +33,9 @@ def test_job_script():
             "cd /some/path/",
             "source venv/bin/activate",
         ],
+        job_script_epilogue=[
+            'echo "Job finished"',
+        ],
         job_extra_directives={"+Extra": "True"},
         submit_command_extra=["-verbose"],
         cancel_command_extra=["-forcex"],
@@ -64,6 +67,7 @@ def test_job_script():
         assert f"--memory-limit {formatted_bytes}" in job_script
         assert "--nthreads 2" in job_script
         assert "--nworkers 2" in job_script
+        assert 'echo ""Job finished""' in job_script
 
 
 @pytest.mark.env("htcondor")
@@ -144,6 +148,7 @@ def test_config_name_htcondor_takes_custom_config():
         "worker-extra-args": [],
         "env-extra": None,
         "job-script-prologue": [],
+        "job-script-epilogue": [],
         "log-directory": None,
         "shebang": "#!/usr/bin/env condor_submit",
         "local-directory": "/tmp",

dask_jobqueue/tests/test_lsf.py

Lines changed: 1 addition & 0 deletions
@@ -318,6 +318,7 @@ def test_config_name_lsf_takes_custom_config():
         "worker-extra-args": [],
         "env-extra": None,
         "job-script-prologue": [],
+        "job-script-epilogue": [],
         "log-directory": None,
         "shebang": "#!/usr/bin/env bash",
         "use-stdin": None,

dask_jobqueue/tests/test_oar.py

Lines changed: 5 additions & 0 deletions
@@ -96,6 +96,9 @@ def test_job_script():
             'export LANGUAGE="en_US.utf8"',
             'export LC_ALL="en_US.utf8"',
         ],
+        job_script_epilogue=[
+            'echo "Job finished"',
+        ],
     ) as cluster:
         job_script = cluster.job_script()
         assert "#OAR" in job_script
@@ -118,6 +121,7 @@
         assert "--nthreads 2" in job_script
         assert "--nworkers 4" in job_script
         assert f"--memory-limit {formatted_bytes}" in job_script
+        assert 'echo "Job finished"' in job_script
 
 
 def test_config_name_oar_takes_custom_config():
@@ -141,6 +145,7 @@ def test_config_name_oar_takes_custom_config():
         "worker-extra-args": [],
         "env-extra": None,
         "job-script-prologue": [],
+        "job-script-epilogue": [],
         "log-directory": None,
         "shebang": "#!/usr/bin/env bash",
         "job-cpu": None,

dask_jobqueue/tests/test_pbs.py

Lines changed: 1 addition & 0 deletions
@@ -355,6 +355,7 @@ def test_config_name_pbs_takes_custom_config():
         "worker-extra-args": [],
         "env-extra": None,
         "job-script-prologue": [],
+        "job-script-epilogue": [],
         "log-directory": None,
         "shebang": "#!/usr/bin/env bash",
         "job-cpu": None,

dask_jobqueue/tests/test_sge.py

Lines changed: 5 additions & 0 deletions
@@ -61,6 +61,7 @@ def test_config_name_sge_takes_custom_config():
         "worker-extra-args": [],
         "env-extra": None,
         "job-script-prologue": [],
+        "job-script-epilogue": [],
         "log-directory": None,
         "shebang": "#!/usr/bin/env bash",
         "job-cpu": None,
@@ -84,6 +85,9 @@ def test_job_script(tmpdir):
         project="my-project",
         walltime="02:00:00",
         job_script_prologue=["export MY_VAR=my_var"],
+        job_script_epilogue=[
+            'echo "Job finished"',
+        ],
         job_extra_directives=["-w e", "-m e"],
         log_directory=log_directory,
         resource_spec="h_vmem=12G,mem_req=12G",
@@ -106,6 +110,7 @@ def test_job_script(tmpdir):
         "-l h_vmem=12G,mem_req=12G",
         "#$ -cwd",
         "#$ -j y",
+        'echo "Job finished"',
     ]:
         assert each in job_script
dask_jobqueue/tests/test_slurm.py

Lines changed: 6 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -103,6 +103,9 @@ def test_job_script():
103103
'export LANGUAGE="en_US.utf8"',
104104
'export LC_ALL="en_US.utf8"',
105105
],
106+
job_script_epilogue=[
107+
'echo "Job finished"',
108+
],
106109
) as cluster:
107110
job_script = cluster.job_script()
108111
assert "#SBATCH" in job_script
@@ -127,6 +130,8 @@ def test_job_script():
127130
assert "--nworkers 4" in job_script
128131
assert f"--memory-limit {formatted_bytes}" in job_script
129132

133+
assert 'echo "Job finished"' in job_script
134+
130135

131136
@pytest.mark.env("slurm")
132137
def test_basic(loop):
@@ -208,6 +213,7 @@ def test_config_name_slurm_takes_custom_config():
208213
"worker-extra-args": [],
209214
"env-extra": None,
210215
"job-script-prologue": [],
216+
"job-script-epilogue": [],
211217
"log-directory": None,
212218
"shebang": "#!/usr/bin/env bash",
213219
"job-cpu": None,

docs/source/clusters-advanced-tips-and-tricks.rst

Lines changed: 1 addition & 0 deletions
@@ -104,6 +104,7 @@ parameter in the submit description file. The relevant lines will look like this
 For other batch systems (``*Cluster`` classes) the additional commands will be inserted as separate lines
 in the submission script.
 
+Similarly, if you need to run some commands after the worker has exited, use the ``job_script_epilogue`` parameter.
 
 How to handle job queueing system walltime killing workers
 ----------------------------------------------------------
