Skip to content

Commit

Permalink
to check python3 when python is missing
Browse files Browse the repository at this point in the history
  • Loading branch information
tmaeno committed Sep 13, 2023
1 parent b9c27c6 commit 465b39d
Show file tree
Hide file tree
Showing 5 changed files with 20 additions and 7 deletions.
3 changes: 2 additions & 1 deletion ChangeLog.txt
Original file line number Diff line number Diff line change
@@ -1,7 +1,8 @@
** Release Notes

current
1.5.64
* fixed pyproject.toml
* improved various messages

1.5.63
* to check python3 if python is missing
Expand Down
2 changes: 1 addition & 1 deletion pandaclient/Client.py
Original file line number Diff line number Diff line change
Expand Up @@ -1061,7 +1061,7 @@ def getFullJobStatus(ids, verbose=False):
return status, pickle_loads(output)
except Exception as e:
dump_log("getFullJobStatus", e, output)
return EC_Failed,None
return EC_Failed, "cannot load pickle: {0}".format(str(e))


# set debug mode
Expand Down
2 changes: 1 addition & 1 deletion pandaclient/PandaToolsPkgInfo.py
Original file line number Diff line number Diff line change
@@ -1 +1 @@
release_version = "1.5.63"
release_version = "1.5.64"
10 changes: 8 additions & 2 deletions pandaclient/PathenaScript.py
Original file line number Diff line number Diff line change
Expand Up @@ -165,7 +165,10 @@
group_input.add_argument('--inDsTxt',action='store',dest='inDsTxt',default='',
type=str, help='a text file which contains the list of datasets to run over. newlines are replaced by commas and the result is set to --inDS. lines starting with # are ignored')
action = group_input.add_argument('--inOutDsJson', action='store', dest='inOutDsJson', default='',
help="A json file to specify input and output datasets for bulk submission. It contains a json dump of [{'inDS': a comma-concatenated input dataset names, 'outDS': output dataset name}, ...]")
help="A json file to specify input and output datasets for bulk submission. "
"It contains a json dump of [{'inDS': comma-concatenated input dataset names, "
"'outDS': output dataset name}, ...]. "
"When this option is used --bulkSubmission is automatically set internally.")
group_output.shareWithMe(action)
group_input.add_argument('--secondaryDSs', action='store', dest='secondaryDSs', default='',
help='A versatile option to specify arbitrary secondary inputs that takes a list of '
Expand Down Expand Up @@ -406,7 +409,10 @@
group_input.add_argument('--pfnList', action='store', dest='pfnList', default='',
type=str, help='Name of file which contains a list of input PFNs. Those files can be un-registered in DDM')
group_build.add_argument('--cmtConfig', action='store', dest='cmtConfig', default=None,
type=str, help='CMTCONFIG=i686-slc5-gcc43-opt is used on remote worker-node by default even if you use another CMTCONFIG locally. This option allows you to use another CMTCONFIG remotely. e.g., --cmtConfig x86_64-slc5-gcc43-opt.')
help='CMTCONFIG is extracted from local environment variables when tasks are submitted, '
'to set up the same environment on remote worker-nodes. '
'This option allows you to set up another CMTCONFIG '
'remotely. e.g., --cmtConfig x86_64-slc5-gcc43-opt.')
group_output.add_argument('--allowTaskDuplication',action='store_const',const=True,dest='allowTaskDuplication',default=False,
help="As a general rule each task has a unique outDS and history of file usage is recorded per task. This option allows multiple tasks to contribute to the same outDS. Typically useful to submit a new task with the outDS which was used by another broken task. Use this option very carefully at your own risk, since file duplication happens when the second task runs on the same input which the first task successfully processed")
group_input.add_argument('--skipFilesUsedBy', action='store', dest='skipFilesUsedBy', default='',
Expand Down
10 changes: 8 additions & 2 deletions pandaclient/PrunScript.py
Original file line number Diff line number Diff line change
Expand Up @@ -136,7 +136,10 @@ def main(get_taskparams=False, ext_args=None, dry_mode=False):

# the option is shared by both groups, group_input and group_output
action = group_input.add_argument('--inOutDsJson', action='store', dest='inOutDsJson', default='',
help="A json file to specify input and output datasets for bulk submission. It contains a json dump of [{'inDS': a comma-concatenated input dataset names, 'outDS': output dataset name}, ...]")
help="A json file to specify input and output datasets for bulk submission. "
"It contains a json dump of [{'inDS': comma-concatenated input dataset names, "
"'outDS': output dataset name}, ...]. "
"When this option is used --bulkSubmission is automatically set internally.")
group_output.shareWithMe(action)

group_evtFilter.add_argument('--goodRunListXML', action='store', dest='goodRunListXML', default='',
Expand Down Expand Up @@ -330,7 +333,10 @@ def main(get_taskparams=False, ext_args=None, dry_mode=False):
group_input.add_argument('--pfnList',action='store',dest='pfnList',default='',
help='Name of file which contains a list of input PFNs. Those files can be un-registered in DDM')
group_build.add_argument('--cmtConfig', action='store', dest='cmtConfig', default=None,
help='CMTCONFIG=i686-slc5-gcc43-opt is used on remote worker-node by default even if you use another CMTCONFIG locally. This option allows you to use another CMTCONFIG remotely. e.g., --cmtConfig x86_64-slc5-gcc43-opt.')
help='CMTCONFIG is extracted from local environment variables when tasks are submitted, '
'to set up the same environment on remote worker-nodes. '
'This option allows you to set up another CMTCONFIG '
'remotely. e.g., --cmtConfig x86_64-slc5-gcc43-opt.')
group_config.add_argument('--loadXML',action='store',dest='loadXML',default=None,
help='Expert mode: load complete submission configuration from an XML file ')
group_config.add_argument('--loadJson', action='store', dest='loadJson',default=None,
Expand Down

0 comments on commit 465b39d

Please sign in to comment.