In Python, launch the job with spark-submit via subprocess and parse the YARN application id out of its stderr:
import re
import subprocess

import config  # project-local settings module used by the original snippet

cmd_list = [{
    'cmd': '/usr/bin/spark-submit --name %s --master yarn --deploy-mode cluster '
           '--executor-memory %s --executor-cores %s --num-executors %s '
           '--class %s %s %s'
           % (
               app_name,
               config.SJ_EXECUTOR_MEMORY,
               config.SJ_EXECUTOR_CORES,
               config.SJ_NUM_OF_EXECUTORS,
               config.PRODUCT_SNAPSHOT_SKU_PRESTO_CLASS,
               config.SPARK_JAR_LOCATION,
               config.SPARK_LOGGING_ENABLED,
           ),
    'cwd': config.WORK_DIR,
}]
cmd_obj = cmd_list[0]
# In cluster deploy mode, spark-submit logs the YARN application id to stderr.
cmd_output = subprocess.run(
    cmd_obj['cmd'], shell=True, check=True,
    cwd=cmd_obj['cwd'], stderr=subprocess.PIPE,
).stderr.decode('utf-8')

# YARN ids look like application_<13-digit cluster timestamp>_<sequence number>.
yarn_application_ids = re.findall(r'application_\d{13}_\d{4}', cmd_output)
if yarn_application_ids:
    yarn_application_id = yarn_application_ids[0]
    yarn_command = 'yarn logs -applicationId ' + yarn_application_id