diff --git a/testing/sdk_plan.py b/testing/sdk_plan.py index c7ec38f1..e9821c25 100644 --- a/testing/sdk_plan.py +++ b/testing/sdk_plan.py @@ -205,7 +205,7 @@ def wait_for_plan_status( else: statuses = status - initial_failures = sdk_tasks.get_failed_task_count(service_name, retry=True) + initial_failures = sdk_tasks.get_failed_task_count(service_name) wait_start = datetime.datetime.utcnow() @retrying.retry( diff --git a/testing/sdk_security.py b/testing/sdk_security.py index e603dc78..45dcdf64 100644 --- a/testing/sdk_security.py +++ b/testing/sdk_security.py @@ -188,6 +188,8 @@ def create_service_account(service_account_name: str, service_account_secret: st account=service_account_name, secret=service_account_secret ) ) + if service_account_secret == service_account_name: + log.warning("Values for service_account_name and service_account_secret are the same.") log.info("Remove any existing service account and/or secret") delete_service_account(service_account_name, service_account_secret) diff --git a/testing/sdk_tasks.py b/testing/sdk_tasks.py index 7e9cb421..8cc518e8 100644 --- a/testing/sdk_tasks.py +++ b/testing/sdk_tasks.py @@ -83,7 +83,7 @@ class Task(object): """Entry value returned by get_summary() and get_service_tasks()""" @staticmethod - def parse(task_entry: Dict[str, Any], agentid_to_hostname: Dict[str, str]) -> 'Task': + def parse(task_entry: Dict[str, Any], agentid_to_hostname: Dict[str, str]) -> "Task": agent_id = task_entry["slave_id"] matching_hostname = agentid_to_hostname.get(agent_id) if matching_hostname: @@ -155,19 +155,14 @@ def get_all_status_history(task_name: str, with_completed_tasks: bool = True) -> return history -def get_failed_task_count(service_name: str, retry: bool = False) -> int: - history_response = sdk_cmd.cluster_request( - "GET", "/dcos-history-service/history/last", retry=retry +def get_failed_task_count(service_name: str) -> int: + return len( + [ + t + for t in get_service_tasks(service_name, with_completed_tasks=True) 
+ if t.state in FATAL_TERMINAL_TASK_STATES + ] ) - history_response.raise_for_status() - history = history_response.json() - service_history = [h for h in history["frameworks"] if h.get("name") == service_name] - if not service_history: - return 0 - - assert len(service_history) == 1 - - return sum(service_history[0].get(status, 0) for status in FATAL_TERMINAL_TASK_STATES) def check_task_count(service_name: str, expected_task_count: int) -> list: @@ -195,7 +190,9 @@ def get_task_ids(service_name: str, task_prefix: str = "") -> List[str]: return [t.id for t in get_service_tasks(service_name, task_prefix=task_prefix)] -def get_service_tasks(service_name: str, task_prefix: str = "", with_completed_tasks: bool = False) -> list: +def get_service_tasks( + service_name: str, task_prefix: str = "", with_completed_tasks: bool = False +) -> list: """Returns a list of task objects for tasks in the specified Mesos framework. : param service_name: The name of the Mesos framework whose task information should be retrieved. 
@@ -351,7 +348,7 @@ def _check_task_relaunched() -> None: def check_scheduler_relaunched( - service_name: str, old_scheduler_task_id: str, timeout_seconds: int = DEFAULT_TIMEOUT_SECONDS, + service_name: str, old_scheduler_task_id: str, timeout_seconds: int = DEFAULT_TIMEOUT_SECONDS ) -> None: """ This function checks for the relaunch of a task using the same matching as is @@ -464,7 +461,9 @@ def check_tasks_not_updated(service_name: str, prefix: str, old_task_ids: Iterab ), 'Tasks starting with "{}" were updated:{}'.format(prefix, task_sets) -def wait_for_active_framework(service_name: str, timeout_seconds: int = DEFAULT_TIMEOUT_SECONDS) -> None: +def wait_for_active_framework( + service_name: str, timeout_seconds: int = DEFAULT_TIMEOUT_SECONDS +) -> None: """ Waits until a framework with name `framework_name` is found and is active """ @@ -474,8 +473,16 @@ def wait_for_active_framework(service_name: str, timeout_seconds: int = DEFAULT_ wait_fixed=1000, stop_max_delay=timeout_seconds * 1000, retry_on_result=lambda res: not res ) def _wait_for_active_framework() -> bool: - return len(list(filter( - lambda fwk: fwk["name"] == service_name and fwk["active"], - sdk_cmd.cluster_request("GET", "/mesos/frameworks").json()["frameworks"] - ))) > 0 + return ( + len( + list( + filter( + lambda fwk: fwk["name"] == service_name and fwk["active"], + sdk_cmd.cluster_request("GET", "/mesos/frameworks").json()["frameworks"], + ) + ) + ) + > 0 + ) + _wait_for_active_framework() diff --git a/testing/security/transport_encryption.py b/testing/security/transport_encryption.py index 67b30354..55592dbe 100644 --- a/testing/security/transport_encryption.py +++ b/testing/security/transport_encryption.py @@ -14,8 +14,7 @@ def setup_service_account( - service_name: str, - service_account_secret: Optional[str] = None, + service_name: str, service_account_secret: Optional[str] = None ) -> Dict[str, Any]: """ Setup the service account for TLS. 
If the account or secret of the specified @@ -26,7 +25,7 @@ def setup_service_account( log.error("The setup of a service account requires DC/OS EE. service_name=%s", service_name) raise Exception("The setup of a service account requires DC/OS EE") - secret = service_name if service_account_secret is None else service_account_secret + secret = service_name + "-secret" if service_account_secret is None else service_account_secret service_account = "{}-service-account".format(service_name.replace("/", "")) @@ -69,10 +68,7 @@ def setup_service_account( return service_account_info -def cleanup_service_account( - service_name: str, - service_account_info: Dict[str, Any], -) -> None: +def cleanup_service_account(service_name: str, service_account_info: Dict[str, Any]) -> None: """ Clean up the specified service account. diff --git a/tools/ci/launch_cluster.sh b/tools/ci/launch_cluster.sh index 1df3876f..3d84c734 100755 --- a/tools/ci/launch_cluster.sh +++ b/tools/ci/launch_cluster.sh @@ -7,15 +7,24 @@ set -e LAUNCH_SUCCESS="False" RETRY_LAUNCH="True" +env + while [ x"${LAUNCH_SUCCESS}" == x"False" ]; do rm -f ${CLUSTER_INFO_FILE} # dcos-launch complains if the file already exists - dcos-launch create --config-path=${LAUNCH_CONFIG_FILE} --info-path=${CLUSTER_INFO_FILE} + + # The first parameter to wrap.sh is the name of the virtual environment the + # command should run in. The rest of the parameters is the command itself. + /venvs/wrap.sh dcos-launch dcos-launch create --config-path=${LAUNCH_CONFIG_FILE} --info-path=${CLUSTER_INFO_FILE} + if [ x"$RETRY_LAUNCH" == x"True" ]; then set +e else set -e fi - dcos-launch wait --info-path=${CLUSTER_INFO_FILE} 2>&1 | tee dcos-launch-wait-output.stdout + + # The first parameter to wrap.sh is the name of the virtual environment the + # command should run in. The rest of the parameters is the command itself. 
+ /venvs/wrap.sh dcos-launch dcos-launch wait --info-path=${CLUSTER_INFO_FILE} 2>&1 | tee dcos-launch-wait-output.stdout # Grep exits with an exit code of 1 if no lines are matched. We thus need to # disable exit on errors. @@ -35,8 +44,13 @@ while [ x"${LAUNCH_SUCCESS}" == x"False" ]; do RETRY_LAUNCH="False" set -e - # We need to wait for the current stack to be deleted - dcos-launch delete --info-path=${CLUSTER_INFO_FILE} + + # We need to wait for the current stack to be deleted. + # + # The first parameter to wrap.sh is the name of the virtual environment + # the command should run in. The rest of the parameters is the command + # itself. + /venvs/wrap.sh dcos-launch dcos-launch delete --info-path=${CLUSTER_INFO_FILE} rm -f ${CLUSTER_INFO_FILE} echo "Cluster creation failed. Retrying after 30 seconds" sleep 30 diff --git a/tools/ci/steps/check_json_files.py b/tools/ci/steps/check_json_files.py new file mode 100755 index 00000000..d2003c5c --- /dev/null +++ b/tools/ci/steps/check_json_files.py @@ -0,0 +1,37 @@ +#!/usr/bin/env python3 + +import collections +import difflib +import os.path +import json +import os +import fnmatch +import sys + + +framework_dir = os.getcwd() + "/" +path_list = list() +for path, subdirs, files in os.walk(framework_dir): + for name in files: + filtered_path = fnmatch.filter( + [os.path.join(path, name)], "*" + sys.argv[1] + "*universe*.json" + ) + if len(filtered_path) > 0: + path_list.extend(filtered_path) + +for path in path_list: + with open(path, "r") as source: + raw_data = [l.rstrip("\n") for l in source.readlines()] + formatted_data = [ + l + for l in json.dumps( + json.loads("".join(raw_data), object_pairs_hook=collections.OrderedDict), indent=2 + ).split("\n") + ] + diff = list( + difflib.unified_diff(raw_data, formatted_data, fromfile=path, tofile="formatted") + ) + if diff: + print("\n" + ("\n".join(diff))) + print("{} is not formatted correctly, see diff above".format(path)) + exit(1) diff --git a/tools/ci/test_runner.sh 
b/tools/ci/test_runner.sh index b3c0dab2..915215bd 100755 --- a/tools/ci/test_runner.sh +++ b/tools/ci/test_runner.sh @@ -10,7 +10,7 @@ export PACKAGE_REGISTRY_ENABLED export PACKAGE_REGISTRY_STUB_URL export DCOS_FILES_PATH -BUILD_TOOL_DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )" +BUILD_TOOL_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)" REPO_ROOT_DIR="${REPO_ROOT:-$1}" SINGLE_FRAMEWORK="True" @@ -66,6 +66,35 @@ else fi +function get_public_master_url() +{ + local cluster_description_file="$(mktemp)" + local attempts=5 + local master_ip + # We retry, since sometimes the cluster is created, but dcos-launch has intermittent problems describing it. + for attempt in $(seq 1 ${attempts}); do + # Careful to not use a pipeline! + # + # The first parameter to wrap.sh is the name of the virtual environment + # the command should run in. The rest of the parameters is the command + # itself. + if /venvs/wrap.sh dcos-launch dcos-launch describe --info-path="${REPO_ROOT_DIR}/cluster_info.json" > "${cluster_description_file}" && + master_ip=$(jq --raw-output --exit-status '.masters[0].public_ip' < "${cluster_description_file}") + then + echo "https://${master_ip}" + return 0 + else + echo "parsing output of [dcos-launch describe] failed (attempt ${attempt})" >&2 + local delay_sec=$((attempt*10)) + if [[ ${attempt} -lt ${attempts} ]]; then + echo "retrying in ${delay_sec} seconds..." >&2 + sleep ${delay_sec} + fi + fi + done + return 1 +} + # Now create a cluster if it doesn't exist. if [ -z "$CLUSTER_URL" ]; then echo "No DC/OS cluster specified. 
Attempting to create one now" @@ -73,8 +102,8 @@ if [ -z "$CLUSTER_URL" ]; then ${BUILD_TOOL_DIR}/launch_cluster.sh ${REPO_ROOT_DIR}/config.yaml ${REPO_ROOT_DIR}/cluster_info.json if [ -f ${REPO_ROOT_DIR}/cluster_info.json ]; then - export CLUSTER_URL=https://$(dcos-launch describe --info-path=${REPO_ROOT_DIR}/cluster_info.json | jq -r .masters[0].public_ip) - if [ -z $CLUSTER_URL ]; then + export CLUSTER_URL=$(get_public_master_url) + if [ -z "${CLUSTER_URL}" ]; then echo "Could not determine CLUSTER_URL" exit 1 fi @@ -173,7 +202,7 @@ echo "Finished integration tests at "`date` if [ -n "$CLUSTER_WAS_CREATED" ]; then echo "The DC/OS cluster $CLUSTER_URL was created. Please run" - echo "\t\$ dcos-launch delete --info-path=${CLUSTER_INFO_FILE}" + echo "\t\$ /venvs/wrap.sh dcos-launch dcos-launch delete --info-path=${CLUSTER_INFO_FILE}" echo "to remove the cluster." fi diff --git a/tools/distribution/UPDATING.md b/tools/distribution/UPDATING.md index 82a9fd92..ff91b43b 100644 --- a/tools/distribution/UPDATING.md +++ b/tools/distribution/UPDATING.md @@ -5,153 +5,13 @@ This framework is built using the [DC/OS Commons SDK](https://github.com/mesosph The parts of the SDK consumed consist of: * The SDK Java libraries including: * scheduler libraries - * executor libraries * testing libraries -* SDK artefacts including: - * The custom executor for use on DC/OS 1.9 clusters +* SDK artifacts including: * The `bootstrap` utility * CLI binaries for the three supported platforms * Build tooling * Testing utilities -## Preparation - -If this repository has never been updated in this way, then the following changes may be required: - -### Check `build.gradle` - -Check that `build.gradle` in the project root contains the following dependencies in addition to any others required: -``` -dependencies { - compile "mesosphere:scheduler:${dcosSDKVer}" - compile "mesosphere:executor:${dcosSDKVer}" - testCompile "mesosphere:testing:${dcosSDKVer}" -} -``` -as well as the following entry in 
the `ext` specification: -``` -ext { - dcosSDKVer = "" -} -``` -(where `` represents a version string such as `0.30.1`) - -Older versions of `build.gradle` contained the following dependencies and no entry in the `ext` specification: -* `compile "mesosphere:scheduler:"` -* `compile "mesosphere:executor:"` -* `testCompile "mesosphere:testing:"` - -Although this is supported in the current upgrade path, it is recommended that hese are changed to match the dependencies at the start of this section as this will result in a single line diff in the `build.gradle` file on update. - -### Check the `universe/resource.json` file - -#### URIs -In order to facilitate upgrades, the `universe/resource.json` file should contain the following entries in the `"uris"` section: -```json -"uris": { - "...": "...", - "bootstrap-zip": "https://downloads.mesosphere.com/dcos-commons/artifacts/{{dcos-sdk-version}}/bootstrap.zip", - "executor-zip": "http://downloads.mesosphere.com/dcos-commons/artifacts/{{dcos-sdk-version}}/executor.zip", - "...": "..." -} -``` -Note the use of the `{{dcos-skd-version}}` mustache template to replace an explicit version specification. 
- -#### CLIs - -In addition, if no custom CLI command are required, the `"cli"` section in the `universe/resource.json` can be replaced by: -```json -"cli":{ - "binaries":{ - "darwin":{ - "x86-64":{ - "contentHash":[ { "algo":"sha256", "value":"{{sha256:dcos-service-cli-darwin@https://downloads.mesosphere.com/dcos-commons/artifacts/{{dcos-sdk-version}}/SHA256SUMS}}" } ], - "kind":"executable", - "url":"https://downloads.mesosphere.com/dcos-commons/artifacts/{{dcos-sdk-version}}/dcos-service-cli-darwin" - } - }, - "linux":{ - "x86-64":{ - "contentHash":[ { "algo":"sha256", "value":"{{sha256:dcos-service-cli-linux@https://downloads.mesosphere.com/dcos-commons/artifacts/{{dcos-sdk-version}}/SHA256SUMS}}" } ], - "kind":"executable", - "url":"https://downloads.mesosphere.com/dcos-commons/artifacts/{{dcos-sdk-version}}/dcos-service-cli-linux" - } - }, - "windows":{ - "x86-64":{ - "contentHash":[ { "algo":"sha256", "value":"{{sha256:dcos-service-cli.exe@https://downloads.mesosphere.com/dcos-commons/artifacts/{{dcos-sdk-version}}/SHA256SUMS}}" } ], - "kind":"executable", - "url":"https://downloads.mesosphere.com/dcos-commons/artifacts/{{dcos-sdk-version}}/dcos-service-cli.exe" - } - } - } - } -``` -Meaning that the CLIs for the templated `{{dcos-sdk-version}}` are used directly instead of building these separately. - -## Updating - -### Clean the current working directory - -It is recommended that the update be performed in a **clean git repository**. 
Running the following commands should ensure this: - -**NOTE**: This is a destructive operation - -```bash -$ git checkout -b update-sdk-version-to- -$ git reset --hard HEAD -$ git clean -fdx -``` - -Now running `git status should yield: -```bash -$ git status -On branch update-sdk-version-to- -nothing to commit, working tree clean -``` - -### Perform the update - -Assuming the `build.gradle` and `resource.json` files have been updated accordingly, the update to a specific version of the SDK can be performed as follows: -```bash -$ docker pull mesosphere/dcos-commons:latest -$ docker run --rm -ti -v $(pwd):$(pwd) mesosphere/dcos-commons:latest init $(pwd) --update-sdk -``` - -Running a `git status` after this process should show something like: -```bash -$ git status -On branch update-sdk-version-to-0.41.0 -Changes not staged for commit: - (use "git add ..." to update what will be committed) - (use "git checkout -- ..." to discard changes in working directory) - - modified: build.gradle - modified: testing/sdk_auth.py - modified: testing/sdk_cmd.py - modified: testing/sdk_hosts.py - modified: testing/sdk_install.py - modified: testing/sdk_marathon.py - modified: testing/sdk_repository.py - modified: testing/sdk_security.py - modified: testing/sdk_upgrade.py - modified: testing/sdk_utils.py - modified: testing/security/transport_encryption.py - modified: tools/ci/init - modified: tools/release_builder.py - modified: tools/universe/package_builder.py - modified: tools/universe/package_manager.py - -no changes added to commit (use "git add" and/or "git commit -a") -``` -Note that the update procedure could also *delete* unneeded files. - -Check the differences in `build.gradle` and `tools/release_builder.py` to ensure that the `` is present in both files. - -Now add the changes to version control using the required git commants (`git add`, `git rm`). - -## Further steps - -* See the SDK release notes for any changes required when consuming the SKD. 
-* If the build process is heavily customized, it may be that additional changes will be required to the `build.sh` file in the repo. -* The API of the testing tools in `testing` could have changed, and any integration tests may need to be updted. Run `git diff testing` to check for any relevant changes. +For more details on the current update procedure, please refer to the section on updating of +the [DC/OS Commons SDK Git repository](https://github.com/mesosphere/dcos-commons) +at the version you want to update _to_. diff --git a/tools/distribution/copy-files b/tools/distribution/copy-files new file mode 100755 index 00000000..6913a26b --- /dev/null +++ b/tools/distribution/copy-files @@ -0,0 +1,140 @@ +#!/usr/bin/env python3 + +import logging +import argparse +import subprocess +import os +import re + +logging.basicConfig(level="INFO") + +LOGGER = logging.getLogger(__name__) + +DCOS_COMMONS_DIST_ROOT = os.environ.get("DCOS_COMMONS_DIST_ROOT", "/dcos-commons-dist") +DCOS_SDK_VERSION = "0.40.2" + + +def get_sdk_version(): + version_path = os.path.join(DCOS_COMMONS_DIST_ROOT, ".version") + if os.path.exists(version_path): + with open(version_path) as f: + version = f.read().strip().strip("'") + LOGGER.info("Read version: %s", version) + + if version: + DCOS_SDK_VERSION = version + + return os.environ.get("DCOS_SDK_VERSION", DCOS_SDK_VERSION) + + +def read_file(file_path: str) -> str: + LOGGER.info("Reading from %s", file_path) + with open(file_path, "r") as handle: + return handle.read() + + +def write_file(file_path: str, content: str) -> str: + LOGGER.info("Writing to %s", file_path) + with open(file_path, "w") as handle: + handle.write(content) + + +def copy_dist_file(filename: str, output_path: str): + """Copy a distribution file to the specified output path""" + source_file = os.path.join(DCOS_COMMONS_DIST_ROOT, filename) + + LOGGER.info("Copying %s to %s", source_file, output_path) + subprocess.check_output(["cp", source_file, output_path]) + + +def 
copy_dist_folder(folder: str, output_path: str, exclude: list = []): + """Copy a distribution folder to the specified output path""" + source_folder = os.path.join(DCOS_COMMONS_DIST_ROOT, folder.rstrip("/")) + + LOGGER.info("Copying %s to %s", source_folder, output_path) + cmd = ["rsync", "-avz", "--delete", ] + + if exclude: + for e in exclude: + cmd.extend(["--exclude={}".format(e)]) + + cmd.extend([source_folder, output_path]) + + subprocess.check_output(cmd) + + +def distribute_test_utils(output_path: str): + """Copies the required files into the target folders""" + + output_path = output_path.rstrip("/") + "/" + + files = ["conftest.py", + "run_container.sh", + "test.sh", + "TESTING.md", + "UPDATING.md", ] + + for f in files: + copy_dist_file(f, output_path) + + folders = {"testing": [], + "tools": ["tools/distribution", + "tools/ci/test_runner.sh", + "tools/ci/launch_cluster.sh", ]} + + for folder, exclude in folders.items(): + copy_dist_folder(folder, output_path, exclude) + + +def update_sdk(output_path: str, target_version: str): + build_gradle_path = os.path.join(output_path, "build.gradle") + + gradle_file_contents = read_file(build_gradle_path) + + gradle_file_contents = re.sub('dcosSDKVer = ".*?"', + 'dcosSDKVer = "{}"'.format(target_version), + gradle_file_contents) + + gradle_file_contents = re.sub(r'compile "mesosphere:scheduler:[\d\w\.\-]+"', + 'compile "mesosphere:scheduler:{}"'.format(target_version), + gradle_file_contents) + gradle_file_contents = re.sub(r'compile "mesosphere:executor:[\d\w\.\-]+"', + 'compile "mesosphere:executor:{}"'.format(target_version), + gradle_file_contents) + gradle_file_contents = re.sub(r'testCompile "mesosphere:testing:[\d\w\.\-]+"', + 'testCompile "mesosphere:testing:{}"'.format(target_version), + gradle_file_contents) + + write_file(build_gradle_path, gradle_file_contents) + + package_builder_path = os.path.join(output_path, "tools", "universe", "package_builder.py") + + package_builder_contents = 
read_file(package_builder_path) + + package_builder_contents = re.sub('_dcos_sdk_version = [\'"].*?[\'"]', + '_dcos_sdk_version = "{}"'.format(target_version), + package_builder_contents) + write_file(package_builder_path, package_builder_contents) + + LOGGER.info("Updated to SDK version %s", target_version) + + +def parse_args(): + parser = argparse.ArgumentParser(description="Init DC/OS test environment") + parser.add_argument('output_path', type=str, + help='The absolute path where the testing tools should be created') + parser.add_argument("--update-sdk", type=str, + help="Update the SDK in the target framework.") + return parser.parse_args() + + +def main(): + args = parse_args() + distribute_test_utils(args.output_path) + + if (args.update_sdk): + update_sdk(args.output_path, args.update_sdk) + + +if __name__ == "__main__": + main() diff --git a/tools/release_builder.py b/tools/release_builder.py index 470a89e9..92246703 100755 --- a/tools/release_builder.py +++ b/tools/release_builder.py @@ -13,6 +13,7 @@ import tempfile import universe import urllib.request +import urllib.error log = logging.getLogger(__name__) logging.basicConfig(level=logging.DEBUG, format="%(message)s") @@ -44,7 +45,28 @@ def get_package_name(stub_universe_url: str) -> str: ) package_name = name_match.group(1) - log.info("Got package name %s from stub universe URL") + + try: + universe_name = None + log.info("Parsing stub universe...") + with urllib.request.urlopen(stub_universe_url) as response: + data = json.loads(response.read()) + for package in data["packages"]: + if "name" in package: + if universe_name is not None and universe_name != package["name"]: + log.warning( + "More than one different package names have been found '{}' and '{}'".format( + universe_name, package["name"] + ) + ) + else: + universe_name = package["name"] + if universe_name is not None: + package_name = universe_name + except Exception as e: + log.warning("Something went wrong during content loading: 
{}".format(str(e))) + + log.info("Got package name '{}' from stub universe URL".format(package_name)) return package_name diff --git a/tools/universe/package_builder.py b/tools/universe/package_builder.py index 76a0ac48..d61c4780 100755 --- a/tools/universe/package_builder.py +++ b/tools/universe/package_builder.py @@ -17,6 +17,9 @@ logging.basicConfig(level=logging.DEBUG, format="%(message)s") _jre_url = "https://downloads.mesosphere.com/java/openjdk-jre-8u212b03-hotspot-linux-x64.tar.gz" +_scheduler_jre_url = ( + "https://downloads.mesosphere.com/java/openjdk-jre-11.0.3.7-hotspot-linux-x64.tar.gz" +) _libmesos_bundle_url = ( "https://downloads.mesosphere.com/libmesos-bundle/libmesos-bundle-1.14-beta.tar.gz" ) @@ -177,6 +180,7 @@ def _get_template_mapping_for_content(self, orig_content): "documentation-path": self._get_documentation_path(), "issues-path": self._get_issues_path(), "jre-url": _jre_url, + "scheduler-jre-url": _scheduler_jre_url, "libmesos-bundle-url": _libmesos_bundle_url, }