From 25fddd98da7d1f7708ea37b0e6905304f5fadc87 Mon Sep 17 00:00:00 2001 From: Jyoti Ranjan Mahapatra Date: Tue, 25 Sep 2018 00:33:43 -0700 Subject: [PATCH 01/16] dasinstructions --- .../install-data-analytics-studio.sh | 66 +++++++++++++++++++ 1 file changed, 66 insertions(+) create mode 100644 install-data-analytics-studio/install-data-analytics-studio.sh diff --git a/install-data-analytics-studio/install-data-analytics-studio.sh b/install-data-analytics-studio/install-data-analytics-studio.sh new file mode 100644 index 0000000..7077f67 --- /dev/null +++ b/install-data-analytics-studio/install-data-analytics-studio.sh @@ -0,0 +1,66 @@ +#!/bin/sh +set -e +set -x +if [ -z $(sudo ambari-server status | grep -o "Ambari Server running") ] +then + echo "${HOSTNAME} : Ambari is not running. Exiting" + exit 0 +else + echo "${HOSTNAME}: Ambari is running. Proceed ahead." +fi + +sudo echo "deb http://s3.amazonaws.com/dev.hortonworks.com/DAS/ubuntu16/1.x/BUILDS/1.0.1.1-13 DAS main" > ~/DAS.list +sudo cp ~/DAS.list /etc/apt/sources.list.d && sudo apt-get update +sudo /usr/bin/apt-get -o Dpkg::Options::=--force-confdef --allow-unauthenticated --assume-yes install data-analytics-studio-lite +wget http://s3.amazonaws.com/dev.hortonworks.com/DAS/ubuntu16/1.x/BUILDS/1.0.1.1-13/tars/data_analytics_studio_lite/data-analytics-studio-mpack-1.0.1.1.0.1.1-13.tar.gz -O /tmp/data-analytics-studio-mpack.tar.gz +sudo ambari-server install-mpack --mpack=/tmp/data-analytics-studio-mpack.tar.gz +sudo ambari-server restart + +CLUSTERNAME=$(echo -e "import hdinsight_common.ClusterManifestParser as ClusterManifestParser\nprint ClusterManifestParser.parse_local_manifest().deployment.cluster_name" | python) +echo "Cluster Name=$CLUSTERNAME" +USERID=$(echo -e "import hdinsight_common.Constants as Constants\nprint Constants.AMBARI_WATCHDOG_USERNAME" | python) +echo "USERID=$USERID" +PASSWD=$(echo -e "import hdinsight_common.ClusterManifestParser as ClusterManifestParser\nimport hdinsight_common.Constants as Constants\nimport base64\nbase64pwd = ClusterManifestParser.parse_local_manifest().ambari_users.usersmap[Constants.AMBARI_WATCHDOG_USERNAME].password\nprint base64.b64decode(base64pwd)" | python) +TAG=$(cat /proc/sys/kernel/random/uuid) + +#Add service_name +curl -u $USERID:$PASSWD -H "X-Requested-By: ambari" -i -X POST -d '{"ServiceInfo":{"service_name":"DATA_ANALYTICS_STUDIO"}}' https://$CLUSTERNAME.azurehdinsight.net/api/v1/clusters/$CLUSTERNAME/services +#Add components +curl -u $USERID:$PASSWD -H "X-Requested-By: ambari" -i -X POST -d '' https://$CLUSTERNAME.azurehdinsight.net/api/v1/clusters/$CLUSTERNAME/services/DATA_ANALYTICS_STUDIO/components/DATA_ANALYTICS_STUDIO_WEBAPP +curl -u $USERID:$PASSWD -H "X-Requested-By: ambari" -i -X POST -d '' https://$CLUSTERNAME.azurehdinsight.net/api/v1/clusters/$CLUSTERNAME/services/DATA_ANALYTICS_STUDIO/components/DATA_ANALYTICS_STUDIO_EVENT_PROCESSOR +#Add configs +curl -u $USERID:$PASSWD -H "X-Requested-By: ambari" -i -X POST -d '{"type": "data_analytics_studio-database", "tag": "$TAG","properties" : { "data_analytics_studio_database_port": "5432","data_analytics_studio_database_username": "das","data_analytics_studio_database_host": "","das_autocreate_db":"true", "pg_hba_conf_content": "local all {{data_analytics_studio_database_username}} md5\nhost all {{data_analytics_studio_database_username}} 0.0.0.0/0 md5\nhost all {{data_analytics_studio_database_username}} ::/0 md5\n\nlocal all postgres ident", "postgresql_conf_content": "listen_addresses = '\''*'\''\nport = 
{{data_analytics_studio_database_port}}\nmax_connections = 100\nshared_buffers = 128MB\ndynamic_shared_memory_type = posix\nlog_destination = '\''stderr'\''\nlogging_collector = on\nlog_directory = '\''pg_log'\''\nlog_filename = '\''postgresql-%a.log'\''\nlog_truncate_on_rotation = on\nlog_rotation_age = 1d\nlog_rotation_size = 0\nlog_line_prefix = '\''< %m > '\''\nlog_timezone = '\''UTC'\''\ndatestyle = '\''iso, mdy'\''\ntimezone = '\''UTC'\''\nlc_messages = '\''en_US.UTF-8'\''\nlc_monetary = '\''en_US.UTF-8'\''\nlc_numeric = '\''en_US.UTF-8'\''\nlc_time = '\''en_US.UTF-8'\''\ndefault_text_search_config = '\''pg_catalog.english'\''\n"}}' https://$CLUSTERNAME.azurehdinsight.net/api/v1/clusters/$CLUSTERNAME/configurations +curl -u $USERID:$PASSWD -H "X-Requested-By: ambari" -i -X POST -d '{"type": "data_analytics_studio-logsearch-conf", "tag": "$TAG", "properties" : { "component_mappings":"DATA_ANALYTICS_STUDIO_WEBAPP:data_analytics_studio_webapp,data_analytics_studio_webapp_access;DATA_ANALYTICS_STUDIO_EVENT_PROCESSOR:data_analytics_studio_event_processor,data_analytics_studio_event_processor_access","content" : "{ \"input\":[ { \"type\":\"data_analytics_studio_webapp\", \"rowtype\":\"service\", \"path\":\"{{default('\''/configurations/data_analytics_studio-env/data_analytics_studio_log_dir'\'','\''/var/log/das'\'')}}/das-webapp.log\" }, { \"type\": \"data_analytics_studio_event_processor\", \"rowtype\":\"service\", \"path\":\"{{default('\''/configurations/data_analytics_studio-env/data_analytics_studio_log_dir'\'', '\''/var/log/das'\'')}}/event-processor.log\" } ], \"filter\":[ { \"filter\":\"grok\", \"conditions\":{ \"fields\":{ \"type\":[ \"data_analytics_studio_webapp\", \"data_analytics_studio_event_processor\" ] } }, \"log4j_format\":\"\", \"multiline_pattern\":\"^(%{LOGLEVEL:level})\", \"message_pattern\":\"(?m)^%{LOGLEVEL:level}%{SPACE}\\\\[%{TIMESTAMP_ISO8601:logtime}\\\\]%{SPACE}%{JAVACLASS:logger_name}:%{SPACE}%{GREEDYDATA:log_message}\", \"post_map_values\":{ \"logtime\":{ \"map_date\":{ \"target_date_pattern\":\"yyyy-MM-dd HH:mm:ss,SSS\" } } } } ] }","service_name": "Data Analytics Studio"}}' https://$CLUSTERNAME.azurehdinsight.net/api/v1/clusters/$CLUSTERNAME/configurations +curl -u $USERID:$PASSWD -H "X-Requested-By: ambari" -i -X POST -d '{"type": "data_analytics_studio-webapp-properties", "tag": "$TAG", "properties" : { "data_analytics_studio_webapp_server_protocol": "http","data_analytics_studio_webapp_smartsense_id": "das-smartsense-id","data_analytics_studio_webapp_server_port": "30800","content": "{ \"logging\": { \"level\": \"INFO\", \"loggers\": { \"com.hortonworks.hivestudio\": \"DEBUG\" }, \"appenders\": [ { \"type\": \"file\", \"currentLogFilename\": \"{{data_analytics_studio_log_dir}}/das-webapp.log\", \"archivedLogFilenamePattern\": \"{{data_analytics_studio_log_dir}}/das-webapp-%i.log.gz\", \"archivedFileCount\": 5, \"maxFileSize\": \"1GB\" } ] }, \"jerseyClient\":{ }, \"database\": { \"driverClass\": \"org.postgresql.Driver\", \"url\": \"{{data_analytics_studio_database_jdbc_url}}\", \"user\": \"{{data_analytics_studio_database_username}}\", \"password\": \"{{data_analytics_studio_database_password}}\", \"properties\": { } }, \"flyway\": { \"schemas\": [\"das\"], \"locations\": [ \"db/migrate/common\", \"db/migrate/prod\" ] }, \"server\": { \"requestLog\": { \"appenders\": [ { \"type\": \"file\", \"currentLogFilename\": \"{{data_analytics_studio_log_dir}}/das-webapp-access.log\", \"archivedLogFilenamePattern\": 
\"{{data_analytics_studio_log_dir}}/das-webapp-access-%i.log.gz\", \"archivedFileCount\": 5, \"maxFileSize\": \"1GB\" } ] }, \"applicationConnectors\": [ { {% if data_analytics_studio_ssl_enabled %} \"keyStorePath\": \"{{data_analytics_studio_webapp_keystore_file}}\", \"keyStorePassword\": \"{{data_analytics_studio_webapp_keystore_password}}\", {# \"validateCerts\": true, #} {% endif %} \"type\": \"{{data_analytics_studio_webapp_server_protocol}}\", \"port\": {{data_analytics_studio_webapp_server_port}} } ], \"adminConnectors\": [ { {% if data_analytics_studio_ssl_enabled %} \"keyStorePath\": \"{{data_analytics_studio_webapp_keystore_file}}\", \"keyStorePassword\": \"{{data_analytics_studio_webapp_keystore_password}}\", {# \"validateCerts\": true, #} {% endif %} \"type\": \"{{data_analytics_studio_webapp_server_protocol}}\", \"port\": {{data_analytics_studio_webapp_admin_port}} } ] }, \"akka\": { \"properties\": { \"akka.loglevel\": \"INFO\", \"akka.stdout-loglevel\": \"INFO\", \"akka.actor.jdbc-connector-dispatcher.fork-join-executor.parallelism-factor\": 5.0, \"akka.actor.result-dispatcher.fork-join-executor.parallelism-factor\": 10.0, \"akka.actor.misc-dispatcher.fork-join-executor.parallelism-factor\": 5.0 } }, \"gaConfiguration\": { \"enabled\": true, \"identifier\": \"UA-22950817-34\" }, \"serviceConfigDirectory\" : \"/etc/das/conf/\", \"environment\": \"production\", \"smartsenseId\": \"{{data_analytics_studio_webapp_smartsense_id}}\", \"authConfig\": { \"enabled\": {{data_analytics_studio_webapp_auth_enabled}}, \"appUserName\": \"{{data_analytics_studio_user}}\", \"adminUsers\": \"{{data_analytics_studio_admin_users}}\", \"serviceAuthType\": \"{{data_analytics_studio_webapp_service_auth_type}}\", \"serviceKeytab\": \"{{data_analytics_studio_webapp_service_keytab}}\", \"servicePrincipal\": \"{{data_analytics_studio_webapp_service_principal}}\", \"knoxSSOEnabled\": {{data_analytics_studio_webapp_knox_sso_enabled}}, \"knoxSSOUrl\": \"{{data_analytics_studio_webapp_knox_sso_url}}\", \"knoxPublicKey\": \"{{data_analytics_studio_webapp_knox_publickey}}\", \"knoxCookieName\": \"{{data_analytics_studio_webapp_knox_cookiename}}\", \"knoxUrlParamName\": \"{{data_analytics_studio_webapp_knox_url_query_param}}\", \"knoxUserAgent\": \"{{data_analytics_studio_webapp_knox_useragent}}\" }}","data_analytics_studio_webapp_$USERID_port": "30801"}}' https://$CLUSTERNAME.azurehdinsight.net/api/v1/clusters/$CLUSTERNAME/configurations +curl -u $USERID:$PASSWD -H "X-Requested-By: ambari" -i -X POST -d '{"type": "data_analytics_studio-event_processor-properties", "tag": "$TAG", "properties" : { "data_analytics_studio_event_processor_admin_server_port": "30901","data_analytics_studio_event_processor_server_protocol": "http","content": "{\n \"logging\": {\n \"level\": \"INFO\",\n \"loggers\": {\n \"com.hortonworks.hivestudio\": \"DEBUG\"\n },\n \"appenders\": [\n {\n \"type\": \"file\",\n \"currentLogFilename\": \"{{data_analytics_studio_log_dir}}/event-processor.log\",\n \"archivedLogFilenamePattern\": \"{{data_analytics_studio_log_dir}}/event-processor-%i.log.gz\",\n \"archivedFileCount\": 5,\n \"maxFileSize\": \"1GB\"\n }\n ]\n },\n \"jerseyClient\": {\n \"timeout\": \"240s\",\n \"connectionTimeout\": \"2s\"\n },\n \"database\": {\n \"driverClass\": \"org.postgresql.Driver\",\n \"url\": \"{{data_analytics_studio_database_jdbc_url}}\",\n \"user\": \"{{data_analytics_studio_database_username}}\",\n \"password\": \"{{data_analytics_studio_database_password}}\",\n \"properties\": {\n }\n },\n \"server\": {\n 
\"requestLog\": {\n \"appenders\": [\n {\n \"type\": \"file\",\n \"currentLogFilename\": \"{{data_analytics_studio_log_dir}}/event-processor-access.log\",\n \"archivedLogFilenamePattern\": \"{{data_analytics_studio_log_dir}}/event-processor-access-%i.log.gz\",\n \"archivedFileCount\": 5,\n \"maxFileSize\": \"1GB\"\n }\n ]\n },\n \"applicationConnectors\": [\n {\n {% if data_analytics_studio_ssl_enabled %}\n \"keyStorePath\": \"{{data_analytics_studio_event_processor_keystore_file}}\",\n \"keyStorePassword\": \"{{data_analytics_studio_event_processor_keystore_password}}\",\n {# \"validateCerts\": true, #}\n {% endif %}\n \"type\": \"{{data_analytics_studio_event_processor_server_protocol}}\",\n \"port\": {{data_analytics_studio_event_processor_server_port}}\n }\n ],\n \"adminConnectors\": [\n {\n {% if data_analytics_studio_ssl_enabled %}\n \"keyStorePath\": \"{{data_analytics_studio_event_processor_keystore_file}}\",\n \"keyStorePassword\": \"{{data_analytics_studio_event_processor_keystore_password}}\",\n {# \"validateCerts\": true, #}\n {% endif %}\n \"type\": \"{{data_analytics_studio_event_processor_server_protocol}}\",\n \"port\": {{data_analytics_studio_event_processor_admin_server_port}}\n }\n ]\n },\n \"akka\": {\n \"properties\": {\n \"akka.loglevel\": \"INFO\",\n \"akka.stdout-loglevel\": \"INFO\",\n \"akka.loggers.0\": \"akka.event.slf4j.Slf4jLogger\"\n }\n },\n \"authConfig\": {\n \"enabled\": {{data_analytics_studio_event_processor_auth_enabled}},\n \"appUserName\": \"{{data_analytics_studio_user}}\",\n \"serviceAuthType\": \"{{data_analytics_studio_event_processor_service_auth_type}}\",\n \"serviceKeytab\": \"{{data_analytics_studio_event_processor_service_keytab}}\",\n \"servicePrincipal\": \"{{data_analytics_studio_event_processor_service_principal}}\"\n },\n \"event-processing\": {\n \"hive.hook.proto.base-directory\": \"{{data_analytics_studio_event_processor_hive_base_dir}}\",\n \"tez.history.logging.proto-base-dir\": \"{{data_analytics_studio_event_processor_tez_base_dir}}\",\n \"meta.info.sync.service.delay.millis\": 5000,\n \"actor.initialization.delay.millis\": 20000,\n \"close.folder.delay.millis\": 600000,\n \"reread.event.max.retries\": -1,\n \"reporting.scheduler.initial.delay.millis\": 30000,\n \"reporting.scheduler.interval.delay.millis\": 300000,\n \"reporting.scheduler.weekly.initial.delay.millis\": 60000,\n \"reporting.scheduler.weekly.interval.delay.millis\": 600000,\n \"reporting.scheduler.monthly.initial.delay.millis\": 90000,\n \"reporting.scheduler.monthly.interval.delay.millis\": 900000,\n \"reporting.scheduler.quarterly.initial.delay.millis\": 120000,\n \"reporting.scheduler.quarterly.interval.delay.millis\": 1200000\n },\n \"serviceConfigDirectory\": \"/etc/das/conf/\",\n \"environment\": \"production\"\n}","data_analytics_studio_event_processor_server_port": "30900"}}' https://$CLUSTERNAME.azurehdinsight.net/api/v1/clusters/$CLUSTERNAME/configurations +curl -u $USERID:$PASSWD -H "X-Requested-By: ambari" -i -X POST -d '{"type": "data_analytics_studio-event_processor-env", "tag": "$TAG","properties" : { "content": "#!/usr/bin/env bash\n\n#Do NOT edit Log and Pid dir, modify Advanced data_analytics_studio-env properties instead\nexport DAS_EP_PID_DIR=\"{{data_analytics_studio_pid_dir}}\"\nexport DAS_EP_LOG_DIR=\"{{data_analytics_studio_log_dir}}\"\nexport JAVA_OPTS=\"{{data_analytics_studio_ep_jvm_opts}}\"\nexport ADDITIONAL_CLASSPATH=\"{{data_analytics_studio_ep_additional_classpath}}\"\n\nexport DEBUG=\"false\"\n#export DEBUG_PORT="}}' 
https://$CLUSTERNAME.azurehdinsight.net/api/v1/clusters/$CLUSTERNAME/configurations +curl -u $USERID:$PASSWD -H "X-Requested-By: ambari" -i -X POST -d '{"type": "data_analytics_studio-properties", "tag": "$TAG","properties" : { "hive_session_params": "","content": "application.name=das-webapp\nhive.session.params={{data_analytics_studio_hive_session_params}}\ndas.jobs.dir=/user/{{data_analytics_studio_user}}/jobs\ndas.api.url={{data_analytics_studio_webapp_server_url}}\nuse.hive.interactive.mode=false"}}' https://$CLUSTERNAME.azurehdinsight.net/api/v1/clusters/$CLUSTERNAME/configurations +curl -u $USERID:$PASSWD -H "X-Requested-By: ambari" -i -X POST -d '{"type": "data_analytics_studio-webapp-env", "tag": "$TAG","properties" : { "content": "#!/usr/bin/env bash\n\n#Do NOT edit Log and Pid dir, modify Advanced data_analytics_studio-env properties instead\nexport DAS_PID_DIR=\"{{data_analytics_studio_pid_dir}}\"\nexport DAS_LOG_DIR=\"{{data_analytics_studio_log_dir}}\"\nexport JAVA_OPTS=\"{{data_analytics_studio_webapp_jvm_opts}}\"\nexport ADDITIONAL_CLASSPATH=\"{{data_analytics_studio_webapp_additional_classpath}}\"\n\nexport DEBUG=\"false\"\n#export DEBUG_PORT="}}' https://$CLUSTERNAME.azurehdinsight.net/api/v1/clusters/$CLUSTERNAME/configurations +curl -u $USERID:$PASSWD -H "X-Requested-By: ambari" -i -X POST -d '{"type": "data_analytics_studio-security-site", "tag": "$TAG","properties" : { "webapp_keystore_file": "","authentication_enabled": "false","ssl_enabled": "false","knox_cookiename": "hadoop-jwt","knox_sso_enabled": "false","knox_url_query_param": "originalUrl","knox_useragent": "Mozilla,Chrome","event_processor_keystore_file": "","knox_publickey": "","admin_users": "hive","knox_sso_url": ""}}' https://$CLUSTERNAME.azurehdinsight.net/api/v1/clusters/$CLUSTERNAME/configurations +curl -u $USERID:$PASSWD -H "X-Requested-By: ambari" -i -X POST -d '{"type": "data_analytics_studio-env", "tag": "$TAG","properties" : { "data_analytics_studio_pid_dir": "/usr/das/1.0.1.0-11/data_analytics_studio","data_analytics_studio_log_dir":"/var/log/das","ep_jvm_opts": "-Xmx1024m","webapp_jvm_opts": "-Xmx1024m","webapp_additional_classpath" : "/usr/hdp/current/hadoop-client/*:/usr/lib/rubix/*","ep_additional_classpath" : "/usr/hdp/current/hadoop-client/*:/usr/lib/rubix/*"}}' https://$CLUSTERNAME.azurehdinsight.net/api/v1/clusters/$CLUSTERNAME/configurations +curl -u $USERID:$PASSWD -H "X-Requested-By: ambari" -i -X PUT -d '{ "Clusters" : {"desired_configs": {"type": "data_analytics_studio-database", "tag" : "$TAG" }}}' https://$CLUSTERNAME.azurehdinsight.net/api/v1/clusters/$CLUSTERNAME +curl -u $USERID:$PASSWD -H "X-Requested-By: ambari" -i -X PUT -d '{ "Clusters" : {"desired_configs": {"type": "data_analytics_studio-logsearch-conf", "tag" : "$TAG" }}}' https://$CLUSTERNAME.azurehdinsight.net/api/v1/clusters/$CLUSTERNAME +curl -u $USERID:$PASSWD -H "X-Requested-By: ambari" -i -X PUT -d '{ "Clusters" : {"desired_configs": {"type": "data_analytics_studio-webapp-properties", "tag" : "$TAG" }}}' https://$CLUSTERNAME.azurehdinsight.net/api/v1/clusters/$CLUSTERNAME +curl -u $USERID:$PASSWD -H "X-Requested-By: ambari" -i -X PUT -d '{ "Clusters" : {"desired_configs": {"type": "data_analytics_studio-event_processor-properties", "tag" : "$TAG" }}}' https://$CLUSTERNAME.azurehdinsight.net/api/v1/clusters/$CLUSTERNAME +curl -u $USERID:$PASSWD -H "X-Requested-By: ambari" -i -X PUT -d '{ "Clusters" : {"desired_configs": {"type": "data_analytics_studio-event_processor-env", "tag" : "$TAG" }}}' 
https://$CLUSTERNAME.azurehdinsight.net/api/v1/clusters/$CLUSTERNAME +curl -u $USERID:$PASSWD -H "X-Requested-By: ambari" -i -X PUT -d '{ "Clusters" : {"desired_configs": {"type": "data_analytics_studio-properties", "tag" : "$TAG" }}}' https://$CLUSTERNAME.azurehdinsight.net/api/v1/clusters/$CLUSTERNAME +curl -u $USERID:$PASSWD -H "X-Requested-By: ambari" -i -X PUT -d '{ "Clusters" : {"desired_configs": {"type": "data_analytics_studio-webapp-env", "tag" : "$TAG" }}}' https://$CLUSTERNAME.azurehdinsight.net/api/v1/clusters/$CLUSTERNAME +curl -u $USERID:$PASSWD -H "X-Requested-By: ambari" -i -X PUT -d '{ "Clusters" : {"desired_configs": {"type": "data_analytics_studio-security-site", "tag" : "$TAG" }}}' https://$CLUSTERNAME.azurehdinsight.net/api/v1/clusters/$CLUSTERNAME +curl -u $USERID:$PASSWD -H "X-Requested-By: ambari" -i -X PUT -d '{ "Clusters" : {"desired_configs": {"type": "data_analytics_studio-env", "tag" : "$TAG" }}}' https://$CLUSTERNAME.azurehdinsight.net/api/v1/clusters/$CLUSTERNAME +#assign hosts +NODENAME=$(curl -u $USERID:$PASSWD --silent -H "X-Requested-By: ambari" -X GET https://$CLUSTERNAME.azurehdinsight.net/api/v1/clusters/$CLUSTERNAME/hosts | grep -i "hn0" | grep -i "host_name" | grep -o hn0.*net) +curl -u $USERID:$PASSWD -H "X-Requested-By: ambari" -i -X POST -d '{"host_components" : [{"HostRoles":{"component_name":"DATA_ANALYTICS_STUDIO_WEBAPP"}}] }' https://$CLUSTERNAME.azurehdinsight.net/api/v1/clusters/$CLUSTERNAME/hosts?Hosts/host_name=$NODENAME +curl -u $USERID:$PASSWD -H "X-Requested-By: ambari" -i -X POST -d '{"host_components" : [{"HostRoles":{"component_name":"DATA_ANALYTICS_STUDIO_EVENT_PROCESSOR"}}] }' https://$CLUSTERNAME.azurehdinsight.net/api/v1/clusters/$CLUSTERNAME/hosts?Hosts/host_name=$NODENAME +#start service +curl -u $USERID:$PASSWD -H "X-Requested-By: ambari" -i -X PUT -d '{"ServiceInfo": {"state" : "INSTALLED"}, "RequestInfo": {"context": "Installed Data Analytics Studio"}}' https://$CLUSTERNAME.azurehdinsight.net/api/v1/clusters/$CLUSTERNAME/services/DATA_ANALYTICS_STUDIO + +sleep 10s + +n=0 +until [ $n -gt 3 ] || [ "$(curl -u $USERID:$PASSWD -H "X-Requested-By: ambari" --silent -X GET https://$CLUSTERNAME.azurehdinsight.net/api/v1/clusters/$CLUSTERNAME/services/DATA_ANALYTICS_STUDIO)" = '"state" : "INSTALLED"' ] +do + n=$[$n+1] + echo "Data Analytics Studio not yet installed" + sleep 20s +done +curl -u $USERID:$PASSWD -H "X-Requested-By: ambari" -i -X PUT -d '{"ServiceInfo": {"state" : "STARTED"}, "RequestInfo": {"context": "Start Data Analytics Studio"}}' https://$CLUSTERNAME.azurehdinsight.net/api/v1/clusters/$CLUSTERNAME/services/DATA_ANALYTICS_STUDIO \ No newline at end of file From 1b924dc97725e4c0b034b189092b4ec6cd60ef60 Mon Sep 17 00:00:00 2001 From: Jyoti Ranjan Date: Tue, 25 Sep 2018 00:36:19 -0700 Subject: [PATCH 02/16] Create README.md --- install-data-analytics-studio/README.md | 1 + 1 file changed, 1 insertion(+) create mode 100644 install-data-analytics-studio/README.md diff --git a/install-data-analytics-studio/README.md b/install-data-analytics-studio/README.md new file mode 100644 index 0000000..8b13789 --- /dev/null +++ b/install-data-analytics-studio/README.md @@ -0,0 +1 @@ + From e94de36720746cff0dd6effd0d2519b1dc6f0ed2 Mon Sep 17 00:00:00 2001 From: Jyoti Ranjan Date: Tue, 25 Sep 2018 00:57:40 -0700 Subject: [PATCH 03/16] Update README.md --- install-data-analytics-studio/README.md | 15 +++++++++++++++ 1 file changed, 15 insertions(+) diff --git a/install-data-analytics-studio/README.md 
b/install-data-analytics-studio/README.md index 8b13789..7d83f5e 100644 --- a/install-data-analytics-studio/README.md +++ b/install-data-analytics-studio/README.md @@ -1 +1,16 @@ +###Instructions to install Data Analytics Studio on HDI 4.0 + +####Prerequisites +1. A precreated HDI 4 cluster with hive component. +2. Make sure the tez configs(tez-site) have been modified to include ```tez.history.logging.proto-base-dir=/warehouse/tablespace/external/hive/sys.db``` +Restart Tez after modifying the configuration. +3. Make sure the hive configs(hive-site) have been modified to include +```hive.exec.failure.hooks=org.apache.hadoop.hive.ql.hooks.HiveProtoLoggingHook``` ```hive.exec.post.hooks=org.apache.hadoop.hive.ql.hooks.HiveProtoLoggingHook``` ```hive.exec.pre.hooks=org.apache.hadoop.hive.ql.hooks.HiveProtoLoggingHook``` +Restart Hive after modifying the configurations. + +####Installation Instructions +Execute [install-data-analytics-studio.sh](install-data-analytics-studio.sh) on both head nodes. ```sudo install-data-analytics-studio.sh```. +If you wish to run the script using a custom script action, follow [this](https://docs.microsoft.com/en-us/azure/hdinsight/hdinsight-hadoop-customize-cluster-linux). +After the script succeeds, head to ambari view and refresh. On the left panel observe a new service: ```Data Analytics Studio```. +Use the Quick Links to navigate to the UI, or append /das/ at the end of the cluster name, e.g https://clustername.azurehdinsight.net/das/ From 182b9c666a519d43b4b021b5f643722b8dbe70ce Mon Sep 17 00:00:00 2001 From: Jyoti Ranjan Date: Tue, 25 Sep 2018 11:31:44 -0700 Subject: [PATCH 04/16] Update README.md --- install-data-analytics-studio/README.md | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/install-data-analytics-studio/README.md b/install-data-analytics-studio/README.md index 7d83f5e..d5652de 100644 --- a/install-data-analytics-studio/README.md +++ b/install-data-analytics-studio/README.md @@ -10,7 +10,7 @@ Restart Tez after modifying the configuration. Restart Hive after modifying the configurations. ####Installation Instructions -Execute [install-data-analytics-studio.sh](install-data-analytics-studio.sh) on both head nodes. ```sudo install-data-analytics-studio.sh```. -If you wish to run the script using a custom script action, follow [this](https://docs.microsoft.com/en-us/azure/hdinsight/hdinsight-hadoop-customize-cluster-linux). +Execute [install-data-analytics-studio.sh](install-data-analytics-studio.sh) on both head nodes. ```sudo install-data-analytics-studio.sh```. Make sure to execute the script on all head nodes. The script will identify the right node to install the package. +If you wish to run the script using a custom script action, follow [this](https://docs.microsoft.com/en-us/azure/hdinsight/hdinsight-hadoop-customize-cluster-linux). Make sure to execute the script action on head nodes. After the script succeeds, head to ambari view and refresh. On the left panel observe a new service: ```Data Analytics Studio```. 
Use the Quick Links to navigate to the UI, or append /das/ at the end of the cluster name, e.g https://clustername.azurehdinsight.net/das/ From b18fddbaa1eed050a98bae786296bc70ee5fb934 Mon Sep 17 00:00:00 2001 From: Jyoti Ranjan Date: Wed, 26 Sep 2018 08:58:22 -0700 Subject: [PATCH 05/16] Update install-data-analytics-studio.sh --- .../install-data-analytics-studio.sh | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/install-data-analytics-studio/install-data-analytics-studio.sh b/install-data-analytics-studio/install-data-analytics-studio.sh index 7077f67..de317df 100644 --- a/install-data-analytics-studio/install-data-analytics-studio.sh +++ b/install-data-analytics-studio/install-data-analytics-studio.sh @@ -52,7 +52,7 @@ NODENAME=$(curl -u $USERID:$PASSWD --silent -H "X-Requested-By: ambari" -X GET h curl -u $USERID:$PASSWD -H "X-Requested-By: ambari" -i -X POST -d '{"host_components" : [{"HostRoles":{"component_name":"DATA_ANALYTICS_STUDIO_WEBAPP"}}] }' https://$CLUSTERNAME.azurehdinsight.net/api/v1/clusters/$CLUSTERNAME/hosts?Hosts/host_name=$NODENAME curl -u $USERID:$PASSWD -H "X-Requested-By: ambari" -i -X POST -d '{"host_components" : [{"HostRoles":{"component_name":"DATA_ANALYTICS_STUDIO_EVENT_PROCESSOR"}}] }' https://$CLUSTERNAME.azurehdinsight.net/api/v1/clusters/$CLUSTERNAME/hosts?Hosts/host_name=$NODENAME #start service -curl -u $USERID:$PASSWD -H "X-Requested-By: ambari" -i -X PUT -d '{"ServiceInfo": {"state" : "INSTALLED"}, "RequestInfo": {"context": "Installed Data Analytics Studio"}}' https://$CLUSTERNAME.azurehdinsight.net/api/v1/clusters/$CLUSTERNAME/services/DATA_ANALYTICS_STUDIO +curl -u $USERID:$PASSWD -H "X-Requested-By: ambari" -i -X PUT -d '{"ServiceInfo": {"state" : "INSTALLED"}, "RequestInfo": {"context": "Install Data Analytics Studio"}}' https://$CLUSTERNAME.azurehdinsight.net/api/v1/clusters/$CLUSTERNAME/services/DATA_ANALYTICS_STUDIO sleep 10s @@ -63,4 +63,4 @@ do echo "Data Analytics Studio not yet installed" sleep 20s done -curl -u $USERID:$PASSWD -H "X-Requested-By: ambari" -i -X PUT -d '{"ServiceInfo": {"state" : "STARTED"}, "RequestInfo": {"context": "Start Data Analytics Studio"}}' https://$CLUSTERNAME.azurehdinsight.net/api/v1/clusters/$CLUSTERNAME/services/DATA_ANALYTICS_STUDIO \ No newline at end of file +curl -u $USERID:$PASSWD -H "X-Requested-By: ambari" -i -X PUT -d '{"ServiceInfo": {"state" : "STARTED"}, "RequestInfo": {"context": "Start Data Analytics Studio"}}' https://$CLUSTERNAME.azurehdinsight.net/api/v1/clusters/$CLUSTERNAME/services/DATA_ANALYTICS_STUDIO From 4e83d6f7adac60092bce0da79fb437439a759859 Mon Sep 17 00:00:00 2001 From: Jyoti Ranjan Date: Wed, 26 Sep 2018 12:46:37 -0700 Subject: [PATCH 06/16] Update install-data-analytics-studio.sh --- .../install-data-analytics-studio.sh | 39 ++++++++++++++----- 1 file changed, 30 insertions(+), 9 deletions(-) diff --git a/install-data-analytics-studio/install-data-analytics-studio.sh b/install-data-analytics-studio/install-data-analytics-studio.sh index de317df..a2e26fe 100644 --- a/install-data-analytics-studio/install-data-analytics-studio.sh +++ b/install-data-analytics-studio/install-data-analytics-studio.sh @@ -1,6 +1,7 @@ #!/bin/sh set -e set -x + if [ -z $(sudo ambari-server status | grep -o "Ambari Server running") ] then echo "${HOSTNAME} : Ambari is not running. Exiting" @@ -8,7 +9,6 @@ then else echo "${HOSTNAME}: Ambari is running. Proceed ahead." 
fi - sudo echo "deb http://s3.amazonaws.com/dev.hortonworks.com/DAS/ubuntu16/1.x/BUILDS/1.0.1.1-13 DAS main" > ~/DAS.list sudo cp ~/DAS.list /etc/apt/sources.list.d && sudo apt-get update sudo /usr/bin/apt-get -o Dpkg::Options::=--force-confdef --allow-unauthenticated --assume-yes install data-analytics-studio-lite @@ -23,6 +23,16 @@ echo "USERID=$USERID" PASSWD=$(echo -e "import hdinsight_common.ClusterManifestParser as ClusterManifestParser\nimport hdinsight_common.Constants as Constants\nimport base64\nbase64pwd = ClusterManifestParser.parse_local_manifest().ambari_users.usersmap[Constants.AMBARI_WATCHDOG_USERNAME].password\nprint base64.b64decode(base64pwd)" | python) TAG=$(cat /proc/sys/kernel/random/uuid) +#grep returns exit code 1 if no match is found. Suppress the error +set +e +if [ -z "$(curl -u $USERID:$PASSWD -H "X-Requested-By: ambari" --silent -X GET https://$CLUSTERNAME.azurehdinsight.net/api/v1/clusters/$CLUSTERNAME/services/HIVE | grep HIVE_SERVER_INTERACTIVE)" ] +then + LLAP=true +else + LLAP=false +fi +set -e +echo "Interactive hive mode: $LLAP" #Add service_name curl -u $USERID:$PASSWD -H "X-Requested-By: ambari" -i -X POST -d '{"ServiceInfo":{"service_name":"DATA_ANALYTICS_STUDIO"}}' https://$CLUSTERNAME.azurehdinsight.net/api/v1/clusters/$CLUSTERNAME/services #Add components @@ -34,7 +44,7 @@ curl -u $USERID:$PASSWD -H "X-Requested-By: ambari" -i -X POST -d '{"type": "dat curl -u $USERID:$PASSWD -H "X-Requested-By: ambari" -i -X POST -d '{"type": "data_analytics_studio-webapp-properties", "tag": "$TAG", "properties" : { "data_analytics_studio_webapp_server_protocol": "http","data_analytics_studio_webapp_smartsense_id": "das-smartsense-id","data_analytics_studio_webapp_server_port": "30800","content": "{ \"logging\": { \"level\": \"INFO\", \"loggers\": { \"com.hortonworks.hivestudio\": \"DEBUG\" }, \"appenders\": [ { \"type\": \"file\", \"currentLogFilename\": \"{{data_analytics_studio_log_dir}}/das-webapp.log\", \"archivedLogFilenamePattern\": \"{{data_analytics_studio_log_dir}}/das-webapp-%i.log.gz\", \"archivedFileCount\": 5, \"maxFileSize\": \"1GB\" } ] }, \"jerseyClient\":{ }, \"database\": { \"driverClass\": \"org.postgresql.Driver\", \"url\": \"{{data_analytics_studio_database_jdbc_url}}\", \"user\": \"{{data_analytics_studio_database_username}}\", \"password\": \"{{data_analytics_studio_database_password}}\", \"properties\": { } }, \"flyway\": { \"schemas\": [\"das\"], \"locations\": [ \"db/migrate/common\", \"db/migrate/prod\" ] }, \"server\": { \"requestLog\": { \"appenders\": [ { \"type\": \"file\", \"currentLogFilename\": \"{{data_analytics_studio_log_dir}}/das-webapp-access.log\", \"archivedLogFilenamePattern\": \"{{data_analytics_studio_log_dir}}/das-webapp-access-%i.log.gz\", \"archivedFileCount\": 5, \"maxFileSize\": \"1GB\" } ] }, \"applicationConnectors\": [ { {% if data_analytics_studio_ssl_enabled %} \"keyStorePath\": \"{{data_analytics_studio_webapp_keystore_file}}\", \"keyStorePassword\": \"{{data_analytics_studio_webapp_keystore_password}}\", {# \"validateCerts\": true, #} {% endif %} \"type\": \"{{data_analytics_studio_webapp_server_protocol}}\", \"port\": {{data_analytics_studio_webapp_server_port}} } ], \"adminConnectors\": [ { {% if data_analytics_studio_ssl_enabled %} \"keyStorePath\": \"{{data_analytics_studio_webapp_keystore_file}}\", \"keyStorePassword\": \"{{data_analytics_studio_webapp_keystore_password}}\", {# \"validateCerts\": true, #} {% endif %} \"type\": \"{{data_analytics_studio_webapp_server_protocol}}\", \"port\": 
{{data_analytics_studio_webapp_admin_port}} } ] }, \"akka\": { \"properties\": { \"akka.loglevel\": \"INFO\", \"akka.stdout-loglevel\": \"INFO\", \"akka.actor.jdbc-connector-dispatcher.fork-join-executor.parallelism-factor\": 5.0, \"akka.actor.result-dispatcher.fork-join-executor.parallelism-factor\": 10.0, \"akka.actor.misc-dispatcher.fork-join-executor.parallelism-factor\": 5.0 } }, \"gaConfiguration\": { \"enabled\": true, \"identifier\": \"UA-22950817-34\" }, \"serviceConfigDirectory\" : \"/etc/das/conf/\", \"environment\": \"production\", \"smartsenseId\": \"{{data_analytics_studio_webapp_smartsense_id}}\", \"authConfig\": { \"enabled\": {{data_analytics_studio_webapp_auth_enabled}}, \"appUserName\": \"{{data_analytics_studio_user}}\", \"adminUsers\": \"{{data_analytics_studio_admin_users}}\", \"serviceAuthType\": \"{{data_analytics_studio_webapp_service_auth_type}}\", \"serviceKeytab\": \"{{data_analytics_studio_webapp_service_keytab}}\", \"servicePrincipal\": \"{{data_analytics_studio_webapp_service_principal}}\", \"knoxSSOEnabled\": {{data_analytics_studio_webapp_knox_sso_enabled}}, \"knoxSSOUrl\": \"{{data_analytics_studio_webapp_knox_sso_url}}\", \"knoxPublicKey\": \"{{data_analytics_studio_webapp_knox_publickey}}\", \"knoxCookieName\": \"{{data_analytics_studio_webapp_knox_cookiename}}\", \"knoxUrlParamName\": \"{{data_analytics_studio_webapp_knox_url_query_param}}\", \"knoxUserAgent\": \"{{data_analytics_studio_webapp_knox_useragent}}\" }}","data_analytics_studio_webapp_$USERID_port": "30801"}}' https://$CLUSTERNAME.azurehdinsight.net/api/v1/clusters/$CLUSTERNAME/configurations curl -u $USERID:$PASSWD -H "X-Requested-By: ambari" -i -X POST -d '{"type": "data_analytics_studio-event_processor-properties", "tag": "$TAG", "properties" : { "data_analytics_studio_event_processor_admin_server_port": "30901","data_analytics_studio_event_processor_server_protocol": "http","content": "{\n \"logging\": {\n \"level\": \"INFO\",\n \"loggers\": {\n \"com.hortonworks.hivestudio\": \"DEBUG\"\n },\n \"appenders\": [\n {\n \"type\": \"file\",\n \"currentLogFilename\": \"{{data_analytics_studio_log_dir}}/event-processor.log\",\n \"archivedLogFilenamePattern\": \"{{data_analytics_studio_log_dir}}/event-processor-%i.log.gz\",\n \"archivedFileCount\": 5,\n \"maxFileSize\": \"1GB\"\n }\n ]\n },\n \"jerseyClient\": {\n \"timeout\": \"240s\",\n \"connectionTimeout\": \"2s\"\n },\n \"database\": {\n \"driverClass\": \"org.postgresql.Driver\",\n \"url\": \"{{data_analytics_studio_database_jdbc_url}}\",\n \"user\": \"{{data_analytics_studio_database_username}}\",\n \"password\": \"{{data_analytics_studio_database_password}}\",\n \"properties\": {\n }\n },\n \"server\": {\n \"requestLog\": {\n \"appenders\": [\n {\n \"type\": \"file\",\n \"currentLogFilename\": \"{{data_analytics_studio_log_dir}}/event-processor-access.log\",\n \"archivedLogFilenamePattern\": \"{{data_analytics_studio_log_dir}}/event-processor-access-%i.log.gz\",\n \"archivedFileCount\": 5,\n \"maxFileSize\": \"1GB\"\n }\n ]\n },\n \"applicationConnectors\": [\n {\n {% if data_analytics_studio_ssl_enabled %}\n \"keyStorePath\": \"{{data_analytics_studio_event_processor_keystore_file}}\",\n \"keyStorePassword\": \"{{data_analytics_studio_event_processor_keystore_password}}\",\n {# \"validateCerts\": true, #}\n {% endif %}\n \"type\": \"{{data_analytics_studio_event_processor_server_protocol}}\",\n \"port\": {{data_analytics_studio_event_processor_server_port}}\n }\n ],\n \"adminConnectors\": [\n {\n {% if data_analytics_studio_ssl_enabled 
%}\n \"keyStorePath\": \"{{data_analytics_studio_event_processor_keystore_file}}\",\n \"keyStorePassword\": \"{{data_analytics_studio_event_processor_keystore_password}}\",\n {# \"validateCerts\": true, #}\n {% endif %}\n \"type\": \"{{data_analytics_studio_event_processor_server_protocol}}\",\n \"port\": {{data_analytics_studio_event_processor_admin_server_port}}\n }\n ]\n },\n \"akka\": {\n \"properties\": {\n \"akka.loglevel\": \"INFO\",\n \"akka.stdout-loglevel\": \"INFO\",\n \"akka.loggers.0\": \"akka.event.slf4j.Slf4jLogger\"\n }\n },\n \"authConfig\": {\n \"enabled\": {{data_analytics_studio_event_processor_auth_enabled}},\n \"appUserName\": \"{{data_analytics_studio_user}}\",\n \"serviceAuthType\": \"{{data_analytics_studio_event_processor_service_auth_type}}\",\n \"serviceKeytab\": \"{{data_analytics_studio_event_processor_service_keytab}}\",\n \"servicePrincipal\": \"{{data_analytics_studio_event_processor_service_principal}}\"\n },\n \"event-processing\": {\n \"hive.hook.proto.base-directory\": \"{{data_analytics_studio_event_processor_hive_base_dir}}\",\n \"tez.history.logging.proto-base-dir\": \"{{data_analytics_studio_event_processor_tez_base_dir}}\",\n \"meta.info.sync.service.delay.millis\": 5000,\n \"actor.initialization.delay.millis\": 20000,\n \"close.folder.delay.millis\": 600000,\n \"reread.event.max.retries\": -1,\n \"reporting.scheduler.initial.delay.millis\": 30000,\n \"reporting.scheduler.interval.delay.millis\": 300000,\n \"reporting.scheduler.weekly.initial.delay.millis\": 60000,\n \"reporting.scheduler.weekly.interval.delay.millis\": 600000,\n \"reporting.scheduler.monthly.initial.delay.millis\": 90000,\n \"reporting.scheduler.monthly.interval.delay.millis\": 900000,\n \"reporting.scheduler.quarterly.initial.delay.millis\": 120000,\n \"reporting.scheduler.quarterly.interval.delay.millis\": 1200000\n },\n \"serviceConfigDirectory\": \"/etc/das/conf/\",\n \"environment\": \"production\"\n}","data_analytics_studio_event_processor_server_port": "30900"}}' https://$CLUSTERNAME.azurehdinsight.net/api/v1/clusters/$CLUSTERNAME/configurations curl -u $USERID:$PASSWD -H "X-Requested-By: ambari" -i -X POST -d '{"type": "data_analytics_studio-event_processor-env", "tag": "$TAG","properties" : { "content": "#!/usr/bin/env bash\n\n#Do NOT edit Log and Pid dir, modify Advanced data_analytics_studio-env properties instead\nexport DAS_EP_PID_DIR=\"{{data_analytics_studio_pid_dir}}\"\nexport DAS_EP_LOG_DIR=\"{{data_analytics_studio_log_dir}}\"\nexport JAVA_OPTS=\"{{data_analytics_studio_ep_jvm_opts}}\"\nexport ADDITIONAL_CLASSPATH=\"{{data_analytics_studio_ep_additional_classpath}}\"\n\nexport DEBUG=\"false\"\n#export DEBUG_PORT="}}' https://$CLUSTERNAME.azurehdinsight.net/api/v1/clusters/$CLUSTERNAME/configurations -curl -u $USERID:$PASSWD -H "X-Requested-By: ambari" -i -X POST -d '{"type": "data_analytics_studio-properties", "tag": "$TAG","properties" : { "hive_session_params": "","content": "application.name=das-webapp\nhive.session.params={{data_analytics_studio_hive_session_params}}\ndas.jobs.dir=/user/{{data_analytics_studio_user}}/jobs\ndas.api.url={{data_analytics_studio_webapp_server_url}}\nuse.hive.interactive.mode=false"}}' https://$CLUSTERNAME.azurehdinsight.net/api/v1/clusters/$CLUSTERNAME/configurations +curl -u $USERID:$PASSWD -H "X-Requested-By: ambari" -i -X POST -d '{"type": "data_analytics_studio-properties", "tag": "$TAG","properties" : { "hive_session_params": "","content": 
"application.name=das-webapp\nhive.session.params={{data_analytics_studio_hive_session_params}}\ndas.jobs.dir=/user/{{data_analytics_studio_user}}/jobs\ndas.api.url={{data_analytics_studio_webapp_server_url}}\nuse.hive.interactive.mode='$LLAP'"}}' https://$CLUSTERNAME.azurehdinsight.net/api/v1/clusters/$CLUSTERNAME/configurations curl -u $USERID:$PASSWD -H "X-Requested-By: ambari" -i -X POST -d '{"type": "data_analytics_studio-webapp-env", "tag": "$TAG","properties" : { "content": "#!/usr/bin/env bash\n\n#Do NOT edit Log and Pid dir, modify Advanced data_analytics_studio-env properties instead\nexport DAS_PID_DIR=\"{{data_analytics_studio_pid_dir}}\"\nexport DAS_LOG_DIR=\"{{data_analytics_studio_log_dir}}\"\nexport JAVA_OPTS=\"{{data_analytics_studio_webapp_jvm_opts}}\"\nexport ADDITIONAL_CLASSPATH=\"{{data_analytics_studio_webapp_additional_classpath}}\"\n\nexport DEBUG=\"false\"\n#export DEBUG_PORT="}}' https://$CLUSTERNAME.azurehdinsight.net/api/v1/clusters/$CLUSTERNAME/configurations curl -u $USERID:$PASSWD -H "X-Requested-By: ambari" -i -X POST -d '{"type": "data_analytics_studio-security-site", "tag": "$TAG","properties" : { "webapp_keystore_file": "","authentication_enabled": "false","ssl_enabled": "false","knox_cookiename": "hadoop-jwt","knox_sso_enabled": "false","knox_url_query_param": "originalUrl","knox_useragent": "Mozilla,Chrome","event_processor_keystore_file": "","knox_publickey": "","admin_users": "hive","knox_sso_url": ""}}' https://$CLUSTERNAME.azurehdinsight.net/api/v1/clusters/$CLUSTERNAME/configurations curl -u $USERID:$PASSWD -H "X-Requested-By: ambari" -i -X POST -d '{"type": "data_analytics_studio-env", "tag": "$TAG","properties" : { "data_analytics_studio_pid_dir": "/usr/das/1.0.1.0-11/data_analytics_studio","data_analytics_studio_log_dir":"/var/log/das","ep_jvm_opts": "-Xmx1024m","webapp_jvm_opts": "-Xmx1024m","webapp_additional_classpath" : "/usr/hdp/current/hadoop-client/*:/usr/lib/rubix/*","ep_additional_classpath" : "/usr/hdp/current/hadoop-client/*:/usr/lib/rubix/*"}}' https://$CLUSTERNAME.azurehdinsight.net/api/v1/clusters/$CLUSTERNAME/configurations @@ -48,19 +58,30 @@ curl -u $USERID:$PASSWD -H "X-Requested-By: ambari" -i -X PUT -d '{ "Clusters" : curl -u $USERID:$PASSWD -H "X-Requested-By: ambari" -i -X PUT -d '{ "Clusters" : {"desired_configs": {"type": "data_analytics_studio-security-site", "tag" : "$TAG" }}}' https://$CLUSTERNAME.azurehdinsight.net/api/v1/clusters/$CLUSTERNAME curl -u $USERID:$PASSWD -H "X-Requested-By: ambari" -i -X PUT -d '{ "Clusters" : {"desired_configs": {"type": "data_analytics_studio-env", "tag" : "$TAG" }}}' https://$CLUSTERNAME.azurehdinsight.net/api/v1/clusters/$CLUSTERNAME #assign hosts -NODENAME=$(curl -u $USERID:$PASSWD --silent -H "X-Requested-By: ambari" -X GET https://$CLUSTERNAME.azurehdinsight.net/api/v1/clusters/$CLUSTERNAME/hosts | grep -i "hn0" | grep -i "host_name" | grep -o hn0.*net) +NODENAME=$(curl -u $USERID:$PASSWD --silent -H "X-Requested-By: ambari" -X GET https://$CLUSTERNAME.azurehdinsight.net/api/v1/clusters/$CLUSTERNAME/hosts | grep -i $HOSTNAME | grep -i "host_name" | grep -o $HOSTNAME.*net) curl -u $USERID:$PASSWD -H "X-Requested-By: ambari" -i -X POST -d '{"host_components" : [{"HostRoles":{"component_name":"DATA_ANALYTICS_STUDIO_WEBAPP"}}] }' https://$CLUSTERNAME.azurehdinsight.net/api/v1/clusters/$CLUSTERNAME/hosts?Hosts/host_name=$NODENAME curl -u $USERID:$PASSWD -H "X-Requested-By: ambari" -i -X POST -d '{"host_components" : 
[{"HostRoles":{"component_name":"DATA_ANALYTICS_STUDIO_EVENT_PROCESSOR"}}] }' https://$CLUSTERNAME.azurehdinsight.net/api/v1/clusters/$CLUSTERNAME/hosts?Hosts/host_name=$NODENAME #start service curl -u $USERID:$PASSWD -H "X-Requested-By: ambari" -i -X PUT -d '{"ServiceInfo": {"state" : "INSTALLED"}, "RequestInfo": {"context": "Install Data Analytics Studio"}}' https://$CLUSTERNAME.azurehdinsight.net/api/v1/clusters/$CLUSTERNAME/services/DATA_ANALYTICS_STUDIO sleep 10s - +#start DATA ANALYTICS STUDO, retry 3 times if fails n=0 -until [ $n -gt 3 ] || [ "$(curl -u $USERID:$PASSWD -H "X-Requested-By: ambari" --silent -X GET https://$CLUSTERNAME.azurehdinsight.net/api/v1/clusters/$CLUSTERNAME/services/DATA_ANALYTICS_STUDIO)" = '"state" : "INSTALLED"' ] +SUCCESSCODE=202 +RETRYCOUNT=3 +STATUSCODE=400 +until [ $STATUSCODE -le $SUCCESSCODE ] || [ $n -gt $RETRYCOUNT ] do - n=$[$n+1] - echo "Data Analytics Studio not yet installed" - sleep 20s + STATUSCODE=$(curl -u $USERID:$PASSWD -H "X-Requested-By: ambari" -i -X PUT -d '{"ServiceInfo": {"state" : "STARTED"}, "RequestInfo": {"context": "Start Data Analytics Studio"}}' --silent --write-out %{http_code} --output /tmp/response.txt https://$CLUSTERNAME.azurehdinsight.net/api/v1/clusters/$CLUSTERNAME/services/DATA_ANALYTICS_STUDIO) + if test $STATUSCODE -le $SUCCESSCODE; then + break + else + n=$[$n+1] + fi + sleep 30s done -curl -u $USERID:$PASSWD -H "X-Requested-By: ambari" -i -X PUT -d '{"ServiceInfo": {"state" : "STARTED"}, "RequestInfo": {"context": "Start Data Analytics Studio"}}' https://$CLUSTERNAME.azurehdinsight.net/api/v1/clusters/$CLUSTERNAME/services/DATA_ANALYTICS_STUDIO + +if test $STATUSCODE -gt $SUCCESSCODE; then + echo "Starting service failed for $CLUSTERNAME with $STATUSCODE" + exit 1 +fi From 2003f4bb3a0744804d4a16d4d5a91452009b267a Mon Sep 17 00:00:00 2001 From: Jyoti Ranjan Date: Wed, 26 Sep 2018 12:59:14 -0700 Subject: [PATCH 07/16] Update README.md --- install-data-analytics-studio/README.md | 2 ++ 1 file changed, 2 insertions(+) diff --git a/install-data-analytics-studio/README.md b/install-data-analytics-studio/README.md index d5652de..6842ec3 100644 --- a/install-data-analytics-studio/README.md +++ b/install-data-analytics-studio/README.md @@ -14,3 +14,5 @@ Execute [install-data-analytics-studio.sh](install-data-analytics-studio.sh) on If you wish to run the script using a custom script action, follow [this](https://docs.microsoft.com/en-us/azure/hdinsight/hdinsight-hadoop-customize-cluster-linux). Make sure to execute the script action on head nodes. After the script succeeds, head to ambari view and refresh. On the left panel observe a new service: ```Data Analytics Studio```. Use the Quick Links to navigate to the UI, or append /das/ at the end of the cluster name, e.g https://clustername.azurehdinsight.net/das/ + +Note: Data Analytics studio today does not support high availability. As a result, if the node on which it is installed goes down, the Data analytics Studio service won't be accessible. 
From 9e96b78be349eaf72f7a0f242cbe2505c316b3ff Mon Sep 17 00:00:00 2001 From: Jyoti Ranjan Date: Wed, 26 Sep 2018 12:59:53 -0700 Subject: [PATCH 08/16] Update README.md --- install-data-analytics-studio/README.md | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/install-data-analytics-studio/README.md b/install-data-analytics-studio/README.md index 6842ec3..5cb85cb 100644 --- a/install-data-analytics-studio/README.md +++ b/install-data-analytics-studio/README.md @@ -1,7 +1,7 @@ -###Instructions to install Data Analytics Studio on HDI 4.0 +### Instructions to install Data Analytics Studio on HDI 4.0 -####Prerequisites +#### Prerequisites 1. A precreated HDI 4 cluster with hive component. 2. Make sure the tez configs(tez-site) have been modified to include ```tez.history.logging.proto-base-dir=/warehouse/tablespace/external/hive/sys.db``` Restart Tez after modifying the configuration. @@ -9,7 +9,7 @@ Restart Tez after modifying the configuration. ```hive.exec.failure.hooks=org.apache.hadoop.hive.ql.hooks.HiveProtoLoggingHook``` ```hive.exec.post.hooks=org.apache.hadoop.hive.ql.hooks.HiveProtoLoggingHook``` ```hive.exec.pre.hooks=org.apache.hadoop.hive.ql.hooks.HiveProtoLoggingHook``` Restart Hive after modifying the configurations. -####Installation Instructions +#### Installation Instructions Execute [install-data-analytics-studio.sh](install-data-analytics-studio.sh) on both head nodes. ```sudo install-data-analytics-studio.sh```. Make sure to execute the script on all head nodes. The script will identify the right node to install the package. If you wish to run the script using a custom script action, follow [this](https://docs.microsoft.com/en-us/azure/hdinsight/hdinsight-hadoop-customize-cluster-linux). Make sure to execute the script action on head nodes. After the script succeeds, head to ambari view and refresh. On the left panel observe a new service: ```Data Analytics Studio```. 
From 33aff8e5c31afb92908973c4e091155518f80213 Mon Sep 17 00:00:00 2001 From: Jyoti Ranjan Date: Wed, 26 Sep 2018 22:22:53 -0700 Subject: [PATCH 09/16] Update install-data-analytics-studio.sh --- .../install-data-analytics-studio.sh | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/install-data-analytics-studio/install-data-analytics-studio.sh b/install-data-analytics-studio/install-data-analytics-studio.sh index a2e26fe..3befd74 100644 --- a/install-data-analytics-studio/install-data-analytics-studio.sh +++ b/install-data-analytics-studio/install-data-analytics-studio.sh @@ -27,9 +27,9 @@ TAG=$(cat /proc/sys/kernel/random/uuid) set +e if [ -z "$(curl -u $USERID:$PASSWD -H "X-Requested-By: ambari" --silent -X GET https://$CLUSTERNAME.azurehdinsight.net/api/v1/clusters/$CLUSTERNAME/services/HIVE | grep HIVE_SERVER_INTERACTIVE)" ] then - LLAP=true -else LLAP=false +else + LLAP=true fi set -e echo "Interactive hive mode: $LLAP" From c46e966a2150ac595879897f5c0ff070c9c819f7 Mon Sep 17 00:00:00 2001 From: Jyoti Ranjan Date: Thu, 27 Sep 2018 23:44:17 -0700 Subject: [PATCH 10/16] Update install-data-analytics-studio.sh --- .../install-data-analytics-studio.sh | 5 ++--- 1 file changed, 2 insertions(+), 3 deletions(-) diff --git a/install-data-analytics-studio/install-data-analytics-studio.sh b/install-data-analytics-studio/install-data-analytics-studio.sh index 3befd74..614bc5c 100644 --- a/install-data-analytics-studio/install-data-analytics-studio.sh +++ b/install-data-analytics-studio/install-data-analytics-studio.sh @@ -9,9 +9,8 @@ then else echo "${HOSTNAME}: Ambari is running. Proceed ahead." fi -sudo echo "deb http://s3.amazonaws.com/dev.hortonworks.com/DAS/ubuntu16/1.x/BUILDS/1.0.1.1-13 DAS main" > ~/DAS.list -sudo cp ~/DAS.list /etc/apt/sources.list.d && sudo apt-get update -sudo /usr/bin/apt-get -o Dpkg::Options::=--force-confdef --allow-unauthenticated --assume-yes install data-analytics-studio-lite +sudo echo "deb http://s3.amazonaws.com/dev.hortonworks.com/DAS/ubuntu16/1.x/BUILDS/1.0.1.1-13 DAS main" > /etc/apt/sources.list.d +sudo apt-get update && sudo /usr/bin/apt-get -o Dpkg::Options::=--force-confdef --allow-unauthenticated --assume-yes install data-analytics-studio-lite wget http://s3.amazonaws.com/dev.hortonworks.com/DAS/ubuntu16/1.x/BUILDS/1.0.1.1-13/tars/data_analytics_studio_lite/data-analytics-studio-mpack-1.0.1.1.0.1.1-13.tar.gz -O /tmp/data-analytics-studio-mpack.tar.gz sudo ambari-server install-mpack --mpack=/tmp/data-analytics-studio-mpack.tar.gz sudo ambari-server restart From f3c47dbf49285702917c3c74202c5c74e03ff738 Mon Sep 17 00:00:00 2001 From: Jyoti Ranjan Date: Thu, 27 Sep 2018 23:54:00 -0700 Subject: [PATCH 11/16] Update install-data-analytics-studio.sh --- install-data-analytics-studio/install-data-analytics-studio.sh | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/install-data-analytics-studio/install-data-analytics-studio.sh b/install-data-analytics-studio/install-data-analytics-studio.sh index 614bc5c..1a4399c 100644 --- a/install-data-analytics-studio/install-data-analytics-studio.sh +++ b/install-data-analytics-studio/install-data-analytics-studio.sh @@ -9,7 +9,7 @@ then else echo "${HOSTNAME}: Ambari is running. Proceed ahead." 
fi -sudo echo "deb http://s3.amazonaws.com/dev.hortonworks.com/DAS/ubuntu16/1.x/BUILDS/1.0.1.1-13 DAS main" > /etc/apt/sources.list.d +sudo echo "deb http://s3.amazonaws.com/dev.hortonworks.com/DAS/ubuntu16/1.x/BUILDS/1.0.1.1-13 DAS main" > /etc/apt/sources.list.d/das.list sudo apt-get update && sudo /usr/bin/apt-get -o Dpkg::Options::=--force-confdef --allow-unauthenticated --assume-yes install data-analytics-studio-lite wget http://s3.amazonaws.com/dev.hortonworks.com/DAS/ubuntu16/1.x/BUILDS/1.0.1.1-13/tars/data_analytics_studio_lite/data-analytics-studio-mpack-1.0.1.1.0.1.1-13.tar.gz -O /tmp/data-analytics-studio-mpack.tar.gz sudo ambari-server install-mpack --mpack=/tmp/data-analytics-studio-mpack.tar.gz From 383878a607fe2f5f56cc315f9f80c84cb6b6c1a3 Mon Sep 17 00:00:00 2001 From: Jyoti Ranjan Date: Fri, 28 Sep 2018 00:16:31 -0700 Subject: [PATCH 12/16] Update install-data-analytics-studio.sh --- .../install-data-analytics-studio.sh | 9 ++++++--- 1 file changed, 6 insertions(+), 3 deletions(-) diff --git a/install-data-analytics-studio/install-data-analytics-studio.sh b/install-data-analytics-studio/install-data-analytics-studio.sh index 1a4399c..f96bdce 100644 --- a/install-data-analytics-studio/install-data-analytics-studio.sh +++ b/install-data-analytics-studio/install-data-analytics-studio.sh @@ -57,9 +57,12 @@ curl -u $USERID:$PASSWD -H "X-Requested-By: ambari" -i -X PUT -d '{ "Clusters" : curl -u $USERID:$PASSWD -H "X-Requested-By: ambari" -i -X PUT -d '{ "Clusters" : {"desired_configs": {"type": "data_analytics_studio-security-site", "tag" : "$TAG" }}}' https://$CLUSTERNAME.azurehdinsight.net/api/v1/clusters/$CLUSTERNAME curl -u $USERID:$PASSWD -H "X-Requested-By: ambari" -i -X PUT -d '{ "Clusters" : {"desired_configs": {"type": "data_analytics_studio-env", "tag" : "$TAG" }}}' https://$CLUSTERNAME.azurehdinsight.net/api/v1/clusters/$CLUSTERNAME #assign hosts -NODENAME=$(curl -u $USERID:$PASSWD --silent -H "X-Requested-By: ambari" -X GET https://$CLUSTERNAME.azurehdinsight.net/api/v1/clusters/$CLUSTERNAME/hosts | grep -i $HOSTNAME | grep -i "host_name" | grep -o $HOSTNAME.*net) -curl -u $USERID:$PASSWD -H "X-Requested-By: ambari" -i -X POST -d '{"host_components" : [{"HostRoles":{"component_name":"DATA_ANALYTICS_STUDIO_WEBAPP"}}] }' https://$CLUSTERNAME.azurehdinsight.net/api/v1/clusters/$CLUSTERNAME/hosts?Hosts/host_name=$NODENAME -curl -u $USERID:$PASSWD -H "X-Requested-By: ambari" -i -X POST -d '{"host_components" : [{"HostRoles":{"component_name":"DATA_ANALYTICS_STUDIO_EVENT_PROCESSOR"}}] }' https://$CLUSTERNAME.azurehdinsight.net/api/v1/clusters/$CLUSTERNAME/hosts?Hosts/host_name=$NODENAME +NODENAME1=$(curl -u $USERID:$PASSWD --silent -H "X-Requested-By: ambari" -X GET https://$CLUSTERNAME.azurehdinsight.net/api/v1/clusters/$CLUSTERNAME/hosts | grep -i hn0 | grep -i "host_name" | grep -o hn0.*net) +curl -u $USERID:$PASSWD -H "X-Requested-By: ambari" -i -X POST -d '{"host_components" : [{"HostRoles":{"component_name":"DATA_ANALYTICS_STUDIO_WEBAPP"}}] }' https://$CLUSTERNAME.azurehdinsight.net/api/v1/clusters/$CLUSTERNAME/hosts?Hosts/host_name=$NODENAME1 +curl -u $USERID:$PASSWD -H "X-Requested-By: ambari" -i -X POST -d '{"host_components" : [{"HostRoles":{"component_name":"DATA_ANALYTICS_STUDIO_EVENT_PROCESSOR"}}] }' https://$CLUSTERNAME.azurehdinsight.net/api/v1/clusters/$CLUSTERNAME/hosts?Hosts/host_name=$NODENAME1 +NODENAME2=$(curl -u $USERID:$PASSWD --silent -H "X-Requested-By: ambari" -X GET https://$CLUSTERNAME.azurehdinsight.net/api/v1/clusters/$CLUSTERNAME/hosts | grep 
-i hn1 | grep -i "host_name" | grep -o hn1.*net) +curl -u $USERID:$PASSWD -H "X-Requested-By: ambari" -i -X POST -d '{"host_components" : [{"HostRoles":{"component_name":"DATA_ANALYTICS_STUDIO_WEBAPP"}}] }' https://$CLUSTERNAME.azurehdinsight.net/api/v1/clusters/$CLUSTERNAME/hosts?Hosts/host_name=$NODENAME2 +curl -u $USERID:$PASSWD -H "X-Requested-By: ambari" -i -X POST -d '{"host_components" : [{"HostRoles":{"component_name":"DATA_ANALYTICS_STUDIO_EVENT_PROCESSOR"}}] }' https://$CLUSTERNAME.azurehdinsight.net/api/v1/clusters/$CLUSTERNAME/hosts?Hosts/host_name=$NODENAME2 #start service curl -u $USERID:$PASSWD -H "X-Requested-By: ambari" -i -X PUT -d '{"ServiceInfo": {"state" : "INSTALLED"}, "RequestInfo": {"context": "Install Data Analytics Studio"}}' https://$CLUSTERNAME.azurehdinsight.net/api/v1/clusters/$CLUSTERNAME/services/DATA_ANALYTICS_STUDIO From 817c0e2782a5ca38373a2dd0d22f988bb526ad5a Mon Sep 17 00:00:00 2001 From: Jyoti Ranjan Date: Fri, 28 Sep 2018 00:17:02 -0700 Subject: [PATCH 13/16] Update install-data-analytics-studio.sh --- .../install-data-analytics-studio.sh | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/install-data-analytics-studio/install-data-analytics-studio.sh b/install-data-analytics-studio/install-data-analytics-studio.sh index f96bdce..0a9192f 100644 --- a/install-data-analytics-studio/install-data-analytics-studio.sh +++ b/install-data-analytics-studio/install-data-analytics-studio.sh @@ -2,6 +2,10 @@ set -e set -x +sudo echo "deb http://s3.amazonaws.com/dev.hortonworks.com/DAS/ubuntu16/1.x/BUILDS/1.0.1.1-13 DAS main" > /etc/apt/sources.list.d/das.list +sudo apt-get update && sudo /usr/bin/apt-get -o Dpkg::Options::=--force-confdef --allow-unauthenticated --assume-yes install data-analytics-studio-lite +wget http://s3.amazonaws.com/dev.hortonworks.com/DAS/ubuntu16/1.x/BUILDS/1.0.1.1-13/tars/data_analytics_studio_lite/data-analytics-studio-mpack-1.0.1.1.0.1.1-13.tar.gz -O /tmp/data-analytics-studio-mpack.tar.gz +sudo ambari-server install-mpack --mpack=/tmp/data-analytics-studio-mpack.tar.gz if [ -z $(sudo ambari-server status | grep -o "Ambari Server running") ] then echo "${HOSTNAME} : Ambari is not running. Exiting" @@ -9,10 +13,6 @@ then else echo "${HOSTNAME}: Ambari is running. Proceed ahead." 
fi -sudo echo "deb http://s3.amazonaws.com/dev.hortonworks.com/DAS/ubuntu16/1.x/BUILDS/1.0.1.1-13 DAS main" > /etc/apt/sources.list.d/das.list -sudo apt-get update && sudo /usr/bin/apt-get -o Dpkg::Options::=--force-confdef --allow-unauthenticated --assume-yes install data-analytics-studio-lite -wget http://s3.amazonaws.com/dev.hortonworks.com/DAS/ubuntu16/1.x/BUILDS/1.0.1.1-13/tars/data_analytics_studio_lite/data-analytics-studio-mpack-1.0.1.1.0.1.1-13.tar.gz -O /tmp/data-analytics-studio-mpack.tar.gz -sudo ambari-server install-mpack --mpack=/tmp/data-analytics-studio-mpack.tar.gz sudo ambari-server restart CLUSTERNAME=$(echo -e "import hdinsight_common.ClusterManifestParser as ClusterManifestParser\nprint ClusterManifestParser.parse_local_manifest().deployment.cluster_name" | python) From 00dd5e446c3c0175ff4021386a05062c8ec13f3d Mon Sep 17 00:00:00 2001 From: Jyoti Ranjan Date: Fri, 28 Sep 2018 01:29:12 -0700 Subject: [PATCH 14/16] Update README.md --- install-data-analytics-studio/README.md | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/install-data-analytics-studio/README.md b/install-data-analytics-studio/README.md index 5cb85cb..139c2b8 100644 --- a/install-data-analytics-studio/README.md +++ b/install-data-analytics-studio/README.md @@ -10,8 +10,8 @@ Restart Tez after modifying the configuration. Restart Hive after modifying the configurations. #### Installation Instructions -Execute [install-data-analytics-studio.sh](install-data-analytics-studio.sh) on both head nodes. ```sudo install-data-analytics-studio.sh```. Make sure to execute the script on all head nodes. The script will identify the right node to install the package. -If you wish to run the script using a custom script action, follow [this](https://docs.microsoft.com/en-us/azure/hdinsight/hdinsight-hadoop-customize-cluster-linux). Make sure to execute the script action on head nodes. +Execute [install-data-analytics-studio.sh](install-data-analytics-studio.sh) as [custom script action](https://docs.microsoft.com/en-us/azure/hdinsight/hdinsight-hadoop-customize-cluster-linux) only on the head nodes. + After the script succeeds, head to ambari view and refresh. On the left panel observe a new service: ```Data Analytics Studio```. Use the Quick Links to navigate to the UI, or append /das/ at the end of the cluster name, e.g https://clustername.azurehdinsight.net/das/ From 239a21b0841f9c7ae20cb640a0094612da23eb1e Mon Sep 17 00:00:00 2001 From: Jyoti Ranjan Date: Fri, 28 Sep 2018 01:29:47 -0700 Subject: [PATCH 15/16] Update README.md --- install-data-analytics-studio/README.md | 2 -- 1 file changed, 2 deletions(-) diff --git a/install-data-analytics-studio/README.md b/install-data-analytics-studio/README.md index 139c2b8..9846c9f 100644 --- a/install-data-analytics-studio/README.md +++ b/install-data-analytics-studio/README.md @@ -14,5 +14,3 @@ Execute [install-data-analytics-studio.sh](install-data-analytics-studio.sh) as After the script succeeds, head to ambari view and refresh. On the left panel observe a new service: ```Data Analytics Studio```. Use the Quick Links to navigate to the UI, or append /das/ at the end of the cluster name, e.g https://clustername.azurehdinsight.net/das/ - -Note: Data Analytics studio today does not support high availability. As a result, if the node on which it is installed goes down, the Data analytics Studio service won't be accessible. 
From 4fa38fd9b138cebdf38161b3e3a6e35d192d997e Mon Sep 17 00:00:00 2001 From: Jyoti Ranjan Date: Fri, 28 Sep 2018 01:32:46 -0700 Subject: [PATCH 16/16] Update README.md --- install-data-analytics-studio/README.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/install-data-analytics-studio/README.md b/install-data-analytics-studio/README.md index 9846c9f..a8faac4 100644 --- a/install-data-analytics-studio/README.md +++ b/install-data-analytics-studio/README.md @@ -2,7 +2,7 @@ ### Instructions to install Data Analytics Studio on HDI 4.0 #### Prerequisites -1. A precreated HDI 4 cluster with hive component. +1. A precreated HDI 4 cluster containing the Hive component. 2. Make sure the tez configs(tez-site) have been modified to include ```tez.history.logging.proto-base-dir=/warehouse/tablespace/external/hive/sys.db``` Restart Tez after modifying the configuration. 3. Make sure the hive configs(hive-site) have been modified to include
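Not taken from the patches above: a small read-only sketch for verifying the tez-site and hive-site prerequisites from the README before running the script. It reuses the watchdog credentials the script derives (USERID, PASSWD, CLUSTERNAME); the `service_config_versions` endpoint is assumed to be available on the cluster's Ambari, and the check only confirms that the property names appear in the current configuration, not their exact values:

```sh
#!/bin/sh
# Hypothetical pre-flight check, not part of these patches.
# Assumes USERID, PASSWD and CLUSTERNAME are set as in install-data-analytics-studio.sh.
BASE="https://$CLUSTERNAME.azurehdinsight.net/api/v1/clusters/$CLUSTERNAME"

check() {
  # $1 = Ambari service name, $2 = property expected in its current configuration
  if curl -u "$USERID:$PASSWD" --silent -H "X-Requested-By: ambari" \
      "$BASE/configurations/service_config_versions?service_name=$1&is_current=true" \
      | grep -q "$2"
  then
    echo "$1: $2 present"
  else
    echo "$1: $2 missing - apply the prerequisite and restart $1"
  fi
}

check TEZ "tez.history.logging.proto-base-dir"
check HIVE "org.apache.hadoop.hive.ql.hooks.HiveProtoLoggingHook"
```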