Skip to content

Commit 61c5ddc

Browse files
author
Melony QIN
authored
Merge pull request #2 from microsoft/master
merge from master
2 parents b7abae0 + 539ff2a commit 61c5ddc

2 files changed

Lines changed: 33 additions & 22 deletions

File tree

samples/features/azure-arc/deployment/kubeadm/ubuntu-single-node-vm/setup-controller-new.sh

Lines changed: 21 additions & 21 deletions
Original file line numberDiff line numberDiff line change
@@ -1,26 +1,5 @@
11
#!/bin/bash
22

3-
# Get controller username and password as input. It is used as default for the controller.
4-
#
5-
if [ -z "$AZDATA_USERNAME" ]
6-
then
7-
read -p "Create Username for Azure Arc Data Controller: " username
8-
echo
9-
export AZDATA_USERNAME=$username
10-
fi
11-
if [ -z "$AZDATA_PASSWORD" ]
12-
then
13-
while true; do
14-
read -s -p "Create Password for Azure Arc Data Controller: " password
15-
echo
16-
read -s -p "Confirm your Password: " password2
17-
echo
18-
[ "$password" = "$password2" ] && break
19-
echo "Password mismatch. Please try again."
20-
done
21-
export AZDATA_PASSWORD=$password
22-
fi
23-
243
# Prompt for private preview repository username and password provided by Microsoft
254
#
265
if [ -z "$DOCKER_USERNAME" ]
@@ -67,6 +46,27 @@ then
6746
export ARC_DC_REGION=$dc_region
6847
fi
6948

49+
# Get controller username and password as input. It is used as default for the controller.
50+
#
51+
if [ -z "$AZDATA_USERNAME" ]
52+
then
53+
read -p "Create Username for Azure Arc Data Controller: " username
54+
echo
55+
export AZDATA_USERNAME=$username
56+
fi
57+
if [ -z "$AZDATA_PASSWORD" ]
58+
then
59+
while true; do
60+
read -s -p "Create Password for Azure Arc Data Controller: " password
61+
echo
62+
read -s -p "Confirm your Password: " password2
63+
echo
64+
[ "$password" = "$password2" ] && break
65+
echo "Password mismatch. Please try again."
66+
done
67+
export AZDATA_PASSWORD=$password
68+
fi
69+
7070
set -Eeuo pipefail
7171

7272
# This is a script to create single-node Kubernetes cluster and deploy Azure Arc Data Controller on it.

samples/features/sql-big-data-cluster/spark/config-install/installpackage_Spark.ipynb

Lines changed: 12 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -106,9 +106,20 @@
106106
"The following code can be used to install packages on each executor node at runtime. \\\n",
107107
"**Note**: This functionality is not available on a non-root BDC deployment (including OpenShift). This installation is temporary, and must be performed each time a new Spark session is invoked.\n",
108108
"\n",
109+
"If you want to use this from CU5 upwards, you must add two settings pre-deployment.\n",
110+
"\n",
111+
"In control.json, add (under security):\n",
112+
"\n",
113+
"_\"allowRunAsRoot\": true_\n",
114+
"\n",
115+
"In BDC.json, add (under spec.services.spark.settings): \n",
116+
"\n",
117+
"_\"yarn-site.yarn.nodemanager.container-executor.class\": \"org.apache.hadoop.yarn.server.nodemanager.DefaultContainerExecutor\"_\n",
118+
"\n",
109119
"``` Python\n",
110120
"import subprocess\n",
111-
"\n",
121+
"import os\n",
122+
"os.environ[\"XDG_CACHE_HOME\"]=\"/tmp\"\n",
112123
"# Install TensorFlow\n",
113124
"stdout = subprocess.check_output(\n",
114125
" \"pip3 install tensorflow\",\n",

0 commit comments

Comments
 (0)