/skybluelee/spark3/sbin/start-all.sh
/skybluelee/hadoop3/sbin/start-all.sh
/skybluelee/zeppelin0/bin/zeppelin-daemon.sh restart
/skybluelee/spark3/sbin/stop-all.sh
/skybluelee/hadoop3/sbin/stop-all.sh
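To verify that the daemons actually started (or stopped cleanly), jps lists the running JVM processes on each node; the web UIs are another quick check, assuming the default ports (Spark master 8080, YARN ResourceManager 8088, NameNode 9870).
$ jps
// on the master, typically: Master, NameNode, SecondaryNameNode, ResourceManager, ZeppelinServer
// on a worker, typically: Worker, DataNode, NodeManager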
/skybluelee/spark3$ ./bin/pyspark
// local mode (master = local[*])
$ YARN_CONF_DIR=/skybluelee/spark3/conf2 /skybluelee/spark3/bin/pyspark --master yarn
// runs on the YARN cluster (an interactive shell always uses client deploy mode)
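The same YARN setup can be exercised non-interactively with spark-submit; a minimal sketch using the bundled SparkPi example (the exact examples jar name depends on the installed Spark/Scala version, so the wildcard is an assumption):
$ YARN_CONF_DIR=/skybluelee/spark3/conf2 /skybluelee/spark3/bin/spark-submit \
    --master yarn --deploy-mode cluster \
    --class org.apache.spark.examples.SparkPi \
    /skybluelee/spark3/examples/jars/spark-examples_2.12-*.jar 100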
ssh -i "spark-key.pem" ubuntu@spark-master-01
ssh spark-worker-01
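ssh spark-worker-01 works without the -i flag or a user name only if those details are resolved elsewhere, e.g. by an entry in ~/.ssh/config on the master; a sketch of such an entry (the host name and key path here are assumptions for this cluster):
Host spark-worker-01
    HostName spark-worker-01
    User ubuntu
    IdentityFile ~/.ssh/spark-key.pem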
$ hdfs dfs -ls
With no path given, lists the current user's HDFS home directory, /user/spark
$ hdfs dfs -ls /skybluelee
Found 6 items
drwxr-xr-x - spark supergroup 0 2023-04-06 10:18 /skybluelee/data
drwxr-xr-x - spark supergroup 0 2023-07-13 08:10 /skybluelee/movie
drwxr-xr-x - spark supergroup 0 2023-07-14 07:24 /skybluelee/movie-partitioned
-rw-r--r-- 3 spark supergroup 997 2023-05-22 07:46 /skybluelee/name_gender.csv
drwxr-xr-x - spark supergroup 0 2023-04-09 15:57 /skybluelee/skybluelee_warehouse_mysql_5.7
drwxr-xr-x - spark supergroup 0 2023-03-11 01:08 /skybluelee/spark3
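A few other hdfs dfs operations that come up with these paths; the local file name name_gender.csv is an assumption:
$ hdfs dfs -cat /skybluelee/name_gender.csv | head -n 5
$ hdfs dfs -put name_gender.csv /skybluelee/data/
$ hdfs dfs -du -h /skybluelee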
$ docker rm -f $(docker ps -a -q)
$ sudo groupadd docker
$ sudo usermod -aG docker $USER
$ newgrp docker
$ sudo chmod 666 /var/run/docker.sock
$ source ~/.profile
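After joining the docker group (and re-reading the group membership), the daemon should be reachable without sudo; a quick check:
$ docker ps
$ docker run --rm hello-world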
$ sudo useradd <user_name> -m -s /bin/bash
$ sudo passwd <user_name>
$ sudo visudo
or
$ sudo vi /etc/sudoers
... add the line below anywhere in the file
<user_name> ALL=(ALL) NOPASSWD: ALL
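To confirm the new account and the passwordless sudo rule, switch to the user and run something through sudo; no password prompt should appear:
$ su - <user_name>
$ sudo -l
$ sudo whoami
// expected output: root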