From 97b1f0d0af50dd5101e3f1cb31dfe63826c24bb2 Mon Sep 17 00:00:00 2001
From: huzekang <1040080742@qq.com>
Date: Wed, 26 Jun 2019 13:41:05 +0800
Subject: [PATCH] change: 1. Update the README with instructions for
 submitting Spark jobs to run on YARN. 2. Improve code comments
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

---
 README.md                                           | 18 +++++++++++++++++-
 .../sparkstarter/SparkHiveNewVersion.java            |  5 +++--
 2 files changed, 20 insertions(+), 3 deletions(-)

diff --git a/README.md b/README.md
index 724eeae..43e492a 100644
--- a/README.md
+++ b/README.md
@@ -151,4 +151,20 @@ SparkSession spark = SparkSession
 4. Open the Spark server UI and you can see the completed Spark job.
 ![](https://raw.githubusercontent.com/huzekang/picbed/master/20190626112849.png)
 
-### Submitting a job to YARN
\ No newline at end of file
+### Submitting a job to YARN
+1. Do not specify a master on the context defined in the code:
+```java
+    SparkSession spark = SparkSession
+            .builder()
+            .appName("Java Spark SQL Starter !!")
+            .enableHiveSupport()
+            .config("spark.some.config.option", "some-value")
+            .getOrCreate();
+```
+
+2. Package the job with `mvn clean package`, then submit the jar from a locally installed Spark distribution:
+```
+~/opt/spark-2.4.0-bin-hadoop2.7 » bin/spark-submit --master yarn --deploy-mode cluster --class "com.wugui.sparkstarter.SparkHiveNewVersion" /Users/huzekang/study/spark-starter/target/spark-starter-1.0-SNAPSHOT.jar
+```
+3. Open YARN and observe that the job has completed.
+![](https://raw.githubusercontent.com/huzekang/picbed/master/20190626133707.png)
\ No newline at end of file
diff --git a/src/main/java/com/wugui/sparkstarter/SparkHiveNewVersion.java b/src/main/java/com/wugui/sparkstarter/SparkHiveNewVersion.java
index 0fdf924..8c6b36c 100644
--- a/src/main/java/com/wugui/sparkstarter/SparkHiveNewVersion.java
+++ b/src/main/java/com/wugui/sparkstarter/SparkHiveNewVersion.java
@@ -15,9 +15,10 @@ public static void main(String[] args) {
         // Define the Spark context
         SparkSession spark = SparkSession
                 .builder()
-                // To submit to a remote Spark cluster, use spark://host:port
+                // To submit the job as a jar to a remote Spark cluster, use spark://host:port
                 // .master("spark://10.0.0.50:7077")
-                // To submit to a remote Spark cluster, use local
+                // Use local when testing in the IDE.
+                // Do not set a master when the job is submitted as a jar to YARN.
                 .master("local")
                 .appName("Java Spark SQL Starter !!")
                 .enableHiveSupport()
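
As a quick reference for the pattern this patch documents, below is a minimal sketch (not part of the patch or the repository) of an entry point that leaves the master unset so that `spark-submit --master yarn --deploy-mode cluster` can supply it, while still allowing a local run from the IDE. The class name `YarnReadySparkJob` and the `spark.starter.local` system property are illustrative assumptions, not names from this project.

```java
import org.apache.spark.sql.SparkSession;

// Illustrative sketch only: the class name and the "spark.starter.local"
// system property are assumptions, not code from this repository.
public class YarnReadySparkJob {

    public static void main(String[] args) {
        SparkSession.Builder builder = SparkSession
                .builder()
                .appName("Java Spark SQL Starter !!")
                .enableHiveSupport()
                .config("spark.some.config.option", "some-value");

        // Pin the master only for local IDE runs; when the jar is submitted with
        // `spark-submit --master yarn --deploy-mode cluster`, the master is taken
        // from the submit command instead of being hard-coded here.
        if (Boolean.getBoolean("spark.starter.local")) {
            builder = builder.master("local[*]");
        }

        SparkSession spark = builder.getOrCreate();

        // Minimal sanity check that the session is usable.
        spark.sql("SHOW DATABASES").show();

        spark.stop();
    }
}
```

Run it from the IDE with `-Dspark.starter.local=true`, or package and submit it exactly as in step 2 of the README section above; in the YARN case the master comes from the spark-submit command line rather than the code.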