
Commit

add: 1. Add an example of Spark connecting to HBase via Phoenix
Kyofin committed Aug 30, 2019
1 parent 65ef143 commit bbab2df
Showing 2 changed files with 34 additions and 0 deletions.
9 changes: 9 additions & 0 deletions spark-starter/pom.xml
@@ -105,6 +105,10 @@
<artifactId>hadoop-common</artifactId>
<groupId>org.apache.hadoop</groupId>
</exclusion>
<exclusion>
<artifactId>disruptor</artifactId>
<groupId>com.lmax</groupId>
</exclusion>
</exclusions>
</dependency>
<dependency>
@@ -125,6 +129,11 @@
</dependency>


<dependency>
<groupId>org.apache.phoenix</groupId>
<artifactId>phoenix-core</artifactId>
<version>4.14.0-HBase-1.2</version>
</dependency>
<!--<dependency>-->
<!--<groupId>org.apache.hadoop</groupId>-->
<!--<artifactId>hadoop-client</artifactId>-->
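
The phoenix-core artifact added above ships the Phoenix thick JDBC driver (org.apache.phoenix.jdbc.PhoenixDriver) that the new Spark example relies on. As a point of reference, a minimal sketch of using that driver directly over plain JDBC could look like the following; the class name is hypothetical, and the ZooKeeper quorum (cdh01:2181) and table (userInfo) are the same assumptions made in the Spark example:

package com.wugui.sparkstarter.hbase;

import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.ResultSet;
import java.sql.Statement;

/**
 * Hypothetical quick check: query Phoenix directly through the JDBC driver
 * bundled in phoenix-core. The table and quorum are assumptions.
 */
public class PhoenixJdbcQuickCheck {
    public static void main(String[] args) throws Exception {
        // jdbc:phoenix:<zookeeper quorum>[:port] - the same URL format used in the Spark example
        try (Connection conn = DriverManager.getConnection("jdbc:phoenix:cdh01:2181");
             Statement stmt = conn.createStatement();
             ResultSet rs = stmt.executeQuery("SELECT * FROM userInfo LIMIT 10")) {
            while (rs.next()) {
                // Print the first column of each row just to confirm connectivity.
                System.out.println(rs.getString(1));
            }
        }
    }
}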
25 changes: 25 additions & 0 deletions spark-starter/src/main/java/com/wugui/sparkstarter/hbase/SparkHbasePhoenix.java
@@ -0,0 +1,25 @@
package com.wugui.sparkstarter.hbase;

import org.apache.spark.sql.Dataset;
import org.apache.spark.sql.Row;
import org.apache.spark.sql.SparkSession;

/**
* @program: bigdata-starter
* @author: huzekang
* @create: 2019-08-30 18:07
**/
public class SparkHbasePhoenix {
    public static void main(String[] args) {
        // Local SparkSession for the example; use an appropriate master on a real cluster.
        SparkSession sparkSession = SparkSession.builder().appName("SparkHBaseDataFrame").master("local").getOrCreate();

        // Load a Phoenix table through Spark's generic JDBC data source:
        // - "driver" is the Phoenix thick JDBC driver from phoenix-core
        // - "url" points at the ZooKeeper quorum of the HBase cluster (jdbc:phoenix:<quorum>:<port>)
        // - "dbtable" is the Phoenix table to read
        // - the namespace-mapping option is forwarded to the driver as a JDBC connection property
        Dataset<Row> dataset = sparkSession.read()
                .format("jdbc")
                .option("driver", "org.apache.phoenix.jdbc.PhoenixDriver")
                .option("phoenix.schema.isNamespaceMappingEnabled", "true")
                .option("url", "jdbc:phoenix:cdh01:2181")
                .option("dbtable", "userInfo")
                .load();

        // Show the schema that Phoenix exposes for the table.
        dataset.printSchema();
    }
}
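
The example above only reads from Phoenix. Writing a DataFrame back is usually done with the phoenix-spark connector rather than the plain JDBC data source, since Spark's JDBC writer issues INSERT statements while Phoenix expects UPSERT. A minimal sketch, assuming the phoenix-spark artifact of the same version is also added to the pom and that an OUTPUT_TABLE with matching columns already exists in Phoenix:

package com.wugui.sparkstarter.hbase;

import org.apache.spark.sql.Dataset;
import org.apache.spark.sql.Row;
import org.apache.spark.sql.SaveMode;
import org.apache.spark.sql.SparkSession;

/**
 * Hypothetical companion class: read the same Phoenix table and write it
 * back through the phoenix-spark connector. OUTPUT_TABLE is an assumed,
 * pre-created Phoenix table.
 */
public class SparkHbasePhoenixWrite {
    public static void main(String[] args) {
        SparkSession sparkSession = SparkSession.builder().appName("SparkHBaseDataFrameWrite").master("local").getOrCreate();

        // Read the table exactly as in SparkHbasePhoenix.
        Dataset<Row> dataset = sparkSession.read()
                .format("jdbc")
                .option("driver", "org.apache.phoenix.jdbc.PhoenixDriver")
                .option("url", "jdbc:phoenix:cdh01:2181")
                .option("dbtable", "userInfo")
                .load();

        // Write it back through the phoenix-spark data source
        // ("table" and "zkUrl" are the connector's documented option names).
        dataset.write()
                .format("org.apache.phoenix.spark")
                .mode(SaveMode.Overwrite)
                .option("table", "OUTPUT_TABLE")
                .option("zkUrl", "cdh01:2181")
                .save();
    }
}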
