Flink 1.11 Hive Table API


Dependencies

<properties>
   <project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
   <flink.version>1.11.0</flink.version>
   <java.version>1.8</java.version>
   <scala.binary.version>2.11</scala.binary.version>
   <maven.compiler.source>${java.version}</maven.compiler.source>
   <maven.compiler.target>${java.version}</maven.compiler.target>
   <log4j.version>2.12.1</log4j.version>
   <hive.version>2.1.1-cdh6.2.1</hive.version>
   <hadoop.version>3.0.0-cdh6.2.1</hadoop.version>
</properties>

<repositories>
   <repository>
   	<id>apache.snapshots</id>
   	<name>Apache Development Snapshot Repository</name>
   	<url>https://repository.apache.org/content/repositories/snapshots/</url>
   	<releases>
   		<enabled>false</enabled>
   	</releases>
   	<snapshots>
   		<enabled>true</enabled>
   	</snapshots>
   </repository>
   <!-- CDH repository -->
   <repository>
   	<id>cloudera</id>
   	<url>https://repository.cloudera.com/artifactory/cloudera-repos/</url>
   </repository>
</repositories>

<dependencies>
   <!-- Flink Table API Dependency -->
   <dependency>
   	<groupId>org.apache.flink</groupId>
   	<artifactId>flink-table-planner-blink_${scala.binary.version}</artifactId>
   	<version>${flink.version}</version>
   	<scope>provided</scope>
   </dependency>
   <dependency>
   	<groupId>org.apache.flink</groupId>
   	<artifactId>flink-connector-hive_${scala.binary.version}</artifactId>
   	<version>${flink.version}</version>
   	<scope>provided</scope>
   </dependency>
   <dependency>
   	<groupId>org.apache.flink</groupId>
   	<artifactId>flink-table-api-java-bridge_${scala.binary.version}</artifactId>
   	<version>${flink.version}</version>
   	<scope>provided</scope>
   </dependency>
   <dependency>
   	<groupId>org.apache.flink</groupId>
   	<artifactId>flink-table-common</artifactId>
   	<version>${flink.version}</version>
   	<scope>provided</scope>
   </dependency>
   
   <!-- Hive Dependency -->
   <dependency>
   	<groupId>org.apache.hive</groupId>
   	<artifactId>hive-exec</artifactId>
   	<version>${hive.version}</version>
   	<scope>provided</scope>
   </dependency>
   <dependency>
   	<groupId>org.apache.hadoop</groupId>
   	<artifactId>hadoop-client</artifactId>
   	<version>${hadoop.version}</version>
   	<scope>provided</scope>
   </dependency>

   <!-- Apache Flink dependencies -->
   <dependency>
   	<groupId>org.apache.flink</groupId>
   	<artifactId>flink-java</artifactId>
   	<version>${flink.version}</version>
   	<scope>provided</scope>
   </dependency>
   <dependency>
   	<groupId>org.apache.flink</groupId>
   	<artifactId>flink-streaming-java_${scala.binary.version}</artifactId>
   	<version>${flink.version}</version>
   	<scope>provided</scope>
   </dependency>
   <dependency>
   	<groupId>org.apache.flink</groupId>
   	<artifactId>flink-clients_${scala.binary.version}</artifactId>
   	<version>${flink.version}</version>
   	<scope>provided</scope>
   </dependency>
   <dependency>
   	<groupId>org.apache.flink</groupId>
   	<artifactId>flink-streaming-scala_${scala.binary.version}</artifactId>
   	<version>${flink.version}</version>
   	<scope>provided</scope>
   </dependency>

   <!-- Add logging framework -->
   <dependency>
   	<groupId>org.apache.logging.log4j</groupId>
   	<artifactId>log4j-slf4j-impl</artifactId>
   	<version>${log4j.version}</version>
   	<scope>runtime</scope>
   </dependency>
   <dependency>
   	<groupId>org.apache.logging.log4j</groupId>
   	<artifactId>log4j-api</artifactId>
   	<version>${log4j.version}</version>
   	<scope>runtime</scope>
   </dependency>
   <dependency>
   	<groupId>org.apache.logging.log4j</groupId>
   	<artifactId>log4j-core</artifactId>
   	<version>${log4j.version}</version>
   	<scope>runtime</scope>
   </dependency>
</dependencies>

hive-site.xml

Place the hive-site.xml file in the project's lib directory; the HiveCatalog in the demo below points its hiveConfDir at this directory.
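For reference, the catalog only needs to reach the Hive metastore, so a minimal hive-site.xml can contain just the metastore address. The URI below is a placeholder; substitute your own metastore host and port:

<configuration>
   <property>
      <!-- placeholder: point this at your own Hive metastore -->
      <name>hive.metastore.uris</name>
      <value>thrift://localhost:9083</value>
   </property>
</configuration>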

Demo

import org.apache.flink.table.api.EnvironmentSettings;
import org.apache.flink.table.api.SqlDialect;
import org.apache.flink.table.api.TableEnvironment;
import org.apache.flink.table.catalog.hive.HiveCatalog;

public class HiveTest {
    public static void main(String[] args) throws Exception {
        // batch mode; as of Flink 1.11 the Blink planner is the default
        EnvironmentSettings settings = EnvironmentSettings.newInstance().inBatchMode().build();
        TableEnvironment tableEnv = TableEnvironment.create(settings);

        String catalogName     = "myhive";
        String defaultDatabase = "hive_data";
        String hiveConfDir     = "lib"; // local directory containing hive-site.xml
        String version         = "2.1.1-cdh6.2.1";

        HiveCatalog hiveCatalog = new HiveCatalog(catalogName, defaultDatabase, hiveConfDir, version);
        tableEnv.registerCatalog(catalogName, hiveCatalog);

        // set the HiveCatalog as the current catalog of the session
        tableEnv.useCatalog(catalogName);

        // switch to the Hive dialect so that Hive DDL syntax is accepted
        tableEnv.getConfig().setSqlDialect(SqlDialect.HIVE);
        // note: Hive's "fields terminated by" takes a single character, not a regex
        tableEnv.executeSql("create table test_buck(id int, name string)\n" +
                "row format delimited fields terminated by ','");
        tableEnv.executeSql("insert into table test_buck values (1, 'xiaoming')");
    }
}
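To read the data back, execute a query and print the result: executeSql returns a TableResult (org.apache.flink.table.api.TableResult), and its print() method writes the rows to stdout. A minimal sketch that would go at the end of main, after the insert above:

tableEnv.getConfig().setSqlDialect(SqlDialect.DEFAULT); // queries can use Flink's default dialect
TableResult tableResult = tableEnv.executeSql("select * from test_buck");
tableResult.print();

Keep in mind that in Flink 1.11 executeSql submits the INSERT job asynchronously, so in a real program you may need to wait for that job to finish before the query will see the new rows.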