
Spark SQL: query a Hive table and write the result to PostgreSQL

Posted: 2018-05-22


import java.sql.DriverManager
import java.util.Properties

import com.zhaopin.tools.{DateUtils, TextUtils}
import org.apache.log4j.{Level, Logger}
import org.apache.spark.sql.SparkSession

/**
  * Created by xiaoyan on 2018/5/21.
  */
object IhrDownloadPg {
  def main(args: Array[String]){
    // Set the Spark log level
    Logger.getLogger("org.apache.spark").setLevel(Level.ERROR)
    System.setProperty("HADOOP_USER_NAME","hive")
    val spark = SparkSession
      .builder()
      .master("local[*]")
      .appName("hive ->> ihr_oper_download")
      .config("spark.sql.warehouse.dir", "spark-warehouse")
      .config("hive.metastore.uris", "thrift://master:9083")
      .enableHiveSupport()
      .getOrCreate()
    import spark.sql

    val dt = if(!args.isEmpty) args(0) else "20180506"
    val yesterday = DateUtils.dateAdd(dt, -1)

    val url = "jdbc:postgresql://192.168.9.222:5432/safe_base"
    Class.forName("org.postgresql.Driver")
    val conn = DriverManager.getConnection(url,"secu_man","secu_man")
    val stmt = conn.createStatement()
    stmt.execute("delete from ihr_oper_download where dt = ‘" + yesterday+"‘")

    // Query the Hive table; spark.sql returns a DataFrame
    val re1 = sql("select oper_date, " +
      "       acct_id, " +
      "       acct_name, " +
      "       module_name, " +
      "       oper_desc, " +
      "       ip, " +
      "       dt"  +
      " from safe.fact_ihr_oper_download t " +
      " where t.dt > ‘20180320‘ and t.dt <"+yesterday+"");

    val connectionProperties = new Properties()
    // Set the database user name, password, and the PostgreSQL driver class
    connectionProperties.put("user", "secu_man");
    connectionProperties.put("password", "secu_man");
    connectionProperties.put("driver", "org.postgresql.Driver");
    re1.toDF().write.mode("append").jdbc(url, "ihr_oper_download", connectionProperties);
    System.err.print("ihr_oper_download insert complete!! ");
  }
}
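The delete above builds its SQL by string concatenation, which is where quoting mistakes tend to creep in. A parameterized statement sidesteps the problem; the following is a minimal sketch using plain JDBC, assuming the same conn and yesterday values as in the code above.

    // Sketch: parameterized delete instead of string concatenation (assumes conn and yesterday from above)
    val deleteStmt = conn.prepareStatement("delete from ihr_oper_download where dt = ?")
    deleteStmt.setString(1, yesterday)  // dt is compared as a string, matching the original code
    deleteStmt.executeUpdate()
    deleteStmt.close()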

  Note: if the target PG table does not exist, Spark will create it automatically by default, and every column will be typed as text.
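If you want the auto-created table to have explicit column types instead of text, the JDBC writer accepts a createTableColumnTypes option (available since Spark 2.2); it only takes effect when Spark itself creates the table. A sketch follows; the concrete VARCHAR lengths are assumptions, not values from the original post.

    // Sketch: tell Spark which column types to use if it has to create the PG table.
    // The VARCHAR lengths below are assumptions; adjust them to your schema.
    re1.write
      .mode("append")
      .option("createTableColumnTypes",
        "oper_date VARCHAR(32), acct_id VARCHAR(64), acct_name VARCHAR(256), " +
        "module_name VARCHAR(256), oper_desc VARCHAR(1024), ip VARCHAR(64), dt VARCHAR(16)")
      .jdbc(url, "ihr_oper_download", connectionProperties)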


Original post: https://www.cnblogs.com/qxyy/p/9073148.html
