public class SQLUtils
extends Object
| Constructor and Description |
|---|
| SQLUtils() |
| Modifier and Type | Method and Description |
|---|---|
| static Dataset<Row> | createDF(RDD<byte[]> rdd, StructType schema, SparkSession sparkSession) |
| static StructField | createStructField(String name, String dataType, boolean nullable) |
| static StructType | createStructType(scala.collection.Seq<StructField> fields) |
| static Dataset<Row> | dapply(Dataset<Row> df, byte[] func, byte[] packageNames, Object[] broadcastVars, StructType schema) — The helper function for dapply() on the R side. |
| static Object[][] | dfToCols(Dataset<Row> df) |
| static JavaRDD<byte[]> | dfToRowRDD(Dataset<Row> df) |
| static Dataset<Row> | gapply(RelationalGroupedDataset gd, byte[] func, byte[] packageNames, Object[] broadcastVars, StructType schema) — The helper function for gapply() on the R side. |
| static JavaSparkContext | getJavaSparkContext(SparkSession spark) |
| static SparkSession | getOrCreateSparkSession(JavaSparkContext jsc, java.util.Map<Object,Object> sparkConfigMap, boolean enableHiveSupport) |
| static java.util.Map<String,String> | getSessionConf(SparkSession spark) |
| static DataType | getSQLDataType(String dataType) |
| static String[] | getTableNames(SparkSession sparkSession, String databaseName) |
| static Dataset<Row> | getTables(SparkSession sparkSession, String databaseName) |
| static Dataset<Row> | loadDF(SparkSession sparkSession, String source, java.util.Map<String,String> options) |
| static Dataset<Row> | loadDF(SparkSession sparkSession, String source, StructType schema, java.util.Map<String,String> options) |
| static Object | readSqlObject(java.io.DataInputStream dis, char dataType) |
| static SaveMode | saveMode(String mode) |
| static StructType | SERIALIZED_R_DATA_SCHEMA() |
| static void | setSparkContextSessionConf(SparkSession spark, java.util.Map<Object,Object> sparkConfigMap) |
| static boolean | writeSqlObject(java.io.DataOutputStream dos, Object obj) |
public static SparkSession getOrCreateSparkSession(JavaSparkContext jsc, java.util.Map<Object,Object> sparkConfigMap, boolean enableHiveSupport)
public static void setSparkContextSessionConf(SparkSession spark, java.util.Map<Object,Object> sparkConfigMap)
public static java.util.Map<String,String> getSessionConf(SparkSession spark)
public static JavaSparkContext getJavaSparkContext(SparkSession spark)
public static StructType createStructType(scala.collection.Seq<StructField> fields)
public static DataType getSQLDataType(String dataType)
public static StructField createStructField(String name, String dataType, boolean nullable)
public static Dataset<Row> createDF(RDD<byte[]> rdd, StructType schema, SparkSession sparkSession)
public static StructType SERIALIZED_R_DATA_SCHEMA()
public static Dataset<Row> dapply(Dataset<Row> df, byte[] func, byte[] packageNames, Object[] broadcastVars, StructType schema)
Parameters:
df - (undocumented)
func - (undocumented)
packageNames - (undocumented)
broadcastVars - (undocumented)
schema - (undocumented)

public static Dataset<Row> gapply(RelationalGroupedDataset gd, byte[] func, byte[] packageNames, Object[] broadcastVars, StructType schema)
Parameters:
gd - (undocumented)
func - (undocumented)
packageNames - (undocumented)
broadcastVars - (undocumented)
schema - (undocumented)

public static SaveMode saveMode(String mode)
public static Dataset<Row> loadDF(SparkSession sparkSession, String source, java.util.Map<String,String> options)
public static Dataset<Row> loadDF(SparkSession sparkSession, String source, StructType schema, java.util.Map<String,String> options)
public static Object readSqlObject(java.io.DataInputStream dis, char dataType)
public static boolean writeSqlObject(java.io.DataOutputStream dos, Object obj)
public static Dataset<Row> getTables(SparkSession sparkSession, String databaseName)
public static String[] getTableNames(SparkSession sparkSession, String databaseName)