This repository was archived by the owner on Jan 12, 2021. It is now read-only.
-
Notifications
You must be signed in to change notification settings - Fork 13
Expand file tree
/
Copy path: Check.java
More file actions
74 lines (46 loc) · 1.63 KB
/
Check.java
File metadata and controls
74 lines (46 loc) · 1.63 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
package org.finra.hiveqlunit.syntax;
import org.apache.commons.io.FileUtils;
import org.apache.spark.SparkConf;
import org.apache.spark.api.java.JavaSparkContext;
import org.apache.spark.sql.hive.HiveContext;
import java.io.File;
import java.io.IOException;
/**
 * Given a column header (table creation) definition and a sql statement, this class can validate
 * that the syntax of the sql statement will parse.
 * It takes approximately 20-30 seconds since it starts an instance of hive server through the
 * spark context.
 */
public class Check {
    HiveContext hc;

    /**
     * Builds a local, single-threaded Spark context and wraps it in a HiveContext.
     * Points the hive metastore warehouse at a throwaway /tmp directory so runs
     * start from a clean slate.
     */
    public Check() {
        SparkConf sparkConf = new SparkConf().setAppName("HiveQLUnit").setMaster("local[1]");
        JavaSparkContext sparkContext = new JavaSparkContext(sparkConf);
        hc = new HiveContext(sparkContext);
        // Blow away hive meta store before execution so tables from prior runs don't leak in.
        try {
            FileUtils.deleteDirectory(new File("/tmp/foo"));
        } catch (IOException e) {
            e.printStackTrace();
        }
        hc.setConf("hive.metastore.warehouse.dir", "/tmp/foo");
    }

    /**
     * Uses a caller-supplied HiveContext instead of constructing one (avoids the
     * expensive Spark startup when a context already exists).
     *
     * @param hiveContext the hive context to run statements against
     */
    public Check(HiveContext hiveContext) {
        hc = hiveContext;
    }

    /**
     * Runs the table creation statement, then the sql statement, and reports whether
     * the sql statement executed without error.
     *
     * @param createTableStatement DDL defining the table(s) the sql statement references;
     *                             an error here propagates to the caller
     * @param sqlStatement         the statement whose syntax is being validated
     * @return true if the sql statement ran without throwing, false otherwise
     */
    public static boolean verify(String createTableStatement, String sqlStatement) {
        boolean isGood = true;
        Check check = new Check();
        check.hc.runSqlHive(createTableStatement);
        // Run the statement exactly once, inside the try, so a parse failure is caught
        // and reported as false. (Previously it was also run an extra, unguarded time
        // before the try, letting exceptions escape and defeating the method's purpose.)
        try {
            check.hc.runSqlHive(sqlStatement);
        } catch (Exception e) {
            e.printStackTrace();
            isGood = false;
        }
        return isGood;
    }
}