Skip to content

Commit

Permalink
Fix Spark migration plugin: call SparkSession.builder() without `new`
Browse files Browse the repository at this point in the history
  • Loading branch information
ketkarameya committed Oct 23, 2023
1 parent 304c281 commit ebe91e8
Show file tree
Hide file tree
Showing 4 changed files with 12 additions and 7 deletions.
2 changes: 1 addition & 1 deletion plugins/spark_upgrade/spark_config/java_scala_rules.py
Original file line number Diff line number Diff line change
Expand Up @@ -20,7 +20,7 @@
name="spark_conf_change_java_scala",
query="cs new SparkConf()",
replace_node="*",
replace='new SparkSession.builder().config("spark.sql.legacy.allowUntypedScalaUDF", "true")',
replace='SparkSession.builder().config("spark.sql.legacy.allowUntypedScalaUDF", "true")',
holes={"spark_conf"},
)

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -7,15 +7,15 @@

public class Sample {
public static void main(String[] args) {
SparkSession conf = new SparkSession.builder()
SparkSession conf = SparkSession.builder()
.config("spark.sql.legacy.allowUntypedScalaUDF", "true")
.appName("Sample App")
.getOrCreate();

SparkContext sc = conf.sparkContext();


SparkSession conf1 = new SparkSession.builder()
SparkSession conf1 = SparkSession.builder()
.config("spark.sql.legacy.allowUntypedScalaUDF", "true")
.sparkHome(sparkHome)
.executorEnv("spark.executor.extraClassPath", "test")
Expand All @@ -26,12 +26,14 @@ public static void main(String[] args) {

sc = conf1.sparkContext();

SparkSession conf2 = new SparkSession.builder().config("spark.sql.legacy.allowUntypedScalaUDF", "true").getOrCreate();
SparkSession conf2 = SparkSession.builder().config("spark.sql.legacy.allowUntypedScalaUDF", "true").getOrCreate();
conf2.config("spark.driver.instances:", "100");
conf2.appName(appName);
conf2.sparkHome(sparkHome);

sc2 = conf2.sparkContext();

SparkSession conf3 = SparkSession.builder().config("spark.sql.legacy.allowUntypedScalaUDF", "true").getOrCreate();

}
}
Original file line number Diff line number Diff line change
Expand Up @@ -6,15 +6,15 @@ import org.apache.spark.sql.SparkSession
class Sample {
def main(args: Array[String]): Unit = {

val conf= new SparkSession.builder()
val conf= SparkSession.builder()
.config("spark.sql.legacy.allowUntypedScalaUDF", "true")
.appName("Sample App")
.getOrCreate()

val sc = conf.sparkContext


val conf1 = new SparkSession.builder()
val conf1 = SparkSession.builder()
.config("spark.sql.legacy.allowUntypedScalaUDF", "true")
.master(master)
.all(Seq(("k2", "v2"), ("k3", "v3")))
Expand All @@ -25,7 +25,7 @@ class Sample {
.getOrCreate()
sc1 = conf1.sparkContext

val conf2 = new SparkSession.builder()
val conf2 = SparkSession.builder()
.config("spark.sql.legacy.allowUntypedScalaUDF", "true")
.master(master)
.getOrCreate()
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -28,6 +28,9 @@ public static void main(String[] args) {

sc2 = new JavaSparkContext(conf2);


var conf3 = new SparkConf();


}
}

0 comments on commit ebe91e8

Please sign in to comment.