Running Spark in the sbt console


I am new to Spark and am trying to run it in the sbt console. I have a suitable build.sbt (the same code runs fine in IntelliJ) and I can import the Spark packages in code. The problem is that, when run in the sbt console (terminal), the executor just keeps running and never finishes the job.
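
For reference, a build.sbt along these lines is enough for the snippet below (a minimal sketch; the project name and the Scala/Spark versions are illustrative, not my exact file):

name := "SimpleProg"
scalaVersion := "2.12.18"
// Spark core is the only dependency the snippet needs
libraryDependencies += "org.apache.spark" %% "spark-core" % "3.3.2"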

The code:

import org.apache.spark._
// Create a local SparkContext with a single worker thread
val sc = new SparkContext("local[1]", "SimpleProg")
// Distribute a small list as an RDD, then run a job and print the result
val nums = sc.parallelize(List(1, 2, 3, 4))
println(nums.reduce((a, b) => a - b))

Here is the output for the different consoles and approaches (compiled with -Xprint:typer).
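
One way to reproduce these trees is to add the flag to the REPL's compiler options from the sbt shell (a sketch, assuming sbt 1.x slash syntax; this may not match the asker's exact setup):

set Compile / console / scalacOptions += "-Xprint:typer"
console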

In sbt console (where the executor keeps running):

[[syntax trees at end of                     typer]] // <console>
package $line2 {
  object $read extends scala.AnyRef {
    def <init>(): type = {
      $read.super.<init>();
      ()
    };
    object $iw extends scala.AnyRef {
      def <init>(): type = {
        $iw.super.<init>();
        ()
      };
      object $iw extends scala.AnyRef {
        def <init>(): type = {
          $iw.super.<init>();
          ()
        };
        import org.apache.spark._;
        private[this] val sc: org.apache.spark.SparkContext = new org.apache.spark.SparkContext("local[1]", "SimpleProg", spark.this.SparkContext.<init>$default$3, spark.this.SparkContext.<init>$default$4, spark.this.SparkContext.<init>$default$5);
        <stable> <accessor> def sc: org.apache.spark.SparkContext = $iw.this.sc;
        private[this] val nums: org.apache.spark.rdd.RDD[Int] = $iw.this.sc.parallelize[Int](scala.collection.immutable.List.apply[Int](1, 2, 3, 4), $iw.this.sc.parallelize$default$2[Nothing])((ClassTag.Int: scala.reflect.ClassTag[Int]));
        <stable> <accessor> def nums: org.apache.spark.rdd.RDD[Int] = $iw.this.nums
      }
    };
    private[this] val INSTANCE: $line2.$read.type = this;
    <stable> <accessor> def INSTANCE: $line2.$read.type = $read.this.INSTANCE
  }
}

[[syntax trees at end of                     typer]] // <console>
package $line2 {
  object $eval extends scala.AnyRef {
    def <init>(): $line2.$eval.type = {
      $eval.super.<init>();
      ()
    };
    <stable> <accessor> lazy val $result: org.apache.spark.rdd.RDD[Int] = $line2.$read.INSTANCE.$iw.$iw.nums;
    <stable> <accessor> lazy val $print: String = {
      $line2.$read.INSTANCE.$iw.$iw;
      val sb: StringBuilder = new scala.`package`.StringBuilder();
      sb.append("import org.apache.spark._\n");
      sb.append("\u001B[1m\u001B[34msc\u001B[0m: \u001B[1m\u001B[32morg.apache.spark.SparkContext\u001B[0m = ".+(scala.runtime.ScalaRunTime.replStringOf($line2.$read.INSTANCE.$iw.$iw.sc, 1000)));
      sb.append("\u001B[1m\u001B[34mnums\u001B[0m: \u001B[1m\u001B[32morg.apache.spark.rdd.RDD[Int]\u001B[0m = ".+(scala.runtime.ScalaRunTime.replStringOf($line2.$read.INSTANCE.$iw.$iw.nums, 1000)));
      sb.toString()
    }
  }
}

In spark-shell (where the code runs fine):

[[syntax trees at end of                     typer]] // <console>
package $line15 {
  sealed class $read extends AnyRef with java.io.Serializable {
    def <init>(): $line15.$read = {
      $read.super.<init>();
      ()
    };
    sealed class $iw extends AnyRef with java.io.Serializable {
      def <init>(): $iw = {
        $iw.super.<init>();
        ()
      };
      private[this] val $line3$read: $line3.$read.type = $line3.$read.INSTANCE;
      <stable> <accessor> def $line3$read: $line3.$read.type = $iw.this.$line3$read;
      import $iw.this.$line3$read.$iw.$iw.spark;
      import $iw.this.$line3$read.$iw.$iw.sc;
      sealed class $iw extends AnyRef with java.io.Serializable {
        def <init>(): $iw = {
          $iw.super.<init>();
          ()
        };
        import org.apache.spark.SparkContext._;
        sealed class $iw extends AnyRef with java.io.Serializable {
          def <init>(): $iw = {
            $iw.super.<init>();
            ()
          };
          sealed class $iw extends AnyRef with java.io.Serializable {
            def <init>(): $iw = {
              $iw.super.<init>();
              ()
            };
            import $iw.this.$line3$read.$iw.$iw.spark.implicits._;
            sealed class $iw extends AnyRef with java.io.Serializable {
              def <init>(): $iw = {
                $iw.super.<init>();
                ()
              };
              import $iw.this.$line3$read.$iw.$iw.spark.sql;
              sealed class $iw extends AnyRef with java.io.Serializable {
                def <init>(): $iw = {
                  $iw.super.<init>();
                  ()
                };
                import org.apache.spark.sql.functions._;
                sealed class $iw extends AnyRef with java.io.Serializable {
                  def <init>(): $iw = {
                    $iw.super.<init>();
                    ()
                  };
                  sealed class $iw extends AnyRef with java.io.Serializable {
                    def <init>(): $iw = {
                      $iw.super.<init>();
                      ()
                    };
                    private[this] val nums: org.apache.spark.rdd.RDD[Int] = $iw.this.$line3$read.$iw.$iw.sc.parallelize[Int](scala.collection.immutable.List.apply[Int](1, 2, 3, 4), $iw.this.$line3$read.$iw.$iw.sc.parallelize$default$2[Nothing])((ClassTag.Int: scala.reflect.ClassTag[Int]));
                    <stable> <accessor> def nums: org.apache.spark.rdd.RDD[Int] = $iw.this.nums
                  };
                  private[this] val $iw: $iw = new $iw.this.$iw();
                  <stable> <accessor> def $iw: $iw = $iw.this.$iw
                };
                private[this] val $iw: $iw = new $iw.this.$iw();
                <stable> <accessor> def $iw: $iw = $iw.this.$iw
              };
              private[this] val $iw: $iw = new $iw.this.$iw();
              <stable> <accessor> def $iw: $iw = $iw.this.$iw
            };
            private[this] val $iw: $iw = new $iw.this.$iw();
            <stable> <accessor> def $iw: $iw = $iw.this.$iw
          };
          private[this] val $iw: $iw = new $iw.this.$iw();
          <stable> <accessor> def $iw: $iw = $iw.this.$iw
        };
        private[this] val $iw: $iw = new $iw.this.$iw();
        <stable> <accessor> def $iw: $iw = $iw.this.$iw
      };
      private[this] val $iw: $iw = new $iw.this.$iw();
      <stable> <accessor> def $iw: $iw = $iw.this.$iw
    };
    private[this] val $iw: $iw = new $read.this.$iw();
    <stable> <accessor> def $iw: $iw = $read.this.$iw
  };
  object $read extends scala.AnyRef with Serializable {
    def <init>(): type = {
      $read.super.<init>();
      ()
    };
    private[this] val INSTANCE: $line15.$read = new $read();
    <stable> <accessor> def INSTANCE: $line15.$read = $read.this.INSTANCE;
    <synthetic> private def readResolve(): Object = $line15.$read
  }
}

In sbt console, with the code wrapped in a function named temp (this also works):
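
For reference, the wrapper itself looks roughly like this (a sketch: the typer output below only confirms that temp() takes no arguments and returns Unit, so I am assuming its body is the same snippet as above); its typer trees follow:

import org.apache.spark._

// Same logic as before, but inside a def instead of top-level REPL vals
def temp(): Unit = {
  val sc = new SparkContext("local[1]", "SimpleProg")
  val nums = sc.parallelize(List(1, 2, 3, 4))
  println(nums.reduce((a, b) => a - b))
}

temp()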

[[syntax trees at end of                     typer]] // <console>
package $line7 {
  object $read extends scala.AnyRef {
    def <init>(): type = {
      $read.super.<init>();
      ()
    };
    object $iw extends scala.AnyRef {
      def <init>(): type = {
        $iw.super.<init>();
        ()
      };
      object $iw extends scala.AnyRef {
        def <init>(): type = {
          $iw.super.<init>();
          ()
        };
        import org.apache.spark._;
        object $iw extends scala.AnyRef {
          def <init>(): type = {
            $iw.super.<init>();
            ()
          };
          import $line6.$read.INSTANCE.$iw.$iw.$iw.$iw.temp;
          object $iw extends scala.AnyRef {
            def <init>(): type = {
              $iw.super.<init>();
              ()
            };
            private[this] val res1: Unit = $line6.$read.INSTANCE.$iw.$iw.$iw.$iw.temp();
            <stable> <accessor> def res1: Unit = $iw.this.res1
          }
        }
      }
    };
    private[this] val INSTANCE: $line7.$read.type = this;
    <stable> <accessor> def INSTANCE: $line7.$read.type = $read.this.INSTANCE
  }
}

[[syntax trees at end of                     typer]] // <console>
package $line7 {
  object $eval extends scala.AnyRef {
    def <init>(): $line7.$eval.type = {
      $eval.super.<init>();
      ()
    };
    <stable> <accessor> lazy val $result: Unit = $line7.$read.INSTANCE.$iw.$iw.$iw.$iw.res1;
    <stable> <accessor> lazy val $print: String = {
      $line7.$read.INSTANCE.$iw.$iw.$iw.$iw;
      ""
    }
  }
}
Tags: scala, apache-spark, sbt