diff --git a/pom.xml b/pom.xml
new file mode 100644
index 0000000..645ce4c
--- /dev/null
+++ b/pom.xml
@@ -0,0 +1,140 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<project xmlns="http://maven.apache.org/POM/4.0.0"
+         xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
+    <modelVersion>4.0.0</modelVersion>
+
+    <groupId>groupId</groupId>
+    <artifactId>Learning</artifactId>
+    <version>1.0-SNAPSHOT</version>
+
+    <properties>
+        <junit.version>4.12</junit.version>
+        <scala.version>2.10.6</scala.version>
+        <spark.version>1.6.2</spark.version>
+        <log4j.version>1.2.17</log4j.version>
+        <slf4j.version>1.7.22</slf4j.version>
+    </properties>
+
+    <dependencies>
+        <dependency>
+            <groupId>junit</groupId>
+            <artifactId>junit</artifactId>
+            <version>${junit.version}</version>
+        </dependency>
+
+        <dependency>
+            <groupId>org.scala-lang</groupId>
+            <artifactId>scala-library</artifactId>
+            <version>${scala.version}</version>
+        </dependency>
+
+        <dependency>
+            <groupId>org.slf4j</groupId>
+            <artifactId>jcl-over-slf4j</artifactId>
+            <version>${slf4j.version}</version>
+        </dependency>
+        <dependency>
+            <groupId>org.slf4j</groupId>
+            <artifactId>slf4j-api</artifactId>
+            <version>${slf4j.version}</version>
+        </dependency>
+        <dependency>
+            <groupId>org.slf4j</groupId>
+            <artifactId>slf4j-log4j12</artifactId>
+            <version>${slf4j.version}</version>
+        </dependency>
+        <dependency>
+            <groupId>log4j</groupId>
+            <artifactId>log4j</artifactId>
+            <version>${log4j.version}</version>
+        </dependency>
+
+        <dependency>
+            <groupId>org.apache.spark</groupId>
+            <artifactId>spark-core_2.10</artifactId>
+            <version>${spark.version}</version>
+        </dependency>
+        <dependency>
+            <groupId>org.apache.spark</groupId>
+            <artifactId>spark-sql_2.10</artifactId>
+            <version>${spark.version}</version>
+        </dependency>
+        <dependency>
+            <groupId>org.apache.spark</groupId>
+            <artifactId>spark-streaming_2.10</artifactId>
+            <version>${spark.version}</version>
+        </dependency>
+    </dependencies>
+
+    <build>
+        <resources>
+            <resource>
+                <directory>src/main/java</directory>
+                <includes>
+                    <include>**/*.xml</include>
+                    <include>**/*.properties</include>
+                </includes>
+            </resource>
+            <resource>
+                <directory>src/main/resources</directory>
+                <includes>
+                    <include>**/*.xml</include>
+                    <include>**/*.properties</include>
+                </includes>
+            </resource>
+        </resources>
+
+        <plugins>
+            <plugin>
+                <groupId>org.apache.maven.plugins</groupId>
+                <artifactId>maven-compiler-plugin</artifactId>
+                <version>3.2</version>
+                <configuration>
+                    <source>1.8</source>
+                    <target>1.8</target>
+                    <encoding>UTF-8</encoding>
+                </configuration>
+                <executions>
+                    <execution>
+                        <phase>compile</phase>
+                        <goals>
+                            <goal>compile</goal>
+                        </goals>
+                    </execution>
+                </executions>
+            </plugin>
+            <plugin>
+                <groupId>net.alchim31.maven</groupId>
+                <artifactId>scala-maven-plugin</artifactId>
+                <version>3.2.1</version>
+                <executions>
+                    <execution>
+                        <id>scala-compile-first</id>
+                        <phase>process-resources</phase>
+                        <goals>
+                            <goal>add-source</goal>
+                            <goal>compile</goal>
+                        </goals>
+                    </execution>
+                </executions>
+            </plugin>
+        </plugins>
+    </build>
+</project>
\ No newline at end of file
diff --git a/src/main/resources/log4j.properties b/src/main/resources/log4j.properties
new file mode 100644
index 0000000..1994dbe
--- /dev/null
+++ b/src/main/resources/log4j.properties
@@ -0,0 +1,14 @@
+#log4j.rootLogger=warn,stdout,R
+#log4j.rootLogger=info,stdout,R
+log4j.rootLogger=warn,stdout,R
+log4j.appender.stdout=org.apache.log4j.ConsoleAppender
+log4j.appender.stdout.layout=org.apache.log4j.PatternLayout
+log4j.appender.stdout.layout.ConversionPattern=%d{yyyy-MM-dd HH:mm:ss,SSS}??%5p --- [%t]??%-c(line:%L) : %m%n
+
+log4j.appender.R=org.apache.log4j.RollingFileAppender
+log4j.appender.R.File=spark.log
+log4j.appender.R.MaxFileSize=1024KB
+log4j.appender.R.MaxBackupIndex=1
+
+log4j.appender.R.layout=org.apache.log4j.PatternLayout
+log4j.appender.R.layout.ConversionPattern=%d{yyyy-MM-dd HH:mm:ss,SSS}??%5p --- [%t]??%-c(line:%L) : %m%n
\ No newline at end of file
diff --git a/src/main/scala/BaseTest.scala b/src/main/scala/BaseTest.scala
new file mode 100644
index 0000000..86fe2b8
--- /dev/null
+++ b/src/main/scala/BaseTest.scala
@@ -0,0 +1,27 @@
+
+import org.apache.spark.{SparkConf, SparkContext}
+import org.slf4j.{Logger, LoggerFactory}
+
+object BaseTest {
+  val logger: Logger = LoggerFactory.getLogger(BaseTest.getClass)
+
+  def main(args: Array[String]) {
+    //创建SparkConf()并设置App名称
+    val conf = new SparkConf().setMaster("local[*]").setAppName("BaseTest")
+
+    //创建SparkContext,该对象是提交spark App的入口
+    val sc = new SparkContext(conf)
+
+    //使用sc创建RDD并执行相应的transformation和action
+    val result = sc.textFile("C:\\Users\\Dell\\Desktop\\word.txt").flatMap(_.split(" ")).map((_, 1)).reduceByKey(_ + _, 1).sortBy(_._2, false)
+
+    //停止sc,结束该任务
+    result.collect().foreach(println(_))
+
+    //result.saveAsTextFile("hdfs")
+
+    logger.info("----complete!----")
+
+    sc.stop()
+  }
+}