Spark Source Code: Read a While and Feel Great, Keep Reading and Keep Feeling Great


Author: yayooo | Published 2019-08-08 17:25

    Source-code study notes:
    Link: https://pan.baidu.com/s/1QtymGBybFA57nLTx5_SOgA
    Extraction code: tf02

    The pom.xml file:

    <?xml version="1.0" encoding="UTF-8"?>
    <project xmlns="http://maven.apache.org/POM/4.0.0"
             xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
             xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
        <modelVersion>4.0.0</modelVersion>
    
        <groupId>com.atguigu.sparkStreaming</groupId>
        <artifactId>sparkStreamingTest</artifactId>
        <version>1.0-SNAPSHOT</version>
    
        <properties>
            <spark.version>2.1.1</spark.version>
            <scala.version>2.11.8</scala.version>
            <log4j.version>1.2.17</log4j.version>
            <slf4j.version>1.7.22</slf4j.version>
        </properties>
    
        <dependencies>
    
            <dependency>
                <groupId>org.apache.spark</groupId>
                <artifactId>spark-yarn_2.11</artifactId>
                <version>2.1.1</version>
            </dependency>
    
            <dependency>
                <groupId>org.apache.spark</groupId>
                <artifactId>spark-streaming-kafka-0-8_2.11</artifactId>
                <version>2.1.1</version>
            </dependency>
            <dependency>
                <groupId>org.apache.kafka</groupId>
                <artifactId>kafka-clients</artifactId>
                <version>0.11.0.0</version>
            </dependency>
    
    
    
            <dependency>
                <groupId>org.apache.spark</groupId>
                <artifactId>spark-streaming_2.11</artifactId>
                <version>2.1.1</version>
            </dependency>
    
    
    
    
            <!-- Logging dependencies go here; every project must include them -->
            <!-- Logging framework shared by all sub-modules -->
            <dependency>
                <groupId>org.slf4j</groupId>
                <artifactId>jcl-over-slf4j</artifactId>
                <version>${slf4j.version}</version>
            </dependency>
            <dependency>
                <groupId>org.slf4j</groupId>
                <artifactId>slf4j-api</artifactId>
                <version>${slf4j.version}</version>
            </dependency>
            <dependency>
                <groupId>org.slf4j</groupId>
                <artifactId>slf4j-log4j12</artifactId>
                <version>${slf4j.version}</version>
            </dependency>
            <!-- The concrete logging implementation -->
            <dependency>
                <groupId>log4j</groupId>
                <artifactId>log4j</artifactId>
                <version>${log4j.version}</version>
            </dependency>
        </dependencies>
        <dependencyManagement>
            <!-- Dependency versions are declared here; each module picks the ones it needs -->
            <dependencies>
                <dependency>
                    <groupId>org.apache.spark</groupId>
                    <artifactId>spark-core_2.11</artifactId>
                    <version>${spark.version}</version>
                    <!-- With <scope>provided</scope>, the jar is absent at runtime and is not packaged into the final artifact; it is only on the compile-time classpath -->
                    <!--<scope>provided</scope>-->
                </dependency>
                <dependency>
                    <groupId>org.apache.spark</groupId>
                    <artifactId>spark-sql_2.11</artifactId>
                    <version>${spark.version}</version>
                    <!--<scope>provided</scope>-->
                </dependency>
                <dependency>
                    <groupId>org.apache.spark</groupId>
                    <artifactId>spark-streaming_2.11</artifactId>
                    <version>${spark.version}</version>
                    <!--<scope>provided</scope>-->
                </dependency>
                <dependency>
                    <groupId>org.apache.spark</groupId>
                    <artifactId>spark-mllib_2.11</artifactId>
                    <version>${spark.version}</version>
                    <!--<scope>provided</scope>-->
                </dependency>
                <dependency>
                    <groupId>org.apache.spark</groupId>
                    <artifactId>spark-graphx_2.11</artifactId>
                    <version>${spark.version}</version>
                    <!--<scope>provided</scope>-->
                </dependency>
                <dependency>
                    <groupId>org.scala-lang</groupId>
                    <artifactId>scala-library</artifactId>
                    <version>${scala.version}</version>
                    <!--<scope>provided</scope>-->
                </dependency>
                <dependency>
                    <groupId>org.apache.spark</groupId>
                    <artifactId>spark-hive_2.11</artifactId>
                    <version>${spark.version}</version>
                    <!--<scope>provided</scope>-->
                </dependency>
                <dependency>
                    <groupId>org.apache.spark</groupId>
                    <artifactId>spark-streaming-kafka-0-10_2.11</artifactId>
                    <version>${spark.version}</version>
                </dependency>
    
            </dependencies>
        </dependencyManagement>
    
        <build>
            <!-- Declare and apply the plugins shared by all sub-modules (a plugin is a concrete implementation attached to the Maven lifecycle phases) -->
            <plugins>
                <plugin>
                    <groupId>org.apache.maven.plugins</groupId>
                    <artifactId>maven-compiler-plugin</artifactId>
                    <version>3.6.1</version>
                    <!-- Compile everything against JDK 1.8 -->
                    <configuration>
                        <source>1.8</source>
                        <target>1.8</target>
                    </configuration>
                </plugin>
            </plugins>
    
            <!-- Only declare the plugins shared by sub-modules here; a sub-module that needs one must declare it itself -->
            <pluginManagement>
                <plugins>
                    <!-- This plugin compiles Scala code into class files -->
                    <plugin>
                        <groupId>net.alchim31.maven</groupId>
                        <artifactId>scala-maven-plugin</artifactId>
                        <version>3.2.2</version>
                        <executions>
                            <execution>
                                <!-- Bind to Maven's compile phase -->
                                <goals>
                                    <goal>compile</goal>
                                    <goal>testCompile</goal>
                                </goals>
                            </execution>
                        </executions>
                    </plugin>
    
                    <!-- Plugin used to package the project -->
                    <plugin>
                        <groupId>org.apache.maven.plugins</groupId>
                        <artifactId>maven-assembly-plugin</artifactId>
                        <version>3.0.0</version>
                        <executions>
                            <execution>
                                <id>make-assembly</id>
                                <phase>package</phase>
                                <goals>
                                    <goal>single</goal>
                                </goals>
                            </execution>
                        </executions>
                    </plugin>
                </plugins>
            </pluginManagement>
        </build>
    
    
    
    
    
    </project>
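
    For reference, below is a minimal sketch of a Spark Streaming job that uses the dependencies above (the direct-stream API from spark-streaming-kafka-0-8_2.11). The broker address hadoop102:9092, the topic myTopic, and the object name KafkaWordCount are placeholders for illustration, not values from the original notes.

    import kafka.serializer.StringDecoder
    import org.apache.spark.SparkConf
    import org.apache.spark.streaming.kafka.KafkaUtils
    import org.apache.spark.streaming.{Seconds, StreamingContext}

    object KafkaWordCount {
      def main(args: Array[String]): Unit = {
        val conf = new SparkConf().setMaster("local[*]").setAppName("KafkaWordCount")
        val ssc = new StreamingContext(conf, Seconds(5))

        // Placeholder broker list and topic; replace with your own cluster settings
        val kafkaParams = Map[String, String]("metadata.broker.list" -> "hadoop102:9092")
        val topics = Set("myTopic")

        // Direct stream (Kafka 0.8 API): one RDD partition per Kafka partition,
        // with offsets tracked by Spark itself rather than by ZooKeeper
        val messages = KafkaUtils.createDirectStream[String, String, StringDecoder, StringDecoder](
          ssc, kafkaParams, topics)

        messages.map(_._2)          // keep only the message value
          .flatMap(_.split(" "))
          .map((_, 1))
          .reduceByKey(_ + _)
          .print()

        ssc.start()
        ssc.awaitTermination()
      }
    }

    Packaged with the scala-maven-plugin and maven-assembly-plugin declared above, the resulting jar can then be submitted with spark-submit (for example with --master yarn, which is what the spark-yarn_2.11 dependency is for).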
    
