循环语法

This commit is contained in:
2018-11-27 18:45:04 +08:00
parent e9f97b64fa
commit 71891ab92b
3 changed files with 188 additions and 69 deletions

153
pom.xml
View File

@@ -1,73 +1,88 @@
<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd"> <project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
<modelVersion>4.0.0</modelVersion> xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
<groupId>top.fjy8018</groupId> <modelVersion>4.0.0</modelVersion>
<artifactId>helloscala</artifactId> <groupId>top.fjy8018</groupId>
<version>1.0-SNAPSHOT</version> <artifactId>helloscala</artifactId>
<name>${project.artifactId}</name> <version>1.0-SNAPSHOT</version>
<description>My wonderfull scala app</description> <name>${project.artifactId}</name>
<inceptionYear>2010</inceptionYear> <description>My wonderfull scala app</description>
<licenses> <inceptionYear>2010</inceptionYear>
<license> <licenses>
<name>My License</name> <license>
<url>http://....</url> <name>My License</name>
<distribution>repo</distribution> <url>http://....</url>
</license> <distribution>repo</distribution>
</licenses> </license>
</licenses>
<properties> <properties>
<maven.compiler.source>1.8</maven.compiler.source> <maven.compiler.source>1.8</maven.compiler.source>
<maven.compiler.target>1.8</maven.compiler.target> <maven.compiler.target>1.8</maven.compiler.target>
<encoding>UTF-8</encoding> <encoding>UTF-8</encoding>
<scala.version>2.11.8</scala.version> <scala.version>2.11.8</scala.version>
</properties> <spark.version>2.3.1</spark.version>
</properties>
<dependencies> <dependencies>
<dependency> <dependency>
<groupId>org.scala-lang</groupId> <groupId>org.scala-lang</groupId>
<artifactId>scala-library</artifactId> <artifactId>scala-library</artifactId>
<version>${scala.version}</version> <version>${scala.version}</version>
</dependency> </dependency>
</dependencies>
<build> <dependency>
<sourceDirectory>src/main/scala</sourceDirectory> <groupId>org.apache.spark</groupId>
<testSourceDirectory>src/test/scala</testSourceDirectory> <artifactId>spark-core_2.11</artifactId>
<plugins> <version>${spark.version}</version>
<plugin> </dependency>
<groupId>org.scala-tools</groupId>
<artifactId>maven-scala-plugin</artifactId> <!-- https://mvnrepository.com/artifact/org.apache.spark/spark-sql -->
<version>2.15.0</version> <dependency>
<executions> <groupId>org.apache.spark</groupId>
<execution> <artifactId>spark-sql_2.11</artifactId>
<goals> <version>2.3.1</version>
<goal>compile</goal> </dependency>
<goal>testCompile</goal>
</goals> </dependencies>
<configuration>
<args> <build>
<arg>-make:transitive</arg> <sourceDirectory>src/main/scala</sourceDirectory>
<arg>-dependencyfile</arg> <testSourceDirectory>src/test/scala</testSourceDirectory>
<arg>${project.build.directory}/.scala_dependencies</arg> <plugins>
</args> <plugin>
</configuration> <groupId>org.scala-tools</groupId>
</execution> <artifactId>maven-scala-plugin</artifactId>
</executions> <version>2.15.0</version>
</plugin> <executions>
<plugin> <execution>
<groupId>org.apache.maven.plugins</groupId> <goals>
<artifactId>maven-surefire-plugin</artifactId> <goal>compile</goal>
<version>2.6</version> <goal>testCompile</goal>
<configuration> </goals>
<useFile>false</useFile> <configuration>
<disableXmlReport>true</disableXmlReport> <args>
<!-- If you have classpath issue like NoDefClassError,... --> <arg>-dependencyfile</arg>
<!-- useManifestOnlyJar>false</useManifestOnlyJar --> <arg>${project.build.directory}/.scala_dependencies</arg>
<includes> </args>
<include>**/*Test.*</include> </configuration>
<include>**/*Suite.*</include> </execution>
</includes> </executions>
</configuration> </plugin>
</plugin> <plugin>
</plugins> <groupId>org.apache.maven.plugins</groupId>
</build> <artifactId>maven-surefire-plugin</artifactId>
<version>2.6</version>
<configuration>
<useFile>false</useFile>
<disableXmlReport>true</disableXmlReport>
<!-- If you have classpath issue like NoDefClassError,... -->
<!-- useManifestOnlyJar>false</useManifestOnlyJar -->
<includes>
<include>**/*Test.*</include>
<include>**/*Suite.*</include>
</includes>
</configuration>
</plugin>
</plugins>
</build>
</project> </project>

View File

@@ -12,6 +12,17 @@ object FunctionApp {
// 调用无参方法可以不加括号 // 调用无参方法可以不加括号
printHello printHello
printHello("scala")
// 调用带默认值的方法不可以不加括号
defaultParam()
defaultParam("new value")
// 支持自定义参数顺序
println(sum(y = 2,x = 1))
printAllInt(1,2,3,4,5,6)
} }
def add(x:Int,y:Int):Int={ def add(x:Int,y:Int):Int={
@@ -29,4 +40,27 @@ object FunctionApp {
def printHello()= println("Hello world!") def printHello()= println("Hello world!")
/**
* 支持函数重载
* @param n
*/
def printHello(n:String)=println("Hello world " + n)
/**
* 支持参数默认值
* @param n
*/
def defaultParam(n:String="defaultValue")=println("value:" + n)
/**
* 可变参数
* @see org.apache.spark.sql.Dataset#select(org.apache.spark.sql.TypedColumn, org.apache.spark.sql.TypedColumn, org.apache.spark.sql.TypedColumn, org.apache.spark.sql.TypedColumn, org.apache.spark.sql.TypedColumn)
* @param numbers
*/
def printAllInt(numbers:Int*): Unit ={
for (number <- numbers){
print(number +",")
}
}
} }

View File

@@ -0,0 +1,70 @@
package top.fjy8018.scala
/**
* F嘉阳
* 2018-11-26 17:03
*/
object Loop {
  // Sample data for the collection-iteration demos below.
  val list = Array("a","b","c","d")

  def main(args: Array[String]): Unit = {
    // Scala convention: side-effecting 0-arity methods are declared AND
    // invoked with parentheses; the original mixed paren-less calls with
    // `loop2()`, which this normalizes. Output is unchanged.
    printTest()
    println()
    loop1()
    println()
    loop2()
    println()
    loop3()
    println()
    rangeTest1()
    rangeTest2()
    sumTest()
  }

  /** Prints the Range produced by `1 to 10` (inclusive on both ends). */
  def printTest()=print(1 to 10)

  /** Iterates the array with a for-generator, printing each element. */
  def loop1(): Unit ={
    for (l <- list){
      print(l)
    }
  }

  /**
   * Iterates with `foreach` — analogous to a Java lambda expression.
   */
  def loop2(): Unit ={
    list.foreach(l => print(l))
  }

  /**
   * For-comprehension with a guard clause: prints only even numbers in 1..10.
   */
  def loop3(): Unit ={
    for (i<- 1 to 10 if i%2==0){
      print(i+" ")
    }
  }

  /** Ranges with an explicit step; `Range` is the underlying implementation of `to`. */
  def rangeTest1(): Unit ={
    // Step of 2 ascending; step of -3 descending.
    print(Range(1,10,2))
    println()
    println(Range(10,1,-3))
  }

  /**
   * `until` is half-open (excludes the upper bound); `to` is closed (includes it).
   */
  def rangeTest2(): Unit ={
    println(1 until 10)
  }

  /** Sums 1..100 with a while loop (demonstrates mutable-state looping); prints 5050. */
  def sumTest(): Unit ={
    // Deliberate use of var/while here — this file demonstrates loop forms.
    var (num,sum) = (100,0)
    while (num>0){
      sum += num
      num -= 1
    }
    println(sum)
  }
}