循环语法 (Loop syntax)

This commit is contained in:
2018-11-27 18:45:04 +08:00
parent e9f97b64fa
commit 71891ab92b
3 changed files with 188 additions and 69 deletions

153
pom.xml
View File

@@ -1,73 +1,88 @@
<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
<modelVersion>4.0.0</modelVersion>
<groupId>top.fjy8018</groupId>
<artifactId>helloscala</artifactId>
<version>1.0-SNAPSHOT</version>
<name>${project.artifactId}</name>
<description>My wonderfull scala app</description>
<inceptionYear>2010</inceptionYear>
<licenses>
<license>
<name>My License</name>
<url>http://....</url>
<distribution>repo</distribution>
</license>
</licenses>
<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
<modelVersion>4.0.0</modelVersion>
<groupId>top.fjy8018</groupId>
<artifactId>helloscala</artifactId>
<version>1.0-SNAPSHOT</version>
<name>${project.artifactId}</name>
<description>My wonderfull scala app</description>
<inceptionYear>2010</inceptionYear>
<licenses>
<license>
<name>My License</name>
<url>http://....</url>
<distribution>repo</distribution>
</license>
</licenses>
<properties>
<maven.compiler.source>1.8</maven.compiler.source>
<maven.compiler.target>1.8</maven.compiler.target>
<encoding>UTF-8</encoding>
<scala.version>2.11.8</scala.version>
</properties>
<properties>
<maven.compiler.source>1.8</maven.compiler.source>
<maven.compiler.target>1.8</maven.compiler.target>
<encoding>UTF-8</encoding>
<scala.version>2.11.8</scala.version>
<spark.version>2.3.1</spark.version>
</properties>
<dependencies>
<dependency>
<groupId>org.scala-lang</groupId>
<artifactId>scala-library</artifactId>
<version>${scala.version}</version>
</dependency>
</dependencies>
<dependencies>
<dependency>
<groupId>org.scala-lang</groupId>
<artifactId>scala-library</artifactId>
<version>${scala.version}</version>
</dependency>
<build>
<sourceDirectory>src/main/scala</sourceDirectory>
<testSourceDirectory>src/test/scala</testSourceDirectory>
<plugins>
<plugin>
<groupId>org.scala-tools</groupId>
<artifactId>maven-scala-plugin</artifactId>
<version>2.15.0</version>
<executions>
<execution>
<goals>
<goal>compile</goal>
<goal>testCompile</goal>
</goals>
<configuration>
<args>
<arg>-make:transitive</arg>
<arg>-dependencyfile</arg>
<arg>${project.build.directory}/.scala_dependencies</arg>
</args>
</configuration>
</execution>
</executions>
</plugin>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-surefire-plugin</artifactId>
<version>2.6</version>
<configuration>
<useFile>false</useFile>
<disableXmlReport>true</disableXmlReport>
<!-- If you have classpath issue like NoDefClassError,... -->
<!-- useManifestOnlyJar>false</useManifestOnlyJar -->
<includes>
<include>**/*Test.*</include>
<include>**/*Suite.*</include>
</includes>
</configuration>
</plugin>
</plugins>
</build>
<dependency>
<groupId>org.apache.spark</groupId>
<artifactId>spark-core_2.11</artifactId>
<version>${spark.version}</version>
</dependency>
<!-- https://mvnrepository.com/artifact/org.apache.spark/spark-sql -->
<dependency>
<groupId>org.apache.spark</groupId>
<artifactId>spark-sql_2.11</artifactId>
<version>2.3.1</version>
</dependency>
</dependencies>
<build>
<sourceDirectory>src/main/scala</sourceDirectory>
<testSourceDirectory>src/test/scala</testSourceDirectory>
<plugins>
<plugin>
<groupId>org.scala-tools</groupId>
<artifactId>maven-scala-plugin</artifactId>
<version>2.15.0</version>
<executions>
<execution>
<goals>
<goal>compile</goal>
<goal>testCompile</goal>
</goals>
<configuration>
<args>
<arg>-dependencyfile</arg>
<arg>${project.build.directory}/.scala_dependencies</arg>
</args>
</configuration>
</execution>
</executions>
</plugin>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-surefire-plugin</artifactId>
<version>2.6</version>
<configuration>
<useFile>false</useFile>
<disableXmlReport>true</disableXmlReport>
<!-- If you have classpath issue like NoDefClassError,... -->
<!-- useManifestOnlyJar>false</useManifestOnlyJar -->
<includes>
<include>**/*Test.*</include>
<include>**/*Suite.*</include>
</includes>
</configuration>
</plugin>
</plugins>
</build>
</project>

View File

@@ -12,6 +12,17 @@ object FunctionApp {
// 调用无参方法可以不加括号
printHello
printHello("scala")
// 调用带默认值的方法不可以不加括号
defaultParam()
defaultParam("new value")
// 支持自定义参数顺序
println(sum(y = 2,x = 1))
printAllInt(1,2,3,4,5,6)
}
def add(x:Int,y:Int):Int={
@@ -29,4 +40,27 @@ object FunctionApp {
/** Prints the fixed greeting "Hello world!" to stdout. */
def printHello(): Unit = println("Hello world!")
/**
 * Overloaded greeting (demonstrates method overloading).
 *
 * @param n name appended to the greeting line
 */
def printHello(n: String): Unit = println(s"Hello world $n")
/**
 * Demonstrates a parameter with a default value.
 *
 * @param n value to print; falls back to "defaultValue" when omitted
 */
def defaultParam(n: String = "defaultValue"): Unit = println(s"value:$n")
/**
 * Varargs demo: prints each number followed by a comma, no trailing newline
 * (the same parameter style Spark's `Dataset#select` overloads use).
 *
 * @param numbers zero or more ints; printing nothing when empty
 */
def printAllInt(numbers: Int*): Unit =
  numbers.foreach(n => print(n + ","))
}

View File

@@ -0,0 +1,70 @@
package top.fjy8018.scala
/**
* F嘉阳
* 2018-11-26 17:03
*/
object Loop {
  val list = Array("a", "b", "c", "d")

  def main(args: Array[String]): Unit = {
    printTest
    println()
    loop1
    println()
    loop2()
    println()
    loop3
    println()
    rangeTest1
    rangeTest2
    sumTest
  }

  /** Prints the inclusive range 1 to 10 (its default toString). */
  def printTest() = print(1 to 10)

  /** Traverses the array by index rather than with a generator. */
  def loop1(): Unit = {
    for (idx <- list.indices) {
      print(list(idx))
    }
  }

  /**
   * Same traversal via a method reference, similar to a Java lambda.
   */
  def loop2(): Unit = {
    list.foreach(print)
  }

  /**
   * Loop with a guard condition: only even numbers are printed,
   * each followed by a space.
   */
  def loop3(): Unit = {
    (1 to 10).withFilter(_ % 2 == 0).foreach(i => print(i + " "))
  }

  def rangeTest1(): Unit = {
    // Range with an explicit step; `to` is built on top of Range.
    print(Range(1, 10, 2))
    println()
    println(Range(10, 1, -3))
  }

  /**
   * `until` is half-open [1, 10) while `to` is closed [1, 10].
   */
  def rangeTest2(): Unit = {
    println(1 until 10)
  }

  /** Sums 1..100 and prints the total (5050), same as the original while loop. */
  def sumTest(): Unit = {
    println((1 to 100).sum)
  }
}