循环语法

This commit is contained in:
2018-11-27 18:45:04 +08:00
parent e9f97b64fa
commit 71891ab92b
3 changed files with 188 additions and 69 deletions

19
pom.xml
View File

@@ -1,4 +1,5 @@
<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
<modelVersion>4.0.0</modelVersion>
<groupId>top.fjy8018</groupId>
<artifactId>helloscala</artifactId>
@@ -19,6 +20,7 @@
<maven.compiler.target>1.8</maven.compiler.target>
<encoding>UTF-8</encoding>
<scala.version>2.11.8</scala.version>
<spark.version>2.3.1</spark.version>
</properties>
<dependencies>
@@ -27,6 +29,20 @@
<artifactId>scala-library</artifactId>
<version>${scala.version}</version>
</dependency>
<dependency>
<groupId>org.apache.spark</groupId>
<artifactId>spark-core_2.11</artifactId>
<version>${spark.version}</version>
</dependency>
<!-- https://mvnrepository.com/artifact/org.apache.spark/spark-sql -->
<dependency>
<groupId>org.apache.spark</groupId>
<artifactId>spark-sql_2.11</artifactId>
<version>${spark.version}</version>
</dependency>
</dependencies>
<build>
@@ -45,7 +61,6 @@
</goals>
<configuration>
<args>
<arg>-make:transitive</arg>
<arg>-dependencyfile</arg>
<arg>${project.build.directory}/.scala_dependencies</arg>
</args>

View File

@@ -12,6 +12,17 @@ object FunctionApp {
// 调用无参方法可以不加括号
printHello
printHello("scala")
// 调用带默认值的方法不可以不加括号
defaultParam()
defaultParam("new value")
// 支持自定义参数顺序
println(sum(y = 2,x = 1))
printAllInt(1,2,3,4,5,6)
}
def add(x:Int,y:Int):Int={
@@ -29,4 +40,27 @@ object FunctionApp {
/** Prints a fixed greeting to stdout; no-arg overload. */
def printHello(): Unit = {
  println("Hello world!")
}
/**
 * Overload demonstration: greets a specific name.
 *
 * @param n name appended to the greeting
 */
def printHello(n: String): Unit = {
  val greeting = "Hello world " + n
  println(greeting)
}
/**
 * Demonstrates a parameter with a default value.
 *
 * @param n value to print; defaults to "defaultValue" when omitted
 */
def defaultParam(n: String = "defaultValue"): Unit = {
  println("value:" + n)
}
/**
 * Demonstrates varargs: prints every number followed by a comma
 * (including a trailing comma after the last element).
 *
 * @param numbers the values to print
 */
def printAllInt(numbers: Int*): Unit =
  numbers.foreach(n => print(n + ","))
}

View File

@@ -0,0 +1,70 @@
package top.fjy8018.scala
/**
* F嘉阳
* 2018-11-26 17:03
*/
object Loop {

  // Sample data iterated over by loop1 and loop2.
  val list = Array("a", "b", "c", "d")

  /** Entry point: runs each loop/range demonstration in sequence. */
  def main(args: Array[String]): Unit = {
    printTest()
    println()
    loop1()
    println()
    loop2()
    println()
    loop3()
    println()
    rangeTest1()
    rangeTest2()
    sumTest()
  }

  /** Prints the inclusive range 1 to 10 (Range's own toString). */
  def printTest(): Unit = print(1 to 10)

  /** Imperative for-loop over the array elements. */
  def loop1(): Unit = {
    for (l <- list) {
      print(l)
    }
  }

  /**
   * foreach with a function literal — analogous to a Java lambda expression.
   */
  def loop2(): Unit = list.foreach(l => print(l))

  /**
   * for-comprehension with a guard: prints only the even numbers in 1..10.
   */
  def loop3(): Unit = {
    for (i <- 1 to 10 if i % 2 == 0) {
      print(i + " ")
    }
  }

  /**
   * Range with an explicit step; Range is the underlying implementation of `to`.
   */
  def rangeTest1(): Unit = {
    print(Range(1, 10, 2))
    println()
    println(Range(10, 1, -3))
  }

  /**
   * `until` is half-open (excludes the upper bound); `to` is inclusive.
   */
  def rangeTest2(): Unit = println(1 until 10)

  /**
   * Sums 1..100 and prints the result (5050).
   * Uses the idiomatic `.sum` instead of a var/while accumulation loop.
   */
  def sumTest(): Unit = println((1 to 100).sum)
}