diff --git a/src/main/scala/stdlib/Asserts.scala b/src/main/scala/stdlib/Asserts.scala index 9c7dcb30..2ac589c0 100644 --- a/src/main/scala/stdlib/Asserts.scala +++ b/src/main/scala/stdlib/Asserts.scala @@ -36,13 +36,13 @@ object Asserts extends FlatSpec with Matchers with org.scalaexercises.definition * result shouldBe 3 // cannot customize equality, so fastest to compile, no parentheses required * }}} * - * Come on, your turn: true and false values can be compared with should matchers + * Come on, your turn: true and false values can be compared with should matchers: */ def scalaTestAsserts(res0: Boolean) { true should be(res0) } - /** Booleans in asserts can test equality. + /** Booleans in asserts can test equality: */ def booleanAsserts(res0: Int) { val v1 = 4 @@ -51,7 +51,7 @@ object Asserts extends FlatSpec with Matchers with org.scalaexercises.definition /** `shouldEqual` is an assertion. It is from ScalaTest, not from the Scala language. */ } - /** Sometimes we expect you to fill in the values + /** Sometimes we expect you to fill in the values: */ def valuesAsserts(res0: Int) { assert(res0 == 1 + 1) diff --git a/src/main/scala/stdlib/ByNameParameter.scala b/src/main/scala/stdlib/ByNameParameter.scala index cb60ee99..74bd365d 100644 --- a/src/main/scala/stdlib/ByNameParameter.scala +++ b/src/main/scala/stdlib/ByNameParameter.scala @@ -6,7 +6,7 @@ import org.scalatest._ */ object ByNameParameter extends FlatSpec with Matchers with org.scalaexercises.definitions.Section { - /** `() => Int` is a Function type that takes a `Unit` type. `Unit` is known as `void` to a Java programmer. The function returns an `Int`. You can place this as a method parameter so that you can you use it as a block, but still it doesn't look quite right. + /** `() => Int` is a Function type that takes a `Unit` type. `Unit` is known as `void` to a Java programmer. The function returns an `Int`. You can place this as a method parameter so that you can you use it as a block, but still it doesn't look quite right: */ def takesUnitByNameParameter(res0: Either[Throwable, Int]) { def calc(x: () ⇒ Int): Either[Throwable, Int] = { @@ -24,7 +24,7 @@ object ByNameParameter extends FlatSpec with Matchers with org.scalaexercises.de y should be(res0) } - /** A by-name parameter does the same thing as the previous koan but there is no need to explicitly handle `Unit` or `()`. This is used extensively in scala to create blocks. + /** A by-name parameter does the same thing as the previous koan but there is no need to explicitly handle `Unit` or `()`. This is used extensively in Scala to create blocks: */ def byNameParameter(res0: Either[Throwable, Int]) { def calc(x: ⇒ Int): Either[Throwable, Int] = { @@ -46,7 +46,7 @@ object ByNameParameter extends FlatSpec with Matchers with org.scalaexercises.de y should be(res0) } - /** By name parameters can also be used with an *Object* and apply to make interesting block-like calls + /** By name parameters can also be used with `object` and `apply` to make interesting block-like calls: */ def withApplyByNameParameter(res0: String) { object PigLatinizer { diff --git a/src/main/scala/stdlib/CaseClasses.scala b/src/main/scala/stdlib/CaseClasses.scala index 3292dfe7..a85a57ca 100644 --- a/src/main/scala/stdlib/CaseClasses.scala +++ b/src/main/scala/stdlib/CaseClasses.scala @@ -8,7 +8,7 @@ object CaseClasses extends FlatSpec with Matchers with org.scalaexercises.defini /** Scala supports the notion of ''case classes''. 
Case classes are regular classes which export their constructor parameters and which provide a recursive decomposition mechanism via pattern matching. * - * Here is an example for a class hierarchy which consists of an abstract super class `Term` and three concrete case classes `Var`, `Fun`, and `App`. + * Here is an example for a class hierarchy which consists of an abstract superclass `Term` and three concrete case classes `Var`, `Fun`, and `App`: * * {{{ * abstract class Term @@ -32,7 +32,7 @@ object CaseClasses extends FlatSpec with Matchers with org.scalaexercises.defini * println(x.name) * }}} * - * For every case class the Scala compiler generates `equals` method which implements structural equality and a`toString` method. For instance: + * For every case class the Scala compiler generates an `equals` method which implements structural equality and a `toString` method. For instance, * * {{{ * val x1 = Var("x") @@ -218,7 +218,7 @@ object CaseClasses extends FlatSpec with Matchers with org.scalaexercises.defini parts._4 should be(res3) } - /** Case classes are Serializable + /** Case classes are `Serializable`: */ def serializableCaseClasses(res0: Boolean, res1: Boolean) { case class PersonCC(firstName: String, lastName: String) diff --git a/src/main/scala/stdlib/Classes.scala b/src/main/scala/stdlib/Classes.scala index 0b001203..6ada3d42 100644 --- a/src/main/scala/stdlib/Classes.scala +++ b/src/main/scala/stdlib/Classes.scala @@ -14,7 +14,7 @@ object Classes extends FlatSpec with Matchers with org.scalaexercises.definition * override def toString(): String = "(" + x + ", " + y + ")" * } * }}} - * The class defines two variables `x` and `y` and one method: `toString`. + * The class defines two variables `x` and `y` and one method `toString`. * * Classes in Scala are parameterized with constructor arguments. The code above defines two constructor arguments, `x` and `y`; they are both visible in the whole body of the class. In our example they are used to implement `toString`. * @@ -31,7 +31,7 @@ object Classes extends FlatSpec with Matchers with org.scalaexercises.definition * * The program defines an executable application `Classes` in the form of a top-level singleton object with a `main` method. The `main` method creates a new `Point` and stores it in value `pt`. * - * This also demonstrates the use of value parameters in ClassWithValParameter(val name: String), which automatically creates an internal property (val name: String) in the class. + * This also demonstrates the use of value parameters in `ClassWithValParameter(val name: String)`, which automatically creates an internal property `val name: String` in the class: * */ def classWithValParameterClasses(res0: String) { diff --git a/src/main/scala/stdlib/EmptyValues.scala b/src/main/scala/stdlib/EmptyValues.scala index dc2802b9..e4aba087 100644 --- a/src/main/scala/stdlib/EmptyValues.scala +++ b/src/main/scala/stdlib/EmptyValues.scala @@ -17,7 +17,7 @@ object EmptyValues extends FlatSpec with Matchers with org.scalaexercises.defini * * ==Nothing== * - * [[http://www.scala-lang.org/api/current/index.html#scala.Nothing Nothing]] is a trait that is guaranteed to have _zero_ instances. It is a subtype of all other types. It has two main reasons for existing: to provide a return type for methods that **never** return normally (i.e. a method that always throws an exception). The other reason is to provide a type for Nil (explained below). 
+ * [[http://www.scala-lang.org/api/current/index.html#scala.Nothing Nothing]] is a trait that is guaranteed to have zero instances. It is a subtype of all other types. It has two main reasons for existing: to provide a return type for methods that never return normally (i.e. a method that always throws an exception). The other reason is to provide a type for Nil (explained below). * * ==Unit== * @@ -47,25 +47,25 @@ object EmptyValues extends FlatSpec with Matchers with org.scalaexercises.defini None eq None shouldBe res0 } - /** `None` can be converted to a *String*: + /** `None` can be converted to a String: */ def noneToStringEmptyValues(res0: String) { assert(None.toString === res0) } - /** `None` can be converted to an empty list + /** `None` can be converted to an empty list: */ def noneToListEmptyValues(res0: Boolean) { None.toList === Nil shouldBe res0 } - /** `None` is considered empty + /** `None` is considered empty: */ def noneAsEmptyEmptyValues(res0: Boolean) { assert(None.isEmpty === res0) } - /** `None` can be cast `Any`, `AnyRef` or `AnyVal` + /** `None` can be cast to `Any`, `AnyRef` or `AnyVal`: */ def noneToAnyEmptyValues(res0: Boolean, res1: Boolean, res2: Boolean) { None.asInstanceOf[Any] === None shouldBe res0 @@ -73,7 +73,7 @@ object EmptyValues extends FlatSpec with Matchers with org.scalaexercises.defini None.asInstanceOf[AnyVal] === None shouldBe res2 } - /** `None` can be used with `Option` instead of null references + /** `None` can be used with `Option` instead of null references: */ def noneWithOptionEmptyValues(res0: Boolean, res1: Option[String]) { val optional: Option[String] = None @@ -81,7 +81,7 @@ object EmptyValues extends FlatSpec with Matchers with org.scalaexercises.defini assert(optional === res1) } - /** `Some` is the opposite of `None` for `Option` types + /** `Some` is the opposite of `None` for `Option` types: */ def someAgainstNoneEmptyValues(res0: Boolean, res1: Boolean) { val optional: Option[String] = Some("Some Value") diff --git a/src/main/scala/stdlib/Extractors.scala b/src/main/scala/stdlib/Extractors.scala index b067ac55..0600d071 100644 --- a/src/main/scala/stdlib/Extractors.scala +++ b/src/main/scala/stdlib/Extractors.scala @@ -25,8 +25,8 @@ object Extractors extends FlatSpec with Matchers with org.scalaexercises.definit * * There are two syntactic conventions at work here: * - * - The pattern `case Twice(n)` will cause an invocation of `Twice.unapply`, which is used to match even number; the return value of the `unapply` signals whether the argument has matched or not, and any sub-values that can be used for further matching. Here, the sub-value is `z/2` - * - The `apply` method is not necessary for pattern matching. It is only used to mimick a constructor. `val x = Twice(21)` expands to `val x = Twice.apply(21)`. + * - The pattern `case Twice(n)` will cause an invocation of `Twice.unapply`, which is used to match even number; the return value of the `unapply` signals whether the argument has matched or not, and any sub-values that can be used for further matching. Here, the sub-value is `z/2` + * - The `apply` method is not necessary for pattern matching. It is only used to mimick a constructor. `val x = Twice(21)` expands to `val x = Twice.apply(21)`. 
* * The code in the preceding example would be expanded as follows: * @@ -38,9 +38,9 @@ object Extractors extends FlatSpec with Matchers with org.scalaexercises.definit * }}} * The return type of an `unapply` should be chosen as follows: * - * - If it is just a test, return a `Boolean`. For instance `case even()` - * - If it returns a single sub-value of type `T`, return a `Option[T]` - * - If you want to return several sub-values `T1,...,Tn`, group them in an optional tuple `Option[(T1,...,Tn)]`. + * - If it is just a test, return a `Boolean`. For instance `case even()` + * - If it returns a single sub-value of type `T`, return a `Option[T]` + * - If you want to return several sub-values `T1,...,Tn`, group them in an optional tuple `Option[(T1,...,Tn)]`. * * Sometimes, the number of sub-values is fixed and we would like to return a sequence. For this reason, you can also define patterns through `unapplySeq`. The last sub-value type `Tn` has to be `Seq[S]`. This mechanism is used for instance in pattern `case List(x1, ..., xn)`. * @@ -76,7 +76,7 @@ object Extractors extends FlatSpec with Matchers with org.scalaexercises.definit d should be(res3) } - /** Of course an extractor can be used in pattern matching... + /** An extractor can also be used in pattern matching: */ def patternMatchingExtractors(res0: String, res1: String) { class Car(val make: String, val model: String, val year: Short, val topSpeed: Short) @@ -94,7 +94,7 @@ object Extractors extends FlatSpec with Matchers with org.scalaexercises.definit x._2 should be(res1) } - /** Since we aren't really using u and v in the previous pattern matching with can replace them with _. + /** Since we aren't really using `u` and `v` in the previous pattern matching, they can be replaced with `_`: */ def withWildcardExtractors(res0: String, res1: String) { class Car(val make: String, val model: String, val year: Short, val topSpeed: Short) @@ -132,7 +132,7 @@ object Extractors extends FlatSpec with Matchers with org.scalaexercises.definit result should be(res0) } - /** An extractor can be any stable object, including instantiated classes with an unapply method. + /** An extractor can be any stable object, including instantiated classes with an unapply method: */ def anyObjectExtractors(res0: String) { class Car(val make: String, val model: String, val year: Short, val topSpeed: Short) { @@ -149,7 +149,7 @@ object Extractors extends FlatSpec with Matchers with org.scalaexercises.definit result should be(res0) } - /** What is typical is to create a custom extractor in the companion object of the class. In this exercise, we use it as an assignment: + /** A custom extractor is typically created in the companion object of the class. 
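For reference, here is a minimal sketch of what such a companion-object extractor can look like; the `Person` class and its fields are illustrative and not part of the exercise that follows:

{{{
class Person(val name: String, val age: Int)

object Person {
  // unapply turns a Person back into its parts so the class name can be used as a pattern
  def unapply(p: Person): Option[(String, Int)] = Some((p.name, p.age))
}

val p = new Person("Ada", 36)
val Person(name, age) = p // the extractor used as an assignment
// name == "Ada", age == 36
}}}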
In this exercise, we use it as an assignment: */ def asAssignmentExtractors(res0: String, res1: Option[String], res2: String) { class Employee( @@ -174,7 +174,7 @@ object Extractors extends FlatSpec with Matchers with org.scalaexercises.definit c should be(res2) } - /** In this exercise we use the unapply for pattern matching employee objects + /** In this exercise we use `unapply` for pattern matching employee objects: */ def unapplyForPatternMatchingExtractors(res0: String) { class Employee( diff --git a/src/main/scala/stdlib/ForExpressions.scala b/src/main/scala/stdlib/ForExpressions.scala index 4c3710e6..a25487bd 100644 --- a/src/main/scala/stdlib/ForExpressions.scala +++ b/src/main/scala/stdlib/ForExpressions.scala @@ -7,7 +7,7 @@ import org.scalatest._ */ object ForExpressions extends FlatSpec with Matchers with org.scalaexercises.definitions.Section { - /** For expressions can nest, with later generators varying more rapidly than earlier ones: + /** `for` expressions can nest, with later generators varying more rapidly than earlier ones: */ def canBeNestedForExpressions(res0: Int, res1: Int) { val xValues = 1 to 4 @@ -19,7 +19,7 @@ object ForExpressions extends FlatSpec with Matchers with org.scalaexercises.def coordinates(4) should be(res0, res1) } - /** Using `for` we can make more readable code + /** Using `for` we can make more readable code: */ def readableCodeForExpressions(res0: List[Int]) { val nums = List(List(1), List(2), List(3), List(4), List(5)) diff --git a/src/main/scala/stdlib/HigherOrderFunctions.scala b/src/main/scala/stdlib/HigherOrderFunctions.scala index 3ddfd1bd..7c5461b1 100644 --- a/src/main/scala/stdlib/HigherOrderFunctions.scala +++ b/src/main/scala/stdlib/HigherOrderFunctions.scala @@ -9,7 +9,7 @@ object HigherOrderFunctions extends FlatSpec with Matchers with org.scalaexercis /** Meet lambda. Scala provides a relatively lightweight syntax for defining anonymous functions. Anonymous functions in source code are called function literals and at run time, function literals are instantiated into objects called function values. * - * Scala supports first-class functions, which means you can express functions in function literal syntax, i.e.,` (x: Int) => x + 1`, and those functions can be represented by objects, which are called function values. + * Scala supports first-class functions, which means you can express functions in function literal syntax, i.e. ` (x: Int) => x + 1`, and those functions can be represented by objects, which are called function values. 
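To illustrate the point above with a quick sketch (the values are made up), the same increment logic can be written as a function literal bound to a `val`, passed to a higher-order method, or written inline as an anonymous function:

{{{
// a function literal bound to a value; its type is Int => Int
val addOne: Int => Int = (x: Int) => x + 1

addOne(5)                     // 6

// function values are ordinary objects, so they can be passed around
List(1, 2, 3).map(addOne)     // List(2, 3, 4)

// or supplied inline as an anonymous function
List(1, 2, 3).map(x => x + 1) // List(2, 3, 4)
}}}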
*/ def meetLambdaHigherOrderFunctions(res0: Int, res1: Int, res2: Int, res3: Int, res4: Int, res5: Int) { def lambda = { x: Int ⇒ x + 1 } @@ -38,7 +38,7 @@ object HigherOrderFunctions extends FlatSpec with Matchers with org.scalaexercis result5 should be(res5) } - /** An anonymous function can also take on a different look by taking out the brackets + /** An anonymous function can also take on a different look by taking out the brackets: */ def differentLookHigherOrderFunctions(res0: Int) { def lambda = (x: Int) ⇒ x + 1 @@ -68,7 +68,7 @@ object HigherOrderFunctions extends FlatSpec with Matchers with org.scalaexercis result2 should be(res1) } - /** We can take that closure and throw it into a method and it will still hold the environment + /** We can take that closure and throw it into a method and it will still hold the environment: */ def holdEnvironmentHigherOrderFunctions(res0: Int, res1: Int) { def summation(x: Int, y: Int ⇒ Int) = y(x) @@ -123,7 +123,7 @@ object HigherOrderFunctions extends FlatSpec with Matchers with org.scalaexercis /** Function taking another function as a parameter. Helps in composing functions. * - * Hint: a map method applies the function to each element of a list + * Hint: a map method applies the function to each element of a list. */ def functionAsParameterHigherOrderFunctions(res0: List[String], res1: List[String], res2: List[Int]) { def makeUpper(xs: List[String]) = xs map { diff --git a/src/main/scala/stdlib/Implicits.scala b/src/main/scala/stdlib/Implicits.scala index 33bd2085..c94697e0 100644 --- a/src/main/scala/stdlib/Implicits.scala +++ b/src/main/scala/stdlib/Implicits.scala @@ -10,8 +10,8 @@ object Implicits extends FlatSpec with Matchers with org.scalaexercises.definiti /** The actual arguments that are eligible to be passed to an implicit parameter fall into two categories: * - * - First, eligible are all identifiers x that can be accessed at the point of the method call without a prefix and that denote an implicit definition or an implicit parameter. - * - Second, eligible are also all members of companion modules of the implicit parameter's type that are labeled implicit. + * - First, eligible are all identifiers x that can be accessed at the point of the method call without a prefix and that denote an implicit definition or an implicit parameter. + * - Second, eligible are also all members of companion modules of the implicit parameter's type that are labeled implicit. * * In the following example we define a method `sum` which computes the sum of a list of elements using the monoid's `add` and `unit` operations. Please note that implicit values can not be top-level, they have to be members of a template. * @@ -46,7 +46,7 @@ object Implicits extends FlatSpec with Matchers with org.scalaexercises.definiti * abc * }}} * - * Implicits wrap around existing classes to provide extra functionality. This is similar to *monkey patching* in **Ruby**, and *Meta-Programming* in **Groovy**. + * Implicits wrap around existing classes to provide extra functionality. This is similar to monkey patching in Ruby and meta-programming in Groovy. 
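In current Scala the usual shape of such a wrapper is an `implicit class` whose constructor takes the type being extended. The sketch below adds an illustrative `shout` method to `String`; the names are not part of the exercise:

{{{
object StringExtensions {
  // the compiler wraps a String in RichShout whenever .shout is called on it
  implicit class RichShout(val s: String) extends AnyVal {
    def shout: String = s.toUpperCase + "!"
  }
}

import StringExtensions._

"scala".shout // "SCALA!"
}}}

Extending `AnyVal` is optional here; it simply lets the compiler avoid allocating the wrapper in most cases.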
* * Creating a method `isOdd` for `Int`, which doesn't exist: */ diff --git a/src/main/scala/stdlib/InfixPrefixandPostfixOperators.scala b/src/main/scala/stdlib/InfixPrefixandPostfixOperators.scala index fe113eb7..f6a04c90 100644 --- a/src/main/scala/stdlib/InfixPrefixandPostfixOperators.scala +++ b/src/main/scala/stdlib/InfixPrefixandPostfixOperators.scala @@ -8,7 +8,7 @@ import scala.language.postfixOps */ object InfixPrefixandPostfixOperators extends FlatSpec with Matchers with org.scalaexercises.definitions.Section { - /** Any method which takes a single parameter can be used as an infix operator: `a.m(b)` can be written `a m b`. + /** Any method which takes a single parameter can be used as an infix operator: `a.m(b)` can also be written as `a m b`. */ def singleParameterInfixPrefixandPostfixOperators(res0: Int, res1: Int) { val g: Int = 3 @@ -16,7 +16,7 @@ object InfixPrefixandPostfixOperators extends FlatSpec with Matchers with org.sc g.+(4) should be(res1) // same result but not using the infix operator } - /** Infix Operators do NOT work if an object has a method that takes two parameters: + /** Infix operators do NOT work if an object has a method that takes two parameters: */ def notWithTwoInfixPrefixandPostfixOperators(res0: Int, res1: Int) { val g: String = "Check out the big brains on Brad!" @@ -28,14 +28,14 @@ object InfixPrefixandPostfixOperators extends FlatSpec with Matchers with org.sc g.indexOf('o', 7) should be(res1) //indexOf(Char, Int) must use standard java/scala calls } - /** Any method which does not require a parameter can be used as a postfix operator: `a.m` can be written `a m`. + /** Any method which does not require a parameter can be used as a postfix operator: `a.m` can be written as `a m`. * - * For instance `a.##(b)` can be written `a ## b` and `a.!` can be written `a!` + * For instance, `a.+(b)` is equivalent to `a + b` and `a.!` is the same as `a!`. * - * **Postfix operators** have lower precedence than **infix operators**, so: - * - `foo bar baz` means `foo.bar(baz)`. - * - `foo bar baz bam` means `(foo.bar(baz)).bam` - * - `foo bar baz bam bim` means `(foo.bar(baz)).bam(bim)`. + * Postfix operators have lower precedence than infix operators, so: + * - `foo bar baz` means `foo.bar(baz)`. + * - `foo bar baz bam` means `(foo.bar(baz)).bam` + * - `foo bar baz bam bim` means `(foo.bar(baz)).bam(bim)`. */ def postfixOperatorInfixPrefixandPostfixOperators(res0: String) { val g: Int = 31 @@ -50,7 +50,7 @@ object InfixPrefixandPostfixOperators extends FlatSpec with Matchers with org.sc (-g) should be(res0) } - /** Here we create our own prefix operator for our own class. The only identifiers that can be used as prefix operators are `+`, `-`, `!`, and `~`: + /** Here's how to create a prefix operator for our own class. The only identifiers that can be used as prefix operators are `+`, `-`, `!`, and `~`: */ def ourOwnOperatorInfixPrefixandPostfixOperators(res0: String, res1: String) { class Stereo { diff --git a/src/main/scala/stdlib/InfixTypes.scala b/src/main/scala/stdlib/InfixTypes.scala index c1abc965..93c0f154 100644 --- a/src/main/scala/stdlib/InfixTypes.scala +++ b/src/main/scala/stdlib/InfixTypes.scala @@ -10,7 +10,7 @@ object InfixTypes extends FlatSpec with Matchers with org.scalaexercises.definit /** An infix type `T1 op T2` consists of an infix operator `op` which gets applied to two * type operands `T1` and `T2`. The type is equivalent to the type application `op[T1,T2]`. 
* - * The infix operator op may be an arbitrary identifier, except for `*`, which is reserved + * The infix operator `op` may be an arbitrary identifier, except for `*`, which is reserved * as a postfix modifier denoting a repeated parameter type. * * We can make a type infix, meaning that the type can be displayed in complement between two types in order to make a readable declaration: diff --git a/src/main/scala/stdlib/Iterables.scala b/src/main/scala/stdlib/Iterables.scala index 42c29a99..0617bdd9 100644 --- a/src/main/scala/stdlib/Iterables.scala +++ b/src/main/scala/stdlib/Iterables.scala @@ -9,16 +9,16 @@ object Iterables extends FlatSpec with Matchers with org.scalaexercises.definiti /** The next trait from the top in the collections hierarchy is `Iterable`. All methods in this trait are defined in terms of an abstract method, `iterator`, which yields the collection's elements one by one. The `foreach` method from trait `Traversable` is implemented in `Iterable` in terms of `iterator`. Here is the actual implementation: * - * + * {{{ * def foreach[U](f: Elem => U): Unit = { * val it = iterator * while (it.hasNext) f(it.next()) * } + * }}} * + * Quite a few subclasses of `Iterable` override this standard implementation of `foreach` in `Iterable`, because they can provide a more efficient implementation. Remember that `foreach` is the basis of the implementation of all operations in `Traversable`, so its performance matters. * - * Quite a few subclasses of `Iterable` override this standard implementation of foreach in `Iterable`, because they can provide a more efficient implementation. Remember that `foreach` is the basis of the implementation of all operations in `Traversable`, so its performance matters. - * - * Some known iterables are *Sets*, *Lists*, *Vectors*, *Stacks*, and *Streams*. Iterator has two important methods: `hasNext`, which answers whether the iterator has another element available. `next` which will return the next element in the iterator. + * Some common iterables are `Set`, `List`, `Vector`, `Stack` and `Stream`. `Iterator` has two important methods: `hasNext`, which answers whether the iterator has another element available, and `next`, which returns the next element in the iterator. * */ def collectionIterablesIterables(res0: Int) { @@ -29,7 +29,7 @@ object Iterables extends FlatSpec with Matchers with org.scalaexercises.definiti } } - /** `grouped` will return fixed sized Iterable chucks of an Iterable + /** `grouped` will return fixed-size `Iterable` chunks of an `Iterable`: */ def groupedIterables(res0: Int, res1: Int, res2: Int, res3: Int, res4: Int, res5: Int, res6: Int, res7: Int, res8: Int) { val list = List(3, 5, 9, 11, 15, 19, 21, 24, 32) @@ -39,7 +39,7 @@ object Iterables extends FlatSpec with Matchers with org.scalaexercises.definiti it.next() should be(List(res6, res7, res8)) } - /** `sliding` will return an Iterable that shows a sliding window of an Iterable. + /** `sliding` will return an `Iterable` that shows a sliding window of an `Iterable`.
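To make the contrast concrete, here is a small sketch with an arbitrary list: `grouped` partitions the elements into disjoint chunks, while `sliding` moves a window forward one step (or a chosen step) at a time:

{{{
val xs = List(1, 2, 3, 4, 5)

xs.grouped(2).toList    // List(List(1, 2), List(3, 4), List(5))
xs.sliding(2).toList    // List(List(1, 2), List(2, 3), List(3, 4), List(4, 5))
xs.sliding(2, 3).toList // List(List(1, 2), List(4, 5)), a window of 2 with a step of 3
}}}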
*/ def slidingIterables(res0: Int, res1: Int, res2: Int, res3: Int, res4: Int, res5: Int, res6: Int, res7: Int, res8: Int) { val list = List(3, 5, 9, 11, 15, 19, 21, 24, 32) @@ -49,7 +49,7 @@ object Iterables extends FlatSpec with Matchers with org.scalaexercises.definiti it.next() should be(List(res6, res7, res8)) } - /** `sliding` can take the size of the window as well the size of the step during each iteration + /** `sliding` can take the size of the window as well as the size of the step during each iteration: */ def slidingWindowIterables(res0: Int, res1: Int, res2: Int, res3: Int, res4: Int, res5: Int, res6: Int, res7: Int, res8: Int) { val list = List(3, 5, 9, 11, 15, 19, 21, 24, 32) @@ -59,14 +59,14 @@ object Iterables extends FlatSpec with Matchers with org.scalaexercises.definiti it.next() should be(List(res6, res7, res8)) } - /** `takeRight` is the opposite of 'take' in Traversable. It retrieves the last elements of an Iterable. + /** `takeRight` is the opposite of `take` in `Traversable`. It retrieves the last elements of an `Iterable`: */ def takeRightIterables(res0: Int, res1: Int, res2: Int) { val list = List(3, 5, 9, 11, 15, 19, 21, 24, 32) (list takeRight 3) should be(List(res0, res1, res2)) } - /** `dropRight` will drop the number of elements from the right. + /** `dropRight` will drop a specified number of elements from the right: */ def dropRightIterables(res0: Int, res1: Int, res2: Int, res3: Int, res4: Int, res5: Int) { val list = List(3, 5, 9, 11, 15, 19, 21, 24, 32) @@ -75,7 +75,7 @@ object Iterables extends FlatSpec with Matchers with org.scalaexercises.definiti /** `zip` will stitch two iterables into an iterable of pairs of corresponding elements from both iterables. * - * E.g. `Iterable(x1, x2, x3) zip Iterable(y1, y2, y3)` will return `((x1,y1), (x2, y2), (x3, y3))`: + * e.g. `Iterable(x1, x2, x3) zip Iterable(y1, y2, y3)` will return `((x1, y1), (x2, y2), (x3, y3))`: */ def zipIterables(res0: Int, res1: String, res2: Int, res3: String, res4: Int, res5: String) { val xs = List(3, 5, 9) @@ -83,9 +83,9 @@ object Iterables extends FlatSpec with Matchers with org.scalaexercises.definiti (xs zip ys) should be(List((res0, res1), (res2, res3), (res4, res5))) } - /** If two Iterables aren't the same size, then `zip` will only zip what can only be paired. + /** If two `Iterables` aren't the same size, then `zip` will only zip what can be paired. * - * E.g. `Iterable(x1, x2, x3) zip Iterable(y1, y2)` will return `((x1,y1), (x2, y2))` + * e.g. `Iterable(x1, x2, x3) zip Iterable(y1, y2)` will return `((x1, y1), (x2, y2))`: * */ def sameSizeZipIterables(res0: Int, res1: String, res2: Int, res3: String) { val xs = List(3, 5, 9) @@ -94,9 +94,9 @@ object Iterables extends FlatSpec with Matchers with org.scalaexercises.definiti (xs zip ys) should be(List((res0, res1), (res2, res3))) } - /** If two Iterables aren't the same size, then `zipAll` can provide fillers for what it couldn't find a complement for: + /** If two `Iterables` aren't the same size, then `zipAll` can provide fillers for what it couldn't find a complement for. * - * E.g. `Iterable(x1, x2, x3) zipAll (Iterable(y1, y2), x, y)` will return `((x1,y1), (x2, y2), (x3, y)))` + * e.g.
`Iterable(x1, x2, x3) zipAll (Iterable(y1, y2), x, y)` will return `((x1,y1), (x2, y2), (x3, y)))`: */ def zipAllIterables(res0: Int, res1: String, res2: Int, res3: String, res4: Int, res5: Int, res6: String, res7: Int, res8: String, res9: String) { val xs = List(3, 5, 9) @@ -109,14 +109,14 @@ object Iterables extends FlatSpec with Matchers with org.scalaexercises.definiti } - /** `zipWithIndex` will zip an Iterable with its integer index + /** `zipWithIndex` will zip an `Iterable` with its integer index: */ def zipWithIndexIterables(res0: String, res1: String, res2: Int, res3: String) { val xs = List("Manny", "Moe", "Jack") xs.zipWithIndex should be(List((res0, 0), (res1, res2), (res3, 2))) } - /** `sameElements` will return true if the two iterables produce the same elements in the same order: + /** `sameElements` will return true if the two `Iterables` produce the same elements in the same order: */ def sameElementsIterables(res0: Boolean, res1: Boolean, res2: Boolean, res3: Boolean) { val xs = List("Manny", "Moe", "Jack") diff --git a/src/main/scala/stdlib/Lists.scala b/src/main/scala/stdlib/Lists.scala index 88e16198..3523f7f4 100644 --- a/src/main/scala/stdlib/Lists.scala +++ b/src/main/scala/stdlib/Lists.scala @@ -7,9 +7,9 @@ import org.scalatest._ */ object Lists extends FlatSpec with Matchers with org.scalaexercises.definitions.Section { - /** Scala Lists are quite similar to arrays which means, all the elements of a list have the same type but there are two important differences. First, lists are immutable, which means elements of a list cannot be changed by assignment. Second, lists represent a linked list whereas arrays are flat. The type of a list that has elements of type `T` is written as `List[T]`. + /** Scala Lists are quite similar to arrays, which means all the elements of a list have the same type - but there are two important differences. First, lists are immutable, which means elements of a list cannot be changed by assignment. Second, lists represent a linked list whereas arrays are flat. The type of a list that has elements of type `T` is written as `List[T]`. * - * `eq` tests identity (same object) + * `eq` tests identity (same object): */ def similarToArraysLists(res0: Boolean) { val a = List(1, 2, 3) @@ -17,7 +17,7 @@ object Lists extends FlatSpec with Matchers with org.scalaexercises.definitions. (a eq b) should be(res0) } - /** `==` tests equality (same content) + /** `==` tests equality (same content): */ def sameContentLists(res0: Boolean) { val a = List(1, 2, 3) @@ -25,7 +25,7 @@ object Lists extends FlatSpec with Matchers with org.scalaexercises.definitions. (a == b) should be(res0) } - /** Nil lists are identical, even of different types + /** Nil lists are identical, even of different types: */ def nilListsLists(res0: Boolean, res1: Boolean, res2: Boolean, res3: Boolean, res4: Boolean, res5: Boolean) { val a: List[String] = Nil @@ -41,15 +41,14 @@ object Lists extends FlatSpec with Matchers with org.scalaexercises.definitions. (a eq b) should be(res5) } - /** Lists are easily created + /** Lists can be easily created: */ def easilyCreatedLists(res0: Int, res1: Int, res2: Int) { val a = List(1, 2, 3) a should equal(List(res0, res1, res2)) } - /** Lists can be accessed via head, headOption and tail. - * Accessing List via `head` is unsafe and may result in a IndexOutOfBoundsException + /** Lists can be accessed via `head`, `headOption` and `tail`. Accessing a list via `head` is unsafe and may result in a `IndexOutOfBoundsException`. 
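A short sketch of the safe and unsafe variants (the list is illustrative): `headOption` wraps the result in an `Option`, so an empty list yields `None` instead of throwing:

{{{
val xs = List(1, 2, 3)
val empty = List.empty[Int]

xs.head          // 1
xs.tail          // List(2, 3)
xs.headOption    // Some(1)
empty.headOption // None, whereas empty.head would throw an exception
}}}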
*/ def headAndTailLists(res0: Int, res1: Int, res2: Int) { val a = List(1, 2, 3) @@ -57,7 +56,7 @@ object Lists extends FlatSpec with Matchers with org.scalaexercises.definitions. a.tail should equal(List(res1, res2)) } - /** Lists can be accessed by position + /** Lists can be accessed by position: */ def byPositionLists(res0: Int, res1: Int, res2: Int) { val a = List(1, 3, 5, 7, 9) @@ -70,7 +69,7 @@ object Lists extends FlatSpec with Matchers with org.scalaexercises.definitions. } } - /** Lists are immutable + /** Lists are immutable: */ def areImmutableLists(res0: Int, res1: Int, res2: Int, res3: Int) { val a = List(1, 3, 5, 7, 9) @@ -80,7 +79,7 @@ object Lists extends FlatSpec with Matchers with org.scalaexercises.definitions. b should equal(List(res0, res1, res2, res3)) } - /** Lists have many useful methods + /** Lists have many useful utility methods: */ def usefulMethodsLists(res0: Int, res1: List[Int], res2: List[Int], res3: List[Int]) { val a = List(1, 3, 5, 7, 9) @@ -98,7 +97,7 @@ object Lists extends FlatSpec with Matchers with org.scalaexercises.definitions. a.filter { v ⇒ v % 3 == 0 } should equal(res3) } - /** Functions over lists can use _ as shorthand + /** Functions over lists can use _ as shorthand: */ def wildcardAsShorthandLists(res0: Int, res1: Int, res2: Int, res3: Int) { val a = List(1, 2, 3) @@ -112,7 +111,7 @@ object Lists extends FlatSpec with Matchers with org.scalaexercises.definitions. } should equal(List(res3)) } - /** Functions over lists can use `()` instead of `{}` + /** Functions over lists can use `()` instead of `{}`: */ def functionsOverListsLists(res0: Int, res1: Int, res2: Int, res3: Int, res4: Int) { val a = List(1, 2, 3) @@ -120,7 +119,7 @@ object Lists extends FlatSpec with Matchers with org.scalaexercises.definitions. a.filter(_ % 2 != 0) should equal(List(res3, res4)) } - /** Lists can be *reduced* with a mathematical operation + /** Lists can be reduced with a mathematical operation: */ def reducingListsLists(res0: Int, res1: Int) { val a = List(1, 3, 5, 7) @@ -128,7 +127,7 @@ object Lists extends FlatSpec with Matchers with org.scalaexercises.definitions. a.reduceLeft(_ * _) should equal(res1) } - /** Foldleft is like reduce, but with an explicit starting value + /** `foldLeft` is like `reduce`, but with an explicit starting value: */ def foldLeftLists(res0: Int, res1: Int, res2: Int, res3: Int) { val a = List(1, 3, 5, 7) @@ -139,14 +138,14 @@ object Lists extends FlatSpec with Matchers with org.scalaexercises.definitions. 
a.foldLeft(0)(_ * _) should equal(res3) } - /** You can create a list from a range + /** You can create a list from a range: */ def fromRangeLists(res0: List[Int]) { val a = (1 to 5).toList a should be(res0) } - /** Lists reuse their tails + /** Lists reuse their tails: */ def reuseTailsLists(res0: Int, res1: Int, res2: Int, res3: List[Int], res4: List[Int], res5: List[Int]) { val d = Nil diff --git a/src/main/scala/stdlib/LiteralBooleans.scala b/src/main/scala/stdlib/LiteralBooleans.scala index 7db9a536..7da3a570 100644 --- a/src/main/scala/stdlib/LiteralBooleans.scala +++ b/src/main/scala/stdlib/LiteralBooleans.scala @@ -7,7 +7,7 @@ import org.scalatest._ */ object LiteralBooleans extends FlatSpec with Matchers with org.scalaexercises.definitions.Section { - /** Boolean literals are either true or false, using the true or false keyword + /** Boolean literals are either true or false, using the `true` or `false` keyword: */ def literalBooleanLiteralBooleans(res0: Boolean, res1: Boolean, res2: Boolean, res3: Boolean, res4: Boolean, res5: Boolean) { val a = true diff --git a/src/main/scala/stdlib/LiteralNumbers.scala b/src/main/scala/stdlib/LiteralNumbers.scala index 280c75de..5c933a3e 100644 --- a/src/main/scala/stdlib/LiteralNumbers.scala +++ b/src/main/scala/stdlib/LiteralNumbers.scala @@ -7,7 +7,7 @@ import org.scalatest._ */ object LiteralNumbers extends FlatSpec with Matchers with org.scalaexercises.definitions.Section { - /** Integer Literals are 32-bit and can be created from decimal, hexadecimal: + /** Integer literals are 32-bit and can be created from decimals as well as hexadecimals: */ def integerLiteralsLiteralNumbers(res0: Int, res1: Int, res2: Int, res3: Int, res4: Int, res5: Int, res6: Int) { val a = 2 @@ -26,7 +26,7 @@ object LiteralNumbers extends FlatSpec with Matchers with org.scalaexercises.def h should be(res6) //Hint: 30F = 783 } - /** Long Literals are 64 bit, are specified by appending an `L` or `l` at the end: + /** Long literals are 64-bit. They are specified by appending an `L` or `l` at the end of the declaration: */ def longLiteralsLiteralNumbers(res0: Long, res1: Long, res2: Long, res3: Long, res4: Long, res5: Long, res6: Long) { val a = 2L @@ -46,9 +46,9 @@ object LiteralNumbers extends FlatSpec with Matchers with org.scalaexercises.def h should be(res6) //Hint: 30F = 783 } - /** Float and Double Literals are IEEE 754 for specific, Float are 32-bit length, Doubles are 64-bit. - * Floats can be coerced using a f or F suffix, and Doubles can be coerced using a d or D suffix. - * Exponent are specified using e or E. + /** Float and Double literals conform to [[https://en.wikipedia.org/wiki/IEEE_floating_point IEEE-754]]. Floats are 32-bit, while doubles are 64-bit. + * Floats can be defined using a f or F suffix, while doubles use a d or D suffix. + * Exponents are specified using e or E. 
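For instance, a few literal forms side by side (a quick sketch, not the exercise values):

{{{
val a = 3.0    // Double by default
val b = 3.0f   // Float, because of the f suffix
val c = 3d     // Double, via the d suffix
val d = 1.5e2  // 150.0, using exponent notation
val e = 1.5e-2 // 0.015
}}}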
*/ def floatsAndDoublesLiteralNumbers(res0: Double, res1: Double, res2: Double, res3: Double, res4: Double, res5: Double, res6: Double, res7: Double, res8: Double) { val a = 3.0 diff --git a/src/main/scala/stdlib/LiteralStrings.scala b/src/main/scala/stdlib/LiteralStrings.scala index 87adede4..595fd3af 100644 --- a/src/main/scala/stdlib/LiteralStrings.scala +++ b/src/main/scala/stdlib/LiteralStrings.scala @@ -7,7 +7,7 @@ import org.scalatest._ */ object LiteralStrings extends FlatSpec with Matchers with org.scalaexercises.definitions.Section { - /** Character Literals are quoted with single quotes: + /** Character literals are quoted with single quotes: */ def characterLiteralsLiteralStrings(res0: String, res1: String) { val a = 'a' @@ -17,7 +17,7 @@ object LiteralStrings extends FlatSpec with Matchers with org.scalaexercises.def b.toString should be(res1) } - /** Character Literals can use hexadecimal Unicode + /** Character literals can use hexadecimal Unicode: */ def characterLiteralsUnicodeLiteralStrings(res0: String) { val c = '\u0061' //unicode for a @@ -25,14 +25,14 @@ object LiteralStrings extends FlatSpec with Matchers with org.scalaexercises.def c.toString should be(res0) } - /** Character Literals can use octal as well + /** Character literals can use octal as well: */ def characterLiteralsOctalLiteralStrings(res0: String) { val d = '\141' //octal for a d.toString should be(res0) } - /** Character Literals can use escape sequences + /** Character literals can use escape sequences: */ def escapeSequenceLiteralStrings(res0: String, res1: String) { val e = '\"' @@ -42,7 +42,7 @@ object LiteralStrings extends FlatSpec with Matchers with org.scalaexercises.def f.toString should be(res1) } - /** One-Line String Literals are surrounded by quotation marks. + /** One-line String literals are surrounded by quotation marks: */ def oneLineLiteralStrings(res0: String) { val a = "To be or not to be" diff --git a/src/main/scala/stdlib/Maps.scala b/src/main/scala/stdlib/Maps.scala index e70dd118..0a2e7967 100644 --- a/src/main/scala/stdlib/Maps.scala +++ b/src/main/scala/stdlib/Maps.scala @@ -11,11 +11,11 @@ object Maps extends FlatSpec with Matchers with org.scalaexercises.definitions.S * * The fundamental operations on maps are similar to those on sets. They are summarized in the following table and fall into the following categories: * - * - Lookup operations `apply`, `get`, `getOrElse`, `contains`, and `isDefinedAt`. These turn maps into partial functions from keys to values. The fundamental lookup method for a map is: `def get(key): Option[Value]`. The operation "`m get key`" tests whether the map contains an association for the given key. If so, it returns the associated value in a `Some`. If no key is defined in the map, get returns `None`. Maps also define an `apply` method that returns the value associated with a given key directly, without wrapping it in an `Option`. If the key is not defined in the map, an exception is raised. - * - Additions and updates `+`, `++`, `updated`, which let you add new bindings to a map or change existing bindings. - * - Removals `-`, `--`, which remove bindings from a map. - * - Subcollection producers `keys`, `keySet`, `keysIterator`, `values`, `valuesIterator`, which return a map's keys and values separately in various forms. - * - Transformations `filterKeys` and `mapValues`, which produce a new map by filtering and transforming bindings of an existing map. + * - Lookup operations `apply`, `get`, `getOrElse`, `contains`, and `isDefinedAt`. 
These turn maps into partial functions from keys to values. The fundamental lookup method for a map is: `def get(key): Option[Value]`. The operation "`m get key`" tests whether the map contains an association for the given key. If so, it returns the associated value in a `Some`. If no key is defined in the map, get returns `None`. Maps also define an `apply` method that returns the value associated with a given key directly, without wrapping it in an `Option`. If the key is not defined in the map, an exception is raised. + * - Additions and updates `+`, `++`, `updated`, which let you add new bindings to a map or change existing bindings. + * - Removals `-`, `--`, which remove bindings from a map. + * - Subcollection producers `keys`, `keySet`, `keysIterator`, `values`, `valuesIterator`, which return a map's keys and values separately in various forms. + * - Transformations `filterKeys` and `mapValues`, which produce a new map by filtering and transforming bindings of an existing map. * * Maps can be created easily: */ @@ -78,8 +78,8 @@ object Maps extends FlatSpec with Matchers with org.scalaexercises.definitions.S myMap("IA") should be(res1) } - /** If a map key is requested using myMap(missingKey) which does not exist a NoSuchElementException will be thrown. - * Default values may be provided using either getOrElse or withDefaultValue for the entire map + /** If a nonexistent map key is requested using `myMap(missingKey)`, a `NoSuchElementException` will be thrown. + * Default values may be provided using either `getOrElse` or `withDefaultValue` for the entire map: */ def defaultValuesMayBeProvidedMaps(res0: String, res1: String) { val myMap = Map("MI" → "Michigan", "OH" → "Ohio", "WI" → "Wisconsin", "IA" → "Iowa") diff --git a/src/main/scala/stdlib/NamedandDefaultArguments.scala b/src/main/scala/stdlib/NamedandDefaultArguments.scala index f7c8769f..cd225932 100644 --- a/src/main/scala/stdlib/NamedandDefaultArguments.scala +++ b/src/main/scala/stdlib/NamedandDefaultArguments.scala @@ -94,7 +94,7 @@ object NamedandDefaultArguments extends FlatSpec with Matchers with org.scalaexe myColor should equal(res0, res1, res2) } - /** Can access class parameters and default arguments if you leave them off + /** Can access class parameters and default arguments if you leave them off: */ def accessClassParametersNamedandDefaultArguments(res0: Int, res1: Int, res2: Int) { val me = new WithClassParameters(10, 20, 30) @@ -103,7 +103,7 @@ object NamedandDefaultArguments extends FlatSpec with Matchers with org.scalaexe myColor should equal(res0, res1, res2) } - /** Can default class parameters and have default arguments too + /** Can default class parameters and have default arguments too: */ def defaultClassArgumentsNamedandDefaultArguments(res0: Int, res1: Int, res2: Int) { val me = new WithClassParametersInClassDefinition() @@ -112,7 +112,7 @@ object NamedandDefaultArguments extends FlatSpec with Matchers with org.scalaexe myColor should equal(res0, res1, res2) } - /** Default parameters can be functional too + /** Default parameters can be functional too: */ def functionalDefaulParametersNamedandDefaultArguments(res0: Int, res1: Int) { def reduce(a: Int, f: (Int, Int) ⇒ Int = _ + _): Int = f(a, a) diff --git a/src/main/scala/stdlib/Objects.scala b/src/main/scala/stdlib/Objects.scala index 0601b056..7c43d568 100644 --- a/src/main/scala/stdlib/Objects.scala +++ b/src/main/scala/stdlib/Objects.scala @@ -7,7 +7,7 @@ import org.scalatest._ */ object Objects extends FlatSpec with Matchers with 
org.scalaexercises.definitions.Section { - /** An object is a singleton. One object, that's it. This object is a replacement of static in Java, and is called upon much in the same way. + /** An object is a singleton. One object, that's it. This object is a replacement of static in Java, and is called upon much in the same way: */ def singletonObjects(res0: String, res1: String, res2: String, res3: String) { object Greeting { @@ -26,7 +26,7 @@ object Objects extends FlatSpec with Matchers with org.scalaexercises.definition Greeting.magyar should be(res3) } - /** Here is a proof that an object is a singleton, and not a static method in a class + /** Here is a proof that an object is a singleton, and not a static method in a class: */ def notStaticMethodObjects(res0: Boolean, res1: Boolean) { object Greeting { @@ -49,7 +49,7 @@ object Objects extends FlatSpec with Matchers with org.scalaexercises.definition x eq z should be(res1) } - /** An object that has the same name as a class is called a companion object of the class, and it is often used to contain factory methods for the class that it complements. + /** An object that has the same name as a class is called a companion object of the class, and it is often used to contain factory methods for the class that it complements: */ def companionObjectObjects(res0: String) { class Movie(val name: String, val year: Short) diff --git a/src/main/scala/stdlib/Options.scala b/src/main/scala/stdlib/Options.scala index eaf56509..dba5dee9 100644 --- a/src/main/scala/stdlib/Options.scala +++ b/src/main/scala/stdlib/Options.scala @@ -30,7 +30,7 @@ object Options extends FlatSpec with Matchers with org.scalaexercises.definition * } * }}} * - * Using `getOrElse` we can provide a default value ("No value") when the optional argument (`None`) does not exist. + * Using `getOrElse` we can provide a default value ("No value") when the optional argument (`None`) does not exist: */ def getOrElseOptions(res0: String, res1: String, res2: String) { val value1 = maybeItWillReturnSomething(true) @@ -74,7 +74,7 @@ object Options extends FlatSpec with Matchers with org.scalaexercises.definition /** An alternative for pattern matching is performing collection style operations. * This is possible because an option could be looked at as a collection with either one or zero elements. * - * One of these operations is `map`. This operation allows us to map the inner value to a different type while preserving the option + * One of these operations is `map`. 
This operation allows us to map the inner value to a different type while preserving the option: */ def mapOptions(res0: Option[Double], res1: Option[Double]) { val number: Option[Int] = Some(3) diff --git a/src/main/scala/stdlib/ParentClasses.scala b/src/main/scala/stdlib/ParentClasses.scala index be8016de..56a2a87b 100644 --- a/src/main/scala/stdlib/ParentClasses.scala +++ b/src/main/scala/stdlib/ParentClasses.scala @@ -44,7 +44,7 @@ object ParentClasses extends FlatSpec with Matchers with org.scalaexercises.defi * //val soldier = new Soldier * }}} * - * A class can be placed inside an abstract class just like in java: + * A class can be placed inside an abstract class just like in Java: */ def abstractClassParentClasses(res0: Int) { abstract class Soldier(val firstName: String, val lastName: String) { diff --git a/src/main/scala/stdlib/PartialFunctions.scala b/src/main/scala/stdlib/PartialFunctions.scala index f0c25830..dc188d94 100644 --- a/src/main/scala/stdlib/PartialFunctions.scala +++ b/src/main/scala/stdlib/PartialFunctions.scala @@ -7,7 +7,7 @@ import org.scalatest._ */ object PartialFunctions extends FlatSpec with Matchers with org.scalaexercises.definitions.Section { - /** A partial function is a `trait` that when implemented can be used as building blocks to determine a solution. The trait `PartialFunction` requires that the method `isDefinedAt` and `apply` be implemented. + /** A partial function is a `trait` that when implemented can be used as building blocks to determine a solution. The trait `PartialFunction` requires that the method `isDefinedAt` and `apply` be implemented: */ def partialFunctionPartialFunctions(res0: Int, res1: Int) { val doubleEvens: PartialFunction[Int, Int] = new PartialFunction[Int, Int] { @@ -30,7 +30,7 @@ object PartialFunctions extends FlatSpec with Matchers with org.scalaexercises.d whatToDo(4) should be(res1) } - /** Case statements are a quick way to create partial functions. When you create a case statement, the `apply` and `isDefinedAt` is created for you. + /** Case statements are a quick way to create partial functions. When you create a case statement, the `apply` and `isDefinedAt` methods are created automatically. 
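A sketch of that shorthand, mirroring the style of the exercise below:

{{{
// the case literal gives us both apply and isDefinedAt for free
val doubleEvens: PartialFunction[Int, Int] = {
  case x if x % 2 == 0 => x * 2
}

doubleEvens.isDefinedAt(4) // true
doubleEvens.isDefinedAt(3) // false
doubleEvens(4)             // 8
}}}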
*/ def caseStatementsPartialFunctions(res0: Int, res1: Int) { //These case statements are called case statements with guards @@ -46,7 +46,7 @@ object PartialFunctions extends FlatSpec with Matchers with org.scalaexercises.d whatToDo(4) should be(res1) } - /** The result of partial functions can have an `andThen` function added to the end of the chain + /** The result of partial functions can have an `andThen` function added to the end of the chain: */ def andThenPartialFunctions(res0: Int, res1: Int) { //These are called case statements with guards @@ -63,7 +63,7 @@ object PartialFunctions extends FlatSpec with Matchers with org.scalaexercises.d whatToDo(4) should be(res1) } - /** The result of partial functions can have an `andThen` function added to the end of the chain used to continue onto another chain of logic: + /** `andThen` can be used to continue onto another chain of logic: */ def chainOfLogicPartialFunctions(res0: String, res1: String) { val doubleEvens: PartialFunction[Int, Int] = { diff --git a/src/main/scala/stdlib/PartiallyAppliedFunctions.scala b/src/main/scala/stdlib/PartiallyAppliedFunctions.scala index 63dd954e..998cddbb 100644 --- a/src/main/scala/stdlib/PartiallyAppliedFunctions.scala +++ b/src/main/scala/stdlib/PartiallyAppliedFunctions.scala @@ -25,7 +25,7 @@ object PartiallyAppliedFunctions extends FlatSpec with Matchers with org.scalaex sum(4, 5, 6) should be(res1) } - /** Currying is a technique to transform function with multiple parameters into multiple functions which each take one parameter + /** Currying is a technique to transform a function with multiple parameters into multiple functions which each take one parameter: */ def curryingPartiallyAppliedFunctions(res0: Boolean, res1: Int, res2: Int, res3: Int, res4: Int) { def multiply(x: Int, y: Int) = x * y @@ -38,7 +38,7 @@ object PartiallyAppliedFunctions extends FlatSpec with Matchers with org.scalaex multiplyCurriedFour(4) should be(res4) } - /** Currying allows you to create specialized version of generalized function + /** Currying allows you to create specialized versions of generalized functions: */ def specializedVersionPartiallyAppliedFunctions(res0: List[Int], res1: List[Int]) { def customFilter(f: Int ⇒ Boolean)(xs: List[Int]) = { diff --git a/src/main/scala/stdlib/PatternMatching.scala b/src/main/scala/stdlib/PatternMatching.scala index 5dbdeebb..7c94c0b3 100644 --- a/src/main/scala/stdlib/PatternMatching.scala +++ b/src/main/scala/stdlib/PatternMatching.scala @@ -44,7 +44,7 @@ object PatternMatching extends FlatSpec with Matchers with org.scalaexercises.de myStuff should be(res0) } - /** Pattern matching can return complex somethings: + /** Pattern matching can return complex values: */ def returnComplexPatternMatching(res0: Int, res1: Int, res2: Int) { val stuff = "blue" @@ -122,7 +122,7 @@ object PatternMatching extends FlatSpec with Matchers with org.scalaexercises.de // goldilocks("Sitting Alert: bear=Mama, source=chair") should be(res1) // } - /** A backquote can be used to refer to a stable variable in scope to create a case statement. 
This prevents what is called "Variable Shadowing" + /** A backquote can be used to refer to a stable variable in scope to create a case statement - this prevents "variable shadowing": */ def createCaseStatementPatternMatching(res0: String, res1: String, res2: String, res3: String) { val foodItem = "porridge" @@ -152,7 +152,7 @@ object PatternMatching extends FlatSpec with Matchers with org.scalaexercises.de patternEquals(9, 9) should be(res2) } - /** To pattern match against a `List`, the list can be broken out into parts, in this case the head `x` and the tail `xs`. Since the case doesn't terminate in `Nil`, `xs` is interpreted as the rest of the list: + /** To pattern match against a `List`, the list can be split into parts, in this case the head `x` and the tail `xs`. Since the case doesn't terminate in `Nil`, `xs` is interpreted as the rest of the list: */ def againstListsPatternMatching(res0: Int) { val secondElement = List(1, 2, 3) match { diff --git a/src/main/scala/stdlib/Ranges.scala b/src/main/scala/stdlib/Ranges.scala index 3e82f93d..fc6ce8e0 100644 --- a/src/main/scala/stdlib/Ranges.scala +++ b/src/main/scala/stdlib/Ranges.scala @@ -7,7 +7,13 @@ import org.scalatest._ */ object Ranges extends FlatSpec with Matchers with org.scalaexercises.definitions.Section { - /** A range's upper bound is not inclusive: + /** A Range is an ordered sequence of integers that are equally spaced apart. For example, "1, 2, 3" is a range, as is "5, 8, 11, 14". To create a range in Scala, use the predefined methods `to` and `by`. `1 to 3` generates `Range(1, 2, 3)` and `5 to 14 by 3` generates `Range(5, 8, 11, 14)`. + * + * If you want to create a range that is exclusive of its upper limit, then use the convenience method `until` instead of `to`: `1 until 3` generates `Range(1, 2)`. + * + * Ranges are represented in constant space, because they can be defined by just three numbers: their start, their end, and the stepping value. Because of this representation, most operations on ranges are extremely fast. + * + * A range's upper bound is not inclusive: */ def upperNotInclusiveRangeExercises(res0: Int, res1: Int, res2: Int) { val someNumbers = Range(0, 10) @@ -19,7 +25,7 @@ object Ranges extends FlatSpec with Matchers with org.scalaexercises.definitions last should be(res2) } - /** Ranges can be specified using 'until'. + /** Ranges can be specified using 'until': */ def untilRangeExercises(res0: Boolean) { val someNumbers = Range(0, 10) @@ -49,7 +55,7 @@ object Ranges extends FlatSpec with Matchers with org.scalaexercises.definitions someNumbers.contains(34) should be(res2) } - /** Range can specify to include its upper bound value + /** Range can specify to include its upper bound value: */ def specifyUpperRangeExercises(res0: Boolean) { val someNumbers = Range(0, 34).inclusive @@ -57,20 +63,13 @@ object Ranges extends FlatSpec with Matchers with org.scalaexercises.definitions someNumbers.contains(34) should be(res0) } - /** Inclusive ranges can be specified using 'to'. + /** Inclusive ranges can be specified using 'to': */ def inclusiveWithToRangeExercises(res0: Boolean) { val someNumbers = Range(0, 34).inclusive val otherRange = 0 to 34 (someNumbers == otherRange) should be(res0) - - /** A Range is an ordered sequence of integers that are equally spaced apart. For example, "1, 2, 3," is a range, as is "5, 8, 11, 14." To create a range in Scala, use the predefined methods `to` and `by`. `1 to 3` generates `Range(1, 2, 3)` and `5 to 14 by 3` generates `Range(5, 8, 11, 14)`. 
- * - * If you want to create a range that is exclusive of its upper limit, then use the convenience method `until` instead of `to`: `1 until 3` generates `Range(1, 2)`. - * - * Ranges are represented in constant space, because they can be defined by just three numbers: their start, their end, and the stepping value. Because of this representation, most operations on ranges are extremely fast. - */ } } diff --git a/src/main/scala/stdlib/RepeatedParameters.scala b/src/main/scala/stdlib/RepeatedParameters.scala index 4eef66bf..bce99fa9 100644 --- a/src/main/scala/stdlib/RepeatedParameters.scala +++ b/src/main/scala/stdlib/RepeatedParameters.scala @@ -30,7 +30,7 @@ object RepeatedParameters extends FlatSpec with Matchers with org.scalaexercises repeatedParameterMethod(3, "egg", List("a delicious sandwich", "protein", "high cholesterol")) should be(res0) } - /** A repeated parameter can accept a collection,and if you want it expanded, add `:_*` + /** A repeated parameter can accept a collection - if you want it expanded, add `:_*` */ def expandCollectionRepeatedParameters(res0: String) { repeatedParameterMethod(3, "egg", List("a delicious sandwich", "protein", "high cholesterol"): _*) should be(res0) diff --git a/src/main/scala/stdlib/SequencesandArrays.scala b/src/main/scala/stdlib/SequencesandArrays.scala index 2ddb725e..9bdeccf4 100644 --- a/src/main/scala/stdlib/SequencesandArrays.scala +++ b/src/main/scala/stdlib/SequencesandArrays.scala @@ -29,14 +29,14 @@ object SequencesandArrays extends FlatSpec with Matchers with org.scalaexercises l should equal(res0) } - /** You can create a sequence from a for comprehension: + /** You can create a sequence from a `for` loop: */ def fromForComprehensionSequencesandArrays(res0: List[Int]) { val s = for (v ← 1 to 4) yield v s.toList should be(res0) } - /** You can create a sequence from a for comprehension with a condition: + /** You can create a sequence from a `for` loop with a filter: */ def withConditionSequencesandArrays(res0: List[Int]) { val s = for (v ← 1 to 10 if v % 3 == 0) yield v diff --git a/src/main/scala/stdlib/Sets.scala b/src/main/scala/stdlib/Sets.scala index f4444ebc..ba3b3f54 100644 --- a/src/main/scala/stdlib/Sets.scala +++ b/src/main/scala/stdlib/Sets.scala @@ -9,10 +9,10 @@ object Sets extends FlatSpec with Matchers with org.scalaexercises.definitions.S /** `Set`s are `Iterable`s that contain no duplicate elements. The operations on sets are summarized in the following table for general sets and in the table after that for mutable sets. They fall into the following categories: * - * * **Tests** `contains`, `apply`, `subsetOf`. The `contains` method asks whether a set contains a given element. The `apply` method for a set is the same as `contains`, so `set(elem)` is the same as `set contains elem`. That means sets can also be used as test functions that return true for the elements they contain. - * * **Additions** `+` and `++`, which add one or more elements to a set, yielding a new set. - * * **Removals** `-`, `--`, which remove one or more elements from a set, yielding a new set. - * * **Set operations** for union, intersection, and set difference. Each of these operations exists in two forms: alphabetic and symbolic. The alphabetic versions are `intersect`, `union`, and `diff`, whereas the symbolic versions are `&`, `|`, and `&~`. In fact, the `++` that Set inherits from `Traversable` can be seen as yet another alias of `union` or `|`, except that `++` takes a `Traversable` argument whereas `union` and `|` take sets. 
+ * - **Tests**: `contains`, `apply`, `subsetOf`. The `contains` method asks whether a set contains a given element. The `apply` method for a set is the same as `contains`, so `set(elem)` is the same as `set contains elem`. That means sets can also be used as test functions that return true for the elements they contain. + * - **Additions**: `+` and `++`, which add one or more elements to a set, yielding a new set. + * - **Removals**: `-`, `--`, which remove one or more elements from a set, yielding a new set. + * - **Set operations**: union, intersection, and set difference. Each of these operations exists in two forms: alphabetic and symbolic. The alphabetic versions are `intersect`, `union`, and `diff`, whereas the symbolic versions are `&`, `|`, and `&~`. In fact, the `++` that Set inherits from `Traversable` can be seen as yet another alias of `union` or `|`, except that `++` takes a `Traversable` argument whereas `union` and `|` take sets. * * Sets can be created easily: */ diff --git a/src/main/scala/stdlib/Traits.scala b/src/main/scala/stdlib/Traits.scala index 4d492b75..82deed77 100644 --- a/src/main/scala/stdlib/Traits.scala +++ b/src/main/scala/stdlib/Traits.scala @@ -6,7 +6,7 @@ import org.scalatest._ */ object Traits extends FlatSpec with Matchers with org.scalaexercises.definitions.Section { - /** Similar to *interfaces* in Java, traits are used to define object types by specifying the signature of the supported methods. Unlike Java, Scala allows traits to be partially implemented; i.e. it is possible to define default implementations for some methods. In contrast to classes, traits may not have constructor parameters. + /** Similar to interfaces in Java, traits are used to define object types by specifying the signature of the supported methods. Unlike Java, Scala allows traits to be partially implemented; i.e. it is possible to define default implementations for some methods. In contrast to classes, traits may not have constructor parameters. * * Here is an example: * diff --git a/src/main/scala/stdlib/Traversables.scala b/src/main/scala/stdlib/Traversables.scala index 4d1d33fb..b044eafc 100644 --- a/src/main/scala/stdlib/Traversables.scala +++ b/src/main/scala/stdlib/Traversables.scala @@ -9,17 +9,17 @@ import Stream.cons */ object Traversables extends FlatSpec with Matchers with org.scalaexercises.definitions.Section { - /** At the top of the collection hierarchy is trait *Traversable*. Its only abstract operation is `foreach`: + /** At the top of the collection hierarchy is the trait `Traversable`. Its only abstract operation is `foreach`: * * {{{ * def foreach[U](f: Elem => U) * }}} * - * Collection classes that implement *Traversable* just need to define this method; all other methods can be inherited from *Traversable*. + * Collection classes that implement `Traversable` just need to define this method; all other methods can be inherited from `Traversable`. * * The `foreach` method is meant to traverse all elements of the collection, and apply the given operation, `f`, to each element. The type of the operation is `Elem => U`, where `Elem` is the type of the collection's elements and `U` is an arbitrary result type. The invocation of `f` is done for its side effect only; in fact any function result of `f` is discarded by `foreach`. * - * Traversables are the superclass of *Lists*, *Arrays*, *Maps*, *Sets*, *Streams*, and more. The methods involved can be applied to each other in a different type. `++` appends two Traversables together. 
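As a small sketch of `++` combining two different collection types (illustrative values, not the koan's data):

{{{
val set  = Set(1, 2, 3)
val list = List(3, 4)
set ++ list   // a Set containing 1, 2, 3, 4 - duplicates collapse
list ++ set   // a List containing 3, 4, 1, 2, 3 - the result type follows the left operand
}}}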
+ * `Traversable` is the supertype of `List`, `Array`, `Map`, `Set`, `Stream` and more, so the methods below can be used across these different collection types. `++` appends two `Traversable`s together, even when the operands are different kinds of collections. */ def topOfCollectionTraversables(res0: Int, res1: Int) { val set = Set(1, 9, 10, 22) @@ -31,7 +31,7 @@ object Traversables extends FlatSpec with Matchers with org.scalaexercises.defin result2.size should be(res1) } - /** `map` will apply the given function on all elements of a *Traversable* and return a new collection of the result. + /** `map` will apply the given function on all elements of a `Traversable` and return a new collection of the result: */ def mapFunctionTraversables(res0: Option[Int]) { val set = Set(1, 3, 4, 6) @@ -39,14 +39,14 @@ object Traversables extends FlatSpec with Matchers with org.scalaexercises.defin result.lastOption should be(res0) } - /** `flatten` will smash all child *Traversables* within a *Traversable* + /** `flatten` will "pack" all child `Traversables` into a single `Traversable`: */ def flattenFunctionTraversables(res0: List[Int]) { val list = List(List(1), List(2, 3, 4), List(5, 6, 7), List(8, 9, 10)) list.flatten should be(res0) } - /** `flatMap` will not only apply the given function on all elements of a *Traversable*, but all elements within the elements and `flatten` the results: + /** `flatMap` will not only apply the given function on all elements of a `Traversable`, but also on all elements within the elements, and then `flatten` the results: */ def flatMapFunctionTraversables(res0: List[Int]) { val list = List(List(1), List(2, 3, 4), List(5, 6, 7), List(8, 9, 10)) @@ -54,7 +54,7 @@ object Traversables extends FlatSpec with Matchers with org.scalaexercises.defin result should be(res0) } - /** `flatMap` of `Options` will filter out all `None`s and Keep the `Some`s + /** `flatMap` of `Options` will filter out all `None`s but keep the `Some`s: */ def flatMapOfOptionsTraversables(res0: List[Int]) { val list = List(1, 2, 3, 4, 5) @@ -62,7 +62,7 @@ object Traversables extends FlatSpec with Matchers with org.scalaexercises.defin result should be(res0) } - /** `collect` will apply a partial function to all elements of a *Traversable* and will return a different collection. In this exercise, a case fragment is a partial function: + /** `collect` will apply a partial function to all elements of a `Traversable` and return a different collection. In this exercise, a case fragment is a partial function: */ def collectFunctionTraversables(res0: List[Int]) { val list = List(4, 6, 7, 8, 9, 13, 14) @@ -72,7 +72,7 @@ object Traversables extends FlatSpec with Matchers with org.scalaexercises.defin result should be(res0) } - /** `collect` will apply a partial function to all elements of a *Traversable* and will return a different collection.
In this exercise, two case fragments are chained to create a more robust result: + /** Two case fragments can be chained to create a more robust result: */ def collectFunctionIITraversables(res0: List[Int]) { val list = List(4, 6, 7, 8, 9, 13, 14) @@ -86,8 +86,7 @@ object Traversables extends FlatSpec with Matchers with org.scalaexercises.defin result should be(res0) } - /** `foreach` will apply a function to all elements of a *Traversable*, but unlike - * the `map` function, it will not return anything since the return type is `Unit`, which is like a `void` return type in *Java*, *C++* + /** `foreach` will apply a function to all elements of a `Traversable`, but unlike the `map` function, it will not return anything since the return type is `Unit` - an equivalent to a `void` return type in Java/C++: */ def foreachFunctionTraversables(res0: List[Int]) { val list = List(4, 6, 7, 8, 9, 13, 14) @@ -95,7 +94,7 @@ object Traversables extends FlatSpec with Matchers with org.scalaexercises.defin list should be(res0) } - /** `toArray` will convert any *Traversable* to an `Array`, which is a special wrapper around a primitive *Java* array. + /** `toArray` will convert any `Traversable` to an `Array`, which is a special wrapper around a primitive Java array: */ def toArrayFunctionTraversables(res0: Boolean) { val set = Set(4, 6, 7, 8, 9, 13, 14) @@ -103,7 +102,7 @@ object Traversables extends FlatSpec with Matchers with org.scalaexercises.defin result.isInstanceOf[Array[Int]] should be(res0) } - /** `toList` will convert any *Traversable* to a `List`. + /** `toList` will convert any `Traversable` to a `List`. */ def toListFunctionTraversables(res0: Boolean) { val set = Set(4, 6, 7, 8, 9, 13, 14) @@ -112,7 +111,7 @@ object Traversables extends FlatSpec with Matchers with org.scalaexercises.defin result.isInstanceOf[List[_]] should be(res0) } - /** `toList`, as well as other conversion methods like `toSet`, `toArray`, will not convert if the collection type is the same. + /** `toList`, as well as other conversion methods such as `toSet` and `toArray`, will not convert if the collection type is the same: */ def toListFunctionIITraversables(res0: Boolean) { val list = List(5, 6, 7, 8, 9) @@ -120,7 +119,7 @@ object Traversables extends FlatSpec with Matchers with org.scalaexercises.defin result eq list should be(res0) } - /** `toIterable` will convert any *Traversable* to an *Iterable*. This is a base *trait* for all Scala collections that define an iterator method to step through one-by-one the collection's elements. + /** `toIterable` will convert any `Traversable` to an `Iterable`. This is a base `trait` for all Scala collections that define an iterator method to iterate through the collection's elements: */ def toIterableFunctionTraversables(res0: Boolean) { val set = Set(4, 6, 7, 8, 9, 13, 14) @@ -128,7 +127,7 @@ object Traversables extends FlatSpec with Matchers with org.scalaexercises.defin result.isInstanceOf[Iterable[_]] should be(res0) } - /** `toSeq` will convert any *Traversable* to a *Seq* which is an ordered `Iterable` and is the superclass to *List*, *Queues*, and *Vectors*. *Sequences* provide a method apply for indexing. Indices range from 0 up the length of a sequence. + /** `toSeq` will convert any `Traversable` to a `Seq` which is an ordered `Iterable` and the superclass to `List`, `Queue`, and `Vector`. `Sequences` provide a method apply for indexing. 
Indices range from 0 up to the length of a sequence: */ def toSeqFunctionTraversables(res0: Boolean) { val set = Set(4, 6, 7, 8, 9, 13, 14) @@ -136,7 +135,7 @@ object Traversables extends FlatSpec with Matchers with org.scalaexercises.defin result.isInstanceOf[Seq[_]] should be(res0) } - /** `toIndexedSeq` will convert any *Traversable* to an *IndexedSeq* which is an indexed sequence used in *Vectors* and *Strings* + /** `toIndexedSeq` will convert any `Traversable` to an `IndexedSeq` which is an indexed sequence used in `Vectors` and `Strings`: */ def toIndexedSeqFunctionTraversables(res0: Boolean) { val set = Set(4, 6, 7, 8, 9, 13, 14) @@ -144,7 +143,7 @@ object Traversables extends FlatSpec with Matchers with org.scalaexercises.defin result.isInstanceOf[IndexedSeq[_]] should be(res0) } - /** `toStream` will convert any *Traversable* to a `Stream` which is a lazy list where elements are evaluated as they are needed. + /** `toStream` will convert any `Traversable` to a `Stream` which is a lazy list where elements are evaluated as they are needed: */ def toStreamFunctionTraversables(res0: Boolean, res1: Stream[Int]) { val list = List(4, 6, 7, 8, 9, 13, 14) @@ -153,7 +152,7 @@ object Traversables extends FlatSpec with Matchers with org.scalaexercises.defin (result take 3) should be(res1) } - /** `toSet` will convert any *Traversable* to a *Set* which is a collection of unordered, unique values. + /** `toSet` will convert any `Traversable` to a `Set` which is a collection of unordered, unique values: */ def toSetFunctionTraversables(res0: Boolean) { val list = List(4, 6, 7, 8, 9, 13, 14) @@ -161,7 +160,7 @@ object Traversables extends FlatSpec with Matchers with org.scalaexercises.defin result.isInstanceOf[Set[_]] should be(res0) } - /** `toMap` will convert any *Traversable* to a *Map*. How it's used depends on the original collection; if it's a *List* or *Seq*, it should be of parameterized type *Tuple2*. + /** `toMap` will convert any `Traversable` to a `Map`. How it's used depends on the original collection; if it's a `List` or `Seq`, it should be of parameterized type `Tuple2`: */ def toMapFunctionTraversables(res0: Boolean) { val list = List("Phoenix" → "Arizona", "Austin" → "Texas") @@ -169,7 +168,7 @@ object Traversables extends FlatSpec with Matchers with org.scalaexercises.defin result.isInstanceOf[Map[_, _]] should be(res0) } - /** `toMap` will convert a *Set* to a *Map*, it should be of parameterized type *Tuple2*.
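For instance, a small sketch of `toMap` on a list of pairs (illustrative; the koan itself only checks the resulting type):

{{{
val list = List("Phoenix" -> "Arizona", "Austin" -> "Texas")
val m = list.toMap   // Map("Phoenix" -> "Arizona", "Austin" -> "Texas")
m("Austin")          // "Texas"
}}}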
+ /** `toMap` will also convert a `Set` to a `Map`; it should be of parameterized type `Tuple2`: */ def toMapFunctionIITraversables(res0: Boolean) { val set = Set("Phoenix" → "Arizona", "Austin" → "Texas") @@ -177,7 +176,7 @@ object Traversables extends FlatSpec with Matchers with org.scalaexercises.defin result.isInstanceOf[Map[_, _]] should be(res0) } - /** `isEmpty` is pretty self evident + /** `isEmpty` is pretty self-evident: */ def isEmptyFunctionTraversables(res0: Boolean, res1: Boolean) { val map = Map("Phoenix" → "Arizona", "Austin" → "Texas") @@ -187,7 +186,7 @@ object Traversables extends FlatSpec with Matchers with org.scalaexercises.defin set.isEmpty should be(res1) } - /** `nonEmpty` is pretty self evident too + /** `nonEmpty` is pretty self-evident too: */ def nonEmptyFunctionTraversables(res0: Boolean, res1: Boolean) { val map = Map("Phoenix" → "Arizona", "Austin" → "Texas") @@ -197,14 +196,14 @@ object Traversables extends FlatSpec with Matchers with org.scalaexercises.defin set.nonEmpty should be(res1) } - /** `size` provides the size of the traversable + /** `size` provides the size of the traversable: */ def sizeFunctionTraversables(res0: Int) { val map = Map("Phoenix" → "Arizona", "Austin" → "Texas") map.size should be(res0) } - /** `hasDefiniteSize` will return `true` if there is traversable that has a finite end, otherwise `false`. + /** `hasDefiniteSize` will return `true` if the traversable has a finite end, otherwise `false`: */ def hasDefiniteSizeFunctionTraversables(res0: Boolean, res1: Boolean) { val map = Map("Phoenix" → "Arizona", "Austin" → "Texas") @@ -214,14 +213,14 @@ object Traversables extends FlatSpec with Matchers with org.scalaexercises.defin stream.hasDefiniteSize should be(res1) } - /** `head` will return the first element of an ordered collection, or some random element if order is not defined like in a *Set* or *Map* + /** `head` will return the first element of an ordered collection, or some random element if order is not defined like in a `Set` or `Map`: */ def headFunctionTraversables(res0: Int) { val list = List(10, 19, 45, 1, 22) list.head should be(res0) } - /** `headOption` will return the first element as an *Option* of an ordered collection, or some random element if order is not defined. If a first element is not available, then *None* is returned. + /** `headOption` will return the first element as an `Option` of an ordered collection, or some random element if order is not defined. If a first element is not available, then `None` is returned: */ def headOptionFunctionTraversables(res0: Option[Int], res1: Option[Int]) { val list = List(10, 19, 45, 1, 22) @@ -231,14 +230,14 @@ object Traversables extends FlatSpec with Matchers with org.scalaexercises.defin list2.headOption should be(res1) } - /** `last` will return the last element of an ordered collection, or some random element if order is not defined like in a *Set* or *Map*. + /** `last` will return the last element of an ordered collection, or some random element if order is not defined: */ def lastFunctionTraversables(res0: Int) { val list = List(10, 19, 45, 1, 22) list.last should be(res0) } - /** `lastOption` will return the last element as an *Option* of an ordered collection, or some random element if order is not defined. If a last element is not available, then `None` is returned: + /** `lastOption` will return the last element as an `Option` of an ordered collection, or some random element if order is not defined. 
If a last element is not available, then `None` is returned: */ def lastOptionFunctionTraversables(res0: Option[Int], res1: Option[Int]) { val list = List(10, 19, 45, 1, 22) @@ -248,7 +247,7 @@ object Traversables extends FlatSpec with Matchers with org.scalaexercises.defin list2.lastOption should be(res1) } - /** `find` will locate the first item that matches a predicate p as *Some* or *None* if an element is not found: + /** `find` will locate the first item that matches the predicate `p` as `Some`, or `None` if an element is not found: */ def findFunctionTraversables(res0: Option[Int], res1: Option[Int]) { val list = List(10, 19, 45, 1, 22) @@ -258,35 +257,35 @@ object Traversables extends FlatSpec with Matchers with org.scalaexercises.defin list2.find(_ % 2 != 0) should be(res1) } - /** `tail` will return the rest of the collection without the head + /** `tail` will return the rest of the collection without the head: */ def tailFunctionTraversables(res0: List[Int]) { val list = List(10, 19, 45, 1, 22) list.tail should be(res0) } - /** `init` will return the rest of the collection without the last + /** `init` will return the rest of the collection without the last: */ def initFunctionTraversables(res0: List[Int]) { val list = List(10, 19, 45, 1, 22) list.init should be(res0) } - /** Given a `from` index, and a `to` index, slice will return the part of the collection including `from`, and excluding `to`: + /** Given a `from` index, and a `to` index, `slice` will return the part of the collection including `from`, and excluding `to`: */ def sliceFunctionTraversables(res0: List[Int]) { val list = List(10, 19, 45, 1, 22) list.slice(1, 3) should be(res0) } - /** `take` will return the first number of elements given. + /** `take` will return the first number of elements given: */ def takeFunctionTraversables(res0: List[Int]) { val list = List(10, 19, 45, 1, 22) list.take(3) should be(res0) } - /** `take` is used often with *Streams*, and *Streams* after all are *Traversable*. + /** `take` is used often with `Streams`, since they are also `Traversable`: */ def takeFunctionIITraversables(res0: List[Int]) { def streamer(v: Int): Stream[Int] = cons(v, streamer(v + 1)) @@ -294,7 +293,7 @@ object Traversables extends FlatSpec with Matchers with org.scalaexercises.defin (a take 3 toList) should be(res0) } - /** `drop` will take the rest of the *Traversable* except the number of elements given + /** `drop` will take the rest of the `Traversable` except the number of elements given: */ def dropFunctionTraversables(res0: List[Int]) { def streamer(v: Int): Stream[Int] = cons(v, streamer(v + 1)) @@ -302,35 +301,35 @@ object Traversables extends FlatSpec with Matchers with org.scalaexercises.defin ((a drop 6) take 3).toList should be(res0) } - /** `takeWhile` will continually accumulate elements until a predicate is no longer satisfied. + /** `takeWhile` will continually accumulate elements until a predicate is no longer satisfied: */ def takeWhileFunctionTraversables(res0: List[Int]) { val list = List(87, 44, 5, 4, 200, 10, 39, 100) list.takeWhile(_ < 100) should be(res0) } - /** `dropWhile` will continually drop elements until a predicate is no longer satisfied. + /** `dropWhile` will continually drop elements until a predicate is no longer satisfied: */ def dropWhileFunctionTraversables(res0: List[Int]) { val list = List(87, 44, 5, 4, 200, 10, 39, 100) list.dropWhile(_ < 100) should be(res0) } - /** `filter` will take out all elements that don't satisfy a predicate. An *Array* is also *Traversable*. 
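A compact sketch of the slicing operations above (illustrative values, different from the koan data):

{{{
val xs = List(3, 5, 8, 1, 9)
xs.slice(1, 3)       // List(5, 8)    - from index 1 (inclusive) to 3 (exclusive)
xs.take(2)           // List(3, 5)
xs.drop(2)           // List(8, 1, 9)
xs.takeWhile(_ < 8)  // List(3, 5)    - stops at the first element >= 8
xs.dropWhile(_ < 8)  // List(8, 1, 9)
}}}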
+ /** `filter` will take out all elements that don't satisfy a predicate. (An `Array` is also `Traversable`.) */ def filterFunctionTraversables(res0: Array[Int]) { val array = Array(87, 44, 5, 4, 200, 10, 39, 100) array.filter(_ < 100) should be(res0) } - /** `filterNot` will take out all elements that satisfy a predicate. An *Array* is also *Traversable*. + /** `filterNot` will take out all elements that satisfy a predicate: */ def filterNotFunctionTraversables(res0: Array[Int]) { val array = Array(87, 44, 5, 4, 200, 10, 39, 100) array.filterNot(_ < 100) should be(res0) } - /** `splitAt` will split a *Traversable* at a position, returning a 2 product *Tuple*. `splitAt` is also defined as `(xs take n, xs drop n)` + /** `splitAt` will split a `Traversable` at a position, returning a `Tuple2`. `splitAt` is also defined as `(xs take n, xs drop n)`: */ def splitAtFunctionTraversables(res0: Array[Int], res1: Array[Int]) { val array = Array(87, 44, 5, 4, 200, 10, 39, 100) @@ -339,7 +338,7 @@ object Traversables extends FlatSpec with Matchers with org.scalaexercises.defin result._2 should be(res1) } - /** `span` will split a *Traversable* according to predicate, returning a 2 product *Tuple*. `span` is also defined as `(xs takeWhile p, xs dropWhile p)` + /** `span` will split a `Traversable` according to a predicate, returning a `Tuple2`. `span` is also defined as `(xs takeWhile p, xs dropWhile p)`: */ def spanFunctionTraversables(res0: Array[Int], res1: Array[Int]) { val array = Array(87, 44, 5, 4, 200, 10, 39, 100) @@ -348,7 +347,7 @@ object Traversables extends FlatSpec with Matchers with org.scalaexercises.defin result._2 should be(res1) } - /** `partition` will split a *Traversable* according to predicate, returning a 2 product *Tuple*. The left hand side contains the elements satisfied by the predicate whereas the right hand side contains those that `don't`. *Array* is *Traversable*, partition is also defined as `(xs filter p, xs filterNot p)` + /** `partition` will split a `Traversable` according to a predicate, returning a `Tuple2`. The left-hand side contains the elements that satisfy the predicate, whereas the right-hand side contains the rest of the elements. `partition` is also defined as `(xs filter p, xs filterNot p)`: */ def partitionFunctionTraversables(res0: Array[Int], res1: Array[Int]) { val array = Array(87, 44, 5, 4, 200, 10, 39, 100) @@ -357,7 +356,7 @@ object Traversables extends FlatSpec with Matchers with org.scalaexercises.defin result._2 should be(res1) } - /** `groupBy` will categorize a *Traversable* according to a given function, and return a map with the results. This exercise uses *Partial Function* chaining. + /** `groupBy` will categorize a `Traversable` according to a given function and return a map with the results. This exercise uses partial function chaining: */ def groupByFunctionTraversables(res0: Int, res1: Int) { val array = Array(87, 44, 5, 4, 200, 10, 39, 100) @@ -394,7 +393,7 @@ object Traversables extends FlatSpec with Matchers with org.scalaexercises.defin (result("Large Number") size) should be(res1) } - /** `forall` will determine if a predicate is valid for all members of a *Traversable*.
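As a sketch of the splitting operations above (illustrative values):

{{{
val xs = Array(2, 7, 4, 9, 1)
xs.splitAt(2)            // (Array(2, 7), Array(4, 9, 1))      - split by position
xs.span(_ < 9)           // (Array(2, 7, 4), Array(9, 1))      - takeWhile / dropWhile
xs.partition(_ % 2 == 0) // (Array(2, 4), Array(7, 9, 1))      - filter / filterNot
}}}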
+ /** `forall` will determine if a predicate is valid for all members of a `Traversable`: */ def forallFunctionTraversables(res0: Boolean) { val list = List(87, 44, 5, 4, 200, 10, 39, 100) @@ -402,7 +401,7 @@ object Traversables extends FlatSpec with Matchers with org.scalaexercises.defin result should be(res0) } - /** `exists` will determine if a predicate is valid for some members of a *Traversable*. + /** `exists` will determine if a predicate is valid for some members of a `Traversable`: */ def existsFunctionTraversables(res0: Boolean) { val list = List(87, 44, 5, 4, 200, 10, 39, 100) @@ -410,7 +409,7 @@ object Traversables extends FlatSpec with Matchers with org.scalaexercises.defin result should be(res0) } - /** `count` will count the number of elements that satisfy a predicate in a *Traversable*. + /** `count` will count the number of elements that satisfy a predicate in a `Traversable`: */ def countFunctionTraversables(res0: Int) { val list = List(87, 44, 5, 4, 200, 10, 39, 100) @@ -418,7 +417,7 @@ object Traversables extends FlatSpec with Matchers with org.scalaexercises.defin result should be(res0) } - /** `/:` or `foldLeft` will combine an operation starting with a seed and combining from the left. *Fold Left* is defined as (seed /: list), where seed is the initial value. Once the fold is established, you provide a function that takes two arguments. The first argument is the running total of the operation, and the second element is the next element of the list. + /** `/:` or `foldLeft` will combine an operation starting with a seed and combining from the left. `foldLeft` is defined as (seed /: list), where seed is the initial value. Once the fold is established, you provide a function that takes two arguments. The first argument is the running total of the operation, and the second element is the next element of the list. * * Given a `Traversable (x1, x2, x3, x4)`, an initial value of `init`, an operation `op`, `foldLeft` is defined as: `(((init op x1) op x2) op x3) op x4)` */ @@ -468,7 +467,7 @@ object Traversables extends FlatSpec with Matchers with org.scalaexercises.defin (5 - (4 - (3 - (2 - (1 - 0))))) should be(res4) } - /** `reduceLeft` is the similar to *foldLeft*, except that the seed is the head value + /** `reduceLeft` is similar to `foldLeft`, except that the seed is the head value: */ def reduceLeftFunctionTraversables(res0: Int, res1: String) { val intList = List(5, 4, 3, 2, 1) @@ -482,7 +481,7 @@ object Traversables extends FlatSpec with Matchers with org.scalaexercises.defin } should be(res1) } - /** `reduceRight` is the similar to *foldRight*, except that the seed is the last value + /** `reduceRight` is similar to `foldRight`, except that the seed is the last value: */ def reduceRightFunctionTraversables(res0: Int, res1: String) { val intList = List(5, 4, 3, 2, 1) @@ -496,7 +495,7 @@ object Traversables extends FlatSpec with Matchers with org.scalaexercises.defin } should be(res1) } - /** There are some methods that take much of the folding work out by providing basic functionality. `sum` will add all the elements, product will multiply, `min` would determine the smallest element, and `max` the largest. + /** There are some methods that take much of the folding work out by providing basic functionality. 
`sum` will add all the elements, `product` will multiply, `min` would determine the smallest element, and `max` the largest: */ def sumFunctionTraversables(res0: Int, res1: Int, res2: Int, res3: Int) { val intList = List(5, 4, 3, 2, 1) @@ -506,7 +505,7 @@ object Traversables extends FlatSpec with Matchers with org.scalaexercises.defin intList.min should be(res3) } - /** You would choose *foldLeft/reduceLeft* or *foldRight/reduceRight* based on your mathematical goal. One other reason for deciding is performance. `foldLeft` is more performant since it uses tail recursion and is optimized. This exercise will either work or you will receive a *StackOverflowError*. + /** You would choose `foldLeft`/`reduceLeft` or `foldRight`/`reduceRight` based on your mathematical goal. One other reason for deciding is performance - `foldLeft` generally has better performance since it uses tail recursion. This exercise will either work fine or you will receive a `StackOverflowError`: */ def performantTraversables(res0: Boolean) { val MAX_SIZE = 1000000 @@ -524,7 +523,7 @@ object Traversables extends FlatSpec with Matchers with org.scalaexercises.defin (totalReduceRightTime > totalReduceLeftTime) should be(res0) } - /** `transpose` will take a traversable of traversables and group them by their position in it's own traversable. E.g.: + /** `transpose` will take a traversable of traversables and group them by their position in its own traversable, e.g.: * `((x1, x2),(y1, y2)).transpose = (x1, y1), (x2, y2)` or * `((x1, x2, x3),(y1, y2, y3),(z1, z2, z3)).transpose = ((x1, y1, z1), (x2, y2, z2), (x3, y3, z3))` */ @@ -536,14 +535,14 @@ object Traversables extends FlatSpec with Matchers with org.scalaexercises.defin list2.transpose should be(List(res3)) } - /** `mkString` will format a *Traversable* using a given string as the delimiter. + /** `mkString` will format a `Traversable` using a given string as the delimiter: */ def mkStringFunctionTraversables(res0: String) { val list = List(1, 2, 3, 4, 5) list.mkString(",") should be(res0) } - /** `mkString` will also take a beginning and ending string to surround the list. + /** `mkString` will also take a beginning and ending string to surround the list: */ def mkStringFunctionIITraversables(res0: String) { val list = List(1, 2, 3, 4, 5) diff --git a/src/main/scala/stdlib/Tuples.scala b/src/main/scala/stdlib/Tuples.scala index 650aa502..75ba6875 100644 --- a/src/main/scala/stdlib/Tuples.scala +++ b/src/main/scala/stdlib/Tuples.scala @@ -9,7 +9,7 @@ import org.scalatest._ */ object Tuples extends FlatSpec with Matchers with org.scalaexercises.definitions.Section { - /** Scala tuple combines a fixed number of items together so that they can be passed around as a whole. They are one indexed. Unlike an array or list, a tuple can hold objects with different types but they are also immutable. Here is an example of a tuple holding an integer, a string, and the console: + /** Scala tuple combines a fixed number of items together so that they can be passed around as a whole. They are one-indexed. Unlike an array or list, a tuple can hold objects with different types but they are also immutable.
Here is an example of a tuple holding an integer, a string, and the console: * * {{{ * val t = (1, "hello", Console) @@ -51,7 +51,7 @@ object Tuples extends FlatSpec with Matchers with org.scalaexercises.definitions gpa should be(res2) } - /** Tuples items can be swapped on a Tuple 2 + /** The `swap` method can be used to swap the elements of a Tuple2: */ def swappedTuples(res0: Int, res1: String) { val tuple = ("apple", 3).swap diff --git a/src/main/scala/stdlib/TypeSignatures.scala b/src/main/scala/stdlib/TypeSignatures.scala index e0be08f0..f1673cfc 100644 --- a/src/main/scala/stdlib/TypeSignatures.scala +++ b/src/main/scala/stdlib/TypeSignatures.scala @@ -6,9 +6,9 @@ import org.scalatest._ */ object TypeSignatures extends FlatSpec with Matchers with org.scalaexercises.definitions.Section { - /** A method's *type signature* comprises its name, the number, order, and types of its parameters, if any, and its result type. The type signature of a class, trait, or singleton object comprises its name, the type signatures of all of its members and constructors, and its declared inheritance and mixin relations. + /** A method's type signature comprises its name; the number, order, and types of its parameters, if any; and its result type. The type signature of a class, trait, or singleton object comprises its name, the type signatures of all of its members and constructors, and its declared inheritance and mixin relations. * - * In Java you declare a generic type within a `<>`, in Scala it is `[]` + * In Java you declare a generic type within a `<>`, in Scala `[]` is used: * * {{{ * val z: List[String] = "Do" :: "Re" :: "Mi" :: "Fa" :: "So" :: "La" :: "Te" :: "Do" :: Nil @@ -39,14 +39,14 @@ object TypeSignatures extends FlatSpec with Matchers with org.scalaexercises.def (intRand.draw < Int.MaxValue) should be(res0) } - /** Class meta-information can be retrieved by class name by using `classOf[className]` + /** Class meta-information can be retrieved by class name by using `classOf[className]`: */ def retrieveMetaInformationTypeSignatures(res0: String, res1: String) { classOf[String].getCanonicalName should be(res0) classOf[String].getSimpleName should be(res1) } - /** Class meta-information can be derived from an object reference using `getClass()` + /** Class meta-information can be derived from an object reference using `getClass()`: */ def deriveMetaInformationTypeSignatures(res0: Boolean, res1: String, res2: String) { val zoom = "zoom" diff --git a/src/main/scala/stdlib/TypeVariance.scala b/src/main/scala/stdlib/TypeVariance.scala index bb73d088..6f1b4dab 100644 --- a/src/main/scala/stdlib/TypeVariance.scala +++ b/src/main/scala/stdlib/TypeVariance.scala @@ -7,13 +7,13 @@ import org.scalatest._ */ object TypeVariance extends FlatSpec with Matchers with org.scalaexercises.definitions.Section { - /** A traditional objection to static typing is that it has much syntactic overhead. Scala alleviates this by providing *type inference*. + /** A traditional objection to static typing is that it has much syntactic overhead. Scala alleviates this by providing type inference. * - * The classic method for type inference in functional programming languages is *Hindley-Milner*, and it was first employed in ML. + * The classic method for type inference in functional programming languages is Hindley-Milner, and it was first employed in ML. * * Scala's type inference system works a little differently, but it's similar in spirit: infer constraints, and attempt to unify a type. 
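A minimal, illustrative sketch of inference at work (hypothetical values, not koan code):

{{{
val i = 42                 // inferred as Int
val xs = List(1, 2, 3)     // inferred as List[Int]
val f = (x: Int) => x * 2  // inferred as Int => Int
}}}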
* - * Using type inference the type that you instantiate it will be the val or var reference type: + * With type inference, the type that you instantiate becomes the type of the `val` or `var` reference: */ def syntacticOverheadTypeVariance(res0: String) { class MyContainer[A](val a: A)(implicit manifest: scala.reflect.Manifest[A]) { @@ -35,7 +35,7 @@ object TypeVariance extends FlatSpec with Matchers with org.scalaexercises.defin fruitBasket.contents should be(res0) } - /** You can coerce your object to a type. + /** You can coerce your object to a type: */ def coerceObjectTypeVariance(res0: String) { class MyContainer[A](val a: A)(implicit manifest: scala.reflect.Manifest[A]) { @@ -48,17 +48,17 @@ object TypeVariance extends FlatSpec with Matchers with org.scalaexercises.defin /** Scala's type system has to account for class hierarchies together with polymorphism. Class hierarchies allow the expression of subtype relationships. A central question that comes up when mixing OO with polymorphism is: if `T'` is a subclass of `T`, is `Container[T']` considered a subclass of `Container[T]`? Variance annotations allow you to express the following relationships between class hierarchies & polymorphic types: * - * ####Covariant: - * - `C[T']` is a subclass of `C[T]` - * - Scala notation: `[+T]` + * ==Covariant== + * - `C[T']` is a subclass of `C[T]` + * - Scala notation: `[+T]` * - * ####Contravariant: - * - `C[T]` is a subclass of `C[T']` - * - Scala notation: `[-T]` + * ==Contravariant== + * - `C[T]` is a subclass of `C[T']` + * - Scala notation: `[-T]` * - * ####Invariant: - * - `C[T]` and `C[T']` are not related - * - Scala notation: `[T]` + * ==Invariant== + * - `C[T]` and `C[T']` are not related + * - Scala notation: `[T]` * * * That one probably blew your mind. Now if you assign a type to the instantiation that is different to the variable type, you'll have problems. You may want to take time after this koan to compare and contrast with the previous one. @@ -98,7 +98,7 @@ object TypeVariance extends FlatSpec with Matchers with org.scalaexercises.defin //val tangeloBasket: MyContainer[Tangelo] = new MyContainer[Orange](new Orange()) //Bad! } - /** Declaring - indicates contravariance variance. Using - you can apply any container with a certain type to a container with a superclass of that type. This is reverse to covariant. In our example, we can set a citrus basket to an orange or tangelo basket. Since an orange or tangelo basket are a citrus basket. Contravariance is the opposite of covariance: + /** Declaring `-` indicates contravariance. Using `-`, a container of a given type can be used where a container of a subclass of that type is expected. This is the reverse of covariance. In our example, we can assign a citrus basket to an orange or tangelo basket reference, since an orange or a tangelo is a citrus fruit. Contravariance is the opposite of covariance: */ def contravarianceVarianceTypeVariance(res0: String, res1: String, res2: String, res3: String) { @@ -119,7 +119,7 @@ object TypeVariance extends FlatSpec with Matchers with org.scalaexercises.defin //val wrongCitrusBasket: MyContainer[Citrus] = new MyContainer[Orange](new Orange) //Bad! } - /** Declaring neither `-`/`+`, indicates invariance variance. You cannot use a superclass variable reference (\"contravariant\" position) or a subclass variable reference (\"covariant\" position) of that type. In our example, this means that if you create a citrus basket you can only reference that citrus basket with a citrus variable only.
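A stripped-down sketch of the three annotations, using hypothetical `CoBox`, `ContraBox`, and `InvBox` classes (not the koan's `MyContainer`):

{{{
class Fruit
class Orange extends Fruit

class CoBox[+A]     // covariant:     CoBox[Orange] is a subtype of CoBox[Fruit]
class ContraBox[-A] // contravariant: ContraBox[Fruit] is a subtype of ContraBox[Orange]
class InvBox[A]     // invariant:     no subtype relationship in either direction

val co: CoBox[Fruit] = new CoBox[Orange]             // compiles
val contra: ContraBox[Orange] = new ContraBox[Fruit] // compiles
// val inv: InvBox[Fruit] = new InvBox[Orange]       // does not compile
}}}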
+ /** Declaring neither `-` nor `+` indicates invariance. You cannot use a superclass variable reference (contravariant position) or a subclass variable reference (covariant position) of that type. In our example, this means that if you create a citrus basket you can only reference that citrus basket with a citrus variable. * * Invariance means you need to specify the type exactly: */ diff --git a/src/main/scala/stdlib/UniformAccessPrinciple.scala b/src/main/scala/stdlib/UniformAccessPrinciple.scala index 1c310b5b..45da9a43 100644 --- a/src/main/scala/stdlib/UniformAccessPrinciple.scala +++ b/src/main/scala/stdlib/UniformAccessPrinciple.scala @@ -7,9 +7,9 @@ import org.scalatest._ */ object UniformAccessPrinciple extends FlatSpec with Matchers with org.scalaexercises.definitions.Section { - /** The Scala language implements a programming concept known as the [Uniform Access Principle](http://en.wikipedia.org/wiki/Uniform_access_principle) which was first put forth by Bertrand Meyer, inventor of the Eiffel programming language. + /** The Scala language implements a programming concept known as the [[http://en.wikipedia.org/wiki/Uniform_access_principle Uniform Access Principle]], which was first put forth by Bertrand Meyer, the inventor of the Eiffel programming language. * - * This principle states that variables and parameterless functions should be accessed using the same syntax. Scala supports this principle by allowing parentheses to not be placed at call sites of parameterless functions. As a result, a parameterless function definition can be changed to a val, or vice versa, without affecting client code. + * This principle states that variables and parameterless functions should be accessed using the same syntax. Scala supports this principle by not requiring parentheses at call sites of parameterless functions. As a result, a parameterless function definition can be changed to a `val`, or vice versa, without affecting client code: * */ def uniformAccessPrincipleUniformAccessPrinciple(res0: Int, res1: Int) {