• Home
  • Features
  • Pricing
  • Docs
  • Announcements
  • Sign In

JohnSnowLabs / spark-nlp / 11429325160

20 Oct 2024 08:18PM UTC coverage: 60.052% (-0.2%) from 60.216%
11429325160

Pull #14439

github

web-flow
Merge 1c191569d into 9db33328b
Pull Request #14439: [SPARKNLP-1067] PromptAssembler

0 of 50 new or added lines in 2 files covered. (0.0%)

48 existing lines in 26 files now uncovered.

8985 of 14962 relevant lines covered (60.05%)

0.6 hits per line

Source File
Press 'n' to go to next uncovered line, 'b' for previous

65.52
/src/main/scala/com/johnsnowlabs/nlp/SparkNLP.scala
1
/*
2
 * Copyright 2017-2022 John Snow Labs
3
 *
4
 * Licensed under the Apache License, Version 2.0 (the "License");
5
 * you may not use this file except in compliance with the License.
6
 * You may obtain a copy of the License at
7
 *
8
 *    http://www.apache.org/licenses/LICENSE-2.0
9
 *
10
 * Unless required by applicable law or agreed to in writing, software
11
 * distributed under the License is distributed on an "AS IS" BASIS,
12
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13
 * See the License for the specific language governing permissions and
14
 * limitations under the License.
15
 */
16

17
package com.johnsnowlabs.nlp
18

19
import org.apache.spark.sql.SparkSession
20

21
object SparkNLP {

  val currentVersion = "5.5.0"

  // Maven coordinates for the hardware-specific builds of Spark NLP (Scala 2.12).
  val MavenSpark3 = s"com.johnsnowlabs.nlp:spark-nlp_2.12:$currentVersion"
  val MavenGpuSpark3 = s"com.johnsnowlabs.nlp:spark-nlp-gpu_2.12:$currentVersion"
  val MavenSparkSilicon = s"com.johnsnowlabs.nlp:spark-nlp-silicon_2.12:$currentVersion"
  val MavenSparkAarch64 = s"com.johnsnowlabs.nlp:spark-nlp-aarch64_2.12:$currentVersion"

  /** Start SparkSession with Spark NLP
    *
    * @param gpu
    *   start Spark NLP with GPU
    * @param apple_silicon
    *   start Spark NLP for Apple M1 & M2 systems
    * @param aarch64
    *   start Spark NLP for Linux Aarch64 systems
    * @param memory
    *   set driver memory for SparkSession
    * @param cache_folder
    *   The location to download and extract pretrained Models and Pipelines (by default, it will
    *   be in the user's home directory under `cache_pretrained`.)
    * @param log_folder
    *   The location to save logs from annotators during training (by default, it will be in the
    *   user's home directory under `annotator_logs`.)
    * @param cluster_tmp_dir
    *   The location to use on a cluster for temporary files such as unpacking indexes for
    *   WordEmbeddings. By default, this location is the location of `hadoop.tmp.dir` set via
    *   Hadoop configuration for Apache Spark. NOTE: `S3` is not supported and it must be local,
    *   HDFS, or DBFS.
    * @param params
    *   Custom parameters to set for the Spark configuration (Default: `Map.empty`)
    * @return
    *   SparkSession
    */
  def start(
      gpu: Boolean = false,
      apple_silicon: Boolean = false,
      aarch64: Boolean = false,
      memory: String = "16G",
      cache_folder: String = "",
      log_folder: String = "",
      cluster_tmp_dir: String = "",
      params: Map[String, String] = Map.empty): SparkSession = {

    // If a session already exists, getOrCreate() below returns it and most of the
    // builder configs set here are silently ignored — warn the user up front.
    if (SparkSession.getActiveSession.isDefined)
      println("Warning: Spark Session already created, some configs may not be applied.")

    val builder = SparkSession
      .builder()
      .appName("Spark NLP")
      .config("spark.driver.memory", memory)
      .config("spark.serializer", "org.apache.spark.serializer.KryoSerializer")
      .config("spark.kryoserializer.buffer.max", "2000M")
      .config("spark.driver.maxResultSize", "0")

    // get the set cores by users since local[*] will override spark.driver.cores if set
    if (params.contains("spark.driver.cores")) {
      builder.master("local[" + params("spark.driver.cores") + "]")
    } else {
      builder.master("local[*]")
    }

    // Pick the Maven artifact matching the requested hardware; plain CPU is the default.
    val sparkNlpJar =
      if (apple_silicon) MavenSparkSilicon
      else if (aarch64) MavenSparkAarch64
      else if (gpu) MavenGpuSpark3
      else MavenSpark3

    // When the user supplies spark.jars.packages themselves it is merged with the
    // Spark NLP artifact in the loop below; otherwise set it here.
    if (!params.contains("spark.jars.packages")) {
      builder.config("spark.jars.packages", sparkNlpJar)
    }

    params.foreach {
      case (key, value) if key == "spark.jars.packages" =>
        builder.config(key, sparkNlpJar + "," + value)
      case (key, value) =>
        builder.config(key, value)
    }

    if (cache_folder.nonEmpty)
      builder.config("spark.jsl.settings.pretrained.cache_folder", cache_folder)

    if (log_folder.nonEmpty)
      builder.config("spark.jsl.settings.annotator.log_folder", log_folder)

    if (cluster_tmp_dir.nonEmpty)
      builder.config("spark.jsl.settings.storage.cluster_tmp_dir", cluster_tmp_dir)

    builder.getOrCreate()
  }

  /** The version of Spark NLP currently in use. */
  def version(): String = {
    currentVersion
  }

}
STATUS · Troubleshooting · Open an Issue · Sales · Support · CAREERS · ENTERPRISE · START FREE · SCHEDULE DEMO
ANNOUNCEMENTS · TWITTER · TOS & SLA · Supported CI Services · What's a CI service? · Automated Testing

© 2025 Coveralls, Inc