
JohnSnowLabs / spark-nlp / 15252839065

26 May 2025 11:30AM UTC · coverage: 52.115% (-0.6%) from 52.715%

Pull Request #14585: SparkNLP 1131 - Introducing Florance-2
Merge 625e5c10f into 56512b006 (github · web-flow)

0 of 199 new or added lines in 4 files covered. (0.0%)
50 existing lines in 33 files now uncovered.
9931 of 19056 relevant lines covered (52.11%)
0.52 hits per line
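
As a sanity check, the headline percentage is simply the covered/relevant ratio above. The sketch below is illustrative only; it assumes "hits per line" means total hits divided by relevant lines, which matches the reported 0.52 only if most covered lines were hit roughly once.

object CoverageArithmetic {
  def main(args: Array[String]): Unit = {
    val coveredLines = 9931
    val relevantLines = 19056

    // 9931 / 19056 = 0.52115, i.e. the reported 52.115% build coverage.
    val coveragePct = coveredLines.toDouble / relevantLines * 100
    println(f"coverage: $coveragePct%.3f%%")

    // Assumption: hits per line = total hits / relevant lines; with about one
    // hit per covered line this also comes out to ~0.52.
    val approxHitsPerLine = coveredLines.toDouble / relevantLines
    println(f"hits per line: $approxHitsPerLine%.2f")
  }
}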

Source File: /src/main/scala/com/johnsnowlabs/nlp/annotators/ner/dl/LoadsContrib.scala
File coverage: 11.54%
/*
 * Copyright 2017-2022 John Snow Labs
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package com.johnsnowlabs.nlp.annotators.ner.dl

import com.johnsnowlabs.nlp.util.io.ResourceHelper
import org.apache.spark.SparkFiles
import org.apache.spark.sql.SparkSession
import org.tensorflow.TensorFlow

import java.io.{BufferedOutputStream, File, FileOutputStream}
import java.nio.file.Paths

object LoadsContrib {
  @transient var loadedToCluster = false
  @transient var loadedToTensorflow = false

  private lazy val lib1 = "_sparse_feature_cross_op.so"
  private lazy val lib2 = "_lstm_ops.so"

  private def resourcePath(os: String, lib: String) = "ner-dl/" + os + "/" + lib

  /*
   * In TensorFlow 1.15.0 we don't need to load any .so files
   * We reserve this feature for the future releases
   *  */
  lazy val contribPaths: Option[(String, String)] = None

  private def getFileName(path: String) = {
    "sparknlp_contrib" + new File(path).getName.take(5)
  }

  /** NOT thread safe. Make sure this runs on DRIVER only */
  private def copyResourceToTmp(path: String): File = {
    val stream = ResourceHelper.getResourceStream(path)
    val tmpFolder = System.getProperty("java.io.tmpdir")
    val tmp = Paths.get(tmpFolder, getFileName(path)).toFile
    val target = new BufferedOutputStream(new FileOutputStream(tmp))

    val buffer = new Array[Byte](1 << 13)
    var read = stream.read(buffer)
    while (read > 0) {
      target.write(buffer, 0, read)
      read = stream.read(buffer)
    }
    stream.close()
    target.close()

    tmp
  }

  def loadContribToCluster(spark: SparkSession): Unit = {

    /** NOT thread-safe. DRIVER only */
    if (!LoadsContrib.loadedToCluster && contribPaths.isDefined) {
      LoadsContrib.loadedToCluster = true
      spark.sparkContext.addFile(copyResourceToTmp(contribPaths.get._1).getPath)
      spark.sparkContext.addFile(copyResourceToTmp(contribPaths.get._2).getPath)
    }
  }

  def loadContribToTensorflow(): Unit = {
    if (!LoadsContrib.loadedToTensorflow && contribPaths.isDefined) {
      LoadsContrib.loadedToTensorflow = true
      val fp1 = SparkFiles.get(getFileName(contribPaths.get._1))
      val fp2 = SparkFiles.get(getFileName(contribPaths.get._2))
      if (new File(fp1).exists() && new File(fp2).exists()) {
        TensorFlow.loadLibrary(fp1)
        TensorFlow.loadLibrary(fp2)
      }
    }
  }

}
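
For context, a minimal usage sketch (not part of the file above; the session setup and job layout are assumed for illustration): loadContribToCluster is meant to run once on the driver to ship the contrib .so files, while loadContribToTensorflow runs on the executors before the contrib ops are needed. With contribPaths set to None, both calls are effectively no-ops in this release.

import com.johnsnowlabs.nlp.annotators.ner.dl.LoadsContrib
import org.apache.spark.sql.SparkSession

object LoadsContribUsageSketch {
  def main(args: Array[String]): Unit = {
    // Illustrative local session; Spark NLP users would normally obtain one via SparkNLP.start().
    val spark = SparkSession.builder()
      .appName("loads-contrib-usage-sketch")
      .master("local[*]")
      .getOrCreate()

    // Driver only: copies the contrib libraries to tmp and registers them with
    // sparkContext.addFile (guarded by the loadedToCluster flag).
    LoadsContrib.loadContribToCluster(spark)

    // Executor side: resolve the shipped files via SparkFiles and register them
    // with TensorFlow before any graph that needs the contrib ops is executed.
    spark.sparkContext.parallelize(0 until 2).foreachPartition { _ =>
      LoadsContrib.loadContribToTensorflow()
    }

    spark.stop()
  }
}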