• Home
  • Features
  • Pricing
  • Docs
  • Announcements
  • Sign In

askomics / flaskomics / 8571637570

05 Apr 2024 02:48PM UTC coverage: 83.261% (+0.04%) from 83.219%
8571637570

push

github

mboudet
fix tests

5 of 5 new or added lines in 2 files covered. (100.0%)

38 existing lines in 2 files now uncovered.

6322 of 7593 relevant lines covered (83.26%)

0.83 hits per line

Source File
Press 'n' to go to next uncovered line, 'b' for previous

63.08
/askomics/libaskomics/SparqlQueryLauncher.py
1
import time
1✔
2
import traceback
1✔
3
import sys
1✔
4

5
from SPARQLWrapper import JSON, SPARQLWrapper
1✔
6

7
from askomics.libaskomics.Params import Params
1✔
8

9
import requests
1✔
10
from urllib3.exceptions import HTTPError
1✔
11

12

13
class SparqlQueryLauncher(Params):
    """Launch SPARQL queries and data loads against a triplestore.

    Depending on the constructor arguments, queries go to the local
    triplestore endpoint, an external endpoint, or a federated query engine.

    Attributes
    ----------
    endpoint : SPARQLWrapper
        The triplestore endpoint
    query_time : float or None
        Execution time (seconds) of the last query; None before any query
    triplestore : string
        triplestore name (virtuoso, fuseki ...)
    """

    def __init__(self, app, session, get_result_query=False, federated=False, endpoints=None):
        """init

        Parameters
        ----------
        app : Flask
            Flask app
        session :
            AskOmics session
        get_result_query : bool, optional
            Unused here; kept for interface compatibility with callers
        federated : bool, optional
            Route queries through the federated query engine
        endpoints : list, optional
            External endpoint URLs; only the first one is used
        """
        Params.__init__(self, app, session)

        self.query_time = None

        # Local endpoint advertised to the federated query engine. Falls back
        # to the plain triplestore endpoint when no [federation] section with
        # a local_endpoint key is configured.
        self.local_endpoint_f = self.settings.get('triplestore', 'endpoint')
        try:
            self.local_endpoint_f = self.settings.get('federation', 'local_endpoint')
        except Exception:
            pass

        local = False
        # Use the federated query engine
        if federated:
            self.federated = True
            self.local_query = False
            self.url_endpoint = self.settings.get('federation', 'endpoint')
            self.url_updatepoint = self.settings.get('federation', 'endpoint')
            self.triplestore = self.settings.get('federation', 'query_engine')
        # use the external endpoint
        elif endpoints is not None and endpoints != [self.local_endpoint_f]:
            self.federated = False
            self.local_query = False
            self.triplestore = "unknown"
            self.url_endpoint = endpoints[0]
            self.url_updatepoint = endpoints[0]
        # use the local endpoint
        else:
            self.federated = False
            self.local_query = True
            self.triplestore = self.settings.get('triplestore', 'triplestore')
            self.url_endpoint = self.settings.get('triplestore', 'endpoint')
            self.url_updatepoint = self.settings.get('triplestore', 'updatepoint')
            local = True

        self.endpoint = SPARQLWrapper(self.url_endpoint, self.url_updatepoint)

        if local:
            # Credentials are optional: keep best-effort behavior when the
            # username/password config keys are missing.
            try:
                self.endpoint.setCredentials(
                    self.settings.get('triplestore', 'username'),
                    self.settings.get('triplestore', 'password')
                )
                self.endpoint.setHTTPAuth(self.settings.get('triplestore', 'http_auth', fallback="basic"))
            except Exception:
                pass

    def load_data(self, file_name, graph, host_url):
        """Load data in function of the triplestore

        Parameters
        ----------
        file_name : string
            File name to load
        graph : string
            graph name
        host_url : string
            AskOmics url
        """
        if self.triplestore == 'fuseki':
            self.load_data_fuseki(file_name, graph)
        else:
            # Default path: anything that is not fuseki goes through the
            # virtuoso-style LOAD query.
            self.load_data_virtuoso(file_name, graph, host_url)

    def load_data_fuseki(self, file_name, graph):
        """Load data using fuseki upload request

        Parameters
        ----------
        file_name : string
            File name to load
        graph : string
            graph name

        Returns
        -------
        response
            Response of request
        """
        file_path = "{}/{}_{}/ttl/{}".format(
            self.settings.get("askomics", "data_directory"),
            self.session["user"]["id"],
            self.session["user"]["username"],
            file_name
        )

        data = {'graph': graph}

        start_time = time.time()

        # Open the file in a context manager so the descriptor is closed even
        # when the upload fails (the previous code leaked the file handle).
        with open(file_path) as ttl_file:
            files = [('file', (file_name, ttl_file, 'text/turtle'))]
            response = requests.post(self.settings.get('triplestore', 'fuseki_upload_url'), data=data, files=files)

        self.query_time = time.time() - start_time

        return response

    def load_data_virtuoso(self, file_name, graph, host_url):
        """Load data using virtuoso load query

        Parameters
        ----------
        file_name : string
            File name to load
        graph : string
            graph name
        host_url : string
            AskOmics url, used when no load_url is configured

        Returns
        -------
        TYPE
            result of query
        """
        # load_url is optional config; fall back to the AskOmics host url so
        # the triplestore can fetch the file back over HTTP.
        try:
            load_url = self.settings.get('triplestore', 'load_url')
        except Exception:
            load_url = host_url

        if not load_url.endswith('/'):
            load_url = load_url + "/"

        file_url = '{}api/files/ttl/{}/{}/{}'.format(
            load_url,
            self.session['user']['id'],
            self.session['user']['username'],
            file_name
        )

        # The API key lets the triplestore fetch the file without a session.
        file_url = file_url + "?key={}".format(self.session['user']['apikey'])

        query = "LOAD <{}> INTO GRAPH <{}>".format(file_url, graph)
        return self.execute_query(query, is_update=True)

    def get_triples_from_graph(self, graph):
        """Serialize a graph's triples into a ttl string

        Parameters
        ----------
        graph : Graph
            rdf graph; assumed to expose a get_triple() iterator of
            (subject, predicate, object) — NOTE(review): confirm this is the
            AskOmics graph wrapper, not a bare rdflib Graph

        Returns
        -------
        string
            ttl string (all three terms rendered as URIs)
        """
        ttl = ""
        for s, p, o in graph.get_triple():
            ttl += "<{}> <{}> <{}> .\n".format(s, p, o)
        return ttl

    @staticmethod
    def _build_insert_query(graph, triples):
        """Build the INSERT query used by insert_ttl_string and insert_data.

        Kept as a single helper so both callers always emit the exact same
        query shape.
        """
        return '''
        INSERT {{
            GRAPH <{}> {{
                {}
            }}
        }}
        '''.format(graph, triples)

    def insert_ttl_string(self, ttl_string, graph):
        """Insert ttl into the triplestore

        Parameters
        ----------
        ttl_string : string
            ttl triples to insert
        graph : string
            Insert in the named graph

        Returns
        -------
        dict?
            query result
        """
        query = self._build_insert_query(graph, ttl_string)

        return self.execute_query(query, is_update=True)

    def insert_data(self, ttl, graph, metadata=False):
        """Insert data into the triplestore using INSERT

        Parameters
        ----------
        ttl : Graph
            rdflib graph
        graph : string
            graph name
        metadata : bool, optional
            serialize through get_triples_from_graph instead of nt

        Returns
        -------
        TYPE
            query result
        """
        triples = self.get_triples_from_graph(ttl) if metadata else ttl.serialize(format='nt')

        query = self._build_insert_query(graph, triples)

        return self.execute_query(query, is_update=True)

    def drop_dataset(self, graph):
        """Drop a dataset graph from the triplestore

        Parameters
        ----------
        graph : string
            graph name to remove
        """
        # SILENT: dropping an already-absent graph is not an error.
        query = '''
        DROP SILENT GRAPH <{}>
        '''.format(graph)
        self.execute_query(query, disable_log=True, isql_api=True, is_update=True)

    def process_query(self, query, isql_api=False, is_update=False):
        """Execute a query and return parsed results

        Parameters
        ----------
        query : string
            The query to execute
        isql_api : bool, optional
            Allow routing through the virtuoso ISQL API
        is_update : bool, optional
            Whether the query is an update (INSERT/DELETE/LOAD/DROP)

        Returns
        -------
        list
            Parsed results (header, data)
        """
        return self.parse_results(self.execute_query(query, isql_api=isql_api, is_update=is_update))

    def execute_query(self, query, disable_log=False, isql_api=False, is_update=False):
        """Execute a sparql query

        Parameters
        ----------
        query : string
            Query to perform
        disable_log : bool, optional
            Forwarded to the ISQL API to skip query logging
        isql_api : bool, optional
            Allow routing through the virtuoso ISQL API
        is_update : bool, optional
            Update queries go to the secure endpoint, selects to the public one

        Returns
        -------
        TYPE
            result (parsed dict from ISQL, SPARQLWrapper result otherwise)

        Raises
        ------
        Exception
            Re-raised with a "Triplestore error:" prefix, original traceback kept
        """
        try:
            triplestore = self.settings.get("triplestore", "triplestore")

            # Use the ISQL API only when all conditions line up: virtuoso,
            # an isqlapi url configured, a local (non-federated) query, and
            # the caller opted in.
            isql_api_url = None
            try:
                isql_api_url = self.settings.get("triplestore", "isqlapi")
            except Exception:
                pass
            use_isql = True if triplestore == "virtuoso" and isql_api_url and self.local_query and isql_api else False

            start_time = time.time()
            self.endpoint.setQuery(query)

            # Debug
            if self.settings.getboolean('askomics', 'debug'):
                self.log.debug("Launch {} query on {} ({})".format("ISQL" if use_isql else "SPARQL", self.triplestore, self.url_endpoint))
                self.log.debug(query)

            if use_isql:
                formatted_query = "SPARQL {}".format(query)
                json = {"command": formatted_query, "disable_log": disable_log, "sparql_select": not is_update}
                response = requests.post(url=isql_api_url, json=json)
                results = response.json()
                if results["status"] == 500:
                    raise HTTPError("isqlapi: {}".format(results["message"]))

            else:
                # Update
                self.endpoint.setMethod('POST')
                if is_update:
                    # Force sending to secure endpoint
                    self.endpoint.queryType = "INSERT"
                    results = self.endpoint.query()

                # Select
                else:
                    self.endpoint.setReturnFormat(JSON)
                    # Force sending to public endpoint
                    self.endpoint.queryType = "SELECT"
                    results = self.endpoint.query().convert()

            # Record timing for both paths (the ISQL branch previously left
            # query_time stale from the prior query).
            self.query_time = time.time() - start_time

            return results

        except Exception as e:
            traceback.print_exc(file=sys.stdout)
            # Re-raise the same exception type with a prefixed message while
            # preserving the original traceback.
            raise type(e)("Triplestore error: {}".format(str(e))).with_traceback(sys.exc_info()[2])

    def parse_results(self, json_results):
        """Parse result of sparql query

        Parameters
        ----------
        json_results : dict
            Query result

        Returns
        -------
        list, list
            Header and data
        """
        try:
            # If isql, results are already parsed by the ISQL API
            if "isql" in json_results:
                return json_results["vars"], json_results["rows"]

            header = json_results['head']['vars']
            data = []
            for row in json_results["results"]["bindings"]:
                row_dict = {}
                for key, value in row.items():
                    row_dict[key] = value['value']
                data.append(row_dict)

        except Exception:
            # Best-effort: malformed results yield empty header/data rather
            # than crashing callers.
            traceback.print_exc(file=sys.stdout)
            return [], []

        return header, data
STATUS · Troubleshooting · Open an Issue · Sales · Support · CAREERS · ENTERPRISE · START FREE · SCHEDULE DEMO
ANNOUNCEMENTS · TWITTER · TOS & SLA · Supported CI Services · What's a CI service? · Automated Testing

© 2026 Coveralls, Inc