askomics / flaskomics / 8143972779

04 Mar 2024 04:54PM UTC, coverage: 83.219% (remained the same)

Pull Request #449: Bump xml2js and parse-bmfont-xml
Merge 6d2bc1d30 into 91939bc97

6283 of 7550 relevant lines covered (83.22%)
0.83 hits per line

Source File: /askomics/libaskomics/SparqlQueryLauncher.py (63.57% of file lines covered)

import time
import traceback
import sys

from SPARQLWrapper import JSON, SPARQLWrapper

from askomics.libaskomics.Params import Params

import requests
from urllib3.exceptions import HTTPError


class SparqlQueryLauncher(Params):
    """SparqlQueryLauncher

    Attributes
    ----------
    endpoint : SPARQLWrapper
        The triplestore endpoint
    query_time : float
        Query execution time (seconds)
    triplestore : string
        Triplestore name (virtuoso, fuseki ...)
    """

    def __init__(self, app, session, get_result_query=False, federated=False, endpoints=None):
        """init

        Parameters
        ----------
        app : Flask
            Flask app
        session :
            AskOmics session
        get_result_query : bool, optional
            Unused in this class
        federated : bool, optional
            Use the federated query engine
        endpoints : list, optional
            External endpoint URLs to query
        """
        Params.__init__(self, app, session)

        self.query_time = None

        # local endpoint (for federated query engine)
        self.local_endpoint_f = self.settings.get('triplestore', 'endpoint')
        try:
            self.local_endpoint_f = self.settings.get('federation', 'local_endpoint')
        except Exception:
            pass

        local = False
        # Use the federated query engine
        if federated:
            self.federated = True
            self.local_query = False
            self.url_endpoint = self.settings.get('federation', 'endpoint')
            self.url_updatepoint = self.settings.get('federation', 'endpoint')
            self.triplestore = self.settings.get('federation', 'query_engine')
        # Use the external endpoint
        elif endpoints is not None and endpoints != [self.local_endpoint_f]:
            self.federated = False
            self.local_query = False
            self.triplestore = "unknown"
            self.url_endpoint = endpoints[0]
            self.url_updatepoint = endpoints[0]
        # Use the local endpoint
        else:
            self.federated = False
            self.local_query = True
            self.triplestore = self.settings.get('triplestore', 'triplestore')
            self.url_endpoint = self.settings.get('triplestore', 'endpoint')
            self.url_updatepoint = self.settings.get('triplestore', 'updatepoint')
            local = True

        self.endpoint = SPARQLWrapper(self.url_endpoint, self.url_updatepoint)

        if local:
            try:
                self.endpoint.setCredentials(
                    self.settings.get('triplestore', 'username'),
                    self.settings.get('triplestore', 'password')
                )
                self.endpoint.setHTTPAuth(self.settings.get('triplestore', 'http_auth', fallback="basic"))
            except Exception:
                pass

    def load_data(self, file_name, graph, host_url):
        """Load data according to the triplestore type

        Parameters
        ----------
        file_name : string
            File name to load
        graph : string
            Graph name
        host_url : string
            AskOmics url
        """
        if self.triplestore == 'fuseki':
            self.load_data_fuseki(file_name, graph)
        else:
            self.load_data_virtuoso(file_name, graph, host_url)

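    # Fuseki ingests the converted ttl file directly: the file is read from the
    # AskOmics data directory and POSTed to the configured fuseki upload URL.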
    def load_data_fuseki(self, file_name, graph):
        """Load data using a fuseki upload request

        Parameters
        ----------
        file_name : string
            File name to load
        graph : string
            Graph name

        Returns
        -------
        requests.Response
            Response of the upload request
        """
        file_path = "{}/{}_{}/ttl/{}".format(
            self.settings.get("askomics", "data_directory"),
            self.session["user"]["id"],
            self.session["user"]["username"],
            file_name
        )

        data = {'graph': graph}
        files = [('file', (file_name, open(file_path), 'text/turtle'))]

        start_time = time.time()

        response = requests.post(self.settings.get('triplestore', 'fuseki_upload_url'), data=data, files=files)

        self.query_time = time.time() - start_time

        return response

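    # Virtuoso ingests data by fetching the file back over HTTP: the ttl file is
    # exposed through the AskOmics api/files/ttl/... route and loaded with a
    # SPARQL LOAD query against that URL.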
    def load_data_virtuoso(self, file_name, graph, host_url):
        """Load data using a virtuoso LOAD query

        Parameters
        ----------
        file_name : string
            File name to load
        graph : string
            Graph name
        host_url : string
            AskOmics url

        Returns
        -------
        dict or SPARQLWrapper QueryResult
            Result of the LOAD query
        """
        try:
            load_url = self.settings.get('triplestore', 'load_url')
        except Exception:
            load_url = host_url

        if not load_url.endswith('/'):
            load_url = load_url + "/"

        file_url = '{}api/files/ttl/{}/{}/{}'.format(
            load_url,
            self.session['user']['id'],
            self.session['user']['username'],
            file_name
        )

        query = "LOAD <{}> INTO GRAPH <{}>".format(file_url, graph)
        return self.execute_query(query, is_update=True)

    def get_triples_from_graph(self, graph):
        """Get triples from an rdflib graph

        Parameters
        ----------
        graph : Graph
            rdf graph

        Returns
        -------
        string
            ttl string
        """
        ttl = ""
        for s, p, o in graph.get_triple():
            ttl += "<{}> <{}> <{}> .\n".format(s, p, o)
        return ttl

    def insert_ttl_string(self, ttl_string, graph):
        """Insert a ttl string into the triplestore

        Parameters
        ----------
        ttl_string : string
            ttl triples to insert
        graph : string
            Insert in the named graph

        Returns
        -------
        dict or SPARQLWrapper QueryResult
            Query result
        """
        query = '''
        INSERT {{
            GRAPH <{}> {{
                {}
            }}
        }}
        '''.format(graph, ttl_string)

        return self.execute_query(query, is_update=True)

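    # insert_data wraps the triples in a SPARQL INSERT; the graph is serialized
    # to N-Triples, except for metadata graphs which are flattened with
    # get_triples_from_graph above.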
    def insert_data(self, ttl, graph, metadata=False):
        """Insert data into the triplestore using an INSERT query

        Parameters
        ----------
        ttl : Graph
            rdflib graph
        graph : string
            Graph name
        metadata : bool, optional
            Whether the graph contains metadata

        Returns
        -------
        dict or SPARQLWrapper QueryResult
            Query result
        """
        triples = self.get_triples_from_graph(ttl) if metadata else ttl.serialize(format='nt')

        query = '''
        INSERT {{
            GRAPH <{}> {{
                {}
            }}
        }}
        '''.format(graph, triples)

        return self.execute_query(query, is_update=True)

    def drop_dataset(self, graph):
        """Drop a dataset graph from the triplestore

        Parameters
        ----------
        graph : string
            Graph name to remove
        """
        query = '''
        DROP SILENT GRAPH <{}>
        '''.format(graph)
        self.execute_query(query, disable_log=True, isql_api=True, is_update=True)

    def process_query(self, query, isql_api=False, is_update=False):
        """Execute a query and return parsed results

        Parameters
        ----------
        query : string
            The query to execute
        isql_api : bool, optional
            Use the Virtuoso isql API if available
        is_update : bool, optional
            Whether the query is an update query

        Returns
        -------
        list, list
            Header and parsed rows
        """
        return self.parse_results(self.execute_query(query, isql_api=isql_api, is_update=is_update))

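    # execute_query routes the query either through the Virtuoso isql HTTP API
    # (only for local queries, when the triplestore "isqlapi" setting is defined
    # and isql_api is requested) or through SPARQLWrapper, POSTing updates to
    # the secure update endpoint and selects to the public endpoint as JSON.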
    def execute_query(self, query, disable_log=False, isql_api=False, is_update=False):
        """Execute a sparql query

        Parameters
        ----------
        query : string
            Query to perform
        disable_log : bool, optional
            Disable the query log (isql API only)
        isql_api : bool, optional
            Use the Virtuoso isql API if available
        is_update : bool, optional
            Whether the query is an update query

        Returns
        -------
        dict or SPARQLWrapper QueryResult
            Query result
        """
        try:
            triplestore = self.settings.get("triplestore", "triplestore")

            # Use ISQL or SPARQL
            isql_api_url = None
            try:
                isql_api_url = self.settings.get("triplestore", "isqlapi")
            except Exception:
                pass
            use_isql = True if triplestore == "virtuoso" and isql_api_url and self.local_query and isql_api else False

            start_time = time.time()
            self.endpoint.setQuery(query)

            # Debug
            if self.settings.getboolean('askomics', 'debug'):
                self.log.debug("Launch {} query on {} ({})".format("ISQL" if use_isql else "SPARQL", self.triplestore, self.url_endpoint))
                self.log.debug(query)

            if use_isql:
                formatted_query = "SPARQL {}".format(query)
                json = {"command": formatted_query, "disable_log": disable_log, "sparql_select": not is_update}
                response = requests.post(url=isql_api_url, json=json)
                results = response.json()
                if results["status"] == 500:
                    raise HTTPError("isqlapi: {}".format(results["message"]))

            else:
                # Update
                self.endpoint.setMethod('POST')
                if is_update:
                    # Force sending to secure endpoint
                    self.endpoint.queryType = "INSERT"
                    results = self.endpoint.query()

                # Select
                else:
                    self.endpoint.setReturnFormat(JSON)
                    # Force sending to public endpoint
                    self.endpoint.queryType = "SELECT"
                    results = self.endpoint.query().convert()

                self.query_time = time.time() - start_time

            return results

        except Exception as e:
            traceback.print_exc(file=sys.stdout)
            raise type(e)("Triplestore error: {}".format(str(e))).with_traceback(sys.exc_info()[2])

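    # parse_results flattens SPARQL JSON bindings into a list of
    # {variable: value} dicts; isql API responses arrive pre-parsed
    # and are returned as-is.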
    def parse_results(self, json_results):
        """Parse results of a sparql query

        Parameters
        ----------
        json_results : dict
            Query result

        Returns
        -------
        list, list
            Header and data
        """
        try:
            # If isql, results are already parsed
            if "isql" in json_results:
                return json_results["vars"], json_results["rows"]

            header = json_results['head']['vars']
            data = []
            for row in json_results["results"]["bindings"]:
                row_dict = {}
                for key, value in row.items():
                    row_dict[key] = value['value']
                data.append(row_dict)

        except Exception:
            traceback.print_exc(file=sys.stdout)
            return [], []

        return header, data
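
Below is a minimal usage sketch of the class, for context only (it is not part of the covered file). The Flask app and AskOmics session objects are assumed to come from a running AskOmics instance, and the graph URI is a hypothetical placeholder; the sketch simply counts the triples of a named graph through process_query.

# Minimal usage sketch (assumes a running AskOmics instance providing the
# Flask `app` and user `session`; the graph URI below is a placeholder).
from askomics.libaskomics.SparqlQueryLauncher import SparqlQueryLauncher


def count_triples(app, session, graph_uri="urn:example:graph"):
    """Count the triples of a named graph on the local endpoint."""
    launcher = SparqlQueryLauncher(app, session)
    query = "SELECT (COUNT(*) AS ?count) WHERE {{ GRAPH <{}> {{ ?s ?p ?o }} }}".format(graph_uri)
    header, data = launcher.process_query(query)
    return header, data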