bench_executor.virtuoso

Virtuoso is a secure and high-performance platform for modern data access, integration, virtualization, and multi-model data management (tables & graphs) based on innovative support of existing open standards (e.g., SQL, SPARQL, and GraphQL).

Website: https://virtuoso.openlinksw.com/
Repository: https://github.com/openlink/virtuoso-opensource

  1#!/usr/bin/env python3
  2
  3"""
  4Virtuoso is a secure and high-performance platform for modern data access,
  5integration, virtualization, and multi-model data management (tables & graphs)
  6based on innovative support of existing open standards
  7(e.g., SQL, SPARQL, and GraphQL).
  8
  9**Website**: https://virtuoso.openlinksw.com/<br>
 10**Repository**: https://github.com/openlink/virtuoso-opensource
 11"""
 12
 13import os
 14import tempfile
 15import psutil
 16from typing import Dict
 17from threading import Thread
 18from bench_executor.container import Container
 19from bench_executor.logger import Logger
 20
 21VERSION = '7.2.7'
 22MAX_ROWS = '10000000'
 23QUERY_TIMEOUT = '0'  # no limit
 24MAX_VECTOR_SIZE = '3000000'  # max value is 'around' 3,500,000 from docs
 25PASSWORD = 'root'
 26NUMBER_OF_BUFFERS_PER_GB = 85000
 27MAX_DIRTY_BUFFERS_PER_GB = 65000
 28
 29
 30def _spawn_loader(container):
 31    """Thread function to parallel load RDF.
 32
 33    Parameters
 34    ----------
 35    container : Container
 36        The Virtuoso container on which the RDF loader should run.
 37    """
 38    success, logs = container.exec('isql -U dba -P root '
 39                                   'exec="rdf_loader_run();"')
 40
 41
 42class Virtuoso(Container):
 43    """Virtuoso container to execute SPARQL queries"""
 44
 45    def __init__(self, data_path: str, config_path: str, directory: str,
 46                 verbose: bool):
 47        """Creates an instance of the Virtuoso class.
 48
 49        Parameters
 50        ----------
 51        data_path : str
 52            Path to the data directory of the case.
 53        config_path : str
 54            Path to the config directory of the case.
 55        directory : str
 56            Path to the directory to store logs.
 57        verbose : bool
 58            Enable verbose logs.
 59        """
 60        self._data_path = os.path.abspath(data_path)
 61        self._config_path = os.path.abspath(config_path)
 62        self._logger = Logger(__name__, directory, verbose)
 63
 64        tmp_dir = os.path.join(tempfile.gettempdir(), 'virtuoso')
 65        os.umask(0)
 66        os.makedirs(tmp_dir, exist_ok=True)
 67        os.makedirs(os.path.join(self._data_path, 'virtuoso'), exist_ok=True)
 68        number_of_buffers = int(psutil.virtual_memory().total / (10**9)
 69                                * NUMBER_OF_BUFFERS_PER_GB)
 70        max_dirty_buffers = int(psutil.virtual_memory().total / (10**9)
 71                                * MAX_DIRTY_BUFFERS_PER_GB)
 72        environment = {'DBA_PASSWORD': PASSWORD,
 73                       'VIRT_SPARQL_ResultSetMaxRows': MAX_ROWS,
 74                       'VIRT_SPARQL_MaxQueryExecutionTime': QUERY_TIMEOUT,
 75                       'VIRT_SPARQL_ExecutionTimeout': QUERY_TIMEOUT,
 76                       'VIRT_SPARQL_MaxQueryCostEstimationTime': QUERY_TIMEOUT,
 77                       'VIRT_Parameters_MaxVectorSize': MAX_VECTOR_SIZE,
 78                       'VIRT_Parameters_NumberOfBuffers': number_of_buffers,
 79                       'VIRT_Parameters_MaxDirtyBuffers': max_dirty_buffers}
 80        super().__init__(f'blindreviewing/virtuoso:v{VERSION}',
 81                         'Virtuoso', self._logger,
 82                         ports={'8890': '8890', '1111': '1111'},
 83                         environment=environment,
 84                         volumes=[f'{self._data_path}/shared:/usr/share/proj',
 85                                  f'{tmp_dir}:/database'])
 86        self._endpoint = 'http://localhost:8890/sparql'
 87
 88    def initialization(self) -> bool:
 89        """Initialize Virtuoso's database.
 90
 91        Returns
 92        -------
 93        success : bool
 94            Whether the initialization was successful or not.
 95        """
 96        # Virtuoso should start with an initialized database, start Virtuoso
 97        # if not initialized to avoid the pre-run start during benchmark
 98        # execution
 99        success = self.wait_until_ready()
100        if not success:
101            return False
102        success = self.stop()
103
104        return success
105
106    @property
107    def root_mount_directory(self) -> str:
108        """Subdirectory in the root directory of the case for Virtuoso.
109
110        Returns
111        -------
112        subdirectory : str
113            Subdirectory of the root directory for Virtuoso.
114        """
115        return __name__.lower()
116
117    def wait_until_ready(self, command: str = '') -> bool:
118        """Wait until Virtuoso is ready to execute SPARQL queries.
119
120        Parameters
121        ----------
122        command : str
123            Command to execute in the Virtuoso container, optionally, defaults
124            to no command.
125
126        Returns
127        -------
128        success : bool
129            Whether the Virtuoso was initialized successfully or not.
130        """
131        return self.run_and_wait_for_log('Server online at', command=command)
132
133    def load(self, rdf_file: str) -> bool:
134        """Load an RDF file into Virtuoso.
135
136        Currently, only N-Triples files are supported.
137
138        Parameters
139        ----------
140        rdf_file : str
141            Name of the RDF file to load.
142
143        Returns
144        -------
145        success : bool
146            Whether the loading was successful or not.
147        """
148        return self.load_parallel(rdf_file, 1)
149
150    def load_parallel(self, rdf_file: str, cores: int) -> bool:
151        """Load an RDF file into Virtuoso in parallel.
152
153        Currently, only N-Triples files are supported.
154
155        Parameters
156        ----------
157        rdf_file : str
158            Name of the RDF file to load.
159        cores : int
160            Number of CPU cores for loading.
161
162        Returns
163        -------
164        success : bool
165            Whether the loading was successful or not.
166        """
167        success = True
168
169        success, logs = self.exec(f'sh -c "ls /usr/share/proj/{rdf_file}"')
170        for line in logs:
171            self._logger.debug(line)
172        if not success:
173            self._logger.error('RDF files do not exist for loading')
174            return False
175
176        # Load directory with data
177        success, logs = self.exec('isql -U dba -P root '
178                                  'exec="ld_dir(\'/usr/share/proj/\','
179                                  f'\'{rdf_file}\', '
180                                  '\'http://example.com/graph\');"')
181        for line in logs:
182            self._logger.debug(line)
183        if not success:
184            self._logger.error('ISQL loader query failure')
185            return False
186
187        loader_threads = []
188        self._logger.debug(f'Spawning {cores} loader threads')
189        for i in range(cores):
190            t = Thread(target=_spawn_loader, args=(self,), daemon=True)
191            t.start()
192            loader_threads.append(t)
193
194        for t in loader_threads:
195            t.join()
196        self._logger.debug(f'Loading finished with {cores} threads')
197
198        # Re-enable checkpoints and scheduler which are disabled automatically
199        # after loading RDF with rdf_loader_run()
200        success, logs = self.exec('isql -U dba -P root exec="checkpoint;"')
201        for line in logs:
202            self._logger.debug(line)
203        if not success:
204            self._logger.error('ISQL re-enable checkpoints query failure')
205            return False
206
207        success, logs = self.exec('isql -U dba -P root '
208                                  'exec="checkpoint_interval(60);"')
209        for line in logs:
210            self._logger.debug(line)
211        if not success:
212            self._logger.error('ISQL checkpoint interval query failure')
213            return False
214
215        success, logs = self.exec('isql -U dba -P root '
216                                  'exec="scheduler_interval(10);"')
217        for line in logs:
218            self._logger.debug(line)
219        if not success:
220            self._logger.error('ISQL scheduler interval query failure')
221            return False
222
223        return success
224
225    def stop(self) -> bool:
226        """Stop Virtuoso.
227
228        Drops all triples in Virtuoso before stopping its container.
229
230        Returns
231        -------
232        success : bool
233            Whether stopping Virtuoso was successful or not.
234        """
235        # Drop loaded triples
236        success, logs = self.exec('isql -U dba -P root '
237                                  'exec="delete from DB.DBA.load_list;"')
238        for line in logs:
239            self._logger.debug(line)
240        if not success:
241            self._logger.error('ISQL delete load list query failure')
242            return False
243
244        success, logs = self.exec('isql -U dba -P root '
245                                  'exec="rdf_global_reset();"')
246        for line in logs:
247            self._logger.debug(line)
248        if not success:
249            self._logger.error('ISQL RDF global reset query failure')
250            return False
251        return super().stop()
252
253    @property
254    def endpoint(self) -> str:
255        """SPARQL endpoint URL"""
256        return self._endpoint
257
258    @property
259    def headers(self) -> Dict[str, Dict[str, str]]:
260        """HTTP headers of SPARQL queries for serialization formats.
261
262        Only supported serialization formats are included in the dictionary.
263        Currently, the following formats are supported:
264        - N-Triples
265        - Turtle
266        - CSV
267        - RDF/JSON
268        - RDF/XML
269        - JSON-LD
270
271        Returns
272        -------
273        headers : dict
274            Dictionary of headers to use for each serialization format.
275        """
276        headers = {}
277        headers['ntriples'] = {'Accept': 'text/ntriples'}
278        headers['turtle'] = {'Accept': 'text/turtle'}
279        headers['rdfxml'] = {'Accept': 'application/rdf+xml'}
280        headers['rdfjson'] = {'Accept': 'application/rdf+json'}
281        headers['csv'] = {'Accept': 'text/csv'}
282        headers['jsonld'] = {'Accept': 'application/ld+json'}
283        return headers
284
285
286if __name__ == '__main__':
287    print(f'ℹ️  Starting up Virtuoso v{VERSION}...')
288    v = Virtuoso('data', 'config', 'log', True)
289    v.wait_until_ready()
290    input('ℹ️  Press any key to stop')
291    v.stop()
292    print('ℹ️  Stopped')
class Virtuoso(bench_executor.container.Container):
 43class Virtuoso(Container):
 44    """Virtuoso container to execute SPARQL queries"""
 45
 46    def __init__(self, data_path: str, config_path: str, directory: str,
 47                 verbose: bool):
 48        """Creates an instance of the Virtuoso class.
 49
 50        Parameters
 51        ----------
 52        data_path : str
 53            Path to the data directory of the case.
 54        config_path : str
 55            Path to the config directory of the case.
 56        directory : str
 57            Path to the directory to store logs.
 58        verbose : bool
 59            Enable verbose logs.
 60        """
 61        self._data_path = os.path.abspath(data_path)
 62        self._config_path = os.path.abspath(config_path)
 63        self._logger = Logger(__name__, directory, verbose)
 64
 65        tmp_dir = os.path.join(tempfile.gettempdir(), 'virtuoso')
 66        os.umask(0)
 67        os.makedirs(tmp_dir, exist_ok=True)
 68        os.makedirs(os.path.join(self._data_path, 'virtuoso'), exist_ok=True)
 69        number_of_buffers = int(psutil.virtual_memory().total / (10**9)
 70                                * NUMBER_OF_BUFFERS_PER_GB)
 71        max_dirty_buffers = int(psutil.virtual_memory().total / (10**9)
 72                                * MAX_DIRTY_BUFFERS_PER_GB)
 73        environment = {'DBA_PASSWORD': PASSWORD,
 74                       'VIRT_SPARQL_ResultSetMaxRows': MAX_ROWS,
 75                       'VIRT_SPARQL_MaxQueryExecutionTime': QUERY_TIMEOUT,
 76                       'VIRT_SPARQL_ExecutionTimeout': QUERY_TIMEOUT,
 77                       'VIRT_SPARQL_MaxQueryCostEstimationTime': QUERY_TIMEOUT,
 78                       'VIRT_Parameters_MaxVectorSize': MAX_VECTOR_SIZE,
 79                       'VIRT_Parameters_NumberOfBuffers': number_of_buffers,
 80                       'VIRT_Parameters_MaxDirtyBuffers': max_dirty_buffers}
 81        super().__init__(f'blindreviewing/virtuoso:v{VERSION}',
 82                         'Virtuoso', self._logger,
 83                         ports={'8890': '8890', '1111': '1111'},
 84                         environment=environment,
 85                         volumes=[f'{self._data_path}/shared:/usr/share/proj',
 86                                  f'{tmp_dir}:/database'])
 87        self._endpoint = 'http://localhost:8890/sparql'
 88
 89    def initialization(self) -> bool:
 90        """Initialize Virtuoso's database.
 91
 92        Returns
 93        -------
 94        success : bool
 95            Whether the initialization was successful or not.
 96        """
 97        # Virtuoso should start with an initialized database, start Virtuoso
 98        # if not initialized to avoid the pre-run start during benchmark
 99        # execution
100        success = self.wait_until_ready()
101        if not success:
102            return False
103        success = self.stop()
104
105        return success
106
107    @property
108    def root_mount_directory(self) -> str:
109        """Subdirectory in the root directory of the case for Virtuoso.
110
111        Returns
112        -------
113        subdirectory : str
114            Subdirectory of the root directory for Virtuoso.
115        """
116        return __name__.lower()
117
118    def wait_until_ready(self, command: str = '') -> bool:
119        """Wait until Virtuoso is ready to execute SPARQL queries.
120
121        Parameters
122        ----------
123        command : str
124            Command to execute in the Virtuoso container, optionally, defaults
125            to no command.
126
127        Returns
128        -------
129        success : bool
130            Whether the Virtuoso was initialized successfully or not.
131        """
132        return self.run_and_wait_for_log('Server online at', command=command)
133
134    def load(self, rdf_file: str) -> bool:
135        """Load an RDF file into Virtuoso.
136
137        Currently, only N-Triples files are supported.
138
139        Parameters
140        ----------
141        rdf_file : str
142            Name of the RDF file to load.
143
144        Returns
145        -------
146        success : bool
147            Whether the loading was successful or not.
148        """
149        return self.load_parallel(rdf_file, 1)
150
151    def load_parallel(self, rdf_file: str, cores: int) -> bool:
152        """Load an RDF file into Virtuoso in parallel.
153
154        Currently, only N-Triples files are supported.
155
156        Parameters
157        ----------
158        rdf_file : str
159            Name of the RDF file to load.
160        cores : int
161            Number of CPU cores for loading.
162
163        Returns
164        -------
165        success : bool
166            Whether the loading was successful or not.
167        """
168        success = True
169
170        success, logs = self.exec(f'sh -c "ls /usr/share/proj/{rdf_file}"')
171        for line in logs:
172            self._logger.debug(line)
173        if not success:
174            self._logger.error('RDF files do not exist for loading')
175            return False
176
177        # Load directory with data
178        success, logs = self.exec('isql -U dba -P root '
179                                  'exec="ld_dir(\'/usr/share/proj/\','
180                                  f'\'{rdf_file}\', '
181                                  '\'http://example.com/graph\');"')
182        for line in logs:
183            self._logger.debug(line)
184        if not success:
185            self._logger.error('ISQL loader query failure')
186            return False
187
188        loader_threads = []
189        self._logger.debug(f'Spawning {cores} loader threads')
190        for i in range(cores):
191            t = Thread(target=_spawn_loader, args=(self,), daemon=True)
192            t.start()
193            loader_threads.append(t)
194
195        for t in loader_threads:
196            t.join()
197        self._logger.debug(f'Loading finished with {cores} threads')
198
199        # Re-enable checkpoints and scheduler which are disabled automatically
200        # after loading RDF with rdf_loader_run()
201        success, logs = self.exec('isql -U dba -P root exec="checkpoint;"')
202        for line in logs:
203            self._logger.debug(line)
204        if not success:
205            self._logger.error('ISQL re-enable checkpoints query failure')
206            return False
207
208        success, logs = self.exec('isql -U dba -P root '
209                                  'exec="checkpoint_interval(60);"')
210        for line in logs:
211            self._logger.debug(line)
212        if not success:
213            self._logger.error('ISQL checkpoint interval query failure')
214            return False
215
216        success, logs = self.exec('isql -U dba -P root '
217                                  'exec="scheduler_interval(10);"')
218        for line in logs:
219            self._logger.debug(line)
220        if not success:
221            self._logger.error('ISQL scheduler interval query failure')
222            return False
223
224        return success
225
226    def stop(self) -> bool:
227        """Stop Virtuoso.
228
229        Drops all triples in Virtuoso before stopping its container.
230
231        Returns
232        -------
233        success : bool
234            Whether stopping Virtuoso was successful or not.
235        """
236        # Drop loaded triples
237        success, logs = self.exec('isql -U dba -P root '
238                                  'exec="delete from DB.DBA.load_list;"')
239        for line in logs:
240            self._logger.debug(line)
241        if not success:
242            self._logger.error('ISQL delete load list query failure')
243            return False
244
245        success, logs = self.exec('isql -U dba -P root '
246                                  'exec="rdf_global_reset();"')
247        for line in logs:
248            self._logger.debug(line)
249        if not success:
250            self._logger.error('ISQL RDF global reset query failure')
251            return False
252        return super().stop()
253
254    @property
255    def endpoint(self) -> str:
256        """SPARQL endpoint URL"""
257        return self._endpoint
258
259    @property
260    def headers(self) -> Dict[str, Dict[str, str]]:
261        """HTTP headers of SPARQL queries for serialization formats.
262
263        Only supported serialization formats are included in the dictionary.
264        Currently, the following formats are supported:
265        - N-Triples
266        - Turtle
267        - CSV
268        - RDF/JSON
269        - RDF/XML
270        - JSON-LD
271
272        Returns
273        -------
274        headers : dict
275            Dictionary of headers to use for each serialization format.
276        """
277        headers = {}
278        headers['ntriples'] = {'Accept': 'text/ntriples'}
279        headers['turtle'] = {'Accept': 'text/turtle'}
280        headers['rdfxml'] = {'Accept': 'application/rdf+xml'}
281        headers['rdfjson'] = {'Accept': 'application/rdf+json'}
282        headers['csv'] = {'Accept': 'text/csv'}
283        headers['jsonld'] = {'Accept': 'application/ld+json'}
284        return headers

Virtuoso container to execute SPARQL queries

Virtuoso(data_path: str, config_path: str, directory: str, verbose: bool)
46    def __init__(self, data_path: str, config_path: str, directory: str,
47                 verbose: bool):
48        """Creates an instance of the Virtuoso class.
49
50        Parameters
51        ----------
52        data_path : str
53            Path to the data directory of the case.
54        config_path : str
55            Path to the config directory of the case.
56        directory : str
57            Path to the directory to store logs.
58        verbose : bool
59            Enable verbose logs.
60        """
61        self._data_path = os.path.abspath(data_path)
62        self._config_path = os.path.abspath(config_path)
63        self._logger = Logger(__name__, directory, verbose)
64
65        tmp_dir = os.path.join(tempfile.gettempdir(), 'virtuoso')
66        os.umask(0)
67        os.makedirs(tmp_dir, exist_ok=True)
68        os.makedirs(os.path.join(self._data_path, 'virtuoso'), exist_ok=True)
69        number_of_buffers = int(psutil.virtual_memory().total / (10**9)
70                                * NUMBER_OF_BUFFERS_PER_GB)
71        max_dirty_buffers = int(psutil.virtual_memory().total / (10**9)
72                                * MAX_DIRTY_BUFFERS_PER_GB)
73        environment = {'DBA_PASSWORD': PASSWORD,
74                       'VIRT_SPARQL_ResultSetMaxRows': MAX_ROWS,
75                       'VIRT_SPARQL_MaxQueryExecutionTime': QUERY_TIMEOUT,
76                       'VIRT_SPARQL_ExecutionTimeout': QUERY_TIMEOUT,
77                       'VIRT_SPARQL_MaxQueryCostEstimationTime': QUERY_TIMEOUT,
78                       'VIRT_Parameters_MaxVectorSize': MAX_VECTOR_SIZE,
79                       'VIRT_Parameters_NumberOfBuffers': number_of_buffers,
80                       'VIRT_Parameters_MaxDirtyBuffers': max_dirty_buffers}
81        super().__init__(f'blindreviewing/virtuoso:v{VERSION}',
82                         'Virtuoso', self._logger,
83                         ports={'8890': '8890', '1111': '1111'},
84                         environment=environment,
85                         volumes=[f'{self._data_path}/shared:/usr/share/proj',
86                                  f'{tmp_dir}:/database'])
87        self._endpoint = 'http://localhost:8890/sparql'

Creates an instance of the Virtuoso class.

Parameters
  • data_path (str): Path to the data directory of the case.
  • config_path (str): Path to the config directory of the case.
  • directory (str): Path to the directory to store logs.
  • verbose (bool): Enable verbose logs.
def initialization(self) -> bool:
 89    def initialization(self) -> bool:
 90        """Initialize Virtuoso's database.
 91
 92        Returns
 93        -------
 94        success : bool
 95            Whether the initialization was successful or not.
 96        """
 97        # Virtuoso should start with an initialized database, start Virtuoso
 98        # if not initialized to avoid the pre-run start during benchmark
 99        # execution
100        success = self.wait_until_ready()
101        if not success:
102            return False
103        success = self.stop()
104
105        return success

Initialize Virtuoso's database.

Returns
  • success (bool): Whether the initialization was successful or not.
root_mount_directory: str

Subdirectory in the root directory of the case for Virtuoso.

Returns
  • subdirectory (str): Subdirectory of the root directory for Virtuoso.
def wait_until_ready(self, command: str = '') -> bool:
118    def wait_until_ready(self, command: str = '') -> bool:
119        """Wait until Virtuoso is ready to execute SPARQL queries.
120
121        Parameters
122        ----------
123        command : str
124            Command to execute in the Virtuoso container, optionally, defaults
125            to no command.
126
127        Returns
128        -------
129        success : bool
130            Whether the Virtuoso was initialized successfully or not.
131        """
132        return self.run_and_wait_for_log('Server online at', command=command)

Wait until Virtuoso is ready to execute SPARQL queries.

Parameters
  • command (str): Command to execute in the Virtuoso container, optionally, defaults to no command.
Returns
  • success (bool): Whether the Virtuoso was initialized successfully or not.
def load(self, rdf_file: str) -> bool:
134    def load(self, rdf_file: str) -> bool:
135        """Load an RDF file into Virtuoso.
136
137        Currently, only N-Triples files are supported.
138
139        Parameters
140        ----------
141        rdf_file : str
142            Name of the RDF file to load.
143
144        Returns
145        -------
146        success : bool
147            Whether the loading was successful or not.
148        """
149        return self.load_parallel(rdf_file, 1)

Load an RDF file into Virtuoso.

Currently, only N-Triples files are supported.

Parameters
  • rdf_file (str): Name of the RDF file to load.
Returns
  • success (bool): Whether the loading was successful or not.
def load_parallel(self, rdf_file: str, cores: int) -> bool:
151    def load_parallel(self, rdf_file: str, cores: int) -> bool:
152        """Load an RDF file into Virtuoso in parallel.
153
154        Currently, only N-Triples files are supported.
155
156        Parameters
157        ----------
158        rdf_file : str
159            Name of the RDF file to load.
160        cores : int
161            Number of CPU cores for loading.
162
163        Returns
164        -------
165        success : bool
166            Whether the loading was successful or not.
167        """
168        success = True
169
170        success, logs = self.exec(f'sh -c "ls /usr/share/proj/{rdf_file}"')
171        for line in logs:
172            self._logger.debug(line)
173        if not success:
174            self._logger.error('RDF files do not exist for loading')
175            return False
176
177        # Load directory with data
178        success, logs = self.exec('isql -U dba -P root '
179                                  'exec="ld_dir(\'/usr/share/proj/\','
180                                  f'\'{rdf_file}\', '
181                                  '\'http://example.com/graph\');"')
182        for line in logs:
183            self._logger.debug(line)
184        if not success:
185            self._logger.error('ISQL loader query failure')
186            return False
187
188        loader_threads = []
189        self._logger.debug(f'Spawning {cores} loader threads')
190        for i in range(cores):
191            t = Thread(target=_spawn_loader, args=(self,), daemon=True)
192            t.start()
193            loader_threads.append(t)
194
195        for t in loader_threads:
196            t.join()
197        self._logger.debug(f'Loading finished with {cores} threads')
198
199        # Re-enable checkpoints and scheduler which are disabled automatically
200        # after loading RDF with rdf_loader_run()
201        success, logs = self.exec('isql -U dba -P root exec="checkpoint;"')
202        for line in logs:
203            self._logger.debug(line)
204        if not success:
205            self._logger.error('ISQL re-enable checkpoints query failure')
206            return False
207
208        success, logs = self.exec('isql -U dba -P root '
209                                  'exec="checkpoint_interval(60);"')
210        for line in logs:
211            self._logger.debug(line)
212        if not success:
213            self._logger.error('ISQL checkpoint interval query failure')
214            return False
215
216        success, logs = self.exec('isql -U dba -P root '
217                                  'exec="scheduler_interval(10);"')
218        for line in logs:
219            self._logger.debug(line)
220        if not success:
221            self._logger.error('ISQL scheduler interval query failure')
222            return False
223
224        return success

Load an RDF file into Virtuoso in parallel.

Currently, only N-Triples files are supported.

Parameters
  • rdf_file (str): Name of the RDF file to load.
  • cores (int): Number of CPU cores for loading.
Returns
  • success (bool): Whether the loading was successful or not.
def stop(self) -> bool:
226    def stop(self) -> bool:
227        """Stop Virtuoso.
228
229        Drops all triples in Virtuoso before stopping its container.
230
231        Returns
232        -------
233        success : bool
234            Whether stopping Virtuoso was successful or not.
235        """
236        # Drop loaded triples
237        success, logs = self.exec('isql -U dba -P root '
238                                  'exec="delete from DB.DBA.load_list;"')
239        for line in logs:
240            self._logger.debug(line)
241        if not success:
242            self._logger.error('ISQL delete load list query failure')
243            return False
244
245        success, logs = self.exec('isql -U dba -P root '
246                                  'exec="rdf_global_reset();"')
247        for line in logs:
248            self._logger.debug(line)
249        if not success:
250            self._logger.error('ISQL RDF global reset query failure')
251            return False
252        return super().stop()

Stop Virtuoso.

Drops all triples in Virtuoso before stopping its container.

Returns
  • success (bool): Whether stopping Virtuoso was successful or not.
endpoint: str

SPARQL endpoint URL

headers: Dict[str, Dict[str, str]]

HTTP headers of SPARQL queries for serialization formats.

Only supported serialization formats are included in the dictionary. Currently, the following formats are supported:

  • N-Triples
  • Turtle
  • CSV
  • RDF/JSON
  • RDF/XML
  • JSON-LD
Returns
  • headers (dict): Dictionary of headers to use for each serialization format.