May-18-2019, 11:02 PM
This is sort of off topic — it involves Python, but only as a wrapper around an sqlite3 query.
I have created an index on Places.Name, but this query still takes several minutes to run on a table with 12,691 rows.
What can I do to speed this up?
from pathlib import Path
import os
import sqlite3


class SimpleQuery:
    """Join election results to places whose name starts with the result's town.

    Bug fixed: the original query built its pattern with ``Town + '%'``.
    In SQLite ``+`` is *numeric* addition — both operands are coerced to
    numbers (non-numeric text becomes 0) — so the LIKE pattern was never
    the intended prefix string.  String concatenation in SQLite is ``||``.
    """

    # NOTE(review): LIKE with a non-constant pattern cannot use the index
    # on Places.Name (SQLite's LIKE optimization requires a pattern known
    # at prepare time).  For large tables consider the sargable range form
    #   Places.Name >= Town AND Places.Name < Town || CHAR(0x10FFFF)
    # or an equality join on a normalized prefix column — TODO confirm
    # against the real schema before changing semantics.
    QUERY = (
        "SELECT CandidateLName, CandidateFName, CandidateMInit, "
        "CandidateSuffix, Town, PlaceId, Name, StateId "
        "FROM ElectionResults2012 "
        "JOIN Places ON Places.Name LIKE ElectionResults2012.Town || '%';"
    )

    def __init__(self, db_path=None):
        """Locate the database file.

        db_path: optional explicit path to an SQLite database.  When omitted,
        the original behavior is preserved: chdir to this script's directory
        and resolve ``../../data/National/database/Elections.db`` from there.
        """
        if db_path is None:
            # Original side effect kept for backward compatibility: anchor
            # all relative paths at this script's directory.
            os.chdir(os.path.abspath(os.path.dirname(__file__)))
            self.homepath = Path('.')
            self.dbpath = self.homepath / '../..' / 'data' / 'National' / 'database'
            self.db = self.dbpath / 'Elections.db'
        else:
            self.db = db_path
        self.dbcon = None
        self.dbcur = None

    def try_query(self):
        """Run QUERY and return the fetched rows as a list of tuples.

        Opens a connection, executes on the cursor (the original executed on
        the connection and never fetched, discarding the result set), closes
        the connection, and returns the rows.  On failure the connection is
        rolled back and closed before the exception propagates, so no
        connection is leaked.
        """
        self.db_connect()
        try:
            self.dbcur.execute(self.QUERY)
            rows = self.dbcur.fetchall()
        except Exception:
            self.db_close(rollback=True)
            raise
        self.db_close()
        return rows

    def db_connect(self):
        """Open the connection and cursor on ``self.db``."""
        print(f'\nConnecting to: {self.db}')
        self.dbcon = sqlite3.connect(self.db)
        self.dbcur = self.dbcon.cursor()

    def db_close(self, rollback=False):
        """Commit (or roll back, when ``rollback`` is true) and close."""
        if rollback:
            self.dbcon.rollback()
        else:
            self.dbcon.commit()
        self.dbcon.close()

    def db_commit(self):
        """Commit the current transaction without closing."""
        self.dbcon.commit()


if __name__ == '__main__':
    sq = SimpleQuery()
    sq.try_query()