I am trying to execute a SQL statement multiple times concurrently using the multiprocessing library. Here is the script I am using:
import os
import time
from multiprocessing import Pool

import psycopg2

rec = []

def make_query(connection):
    cursor = connection.cursor()
    print("conn: %s curs: %s pid=%s" % (id(connection), id(cursor), os.getpid()))
    postgreSQL_select_Query = "select * from users"
    cursor.execute(postgreSQL_select_Query)
    records = cursor.fetchall()
    for row in records:
        # collect the first column of every returned row
        rec.append(row[0])

if __name__ == "__main__":
    connection = psycopg2.connect(user="postgres", password="password",
                                  host="127.0.0.1", port="5432", database="abc")
    # cursor = connection.cursor()
    pool = Pool()
    start_time = time.time()
    for _ in range(1000):
        pool.apply_async(make_query, (connection,))
    pool.close()
    pool.join()
    print(rec)
    print("--- %s seconds ---" % (time.time() - start_time))
and this is the output I am getting:
[]
--- 0.48270273208618164 seconds ---
Am I doing anything wrong here? I am not able to access any records, even though the users table has thousands of rows.
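For reference, here is a minimal sketch of the pattern I think I am aiming for, reusing the same users table and connection parameters from my script. In this sketch each worker opens its own connection and returns its rows through apply_async instead of appending to a global list; it is only an illustration of the intent, not necessarily the correct fix:

import time
from multiprocessing import Pool

import psycopg2

# Connection parameters copied from my script above.
DB_PARAMS = dict(user="postgres", password="password",
                 host="127.0.0.1", port="5432", database="abc")

def make_query(_):
    # Each worker process opens its own connection and returns the rows
    # rather than appending to a module-level list.
    connection = psycopg2.connect(**DB_PARAMS)
    try:
        with connection.cursor() as cursor:
            cursor.execute("select * from users")
            return [row[0] for row in cursor.fetchall()]
    finally:
        connection.close()

if __name__ == "__main__":
    start_time = time.time()
    with Pool() as pool:
        # .get() also re-raises any exception from the worker,
        # so failures are not silently swallowed.
        results = [pool.apply_async(make_query, (i,)) for i in range(10)]
        rec = [value for r in results for value in r.get()]
    print(len(rec))
    print("--- %s seconds ---" % (time.time() - start_time))

If opening a connection per task turns out to be too expensive, I assume a connection per worker process (for example via Pool's initializer argument) would be the more typical setup.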