Im inexperienced at threading in python and was trying to make a few simple mulithreaded programs to get a bit more experience. I'm trying to send requests out to a pre-defined list of URLs.
When trying to execute the program, it instantly finishes and prints("End") with no exits or exceptions. the print call placed in the thread_function doesn't execute and no errors are thrown.
Any help would be appreciated.
import networking
import threading
import concurrent.futures
class concurrencyTest:
def __init__(self, URLlist):
self.URLlist = URLlist
self.resourceDict = {}
self._urlListLock = threading.Lock()
self._resourceListLock = threading.Lock()
def sendMultiThreadedRequests(self, threadNum=3):
self.resourceDict = {}
with concurrent.futures.ThreadPoolExecutor(max_workers=threadNum) as executor:
results = executor.map(self.thread_function)
def thread_function(self):
print("You are are in the thread_function")
while True:
with self._urlListLock:
numOfRemainingURL = len(self.URLlist)
print(numOfRemainingURL)
if numOfRemainingURL == 0:
return
urlToRequest = self.URLlist.pop()
webpage = networking.getWebpage(urlToRequest)
##parse webpage or resource
with self._resourceListLock:
self.resourceDict[urlToRequest] = webpage
def sendRegularRequests(self):
self.resourceDict = {}
for url in self.URLlist:
resource = networking.getWebpage(url)
self.resourceDict[url] = resource
def updateURLpool(self):
return "Not currently coded"
def main():
#The real urlList is a lot larger than just 3 URLs
urlList = ["www.google.com","www.stackoverflow.com","www.reddit.com"]
parTest = concurrencyTest(urlList)
parTest.sendMultiThreadedRequests()
print("End")
main()
multiprocessing.networkingmodule withgetWebpage(..)come from? Can't find any reference to it online.