I have been coding in Python since the last 2 weeks and pretty new to it.
I have written a code to kind of emulate the way "find" command works in *NIX systems. My code works okay-ish for not so deep directories but if I start searching from the "root" directory, it takes too much time and processor heats up :D which on the other hand takes about 8 seconds using "find" cmd.
Hey I know I am kinda noob in Python now but any hint at trying to improve the search efficiency will be greatly appreciated.
Here's what I have written:
#!/usr/bin/python3
import os
class srchx:
file_names = []
is_prohibit = False
def show_result(self):
if(self.is_prohibit):
print("some directories were denied read-access")
print("\nsearch returned {0} result(s)".format(len(self.file_names)))
for _file in self.file_names:
print(_file)
def read_dir(self, cur_dir, srch_name, level):
try:
listing = os.listdir(cur_dir)
except:
self.is_prohibit = True
return
dir_list = []
#print("-"*level+cur_dir)
for entry in listing:
if(os.path.isdir(cur_dir+"/"+entry)):
dir_list.append(entry)
else:
if(srch_name == entry):
self.file_names.append(cur_dir+"/"+entry)
for _dir in dir_list:
new_dir = cur_dir + "/" + _dir
self.read_dir(new_dir, srch_name, level+1)
if(level == 0):
self.show_result()
def __init__(self, dir_name=os.getcwd()):
srch_name = ""
while(len(srch_name) == 0):
srch_name = input("search for: ")
self.read_dir(dir_name, srch_name, 0)
def main():
srch = srchx()
if (__name__ == "__main__"):
main()
Take a look at and please help me to solve this issue.