I am trying to write a script that traverses the current folder and all of its subfolders and prints the name, size, and last-modified date of every file.

e.g.

test0.xml | 26 bytes | 11/21/08 23:35:28 (I can get it to do this)
Folder/test1.xml | 26 bytes | 11/21/08 23:35:28 (this I can't)
Folder/folder2/test2.xml | 26 bytes | 11/21/08 23:35:28

I can easily get it to work for the files in the current directory, but I always run into problems in Python when I try to include all the subdirectories as well.

#!/usr/bin/python
# Filename: log_file.py

"""
This script walks a directory and writes a log file containing all the filenames, modification times and file sizes. On a later date, the program checks this log file to detect any changes in the directory.
"""

import os, glob, time

cwd = os.getcwd()

log = open('log_file.txt', 'w') 

for dirpath, dirs, files in os.walk(cwd):

   # this inner walk was my attempt at reaching the subdirectories,
   # but it never worked:
   # for dirs in os.walk(cwd):
   #    for f in dirs:
   #       psplit = os.path.split(f)[1]

   for file in files:
      size = os.stat(file)
      file_date = time.strftime("%m/%d/%y %H:%M:%S", time.localtime())
      print file, '|', size.st_size, 'bytes', '|', file_date

Could someone explain what I need to do, please?

Always post the error messages you get!
The problem is in os.stat(file_path).st_size. By default it looks for the file in the current working directory (i.e. where your source code is running), so you must give it the full path to the file,
i.e. file_path = root + '/' + file. That's why you had the problem.

PS: sorry for my bad English

#!/usr/bin/python
# Filename: log_file.py

"""
This script walks a directory and writes a log file containing all the filenames, modification times and file sizes. On a later date, the program checks this log file to detect any changes in the directory.
"""

import os, glob, time

cwd = os.getcwd()
for root, dirs,files in os.walk(cwd):
    print root
    for file in files:
        file_path=root+'/'+file
        file_date = time.strftime("%m/%d/%y %H:%M:%S", time.localtime())
        print file,' | ',os.stat(file_path).st_size,'bytes | ',file_date
    print
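A slightly more portable variant (just a sketch, assuming Python 2.6+ for os.path.relpath, and the same imports as above) joins the path with os.path.join and reads each file's own modification time from os.stat, since that is what the log is meant to record:

for root, dirs, files in os.walk(cwd):
    for file in files:
        file_path = os.path.join(root, file)   # works on Windows and Unix
        info = os.stat(file_path)
        # the file's own modification time, not the time the script ran
        file_date = time.strftime("%m/%d/%y %H:%M:%S",
                                  time.localtime(info.st_mtime))
        # os.path.relpath gives the 'Folder/test1.xml' style of output
        print os.path.relpath(file_path, cwd), '|', info.st_size, 'bytes |', file_date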

OK, I fixed some things.

import os, glob, time

cwd = os.getcwd()
datalog = open('log_file.txt', 'w') 

for root,dirs,files in os.walk(cwd):

   for folder in glob.glob(root):
      for file in glob.glob(folder + '/*.**'):
         file_path = os.path.split(file)[1]
         size = os.stat(file)
         file_date = time.strftime("%m/%d/%y %H:%M:%S", time.localtime())
         data = '%s | %s bytes | %s ' % (os.path.split(file_path)[1], size.st_size, file_date),
         datalog.write(data)

As far as I can tell that should now work, but I get this error.

Traceback (most recent call last):
  File "C:\Documents and Settings\Gabe\My Documents\Scripts\testing_grounds\Log_file\log_file.py", line 21, in <module>
    datalog.write(data)
TypeError: argument 1 must be string or read-only character buffer, not tuple

Just take the comma off the end of line 20. Adding the comma makes it a tuple.

I would also recommend writing a newline after each item. To do this, just use:

datalog.write(data+'\n')
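You can see what the trailing comma does if you try it in the interactive interpreter:

>>> data = 'a | b | c',
>>> data
('a | b | c',)
>>> data = 'a | b | c'
>>> data
'a | b | c'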

Hope that helps

I just have one last question. I didn't clearly explain what I was using the code for. Right now this works to write the name, size and date of every file in the directory to a log file:

log_file = open('log_file.txt', 'w')

for root,dirs,files in os.walk(cwd):   
   for folder in glob.glob(root):
      for file in glob.glob(folder + '/*.**'):
         file_path = os.path.split(file)[1]
         size = os.stat(file)
         file_date = time.strftime("%m/%d/%y %H:%M:%S", time.localtime())
         data = '%s | %s bytes | %s ' % (os.path.split(file_path)[1], size.st_size, file_date)
         log_file.write(data+'\n')

log_file.close()

But I have some more code: it checks whether a log file has already been written and compares the two logs. Why does the above code work but this doesn't? Thanks for all the help.

#!/usr/bin/python
# Filename: log_file.py

"""
This script walks a directory and writes a log file containing all the filenames, modification times and file sizes. On a later date, the program checks this log file to detect any changes in the directory.
"""

import os, glob, time, filecmp, tempfile

cwd = os.getcwd()

def datalog():
   
   for root,dirs,files in os.walk(cwd):   
      for folder in glob.glob(root):
         for file in glob.glob(folder + '/*.**'):
            file_path = os.path.split(file)[1]
            size = os.stat(file)
            file_date = time.strftime("%m/%d/%y %H:%M:%S", time.localtime())
            data = '%s | %s bytes | %s ' % (os.path.split(file_path)[1], size.st_size, file_date)

if os.path.exists('log_file.txt'):

   # Create a 'temporary' file to store the directory's current state
   log_file2 = open('log_file2.txt', 'w')
   datalog()
   log_file2.write(data+'\n')
   log_file2.close()

   # Compare the directory's current state with the previous log
   if filecmp.cmp('log_file.txt', 'log_file2.txt'):
      print 'No files have been modified'
      os.remove('log_file2.txt')
         
   else:
      print 'The directory has been modified'
      done = raw_input("When done comparing the logs type 'done' ")
      if done == done:
         os.remove('log_file.txt')
         os.remove('log_file2.txt')
         log_file = open('log_file.txt', 'w')
         datalog()
         log_file.write(data+'\n')
         log_file.close()
   
else:

   log_file = open('log_file.txt', 'w')
   log_file.write(data+'\n')
   log_file.close()
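For what it's worth, the likely reason this version fails is that data is a local variable inside datalog(): it is reassigned for every file but never written or returned, so by the time the module-level log_file.write(data+'\n') runs there is no data in scope and you get a NameError. (Also, if done == done: is always true; you presumably meant if done == 'done':.) One possible restructuring, just a sketch along the lines of the earlier working loop, is to pass the open file object into datalog() and do all the writing inside it:

def datalog(log):
   # walk the whole tree and write one line per file to the open log file
   for root, dirs, files in os.walk(cwd):
      for file in files:
         file_path = os.path.join(root, file)
         size = os.stat(file_path)
         # use the file's modification time, not the current time --
         # otherwise the two logs will always differ on the timestamp field
         file_date = time.strftime("%m/%d/%y %H:%M:%S",
                                   time.localtime(size.st_mtime))
         log.write('%s | %s bytes | %s\n' % (file, size.st_size, file_date))

# building the temporary log then becomes:
log_file2 = open('log_file2.txt', 'w')
datalog(log_file2)
log_file2.close()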