I have a Python script that reads several CSV files and writes the data to a single file, but every time it loops around to the next file it prints the headers again. Is there a way to stop this? I can post my code if it is necessary.


You can read and ignore the first line of every file other than the first one. I assume the format of these files is exactly the same. Something similar to:

def files_to_one(new_file, *files):
    with open(new_file, 'w') as fout:
        for fileno, fn in enumerate(files):
            with open(fn) as fin:
                print fileno, fn
                first = fin.readline()
                if not fileno:          # write the header only from the first file
                    fout.write(first)
                fout.write(fin.read())
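Called on some hypothetical input files, it would look like:

files_to_one('combined.csv', 'jan.csv', 'feb.csv', 'mar.csv')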

Here is a different way, with a small change to the good answer given by pyTony.
It uses next() to skip the first line (the header).

def files_to_one(new_file, *files):
    with open(new_file, 'w') as fout:
        for f in files:
            with open(f) as fin:
                next(fin)               # skip the header line
                for line in fin:
                    fout.write(line)    # line already ends with a newline

Sure, snippsat, but you forgot to include the first header. File is not a good name for a variable, which I corrected in the original code.
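For completeness, here is a minimal sketch that combines the two ideas using the csv module: keep the header row from the first file and skip it in the rest. It assumes every input file starts with exactly one header row; the name csv_files_to_one is just for illustration.

import csv

def csv_files_to_one(new_file, *files):
    with open(new_file, 'wb') as fout:              # binary mode for the csv module on Python 2
        writer = csv.writer(fout)
        for fileno, fn in enumerate(files):
            with open(fn, 'rb') as fin:
                reader = csv.reader(fin)
                header = next(reader)               # always read the header row
                if not fileno:                      # but only write it for the first file
                    writer.writerow(header)
                writer.writerows(reader)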

OK, this may help more. Here is the code that I have, if it will help explain where I need to put the code and what everything should be called.

import csv, datetime, os, shutil, glob

franchiseList = {}

with open('Franchise_Name_Scrub_List.csv', 'r') as ff:
    fcf = csv.DictReader(ff)
    for frow in fcf:
        franchiseList[frow['Misc Franchise Name']] = frow
with open('Franchise_Name_Scrub_List.csv', 'r') as fF:
    fcf = csv.DictReader(fF)
    for Frow in fcf:
        franchiseList[Frow['FRANCHISE Name - Directory']] = Frow

newrow={'Last Sale Date': '', 'Last Sale Amount': '', 'First Name': '', 'Last Name': '', 'Email Address': '', 'Franchise': '', 'State': '', 'Postal/Zip Code': '', 'Last Web Order ID': '', 'Date Added': '', 'Email Source':'', 'osg_web_dir': ''}
updaterow={'Last Sale Date': '', 'Last Sale Amount': '', 'First Name': '', 'Last Name': '', 'Email Address': '', 'Franchise': '', 'State': '', 'Postal/Zip Code': '', 'Last Web Order ID': '',  'osg_web_dir': ''}
new_field_names = newrow.keys()
update_field_names = updaterow.keys()
dt = datetime.datetime.now().strftime("%m_%d_%y_%H_%M_%S")

os.chdir("/Users/HatterX/Desktop/Unprocessed")
for FILE in glob.glob("SOR935*"):
    with open(FILE, 'r') as f1, open('/Users/HatterX/Desktop/Bronto Files/FACTS_bronto_import_add_'+dt+'.csv', 'ab') as f2, open('/Users/HatterX/Desktop/Bronto Files/FACTS_bronto_import_update_'+dt+'.csv', 'ab') as f3:
        cf1 = csv.DictReader(f1, fieldnames=('CustNo1', 'CustNo2', 'LastOrderDate', 'LastOrderAmount', 'FirstName', 'LastName', 'UserNo', 'EmailAddress', 'Franchise', 'PrevOrderDate', 'PrevOrderAmount', 'State', 'ZIP', 'Amt1', 'Amt2', 'Amt3', 'SalesPerson', 'WEBID'))
        cf2 = csv.DictWriter(f2, new_field_names)
        cf3 = csv.DictReader(f1, fieldnames=('CustNo1', 'CustNo2', 'LastOrderDate', 'LastOrderAmount', 'FirstName', 'LastName', 'UserNo', 'EmailAddress', 'Franchise', 'PrevOrderDate', 'PrevOrderAmount', 'State', 'ZIP', 'Amt1', 'Amt2', 'Amt3', 'SalesPerson', 'WEBID'))
        cf4 = csv.DictWriter(f3, update_field_names)
        cf2.writeheader()
        cf4.writeheader()

        for row in cf1:
            nr = newrow
            nr['Last Sale Date'] = row['LastOrderDate'].strip()
            nr['Last Sale Amount'] = row['LastOrderAmount'].strip()
            nr['First Name'] = row['FirstName'].strip()
            nr['Last Name'] = row['LastName'].strip()
            nr['Email Address'] = row['EmailAddress'].strip().split(',',1)[0]
            if nr['Email Address'] == '':
                continue

            fr_name = row['Franchise'].strip()
            if fr_name in franchiseList:
                nr['Franchise'] = franchiseList[fr_name]['FRANCHISE Name'].strip()
                if nr['Franchise'] == 'IGNORE':
                    continue
                nr['osg_web_dir'] = franchiseList[fr_name]['FRANCHISE Name - Directory'].strip()
                if nr['osg_web_dir'] == '':
                    nr['osg_web_dir'] = 'shop'
            else:
                nr['Franchise'] = 'SHOP'
                nr['osg_web_dir'] = 'shop'

            nr['State'] = row['State'].strip()
            nr['Postal/Zip Code'] = row['ZIP'].strip()
            nr['Last Web Order ID'] = row['WEBID'].strip()
            nr['Date Added'] = datetime.date.today().strftime('%m/%d/%Y')  
            nr['Email Source'] = 'FACTSauto'
            print nr
            cf2.writerow(nr)


            ur = updaterow
            ur['Last Sale Date'] = row['LastOrderDate'].strip()
            ur['Last Sale Amount'] = row['LastOrderAmount'].strip()
            ur['First Name'] = row['FirstName'].strip()
            ur['Last Name'] = row['LastName'].strip()
            ur['Email Address'] = row['EmailAddress'].strip().split(',',1)[0]
            if ur['Email Address'] == '':
                continue

            fr_name = row['Franchise'].strip()
            if fr_name in franchiseList:
                ur['Franchise'] = franchiseList[fr_name]['FRANCHISE Name'].strip()
                if ur['Franchise'] == 'IGNORE':
                    continue
                ur['osg_web_dir'] = franchiseList[fr_name]['FRANCHISE Name - Directory'].strip()
                if ur['osg_web_dir'] == '':
                    ur['osg_web_dir'] = 'shop'
            else:
                ur['Franchise'] = 'SHOP'
                ur['osg_web_dir'] = 'shop'

            ur['State'] = row['State'].strip()
            ur['Postal/Zip Code'] = row['ZIP'].strip()
            ur['Last Web Order ID'] = row['WEBID'].strip()
            print ur
            cf4.writerow(ur)




    shutil.move(FILE, "/Users/HatterX/Desktop/Processed/"+ dt +"_"+ FILE)
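The headers repeat because the two output files are reopened and cf2.writeheader() / cf4.writeheader() run on every pass through the for FILE in glob.glob("SOR935*") loop. One fix is to open f2 and f3, create the two DictWriter objects and call writeheader() once, before that loop, and only open the input FILE inside it (with each output opened just once, 'wb' mode is enough instead of 'ab'). A reduced sketch of that structure, with two made-up columns standing in for your real field name lists, just to show where everything goes:

import csv, glob

field_names = ['Email Address', 'Franchise']        # stand-in for new_field_names etc.

with open('FACTS_bronto_import_add.csv', 'wb') as fout:
    writer = csv.DictWriter(fout, field_names)
    writer.writeheader()                            # runs exactly once, before the file loop
    for name in glob.glob('SOR935*'):
        with open(name, 'r') as fin:
            reader = csv.DictReader(fin, fieldnames=('EmailAddress', 'Franchise'))
            for row in reader:
                writer.writerow({'Email Address': row['EmailAddress'].strip(),
                                 'Franchise': row['Franchise'].strip()})

In your script the per-row code that builds nr and ur stays exactly as it is; only the open() calls for f2 and f3, the DictWriter setup and the writeheader() calls move above the glob loop, while shutil.move() stays inside it.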