Combining a large number of CSV files (30,000) in Python Pandas

I use the following function to concatenate a large number of CSV files:

import pandas as pd

def concatenate():
    files = sort()  # input is an array of filenames
    merged = pd.DataFrame()
    for file in files:
        print("concatenating " + file)
        if file.endswith('FulltimeSimpleOpt.csv'):  # only consider those filenames
            filenamearray = file.split("_")
            f = pd.read_csv(file, index_col=0)
            f.loc[:, 'Vehicle'] = filenamearray[0].replace("veh", "")
            f.loc[:, 'Year'] = filenamearray[1].replace("year", "")
            if "timelimit" in file:
                f.loc[:, 'Timelimit'] = "1"
            else:
                f.loc[:, 'Timelimit'] = "0"
            merged = pd.concat([merged, f], axis=0)
    merged.to_csv('merged.csv')

The problem is that this function cannot handle a large number of files (30,000). With a sample of 100 files it finishes correctly, but with 30,000 files the script slows down more and more and eventually crashes.

How can I handle a large number of files better in Python Pandas?

1 answer

First build a list of DataFrames, then concatenate them once at the end:

def concatenate():
    files = sort()  # input is an array of filenames
    df_list = []
    # merged = pd.DataFrame()
    for file in files:
        print("concatenating " + file)
        if file.endswith('FulltimeSimpleOpt.csv'):  # only consider those filenames
            filenamearray = file.split("_")
            f = pd.read_csv(file, index_col=0)
            f.loc[:, 'Vehicle'] = filenamearray[0].replace("veh", "")
            f.loc[:, 'Year'] = filenamearray[1].replace("year", "")
            if "timelimit" in file:
                f.loc[:, 'Timelimit'] = "1"
            else:
                f.loc[:, 'Timelimit'] = "0"
            df_list.append(f)
    merged = pd.concat(df_list, axis=0)
    merged.to_csv('merged.csv')

Concatenating onto `merged` inside the loop copies the entire accumulated DataFrame on every iteration, which is why the script grinds to a halt with 30,000 files. Appending each DataFrame to a list and calling pd.concat once at the end avoids all that copying and is much faster.
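For reference, a minimal sketch of the same list-then-concat idea using pathlib to find the files. The directory argument and the glob pattern are assumptions for illustration; the original code receives a pre-sorted array of filenames, and the column names are taken from the question.

from pathlib import Path
import pandas as pd

def concatenate(directory="."):  # directory is an assumption; adjust to where the CSVs live
    df_list = []
    # only consider the relevant result files, as in the question
    for path in sorted(Path(directory).glob("*FulltimeSimpleOpt.csv")):
        parts = path.name.split("_")
        f = pd.read_csv(path, index_col=0)
        f["Vehicle"] = parts[0].replace("veh", "")
        f["Year"] = parts[1].replace("year", "")
        f["Timelimit"] = "1" if "timelimit" in path.name else "0"
        df_list.append(f)
    # one concat at the end instead of 30,000 incremental copies
    merged = pd.concat(df_list, axis=0)
    merged.to_csv("merged.csv")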


Source: https://habr.com/ru/post/1615040/
