Dec-14-2021, 07:34 PM
You may also use pandas and NumPy to deal with huge files; here is an example below.
import os

import numpy as np
import pandas as pd


def concat_csv_files(csv_path1, csv_path2, out_path, header=True, delimiter=';'):
    """Horizontally concatenate two delimited CSV files into one output file.

    Both inputs must have the same number of data rows; they may have
    different numbers of columns. All values are converted to float.

    Parameters
    ----------
    csv_path1, csv_path2 : str
        Paths of the two input CSV files.
    out_path : str
        Path of the output file written with ``np.savetxt`` (full ``%.18e``
        precision; pass data through ``fmt='%f'`` yourself if you want less).
    header : bool, optional
        True (default) if the input files have a header row to skip.
    delimiter : str, optional
        Field delimiter for both reading and writing (default ``';'``).

    Returns
    -------
    numpy.ndarray
        The concatenated 2-D float array that was written to ``out_path``.

    Raises
    ------
    ValueError
        If the two files do not have the same number of data rows, or a
        value cannot be converted to float.
    """
    # Let pandas consume the header row directly instead of reading it as
    # data and dropping row 0 afterwards (the original approach also pushed
    # the header strings through the float-conversion path).
    hdr = 0 if header else None
    array1 = pd.read_csv(csv_path1, header=hdr, delimiter=delimiter).to_numpy(dtype=float)
    array2 = pd.read_csv(csv_path2, header=hdr, delimiter=delimiter).to_numpy(dtype=float)

    # np.hstack requires equal row counts; fail with a clear message rather
    # than an opaque numpy shape error.
    if array1.shape[0] != array2.shape[0]:
        raise ValueError(
            f"Row count mismatch: {csv_path1} has {array1.shape[0]} rows, "
            f"{csv_path2} has {array2.shape[0]} rows"
        )

    combined = np.hstack((array1, array2))
    np.savetxt(out_path, combined, delimiter=delimiter)
    return combined


if __name__ == "__main__":
    # Original script behavior: concatenate data1.csv and data2.csv from the
    # current working directory (both with a header row) into concat.csv.
    cwd = os.getcwd()
    concat_csv_files(
        os.path.join(cwd, 'data1.csv'),
        os.path.join(cwd, 'data2.csv'),
        os.path.join(cwd, 'concat.csv'),
        header=True,
    )