I have written a macro, that is crawling through multiple excel files, which are all identical in terms of structure (columns, but row content may differ; there is a "key" though) and matching and copying the data into a master file. But with an increasing number of files the duration of macro execution is growing longer and longer, so maybe someone has a more efficient solution available?
Sub DataCrawler()
On Error GoTo HandleError
Application.ScreenUpdating = False
Dim objectFileSys As Object
Dim objectGetFolder As Object
Dim file As Object
Set objectFileSys = CreateObject("Scripting.FileSystemObject")
Set objectGetFolder = objectFileSys.GetFolder("pathName") ' location of folder with files
Dim counter As Integer
counter = 0
' macro opens one file after another and checks for every key, if data is available
For Each file In objectGetFolder.Files
Dim sourceFiles As Workbook
Set sourceFiles = Workbooks.Open(file.Path, True, True)
Dim lookUp As Range
Dim searchRange As Range
For i = 10 To 342 ' number of rows with key in master file
Set lookUp = Cells(i, 31)
Set searchRange = sourceFiles.Worksheets("tableName").Range("AE:AJ")
' if cell in master file related to the key is empty, copy data
If IsEmpty(Cells(i, 35)) Then
lookUp.Offset(0, 1).Value = Application.VLookup(lookUp, searchRange, 2, False)
lookUp.Offset(0, 2).Value = Application.VLookup(lookUp, searchRange, 3, False)
lookUp.Offset(0, 3).Value = Application.VLookup(lookUp, searchRange, 4, False)
lookUp.Offset(0, 4).Value = Application.VLookup(lookUp, searchRange, 5, False)
lookUp.Offset(0, 5).Value = Application.VLookup(lookUp, searchRange, 6, False)
' if cell in master file related to the key is already filled, skip
Else
End If
Next
sourceFiles.Close False
Set sourceFiles = Nothing
Next
HandleError:
Application.EnableEvents = True
Application.ScreenUpdating = True
End Sub
CodePudding user response:
A single Application.Match()
to find the row for the "key", then copying the content as an array would be faster, but it's difficult to say what impact that would have on the overall run time. That would depend on how many files you're opening, and what the performance of that aspect of the process is like.
Sub DataCrawler()
Dim objectFileSys As Object, objectGetFolder As Object
Dim file As Object, searchRange As Range, i As Long
Dim m, wsData As Worksheet, wbSource As Workbook
On Error GoTo HandleError
Application.ScreenUpdating = False
Application.EnableEvents = False
Set wsData = ThisWorkbook.Sheets("Lookup") 'for example
Set objectFileSys = CreateObject("Scripting.FileSystemObject")
Set objectGetFolder = objectFileSys.GetFolder("pathName")
For Each file In objectGetFolder.Files
Set wbSource = Workbooks.Open(file.Path, True, True)
Set searchRange = wbSource.Worksheets("tableName").Columns("AE")
For i = 10 To 342 ' number of rows with key in master file
If IsEmpty(wsData.Cells(i, 35)) Then
m = Application.Match(wsData.Cells(i, 31).Value, searchRange, 0)
If Not IsError(m) Then
wsData.Cells(i, 32).Resize(1, 5).Value = _
searchRange.Cells(m).Offset(0, 1).Resize(1, 5).Value
End If
End If
Next
wbSource.Close False
Next file
HandleError:
If Err.Number <> 0 Then MsgBox Err.Description
Application.EnableEvents = True
Application.ScreenUpdating = True
End Sub