CSV to JSON Mass converter in Python


I have a folder called DATA. Inside that folder there are multiple .logs files, each formatted as CSV. Now I want to convert every single .logs file inside the DATA folder to JSON using Python.

import csv 
import json 
import glob, os

def csv_to_json(csvFilePath, jsonFilePath):
    jsonArray = []
      
    #read csv file
    with open(csvFilePath, encoding='utf-8') as csvf: 
        #load csv file data using csv library's dictionary reader
        csvReader = csv.DictReader(csvf) 

        #convert each csv row into python dict
        for row in csvReader: 
            #add this python dict to json array
            jsonArray.append(row)
  
    #convert python jsonArray to JSON String and write to file
    with open(jsonFilePath, 'w', encoding='utf-8') as jsonf: 
        jsonString = json.dumps(jsonArray, indent=4)
        jsonf.write(jsonString)

os.chdir(r"C:\Users\Arda\Desktop\DATA")# use whatever directory you want

#double\\ no single \

for file in glob.glob("**/*.logs", recursive = True):
    csvFilePath = [] 
    csvFilePath = file
    

    jsonFilePath = r'data.json'
    csv_to_json(csvFilePath, jsonFilePath)
    

I can only convert one single file, but there are multiple .logs files formatted as CSV. Of the list below, only the last one, "T1555.logs", ends up converted to JSON:

T1003.001.logs
T1003.002.logs
T1003.003.logs
T1003.004.logs
T1003.logs
T1552.002.logs
T1552.004.logs
T1555.003.logs
T1555.logs 

CodePudding user response:

Your loop writes every file to the same data.json, so each iteration overwrites the previous output and only the last file's rows survive. I'd re-arrange where you're traversing the files so that all of the results are collected in a single jsonArray, then written to the file once at the end:

import csv 
import json 
import glob, os

def csvs_to_json(csvFilePaths, jsonFilePath):
    jsonArray = []
    
    for csvFilePath in csvFilePaths:  
        #read csv file
        with open(csvFilePath, encoding='utf-8') as csvf: 
            #load csv file data using csv library's dictionary reader
            csvReader = csv.DictReader(csvf) 
    
            #convert each csv row into python dict
            for row in csvReader: 
                #add this python dict to json array
                jsonArray.append(row)
  
    #convert python jsonArray to JSON String and write to file
    with open(jsonFilePath, 'w', encoding='utf-8') as jsonf: 
        jsonString = json.dumps(jsonArray, indent=4)
        jsonf.write(jsonString)

os.chdir(r"C:\Users\Arda\Desktop\DATA")# use whatever directory you want

#double\\ no single \

files = list(glob.glob("**/*.logs", recursive = True))
    
jsonFilePath = r'data.json'
csvs_to_json(files, jsonFilePath)

Let me know if this works for you!
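
If, instead of one combined data.json, you actually want a separate .json file next to each .logs file, here is a minimal sketch along the same lines. It assumes the pathlib approach (Path.rglob to walk the folder, with_suffix to derive the output name); adjust the directory to your own DATA folder:

import csv
import json
import pathlib

data_dir = pathlib.Path(r"C:\Users\Arda\Desktop\DATA")  # use whatever directory you want

for logs_path in data_dir.rglob("*.logs"):
    # read the CSV-formatted .logs file into a list of dicts
    with open(logs_path, encoding='utf-8') as csvf:
        rows = list(csv.DictReader(csvf))

    # write one JSON file per input, e.g. T1003.001.logs -> T1003.001.json
    json_path = logs_path.with_suffix(".json")
    with open(json_path, 'w', encoding='utf-8') as jsonf:
        json.dump(rows, jsonf, indent=4)
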
