I have the following Python code to convert a CSV file into a JSON file.
def make_json_from_csv(csv_file_path, json_file_path, unique_column_name):
    import csv
    import json
    # create a dictionary
    data = {}
    # Open a csv reader called DictReader
    with open(csv_file_path, encoding='utf-8') as csvf:
        csv_reader = csv.DictReader(csvf)
        primary_key_column_name = unique_column_name.lstrip()  # remove leading space in string
        # Convert each row into a dictionary
        # and add it to data
        for rows in csv_reader:
            key = rows[primary_key_column_name]
            data[key] = rows
    # Open a json writer, and use the json.dumps()
    # function to dump data
    with open(json_file_path, 'w', encoding='utf-8') as jsonf:
        jsonf.write(json.dumps(data, indent=4))
    return None
The code above converts ALL the rows in the CSV file into the JSON file. I want to convert only the last X rows into JSON.
I am using Python 3.
CodePudding user response:
Since Python 3.7, dicts keep insertion order (in CPython 3.6 this was an implementation detail), so to fetch the last x entries of a dictionary, just do:
from itertools import islice

x = 5
d = {}
for i, v in enumerate("abcdedfghi"):
    d[i] = v

d = dict(islice(d.items(), len(d) - x, len(d)))
print(d)
Output
{5: 'd', 6: 'f', 7: 'g', 8: 'h', 9: 'i'}
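As a side note, when the dictionary comfortably fits in memory, slicing the list of items gives the same result without islice:
d = dict(list(d.items())[-x:])  # keep only the last x key/value pairs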
Basically, add (or change) these lines in your code:
from itertools import islice

x = 5
data = dict(islice(data.items(), len(data) - x, len(data)))

# Open a json writer, and use the json.dumps()
# function to dump data
with open(json_file_path, 'w', encoding='utf-8') as jsonf:
    jsonf.write(json.dumps(data, indent=4))
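If the CSV is very large and you only ever need the last x rows, a more memory-friendly variant is possible: keep a bounded window while reading instead of building the full dict first (collections.deque with maxlen drops older rows automatically). A minimal sketch, with the function name and arguments being illustrative only:

import csv
import json
from collections import deque

def last_rows_to_json(csv_file_path, json_file_path, unique_column_name, x):
    # keep only the most recent x rows while streaming through the file
    with open(csv_file_path, encoding='utf-8') as csvf:
        last_rows = deque(csv.DictReader(csvf), maxlen=x)
    # key each retained row by its unique column, as in the original code
    data = {row[unique_column_name.lstrip()]: row for row in last_rows}
    with open(json_file_path, 'w', encoding='utf-8') as jsonf:
        jsonf.write(json.dumps(data, indent=4))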
CodePudding user response:
I would like to answer my own question by building on Dani Mesejo's answer. The credit goes entirely to him.
def make_json(csv_file_path, json_file_path,
              unique_column_name, no_of_rows_to_extract):
    import csv
    import json
    from itertools import islice
    # create a dictionary
    data = {}
    # Open a csv reader called DictReader
    with open(csv_file_path, encoding='utf-8') as csvf:
        csv_reader = csv.DictReader(csvf)
        primary_key_column_name = unique_column_name.lstrip()  # remove leading space in string
        # Convert each row into a dictionary
        # and add it to data
        for rows in csv_reader:
            key = rows[primary_key_column_name]
            data[key] = rows
    # Keep only the last no_of_rows_to_extract rows
    data = dict(islice(data.items(), len(data) - no_of_rows_to_extract, len(data)))
    # Open a json writer, and use the json.dumps()
    # function to dump data
    with open(json_file_path, 'w', encoding='utf-8') as jsonf:
        jsonf.write(json.dumps(data, indent=4))
    return None
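For reference, a call might look like this (the file names and column name are just placeholders):

# Write the last 10 rows of input.csv to output.json, keyed by the "id" column
make_json('input.csv', 'output.json', 'id', 10)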