Crawling data from a site that serves its content as JSON


I want to crawl data from https://www.balticshipping.com/vessels, including the ship info from detail pages such as https://www.balticshipping.com/vessel/imo/9331713, and save it to CSV tables. When I click through to the next page of results, the URL doesn't change, so I don't know how to collect the data from every page. Is there a way to get all of this data into one CSV file?

import requests
from bs4 import BeautifulSoup

baseurl = 'https://www.balticshipping.com/'
headers = {'User-Agent': 'Mozilla/5.0'}

productlinks = []
response = requests.get('https://www.balticshipping.com/vessels', headers=headers)
soup = BeautifulSoup(response.content, 'html.parser')
productlist = soup.find_all('div', id="search_results")
# loop to collect every href inside the results container
for item in productlist:
    for link in item.find_all('a', href=True):
        productlinks.append(baseurl + link['href'])
print(productlinks)

I tried this code to get all the links from the current page, but it gives me an empty result. Maybe there is an error in the line productlist = soup.find_all('div', id="search_results"), because it doesn't find anything when selecting by id instead of class.

CodePudding user response:

You can access that data through the API. The vessel list is loaded by a JavaScript POST request rather than shipped in the static HTML, which is why your find_all on the downloaded page comes back empty. But keep in mind, you'll be iterating through about 7,700 pages of data.

import requests
import pandas as pd

url = 'https://www.balticshipping.com/'

ships_found = True
page = 0
rows = []
# Swap the for-loop below for `while ships_found:` to crawl all ~7700 pages;
# range(10) keeps this sample run short.
for page in range(10):
    payload = {
    'request[0][module]': 'ships',
    'request[0][action]': 'list',
    'request[0][id]': '0',
    'request[0][data][0][name]': 'search_id',
    'request[0][data][0][value]': '0',
    'request[0][data][1][name]': 'name',
    'request[0][data][1][value]': '',
    'request[0][data][2][name]': 'imo',
    'request[0][data][2][value]': '',
    'request[0][data][3][name]': 'page',
    'request[0][data][3][value]': f'{page}',
    'request[0][sort]': '',
    'request[0][limit]': '27',
    'request[0][stamp]': '0',
    'request[1][module]': 'top_stat',
    'request[1][action]': 'list',
    'request[1][id]': '0',
    'request[1][data]': '',
    'request[1][sort]': '',
    'request[1][limit]': '',
    'request[1][stamp]': '0'}

    jsonData = requests.post(url, data=payload).json()

    if len(jsonData['data']['request'][0]['ships']) == 0:
        ships_found = False
        print('End of Pages.')
    else:
        for each in jsonData['data']['request'][0]['ships']:
            row = each['data']
            rows.append(row)

        page += 1
        print(page)

df = pd.DataFrame(rows)
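
Since the goal is one CSV file, the assembled DataFrame can be written out directly; the filename ships.csv below is just an example:

df.to_csv('ships.csv', index=False)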

CodePudding user response:

@chitown88's solution works, but from what I've seen the API returns incomplete data, so my solution is to also query each ship's details by its IMO number:

import requests

baseurl = 'https://www.balticshipping.com'

# Sample iteration from the first page to page 10
for pagenum in range(10):
    payload = {
        'request[0][module]': 'ships',
        'request[0][action]': 'list',
        'request[0][id]': 0,
        'request[0][data][0][name]': 'search_id',
        'request[0][data][0][value]': 0,
        'request[0][data][1][name]': 'name',
        'request[0][data][1][value]': '',
        'request[0][data][2][name]': 'imo',
        'request[0][data][2][value]': '',
        'request[0][data][3][name]': 'page',
        'request[0][data][3][value]': pagenum,
        'request[0][sort]': '',
        'request[0][limit]': 9,
        'request[0][stamp]': 0,
        'request[1][module]': 'top_stat',
        'request[1][action]': 'list',
        'request[1][id]': 0,
        'request[1][data]': '',
        'request[1][sort]': '',
        'request[1][limit]': '',
        'request[1][stamp]': 0
    }

    response = requests.post(baseurl, data=payload)
    try:
        jsondata = response.json()
    except requests.exceptions.JSONDecodeError:
        print("Error occurred while decoding JSON response.")
        continue  # jsondata is undefined here, so skip this page

    ships = jsondata['data']['request'][0]['ships']
    for ship in ships:
        ship_imo = ship['data']['imo']
        print(ship_imo)
        # Now query the ship's details by its IMO number,
        # e.g. https://www.balticshipping.com/vessel/imo/9331713
        # resp = requests.get('{baseurl}/vessel/imo/{ship_imo}'.format(baseurl=baseurl, ship_imo=ship_imo))
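
To actually collect those details into a CSV, the commented-out request can be fleshed out with BeautifulSoup. This is only a minimal sketch: it assumes the vessel detail page is static HTML that lists its attributes in two-column table rows, so the tr/td selectors (and the scrape_vessel helper) are my assumptions to verify against the real page, not the site's documented structure:

import csv
import requests
from bs4 import BeautifulSoup

def scrape_vessel(imo):
    # Assumption: ship attributes appear as two-column table rows
    # (label cell, value cell) in the static HTML of the detail page.
    resp = requests.get(f'https://www.balticshipping.com/vessel/imo/{imo}',
                        headers={'User-Agent': 'Mozilla/5.0'})
    soup = BeautifulSoup(resp.content, 'html.parser')
    details = {'imo': imo}
    for row in soup.find_all('tr'):
        cells = row.find_all('td')
        if len(cells) == 2:
            details[cells[0].get_text(strip=True)] = cells[1].get_text(strip=True)
    return details

# Gather details for each IMO collected above and write a single CSV.
records = [scrape_vessel(imo) for imo in ['9331713']]
fieldnames = sorted({key for record in records for key in record})
with open('vessel_details.csv', 'w', newline='', encoding='utf-8') as f:
    writer = csv.DictWriter(f, fieldnames=fieldnames)
    writer.writeheader()
    writer.writerows(records)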