import requests
from bs4 import BeautifulSoup
import csv

url = 'https://example.com/data'  # Replace with the actual URL

response = requests.get(url)

if response.status_code == 200:
    content_type = response.headers.get('Content-Type', '')

    if 'text/html' in content_type:
        # Parse HTML with BeautifulSoup
        soup = BeautifulSoup(response.text, 'html.parser')
        print("HTML content:")
        print(soup.prettify())
    elif 'application/json' in content_type:
        # requests decodes JSON directly, so the json module is not needed here
        data = response.json()
        print("JSON data:")
        print(data)
    elif 'application/xml' in content_type or 'text/xml' in content_type:
        # The 'xml' parser requires the lxml package to be installed
        soup = BeautifulSoup(response.content, 'xml')
        print("XML content:")
        print(soup.prettify())
    elif 'text/plain' in content_type:
        text_content = response.text
        print("Plain text content:")
        print(text_content)
    elif 'text/csv' in content_type:
        # Split the body into lines and parse them with the csv module
        csv_content = response.text
        print("CSV content:")
        reader = csv.reader(csv_content.splitlines())
        for row in reader:
            print(row)
    else:
        print("Received unsupported content type:", content_type)
else:
    print(f"Failed to retrieve data. Status code: {response.status_code}")