Aufgrund des Datenvolumens, das ich abrufen muss, brauche ich Paginierung. https://i.sstatic.net/jkvws2c9.png
[Der Screenshot zeigt die Dokumentation für die Verwendung.] Ich weiß nicht genau, wie man den cURL-Aufruf in Python umsetzt; ich habe output = make_request(url, params).
Wenn ich meine Abfrage jedoch so ausführe, kommt keine Antwort zurück: Die nächste report_number wird über search_after nicht erkannt.
Code: Select all
import requests
import pandas as pd
import time
import math
# Base query: openFDA device adverse-event endpoint, filtered on
# date_manufacturer_received and requesting the API maximum of 1000 records
# per call.  NOTE(review): replace the api_key placeholder with a real key.
url = "https://api.fda.gov/device/event.json?api_key=XXXXXXXXXXXXXXXXXXX&search=date_manufacturer_received:[20250101+TO+20250509]&limit=1000"
# List to store all dataframes; one entry per fetched page, concatenated at the end.
dataframes = []
def make_request(url, params=None):
    """GET *url* with optional query *params*, retrying on HTTP 429.

    Blocks until a response with a status other than 429 is received.
    On 429 the wait honours the server's ``Retry-After`` header when it
    is present and numeric, falling back to the previous fixed 60 s.

    Args:
        url: Fully qualified request URL (may already carry a query string;
            ``params`` are merged into it by ``requests``).
        params: Optional dict of extra query parameters.

    Returns:
        requests.Response: the first non-429 response (its status may still
        indicate another error; callers inspect/parse it themselves).
    """
    while True:
        response = requests.get(url, params=params)
        if response.status_code == 429:  # Too Many Requests
            # Prefer the server-advertised back-off over the fixed default.
            try:
                delay = int(response.headers.get("Retry-After", 60))
            except ValueError:
                delay = 60
            print("Rate limit exceeded. Waiting before retrying...")
            time.sleep(delay)
        else:
            return response
# --- Initial request: fetch the first page and seed the pagination cursor ---
print("Fetching initial data...")
initial_response = make_request(url)
data = initial_response.json()

last_item = None
if 'results' in data:
    batch = data['results']
    dataframes.append(pd.DataFrame(batch))
    # Cursor = report_number of the final record on this page.
    last_item = batch[-1]['report_number']
    print(f"Initial data fetched. Last item: {last_item}")
else:
    print("No results found in the initial request.")

# --- Report how many pages the full result set spans ---
# 'total' lives under meta -> results in the openFDA response envelope.
meta_results = data.get('meta', {}).get('results', {})
if 'total' in meta_results:
    total_items = meta_results['total']
else:
    total_items = 0
    print("Key 'total' not found in the response.")
items_per_page = 1000
iterations = math.ceil(total_items / items_per_page)
print(iterations)
# Continue making requests using search_after-style pagination.
# BUG FIX: per the openFDA paging documentation, search_after only works
# when the request also carries a `sort` parameter; the cursor is then the
# value of the sort field from the last record of the previous page.
# Without `sort` the API does not recognise the cursor, which is why the
# original loop could not find the next report_number.
while last_item:
    print(f"Fetching data after {last_item}...")
    params = {
        'sort': 'report_number:asc',  # sort field must match the cursor field
        'search_after': last_item,    # cursor from the previous iteration
    }
    output = make_request(url, params)
    data = output.json()
    if 'results' in data:
        df = pd.DataFrame(data['results'])
        dataframes.append(df)
        new_last_item = data['results'][-1]['report_number']
        print(f"Data fetched. Last item: {new_last_item}")
        # Guard against an endless loop if the cursor stops advancing.
        if new_last_item == last_item:
            print("No new data found. Exiting loop.")
            break
        last_item = new_last_item
    else:
        # An empty page (or an error payload) ends the pagination.
        print("No more results found.")
        last_item = None
# --- Export: merge every fetched page into one frame and write it to CSV ---
if not dataframes:
    print("No dataframes created. Check the response data and query parameters.")
else:
    final_df = pd.concat(dataframes, ignore_index=True)
    final_df.to_csv("APICallTesting.csv", index=False)
    print("API Call Completed")