# Requires the requests package: pip install requests
# Run with: python export_script.py
import requests

# API configuration
AUTH_ENDPOINT = "https://engage-auth.longenesis.com/auth/realms/longenesis/protocol/openid-connect/token"
API_BASE_URL = "https://engage-api.longenesis.com"
CLIENT_ID = "your-client-id"
CLIENT_SECRET = "your-client-secret"
ORG_SLUG = "your-organization"
# Get access token
auth_data = {
    "grant_type": "client_credentials",
    "client_id": CLIENT_ID,
    "client_secret": CLIENT_SECRET
}
auth_response = requests.post(AUTH_ENDPOINT, data=auth_data)
auth_response.raise_for_status()  # fail fast if the credentials are rejected
access_token = auth_response.json()["access_token"]
headers = {"Authorization": f"Bearer {access_token}"}
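
# Optional: client-credentials tokens expire, so long-running jobs may want to
# fetch a fresh token per batch of requests. A minimal sketch; the actual token
# lifetime depends on the realm configuration and is not assumed here.
def get_auth_headers():
    response = requests.post(AUTH_ENDPOINT, data=auth_data)
    response.raise_for_status()
    return {"Authorization": f"Bearer {response.json()['access_token']}"}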
# Search for participants first
search_url = f"{API_BASE_URL}/search_org_participants/{ORG_SLUG}"
search_params = {"search_str": "Smith", "limit": 50}
search_response = requests.get(search_url, headers=headers, params=search_params)
search_data = search_response.json()
# Get participant IDs from search results
participant_ids = [person['person_id'] for person in search_data['persons']]
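
# If the search matched nothing, the export below would run with an empty
# person_filter; a minimal guard (assuming the "persons" list shape used above):
if not participant_ids:
    raise SystemExit("No participants matched the search term; nothing to export.")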
# Export data for these participants
export_url = f"{API_BASE_URL}/v2/xlsx_answers/{ORG_SLUG}"
# requests sends a list value as repeated query parameters
# (person_filter=<id1>&person_filter=<id2>...)
export_params = {
    "person_filter": participant_ids,
    "limit": 5000
}
export_response = requests.get(export_url, headers=headers, params=export_params)
# Save the Excel file
if export_response.status_code == 200:
    with open("participant_responses.xlsx", "wb") as f:
        f.write(export_response.content)
    print("Export saved as participant_responses.xlsx")
else:
    print(f"Export failed: {export_response.status_code}")

# Export data from a specific date range
export_params = {
    "person_filter": participant_ids,
    "date_filter": ["2024-01-01T00:00:00", "2024-12-31T23:59:59"],
    "limit": 5000
}
export_response = requests.get(export_url, headers=headers, params=export_params)
if export_response.status_code == 200:
    with open("participant_responses_2024.xlsx", "wb") as f:
        f.write(export_response.content)
    print("Export with date filter saved")

# Export data for specific participants
export_params = {
    "person_filter": ["PARTICIPANT_001", "PARTICIPANT_002", "PARTICIPANT_003"],
    "limit": 5000
}
export_response = requests.get(export_url, headers=headers, params=export_params)
if export_response.status_code == 200:
    with open("specific_participants.xlsx", "wb") as f:
        f.write(export_response.content)
    print("Export for specific participants saved")

# Export in chunks of 5000
offset = 0
chunk_size = 5000
chunk_number = 1
while True:
    params = {
        "person_filter": participant_ids,
        "limit": chunk_size,
        "offset": offset
    }
    response = requests.get(export_url, headers=headers, params=params)
    if response.status_code == 200 and len(response.content) > 0:
        filename = f"participant_responses_chunk_{chunk_number}.xlsx"
        with open(filename, "wb") as f:
            f.write(response.content)
        print(f"Saved {filename}")
        offset += chunk_size
        chunk_number += 1
    else:
        # Stop when the API returns an error or an empty body (no more rows)
        break

# Get submission data in JSON format
json_url = f"{API_BASE_URL}/v2/json_answers/{ORG_SLUG}"
json_params = {"person_filter": participant_ids}
json_response = requests.get(json_url, headers=headers, params=json_params)
submission_data = json_response.json()
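
# Optionally keep a raw copy of the JSON payload on disk next to the exports
# (plain convenience, not an API requirement).
with open("participant_responses.json", "w", encoding="utf-8") as f:
    f.write(json_response.text)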
# Extract submission IDs
for record in submission_data['records']:
    submission_id = record['submission_id']
    # Download PDF for this submission
    pdf_url = f"{API_BASE_URL}/submission_pdf/{submission_id}"
    pdf_response = requests.get(pdf_url, headers=headers)
    if pdf_response.status_code == 200:
        filename = f"submission_{submission_id}.pdf"
        with open(filename, "wb") as f:
            f.write(pdf_response.content)
        print(f"Saved {filename}")
"include_sections": ["answers", "calculated_answers", "response_alerts"]
}
pdf_response = requests.get(pdf_url, headers=headers, params=pdf_params) pip install requests python export_script.py