Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
53 changes: 53 additions & 0 deletions Cargo.lock

Some generated files are not rendered by default. Learn more about how customized files appear on GitHub.

3 changes: 3 additions & 0 deletions Cargo.toml
Original file line number Diff line number Diff line change
Expand Up @@ -4,8 +4,11 @@ version = "0.1.0"
edition = "2024"

[dependencies]
async-stream = "0.3"
axum = "0.8.7"
bytes = "1.9"
chrono = { version = "0.4.42", features = ["serde"] }
futures = "0.3"
serde = "1.0.228"
serde_json = "1.0"
sqlx = { version = "0.8.6", features = ["runtime-tokio-rustls", "postgres", "macros", "uuid", "chrono"] }
Expand Down
63 changes: 44 additions & 19 deletions src/handler.rs
Original file line number Diff line number Diff line change
@@ -1,13 +1,17 @@
use crate::models::{AppState, DataEntry, ExportData, ExportRequest, Project};
use crate::models::{AppState, DataEntry, ExportRequest, Project};
use bytes::Bytes;

use axum::{
body::Body,
extract::{Path, State},
http::{
StatusCode,
header::{CONTENT_DISPOSITION, CONTENT_TYPE},
},
response::{IntoResponse, Response},
};
use futures::stream::StreamExt;
use sqlx::Row;

pub async fn export(
State(state): State<AppState>,
Expand Down Expand Up @@ -51,28 +55,49 @@ pub async fn export(
})?
.ok_or(StatusCode::NOT_FOUND)?;

let data_entries = sqlx::query_as::<_, DataEntry>(
"SELECT data, created_at FROM data_entries WHERE project_id = $1 ORDER BY created_at DESC"
)
.bind(export_request.project_id)
.fetch_all(&state.pool)
.await
.map_err(|e| {
println!("Error while fetching data entries: {:?}", e);
let project_json = serde_json::to_string(&project).map_err(|e| {
println!("Error while serializing project: {:?}", e);
StatusCode::INTERNAL_SERVER_ERROR
})?;

let export_data = ExportData {
project,
data_entries,
};
let filename = format!("project-{}-export.json", export_request.project_id);
let project_id = export_request.project_id;

let json_string = serde_json::to_string_pretty(&export_data).map_err(|e| {
println!("Error while serializing export data: {:?}", e);
StatusCode::INTERNAL_SERVER_ERROR
})?;
let stream = async_stream::stream! {
yield Ok::<_, std::io::Error>(Bytes::from(format!("{{\"project\":{},\"data_entries\":[", project_json)));

let filename = format!("project-{}-export.json", export_request.project_id);
let mut row_stream = sqlx::query("SELECT data, created_at FROM data_entries WHERE project_id = $1 ORDER BY created_at DESC")
.bind(project_id)
.fetch(&state.pool);

let mut first = true;
while let Some(row) = row_stream.next().await {
match row {
Ok(row) => {
let data_entry = DataEntry {
data: row.try_get("data").ok(),
created_at: row.get("created_at"),
};

if let Ok(entry_json) = serde_json::to_string(&data_entry) {
if !first {
yield Ok(Bytes::from(","));
}
first = false;
yield Ok(Bytes::from(entry_json));
}
}
Err(e) => {
println!("Error while streaming data entry: {:?}", e);
break;
}
}
}

yield Ok(Bytes::from("]}"));
};

let body = Body::from_stream(stream);

Ok((
StatusCode::OK,
Expand All @@ -83,7 +108,7 @@ pub async fn export(
&format!("attachment; filename=\"{}\"", filename),
),
],
json_string,
body,
)
.into_response())
}
6 changes: 0 additions & 6 deletions src/models.rs
Original file line number Diff line number Diff line change
Expand Up @@ -32,9 +32,3 @@ pub struct DataEntry {
pub data: Option<serde_json::Value>,
pub created_at: NaiveDateTime,
}

/// Aggregate payload for a project export: the project record together with
/// every associated data-entry row, serialized as a single JSON document
/// (`{"project": ..., "data_entries": [...]}`).
///
/// NOTE(review): this diff deletes the struct — the handler now emits the
/// same JSON shape by hand-writing the envelope and streaming entries, so
/// the struct is no longer referenced. Kept documented here as a record of
/// the wire format the streaming code must continue to match.
#[derive(Debug, Serialize)]
pub struct ExportData {
/// The exported project's metadata (project-declared type; fields not visible here).
pub project: Project,
/// All data entries belonging to the project, in query order.
pub data_entries: Vec<DataEntry>,
}