Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
129 changes: 128 additions & 1 deletion crates/lib/src/api/client/import.rs
Original file line number Diff line number Diff line change
@@ -1,7 +1,8 @@
use crate::api;
use crate::api::client;
use crate::error::OxenError;
use crate::model::RemoteRepository;
use crate::model::{NewCommitBody, RemoteRepository};
use crate::view::CommitResponse;

use std::path::Path;

Expand Down Expand Up @@ -56,17 +57,58 @@ pub async fn upload_zip(
Ok(response.commit)
}

/// Import a file from a URL into a repository branch.
///
/// Sends a POST to `/import/{branch}/{directory}` on the remote; the server
/// downloads the file at `download_url` and commits it with the given commit
/// metadata. When `force_update` is true, the server re-imports even if the
/// downloaded content is unchanged.
///
/// Returns the commit created by the import, or an `OxenError` if the request
/// fails or the response cannot be parsed.
pub async fn import_url(
    remote_repo: &RemoteRepository,
    branch_name: impl AsRef<str>,
    directory: impl AsRef<str>,
    download_url: impl AsRef<str>,
    commit: &NewCommitBody,
    force_update: bool,
) -> Result<crate::model::Commit, OxenError> {
    let uri = format!("/import/{}/{}", branch_name.as_ref(), directory.as_ref());
    let url = api::endpoint::url_from_repo(remote_repo, &uri)?;

    // Build the request payload. NOTE(review): the server expects the commit
    // author under the "name" key — confirm against the server handler.
    let mut payload = serde_json::json!({
        "download_url": download_url.as_ref(),
        "name": commit.author,
        "email": commit.email,
        "message": commit.message,
    });
    // Only attach the flag when set; absent is treated as false server-side.
    if force_update {
        payload["force_update"] = serde_json::Value::Bool(true);
    }

    let client = client::new_for_url(&url)?;
    let response = client
        .post(&url)
        .header("Content-Type", "application/json")
        .json(&payload)
        .send()
        .await?;

    // Surface the raw body in the error so a malformed response is debuggable.
    let body = client::parse_json_body(&url, response).await?;
    let parsed: CommitResponse = serde_json::from_str(&body)
        .map_err(|e| OxenError::basic_str(format!("Failed to parse response: {e}\n\n{body}")))?;

    Ok(parsed.commit)
}

#[cfg(test)]
mod tests {
use crate::test;
use bytes::Bytes;

use crate::constants::DEFAULT_BRANCH_NAME;
use crate::error::OxenError;
use crate::model::NewCommitBody;

use crate::api;

use std::io::Write;
use std::path::Path;

#[tokio::test]
async fn test_upload_zip_file() -> Result<(), OxenError> {
Expand Down Expand Up @@ -183,4 +225,89 @@ mod tests {
})
.await
}

#[tokio::test]
async fn test_import_url_with_force_update() -> Result<(), OxenError> {
    test::run_remote_repo_test_bounding_box_csv_pushed(|_local_repo, remote_repo| async move {
        let branch_name = DEFAULT_BRANCH_NAME;
        let download_url =
            "https://hub.oxen.ai/api/repos/ox/Oxen-AI-Assets/file/main/images/bloxy_white_background.png";

        let base_commit = NewCommitBody {
            message: "First import".to_string(),
            author: "Test User".to_string(),
            email: "test@oxen.ai".to_string(),
        };

        // Initial import should succeed and produce a commit.
        let initial_commit = api::client::import::import_url(
            &remote_repo,
            branch_name,
            "imported",
            download_url,
            &base_commit,
            false,
        )
        .await?;
        assert!(!initial_commit.id.is_empty());

        // Re-importing the identical file without force_update is a no-op on
        // the server and should surface as an error.
        let result = api::client::import::import_url(
            &remote_repo,
            branch_name,
            "imported",
            download_url,
            &NewCommitBody {
                message: "Second import no force".to_string(),
                ..base_commit.clone()
            },
            false,
        )
        .await;
        assert!(
            result.is_err(),
            "Expected import to fail with no changes: {result:?}"
        );

        // With force_update set, re-importing the same content must succeed
        // and create a distinct commit.
        let forced_commit = api::client::import::import_url(
            &remote_repo,
            branch_name,
            "imported",
            download_url,
            &NewCommitBody {
                message: "Force update import".to_string(),
                ..base_commit.clone()
            },
            true,
        )
        .await?;
        assert_ne!(initial_commit.id, forced_commit.id);

        // The entry's latest_commit should now point at the forced commit.
        let listing =
            api::client::dir::list(&remote_repo, branch_name, Path::new("imported"), 1, 100)
                .await?;
        let file_entry = listing
            .entries
            .iter()
            .find(|e| e.filename() == "bloxy_white_background.png")
            .expect("Should find the imported file");
        let latest_commit = file_entry
            .latest_commit()
            .expect("Entry should have latest_commit");
        assert_eq!(
            latest_commit.id, forced_commit.id,
            "latest_commit should match the force_update commit"
        );

        Ok(remote_repo)
    })
    .await
}
}
13 changes: 12 additions & 1 deletion crates/lib/src/api/client/versions.rs
Original file line number Diff line number Diff line change
Expand Up @@ -110,6 +110,7 @@ pub async fn parallel_large_file_upload(
file_path: impl AsRef<Path>,
dst_dir: Option<impl AsRef<Path>>, // dst_dir is provided for workspace add workflow
workspace_id: Option<String>,
force_update: bool,
entry: Option<Entry>, // entry is provided for push workflow
progress: Option<&Arc<PushProgress>>, // for push workflow
) -> Result<MultipartLargeFileUpload, OxenError> {
Expand All @@ -132,7 +133,14 @@ pub async fn parallel_large_file_upload(
.await?;
let num_chunks = results.len();
log::debug!("multipart_large_file_upload num_chunks: {num_chunks:?}");
complete_multipart_large_file_upload(remote_repo, upload, num_chunks, workspace_id).await
complete_multipart_large_file_upload(
remote_repo,
upload,
num_chunks,
workspace_id,
force_update,
)
.await
}

/// Creates a new multipart large file upload
Expand Down Expand Up @@ -441,6 +449,7 @@ async fn complete_multipart_large_file_upload(
upload: MultipartLargeFileUpload,
num_chunks: usize,
workspace_id: Option<String>,
force_update: bool,
) -> Result<MultipartLargeFileUpload, OxenError> {
let file_hash = &upload.hash.to_string();

Expand All @@ -463,6 +472,7 @@ async fn complete_multipart_large_file_upload(
upload_results: None,
}],
workspace_id,
force_update,
};

let body = serde_json::to_string(&body)?;
Expand Down Expand Up @@ -826,6 +836,7 @@ mod tests {
path,
dst_dir,
workspace_id,
false,
None,
None,
)
Expand Down
Loading
Loading