From df51a917163152ad04279312accb7bca9eea25b2 Mon Sep 17 00:00:00 2001 From: "copilot-swe-agent[bot]" <198982749+Copilot@users.noreply.github.com> Date: Sat, 13 Sep 2025 18:29:35 +0000 Subject: [PATCH 01/11] Initial plan From 43a44f3a674a5529587cbe4960e7a973ad77e019 Mon Sep 17 00:00:00 2001 From: "copilot-swe-agent[bot]" <198982749+Copilot@users.noreply.github.com> Date: Sat, 13 Sep 2025 18:35:11 +0000 Subject: [PATCH 02/11] Implement postgresExportLargeObject function Co-authored-by: krlmlr <1741643+krlmlr@users.noreply.github.com> --- NAMESPACE | 1 + R/PqConnection.R | 37 +++++++++ man/postgresExportLargeObject.Rd | 35 ++++++++ src/DbConnection.cpp | 6 ++ src/DbConnection.h | 1 + src/connection.cpp | 5 ++ src/cpp11.cpp | 8 ++ tests/testthat/test-ImportLargeObject.R | 103 ++++++++++++++++++++++++ 8 files changed, 196 insertions(+) create mode 100644 man/postgresExportLargeObject.Rd diff --git a/NAMESPACE b/NAMESPACE index 19ef3ccf..6c1f061f 100644 --- a/NAMESPACE +++ b/NAMESPACE @@ -5,6 +5,7 @@ export(Id) export(Postgres) export(Redshift) export(postgresDefault) +export(postgresExportLargeObject) export(postgresHasDefault) export(postgresImportLargeObject) export(postgresIsTransacting) diff --git a/R/PqConnection.R b/R/PqConnection.R index 35c0e5a9..e0ed9b3f 100644 --- a/R/PqConnection.R +++ b/R/PqConnection.R @@ -168,3 +168,40 @@ postgresImportLargeObject <- function(conn, filepath = NULL, oid = 0) { connection_import_lo_from_file(conn@ptr, filepath, oid) } + +#' Exports a large object to file +#' +#' Exports a large object from the database to a file on disk +#' +#' @export +#' @param conn a [PqConnection-class] object, produced by +#' [DBI::dbConnect()] +#' @param oid the object identifier (Oid) of the large object to export +#' @param filepath a path where the large object should be exported +#' @return 1 on success (invisibly), or stops with an error +#' @examples +#' \dontrun{ +#' con <- postgresDefault() +#' filepath <- 'your_image.png' +#' dbWithTransaction(con, { +#' oid <- postgresImportLargeObject(con, filepath) +#' }) +#' # Later, export the large object back to a file +#' dbWithTransaction(con, { +#' postgresExportLargeObject(con, oid, 'exported_image.png') +#' }) +#' } +postgresExportLargeObject <- function(conn, oid, filepath = NULL) { + + if (!postgresIsTransacting(conn)) { + stopc("Cannot export a large object outside of a transaction") + } + + if (is.null(oid)) stopc("'oid' cannot be NULL") + if (is.na(oid)) stopc("'oid' cannot be NA") + if (oid < 0) stopc("'oid' cannot be negative") + if (is.null(filepath)) stopc("'filepath' cannot be NULL") + + result <- connection_export_lo_to_file(conn@ptr, oid, filepath) + invisible(result) +} diff --git a/man/postgresExportLargeObject.Rd b/man/postgresExportLargeObject.Rd new file mode 100644 index 00000000..8aa8664c --- /dev/null +++ b/man/postgresExportLargeObject.Rd @@ -0,0 +1,35 @@ +% Generated by roxygen2: do not edit by hand +% Please edit documentation in R/PqConnection.R +\name{postgresExportLargeObject} +\alias{postgresExportLargeObject} +\title{Exports a large object to file} +\usage{ +postgresExportLargeObject(conn, oid, filepath = NULL) +} +\arguments{ +\item{conn}{a \linkS4class{PqConnection} object, produced by +\code{\link[DBI:dbConnect]{DBI::dbConnect()}}} + +\item{oid}{the object identifier (Oid) of the large object to export} + +\item{filepath}{a path where the large object should be exported} +} +\value{ +1 on success (invisibly), or stops with an error +} +\description{ +Exports a large object from the database 
to a file on disk +} +\examples{ +\dontrun{ +con <- postgresDefault() +filepath <- 'your_image.png' +dbWithTransaction(con, { + oid <- postgresImportLargeObject(con, filepath) +}) +# Later, export the large object back to a file +dbWithTransaction(con, { + postgresExportLargeObject(con, oid, 'exported_image.png') +}) +} +} \ No newline at end of file diff --git a/src/DbConnection.cpp b/src/DbConnection.cpp index 6d43cb43..fe3fa30a 100644 --- a/src/DbConnection.cpp +++ b/src/DbConnection.cpp @@ -134,6 +134,12 @@ Oid DbConnection::import_lo_from_file(std::string filename, Oid p_oid) { return(lo_oid); } +int DbConnection::export_lo_to_file(Oid p_oid, std::string filename) { + int result = lo_export(pConn_, p_oid, filename.c_str()); + if (result != 1) cpp11::stop(PQerrorMessage(pConn_)); + return(result); +} + void DbConnection::copy_data(std::string sql, cpp11::list df) { LOG_DEBUG << sql; diff --git a/src/DbConnection.h b/src/DbConnection.h index 8e28834f..265b1efb 100644 --- a/src/DbConnection.h +++ b/src/DbConnection.h @@ -37,6 +37,7 @@ class DbConnection : boost::noncopyable { void copy_data(std::string sql, cpp11::list df); Oid import_lo_from_file(std::string file_path, Oid p_oid); + int export_lo_to_file(Oid p_oid, std::string file_path); void check_connection(); diff --git a/src/connection.cpp b/src/connection.cpp index b2e38ece..201c3676 100644 --- a/src/connection.cpp +++ b/src/connection.cpp @@ -96,6 +96,11 @@ Oid connection_import_lo_from_file(DbConnection* con, std::string filename, Oid return con->import_lo_from_file(filename, oid); } +[[cpp11::register]] +int connection_export_lo_to_file(DbConnection* con, Oid oid, std::string filename) { + return con->export_lo_to_file(oid, filename); +} + [[cpp11::register]] void connection_copy_data(DbConnection* con, std::string sql, cpp11::list df) { return con->copy_data(sql, df); diff --git a/src/cpp11.cpp b/src/cpp11.cpp index 080b6075..facb9808 100644 --- a/src/cpp11.cpp +++ b/src/cpp11.cpp @@ -78,6 +78,13 @@ extern "C" SEXP _RPostgres_connection_import_lo_from_file(SEXP con, SEXP filenam END_CPP11 } // connection.cpp +int connection_export_lo_to_file(DbConnection* con, Oid oid, std::string filename); +extern "C" SEXP _RPostgres_connection_export_lo_to_file(SEXP con, SEXP oid, SEXP filename) { + BEGIN_CPP11 + return cpp11::as_sexp(connection_export_lo_to_file(cpp11::as_cpp>(con), cpp11::as_cpp>(oid), cpp11::as_cpp>(filename))); + END_CPP11 +} +// connection.cpp void connection_copy_data(DbConnection* con, std::string sql, cpp11::list df); extern "C" SEXP _RPostgres_connection_copy_data(SEXP con, SEXP sql, SEXP df) { BEGIN_CPP11 @@ -207,6 +214,7 @@ static const R_CallMethodDef CallEntries[] = { {"_RPostgres_client_version", (DL_FUNC) &_RPostgres_client_version, 0}, {"_RPostgres_connection_copy_data", (DL_FUNC) &_RPostgres_connection_copy_data, 3}, {"_RPostgres_connection_create", (DL_FUNC) &_RPostgres_connection_create, 3}, + {"_RPostgres_connection_export_lo_to_file", (DL_FUNC) &_RPostgres_connection_export_lo_to_file, 3}, {"_RPostgres_connection_get_temp_schema", (DL_FUNC) &_RPostgres_connection_get_temp_schema, 1}, {"_RPostgres_connection_import_lo_from_file", (DL_FUNC) &_RPostgres_connection_import_lo_from_file, 3}, {"_RPostgres_connection_info", (DL_FUNC) &_RPostgres_connection_info, 1}, diff --git a/tests/testthat/test-ImportLargeObject.R b/tests/testthat/test-ImportLargeObject.R index 51ec7fae..a5cc359a 100644 --- a/tests/testthat/test-ImportLargeObject.R +++ b/tests/testthat/test-ImportLargeObject.R @@ -13,6 +13,37 @@ 
test_that("can import and read a large object", { }) +test_that("can import and export a large object", { + con <- postgresDefault() + on.exit(dbDisconnect(con)) + test_file_path <- paste0(test_path(), '/data/large_object.txt') + + # Import the large object + oid <- dbWithTransaction(con, { + postgresImportLargeObject(con, test_file_path) + }) + expect_gt(oid, 0) + + # Export to a temporary file + temp_file <- tempfile(fileext = ".txt") + on.exit(unlink(temp_file), add = TRUE) + + dbWithTransaction(con, { + result <- postgresExportLargeObject(con, oid, temp_file) + expect_true(is.numeric(result)) + }) + + # Verify the exported file exists and has correct content + expect_true(file.exists(temp_file)) + exported_content <- readBin(temp_file, "raw", file.size(temp_file)) + original_content <- readBin(test_file_path, "raw", file.size(test_file_path)) + expect_equal(exported_content, original_content) + + # Clean up large object + dbExecute(con, "SELECT lo_unlink($1)", params = list(oid)) +}) + + test_that("importing to an existing oid throws error", { con <- postgresDefault() on.exit(dbDisconnect(con)) @@ -41,3 +72,75 @@ test_that("import from a non-existing path throws error", { }) ) }) + + +test_that("export outside transaction throws error", { + con <- postgresDefault() + on.exit(dbDisconnect(con)) + expect_error( + postgresExportLargeObject(con, 12345, tempfile()), + "Cannot export a large object outside of a transaction" + ) +}) + + +test_that("export with NULL oid throws error", { + con <- postgresDefault() + on.exit(dbDisconnect(con)) + expect_error( + dbWithTransaction(con, { + postgresExportLargeObject(con, NULL, tempfile()) + }), + "'oid' cannot be NULL" + ) +}) + + +test_that("export with NA oid throws error", { + con <- postgresDefault() + on.exit(dbDisconnect(con)) + expect_error( + dbWithTransaction(con, { + postgresExportLargeObject(con, NA, tempfile()) + }), + "'oid' cannot be NA" + ) +}) + + +test_that("export with negative oid throws error", { + con <- postgresDefault() + on.exit(dbDisconnect(con)) + expect_error( + dbWithTransaction(con, { + postgresExportLargeObject(con, -1, tempfile()) + }), + "'oid' cannot be negative" + ) +}) + + +test_that("export with NULL filepath throws error", { + con <- postgresDefault() + on.exit(dbDisconnect(con)) + expect_error( + dbWithTransaction(con, { + postgresExportLargeObject(con, 12345, NULL) + }), + "'filepath' cannot be NULL" + ) +}) + + +test_that("export of non-existent oid throws error", { + con <- postgresDefault() + on.exit(dbDisconnect(con)) + temp_file <- tempfile() + on.exit(unlink(temp_file), add = TRUE) + + expect_error( + dbWithTransaction(con, { + postgresExportLargeObject(con, 999999, temp_file) + }) + ) +}) From f359679fd6492b9601f6d1352ddf631f9889b269 Mon Sep 17 00:00:00 2001 From: "copilot-swe-agent[bot]" <198982749+Copilot@users.noreply.github.com> Date: Sat, 13 Sep 2025 18:38:30 +0000 Subject: [PATCH 03/11] Enhance documentation and validation for postgresExportLargeObject Co-authored-by: krlmlr <1741643+krlmlr@users.noreply.github.com> --- R/PqConnection.R | 5 ++++- man/postgresExportLargeObject.Rd | 5 ++++- 2 files changed, 8 insertions(+), 2 deletions(-) diff --git a/R/PqConnection.R b/R/PqConnection.R index e0ed9b3f..adcbd053 100644 --- a/R/PqConnection.R +++ b/R/PqConnection.R @@ -171,7 +171,10 @@ postgresImportLargeObject <- function(conn, filepath = NULL, oid = 0) { #' Exports a large object to file #' -#' Exports a large object from the database to a file on disk +#' Exports a large object from the database to 
a file on disk. This function +#' uses PostgreSQL's `lo_export` function which efficiently streams the data +#' directly to disk without loading it into memory, making it suitable for +#' very large objects (GB+) that would cause memory issues with `lo_get()`. #' #' @export #' @param conn a [PqConnection-class] object, produced by diff --git a/man/postgresExportLargeObject.Rd b/man/postgresExportLargeObject.Rd index 8aa8664c..60cac084 100644 --- a/man/postgresExportLargeObject.Rd +++ b/man/postgresExportLargeObject.Rd @@ -18,7 +18,10 @@ postgresExportLargeObject(conn, oid, filepath = NULL) 1 on success (invisibly), or stops with an error } \description{ -Exports a large object from the database to a file on disk +Exports a large object from the database to a file on disk. This function +uses PostgreSQL's \code{lo_export} function which efficiently streams the data +directly to disk without loading it into memory, making it suitable for +very large objects (GB+) that would cause memory issues with \code{lo_get()}. } \examples{ \dontrun{ From e055d8ce4e7a6cf0ed16c92cb0ae0e0c9a704c95 Mon Sep 17 00:00:00 2001 From: "copilot-swe-agent[bot]" <198982749+Copilot@users.noreply.github.com> Date: Sat, 13 Sep 2025 18:56:08 +0000 Subject: [PATCH 04/11] Address review feedback: remove filepath default, change return type to void, update docs and tests Co-authored-by: krlmlr <1741643+krlmlr@users.noreply.github.com> --- R/PqConnection.R | 15 ++++++++------- man/postgresExportLargeObject.Rd | 7 ++++--- man/postgresImportLargeObject.Rd | 3 ++- src/DbConnection.cpp | 3 +-- src/DbConnection.h | 2 +- tests/testthat/test-ImportLargeObject.R | 20 ++++---------------- 6 files changed, 20 insertions(+), 30 deletions(-) diff --git a/R/PqConnection.R b/R/PqConnection.R index adcbd053..e0881df4 100644 --- a/R/PqConnection.R +++ b/R/PqConnection.R @@ -139,7 +139,8 @@ postgresIsTransacting <- function(conn) { #' Imports a large object from file #' -#' Returns an object idenfier (Oid) for the imported large object +#' Returns an object identifier (Oid) for the imported large object. +#' This function must be called within a transaction. #' #' @export #' @param conn a [PqConnection-class] object, produced by @@ -172,16 +173,17 @@ postgresImportLargeObject <- function(conn, filepath = NULL, oid = 0) { #' Exports a large object to file #' #' Exports a large object from the database to a file on disk. This function -#' uses PostgreSQL's `lo_export` function which efficiently streams the data +#' uses PostgreSQL's `lo_export()` function which efficiently streams the data #' directly to disk without loading it into memory, making it suitable for #' very large objects (GB+) that would cause memory issues with `lo_get()`. +#' This function must be called within a transaction. 
#' #' @export #' @param conn a [PqConnection-class] object, produced by #' [DBI::dbConnect()] #' @param oid the object identifier (Oid) of the large object to export #' @param filepath a path where the large object should be exported -#' @return 1 on success (invisibly), or stops with an error +#' @return invisible NULL on success, or stops with an error #' @examples #' \dontrun{ #' con <- postgresDefault() @@ -194,7 +196,7 @@ postgresImportLargeObject <- function(conn, filepath = NULL, oid = 0) { #' postgresExportLargeObject(con, oid, 'exported_image.png') #' }) #' } -postgresExportLargeObject <- function(conn, oid, filepath = NULL) { +postgresExportLargeObject <- function(conn, oid, filepath) { if (!postgresIsTransacting(conn)) { stopc("Cannot export a large object outside of a transaction") @@ -203,8 +205,7 @@ postgresExportLargeObject <- function(conn, oid, filepath = NULL) { if (is.null(oid)) stopc("'oid' cannot be NULL") if (is.na(oid)) stopc("'oid' cannot be NA") if (oid < 0) stopc("'oid' cannot be negative") - if (is.null(filepath)) stopc("'filepath' cannot be NULL") - result <- connection_export_lo_to_file(conn@ptr, oid, filepath) - invisible(result) + connection_export_lo_to_file(conn@ptr, oid, filepath) + invisible() } diff --git a/man/postgresExportLargeObject.Rd b/man/postgresExportLargeObject.Rd index 60cac084..fed73373 100644 --- a/man/postgresExportLargeObject.Rd +++ b/man/postgresExportLargeObject.Rd @@ -4,7 +4,7 @@ \alias{postgresExportLargeObject} \title{Exports a large object to file} \usage{ -postgresExportLargeObject(conn, oid, filepath = NULL) +postgresExportLargeObject(conn, oid, filepath) } \arguments{ \item{conn}{a \linkS4class{PqConnection} object, produced by @@ -15,13 +15,14 @@ postgresExportLargeObject(conn, oid, filepath = NULL) \item{filepath}{a path where the large object should be exported} } \value{ -1 on success (invisibly), or stops with an error +invisible NULL on success, or stops with an error } \description{ Exports a large object from the database to a file on disk. This function -uses PostgreSQL's \code{lo_export} function which efficiently streams the data +uses PostgreSQL's \code{lo_export()} function which efficiently streams the data directly to disk without loading it into memory, making it suitable for very large objects (GB+) that would cause memory issues with \code{lo_get()}. +This function must be called within a transaction. } \examples{ \dontrun{ diff --git a/man/postgresImportLargeObject.Rd b/man/postgresImportLargeObject.Rd index 703dee00..eebafc8d 100644 --- a/man/postgresImportLargeObject.Rd +++ b/man/postgresImportLargeObject.Rd @@ -18,7 +18,8 @@ postgresImportLargeObject(conn, filepath = NULL, oid = 0) the identifier of the large object, an integer } \description{ -Returns an object idenfier (Oid) for the imported large object +Returns an object identifier (Oid) for the imported large object. +This function must be called within a transaction. 
} \examples{ \dontrun{ diff --git a/src/DbConnection.cpp b/src/DbConnection.cpp index fe3fa30a..f76d4cdc 100644 --- a/src/DbConnection.cpp +++ b/src/DbConnection.cpp @@ -134,10 +134,9 @@ Oid DbConnection::import_lo_from_file(std::string filename, Oid p_oid) { return(lo_oid); } -int DbConnection::export_lo_to_file(Oid p_oid, std::string filename) { +void DbConnection::export_lo_to_file(Oid p_oid, std::string filename) { int result = lo_export(pConn_, p_oid, filename.c_str()); if (result != 1) cpp11::stop(PQerrorMessage(pConn_)); - return(result); } void DbConnection::copy_data(std::string sql, cpp11::list df) { diff --git a/src/DbConnection.h b/src/DbConnection.h index 265b1efb..e205a51d 100644 --- a/src/DbConnection.h +++ b/src/DbConnection.h @@ -37,7 +37,7 @@ class DbConnection : boost::noncopyable { void copy_data(std::string sql, cpp11::list df); Oid import_lo_from_file(std::string file_path, Oid p_oid); - int export_lo_to_file(Oid p_oid, std::string file_path); + void export_lo_to_file(Oid p_oid, std::string file_path); void check_connection(); diff --git a/tests/testthat/test-ImportLargeObject.R b/tests/testthat/test-ImportLargeObject.R index a5cc359a..04f53136 100644 --- a/tests/testthat/test-ImportLargeObject.R +++ b/tests/testthat/test-ImportLargeObject.R @@ -2,7 +2,7 @@ test_that("can import and read a large object", { con <- postgresDefault() on.exit(dbDisconnect(con)) - test_file_path <- paste0(test_path(), '/data/large_object.txt') + test_file_path <- file.path(test_path(), 'data', 'large_object.txt') dbWithTransaction(con, { oid <- postgresImportLargeObject(con, test_file_path) }) @@ -16,7 +16,7 @@ test_that("can import and read a large object", { test_that("can import and export a large object", { con <- postgresDefault() on.exit(dbDisconnect(con)) - test_file_path <- paste0(test_path(), '/data/large_object.txt') + test_file_path <- file.path(test_path(), 'data', 'large_object.txt') # Import the large object oid <- dbWithTransaction(con, { @@ -47,7 +47,7 @@ test_that("can import and export a large object", { test_that("importing to an existing oid throws error", { con <- postgresDefault() on.exit(dbDisconnect(con)) - test_file_path <- paste0(test_path(), '/data/large_object.txt') + test_file_path <- file.path(test_path(), 'data', 'large_object.txt') oid <- 1234 dbWithTransaction(con, { oid <- postgresImportLargeObject(con, test_file_path, oid) @@ -65,7 +65,7 @@ test_that("importing to an existing oid throws error", { test_that("import from a non-existing path throws error", { con <- postgresDefault() on.exit(dbDisconnect(con)) - test_file_path <- paste0(test_path(), '/data/large_object_that_does_not_exist.txt') + test_file_path <- file.path(test_path(), 'data', 'large_object_that_does_not_exist.txt') expect_error( dbWithTransaction(con, { oid <- postgresImportLargeObject(con, test_file_path) @@ -120,18 +120,6 @@ test_that("export with negative oid throws error", { }) -test_that("export with NULL filepath throws error", { - con <- postgresDefault() - on.exit(dbDisconnect(con)) - expect_error( - dbWithTransaction(con, { - postgresExportLargeObject(con, 12345, NULL) - }), - "'filepath' cannot be NULL" - ) -}) - - test_that("export of non-existent oid throws error", { con <- postgresDefault() on.exit(dbDisconnect(con)) From b0a676d3650d50df0ba59cda066acbb2642218a8 Mon Sep 17 00:00:00 2001 From: "copilot-swe-agent[bot]" <198982749+Copilot@users.noreply.github.com> Date: Sat, 13 Sep 2025 19:21:19 +0000 Subject: [PATCH 05/11] Fix compilation error: change 
connection_export_lo_to_file return type to void Co-authored-by: krlmlr <1741643+krlmlr@users.noreply.github.com> --- src/connection.cpp | 4 ++-- src/cpp11.cpp | 5 +++-- 2 files changed, 5 insertions(+), 4 deletions(-) diff --git a/src/connection.cpp b/src/connection.cpp index 201c3676..57dd864d 100644 --- a/src/connection.cpp +++ b/src/connection.cpp @@ -97,8 +97,8 @@ Oid connection_import_lo_from_file(DbConnection* con, std::string filename, Oid } [[cpp11::register]] -int connection_export_lo_to_file(DbConnection* con, Oid oid, std::string filename) { - return con->export_lo_to_file(oid, filename); +void connection_export_lo_to_file(DbConnection* con, Oid oid, std::string filename) { + con->export_lo_to_file(oid, filename); } [[cpp11::register]] diff --git a/src/cpp11.cpp b/src/cpp11.cpp index facb9808..fbae72d3 100644 --- a/src/cpp11.cpp +++ b/src/cpp11.cpp @@ -78,10 +78,11 @@ extern "C" SEXP _RPostgres_connection_import_lo_from_file(SEXP con, SEXP filenam END_CPP11 } // connection.cpp -int connection_export_lo_to_file(DbConnection* con, Oid oid, std::string filename); +void connection_export_lo_to_file(DbConnection* con, Oid oid, std::string filename); extern "C" SEXP _RPostgres_connection_export_lo_to_file(SEXP con, SEXP oid, SEXP filename) { BEGIN_CPP11 - return cpp11::as_sexp(connection_export_lo_to_file(cpp11::as_cpp>(con), cpp11::as_cpp>(oid), cpp11::as_cpp>(filename))); + connection_export_lo_to_file(cpp11::as_cpp>(con), cpp11::as_cpp>(oid), cpp11::as_cpp>(filename)); + return R_NilValue; END_CPP11 } // connection.cpp From 8db8debd856e2782214a6097416b3ce6605f8d19 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Kirill=20M=C3=BCller?= Date: Sat, 13 Sep 2025 21:28:36 +0200 Subject: [PATCH 06/11] Register --- R/cpp11.R | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/R/cpp11.R b/R/cpp11.R index 15734e20..e9cdcd86 100644 --- a/R/cpp11.R +++ b/R/cpp11.R @@ -40,6 +40,10 @@ connection_import_lo_from_file <- function(con, filename, oid) { .Call(`_RPostgres_connection_import_lo_from_file`, con, filename, oid) } +connection_export_lo_to_file <- function(con, oid, filename) { + invisible(.Call(`_RPostgres_connection_export_lo_to_file`, con, oid, filename)) +} + connection_copy_data <- function(con, sql, df) { invisible(.Call(`_RPostgres_connection_copy_data`, con, sql, df)) } From 9f6992480e3edf4a7afd9842098f43d1aba1c467 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Kirill=20M=C3=BCller?= Date: Sat, 13 Sep 2025 21:39:18 +0200 Subject: [PATCH 07/11] Quotes --- tests/testthat/test-ImportLargeObject.R | 30 ++++++++++++------------- 1 file changed, 15 insertions(+), 15 deletions(-) diff --git a/tests/testthat/test-ImportLargeObject.R b/tests/testthat/test-ImportLargeObject.R index 04f53136..d723a884 100644 --- a/tests/testthat/test-ImportLargeObject.R +++ b/tests/testthat/test-ImportLargeObject.R @@ -2,13 +2,13 @@ test_that("can import and read a large object", { con <- postgresDefault() on.exit(dbDisconnect(con)) - test_file_path <- file.path(test_path(), 'data', 'large_object.txt') + test_file_path <- file.path(test_path(), "data", "large_object.txt") dbWithTransaction(con, { oid <- postgresImportLargeObject(con, test_file_path) }) expect_gt(oid, 0) lo_data <- unlist(dbGetQuery(con, "select lo_get($1) as lo_data", params = list(oid))$lo_data[1]) - large_object_txt <- as.raw(c(0x70, 0x6f, 0x73, 0x74, 0x67, 0x72, 0x65, 0x73)) # the string 'postgres' + large_object_txt <- as.raw(c(0x70, 0x6f, 0x73, 0x74, 0x67, 0x72, 0x65, 0x73)) # the string "postgres" expect_equal(lo_data, large_object_txt) }) @@ 
-16,29 +16,29 @@ test_that("can import and read a large object", { test_that("can import and export a large object", { con <- postgresDefault() on.exit(dbDisconnect(con)) - test_file_path <- file.path(test_path(), 'data', 'large_object.txt') - + test_file_path <- file.path(test_path(), "data", "large_object.txt") + # Import the large object oid <- dbWithTransaction(con, { postgresImportLargeObject(con, test_file_path) }) expect_gt(oid, 0) - - # Export to a temporary file + + # Export to a temporary file temp_file <- tempfile(fileext = ".txt") on.exit(unlink(temp_file), add = TRUE) - + dbWithTransaction(con, { result <- postgresExportLargeObject(con, oid, temp_file) expect_true(is.numeric(result)) }) - + # Verify the exported file exists and has correct content expect_true(file.exists(temp_file)) exported_content <- readBin(temp_file, "raw", file.size(temp_file)) original_content <- readBin(test_file_path, "raw", file.size(test_file_path)) expect_equal(exported_content, original_content) - + # Clean up large object dbExecute(con, "SELECT lo_unlink($1)", params = list(oid)) }) @@ -47,7 +47,7 @@ test_that("can import and export a large object", { test_that("importing to an existing oid throws error", { con <- postgresDefault() on.exit(dbDisconnect(con)) - test_file_path <- file.path(test_path(), 'data', 'large_object.txt') + test_file_path <- file.path(test_path(), "data", "large_object.txt") oid <- 1234 dbWithTransaction(con, { oid <- postgresImportLargeObject(con, test_file_path, oid) @@ -65,7 +65,7 @@ test_that("importing to an existing oid throws error", { test_that("import from a non-existing path throws error", { con <- postgresDefault() on.exit(dbDisconnect(con)) - test_file_path <- file.path(test_path(), 'data', 'large_object_that_does_not_exist.txt') + test_file_path <- file.path(test_path(), "data", "large_object_that_does_not_exist.txt") expect_error( dbWithTransaction(con, { oid <- postgresImportLargeObject(con, test_file_path) @@ -91,7 +91,7 @@ test_that("export with NULL oid throws error", { dbWithTransaction(con, { postgresExportLargeObject(con, NULL, tempfile()) }), - "'oid' cannot be NULL" + ""oid" cannot be NULL" ) }) @@ -103,7 +103,7 @@ test_that("export with NA oid throws error", { dbWithTransaction(con, { postgresExportLargeObject(con, NA, tempfile()) }), - "'oid' cannot be NA" + ""oid" cannot be NA" ) }) @@ -115,7 +115,7 @@ test_that("export with negative oid throws error", { dbWithTransaction(con, { postgresExportLargeObject(con, -1, tempfile()) }), - "'oid' cannot be negative" + ""oid" cannot be negative" ) }) @@ -125,7 +125,7 @@ test_that("export of non-existent oid throws error", { on.exit(dbDisconnect(con)) temp_file <- tempfile() on.exit(unlink(temp_file), add = TRUE) - + expect_error( dbWithTransaction(con, { postgresExportLargeObject(con, 999999, temp_file) From 596173a6f54ae00b33b241ce7ca1af87dab0bcca Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Kirill=20M=C3=BCller?= Date: Sat, 13 Sep 2025 21:39:27 +0200 Subject: [PATCH 08/11] No result --- tests/testthat/test-ImportLargeObject.R | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/tests/testthat/test-ImportLargeObject.R b/tests/testthat/test-ImportLargeObject.R index d723a884..503479a0 100644 --- a/tests/testthat/test-ImportLargeObject.R +++ b/tests/testthat/test-ImportLargeObject.R @@ -29,8 +29,7 @@ test_that("can import and export a large object", { on.exit(unlink(temp_file), add = TRUE) dbWithTransaction(con, { - result <- postgresExportLargeObject(con, oid, temp_file) - 
expect_true(is.numeric(result)) + postgresExportLargeObject(con, oid, temp_file) }) # Verify the exported file exists and has correct content From 3a9b9b2de47fc28fdd2adb6550afa13a3ec59d5d Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Kirill=20M=C3=BCller?= Date: Sat, 13 Sep 2025 21:51:48 +0200 Subject: [PATCH 09/11] Fix --- tests/testthat/test-ImportLargeObject.R | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/tests/testthat/test-ImportLargeObject.R b/tests/testthat/test-ImportLargeObject.R index 503479a0..47463a0c 100644 --- a/tests/testthat/test-ImportLargeObject.R +++ b/tests/testthat/test-ImportLargeObject.R @@ -90,7 +90,7 @@ test_that("export with NULL oid throws error", { dbWithTransaction(con, { postgresExportLargeObject(con, NULL, tempfile()) }), - ""oid" cannot be NULL" + '"oid" cannot be NULL' ) }) @@ -102,7 +102,7 @@ test_that("export with NA oid throws error", { dbWithTransaction(con, { postgresExportLargeObject(con, NA, tempfile()) }), - ""oid" cannot be NA" + '"oid" cannot be NA' ) }) @@ -114,7 +114,7 @@ test_that("export with negative oid throws error", { dbWithTransaction(con, { postgresExportLargeObject(con, -1, tempfile()) }), - ""oid" cannot be negative" + '"oid" cannot be negative' ) }) From 1455aa293fc37f379aae1d1af05342f95a5b8b42 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Kirill=20M=C3=BCller?= Date: Sat, 13 Sep 2025 22:00:27 +0200 Subject: [PATCH 10/11] Fix --- tests/testthat/test-ImportLargeObject.R | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/tests/testthat/test-ImportLargeObject.R b/tests/testthat/test-ImportLargeObject.R index 47463a0c..083ee209 100644 --- a/tests/testthat/test-ImportLargeObject.R +++ b/tests/testthat/test-ImportLargeObject.R @@ -90,7 +90,7 @@ test_that("export with NULL oid throws error", { dbWithTransaction(con, { postgresExportLargeObject(con, NULL, tempfile()) }), - '"oid" cannot be NULL' + "'oid' cannot be NULL" ) }) @@ -102,7 +102,7 @@ test_that("export with NA oid throws error", { dbWithTransaction(con, { postgresExportLargeObject(con, NA, tempfile()) }), - '"oid" cannot be NA' + "'oid' cannot be NA" ) }) @@ -114,7 +114,7 @@ test_that("export with negative oid throws error", { dbWithTransaction(con, { postgresExportLargeObject(con, -1, tempfile()) }), - '"oid" cannot be negative' + "'oid' cannot be negative" ) }) From afa9ad7e3788b4a4177157155194f76b2ed7f549 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Kirill=20M=C3=BCller?= Date: Sat, 13 Sep 2025 22:35:51 +0200 Subject: [PATCH 11/11] pkgdown --- _pkgdown.yml | 1 + 1 file changed, 1 insertion(+) diff --git a/_pkgdown.yml b/_pkgdown.yml index af8c57e7..36299bee 100644 --- a/_pkgdown.yml +++ b/_pkgdown.yml @@ -42,6 +42,7 @@ reference: - postgresHasDefault - postgresWaitForNotify - postgresImportLargeObject + - postgresExportLargeObject development: mode: auto
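Taken together, the series adds an lo_export()-based export helper alongside the existing import helper and wires it into the NAMESPACE, the cpp11 bindings, and the pkgdown reference index. Below is a minimal end-to-end sketch of the API as it stands after PATCH 11; it assumes a reachable PostgreSQL server that dbConnect(Postgres()) can log in to with default settings, and the file name "payload.bin" is purely illustrative.

library(DBI)
library(RPostgres)

con <- dbConnect(Postgres())

# Both helpers refuse to run outside a transaction; dbWithTransaction()
# returns the value of its last expression, so the Oid can be captured here.
oid <- dbWithTransaction(con, {
  postgresImportLargeObject(con, "payload.bin")
})

# Stream the large object back to disk. After PATCH 04 the function has no
# filepath default, returns invisible NULL, and stops with the libpq error
# message if lo_export() fails (for example, for a non-existent Oid).
out_file <- tempfile(fileext = ".bin")
dbWithTransaction(con, {
  postgresExportLargeObject(con, oid, out_file)
})

# Remove the large object once it is no longer needed, then disconnect.
dbExecute(con, "SELECT lo_unlink($1)", params = list(oid))
dbDisconnect(con)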