diff --git a/.github/workflows/main.yml b/.github/workflows/main.yml index a86caf1..ba3ace3 100644 --- a/.github/workflows/main.yml +++ b/.github/workflows/main.yml @@ -11,7 +11,7 @@ permissions: jobs: build: runs-on: ${{ matrix.os }} - name: ${{ matrix.name }}${{ matrix.arch && format('-{0}', matrix.arch) || '' }} build${{ matrix.arch != 'arm64-v8a' && matrix.name != 'isim' && matrix.name != 'ios' && ' + test' || ''}} + name: ${{ matrix.name }}${{ matrix.arch && format('-{0}', matrix.arch) || '' }} build${{ matrix.arch != 'arm64-v8a' && matrix.name != 'isim' && matrix.name != 'ios' && matrix.name != 'wasm' && ' + test' || ''}} timeout-minutes: 20 strategy: fail-fast: false @@ -43,6 +43,9 @@ jobs: - os: macos-latest name: isim make: PLATFORM=isim + - os: ubuntu-latest + name: wasm + make: PLATFORM=wasm defaults: run: @@ -70,6 +73,10 @@ jobs: run: make curl/windows/libcurl.a shell: msys2 {0} + - name: wasm install wabt + if: matrix.name == 'wasm' + run: sudo apt install wabt + - name: build sqlite-sync run: make extension ${{ matrix.make && matrix.make || ''}} @@ -137,11 +144,68 @@ jobs: with: path: coverage + - name: wasm npm pack + if: matrix.name == 'wasm' + run: | + TAG=$(git ls-remote --tags https://github.com/sqlite/sqlite-wasm.git | \ + awk -v ver=$(make sqlite_version) -F'/' '$NF ~ ver"-build[0-9]+$" {print $NF}' | \ + sort -V | \ + tail -n1) + git clone --branch "$TAG" --depth 1 https://github.com/sqlite/sqlite-wasm.git sqlite-wasm + rm -rf sqlite-wasm/sqlite-wasm/* + unzip dist/sqlite-wasm.zip -d sqlite-wasm/tmp + mv sqlite-wasm/tmp/sqlite-wasm-*/jswasm sqlite-wasm/sqlite-wasm + + cd sqlite-wasm && npm i && npm run fix && npm run publint && npm run check-types && cd .. + + PKG=sqlite-wasm/package.json + TMP=sqlite-wasm/package.tmp.json + + DESC="SQLite Wasm compiled with the automatically initialized cloudsync extension. Conveniently packaged as an ES Module for effortless integration." 
+ + jq \ + --arg name "@sqliteai/cloudsync-wasm" \ + --arg version "$(make sqlite_version)-cloudsync-$(make version)" \ + --arg desc "$DESC" \ + --argjson keywords '["offsync","cloudsync","sqliteai"]' \ + --arg repo_url "git+https://github.com/sqliteai/sqlite-sync.git" \ + --arg author "Gioele Cantoni (gioele@sqlitecloud.io)" \ + --arg bugs_url "https://github.com/sqliteai/sqlite-sync/issues" \ + --arg homepage "https://github.com/sqliteai/sqlite-sync#readme" \ + ' + .name = $name + | .version = $version + | .description = $desc + | .keywords += $keywords + | del(.bin) + | .scripts |= with_entries(select( + .key != "build" + and .key != "start" + and .key != "start:node" + and .key != "prepublishOnly" + and .key != "deploy" + )) + | .repository.url = $repo_url + | .author = $author + | .bugs.url = $bugs_url + | .homepage = $homepage + | del(.devDependencies.decompress) + | del(.devDependencies["http-server"]) + | del(.devDependencies.shx) + ' "$PKG" > "$TMP" && mv "$TMP" "$PKG" + + sed 's/@sqlite\.org\/sqlite-wasm/@sqliteai\/cloudsync-wasm/g' sqlite-wasm/README.md > sqlite-wasm/README.tmp + echo -e "# sqlite-sync WASM $(make version)\nThis README and the TypeScript types are from the [official SQLite wasm repository](https://github.com/sqlite/sqlite-wasm)\n\n$(cat sqlite-wasm/README.tmp)" > sqlite-wasm/README.md + rm -rf sqlite-wasm/tmp sqlite-wasm/bin sqlite-wasm/demo sqlite-wasm/README.tmp sqlite-wasm/package-lock.json + cd sqlite-wasm && npm pack + - uses: actions/upload-artifact@v4.6.2 if: always() with: name: cloudsync-${{ matrix.name }}${{ matrix.arch && format('-{0}', matrix.arch) || '' }} - path: dist/cloudsync.* + path: | + dist/cloudsync.* + sqlite-wasm/*.tgz if-no-files-found: error release: @@ -170,8 +234,7 @@ jobs: - name: release tag version from cloudsync.h id: tag run: | - FILE="src/cloudsync.h" - VERSION=$(grep -oP '#define CLOUDSYNC_VERSION\s+"\K[^"]+' "$FILE") + VERSION=$(make version) if [[ "$VERSION" =~ ^[0-9]+\.[0-9]+\.[0-9]+$ ]]; then 
LATEST=$(curl -s -H "Authorization: token ${{ secrets.GITHUB_TOKEN }}" https://api.github.com/repos/${{ github.repository }}/releases/latest | jq -r '.name') if [[ "$VERSION" != "$LATEST" || "$GITHUB_EVENT_NAME" == "workflow_dispatch" ]]; then @@ -183,6 +246,18 @@ jobs: fi echo "❌ CLOUDSYNC_VERSION not found in cloudsync.h" exit 1 + + - uses: actions/setup-node@v4 + with: + node-version: '20.x' + registry-url: 'https://registry.npmjs.org' + + - name: wasm publish to npmjs + if: steps.tag.outputs.version != '' + #use this version when the repo will become public run: npm publish --provenance --access public ./artifacts/cloudsync-wasm/sqlite-wasm/sqliteai-cloudsync-wasm-*-cloudsync-*.tgz + run: npm publish --access public ./artifacts/cloudsync-wasm/sqlite-wasm/sqliteai-cloudsync-wasm-*-cloudsync-*.tgz + env: + NODE_AUTH_TOKEN: ${{ secrets.NPM_TOKEN }} - name: zip artifacts run: | diff --git a/Makefile b/Makefile index 0958776..5288a81 100644 --- a/Makefile +++ b/Makefile @@ -8,6 +8,9 @@ SQLITE3 ?= sqlite3 # set curl version to download and build CURL_VERSION ?= 8.12.1 +# set sqlite version for WASM static build +SQLITE_VERSION ?= 3.49.2 + # Set default platform if not specified ifeq ($(OS),Windows_NT) PLATFORM := windows @@ -47,13 +50,14 @@ CURL_DIR = curl CURL_SRC = $(CURL_DIR)/src/curl-$(CURL_VERSION) COV_DIR = coverage CUSTOM_CSS = $(TEST_DIR)/sqliteai.css +BUILD_WASM = build/wasm SRC_FILES = $(wildcard $(SRC_DIR)/*.c) TEST_SRC = $(wildcard $(TEST_DIR)/*.c) TEST_FILES = $(SRC_FILES) $(TEST_SRC) $(wildcard $(SQLITE_DIR)/*.c) RELEASE_OBJ = $(patsubst %.c, $(BUILD_RELEASE)/%.o, $(notdir $(SRC_FILES))) TEST_OBJ = $(patsubst %.c, $(BUILD_TEST)/%.o, $(notdir $(TEST_FILES))) -COV_FILES = $(filter-out $(SRC_DIR)/lz4.c $(SRC_DIR)/network.c, $(SRC_FILES)) +COV_FILES = $(filter-out $(SRC_DIR)/lz4.c $(SRC_DIR)/network.c $(SRC_DIR)/wasm.c, $(SRC_FILES)) CURL_LIB = $(CURL_DIR)/$(PLATFORM)/libcurl.a TEST_TARGET = $(patsubst %.c,$(DIST_DIR)/%$(EXE), $(notdir $(TEST_SRC))) @@ 
-110,6 +114,8 @@ else ifeq ($(PLATFORM),isim) T_LDFLAGS = -framework Security CFLAGS += -arch x86_64 -arch arm64 $(SDK) CURL_CONFIG = --host=arm64-apple-darwin --with-secure-transport CFLAGS="-arch x86_64 -arch arm64 -isysroot $$(xcrun --sdk iphonesimulator --show-sdk-path) -miphonesimulator-version-min=11.0" +else ifeq ($(PLATFORM),wasm) + TARGET := $(DIST_DIR)/sqlite-wasm.zip else # linux TARGET := $(DIST_DIR)/cloudsync.so LDFLAGS += -shared -lssl -lcrypto @@ -127,7 +133,7 @@ endif # Windows .def file generation $(DEF_FILE): ifeq ($(PLATFORM),windows) - @echo "LIBRARY js.dll" > $@ + @echo "LIBRARY cloudsync.dll" > $@ @echo "EXPORTS" >> $@ @echo " sqlite3_cloudsync_init" >> $@ endif @@ -139,12 +145,32 @@ $(shell mkdir -p $(BUILD_DIRS) $(DIST_DIR)) extension: $(TARGET) all: $(TARGET) +ifneq ($(PLATFORM),wasm) # Loadable library $(TARGET): $(RELEASE_OBJ) $(DEF_FILE) $(CURL_LIB) $(CC) $(RELEASE_OBJ) $(DEF_FILE) -o $@ $(LDFLAGS) ifeq ($(PLATFORM),windows) # Generate import library for Windows - dlltool -D $@ -d $(DEF_FILE) -l $(DIST_DIR)/js.lib + dlltool -D $@ -d $(DEF_FILE) -l $(DIST_DIR)/cloudsync.lib +endif +else +#WASM build +EMSDK := $(BUILD_WASM)/emsdk +$(EMSDK): + git clone https://github.com/emscripten-core/emsdk.git $(EMSDK) + cd $(EMSDK) && ./emsdk install latest && ./emsdk activate latest + +SQLITE_SRC := $(BUILD_WASM)/sqlite +$(SQLITE_SRC): $(EMSDK) + git clone --branch version-$(SQLITE_VERSION) --depth 1 https://github.com/sqlite/sqlite.git $(SQLITE_SRC) + cd $(EMSDK) && . 
./emsdk_env.sh && cd ../sqlite && ./configure --enable-all + +WASM_FLAGS = emcc.jsflags += -sFETCH -pthread +WASM_MAKEFILE = $(SQLITE_SRC)/ext/wasm/GNUmakefile +$(TARGET): $(SQLITE_SRC) $(SRC_FILES) + @grep '$(WASM_FLAGS)' '$(WASM_MAKEFILE)' >/dev/null 2>&1 || echo '$(WASM_FLAGS)' >> '$(WASM_MAKEFILE)' + cd $(SQLITE_SRC)/ext/wasm && $(MAKE) dist sqlite3_wasm_extra_init.c=../../../../../src/wasm.c + mv $(SQLITE_SRC)/ext/wasm/sqlite-wasm-*.zip $(TARGET) endif # Test executable @@ -266,9 +292,15 @@ endif mv $(CURL_SRC)/lib/.libs/libcurl.a $(CURL_DIR)/$(PLATFORM) rm -rf $(CURL_DIR)/src +# Tools +sqlite_version: + @echo $(SQLITE_VERSION) +version: + @echo $(shell sed -n 's/^#define CLOUDSYNC_VERSION[[:space:]]*"\([^"]*\)".*/\1/p' src/cloudsync.h) + # Clean up generated files clean: - rm -rf $(BUILD_DIRS) $(DIST_DIR)/* $(COV_DIR) *.gcda *.gcno *.gcov $(CURL_DIR)/src *.sqlite + rm -rf $(BUILD_DIRS) $(DIST_DIR)/* $(COV_DIR) *.gcda *.gcno *.gcov $(CURL_DIR)/src *.sqlite $(BUILD_WASM) # Help message help: @@ -283,6 +315,7 @@ help: @echo " android (needs ARCH to be set to x86_64 or arm64-v8a and ANDROID_NDK to be set)" @echo " ios (only on macOS)" @echo " isim (only on macOS)" + @echo " wasm (needs wabt[brew install wabt/sudo apt install wabt])" @echo "" @echo "Targets:" @echo " all - Build the extension (default)" diff --git a/src/cloudsync.c b/src/cloudsync.c index 57a7fe9..ef16093 100644 --- a/src/cloudsync.c +++ b/src/cloudsync.c @@ -360,7 +360,7 @@ char *db_version_build_query (sqlite3 *db) { // the good news is that the query can be computed in SQLite without the need to do any extra computation from the host language const char *sql = "WITH table_names AS (" - "SELECT format(\"%w\", name) as tbl_name " + "SELECT format('%w', name) as tbl_name " "FROM sqlite_master " "WHERE type='table' " "AND name LIKE '%_cloudsync'" diff --git a/src/cloudsync.h b/src/cloudsync.h index 5f4238c..71915d6 100644 --- a/src/cloudsync.h +++ b/src/cloudsync.h @@ -16,7 +16,7 @@ #include 
"sqlite3.h" #endif -#define CLOUDSYNC_VERSION "0.8.5" +#define CLOUDSYNC_VERSION "0.8.6" int sqlite3_cloudsync_init (sqlite3 *db, char **pzErrMsg, const sqlite3_api_routines *pApi); diff --git a/src/dbutils.c b/src/dbutils.c index 48e805c..54993fd 100644 --- a/src/dbutils.c +++ b/src/dbutils.c @@ -419,7 +419,7 @@ bool dbutils_table_sanity_check (sqlite3 *db, sqlite3_context *context, const ch // the affinity of a column is determined by the declared type of the column, // according to the following rules in the order shown: // 1. If the declared type contains the string "INT" then it is assigned INTEGER affinity. - sql = sqlite3_snprintf((int)blen, buffer, "SELECT count(*) FROM pragma_table_info('%w') WHERE pk=1 AND \"type\" LIKE \"%%INT%%\";", name); + sql = sqlite3_snprintf((int)blen, buffer, "SELECT count(*) FROM pragma_table_info('%w') WHERE pk=1 AND \"type\" LIKE '%%INT%%';", name); sqlite3_int64 count2 = dbutils_int_select(db, sql); if (count == count2) { dbutils_context_result_error(context, "Table %s uses an single-column INTEGER primary key. For CRDT replication, primary keys must be globally unique. Consider using a TEXT primary key with UUIDs or ULID to avoid conflicts across nodes. 
If you understand the risk and still want to use this INTEGER primary key, set the third argument of the cloudsync_init function to 1 to skip this check.", name); diff --git a/src/network.c b/src/network.c index 845d1d1..27466bf 100644 --- a/src/network.c +++ b/src/network.c @@ -14,9 +14,15 @@ #include "cloudsync_private.h" #include "netword_private.h" +#ifndef SQLITE_WASM_EXTRA_INIT #ifndef CLOUDSYNC_OMIT_CURL #include "curl/curl.h" #endif +#else +#include <emscripten/fetch.h> +#include <string.h> +#include <stdlib.h> +#endif #ifdef __ANDROID__ #include "cacert.h" @@ -96,12 +102,108 @@ bool network_data_set_endpoints (network_data *data, char *auth, char *check, ch // MARK: - Utils - #ifndef CLOUDSYNC_OMIT_CURL +#ifdef SQLITE_WASM_EXTRA_INIT +NETWORK_RESULT network_receive_buffer (network_data *data, const char *endpoint, const char *authentication, bool zero_terminated, bool is_post_request, char *json_payload, const char *custom_header) { + char *buffer = NULL; + size_t blen = 0; + + emscripten_fetch_attr_t attr; + emscripten_fetch_attr_init(&attr); + + // Set method + if (json_payload || is_post_request) { + strcpy(attr.requestMethod, "POST"); + } else { + strcpy(attr.requestMethod, "GET"); + } + attr.attributes = EMSCRIPTEN_FETCH_LOAD_TO_MEMORY | EMSCRIPTEN_FETCH_SYNCHRONOUS | EMSCRIPTEN_FETCH_REPLACE; + + // Prepare header array (alternating key, value, NULL-terminated) + const char *headers[11]; + int h = 0; + + // Custom header (must be "Key: Value", split at ':') + char *custom_key = NULL; + if (custom_header) { + const char *colon = strchr(custom_header, ':'); + if (colon) { + size_t klen = colon - custom_header; + custom_key = (char *)malloc(klen + 1); + strncpy(custom_key, custom_header, klen); + custom_key[klen] = 0; + const char *custom_val = colon + 1; + while (*custom_val == ' ') custom_val++; + headers[h++] = custom_key; + headers[h++] = custom_val; + } + } + + // Authorization + char auth_header[256]; + if (authentication) { + snprintf(auth_header, sizeof(auth_header), "Bearer %s", 
authentication); + headers[h++] = "Authorization"; + headers[h++] = auth_header; + } + + // Content-Type for JSON + if (json_payload) { + headers[h++] = "Content-Type"; + headers[h++] = "application/json"; + } + + headers[h] = 0; + attr.requestHeaders = headers; + + // Body + if (json_payload) { + attr.requestData = json_payload; + attr.requestDataSize = strlen(json_payload); + } + + emscripten_fetch_t *fetch = emscripten_fetch(&attr, endpoint); // Blocks here until the operation is complete. + NETWORK_RESULT result = {0, NULL, 0, NULL, NULL}; + + if(fetch->readyState == 4){ + buffer = fetch->data; + blen = fetch->totalBytes; + } + + if (fetch->status >= 200 && fetch->status < 300) { + + if (blen > 0 && buffer) { + char *buf = (char*)malloc(blen + 1); + if (buf) { + memcpy(buf, buffer, blen); + buf[blen] = 0; + result.code = CLOUDSYNC_NETWORK_BUFFER; + result.buffer = buf; + result.blen = blen; + result.xfree = free; + } else result.code = CLOUDSYNC_NETWORK_ERROR; + } else result.code = CLOUDSYNC_NETWORK_OK; + } else { + result.code = CLOUDSYNC_NETWORK_ERROR; + if (fetch->statusText && fetch->statusText[0]) { + result.buffer = strdup(fetch->statusText); + } + result.blen = result.buffer ? strlen(result.buffer) : 0; + result.xfree = free; + } + + // cleanup + emscripten_fetch_close(fetch); + if (custom_key) free(custom_key); + + return result; +} +#else static bool network_buffer_check (network_buffer *data, size_t needed) { // alloc/resize buffer if (data->bused + needed > data->balloc) { if (needed < CLOUDSYNC_NETWORK_MINBUF_SIZE) needed = CLOUDSYNC_NETWORK_MINBUF_SIZE; - size_t balloc = (data->balloc * 2) + needed; + size_t balloc = data->balloc + needed; char *buffer = cloudsync_memory_realloc(data->buffer, balloc); if (!buffer) return false; @@ -168,7 +270,7 @@ NETWORK_RESULT network_receive_buffer (network_data *data, const char *endpoint, network_buffer netdata = {NULL, 0, 0, (zero_terminated) ? 
1 : 0}; curl_easy_setopt(curl, CURLOPT_WRITEDATA, &netdata); curl_easy_setopt(curl, CURLOPT_WRITEFUNCTION, network_receive_callback); - + // add optional JSON payload (implies setting CURLOPT_POST to 1) // or set the CURLOPT_POST option if (json_payload) { @@ -205,6 +307,7 @@ NETWORK_RESULT network_receive_buffer (network_data *data, const char *endpoint, return result; } +#endif static size_t network_read_callback(char *buffer, size_t size, size_t nitems, void *userdata) { network_read_data *rd = (network_read_data *)userdata; @@ -220,7 +323,45 @@ static size_t network_read_callback(char *buffer, size_t size, size_t nitems, vo return to_copy; } -bool network_send_buffer (network_data *data, const char *endpoint, const char *authentication, const void *blob, int blob_size) { +#ifdef SQLITE_WASM_EXTRA_INIT +bool network_send_buffer(network_data *data, const char *endpoint, const char *authentication, const void *blob, int blob_size) { + + bool result = false; + emscripten_fetch_attr_t attr; + emscripten_fetch_attr_init(&attr); + strcpy(attr.requestMethod, "PUT"); + attr.attributes = EMSCRIPTEN_FETCH_LOAD_TO_MEMORY | EMSCRIPTEN_FETCH_SYNCHRONOUS | EMSCRIPTEN_FETCH_REPLACE; + + // Prepare headers (alternating key, value, NULL-terminated) + // Max 3 headers: Accept, (optional Auth), Content-Type + const char *headers[7]; + int h = 0; + headers[h++] = "Accept"; + headers[h++] = "text/plain"; + char auth_header[256]; + if (authentication) { + snprintf(auth_header, sizeof(auth_header), "Bearer %s", authentication); + headers[h++] = "Authorization"; + headers[h++] = auth_header; + } + headers[h++] = "Content-Type"; + headers[h++] = "application/octet-stream"; + headers[h] = 0; + attr.requestHeaders = headers; + + // Set request body + attr.requestData = (const char *)blob; + attr.requestDataSize = blob_size; + + emscripten_fetch_t *fetch = emscripten_fetch(&attr, endpoint); // Blocks here until the operation is complete. 
+ if (fetch->status >= 200 && fetch->status < 300) result = true; + + emscripten_fetch_close(fetch); + + return result; +} +#else +bool network_send_buffer(network_data *data, const char *endpoint, const char *authentication, const void *blob, int blob_size) { struct curl_slist *headers = NULL; curl_mime *mime = NULL; bool result = false; @@ -292,6 +433,7 @@ bool network_send_buffer (network_data *data, const char *endpoint, const char * return result; } #endif +#endif int network_set_sqlite_result (sqlite3_context *context, NETWORK_RESULT *result) { int rc = 0; @@ -360,6 +502,9 @@ int network_extract_query_param(const char *query, const char *key, char *output size_t key_len = strlen(key); const char *p = query; + #ifdef SQLITE_WASM_EXTRA_INIT + if (*p == '?') p++; + #endif while (p && *p) { // Find the start of a key=value pair @@ -394,6 +539,19 @@ int network_extract_query_param(const char *query, const char *key, char *output } #ifndef CLOUDSYNC_OMIT_CURL + +#ifdef SQLITE_WASM_EXTRA_INIT +static char *substr(const char *start, const char *end) { + size_t len = end - start; + char *out = (char *)malloc(len + 1); + if (out) { + memcpy(out, start, len); + out[len] = 0; + } + return out; +} +#endif + bool network_compute_endpoints (sqlite3_context *context, network_data *data, const char *conn_string) { // compute endpoints bool result = false; @@ -410,12 +568,15 @@ bool network_compute_endpoints (sqlite3_context *context, network_data *data, co char *conn_string_https = NULL; + #ifndef SQLITE_WASM_EXTRA_INIT CURLUcode rc = CURLUE_OUT_OF_MEMORY; CURLU *url = curl_url(); if (!url) goto finalize; + #endif conn_string_https = cloudsync_string_replace_prefix(conn_string, "sqlitecloud://", "https://"); + #ifndef SQLITE_WASM_EXTRA_INIT // set URL: https://UUID.g5.sqlite.cloud:443/chinook.sqlite?apikey=hWDanFolRT9WDK0p54lufNrIyfgLZgtMw6tb6fbPmpo rc = curl_url_set(url, CURLUPART_URL, conn_string_https, 0); if (rc != CURLUE_OK) goto finalize; @@ -440,6 +601,37 @@ bool 
network_compute_endpoints (sqlite3_context *context, network_data *data, co // apikey=hWDanFolRT9WDK0p54lufNrIyfgLZgtMw6tb6fbPmpo (OPTIONAL) rc = curl_url_get(url, CURLUPART_QUERY, &query, 0); if (rc != CURLUE_OK && rc != CURLUE_NO_QUERY) goto finalize; + #else + // Parse: scheme://host[:port]/path?query + const char *p = strstr(conn_string_https, "://"); + if (!p) goto finalize; + scheme = substr(conn_string_https, p); + p += 3; + const char *host_start = p; + const char *host_end = strpbrk(host_start, ":/?"); + if (!host_end) goto finalize; + host = substr(host_start, host_end); + p = host_end; + if (*p == ':') { + ++p; + const char *port_end = strpbrk(p, "/?"); + if (!port_end) goto finalize; + port = substr(p, port_end); + p = port_end; + } + if (*p == '/') { + const char *path_start = p; + const char *path_end = strchr(path_start, '?'); + if (!path_end) path_end = path_start + strlen(path_start); + database = substr(path_start, path_end); + p = path_end; + } + if (*p == '?') { + query = strdup(p); + } + if (!scheme || !host || !database) goto finalize; + char *port_or_default = port && strcmp(port, "8860") != 0 ? port : CLOUDSYNC_DEFAULT_ENDPOINT_PORT; + #endif if (query != NULL) { char value[MAX_QUERY_VALUE_LEN]; if (!authentication && network_extract_query_param(query, "apikey", value, sizeof(value)) == 0) { @@ -463,8 +655,13 @@ bool network_compute_endpoints (sqlite3_context *context, network_data *data, co finalize: if (result == false) { // store proper result code/message + #ifndef SQLITE_WASM_EXTRA_INIT if (rc != CURLUE_OK) sqlite3_result_error(context, curl_url_strerror(rc), -1); - sqlite3_result_error_code(context, (rc != CURLUE_OK) ? SQLITE_ERROR : SQLITE_NOMEM); + sqlite3_result_error_code(context, (rc != CURLUE_OK) ? 
SQLITE_ERROR : SQLITE_NOMEM); + #else + sqlite3_result_error(context, "URL parse error", -1); + sqlite3_result_error_code(context, SQLITE_ERROR); + #endif // cleanup memory managed by the extension if (authentication) cloudsync_memory_free(authentication); @@ -485,13 +682,17 @@ bool network_compute_endpoints (sqlite3_context *context, network_data *data, co data->upload_endpoint = upload_endpoint; } - // cleanup memory managed by libcurl + // cleanup memory + #ifndef SQLITE_WASM_EXTRA_INIT + if (url) curl_url_cleanup(url); + #else + #define curl_free(x) free(x) + #endif if (scheme) curl_free(scheme); if (host) curl_free(host); if (port) curl_free(port); if (database) curl_free(database); if (query) curl_free(query); - if (url) curl_url_cleanup(url); if (conn_string_https && conn_string_https != conn_string) cloudsync_memory_free(conn_string_https); return result; @@ -518,7 +719,7 @@ network_data *cloudsync_network_data(sqlite3_context *context) { void cloudsync_network_init (sqlite3_context *context, int argc, sqlite3_value **argv) { DEBUG_FUNCTION("cloudsync_network_init"); - #ifndef CLOUDSYNC_OMIT_CURL + #if !defined(CLOUDSYNC_OMIT_CURL) && !defined(SQLITE_WASM_EXTRA_INIT) curl_global_init(CURL_GLOBAL_ALL); #endif @@ -583,7 +784,7 @@ void cloudsync_network_cleanup (sqlite3_context *context, int argc, sqlite3_valu sqlite3_result_int(context, SQLITE_OK); - #ifndef CLOUDSYNC_OMIT_CURL + #if !defined(CLOUDSYNC_OMIT_CURL) && !defined(SQLITE_WASM_EXTRA_INIT) curl_global_cleanup(); #endif } @@ -625,7 +826,7 @@ void cloudsync_network_set_apikey (sqlite3_context *context, int argc, sqlite3_v void cloudsync_network_has_unsent_changes (sqlite3_context *context, int argc, sqlite3_value **argv) { sqlite3 *db = sqlite3_context_db_handle(context); - char *sql = "SELECT max(db_version), hex(site_id) FROM cloudsync_changes() WHERE site_id == (SELECT site_id FROM cloudsync_site_id WHERE rowid=0)"; + char *sql = "SELECT max(db_version), hex(site_id) FROM cloudsync_changes WHERE 
site_id == (SELECT site_id FROM cloudsync_site_id WHERE rowid=0)"; int last_local_change = (int)dbutils_int_select(db, sql); if (last_local_change == 0) { sqlite3_result_int(context, 0); diff --git a/src/vtab.c b/src/vtab.c index 21ca6dd..ca29429 100644 --- a/src/vtab.c +++ b/src/vtab.c @@ -137,18 +137,18 @@ char *build_changes_sql (sqlite3 *db, const char *idxs) { "changes_query AS ( " " SELECT " " 'SELECT " - " \"' || \"table_name\" || '\" AS tbl, " + " ''' || \"table_name\" || ''' AS tbl, " " t1.pk AS pk, " " t1.col_name AS col_name, " - " cloudsync_col_value(\"' || \"table_name\" || '\", t1.col_name, t1.pk) AS col_value, " + " cloudsync_col_value(''' || \"table_name\" || ''', t1.col_name, t1.pk) AS col_value, " " t1.col_version AS col_version, " " t1.db_version AS db_version, " " site_tbl.site_id AS site_id, " " t1.seq AS seq, " " COALESCE(t2.col_version, 1) AS cl " - " FROM \"' || \"table_meta\" || '\" AS t1 " + " FROM ''' || \"table_meta\" || ''' AS t1 " " LEFT JOIN cloudsync_site_id AS site_tbl ON t1.site_id = site_tbl.rowid " - " LEFT JOIN \"' || \"table_meta\" || '\" AS t2 ON t1.pk = t2.pk AND t2.col_name = ''" CLOUDSYNC_TOMBSTONE_VALUE "'' " + " LEFT JOIN ''' || \"table_meta\" || ''' AS t2 ON t1.pk = t2.pk AND t2.col_name = ''" CLOUDSYNC_TOMBSTONE_VALUE "'' " " WHERE col_value IS NOT ''" CLOUDSYNC_RLS_RESTRICTED_VALUE "''' " " AS query_string FROM table_names " "), " diff --git a/src/wasm.c b/src/wasm.c new file mode 100644 index 0000000..601760a --- /dev/null +++ b/src/wasm.c @@ -0,0 +1,19 @@ +#ifdef SQLITE_WASM_EXTRA_INIT + +#include "sqlite3.h" +#include <stdio.h> + +#include "utils.c" +#include "network.c" +#include "dbutils.c" +#include "cloudsync.c" +#include "vtab.c" +#include "pk.c" +#include "lz4.c" + +int sqlite3_wasm_extra_init(const char *z) { + fprintf(stderr, "%s: %s()\n", __FILE__, __func__); + return sqlite3_auto_extension((void *) sqlite3_cloudsync_init); +} + +#endif \ No newline at end of file