From 1d6043196955f916908b9c6c621d68b48aa7e751 Mon Sep 17 00:00:00 2001 From: Oscar Franco Date: Fri, 12 Apr 2024 17:16:11 +0200 Subject: [PATCH] C ABI target (#4793) * Add C ABI target with support for iOS and Android * Start undoing merge problems * Undo merge frocks * Undo merge frocks * Undo merge frocks * Various fixes * Update query-engine/query-engine-c-abi/README.md Co-authored-by: Jan Piotrowski * Update README * Lint * Safety comments * Shellcheck * PR changes * Update query-engine/query-engine-c-abi/README.md Co-authored-by: Flavian Desverne * PR comments * Get rid of functions file * Remove comments * Replace all custom errors with the common ApiError * Remove hanging TODO * Remove unnecessary from serde error * Add failed at field to migrations table and throw on failed migration * Update query-engine/query-engine-c-abi/src/engine.rs Co-authored-by: Flavian Desverne * Update react native tests to take real emulator url * Return null adapter on RN case and handle null case * Fix tests * Fix mssql & cockroach * Pass raw error pointer to C context and resign de allocation to calling function * Serialize API errors on start, commit and rollback transaction * Re introduce dispatcher and spans * Add metrics feature to query-engine-common * change base path missing message to tracing * Fix copy xcframework script * Store URL in the query engine instance * Fix non cloned url error and linting * Add ReactNative preview feature * Modify make scripts and add e2e workflow for RN * Tweak CI * Tweak CI * Tweak CI * Tweak CI * Tweak CI * Tweak CI * Comment e2e test for RN * Update query-engine/query-engine-c-abi/src/engine.rs Co-authored-by: Alexey Orlenko * Adjust to main * Remove non-working workflow --------- Co-authored-by: Jan Piotrowski Co-authored-by: Flavian Desverne Co-authored-by: Sergey Tatarintsev Co-authored-by: Serhii Tatarintsev Co-authored-by: Alexey Orlenko --- .github/workflows/codspeed.yml | 2 +- Cargo.lock | 70 +- Cargo.toml | 4 + Makefile | 3 + libs/query-engine-common/Cargo.toml | 9 +- libs/query-engine-common/src/engine.rs | 1 + libs/query-engine-common/src/error.rs | 1 + psl/psl-core/src/common/preview_features.rs | 5 +- query-engine/connector-test-kit-rs/README.md | 2 +- .../query-engine-tests/tests/new/metrics.rs | 2 +- .../query-tests-setup/src/config.rs | 36 +- .../src/connector_tag/js/external_process.rs | 3 +- .../src/connector_tag/sqlite.rs | 3 + .../test-configs/react-native | 6 + .../connectors/sql-query-connector/Cargo.toml | 12 +- .../sql-query-connector/src/database/mod.rs | 19 +- .../connectors/sql-query-connector/src/lib.rs | 13 +- .../driver-adapters/executor/src/rn.ts | 96 +++ .../driver-adapters/executor/src/testd.ts | 418 ++++++----- query-engine/query-engine-c-abi/.gitignore | 7 + query-engine/query-engine-c-abi/Cargo.toml | 52 ++ query-engine/query-engine-c-abi/Makefile | 56 ++ query-engine/query-engine-c-abi/README.md | 56 ++ .../build-android-target.sh | 62 ++ .../query-engine-c-abi/build-openssl.sh | 76 ++ query-engine/query-engine-c-abi/build.rs | 33 + .../query-engine-c-abi/cargo-config.toml | 14 + .../query-engine-c-abi/copy-android.sh | 18 + query-engine/query-engine-c-abi/copy-ios.sh | 12 + query-engine/query-engine-c-abi/src/engine.rs | 662 ++++++++++++++++++ query-engine/query-engine-c-abi/src/lib.rs | 5 + query-engine/query-engine-c-abi/src/logger.rs | 174 +++++ .../query-engine-c-abi/src/migrations.rs | 185 +++++ query-engine/query-engine-c-abi/src/tracer.rs | 1 + query-engine/query-engine-node-api/Cargo.toml | 12 +- 
query-engine/query-engine/Cargo.toml | 7 +- query-engine/request-handlers/Cargo.toml | 13 +- .../request-handlers/src/load_executor.rs | 10 +- 38 files changed, 1937 insertions(+), 223 deletions(-) create mode 100644 query-engine/connector-test-kit-rs/test-configs/react-native create mode 100644 query-engine/driver-adapters/executor/src/rn.ts create mode 100644 query-engine/query-engine-c-abi/.gitignore create mode 100644 query-engine/query-engine-c-abi/Cargo.toml create mode 100644 query-engine/query-engine-c-abi/Makefile create mode 100644 query-engine/query-engine-c-abi/README.md create mode 100755 query-engine/query-engine-c-abi/build-android-target.sh create mode 100755 query-engine/query-engine-c-abi/build-openssl.sh create mode 100644 query-engine/query-engine-c-abi/build.rs create mode 100644 query-engine/query-engine-c-abi/cargo-config.toml create mode 100755 query-engine/query-engine-c-abi/copy-android.sh create mode 100755 query-engine/query-engine-c-abi/copy-ios.sh create mode 100644 query-engine/query-engine-c-abi/src/engine.rs create mode 100644 query-engine/query-engine-c-abi/src/lib.rs create mode 100644 query-engine/query-engine-c-abi/src/logger.rs create mode 100644 query-engine/query-engine-c-abi/src/migrations.rs create mode 100644 query-engine/query-engine-c-abi/src/tracer.rs diff --git a/.github/workflows/codspeed.yml b/.github/workflows/codspeed.yml index 96263f590079..285c8c9fbd54 100644 --- a/.github/workflows/codspeed.yml +++ b/.github/workflows/codspeed.yml @@ -31,7 +31,7 @@ jobs: run: cargo codspeed build -p schema --features all_connectors - name: "Build the benchmark targets: request-handlers" - run: cargo codspeed build -p request-handlers --features native + run: cargo codspeed build -p request-handlers --features native,all - name: Run the benchmarks uses: CodSpeedHQ/action@v2 diff --git a/Cargo.lock b/Cargo.lock index da25af4bcf41..d3aa22a1b839 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -475,6 +475,25 @@ version = "0.3.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "37b2a672a2cb129a2e41c10b1224bb368f9f37a2b16b612598138befd7b37eb5" +[[package]] +name = "cbindgen" +version = "0.24.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4b922faaf31122819ec80c4047cc684c6979a087366c069611e33649bf98e18d" +dependencies = [ + "clap 3.2.25", + "heck 0.4.1", + "indexmap 1.9.3", + "log", + "proc-macro2", + "quote", + "serde", + "serde_json", + "syn 1.0.109", + "tempfile", + "toml", +] + [[package]] name = "cc" version = "1.0.83" @@ -574,9 +593,12 @@ version = "3.2.25" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "4ea181bf566f71cb9a5d17a59e1871af638180a18fb0035c92ae62b705207123" dependencies = [ + "atty", "bitflags 1.3.2", "clap_lex", "indexmap 1.9.3", + "strsim 0.10.0", + "termcolor", "textwrap 0.16.0", ] @@ -2096,9 +2118,9 @@ dependencies = [ [[package]] name = "libc" -version = "0.2.150" +version = "0.2.151" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "89d92a4743f9a61002fae18374ed11e7973f530cb3a3255fb354818118b2203c" +checksum = "302d7ab3130588088d277783b1e2d2e10c9e9e4a16dd9050e6ec93fb3e7048f4" [[package]] name = "libloading" @@ -3724,6 +3746,41 @@ dependencies = [ "user-facing-errors", ] +[[package]] +name = "query-engine-c-abi" +version = "0.1.0" +dependencies = [ + "anyhow", + "async-trait", + "cbindgen", + "chrono", + "connection-string", + "futures", + "indoc 2.0.3", + "once_cell", + "opentelemetry", + "psl", + "quaint", + 
"query-connector", + "query-core", + "query-engine-common", + "query-structure", + "request-handlers", + "rusqlite", + "serde", + "serde_json", + "sql-query-connector", + "thiserror", + "tokio", + "tracing", + "tracing-futures", + "tracing-opentelemetry", + "tracing-subscriber", + "url", + "user-facing-errors", + "uuid", +] + [[package]] name = "query-engine-common" version = "0.1.0" @@ -5247,6 +5304,15 @@ dependencies = [ "windows-sys 0.48.0", ] +[[package]] +name = "termcolor" +version = "1.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ff1bc3d3f05aff0403e8ac0d92ced918ec05b666a43f83297ccef5bea8a3d449" +dependencies = [ + "winapi-util", +] + [[package]] name = "test-cli" version = "0.1.0" diff --git a/Cargo.toml b/Cargo.toml index f14f7c508c8c..513dc7283b04 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -25,6 +25,7 @@ members = [ "query-engine/query-engine", "query-engine/query-engine-node-api", "query-engine/query-engine-wasm", + "query-engine/query-engine-c-abi", "query-engine/request-handlers", "query-engine/schema", "libs/*", @@ -86,6 +87,9 @@ strip = "symbols" [profile.release.package.query-engine] strip = "symbols" +[profile.release.package.query-engine-c-abi] +strip = "symbols" + [profile.release] lto = "fat" codegen-units = 1 diff --git a/Makefile b/Makefile index 8c08ecaaa173..ec16c50b9dc2 100644 --- a/Makefile +++ b/Makefile @@ -136,6 +136,9 @@ start-sqlite: dev-sqlite: cp $(CONFIG_PATH)/sqlite $(CONFIG_FILE) +dev-react-native: + cp $(CONFIG_PATH)/react-native $(CONFIG_FILE) + dev-libsql-js: build-qe-napi build-driver-adapters-kit cp $(CONFIG_PATH)/libsql-js $(CONFIG_FILE) diff --git a/libs/query-engine-common/Cargo.toml b/libs/query-engine-common/Cargo.toml index e2fb3b4bfe48..7554bcb7f067 100644 --- a/libs/query-engine-common/Cargo.toml +++ b/libs/query-engine-common/Cargo.toml @@ -3,7 +3,8 @@ name = "query-engine-common" version = "0.1.0" edition = "2021" -# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html +[features] +metrics = [] [dependencies] thiserror = "1" @@ -20,11 +21,11 @@ tracing.workspace = true tracing-subscriber = { version = "0.3" } tracing-futures = "0.2" tracing-opentelemetry = "0.17.3" -opentelemetry = { version = "0.17"} +opentelemetry = { version = "0.17" } -[target.'cfg(not(target_arch = "wasm32"))'.dependencies] -napi.workspace = true +[target.'cfg(all(not(target_arch = "wasm32")))'.dependencies] query-engine-metrics = { path = "../../query-engine/metrics" } +napi.workspace = true [target.'cfg(target_arch = "wasm32")'.dependencies] wasm-bindgen.workspace = true diff --git a/libs/query-engine-common/src/engine.rs b/libs/query-engine-common/src/engine.rs index 77aa2fec804b..96c51584e437 100644 --- a/libs/query-engine-common/src/engine.rs +++ b/libs/query-engine-common/src/engine.rs @@ -58,6 +58,7 @@ pub struct EngineBuilder { pub struct ConnectedEngineNative { pub config_dir: PathBuf, pub env: HashMap, + #[cfg(feature = "metrics")] pub metrics: Option, } diff --git a/libs/query-engine-common/src/error.rs b/libs/query-engine-common/src/error.rs index f7c9712af8a7..ef3b4b719d2c 100644 --- a/libs/query-engine-common/src/error.rs +++ b/libs/query-engine-common/src/error.rs @@ -94,6 +94,7 @@ impl From for ApiError { } #[cfg(not(target_arch = "wasm32"))] +#[cfg(not(any(target_os = "android", target_os = "ios")))] impl From for napi::Error { fn from(e: ApiError) -> Self { let user_facing = user_facing_errors::Error::from(e); diff --git a/psl/psl-core/src/common/preview_features.rs 
b/psl/psl-core/src/common/preview_features.rs index ccacad441263..9ad86929a34a 100644 --- a/psl/psl-core/src/common/preview_features.rs +++ b/psl/psl-core/src/common/preview_features.rs @@ -78,8 +78,9 @@ features!( UncheckedScalarInputs, Views, RelationJoins, + ReactNative, PrismaSchemaFolder, - OmitApi, + OmitApi ); /// Generator preview features (alphabetically sorted) @@ -131,7 +132,7 @@ pub const ALL_PREVIEW_FEATURES: FeatureMap = FeatureMap { | TransactionApi | UncheckedScalarInputs }), - hidden: enumflags2::make_bitflags!(PreviewFeature::{PrismaSchemaFolder}), + hidden: enumflags2::make_bitflags!(PreviewFeature::{PrismaSchemaFolder | ReactNative}), }; #[derive(Debug)] diff --git a/query-engine/connector-test-kit-rs/README.md b/query-engine/connector-test-kit-rs/README.md index 650f8f2d4dd0..5d8fbcc148bb 100644 --- a/query-engine/connector-test-kit-rs/README.md +++ b/query-engine/connector-test-kit-rs/README.md @@ -84,7 +84,7 @@ To run tests through a driver adapters, you should also configure the following * `DRIVER_ADAPTER`: tells the test executor to use a particular driver adapter. Set to `neon`, `planetscale` or any other supported adapter. * `DRIVER_ADAPTER_CONFIG`: a json string with the configuration for the driver adapter. This is adapter specific. See the [github workflow for driver adapter tests](.github/workflows/query-engine-driver-adapters.yml) for examples on how to configure the driver adapters. -* `ENGINE`: can be used to run either `wasm` or `napi` version of the engine. +* `ENGINE`: can be used to run either `wasm` or `napi` or `c-abi` version of the engine. Example: diff --git a/query-engine/connector-test-kit-rs/query-engine-tests/tests/new/metrics.rs b/query-engine/connector-test-kit-rs/query-engine-tests/tests/new/metrics.rs index 7a020f27aa31..50d8f7c0a9ca 100644 --- a/query-engine/connector-test-kit-rs/query-engine-tests/tests/new/metrics.rs +++ b/query-engine/connector-test-kit-rs/query-engine-tests/tests/new/metrics.rs @@ -5,7 +5,7 @@ use query_engine_tests::test_suite; exclude( Vitess("planetscale.js", "planetscale.js.wasm"), Postgres("neon.js", "pg.js", "neon.js.wasm", "pg.js.wasm"), - Sqlite("libsql.js", "libsql.js.wasm", "cfd1") + Sqlite("libsql.js", "libsql.js.wasm", "cfd1", "react-native") ) )] mod metrics { diff --git a/query-engine/connector-test-kit-rs/query-tests-setup/src/config.rs b/query-engine/connector-test-kit-rs/query-tests-setup/src/config.rs index fa7964d4a263..5566fe3d717e 100644 --- a/query-engine/connector-test-kit-rs/query-tests-setup/src/config.rs +++ b/query-engine/connector-test-kit-rs/query-tests-setup/src/config.rs @@ -8,11 +8,12 @@ use std::{convert::TryFrom, env, fmt::Display, fs::File, io::Read, path::PathBuf static TEST_CONFIG_FILE_NAME: &str = ".test_config"; -#[derive(Debug, Deserialize, Default, Clone)] +#[derive(Debug, Deserialize, Default, Clone, Copy, PartialEq)] pub enum TestExecutor { #[default] Napi, Wasm, + Mobile, } impl Display for TestExecutor { @@ -20,6 +21,7 @@ impl Display for TestExecutor { match self { TestExecutor::Napi => f.write_str("Napi"), TestExecutor::Wasm => f.write_str("Wasm"), + TestExecutor::Mobile => f.write_str("Mobile"), } } } @@ -62,6 +64,11 @@ pub struct TestConfigFromSerde { /// test executor by setting the `DRIVER_ADAPTER_CONFIG` env var when spawning the executor. /// Correctness: if set, [`TestConfigFromSerde::driver_adapter`] must be set as well. pub(crate) driver_adapter_config: Option, + + /// For mobile tests a running device with a valid http server is required. 
+ /// This is the URL to the mobile emulator which will execute the queries against + /// the instances of the engine running on the device. + pub(crate) mobile_emulator_url: Option, } impl TestConfigFromSerde { @@ -105,10 +112,18 @@ impl TestConfigFromSerde { Err(err) => exit_with_message(&err.to_string()), } - if self.external_test_executor.is_some() && self.driver_adapter.is_none() { - exit_with_message( - "When using an external test executor, the driver adapter (DRIVER_ADAPTER env var) must be set.", - ); + if self.external_test_executor.is_some() { + if self.external_test_executor.unwrap() == TestExecutor::Mobile && self.mobile_emulator_url.is_none() { + exit_with_message( + "When using the mobile external test executor, the mobile emulator URL (MOBILE_EMULATOR_URL env var) must be set.", + ); + } + + if self.external_test_executor.unwrap() != TestExecutor::Mobile && self.driver_adapter.is_none() { + exit_with_message( + "When using an external test executor, the driver adapter (DRIVER_ADAPTER env var) must be set.", + ); + } } if self.driver_adapter.is_some() && self.external_test_executor.is_none() { @@ -154,6 +169,7 @@ pub struct TestConfig { pub(crate) connector_version: Option, pub(crate) with_driver_adapter: Option, pub(crate) is_ci: bool, + pub(crate) mobile_emulator_url: Option, } impl From for TestConfig { @@ -174,6 +190,7 @@ impl From for TestConfig { connector_version: config.connector_version, is_ci: config.is_ci, with_driver_adapter, + mobile_emulator_url: config.mobile_emulator_url, } } } @@ -213,6 +230,7 @@ And optionally, to test driver adapters - EXTERNAL_TEST_EXECUTOR - DRIVER_ADAPTER - DRIVER_ADAPTER_CONFIG (optional, not required by all driver adapters) +- MOBILE_EMULATOR_URL (optional, only required by mobile external test executor) 📁 Config file @@ -278,6 +296,8 @@ impl TestConfig { .map(|config| serde_json::from_str::(config.as_str()).ok()) .unwrap_or_default(); + let mobile_emulator_url = std::env::var("MOBILE_EMULATOR_URL").ok(); + // Just care for a set value for now. 
let is_ci = std::env::var("BUILDKITE").is_ok(); @@ -289,13 +309,13 @@ impl TestConfig { external_test_executor, driver_adapter, driver_adapter_config, + mobile_emulator_url, }) .map(Self::from) } fn from_file() -> Option { let current_dir = env::current_dir().ok(); - current_dir .and_then(|path| Self::try_path(config_path(path))) .or_else(|| Self::workspace_root().and_then(|path| Self::try_path(config_path(path)))) @@ -402,6 +422,10 @@ impl TestConfig { "PRISMA_DISABLE_QUAINT_EXECUTORS".to_string(), "1".to_string(), ), + ( + "MOBILE_EMULATOR_URL".to_string(), + self.mobile_emulator_url.clone().unwrap_or_default() + ), ) } } diff --git a/query-engine/connector-test-kit-rs/query-tests-setup/src/connector_tag/js/external_process.rs b/query-engine/connector-test-kit-rs/query-tests-setup/src/connector_tag/js/external_process.rs index 06d1551f9405..9db9556137f4 100644 --- a/query-engine/connector-test-kit-rs/query-tests-setup/src/connector_tag/js/external_process.rs +++ b/query-engine/connector-test-kit-rs/query-tests-setup/src/connector_tag/js/external_process.rs @@ -196,8 +196,9 @@ fn start_rpc_thread(mut receiver: mpsc::Receiver) -> Result<()> { .build() .unwrap() .block_on(async move { + let environment = CONFIG.for_external_executor(); let process = match Command::new(&path) - .envs(CONFIG.for_external_executor()) + .envs(environment) .stdin(Stdio::piped()) .stdout(Stdio::piped()) .stderr(Stdio::inherit()) diff --git a/query-engine/connector-test-kit-rs/query-tests-setup/src/connector_tag/sqlite.rs b/query-engine/connector-test-kit-rs/query-tests-setup/src/connector_tag/sqlite.rs index d1f185a6cf88..4f45c7d3a242 100644 --- a/query-engine/connector-test-kit-rs/query-tests-setup/src/connector_tag/sqlite.rs +++ b/query-engine/connector-test-kit-rs/query-tests-setup/src/connector_tag/sqlite.rs @@ -29,6 +29,7 @@ impl ConnectorTagInterface for SqliteConnectorTag { #[derive(Clone, Debug, PartialEq, Eq)] pub enum SqliteVersion { V3, + ReactNative, LibsqlJsNapi, LibsqlJsWasm, CloudflareD1, @@ -37,6 +38,7 @@ pub enum SqliteVersion { impl ToString for SqliteVersion { fn to_string(&self) -> String { match self { + SqliteVersion::ReactNative => "react-native".to_string(), SqliteVersion::V3 => "3".to_string(), SqliteVersion::LibsqlJsNapi => "libsql.js".to_string(), SqliteVersion::LibsqlJsWasm => "libsql.js.wasm".to_string(), @@ -53,6 +55,7 @@ impl TryFrom<&str> for SqliteVersion { "3" => Self::V3, "libsql.js" => Self::LibsqlJsNapi, "libsql.js.wasm" => Self::LibsqlJsWasm, + "react-native" => Self::ReactNative, "cfd1" => Self::CloudflareD1, _ => return Err(TestError::parse_error(format!("Unknown SQLite version `{s}`"))), }; diff --git a/query-engine/connector-test-kit-rs/test-configs/react-native b/query-engine/connector-test-kit-rs/test-configs/react-native new file mode 100644 index 000000000000..858347a7e6a4 --- /dev/null +++ b/query-engine/connector-test-kit-rs/test-configs/react-native @@ -0,0 +1,6 @@ +{ + "connector": "sqlite", + "version": "react-native", + "external_test_executor": "Mobile", + "mobile_emulator_url": "http://localhost:3000" +} diff --git a/query-engine/connectors/sql-query-connector/Cargo.toml b/query-engine/connectors/sql-query-connector/Cargo.toml index 4c55cff55420..c7152688629c 100644 --- a/query-engine/connectors/sql-query-connector/Cargo.toml +++ b/query-engine/connectors/sql-query-connector/Cargo.toml @@ -10,15 +10,8 @@ sqlite = ["quaint/sqlite", "psl/sqlite"] mssql = ["quaint/mssql"] cockroachdb = ["relation_joins", "quaint/postgresql", "psl/cockroachdb"] vendored-openssl = 
["quaint/vendored-openssl"] -native_all = [ - "sqlite", - "mysql", - "postgresql", - "mssql", - "cockroachdb", - "quaint/native", - "quaint/pooled", -] +all = ["sqlite", "mysql", "postgresql", "mssql", "cockroachdb", "native"] +native = ["quaint/native", "quaint/pooled"] # TODO: At the moment of writing (rustc 1.77.0), can_have_capability from psl does not eliminate joins # code from bundle for some reason, so we are doing it explicitly. Check with a newer version of compiler - if elimination # happens successfully, we don't need this feature anymore @@ -46,7 +39,6 @@ tracing-opentelemetry = "0.17.3" cuid = { git = "https://github.com/prisma/cuid-rust", branch = "wasm32-support" } quaint.workspace = true - [dependencies.connector-interface] package = "query-connector" path = "../query-connector" diff --git a/query-engine/connectors/sql-query-connector/src/database/mod.rs b/query-engine/connectors/sql-query-connector/src/database/mod.rs index 513100250c8f..e0ec3f7e29e5 100644 --- a/query-engine/connectors/sql-query-connector/src/database/mod.rs +++ b/query-engine/connectors/sql-query-connector/src/database/mod.rs @@ -3,11 +3,15 @@ mod connection; mod js; mod transaction; -#[cfg(not(target_arch = "wasm32"))] +#[cfg(feature = "native")] pub(crate) mod native { + #[cfg(feature = "mssql")] pub(crate) mod mssql; + #[cfg(feature = "mysql")] pub(crate) mod mysql; + #[cfg(feature = "postgresql")] pub(crate) mod postgresql; + #[cfg(feature = "sqlite")] pub(crate) mod sqlite; } @@ -19,8 +23,17 @@ use connector_interface::{error::ConnectorError, Connector}; #[cfg(feature = "driver-adapters")] pub use js::*; -#[cfg(not(target_arch = "wasm32"))] -pub use native::{mssql::*, mysql::*, postgresql::*, sqlite::*}; +#[cfg(all(feature = "native", feature = "mssql"))] +pub use native::mssql::*; + +#[cfg(all(feature = "native", feature = "mysql"))] +pub use native::mysql::*; + +#[cfg(all(feature = "native", feature = "postgresql"))] +pub use native::postgresql::*; + +#[cfg(all(feature = "native", feature = "sqlite"))] +pub use native::sqlite::*; #[async_trait] pub trait FromSource { diff --git a/query-engine/connectors/sql-query-connector/src/lib.rs b/query-engine/connectors/sql-query-connector/src/lib.rs index 8dc26bda5c25..52bc33a51b0c 100644 --- a/query-engine/connectors/sql-query-connector/src/lib.rs +++ b/query-engine/connectors/sql-query-connector/src/lib.rs @@ -27,7 +27,16 @@ pub use database::FromSource; pub use database::Js; pub use error::SqlError; -#[cfg(not(target_arch = "wasm32"))] -pub use database::{Mssql, Mysql, PostgreSql, Sqlite}; +#[cfg(all(feature = "native", feature = "mssql"))] +pub use database::Mssql; + +#[cfg(all(feature = "native", feature = "mysql"))] +pub use database::Mysql; + +#[cfg(all(feature = "native", feature = "postgresql"))] +pub use database::PostgreSql; + +#[cfg(all(feature = "native", feature = "sqlite"))] +pub use database::Sqlite; type Result = std::result::Result; diff --git a/query-engine/driver-adapters/executor/src/rn.ts b/query-engine/driver-adapters/executor/src/rn.ts new file mode 100644 index 000000000000..d3c62b3e40e7 --- /dev/null +++ b/query-engine/driver-adapters/executor/src/rn.ts @@ -0,0 +1,96 @@ +export function createRNEngineConnector( + url: string, + schema: string, + logCallback: (msg: string) => void +) { + const headers = { + "Content-Type": "application/json", + Accept: "application/json", + }; + + return { + connect: async () => { + const res = await fetch(`${url}/connect`, { + method: "POST", + mode: "no-cors", + headers, + body: JSON.stringify({ 
schema }), + }); + + return await res.json(); + }, + query: async ( + body: string, + trace: string, + txId: string + ): Promise => { + const res = await fetch(`${url}/query`, { + method: "POST", + mode: "no-cors", + headers, + body: JSON.stringify({ + body, + trace, + txId, + }), + }); + + const response = await res.json(); + + if (response.logs.length) { + response.logs.forEach(logCallback); + } + + return response.engineResponse; + }, + startTransaction: async (body: string, trace: string): Promise => { + const res = await fetch(`${url}/start_transaction`, { + method: "POST", + mode: "no-cors", + headers, + body: JSON.stringify({ + body, + trace, + }), + }); + return await res.json(); + }, + commitTransaction: async (txId: string, trace: string): Promise => { + const res = await fetch(`${url}/commit_transaction`, { + method: "POST", + mode: "no-cors", + headers, + body: JSON.stringify({ + txId, + trace, + }), + }); + return res.json(); + }, + rollbackTransaction: async ( + txId: string, + trace: string + ): Promise => { + const res = await fetch(`${url}/rollback_transaction`, { + method: "POST", + mode: "no-cors", + headers, + body: JSON.stringify({ + txId, + trace, + }), + }); + return res.json(); + }, + disconnect: async (trace: string) => { + await fetch(`${url}/disconnect`, { + method: "POST", + mode: "no-cors", + headers, + body: JSON.stringify({ + trace, + }), + }); + }, + }; +} diff --git a/query-engine/driver-adapters/executor/src/testd.ts b/query-engine/driver-adapters/executor/src/testd.ts index 0c96fb927379..a83df43d86d0 100644 --- a/query-engine/driver-adapters/executor/src/testd.ts +++ b/query-engine/driver-adapters/executor/src/testd.ts @@ -1,220 +1,276 @@ -import * as readline from 'node:readline' -import { match } from 'ts-pattern' -import * as S from '@effect/schema/Schema' -import {bindAdapter, ErrorCapturingDriverAdapter} from '@prisma/driver-adapter-utils' -import { webcrypto } from 'node:crypto' - -import type { DriverAdaptersManager } from './driver-adapters-manager' -import { jsonRpc, Env, ExternalTestExecutor } from './types' -import * as qe from './qe' -import { PgManager } from './driver-adapters-manager/pg' -import { NeonWsManager } from './driver-adapters-manager/neon.ws' -import { LibSQLManager } from './driver-adapters-manager/libsql' -import { PlanetScaleManager } from './driver-adapters-manager/planetscale' -import { D1Manager } from './driver-adapters-manager/d1' +import * as readline from "node:readline"; +import { match } from "ts-pattern"; +import * as S from "@effect/schema/Schema"; +import { + bindAdapter, + ErrorCapturingDriverAdapter, +} from "@prisma/driver-adapter-utils"; +import { webcrypto } from "node:crypto"; + +import type { DriverAdaptersManager } from "./driver-adapters-manager"; +import { jsonRpc, Env, ExternalTestExecutor } from "./types"; +import * as qe from "./qe"; +import { PgManager } from "./driver-adapters-manager/pg"; +import { NeonWsManager } from "./driver-adapters-manager/neon.ws"; +import { LibSQLManager } from "./driver-adapters-manager/libsql"; +import { PlanetScaleManager } from "./driver-adapters-manager/planetscale"; +import { D1Manager } from "./driver-adapters-manager/d1"; +import { createRNEngineConnector } from "./rn"; if (!global.crypto) { - global.crypto = webcrypto as Crypto + global.crypto = webcrypto as Crypto; } -async function initialiseDriverAdapterManager(env: Env, migrationScript?: string): Promise { - return match(env) - .with({ DRIVER_ADAPTER: 'pg' }, async (env) => await PgManager.setup(env)) - .with({ 
DRIVER_ADAPTER: 'neon:ws' }, async (env) => await NeonWsManager.setup(env)) - .with({ DRIVER_ADAPTER: 'libsql' }, async (env) => await LibSQLManager.setup(env)) - .with({ DRIVER_ADAPTER: 'planetscale' }, async (env) => await PlanetScaleManager.setup(env)) - .with({ DRIVER_ADAPTER: 'd1' }, async (env) => await D1Manager.setup(env, migrationScript)) - .exhaustive() +async function initialiseDriverAdapterManager( + env: Env, + migrationScript?: string +): Promise { + return match(env) + .with({ DRIVER_ADAPTER: "pg" }, async (env) => await PgManager.setup(env)) + .with( + { DRIVER_ADAPTER: "neon:ws" }, + async (env) => await NeonWsManager.setup(env) + ) + .with( + { DRIVER_ADAPTER: "libsql" }, + async (env) => await LibSQLManager.setup(env) + ) + .with( + { DRIVER_ADAPTER: "planetscale" }, + async (env) => await PlanetScaleManager.setup(env) + ) + .with( + { DRIVER_ADAPTER: "d1" }, + async (env) => await D1Manager.setup(env, migrationScript) + ) + .exhaustive(); } // conditional debug logging based on LOG_LEVEL env var const debug = (() => { - if ((process.env.LOG_LEVEL ?? '').toLowerCase() != 'debug') { - return (...args: any[]) => {} - } + if ((process.env.LOG_LEVEL ?? "").toLowerCase() != "debug") { + return (...args: any[]) => {}; + } - return (...args: any[]) => { - console.error('[nodejs] DEBUG:', ...args); - }; + return (...args: any[]) => { + console.error("[nodejs] DEBUG:", ...args); + }; })(); // error logger -const err = (...args: any[]) => console.error('[nodejs] ERROR:', ...args); +const err = (...args: any[]) => console.error("[nodejs] ERROR:", ...args); async function main(): Promise { - const env = S.decodeUnknownSync(Env)(process.env) - console.log('[env]', env) - - const iface = readline.createInterface({ - input: process.stdin, - output: process.stdout, - terminal: false, - }); - - iface.on('line', async (line) => { - try { - const request = S.decodeSync(jsonRpc.RequestFromString)(line) - debug(`Got a request: ${line}`) - - try { - const response = await handleRequest(request, env) - respondOk(request.id, response) - } catch (err) { - debug("[nodejs] Error from request handler: ", err) - respondErr(request.id, { - code: 1, - message: err.stack ?? err.toString(), - }) - } - } catch (err) { - debug("Received non-json line: ", line); - console.error(err) - } + const env = S.decodeUnknownSync(Env)(process.env); + console.log("[env]", env); + + const iface = readline.createInterface({ + input: process.stdin, + output: process.stdout, + terminal: false, + }); - }); + iface.on("line", async (line) => { + try { + const request = S.decodeSync(jsonRpc.RequestFromString)(line); + debug(`Got a request: ${line}`); + + try { + const response = await handleRequest(request, env); + respondOk(request.id, response); + } catch (err) { + debug("[nodejs] Error from request handler: ", err); + respondErr(request.id, { + code: 1, + message: err.stack ?? err.toString(), + }); + } + } catch (err) { + debug("Received non-json line: ", line); + console.error(err); + } + }); } -const state: Record = {} - -async function handleRequest({ method, params }: jsonRpc.Request, env: Env): Promise { - switch (method) { - case 'initializeSchema': { - const { url, schema, schemaId, migrationScript } = params - const logs = [] as string[] - - const logCallback = (log) => { logs.push(log) } - - const driverAdapterManager = await initialiseDriverAdapterManager(env, migrationScript) - const engineType = env.EXTERNAL_TEST_EXECUTOR ?? 
'Napi' - - const { engine, adapter } = await initQe({ - engineType, - url, - driverAdapterManager,schema, - logCallback, - }) - await engine.connect('') - - state[schemaId] = { - engine, - driverAdapterManager, - adapter, - logs - } - return null - } - case 'query': { - debug("Got `query`", params) - const { query, schemaId, txId } = params - const engine = state[schemaId].engine - const result = await engine.query(JSON.stringify(query), "", txId) - - const parsedResult = JSON.parse(result) - if (parsedResult.errors) { - const error = parsedResult.errors[0]?.user_facing_error - if (error.error_code === 'P2036') { - const jsError = state[schemaId].adapter.errorRegistry.consumeError(error.meta.id) - if (!jsError) { - err(`Something went wrong. Engine reported external error with id ${error.meta.id}, but it was not registered.`) - } else { - err("got error response from the engine caused by the driver: ", jsError) - } - } - } - - debug("got response from engine: ", result) - // returning unparsed string: otherwise, some information gots lost during this round-trip. - // In particular, floating point without decimal part turn into integers - return result - } +const state: Record< + number, + { + engine: qe.QueryEngine; + driverAdapterManager: DriverAdaptersManager; + adapter: ErrorCapturingDriverAdapter | null; + logs: string[]; + } +> = {}; - case 'startTx': { - debug("Got `startTx", params) - const { schemaId, options } = params - const result = await state[schemaId].engine.startTransaction(JSON.stringify(options), "") - return JSON.parse(result) - } +async function handleRequest( + { method, params }: jsonRpc.Request, + env: Env +): Promise { + switch (method) { + case "initializeSchema": { + const { url, schema, schemaId, migrationScript } = params; + const logs = [] as string[]; - case 'commitTx': { - debug("Got `commitTx", params) - const { schemaId, txId } = params - const result = await state[schemaId].engine.commitTransaction(txId, '{}') - return JSON.parse(result) - } + const logCallback = (log) => { + logs.push(log); + }; - case 'rollbackTx': { - debug("Got `rollbackTx", params) - const { schemaId, txId } = params - const result = await state[schemaId].engine.rollbackTransaction(txId, '{}') - return JSON.parse(result) - } - case 'teardown': { - debug("Got `teardown", params) - const { schemaId } = params + const driverAdapterManager = await initialiseDriverAdapterManager( + env, + migrationScript + ); + const engineType = env.EXTERNAL_TEST_EXECUTOR ?? 
"Napi"; - await state[schemaId].engine.disconnect("") - await state[schemaId].driverAdapterManager.teardown() - delete state[schemaId] + const { engine, adapter } = await initQe({ + engineType, + url, + driverAdapterManager, + schema, + logCallback, + }); + await engine.connect(""); - return {} - } - case 'getLogs': { - const { schemaId } = params - return state[schemaId].logs - } - default: { - throw new Error(`Unknown method: \`${method}\``) + state[schemaId] = { + engine, + driverAdapterManager, + adapter, + logs, + }; + return null; + } + case "query": { + debug("Got `query`", params); + const { query, schemaId, txId } = params; + const engine = state[schemaId].engine; + const result = await engine.query(JSON.stringify(query), "", txId); + + const parsedResult = JSON.parse(result); + if (parsedResult.errors) { + const error = parsedResult.errors[0]?.user_facing_error; + if (error.error_code === "P2036") { + const jsError = state[schemaId].adapter?.errorRegistry.consumeError( + error.meta.id + ); + if (!jsError) { + err( + `Something went wrong. Engine reported external error with id ${error.meta.id}, but it was not registered.` + ); + } else { + err( + "got error response from the engine caused by the driver: ", + jsError + ); + } } + } + + debug("🟢 Engine response: ", result); + // returning unparsed string: otherwise, some information gots lost during this round-trip. + // In particular, floating point without decimal part turn into integers + return result; + } + + case "startTx": { + debug("Got `startTx", params); + const { schemaId, options } = params; + const result = await state[schemaId].engine.startTransaction( + JSON.stringify(options), + "" + ); + return JSON.parse(result); + } + + case "commitTx": { + debug("Got `commitTx", params); + const { schemaId, txId } = params; + const result = await state[schemaId].engine.commitTransaction(txId, "{}"); + return JSON.parse(result); + } + + case "rollbackTx": { + debug("Got `rollbackTx", params); + const { schemaId, txId } = params; + const result = await state[schemaId].engine.rollbackTransaction( + txId, + "{}" + ); + return JSON.parse(result); + } + case "teardown": { + debug("Got `teardown", params); + const { schemaId } = params; + + await state[schemaId].engine.disconnect(""); + await state[schemaId].driverAdapterManager.teardown(); + delete state[schemaId]; + + return {}; + } + case "getLogs": { + const { schemaId } = params; + return state[schemaId].logs; } + default: { + throw new Error(`Unknown method: \`${method}\``); + } + } } function respondErr(requestId: number, error: jsonRpc.RpcError) { - const msg: jsonRpc.ErrResponse = { - jsonrpc: '2.0', - id: requestId, - error, - } - console.log(JSON.stringify(msg)) + const msg: jsonRpc.ErrResponse = { + jsonrpc: "2.0", + id: requestId, + error, + }; + console.log(JSON.stringify(msg)); } function respondOk(requestId: number, payload: unknown) { - const msg: jsonRpc.OkResponse = { - jsonrpc: '2.0', - id: requestId, - result: payload - - }; - console.log(JSON.stringify(msg)) + const msg: jsonRpc.OkResponse = { + jsonrpc: "2.0", + id: requestId, + result: payload, + }; + console.log(JSON.stringify(msg)); } type InitQueryEngineParams = { - engineType: ExternalTestExecutor, - driverAdapterManager: DriverAdaptersManager, - url: string, - schema: string, - logCallback: qe.QueryLogCallback -} + engineType: ExternalTestExecutor; + driverAdapterManager: DriverAdaptersManager; + url: string; + schema: string; + logCallback: qe.QueryLogCallback; +}; async function initQe({ - engineType, - 
driverAdapterManager, - url, - schema, - logCallback + engineType, + driverAdapterManager, + url, + schema, + logCallback, }: InitQueryEngineParams) { - const adapter = await driverAdapterManager.connect({ url }) - const errorCapturingAdapter = bindAdapter(adapter) - const engineInstance = await qe.initQueryEngine(engineType, errorCapturingAdapter, schema, logCallback, debug) - - return { - engine: engineInstance, - adapter: errorCapturingAdapter, + if (process.env.EXTERNAL_TEST_EXECUTOR === "Mobile") { + if (process.env.MOBILE_EMULATOR_URL) { + url = process.env.MOBILE_EMULATOR_URL; } + const engine = createRNEngineConnector(url, schema, logCallback); + return { engine, adapter: null }; + } else { + const adapter = await driverAdapterManager.connect({ url }); + const errorCapturingAdapter = bindAdapter(adapter); + const engineInstance = await qe.initQueryEngine( + engineType, + errorCapturingAdapter, + schema, + logCallback, + debug + ); + + return { + engine: engineInstance, + adapter: errorCapturingAdapter, + }; + } } -main().catch(err) +main().catch(err); diff --git a/query-engine/query-engine-c-abi/.gitignore b/query-engine/query-engine-c-abi/.gitignore new file mode 100644 index 000000000000..2974fad5812d --- /dev/null +++ b/query-engine/query-engine-c-abi/.gitignore @@ -0,0 +1,7 @@ +QueryEngine.xcframework +simulator_fat +# Artifacts of the C ABI engine +*.tar.gz +openssl-3.1.4 +libs +include \ No newline at end of file diff --git a/query-engine/query-engine-c-abi/Cargo.toml b/query-engine/query-engine-c-abi/Cargo.toml new file mode 100644 index 000000000000..65ffd72c38cc --- /dev/null +++ b/query-engine/query-engine-c-abi/Cargo.toml @@ -0,0 +1,52 @@ +[package] +name = "query-engine-c-abi" +version = "0.1.0" +edition = "2021" + +[lib] +doc = false +crate-type = ["staticlib"] +name = "query_engine" + +[features] +metrics = ["query-engine-common/metrics"] + +[dependencies] +anyhow = "1" +async-trait = "0.1" +query-core = { path = "../core" } +request-handlers = { path = "../request-handlers", features = [ + "sqlite", + "native", +] } +query-connector = { path = "../connectors/query-connector" } +query-engine-common = { path = "../../libs/query-engine-common" } +user-facing-errors = { path = "../../libs/user-facing-errors" } +psl = { workspace = true, features = ["sqlite"] } +sql-connector = { path = "../connectors/sql-query-connector", package = "sql-query-connector" } +query-structure = { path = "../query-structure" } +chrono.workspace = true +quaint = { path = "../../quaint", default-features = false, features = [ + "sqlite", +] } +rusqlite = "0.29" +uuid.workspace = true +thiserror = "1" +connection-string.workspace = true +url = "2" +serde_json.workspace = true +serde.workspace = true +indoc.workspace = true + +tracing = "0.1" +tracing-subscriber = { version = "0.3" } +tracing-futures = "0.2" +tracing-opentelemetry = "0.17.3" +opentelemetry = { version = "0.17" } + +tokio.workspace = true +futures = "0.3" +once_cell = "1.19.0" + +[build-dependencies] +cbindgen = "0.24.0" diff --git a/query-engine/query-engine-c-abi/Makefile b/query-engine/query-engine-c-abi/Makefile new file mode 100644 index 000000000000..83e1d506c4e4 --- /dev/null +++ b/query-engine/query-engine-c-abi/Makefile @@ -0,0 +1,56 @@ +# rustup target add x86_64-apple-ios # intel simulator +# rustup target add aarch64-apple-ios # actual iOS +# rustup target add aarch64-apple-ios-sim # arm simulator + +# rustup target add aarch64-linux-android # Android arm 64 bits +# rustup target add x86_64-linux-android # Intel 64 
bits emulator
+# rustup target add armv7-linux-androideabi # Android arm 32 bits
+# rustup target add i686-linux-android # Intel 32 bits emulator
+
+ARCH_IOS_SIM = aarch64-apple-ios-sim
+ARCHS_IOS = x86_64-apple-ios aarch64-apple-ios aarch64-apple-ios-sim
+ARCHS_ANDROID = aarch64-linux-android armv7-linux-androideabi x86_64-linux-android i686-linux-android
+LIB = libquery_engine.a
+XCFRAMEWORK = QueryEngine.xcframework
+
+.PHONY: clean ios android $(ARCH_IOS_SIM) $(ARCHS_IOS) $(ARCHS_ANDROID) sim copy-ios nuke
+
+nuke:
+	rm -rf ../../target
+
+clean:
+	rm -rf QueryEngine.xcframework
+	rm -rf simulator_fat
+	mkdir simulator_fat
+	# rm -rf include
+	# mkdir include
+
+all: nuke ios android
+
+################# ANDROID #################
+android: clean $(ARCHS_ANDROID)
+	./copy-android.sh
+
+$(ARCHS_ANDROID): %:
+	./build-android-target.sh $@
+
+################# iOS #################
+ios: clean $(XCFRAMEWORK)
+
+sim: clean
+	cargo build --target $(ARCH_IOS_SIM)
+	xcodebuild -create-xcframework -library ../../target/$(ARCH_IOS_SIM)/debug/libquery_engine.a -headers include -output $(XCFRAMEWORK)
+	./copy-ios.sh
+
+sim-release: clean
+	cargo build --target $(ARCH_IOS_SIM) --release
+	xcodebuild -create-xcframework -library ../../target/$(ARCH_IOS_SIM)/release/libquery_engine.a -headers include -output $(XCFRAMEWORK)
+	./copy-ios.sh
+
+$(ARCHS_IOS): %:
+	cargo build --release --target $@
+
+$(XCFRAMEWORK): $(ARCHS_IOS)
+	lipo -create $(wildcard ../../target/x86_64-apple-ios/release/$(LIB)) $(wildcard ../../target/aarch64-apple-ios-sim/release/$(LIB)) -output simulator_fat/libquery_engine.a
+	xcodebuild -create-xcframework -library $(wildcard ../../target/aarch64-apple-ios/release/$(LIB)) -headers include -library simulator_fat/libquery_engine.a -headers include -output $@
+	./copy-ios.sh
\ No newline at end of file
diff --git a/query-engine/query-engine-c-abi/README.md b/query-engine/query-engine-c-abi/README.md
new file mode 100644
index 000000000000..89f9ef2b68ee
--- /dev/null
+++ b/query-engine/query-engine-c-abi/README.md
@@ -0,0 +1,56 @@
+# UNSTABLE/EXPERIMENTAL Query Engine C (compatible) ABI
+
+This version of the query engine exposes the Rust engine via C-callable functions. There are subtle differences in this implementation compared to the Node and Wasm versions. Although it is usable by any language that can operate with the C ABI, it is aimed at running Prisma on React Native, so the build scripts are geared towards that goal.
+
+## Setup
+
+You need to have Xcode, Java, Android's NDK (you can/should install it via Android Studio) and CMake installed on your machine to compile the engine. The Makefile contains the main entry points for building the different architectures and platforms. You also need to install the target Rust architectures. You can find the exact [process described here](https://ospfranco.com/post/2023/08/11/react-native,-rust-step-by-step-integration-guide/).
+
+- `make ios` → Builds the iOS libraries in release mode
+- `make sim` → Builds the simulator arch only in debug, much faster, meant for rapid development
+- `make android` → Builds all the Android archs
+- `make all` → Builds all the archs
+
+Once the libraries have been built, there are a couple of extra scripts (`copy-ios.sh` and `copy-android.sh`) that move the results of the compilation into a sibling of the parent folder (`react-native-prisma`), which is where they will be packaged and published to npm.
+
+The results of the compilation are static libraries (`.a`) as well as a generated C header file.
+
+A C header file (`include/query_engine.h`) is automatically generated during the compilation process via `cbindgen`. There is no need to manually modify this file; it will be automatically generated and packaged each time you compile the library. You need to mark the functions inside `engine.rs` as `extern "C"` so that the generator picks them up.
+
+### iOS
+
+iOS requires the use of `.xcframework` to package similar architectures (proper iOS and the 64-bit iOS simulator, thanks to M1 machines) without conflicts.
+
+## Base Path
+
+This query engine takes one additional parameter in the create function (the entry point of all operations): the `base_path` string param. This param is meant to allow the query engine to change its working directory to the passed path. This is required on iOS (and on the latest versions of Android) because the file system is sandboxed. The React Native client library that consumes this version of the engine passes the Library directory on iOS and the Databases folder on Android; both of these folders are within the sandbox and can be freely read and written. The implemented solution simply changes the working directory of the Rust code, which lets the query engine operate as if it were on a non-sandboxed platform, without changing implementation details or resorting to even hackier workarounds. It might have unintended consequences on the behavior of the engine though, so if you have any issues please report them.
+
+## Migrations
+
+This query engine version also contains parts of the schema engine. Previous versions of Prisma were meant to be run on the server by the developer to test migrations or execute them for a single server database. Now that we are targeting front-end platforms, it is required to be able to perform migrations ON-DEVICE and at RUNTIME.
+
+In order to enable this, there are some new functions exposed through the query engine API that call the schema engine.
+
+- `prisma_apply_pending_migrations` → Given a path, it will scan all the folders in alphabetical order, look inside each one for a `migration.sql` and execute it. It's equivalent to `prisma migrate dev` (it literally calls the same internal function)
+
+- `prisma_push_schema` → Will try to apply the passed schema to the database in an unsafe manner. Some data might be lost. It's equivalent to `prisma db push`
+
+## Usage
+
+Like any C API, returning multiple chunks of data is done by passing pointers (as SQLite does, for example). In particular, the query engine instantiation returns an opaque pointer allocated on the heap. You need to pass this pointer to each subsequent call to the interfaces to use the query engine functionality.
+
+Each operation returns an integer status code that indicates PRISMA_OK (0) if the operation finished correctly, or a different error code for each possible error.
+
+C calls are not compatible with tokio/async, so the C functions need to use `block_on` in order to keep synchronicity. If async functionality is wanted, the calling language/environment should spin up its own threads and call the functions there.
+
+While `block_on` might not be the most efficient way to achieve things, it keeps changes to the core query_engine functionality at a minimum.
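+
+To make the bridge concrete, here is a minimal sketch of what such a synchronous wrapper looks like on the Rust side, assuming it sits next to the definitions in `engine.rs`. The function name `prisma_connect` and its error handling are illustrative only (the real exported functions live in `engine.rs`), but the `#[no_mangle]` + `extern "C"` marking that cbindgen picks up and the `block_on` bridge onto the shared tokio runtime are the actual pattern:
+
+```rust
+use std::ffi::{c_char, c_int};
+
+// Hypothetical exported wrapper; the real signatures live in engine.rs.
+#[no_mangle]
+pub unsafe extern "C" fn prisma_connect(engine: *mut QueryEngine, trace: *const c_char) -> c_int {
+    // Guard against a missing engine pointer before dereferencing it.
+    if engine.is_null() {
+        return PRISMA_MISSING_POINTER;
+    }
+    let engine = &*engine;
+
+    // C callers are synchronous, so block the calling thread on the shared
+    // tokio runtime until the async core operation has completed.
+    match RUNTIME.block_on(engine.connect(trace)) {
+        Ok(()) => PRISMA_OK,
+        Err(_) => PRISMA_UNKNOWN_ERROR,
+    }
+}
+```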
+
+## OpenSSL Snafu
+
+The query engine (to be exact, the different database connectors) depends on OpenSSL; however, the Rust crate tries to compile the latest version, which [currently has a problem with Android armv7 architectures](https://github.com/openssl/openssl/pull/22181). In order to get around this, we have to download OpenSSL, patch it, and compile and link it manually. The download, patching and compiling are scripted via the `build-openssl.sh` script. You need to have the Android NDK installed and the `ANDROID_NDK_ROOT` variable set in your environment before running this script. You can find more info in the script itself. The libraries will be output in the `libs` folder with the specific structure the Rust compilation needs to finish linking OpenSSL in the main query engine compilation. The crate `openssl` then uses the compiled version by detecting the `OPENSSL_DIR` flag, which is set in the `build-android-target.sh` script.
+
+Once the issues upstream are merged, we can get rid of this custom compilation step.
+
+## Tests
+
+The tests for React Native are dependent on JSI, meaning they cannot be run outside a device/simulator. The example app contains an HTTP server, and the test setup has been reworked to send the requests via HTTP. The usual steps to run the tests are needed, but you also need to be running the app and replace the IP address that appears on the screen in the `executor/rn.ts` file.
diff --git a/query-engine/query-engine-c-abi/build-android-target.sh b/query-engine/query-engine-c-abi/build-android-target.sh
new file mode 100755
index 000000000000..823888f02473
--- /dev/null
+++ b/query-engine/query-engine-c-abi/build-android-target.sh
@@ -0,0 +1,62 @@
+#!/bin/bash
+
+set -ex
+
+TARGET="$1"
+
+if [ "$TARGET" = "" ]; then
+  echo "missing argument TARGET"
+  echo "Usage: $0 TARGET"
+  exit 1
+fi
+
+NDK_TARGET=$TARGET
+
+if [ "$TARGET" = "armv7-linux-androideabi" ]; then
+  NDK_TARGET="armv7a-linux-androideabi"
+fi
+
+OPENSSL_ARCH="android-arm64"
+# if [ "$TARGET" = "aarch64-linux-android" ]; then
+# fi
+
+if [ "$TARGET" = "x86_64-linux-android" ]; then
+  OPENSSL_ARCH="android-x86_64"
+fi
+
+if [ "$TARGET" = "armv7-linux-androideabi" ]; then
+  OPENSSL_ARCH="android-arm"
+fi
+
+if [ "$TARGET" = "i686-linux-android" ]; then
+  OPENSSL_ARCH="android-x86"
+fi
+
+
+API_VERSION="21"
+NDK_VERSION="26.0.10792818"
+NDK_HOST="darwin-x86_64"
+
+if [ -z "$ANDROID_SDK_ROOT" ]; then
+  echo "ANDROID SDK IS MISSING 🟥"
+  exit 1
+fi
+
+if [ -z "$NDK" ]; then
+  NDK="$ANDROID_SDK_ROOT/ndk/$NDK_VERSION"
+fi
+
+TOOLS="$NDK/toolchains/llvm/prebuilt/$NDK_HOST"
+
+CWD=$(pwd)
+
+export OPENSSL_DIR=$CWD/libs/$OPENSSL_ARCH
+export OPENSSL_STATIC=1
+
+# OPENSSL_DIR=./libs/android/clang/${OPENSSL_ARCH} \
+AR=$TOOLS/bin/llvm-ar \
+CC=$TOOLS/bin/${NDK_TARGET}${API_VERSION}-clang \
+CXX=$TOOLS/bin/${NDK_TARGET}${API_VERSION}-clang++ \
+RANLIB=$TOOLS/bin/llvm-ranlib \
+CXXFLAGS="--target=$NDK_TARGET" \
+cargo build --release --target "$TARGET"
\ No newline at end of file
diff --git a/query-engine/query-engine-c-abi/build-openssl.sh b/query-engine/query-engine-c-abi/build-openssl.sh
new file mode 100755
index 000000000000..878d4ed727ae
--- /dev/null
+++ b/query-engine/query-engine-c-abi/build-openssl.sh
@@ -0,0 +1,76 @@
+#!/bin/bash
+
+#set -v
+set -ex
+
+export OPENSSL_VERSION="openssl-3.1.4"
+rm -rf ${OPENSSL_VERSION}
+# check if the tar is already downloaded and if not download and extract it
+if [ ! -f ${OPENSSL_VERSION}.tar.gz ]; then
+  curl -O "https://www.openssl.org/source/${OPENSSL_VERSION}.tar.gz"
+  tar xfz "${OPENSSL_VERSION}.tar.gz"
+fi
+
+PATH_ORG=$PATH
+OUTPUT_DIR="libs"
+
+# Clean output:
+rm -rf $OUTPUT_DIR
+mkdir $OUTPUT_DIR
+
+build_android_clang() {
+
+  echo ""
+  echo "----- Build libcrypto & libssl.so for $1 -----"
+  echo ""
+
+  ARCHITECTURE=$1
+  TOOLCHAIN=$2
+
+  # Set toolchain
+  export TOOLCHAIN_ROOT=$ANDROID_NDK_HOME/toolchains/llvm/prebuilt/darwin-x86_64
+  export SYSROOT=$TOOLCHAIN_ROOT/sysroot
+  export CC=${TOOLCHAIN}21-clang
+  export CXX=${TOOLCHAIN}21-clang++
+  export CXXFLAGS="-fPIC"
+  export CPPFLAGS="-DANDROID -fPIC"
+
+  export PATH=$TOOLCHAIN_ROOT/bin:$SYSROOT/usr/local/bin:$PATH
+
+  cd "${OPENSSL_VERSION}"
+
+  ./Configure "$ARCHITECTURE" no-asm no-shared -D__ANDROID_API__=21
+
+  make clean
+  # Apply patch that fixes the armcap instruction
+  # Linux version
+  # sed -e '/[.]hidden.*OPENSSL_armcap_P/d; /[.]extern.*OPENSSL_armcap_P/ {p; s/extern/hidden/ }' -i -- crypto/*arm*pl crypto/*/asm/*arm*pl
+  # macOS version
+  sed -E -i '' -e '/[.]hidden.*OPENSSL_armcap_P/d' -e '/[.]extern.*OPENSSL_armcap_P/ {p; s/extern/hidden/; }' crypto/*arm*pl crypto/*/asm/*arm*pl
+
+  make
+
+  mkdir -p ../$OUTPUT_DIR/"${ARCHITECTURE}"/lib
+  mkdir -p ../$OUTPUT_DIR/"${ARCHITECTURE}"/include
+
+  # file libcrypto.so
+  # file libssl.so
+
+  cp libcrypto.a ../$OUTPUT_DIR/"${ARCHITECTURE}"/lib/libcrypto.a
+  cp libssl.a ../$OUTPUT_DIR/"${ARCHITECTURE}"/lib/libssl.a
+  # cp libcrypto.so ../$OUTPUT_DIR/${ARCHITECTURE}/lib/libcrypto.so
+  # cp libssl.so ../$OUTPUT_DIR/${ARCHITECTURE}/lib/libssl.so
+
+  cp -R include/openssl ../$OUTPUT_DIR/"${ARCHITECTURE}"/include
+
+  cd ..
+}
+
+build_android_clang "android-arm" "armv7a-linux-androideabi"
+build_android_clang "android-x86" "i686-linux-android"
+build_android_clang "android-x86_64" "x86_64-linux-android"
+build_android_clang "android-arm64" "aarch64-linux-android"
+
+export PATH=$PATH_ORG
+
+# pingme "OpenSSL finished compiling"
\ No newline at end of file
diff --git a/query-engine/query-engine-c-abi/build.rs b/query-engine/query-engine-c-abi/build.rs
new file mode 100644
index 000000000000..0739d31bf255
--- /dev/null
+++ b/query-engine/query-engine-c-abi/build.rs
@@ -0,0 +1,33 @@
+extern crate cbindgen;
+
+use std::env;
+use std::process::Command;
+
+fn store_git_commit_hash() {
+    let output = Command::new("git").args(["rev-parse", "HEAD"]).output().unwrap();
+    let git_hash = String::from_utf8(output.stdout).unwrap();
+    println!("cargo:rustc-env=GIT_HASH={git_hash}");
+}
+
+fn generate_c_headers() {
+    let crate_dir = env::var("CARGO_MANIFEST_DIR").unwrap();
+
+    cbindgen::Builder::new()
+        .with_crate(crate_dir)
+        .with_language(cbindgen::Language::C)
+        .with_include_guard("query_engine_h")
+        .with_autogen_warning("/* Warning, this file is autogenerated by cbindgen. Don't modify this manually. */")
+        .with_namespace("prisma")
+        .with_cpp_compat(true)
+        .generate()
+        .expect("Unable to generate bindings")
+        .write_to_file("include/query_engine.h");
+}
+
+fn main() {
+    // Tell Cargo that if the given file changes, to rerun this build script.
+    println!("cargo:rerun-if-changed=src/engine.rs");
+    // println!("✅ Running build.rs");
+    store_git_commit_hash();
+    generate_c_headers();
+}
diff --git a/query-engine/query-engine-c-abi/cargo-config.toml b/query-engine/query-engine-c-abi/cargo-config.toml
new file mode 100644
index 000000000000..68151bfbd7b6
--- /dev/null
+++ b/query-engine/query-engine-c-abi/cargo-config.toml
@@ -0,0 +1,14 @@
+# template file
+# move this to your home directory to allow rust to compile the library for android
+# All paths are relative to the user home folder
+[target.aarch64-linux-android]
+linker = "Library/Android/sdk/ndk/26.0.10792818/toolchains/llvm/prebuilt/darwin-x86_64/bin/aarch64-linux-android21-clang"
+
+[target.armv7-linux-androideabi]
+linker = "Library/Android/sdk/ndk/26.0.10792818/toolchains/llvm/prebuilt/darwin-x86_64/bin/armv7a-linux-androideabi21-clang"
+
+[target.i686-linux-android]
+linker = "Library/Android/sdk/ndk/26.0.10792818/toolchains/llvm/prebuilt/darwin-x86_64/bin/i686-linux-android21-clang"
+
+[target.x86_64-linux-android]
+linker = "Library/Android/sdk/ndk/26.0.10792818/toolchains/llvm/prebuilt/darwin-x86_64/bin/x86_64-linux-android21-clang"
diff --git a/query-engine/query-engine-c-abi/copy-android.sh b/query-engine/query-engine-c-abi/copy-android.sh
new file mode 100755
index 000000000000..20b1e86b8ec9
--- /dev/null
+++ b/query-engine/query-engine-c-abi/copy-android.sh
@@ -0,0 +1,18 @@
+#! /bin/bash
+
+TARGET_DIR=../../../react-native-prisma
+
+mkdir -p $TARGET_DIR/android/jniLibs
+mkdir -p $TARGET_DIR/android/jniLibs/x86
+mkdir -p $TARGET_DIR/android/jniLibs/x86_64
+mkdir -p $TARGET_DIR/android/jniLibs/arm64-v8a
+mkdir -p $TARGET_DIR/android/jniLibs/armeabi-v7a
+
+cp ../../target/i686-linux-android/release/libquery_engine.a $TARGET_DIR/android/jniLibs/x86/libquery_engine.a
+cp ../../target/aarch64-linux-android/release/libquery_engine.a $TARGET_DIR/android/jniLibs/arm64-v8a/libquery_engine.a
+cp ../../target/armv7-linux-androideabi/release/libquery_engine.a $TARGET_DIR/android/jniLibs/armeabi-v7a/libquery_engine.a
+cp ../../target/x86_64-linux-android/release/libquery_engine.a $TARGET_DIR/android/jniLibs/x86_64/libquery_engine.a
+
+cp ./include/query_engine.h $TARGET_DIR/cpp/query_engine.h
+
+# pingme "✅ Android compilation ready"
\ No newline at end of file
diff --git a/query-engine/query-engine-c-abi/copy-ios.sh b/query-engine/query-engine-c-abi/copy-ios.sh
new file mode 100755
index 000000000000..58195b42bb8a
--- /dev/null
+++ b/query-engine/query-engine-c-abi/copy-ios.sh
@@ -0,0 +1,12 @@
+#!/bin/sh
+
+set -ex
+
+TARGET_DIR=../../../react-native-prisma
+
+# This one is not actually necessary but XCode picks it up and mixes up versions
+cp ./include/query_engine.h $TARGET_DIR/cpp/query_engine.h
+
+cp -R QueryEngine.xcframework $TARGET_DIR
+
+# pingme "✅ Prisma iOS Finished"
\ No newline at end of file
diff --git a/query-engine/query-engine-c-abi/src/engine.rs b/query-engine/query-engine-c-abi/src/engine.rs
new file mode 100644
index 000000000000..69e8a3027cc6
--- /dev/null
+++ b/query-engine/query-engine-c-abi/src/engine.rs
@@ -0,0 +1,662 @@
+use crate::{
+    logger::Logger,
+    migrations::{
+        detect_failed_migrations, execute_migration_script, list_migration_dir, list_migrations,
+        record_migration_started, MigrationDirectory,
+    },
+};
+use once_cell::sync::Lazy;
+use query_core::{
+    protocol::EngineProtocol,
+    schema::{self},
+    telemetry, TransactionOptions, TxId,
+};
+use request_handlers::{load_executor, RequestBody, RequestHandler};
+use serde_json::json;
+use std::{
+    env,
+    ffi::{c_char, c_int, CStr, CString},
+    path::{Path, PathBuf},
+    ptr::null_mut,
+    sync::Arc,
+};
+use tokio::{
+    runtime::{self, Runtime},
+    sync::RwLock,
+};
+use tracing::{field, instrument::WithSubscriber, level_filters::LevelFilter, Instrument};
+
+use query_engine_common::Result;
+use query_engine_common::{
+    engine::{stringify_env_values, ConnectedEngine, ConnectedEngineNative, EngineBuilder, EngineBuilderNative, Inner},
+    error::ApiError,
+};
+use request_handlers::ConnectorKind;
+
+// The query engine code is async by nature; the C API, however, cannot expose async functions.
+// This runtime is here so the C API can block_on it and return responses in a synchronous manner.
+static RUNTIME: Lazy<Runtime> = Lazy::new(|| runtime::Builder::new_multi_thread().enable_all().build().unwrap());
+
+// C-like return codes
+#[no_mangle]
+pub static PRISMA_OK: i32 = 0;
+#[no_mangle]
+pub static PRISMA_UNKNOWN_ERROR: i32 = 1;
+#[no_mangle]
+pub static PRISMA_MISSING_POINTER: i32 = 2;
+
+/// This struct holds an instance of the Prisma query engine.
+/// You can instantiate as many as you want.
+pub struct QueryEngine {
+    inner: RwLock<Inner>,
+    base_path: Option<String>,
+    logger: Logger,
+    url: String,
+}
+
+#[repr(C)]
+pub struct ConstructorOptionsNative {
+    pub config_dir: *const c_char,
+}
+
+/// Parameters defining the construction of an engine.
+/// Unlike the Node version, this doesn't support the GraphQL protocol for talking to the Prisma client, since it is
+/// deprecated and going forward everything should be done via JSON-RPC.
+#[repr(C)]
+pub struct ConstructorOptions {
+    id: *const c_char,
+    datamodel: *const c_char,
+    // Used on iOS/Android to navigate to the sandboxed app folder where all file operations must happen,
+    // because the file systems are sandboxed. Take a look at the README for a more detailed explanation.
+    base_path: *const c_char,
+    log_level: *const c_char,
+    log_queries: bool,
+    datasource_overrides: *const c_char,
+    env: *const c_char,
+    ignore_env_var_errors: bool,
+    native: ConstructorOptionsNative,
+    log_callback: unsafe extern "C" fn(*const c_char, *const c_char),
+}
+
+fn get_cstr_safe(ptr: *const c_char) -> Option<String> {
+    if ptr.is_null() {
+        None
+    } else {
+        let cstr = unsafe { CStr::from_ptr(ptr) };
+        Some(String::from_utf8_lossy(cstr.to_bytes()).to_string())
+    }
+}
+
+fn map_known_error(err: query_core::CoreError) -> Result<String> {
+    let user_error: user_facing_errors::Error = err.into();
+    let value = serde_json::to_string(&user_error)?;
+
+    Ok(value)
+}
+
+fn serialize_api_error(err: ApiError) -> String {
+    let user_error: user_facing_errors::Error = err.into();
+    serde_json::to_string(&user_error).unwrap()
+}
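For orientation, engine construction from the calling side looks roughly like the sketch below. It assumes the `query_engine.h` header generated by the cbindgen step in build.rs exposes `ConstructorOptions`, `QueryEngine`, the `PRISMA_*` status codes and `prisma_create` (defined further down in this file) under these names; the datamodel, paths and JSON strings are placeholders, not a definitive usage contract.

```c
#include <stdbool.h>
#include <stdio.h>
#include "query_engine.h"

// Receives the engine id and a JSON-encoded log event (see logger.rs below).
static void log_callback(const char *id, const char *json_event) {
    fprintf(stderr, "[%s] %s\n", id, json_event);
}

int main(void) {
    ConstructorOptions options = {
        .id = "engine-1",
        .datamodel = "datasource db {\n  provider = \"sqlite\"\n  url = \"file:./app.db\"\n}",
        .base_path = NULL, // iOS/Android sandbox directory, when applicable
        .log_level = "info",
        .log_queries = false,
        .datasource_overrides = "{}",
        .env = "{}",
        .ignore_env_var_errors = true,
        .native = { .config_dir = "." },
        .log_callback = log_callback,
    };

    QueryEngine *engine = NULL;
    char *error = NULL;
    if (prisma_create(options, &engine, &error) != PRISMA_OK) {
        fprintf(stderr, "create failed: %s\n", error ? error : "unknown");
        return 1;
    }
    return 0;
}
```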
+// Struct that holds an internal Prisma engine.
+// The `inner` prop holds the internal state: it starts out as a Builder,
+// meaning it is not connected to the database; a call to connect is
+// necessary before it can start executing queries.
+impl QueryEngine {
+    /// Parse a valid datamodel and configuration to allow connecting later on.
+    pub fn new(constructor_options: ConstructorOptions) -> Result<Self> {
+        // Create a logging closure that can be passed around and called at any time.
+        // Safe scheduling should be taken care of by the code/language/environment calling this C-compatible API.
+        let engine_id = get_cstr_safe(constructor_options.id).expect("engine id cannot be missing");
+        let log_callback_c = constructor_options.log_callback;
+        let log_callback = move |msg: String| {
+            let id = CString::new(engine_id.clone()).unwrap();
+            let c_message = CString::new(msg).unwrap();
+            unsafe {
+                log_callback_c(id.as_ptr(), c_message.as_ptr());
+            }
+        };
+
+        let str_env = get_cstr_safe(constructor_options.env).expect("Environment missing");
+        let json_env = serde_json::from_str(str_env.as_str()).expect("Environment cannot be parsed");
+        let env = stringify_env_values(json_env)?; // we cannot trust anything JS sends us from process.env
+
+        let str_datasource_overrides =
+            get_cstr_safe(constructor_options.datasource_overrides).expect("Datasource overrides missing");
+        let json_datasource_overrides =
+            serde_json::from_str(str_datasource_overrides.as_str()).expect("Datasource overrides cannot be parsed");
+        let overrides: Vec<(_, _)> = stringify_env_values(json_datasource_overrides)
+            .unwrap()
+            .into_iter()
+            .collect();
+
+        let datamodel = get_cstr_safe(constructor_options.datamodel).expect("Datamodel must be present");
+        let mut schema = psl::validate(datamodel.into());
+        // Extract the URL for later use in apply_migrations.
+        let url = schema
+            .configuration
+            .datasources
+            .first()
+            .unwrap()
+            .load_url(|key| env::var(key).ok())
+            .unwrap();
+
+        let config = &mut schema.configuration;
+
+        schema
+            .diagnostics
+            .to_result()
+            .map_err(|err| ApiError::conversion(err, schema.db.source_assert_single()))?;
+
+        let base_path = get_cstr_safe(constructor_options.base_path);
+        match &base_path {
+            Some(path) => env::set_current_dir(Path::new(&path)).expect("Could not change directory"),
+            _ => tracing::trace!("No base path provided"),
+        }
+
+        config
+            .resolve_datasource_urls_query_engine(
+                &overrides,
+                |key| env.get(key).map(ToString::to_string),
+                // constructor_options.ignore_env_var_errors,
+                true,
+            )
+            .map_err(|err| ApiError::conversion(err, schema.db.source_assert_single()))?;
+
+        config
+            .validate_that_one_datasource_is_provided()
+            .map_err(|errors| ApiError::conversion(errors, schema.db.source_assert_single()))?;
+
+        let engine_protocol = EngineProtocol::Json;
+
+        let config_dir_string = get_cstr_safe(constructor_options.native.config_dir).expect("Config dir is expected");
+        let config_dir = PathBuf::from(config_dir_string);
+
+        let builder = EngineBuilder {
+            schema: Arc::new(schema),
+            engine_protocol,
+            native: EngineBuilderNative { config_dir, env },
+        };
+
+        let log_level_string = get_cstr_safe(constructor_options.log_level).unwrap();
+        let log_level = log_level_string.parse::<LevelFilter>().unwrap();
+        let logger = Logger::new(
+            constructor_options.log_queries,
+            log_level,
+            Box::new(log_callback),
+            false,
+        );
+
+        Ok(Self {
+            inner: RwLock::new(Inner::Builder(builder)),
+            base_path,
+            logger,
+            url,
+        })
+    }
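Continuing the sketch above, connecting is a single call once the engine exists; the trace argument carries a JSON-encoded trace context for telemetry (an empty JSON object is used as a placeholder here), and failures surface through the error out-pointer.

```c
char *error = NULL;
if (prisma_connect(engine, "{}", &error) != PRISMA_OK) {
    fprintf(stderr, "connect failed: %s\n", error ? error : "unknown");
    // The error string is allocated by the engine; releasing it is the caller's duty.
}
```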
+    pub async fn connect(&self, trace: *const c_char) -> Result<()> {
+        if let Some(base_path) = self.base_path.as_ref() {
+            env::set_current_dir(Path::new(&base_path)).expect("Could not change directory");
+        }
+
+        let trace_string = get_cstr_safe(trace).expect("Connect trace is missing");
+
+        let span = tracing::info_span!("prisma:engine:connect");
+        let _ = telemetry::helpers::set_parent_context_from_json_str(&span, &trace_string);
+
+        let mut inner = self.inner.write().await;
+        let builder = inner.as_builder()?;
+        let arced_schema = Arc::clone(&builder.schema);
+        let arced_schema_2 = Arc::clone(&builder.schema);
+
+        let engine = async move {
+            // We only support one data source & generator at the moment, so take the first one (default not exposed yet).
+            let data_source = arced_schema
+                .configuration
+                .datasources
+                .first()
+                .ok_or_else(|| ApiError::configuration("No valid data source found"))?;
+
+            let preview_features = arced_schema.configuration.preview_features();
+
+            let executor_fut = async {
+                let url = data_source
+                    .load_url_with_config_dir(&builder.native.config_dir, |key| {
+                        builder.native.env.get(key).map(ToString::to_string)
+                    })
+                    .map_err(|err| ApiError::Conversion(err, builder.schema.db.source_assert_single().to_owned()))?;
+                // This version of the query engine only supports connecting via the Rust drivers directly;
+                // support for JS driver adapters could be added, but it is left out for now.
+                let connector_kind = ConnectorKind::Rust {
+                    url,
+                    datasource: data_source,
+                };
+
+                let executor = load_executor(connector_kind, preview_features).await?;
+                let connector = executor.primary_connector();
+
+                let conn_span = tracing::info_span!(
+                    "prisma:engine:connection",
+                    user_facing = true,
+                    "db.type" = connector.name(),
+                );
+
+                connector.get_connection().instrument(conn_span).await?;
+
+                Result::<_>::Ok(executor)
+            };
+
+            let query_schema_span = tracing::info_span!("prisma:engine:schema");
+            let query_schema_fut = tokio::runtime::Handle::current()
+                .spawn_blocking(move || {
+                    let enable_raw_queries = true;
+                    schema::build(arced_schema_2, enable_raw_queries)
+                })
+                .instrument(query_schema_span);
+
+            let (query_schema, executor) = tokio::join!(query_schema_fut, executor_fut);
+
+            Ok(ConnectedEngine {
+                schema: builder.schema.clone(),
+                query_schema: Arc::new(query_schema.unwrap()),
+                executor: executor?,
+                engine_protocol: builder.engine_protocol,
+                native: ConnectedEngineNative {
+                    config_dir: builder.native.config_dir.clone(),
+                    env: builder.native.env.clone(),
+                    #[cfg(feature = "metrics")]
+                    metrics: None,
+                },
+            }) as Result<ConnectedEngine>
+        }
+        .instrument(span)
+        .await?;
+
+        *inner = Inner::Connected(engine);
+        Ok(())
+    }
+
+    pub async fn query(
+        &self,
+        body_str: *const c_char,
+        trace_str: *const c_char,
+        tx_id_str: *const c_char,
+    ) -> Result<String> {
+        let dispatcher = self.logger.dispatcher();
+
+        async move {
+            let inner = self.inner.read().await;
+            let engine = inner.as_engine()?;
+
+            let body = get_cstr_safe(body_str).expect("Prisma engine execute body is missing");
+            let tx_id = get_cstr_safe(tx_id_str);
+            let trace = get_cstr_safe(trace_str).expect("Trace is needed");
+
+            let query = RequestBody::try_from_str(&body, engine.engine_protocol())?;
+
+            let span = tracing::info_span!("prisma:engine", user_facing = true);
+            let trace_id = telemetry::helpers::set_parent_context_from_json_str(&span, &trace);
+
+            async move {
+                let handler = RequestHandler::new(engine.executor(), engine.query_schema(), engine.engine_protocol());
+                let response = handler.handle(query, tx_id.map(TxId::from), trace_id).await;
+
+                let serde_span = tracing::info_span!("prisma:engine:response_json_serialization", user_facing = true);
+                Ok(serde_span.in_scope(|| serde_json::to_string(&response))?)
+            }
+            .instrument(span)
+            .await
+        }
+        .with_subscriber(dispatcher)
+        .await
+    }
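A query from C then passes a JSON-protocol request body plus an optional transaction id. The body below is illustrative only; its exact shape is defined by Prisma's JSON protocol, not by this file.

```c
// Illustrative JSON-protocol request; the exact shape is defined by the
// Prisma JSON protocol rather than by this engine.
const char *body =
    "{\"modelName\":\"User\",\"action\":\"findMany\","
    "\"query\":{\"selection\":{\"$scalars\":true}}}";

char *error = NULL;
const char *response = prisma_query(engine, body, "{}", NULL, &error);
if (response == NULL) {
    fprintf(stderr, "query failed: %s\n", error ? error : "unknown");
} else {
    printf("%s\n", response);
    // Ownership of the response string passes to the caller (see the
    // CString::into_raw calls below); releasing it is the caller's job.
}
```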
+    /// Disconnect and drop the core. Can be reconnected later with `#connect`.
+    pub async fn disconnect(&self, trace_str: *const c_char) -> Result<()> {
+        let trace = get_cstr_safe(trace_str).expect("Trace is needed");
+        let dispatcher = self.logger.dispatcher();
+        async {
+            let span = tracing::info_span!("prisma:engine:disconnect");
+            let _ = telemetry::helpers::set_parent_context_from_json_str(&span, &trace);
+
+            async {
+                let mut inner = self.inner.write().await;
+                let engine = inner.as_engine()?;
+
+                let builder = EngineBuilder {
+                    schema: engine.schema.clone(),
+                    engine_protocol: engine.engine_protocol(),
+                    native: EngineBuilderNative {
+                        config_dir: engine.native.config_dir.clone(),
+                        env: engine.native.env.clone(),
+                    },
+                };
+
+                *inner = Inner::Builder(builder);
+
+                Ok(())
+            }
+            .instrument(span)
+            .await
+        }
+        .with_subscriber(dispatcher)
+        .await
+    }
+
+    async unsafe fn apply_migrations(&self, migration_folder_path: *const c_char) -> Result<()> {
+        if let Some(base_path) = self.base_path.as_ref() {
+            env::set_current_dir(Path::new(&base_path)).expect("Could not change directory");
+        }
+        let migration_folder_path_str = get_cstr_safe(migration_folder_path).unwrap();
+        let migration_folder_path = Path::new(&migration_folder_path_str);
+        let migrations_from_filesystem = list_migration_dir(migration_folder_path)?;
+
+        let url = self.url.clone();
+        let url_without_prefix = url.strip_prefix("file:").unwrap_or(&url);
+        let database_path = Path::new(url_without_prefix);
+
+        let migrations_from_database = list_migrations(database_path).unwrap();
+
+        let unapplied_migrations: Vec<&MigrationDirectory> = migrations_from_filesystem
+            .iter()
+            .filter(|fs_migration| {
+                !migrations_from_database
+                    .iter()
+                    .filter(|db_migration: &&crate::migrations::MigrationRecord| db_migration.finished_at.is_some())
+                    .any(|db_migration| fs_migration.migration_name() == db_migration.migration_name)
+            })
+            .collect();
+
+        detect_failed_migrations(&migrations_from_database)?;
+
+        let mut applied_migration_names: Vec<String> = Vec::with_capacity(unapplied_migrations.len());
+
+        for unapplied_migration in unapplied_migrations {
+            let script = unapplied_migration.read_migration_script()?;
+
+            record_migration_started(database_path, unapplied_migration.migration_name())?;
+
+            execute_migration_script(database_path, unapplied_migration.migration_name(), &script)?;
+
+            applied_migration_names.push(unapplied_migration.migration_name().to_owned());
+        }
+
+        Ok(())
+    }
+
+    /// If connected, attempts to start a transaction in the core and returns its ID.
+    pub async fn start_transaction(&self, input_str: *const c_char, trace_str: *const c_char) -> Result<String> {
+        let input = get_cstr_safe(input_str).expect("Input string missing");
+        let trace = get_cstr_safe(trace_str).expect("trace is required in transactions");
+        let inner = self.inner.read().await;
+        let engine = inner.as_engine()?;
+
+        let dispatcher = self.logger.dispatcher();
+
+        async move {
+            let span = tracing::info_span!("prisma:engine:itx_runner", user_facing = true, itx_id = field::Empty);
+            telemetry::helpers::set_parent_context_from_json_str(&span, &trace);
+
+            let tx_opts: TransactionOptions = serde_json::from_str(&input)?;
+            match engine
+                .executor()
+                .start_tx(engine.query_schema().clone(), engine.engine_protocol(), tx_opts)
+                .instrument(span)
+                .await
+            {
+                Ok(tx_id) => Ok(json!({ "id": tx_id.to_string() }).to_string()),
+                Err(err) => Ok(map_known_error(err)?),
+            }
+        }
+        .with_subscriber(dispatcher)
+        .await
+    }
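The interactive-transaction entry points compose with prisma_query through the returned id. The options payload deserializes into query_core's TransactionOptions; the `max_wait` and `timeout` field names below are an assumption to be checked against that struct.

```c
char *error = NULL;

// Field names in the options JSON are assumptions based on TransactionOptions.
const char *started = prisma_start_transaction(
    engine, "{\"max_wait\":2000,\"timeout\":5000}", "{}");
// On success the payload is {"id":"<tx id>"}; on failure it is a serialized
// user-facing error. A real caller would parse the JSON; the extracted id is
// faked here as a placeholder.
const char *tx_id = "<tx id parsed from `started`>";

const char *response = prisma_query(engine, body, "{}", tx_id, &error);

// "{}" signals success; anything else is a serialized error. Use
// prisma_rollback_transaction with the same arguments to abort instead.
const char *committed = prisma_commit_transaction(engine, tx_id, "{}");
```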
+    /// If connected, attempts to commit a transaction with id `tx_id` in the core.
+    pub async fn commit_transaction(&self, tx_id_str: *const c_char, _trace: *const c_char) -> Result<String> {
+        let tx_id = get_cstr_safe(tx_id_str).expect("Input string missing");
+        let inner = self.inner.read().await;
+        let engine = inner.as_engine()?;
+
+        let dispatcher = self.logger.dispatcher();
+
+        async move {
+            match engine.executor().commit_tx(TxId::from(tx_id)).await {
+                Ok(_) => Ok("{}".to_string()),
+                Err(err) => Ok(map_known_error(err)?),
+            }
+        }
+        .with_subscriber(dispatcher)
+        .await
+    }
+
+    /// If connected, attempts to roll back a transaction with id `tx_id` in the core.
+    pub async fn rollback_transaction(&self, tx_id_str: *const c_char, _trace: *const c_char) -> Result<String> {
+        let tx_id = get_cstr_safe(tx_id_str).expect("Input string missing");
+        // let trace = get_cstr_safe(trace_str).expect("trace is required in transactions");
+        let inner = self.inner.read().await;
+        let engine = inner.as_engine()?;
+
+        let dispatcher = self.logger.dispatcher();
+
+        async move {
+            match engine.executor().rollback_tx(TxId::from(tx_id)).await {
+                Ok(_) => Ok("{}".to_string()),
+                Err(err) => Ok(map_known_error(err)?),
+            }
+        }
+        .with_subscriber(dispatcher)
+        .await
+    }
+}
+
+//      _____ _____
+//     /\   |  __ \_   _|
+//    /  \  | |__) || |
+//   / /\ \ |  ___/ | |
+//  / ____ \| |    _| |_
+// /_/    \_\_|   |_____|
+//
+// This API is meant to be stateless: the boxed pointer to the query engine structure is returned to the
+// calling code and must be passed back on every subsequent call.
+//
+// We should be careful not to deallocate that pointer;
+// when adding a new function, remember to always call mem::forget.
+
+/// # Safety
+/// The calling context needs to pass a valid pointer that will store the reference.
+/// The calling context also needs to clear the pointer of the error string if it is not null.
+#[no_mangle]
+pub unsafe extern "C" fn prisma_create(
+    options: ConstructorOptions,
+    qe_ptr: *mut *mut QueryEngine,
+    error_string_ptr: *mut *mut c_char,
+) -> c_int {
+    if qe_ptr.is_null() {
+        return PRISMA_MISSING_POINTER;
+    }
+
+    let res = QueryEngine::new(options);
+    match res {
+        Ok(v) => {
+            *qe_ptr = Box::into_raw(Box::new(v));
+            PRISMA_OK
+        }
+        Err(err) => {
+            let error_string = CString::new(err.to_string()).unwrap();
+            *error_string_ptr = error_string.into_raw() as *mut c_char;
+            PRISMA_UNKNOWN_ERROR
+        }
+    }
+}
+
+/// # Safety
+///
+/// The calling context needs to pass a valid pointer that will store the reference to the error string.
+/// The calling context also needs to clear the pointer of the error string if it is not null.
+#[no_mangle]
+pub unsafe extern "C" fn prisma_connect(
+    qe: *mut QueryEngine,
+    trace: *const c_char,
+    error_string_ptr: *mut *mut c_char,
+) -> c_int {
+    let query_engine: Box<QueryEngine> = Box::from_raw(qe);
+    let result = RUNTIME.block_on(async { query_engine.connect(trace).await });
+
+    match result {
+        Ok(_engine) => {
+            std::mem::forget(query_engine);
+            *error_string_ptr = std::ptr::null_mut();
+            PRISMA_OK
+        }
+        Err(err) => {
+            let error_string = CString::new(err.to_string()).unwrap();
+            *error_string_ptr = error_string.into_raw() as *mut c_char;
+            std::mem::forget(query_engine);
+            PRISMA_UNKNOWN_ERROR
+        }
+    }
+}
+/// # Safety
+///
+/// The calling context needs to pass a valid pointer that will store the reference to the error string.
+/// The calling context also needs to clear the pointer of the error string if it is not null.
+#[no_mangle]
+pub unsafe extern "C" fn prisma_query(
+    qe: *mut QueryEngine,
+    body_str: *const c_char,
+    header_str: *const c_char,
+    tx_id_str: *const c_char,
+    error_string_ptr: *mut *mut c_char,
+) -> *const c_char {
+    let query_engine: Box<QueryEngine> = Box::from_raw(qe);
+    let result = RUNTIME.block_on(async { query_engine.query(body_str, header_str, tx_id_str).await });
+    match result {
+        Ok(query_result) => {
+            std::mem::forget(query_engine);
+            *error_string_ptr = std::ptr::null_mut();
+            CString::new(query_result).unwrap().into_raw()
+        }
+        Err(err) => {
+            let error_string = CString::new(err.to_string()).unwrap();
+            *error_string_ptr = error_string.into_raw() as *mut c_char;
+
+            std::mem::forget(query_engine);
+            null_mut()
+        }
+    }
+}
+
+/// # Safety
+///
+/// The calling context needs to pass a valid engine pointer. Errors are returned as a serialized
+/// user-facing error in the response string rather than through an error pointer.
+#[no_mangle]
+pub unsafe extern "C" fn prisma_start_transaction(
+    qe: *mut QueryEngine,
+    options_str: *const c_char,
+    header_str: *const c_char,
+) -> *const c_char {
+    let query_engine: Box<QueryEngine> = Box::from_raw(qe);
+    let result = RUNTIME.block_on(async { query_engine.start_transaction(options_str, header_str).await });
+    match result {
+        Ok(query_result) => {
+            std::mem::forget(query_engine);
+            CString::new(query_result).unwrap().into_raw()
+        }
+        Err(err) => {
+            std::mem::forget(query_engine);
+            CString::new(serialize_api_error(err)).unwrap().into_raw()
+        }
+    }
+}
+
+/// # Safety
+///
+/// The calling context needs to pass a valid engine pointer. Errors are returned as a serialized
+/// user-facing error in the response string.
+#[no_mangle]
+pub unsafe extern "C" fn prisma_commit_transaction(
+    qe: *mut QueryEngine,
+    tx_id_str: *const c_char,
+    header_str: *const c_char,
+) -> *const c_char {
+    let query_engine: Box<QueryEngine> = Box::from_raw(qe);
+    let result = RUNTIME.block_on(async { query_engine.commit_transaction(tx_id_str, header_str).await });
+    std::mem::forget(query_engine);
+    match result {
+        Ok(query_result) => CString::new(query_result).unwrap().into_raw(),
+        Err(err) => CString::new(serialize_api_error(err)).unwrap().into_raw(),
+    }
+}
+
+/// # Safety
+///
+/// The calling context needs to pass a valid engine pointer. Errors are returned as a serialized
+/// user-facing error in the response string.
+#[no_mangle]
+pub unsafe extern "C" fn prisma_rollback_transaction(
+    qe: *mut QueryEngine,
+    tx_id_str: *const c_char,
+    header_str: *const c_char,
+) -> *const c_char {
+    let query_engine: Box<QueryEngine> = Box::from_raw(qe);
+    let result = RUNTIME.block_on(async { query_engine.rollback_transaction(tx_id_str, header_str).await });
+    std::mem::forget(query_engine);
+    match result {
+        Ok(query_result) => CString::new(query_result).unwrap().into_raw(),
+        Err(err) => CString::new(serialize_api_error(err)).unwrap().into_raw(),
+    }
+}
+
+/// # Safety
+///
+/// The calling context needs to pass a valid engine pointer.
+#[no_mangle]
+pub unsafe extern "C" fn prisma_disconnect(qe: *mut QueryEngine, header_str: *const c_char) -> c_int {
+    let query_engine: Box<QueryEngine> = Box::from_raw(qe);
+    let result = RUNTIME.block_on(async { query_engine.disconnect(header_str).await });
+    std::mem::forget(query_engine);
+    match result {
+        Ok(_) => PRISMA_OK,
+        Err(_err) => PRISMA_UNKNOWN_ERROR,
+    }
+}
+/// # Safety
+///
+/// The calling context needs to pass a valid pointer that will store the reference to the error string.
+/// The calling context also needs to clear the pointer of the error string if it is not null.
+#[no_mangle]
+pub unsafe extern "C" fn prisma_apply_pending_migrations(
+    qe: *mut QueryEngine,
+    migration_folder_path: *const c_char,
+    error_string_ptr: *mut *mut c_char,
+) -> c_int {
+    let query_engine: Box<QueryEngine> = Box::from_raw(qe);
+    let result = RUNTIME.block_on(async { query_engine.apply_migrations(migration_folder_path).await });
+    match result {
+        Ok(_) => {
+            std::mem::forget(query_engine);
+            *error_string_ptr = std::ptr::null_mut();
+            PRISMA_OK
+        }
+        Err(err) => {
+            let error_string = CString::new(err.to_string()).unwrap();
+            *error_string_ptr = error_string.into_raw() as *mut c_char;
+            std::mem::forget(query_engine);
+            PRISMA_UNKNOWN_ERROR
+        }
+    }
+}
+
+/// # Safety
+///
+/// Will destroy the pointer to the query engine.
+#[no_mangle]
+pub unsafe extern "C" fn prisma_destroy(qe: *mut QueryEngine) -> c_int {
+    // Once the variable goes out of scope, it will be deallocated.
+    let _query_engine: Box<QueryEngine> = Box::from_raw(qe);
+    PRISMA_OK
+}
diff --git a/query-engine/query-engine-c-abi/src/lib.rs b/query-engine/query-engine-c-abi/src/lib.rs
new file mode 100644
index 000000000000..f41e48fdfad0
--- /dev/null
+++ b/query-engine/query-engine-c-abi/src/lib.rs
@@ -0,0 +1,5 @@
+mod engine;
+mod logger;
+mod migrations;
+
+mod tracer;
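Rounding off the consumer-side sketch: pending migrations are applied from a directory of `{timestamp}_{name}` folders (the layout expected by migrations.rs below), and teardown goes through prisma_disconnect and prisma_destroy; the migrations path is a placeholder.

```c
char *error = NULL;
if (prisma_apply_pending_migrations(engine, "/path/to/app/prisma/migrations", &error) != PRISMA_OK) {
    fprintf(stderr, "migrations failed: %s\n", error ? error : "unknown");
}

prisma_disconnect(engine, "{}");
prisma_destroy(engine); // Deallocates the engine; the pointer must not be used afterwards.
```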
diff --git a/query-engine/query-engine-c-abi/src/logger.rs b/query-engine/query-engine-c-abi/src/logger.rs
new file mode 100644
index 000000000000..1970262c207f
--- /dev/null
+++ b/query-engine/query-engine-c-abi/src/logger.rs
@@ -0,0 +1,174 @@
+use core::fmt;
+use query_core::telemetry;
+use query_engine_common::logger::StringCallback;
+// use query_engine_metrics::MetricRegistry;
+use serde_json::Value;
+use std::collections::BTreeMap;
+use std::sync::Arc;
+use tracing::{
+    field::{Field, Visit},
+    level_filters::LevelFilter,
+    Dispatch, Level, Subscriber,
+};
+use tracing_subscriber::{
+    filter::{filter_fn, FilterExt},
+    layer::SubscriberExt,
+    Layer, Registry,
+};
+
+pub(crate) type LogCallback = Box<dyn Fn(String) + Send + Sync + 'static>;
+
+pub(crate) struct Logger {
+    dispatcher: Dispatch,
+    // metrics: Option<MetricRegistry>,
+}
+
+impl Logger {
+    /// Creates a new logger using a callback layer.
+    pub fn new(log_queries: bool, log_level: LevelFilter, log_callback: LogCallback, enable_tracing: bool) -> Self {
+        let is_sql_query = filter_fn(|meta| {
+            meta.target() == "quaint::connector::metrics" && meta.fields().iter().any(|f| f.name() == "query")
+        });
+
+        // Is this a MongoDB query?
+        let is_mongo_query = filter_fn(|meta| meta.target() == "mongodb_query_connector::query");
+
+        // We need to filter the messages to send to our callback logging mechanism.
+        let filters = if log_queries {
+            // Filter trace query events (for the query log) or based on the defined log level.
+            is_sql_query.or(is_mongo_query).or(log_level).boxed()
+        } else {
+            // Filter based on the defined log level.
+            FilterExt::boxed(log_level)
+        };
+
+        let log_callback = Arc::new(log_callback);
+        let callback_layer = Box::new(CallbackLayer::new(Arc::clone(&log_callback)));
+
+        let is_user_trace = filter_fn(telemetry::helpers::user_facing_span_only_filter);
+        let tracer = crate::tracer::new_pipeline().install_simple(callback_layer);
+        let telemetry = if enable_tracing {
+            let telemetry = tracing_opentelemetry::layer()
+                .with_tracer(tracer)
+                .with_filter(is_user_trace);
+            Some(telemetry)
+        } else {
+            None
+        };
+
+        let layer = CallbackLayer::new(log_callback).with_filter(filters);
+
+        // let metrics = if enable_metrics {
+        //     query_engine_metrics::setup();
+        //     Some(MetricRegistry::new())
+        // } else {
+        //     None
+        // };
+
+        Self {
+            dispatcher: Dispatch::new(Registry::default().with(telemetry).with(layer)),
+            // metrics,
+        }
+    }
+
+    pub fn dispatcher(&self) -> Dispatch {
+        self.dispatcher.clone()
+    }
+
+    // pub fn metrics(&self) -> Option<MetricRegistry> {
+    //     self.metrics.clone()
+    // }
+}
+
+pub struct JsonVisitor<'a> {
+    values: BTreeMap<&'a str, Value>,
+}
+
+impl<'a> JsonVisitor<'a> {
+    pub fn new(level: &Level, target: &str) -> Self {
+        let mut values = BTreeMap::new();
+        values.insert("level", serde_json::Value::from(level.to_string()));
+
+        // NOTE: the previous version used module_path; this is not correct and it should be _target_.
+        values.insert("module_path", serde_json::Value::from(target));
+
+        JsonVisitor { values }
+    }
+}
+
+impl<'a> Visit for JsonVisitor<'a> {
+    fn record_debug(&mut self, field: &Field, value: &dyn fmt::Debug) {
+        match field.name() {
+            name if name.starts_with("r#") => {
+                self.values
+                    .insert(&name[2..], serde_json::Value::from(format!("{value:?}")));
+            }
+            name => {
+                self.values.insert(name, serde_json::Value::from(format!("{value:?}")));
+            }
+        };
+    }
+
+    fn record_i64(&mut self, field: &Field, value: i64) {
+        self.values.insert(field.name(), serde_json::Value::from(value));
+    }
+
+    fn record_u64(&mut self, field: &Field, value: u64) {
+        self.values.insert(field.name(), serde_json::Value::from(value));
+    }
+
+    fn record_bool(&mut self, field: &Field, value: bool) {
+        self.values.insert(field.name(), serde_json::Value::from(value));
+    }
+
+    fn record_str(&mut self, field: &Field, value: &str) {
+        self.values.insert(field.name(), serde_json::Value::from(value));
+    }
+}
+
+impl<'a> ToString for JsonVisitor<'a> {
+    fn to_string(&self) -> String {
+        serde_json::to_string(&self.values).unwrap()
+    }
+}
+
+#[derive(Clone)]
+pub(crate) struct CallbackLayer<F>
+where
+    F: Fn(String) + 'static,
+{
+    callback: Arc<F>,
+}
+
+impl<F> CallbackLayer<F>
+where
+    F: Fn(String) + 'static,
+{
+    pub fn new(callback: Arc<F>) -> Self {
+        CallbackLayer { callback }
+    }
+}
+
+impl<F> StringCallback for CallbackLayer<F>
+where
+    F: Fn(String) + 'static,
+{
+    fn call(&self, message: String) -> Result<(), String> {
+        let callback = &self.callback;
+        callback(message);
+        Ok(())
+    }
+}
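Since every event is flattened through JsonVisitor, the string handed to the registered C callback is a single JSON object whose keys mirror the visitor above (level, module_path, plus any recorded fields such as query). A consumer could do a crude severity split without a JSON parser, as in this sketch; a real consumer would parse the JSON properly.

```c
#include <stdio.h>
#include <string.h>

// Receives the engine id and one JSON-encoded event, e.g.
// {"level":"INFO","module_path":"quaint::connector::metrics","query":"SELECT ..."}
static void log_callback(const char *id, const char *json_event) {
    if (strstr(json_event, "\"level\":\"ERROR\"") != NULL) {
        fprintf(stderr, "[%s] %s\n", id, json_event);
    } else {
        printf("[%s] %s\n", id, json_event);
    }
}
```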
+// A tracing layer for sending logs to a JS callback; layers are composable, subscribers are not.
+impl<S, F> Layer<S> for CallbackLayer<F>
+where
+    S: Subscriber,
+    F: Fn(String) + 'static,
+{
+    fn on_event(&self, event: &tracing::Event<'_>, _ctx: tracing_subscriber::layer::Context<'_, S>) {
+        let mut visitor = JsonVisitor::new(event.metadata().level(), event.metadata().target());
+        event.record(&mut visitor);
+        _ = self.call(visitor.to_string());
+    }
+}
diff --git a/query-engine/query-engine-c-abi/src/migrations.rs b/query-engine/query-engine-c-abi/src/migrations.rs
new file mode 100644
index 000000000000..4cd374705ba5
--- /dev/null
+++ b/query-engine/query-engine-c-abi/src/migrations.rs
@@ -0,0 +1,185 @@
+use indoc::indoc;
+use query_engine_common::error::ApiError;
+use query_engine_common::Result;
+use rusqlite::Connection;
+use std::{
+    fs::{read_dir, DirEntry},
+    path::{Path, PathBuf},
+};
+
+pub type Timestamp = chrono::DateTime<chrono::Utc>;
+
+// TODO: there is a bunch of casting here that is not the most correct approach, but since this
+// is an out-of-tree branch, we do not want to change the common libraries yet.
+
+#[derive(Debug)]
+pub struct MigrationDirectory {
+    path: PathBuf,
+}
+
+impl MigrationDirectory {
+    /// The `{timestamp}_{name}` formatted migration name.
+    pub fn migration_name(&self) -> &str {
+        self.path
+            .file_name()
+            .expect("MigrationDirectory::migration_id")
+            .to_str()
+            .expect("Migration directory name is not valid UTF-8.")
+    }
+
+    /// Read the migration script to a string.
+    pub fn read_migration_script(&self) -> Result<String> {
+        let path = self.path.join("migration.sql");
+        std::fs::read_to_string(path).map_err(|err| ApiError::Configuration(err.to_string()))
+    }
+}
+
+impl From<DirEntry> for MigrationDirectory {
+    fn from(entry: DirEntry) -> MigrationDirectory {
+        MigrationDirectory { path: entry.path() }
+    }
+}
+
+/// An applied migration, as returned by list_migrations.
+#[derive(Debug, Clone)]
+pub struct MigrationRecord {
+    /// A unique, randomly generated identifier.
+    pub id: String,
+    /// The timestamp at which the migration completed *successfully*.
+    pub finished_at: Option<Timestamp>,
+    /// The name of the migration, i.e. the name of the migration directory
+    /// containing the migration script.
+    pub migration_name: String,
+    /// The time the migration started being applied.
+    pub started_at: Timestamp,
+    /// The time at which the migration failed.
+    pub failed_at: Option<Timestamp>,
+}
+pub fn list_migration_dir(migrations_directory_path: &Path) -> Result<Vec<MigrationDirectory>> {
+    let mut entries: Vec<MigrationDirectory> = Vec::new();
+
+    let read_dir_entries = match read_dir(migrations_directory_path) {
+        Ok(read_dir_entries) => read_dir_entries,
+        Err(err) => return Err(ApiError::Configuration(err.to_string())),
+    };
+
+    for entry in read_dir_entries {
+        let entry = entry.map_err(|err| ApiError::Configuration(err.to_string()))?;
+
+        if entry
+            .file_type()
+            .map_err(|err| ApiError::Configuration(err.to_string()))?
+            .is_dir()
+        {
+            entries.push(entry.into());
+        }
+    }
+
+    entries.sort_by(|a, b| a.migration_name().cmp(b.migration_name()));
+
+    Ok(entries)
+}
+
+pub fn detect_failed_migrations(migrations_from_database: &[MigrationRecord]) -> Result<()> {
+    tracing::debug!("Checking for failed migrations.");
+
+    let mut failed_migrations = migrations_from_database
+        .iter()
+        .filter(|migration| migration.finished_at.is_none() && migration.failed_at.is_none())
+        .peekable();
+
+    if failed_migrations.peek().is_none() {
+        Ok(())
+    } else {
+        Err(ApiError::Configuration(format!(
+            "Failed migration detected: {}",
+            failed_migrations.peek().unwrap().migration_name
+        )))
+    }
+}
+
+pub fn list_migrations(database_filename: &Path) -> Result<Vec<MigrationRecord>> {
+    let conn = Connection::open(database_filename).map_err(|err| ApiError::Configuration(err.to_string()))?;
+
+    // Check if the migrations table exists.
+    let table_exists = conn
+        .prepare("SELECT name FROM sqlite_master WHERE type='table' AND name='_prisma_migrations'")
+        .and_then(|mut stmt| stmt.query_row([], |_| Ok(())))
+        .is_ok();
+
+    // If the migrations table doesn't exist, create it.
+    if !table_exists {
+        let sql = indoc! {r#"
+            CREATE TABLE "_prisma_migrations" (
+                "id" TEXT PRIMARY KEY NOT NULL,
+                "finished_at" DATETIME,
+                "migration_name" TEXT NOT NULL,
+                "started_at" DATETIME NOT NULL DEFAULT current_timestamp,
+                "failed_at" DATETIME
+            );
+        "#};
+
+        conn.execute(sql, [])
+            .map_err(|err| ApiError::Configuration(err.to_string()))?;
+    }
+
+    let mut stmt = conn
+        .prepare("SELECT id, migration_name, started_at, finished_at, failed_at FROM _prisma_migrations")
+        .map_err(|err| ApiError::Configuration(err.to_string()))?;
+    let mut rows = stmt.query([]).map_err(|err| ApiError::Configuration(err.to_string()))?;
+
+    let mut entries: Vec<MigrationRecord> = Vec::new();
+
+    while let Some(row) = rows.next().unwrap() {
+        let id = row.get(0).unwrap();
+        let migration_name: String = row.get(1).unwrap();
+        let started_at: Timestamp = row.get(2).unwrap();
+        let finished_at: Option<Timestamp> = row.get(3).unwrap();
+        let failed_at: Option<Timestamp> = row.get(4).unwrap();
+
+        entries.push(MigrationRecord {
+            id,
+            migration_name,
+            started_at,
+            finished_at,
+            failed_at,
+        });
+    }
+
+    Ok(entries)
+}
+
+pub fn record_migration_started(database_filename: &Path, migration_name: &str) -> Result<()> {
+    let conn = Connection::open(database_filename).map_err(|err| ApiError::Configuration(err.to_string()))?;
+
+    let sql = "INSERT INTO _prisma_migrations (id, migration_name) VALUES (?, ?)";
+    conn.execute(sql, [uuid::Uuid::new_v4().to_string(), migration_name.to_owned()])
+        .map_err(|err| ApiError::Configuration(err.to_string()))?;
+
+    Ok(())
+}
+
+pub fn execute_migration_script(database_filename: &Path, migration_name: &str, script: &str) -> Result<()> {
+    let conn = Connection::open(database_filename).map_err(|err| ApiError::Configuration(err.to_string()))?;
+
+    let migration_result = conn.execute_batch(script);
+
+    match migration_result {
+        Ok(_) => {
+            let sql = "UPDATE _prisma_migrations SET finished_at = current_timestamp WHERE migration_name = ?";
+            conn.execute(sql, [migration_name])
+                .map_err(|err| ApiError::Configuration(err.to_string()))?;
+            Ok(())
+        }
+        Err(err) => {
+            let sql = "UPDATE _prisma_migrations SET failed_at = current_timestamp WHERE migration_name = ?";
+            conn.execute(sql, [migration_name])
+                .map_err(|err| ApiError::Configuration(err.to_string()))?;
+            Err(ApiError::Configuration(err.to_string()))
+        }
+    }
+}
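Because the bookkeeping lives in a plain SQLite table with the schema created above, it can also be inspected from C with the stock sqlite3 API; a small sketch, assuming the caller knows the database file path the engine uses.

```c
#include <stdio.h>
#include <sqlite3.h>

// Lists recorded migrations and whether each one finished, failed, or is still in progress.
static int dump_migrations(const char *db_path) {
    sqlite3 *db = NULL;
    sqlite3_stmt *stmt = NULL;
    if (sqlite3_open(db_path, &db) != SQLITE_OK) return 1;

    const char *sql =
        "SELECT migration_name, finished_at, failed_at FROM _prisma_migrations";
    if (sqlite3_prepare_v2(db, sql, -1, &stmt, NULL) != SQLITE_OK) {
        sqlite3_close(db);
        return 1;
    }
    while (sqlite3_step(stmt) == SQLITE_ROW) {
        const unsigned char *name = sqlite3_column_text(stmt, 0);
        int finished = sqlite3_column_type(stmt, 1) != SQLITE_NULL;
        int failed = sqlite3_column_type(stmt, 2) != SQLITE_NULL;
        printf("%s: %s\n", name, failed ? "failed" : finished ? "finished" : "in progress");
    }
    sqlite3_finalize(stmt);
    sqlite3_close(db);
    return 0;
}
```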
diff --git a/query-engine/query-engine-c-abi/src/tracer.rs b/query-engine/query-engine-c-abi/src/tracer.rs
new file mode 100644
index 000000000000..3bfae7b1e02d
--- /dev/null
+++ b/query-engine/query-engine-c-abi/src/tracer.rs
@@ -0,0 +1 @@
+pub(crate) use query_engine_common::tracer::*;
diff --git a/query-engine/query-engine-node-api/Cargo.toml b/query-engine/query-engine-node-api/Cargo.toml
index 83997d887dee..73abee76eaba 100644
--- a/query-engine/query-engine-node-api/Cargo.toml
+++ b/query-engine/query-engine-node-api/Cargo.toml
@@ -20,13 +20,19 @@ driver-adapters = [
 anyhow = "1"
 async-trait.workspace = true
 query-core = { path = "../core", features = ["metrics"] }
-request-handlers = { path = "../request-handlers", features = ["native"] }
+request-handlers = { path = "../request-handlers", features = [
+    "native",
+    "all",
+] }
 query-connector = { path = "../connectors/query-connector" }
-query-engine-common = { path = "../../libs/query-engine-common" }
+query-engine-common = { path = "../../libs/query-engine-common", features = [
+    "metrics",
+] }
 user-facing-errors = { path = "../../libs/user-facing-errors" }
 psl = { workspace = true, features = ["all"] }
 sql-connector = { path = "../connectors/sql-query-connector", package = "sql-query-connector", features = [
-    "native_all",
+    "native",
+    "all",
 ] }
 query-structure = { path = "../query-structure" }
 driver-adapters = { path = "../driver-adapters", features = [
diff --git a/query-engine/query-engine/Cargo.toml b/query-engine/query-engine/Cargo.toml
index f0f41fcbe4f0..72ff363e5ada 100644
--- a/query-engine/query-engine/Cargo.toml
+++ b/query-engine/query-engine/Cargo.toml
@@ -6,7 +6,7 @@ version = "0.1.0"
 [features]
 default = ["sql", "mongodb"]
 mongodb = ["mongodb-connector"]
-sql = ["sql-connector", "sql-connector/native_all"]
+sql = ["sql-connector", "sql-connector/all", "sql-connector/native"]
 vendored-openssl = ["sql-connector/vendored-openssl"]

 [dependencies]
@@ -21,7 +21,10 @@ psl = { workspace = true, features = ["all"] }
 graphql-parser = { git = "https://github.com/prisma/graphql-parser" }
 mongodb-connector = { path = "../connectors/mongodb-query-connector", optional = true, package = "mongodb-query-connector" }
 query-core = { path = "../core", features = ["metrics"] }
-request-handlers = { path = "../request-handlers", features = ["native"] }
+request-handlers = { path = "../request-handlers", features = [
+    "native",
+    "all",
+] }
 serde.workspace = true
 serde_json.workspace = true
 sql-connector = { path = "../connectors/sql-query-connector", optional = true, package = "sql-query-connector" }
diff --git a/query-engine/request-handlers/Cargo.toml b/query-engine/request-handlers/Cargo.toml
index 8c2277948193..802d72344f7e 100644
--- a/query-engine/request-handlers/Cargo.toml
+++ b/query-engine/request-handlers/Cargo.toml
@@ -37,13 +37,20 @@ sql = ["sql-query-connector"]
 postgresql = ["sql", "sql-query-connector/postgresql", "psl/postgresql"]
 mysql = ["sql", "sql-query-connector/mysql", "psl/mysql"]
 sqlite = ["sql", "sql-query-connector/sqlite", "psl/sqlite"]
+cockroachdb = ["sql", "sql-query-connector/postgresql", "psl/cockroachdb"]
+mssql = ["sql", "sql-query-connector/mssql", "psl/mssql"]
 driver-adapters = ["sql-query-connector/driver-adapters"]
-native = [
+native = ["sql-query-connector/native"]
+all = [
     "mongodb",
-    "sql",
+    "mysql",
+    "sqlite",
+    "postgresql",
+    "cockroachdb",
+    "mssql",
     "graphql-protocol",
+    "sql-query-connector/all",
     "psl/all",
-    "sql-query-connector/native_all",
     "query-core/metrics",
 ]
 graphql-protocol = ["query-core/graphql-protocol", "dep:graphql-parser"]
diff --git a/query-engine/request-handlers/src/load_executor.rs b/query-engine/request-handlers/src/load_executor.rs
index 6cb112383f41..0a289cd80adc 100644
--- a/query-engine/request-handlers/src/load_executor.rs
+++ b/query-engine/request-handlers/src/load_executor.rs
@@ -43,12 +43,16 @@ pub async fn load(
     }

     match datasource.active_provider {
+        #[cfg(feature = "sqlite")]
         p if SQLITE.is_provider(p) => native::sqlite(datasource, &url, features).await,
+        #[cfg(feature = "mysql")]
         p if MYSQL.is_provider(p) => native::mysql(datasource, &url, features).await,
+        #[cfg(feature = "postgresql")]
         p if POSTGRES.is_provider(p) => native::postgres(datasource, &url, features).await,
+        #[cfg(feature = "mssql")]
         p if MSSQL.is_provider(p) => native::mssql(datasource, &url, features).await,
+        #[cfg(feature = "cockroachdb")]
         p if COCKROACH.is_provider(p) => native::postgres(datasource, &url, features).await,
-        #[cfg(feature = "mongodb")]
         p if MONGODB.is_provider(p) => native::mongodb(datasource, &url, features).await,
@@ -76,6 +80,7 @@ mod native {
     use super::*;
     use tracing::trace;

+    #[cfg(feature = "sqlite")]
     pub(crate) async fn sqlite(
         source: &Datasource,
         url: &str,
@@ -87,6 +92,7 @@ mod native {
         Ok(executor_for(sqlite, false))
     }

+    #[cfg(feature = "postgresql")]
     pub(crate) async fn postgres(
         source: &Datasource,
         url: &str,
@@ -109,6 +115,7 @@ mod native {
         Ok(executor_for(psql, force_transactions))
     }

+    #[cfg(feature = "mysql")]
     pub(crate) async fn mysql(
         source: &Datasource,
         url: &str,
@@ -119,6 +126,7 @@ mod native {
         Ok(executor_for(mysql, false))
     }

+    #[cfg(feature = "mssql")]
     pub(crate) async fn mssql(
         source: &Datasource,
         url: &str,