diff --git a/.config/nextest.toml b/.config/nextest.toml index 090f4047e..283a80316 100644 --- a/.config/nextest.toml +++ b/.config/nextest.toml @@ -8,19 +8,31 @@ containerized = { max-threads = 8 } # Don't use too much memory engine = { max-threads = 2 } # Engine tests are very memory-hungry database = { max-threads = 8 } # Don't use too much memory +# NOTE: --> There is an incompatibility between nextest and sqlx: +# - nextest implies multiprocessing, +# - while sqlx has a lock on cleanup within the current process +# (https://github.com/launchbadge/sqlx/pull/2640#issuecomment-1659455042). + # TODO: Delete this workaround when this PR is merged: # - Fix: nextest cleanup race condition by bonega # https://github.com/launchbadge/sqlx/pull/3334 # -# NOTE: There is an incompatibility between nextest and sqlx: -# - nextest implies multiprocessing, -# - while sqlx has a lock on cleanup within the current process -# (https://github.com/launchbadge/sqlx/pull/2640#issuecomment-1659455042). [[profile.default.overrides]] filter = "test(::mysql::)" test-group = "mysql" retries = { count = 3, backoff = "exponential", delay = "3s" } +[[profile.default.overrides]] +filter = "test(::postgres::)" +retries = { count = 3, backoff = "exponential", delay = "3s" } +# NOTE: <-- There is an incompatibility between nextest and sqlx + +# NOTE: Periodic missing rows when the system is under load +# https://github.com/kamu-data/kamu-engine-risingwave/issues/7 +[[profile.default.overrides]] +filter = "test(::risingwave::)" +retries = { count = 3, backoff = "exponential", delay = "3s" } + [[profile.default.overrides]] filter = "test(::setup::)" test-group = "setup" diff --git a/CHANGELOG.md b/CHANGELOG.md index d0a3119c2..e88588776 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -11,6 +11,32 @@ Recommendation: for ease of reading, use the following order: - Fixed --> +## [Unreleased] +### Added +- Added (or expanded) E2E tests for: + - `kamu config` command + - `kamu init` command + - `kamu add` command + - `kamu rename` command + - `kamu ingest` command + - `kamu inspect` command + - `kamu log` command + - `kamu new` command + - `kamu reset` command + - `kamu search` command + - `kamu sql` command + - `kamu system gc` command + - `kamu system info` command + - `kamu system diagnose` command + - `kamu tail` command + - `kamu login` command + - `kamu logout` command + - `kamu push` command + - `kamu pull` command +- E2E: HTTP middleware is implemented, which improves stability of E2E tests +### Fixed +- `kamu add`: fixed behavior when using `--stdin` and `--name` arguments + ## [0.205.0] - 2024-10-15 ### Changed - `kamu push ` command now can be called without `--to` reference and Alias or Remote dataset repository will be used as destination diff --git a/Cargo.lock b/Cargo.lock index 6605ab05e..e8b8a5fc2 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -1140,8 +1140,7 @@ checksum = "71938f30533e4d95a6d17aa530939da3842c2ab6f4f84b9dae68447e4129f74a" [[package]] name = "assert_cmd" version = "2.0.16" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "dc1835b7f27878de8525dc71410b5a31cdcc5f230aed5ba5df968e09c201b23d" +source = "git+https://github.com/kamu-data/assert_cmd?branch=deactivate-output-truncation#c83b0859dcbce6f9bb941ea07b615cc90aebd003" dependencies = [ "anstyle", "bstr", @@ -5373,6 +5372,7 @@ dependencies = [ "futures", "headers", "http 1.1.0", + "http-body-util", "http-common", "hyper 1.4.1", "indoc 2.0.5", @@ -5690,6 +5690,7 @@ dependencies = [ "internal-error", 
"kamu-cli-e2e-common-macros", "kamu-cli-puppet", + "lazy_static", "opendatafabric", "pretty_assertions", "regex", @@ -5762,6 +5763,7 @@ dependencies = [ "kamu-cli-e2e-common", "kamu-cli-puppet", "opendatafabric", + "pretty_assertions", "reqwest", "tokio", "tokio-retry", @@ -5789,8 +5791,10 @@ dependencies = [ "async-trait", "chrono", "datafusion", + "indoc 2.0.5", "kamu-data-utils", "opendatafabric", + "pretty_assertions", "serde", "serde_json", "tempfile", diff --git a/Cargo.toml b/Cargo.toml index 6be5b77ce..5268799ab 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -271,3 +271,4 @@ debug = "line-tables-only" # datafusion-odata = { git = 'https://github.com/kamu-data/datafusion-odata.git', branch = '42.0.0-axum-0.6' } # datafusion-ethers = { git = "https://github.com/kamu-data/datafusion-ethers.git", tag = "42.0.0" } # object_store = { git = 'https://github.com/s373r/arrow-rs', branch = 'add-debug-logs', package = "object_store" } +assert_cmd = { git = 'https://github.com/kamu-data/assert_cmd', branch = "deactivate-output-truncation" } diff --git a/resources/cli-reference.md b/resources/cli-reference.md index 0355784ae..dea607fce 100644 --- a/resources/cli-reference.md +++ b/resources/cli-reference.md @@ -20,7 +20,7 @@ To regenerate this schema from existing code, use the following command: * `inspect` — Group of commands for exploring dataset metadata * `list [ls]` — List all datasets in the workspace * `log` — Shows dataset metadata history -* `login` — Authentiates with a remote ODF server interactively +* `login` — Authenticates with a remote ODF server interactively * `logout` — Logs out from a remote Kamu server * `new` — Creates a new dataset manifest from a template * `notebook` — Starts the notebook server for exploring the data in the workspace @@ -508,7 +508,7 @@ Using a filter to inspect blocks containing query changes of a derivative datase ## `kamu login` -Authentiates with a remote ODF server interactively +Authenticates with a remote ODF server interactively **Usage:** `kamu login [OPTIONS] [SERVER] [COMMAND]` diff --git a/src/adapter/graphql/src/mutations/datasets_mut.rs b/src/adapter/graphql/src/mutations/datasets_mut.rs index aa023cb81..9357a72d0 100644 --- a/src/adapter/graphql/src/mutations/datasets_mut.rs +++ b/src/adapter/graphql/src/mutations/datasets_mut.rs @@ -110,6 +110,8 @@ impl DatasetsMut { } // TODO: Multi-tenancy + // https://github.com/kamu-data/kamu-cli/issues/891 + // TODO: Multi-tenant resolution for derivative dataset inputs (should it only // work by ID?) 
     #[allow(unused_variables)]
diff --git a/src/adapter/http/Cargo.toml b/src/adapter/http/Cargo.toml
index ffa7d83b8..1bff300df 100644
--- a/src/adapter/http/Cargo.toml
+++ b/src/adapter/http/Cargo.toml
@@ -21,6 +21,12 @@ workspace = true
 doctest = false
 
 
+[features]
+default = []
+
+e2e = ["dep:messaging-outbox", "dep:http-body-util"]
+
+
 [dependencies]
 database-common = { workspace = true }
 database-common-macros = { workspace = true }
@@ -86,6 +92,11 @@ tracing = "0.1"
 url = { version = "2", features = ["serde"] }
 uuid = { version = "1", default-features = false, features = ["v4"] }
 
+# Optional
+messaging-outbox = { optional = true, workspace = true }
+
+http-body-util = { optional = true, version = "0.1" }
+
 
 [dev-dependencies]
 container-runtime = { workspace = true }
diff --git a/src/adapter/http/src/e2e/e2e_middleware.rs b/src/adapter/http/src/e2e/e2e_middleware.rs
new file mode 100644
index 000000000..22ab277d2
--- /dev/null
+++ b/src/adapter/http/src/e2e/e2e_middleware.rs
@@ -0,0 +1,105 @@
+// Copyright Kamu Data, Inc. and contributors. All rights reserved.
+//
+// Use of this software is governed by the Business Source License
+// included in the LICENSE file.
+//
+// As of the Change Date specified in that file, in accordance with
+// the Business Source License, use of this software will be governed
+// by the Apache License, Version 2.0.
+
+use axum::body::{Body, Bytes};
+use axum::extract::Request;
+use axum::middleware::Next;
+use axum::response::Response;
+use http::Method;
+use http_common::ApiError;
+use serde::Deserialize;
+
+////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
+
+/// Middleware that invokes Outbox messages processing for mutable requests
+pub async fn e2e_middleware_fn(request: Request, next: Next) -> Result<Response, ApiError> {
+    let base_catalog = request
+        .extensions()
+        .get::<dill::Catalog>()
+        .cloned()
+        .expect("Catalog not found in http server extensions");
+
+    let (is_mutable_request, request) = analyze_request_for_mutability(request).await?;
+    let response = next.run(request).await;
+
+    if is_mutable_request && response.status().is_success() {
+        let outbox_executor = base_catalog
+            .get_one::<messaging_outbox::OutboxExecutor>()
+            .unwrap();
+
+        outbox_executor.run_while_has_tasks().await?;
+    }
+
+    Ok(response)
+}
+
+////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
+
+async fn analyze_request_for_mutability(request: Request) -> Result<(bool, Request), ApiError> {
+    {
+        let is_not_modifying_requests = request.method() != Method::POST;
+
+        if is_not_modifying_requests {
+            return Ok((false, request));
+        }
+    }
+    {
+        let is_rest_api_post_request = request.uri().path() != "/graphql";
+
+        if is_rest_api_post_request {
+            return Ok((true, request));
+        }
+    }
+    {
+        // In the case of GQL, we check whether the query is mutable or not
+        let (request_parts, request_body) = request.into_parts();
+        let buffered_request_body = buffer_request_body(request_body).await?;
+
+        let is_mutating_gql = if let Ok(body) = std::str::from_utf8(&buffered_request_body) {
+            let gql_request = serde_json::from_str::<SimplifiedGqlRequest>(body)
+                .map_err(ApiError::bad_request)?;
+
+            gql_request.query.starts_with("mutation")
+        } else {
+            false
+        };
+
+        let request = Request::from_parts(request_parts, Body::from(buffered_request_body));
+
+        Ok((is_mutating_gql, request))
+    }
+}
+
+////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
+
+async fn buffer_request_body<B>(request_body: B) -> Result<Bytes, ApiError>
+where
+    B: axum::body::HttpBody<Data = Bytes>,
+    B::Error: std::error::Error + Send + Sync + 'static,
+{
+    use http_body_util::BodyExt;
+
+    let body_bytes = match request_body.collect().await {
+        Ok(collected) => collected.to_bytes(),
+        Err(e) => {
+            return Err(ApiError::bad_request(e));
+        }
+    };
+
+    Ok(body_bytes)
+}
+
+////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
+
+#[derive(Debug, Deserialize)]
+struct SimplifiedGqlRequest {
+    query: String,
+}
+
+////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
diff --git a/src/adapter/http/src/e2e/mod.rs b/src/adapter/http/src/e2e/mod.rs
index c18d4f36a..1aab87b15 100644
--- a/src/adapter/http/src/e2e/mod.rs
+++ b/src/adapter/http/src/e2e/mod.rs
@@ -7,5 +7,8 @@
 // the Business Source License, use of this software will be governed
 // by the Apache License, Version 2.0.
 
+mod e2e_middleware;
 mod e2e_router;
+
+pub use e2e_middleware::*;
 pub use e2e_router::*;
diff --git a/src/adapter/http/src/lib.rs b/src/adapter/http/src/lib.rs
index 0a138e055..7ab28f685 100644
--- a/src/adapter/http/src/lib.rs
+++ b/src/adapter/http/src/lib.rs
@@ -18,6 +18,7 @@ mod access_token;
 pub use access_token::*;
 mod axum_utils;
 pub mod data;
+#[cfg(feature = "e2e")]
 pub mod e2e;
 mod simple_protocol;
 pub mod smart_protocol;
diff --git a/src/app/cli/Cargo.toml b/src/app/cli/Cargo.toml
index a3d507bde..17d87241a 100644
--- a/src/app/cli/Cargo.toml
+++ b/src/app/cli/Cargo.toml
@@ -57,7 +57,7 @@ kamu-data-utils = { workspace = true }
 kamu-adapter-auth-oso = { workspace = true }
 kamu-adapter-flight-sql = { optional = true, workspace = true }
 kamu-adapter-graphql = { workspace = true }
-kamu-adapter-http = { workspace = true }
+kamu-adapter-http = { workspace = true, features = ["e2e"], default-features = false }
 kamu-adapter-oauth = { workspace = true }
 kamu-adapter-odata = { workspace = true }
 kamu-datafusion-cli = { workspace = true }
diff --git a/src/app/cli/src/cli.rs b/src/app/cli/src/cli.rs
index 11610e789..187ea0e3c 100644
--- a/src/app/cli/src/cli.rs
+++ b/src/app/cli/src/cli.rs
@@ -634,7 +634,7 @@ pub struct Log {
 
 ////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
 
-/// Authentiates with a remote ODF server interactively
+/// Authenticates with a remote ODF server interactively
 #[derive(Debug, clap::Args)]
 pub struct Login {
     #[command(subcommand)]
diff --git a/src/app/cli/src/cli_commands.rs b/src/app/cli/src/cli_commands.rs
index 09c790b6b..3dbeaf4f1 100644
--- a/src/app/cli/src/cli_commands.rs
+++ b/src/app/cli/src/cli_commands.rs
@@ -127,7 +127,7 @@ pub fn get_command(
             }
         }
         cli::Command::Inspect(c) => match c.subcommand {
-            cli::InspectSubCommand::Lineage(sc) => Box::new(LineageCommand::new(
+            cli::InspectSubCommand::Lineage(sc) => Box::new(InspectLineageCommand::new(
                 cli_catalog.get_one()?,
                 cli_catalog.get_one()?,
                 cli_catalog.get_one()?,
diff --git a/src/app/cli/src/commands/add_command.rs b/src/app/cli/src/commands/add_command.rs
index 19b387831..1a863f354 100644
--- a/src/app/cli/src/commands/add_command.rs
+++ b/src/app/cli/src/commands/add_command.rs
@@ -223,7 +223,8 @@ impl Command for AddCommand {
                 "No manifest references or paths were provided",
             ));
         }
-        if self.name.is_some() && (self.recursive || self.snapshot_refs.len() != 1) {
+        if self.name.is_some() && (self.recursive || !(self.snapshot_refs.len() == 1 ||
self.stdin)) + { return Err(CLIError::usage_error( "Name override can be used only when adding a single manifest", )); diff --git a/src/app/cli/src/commands/ingest_command.rs b/src/app/cli/src/commands/ingest_command.rs index bbffd7ca2..67471f9f0 100644 --- a/src/app/cli/src/commands/ingest_command.rs +++ b/src/app/cli/src/commands/ingest_command.rs @@ -145,6 +145,8 @@ impl Command for IngestCommand { _ => Ok(()), }?; + // TODO: `kamu ingest`: implement `--recursive` mode + // https://github.com/kamu-data/kamu-cli/issues/886 if self.recursive { unimplemented!("Sorry, recursive ingest is not yet implemented") } diff --git a/src/app/cli/src/commands/lineage_command.rs b/src/app/cli/src/commands/inspect_lineage_command.rs similarity index 99% rename from src/app/cli/src/commands/lineage_command.rs rename to src/app/cli/src/commands/inspect_lineage_command.rs index 1503238f9..c1200342b 100644 --- a/src/app/cli/src/commands/lineage_command.rs +++ b/src/app/cli/src/commands/inspect_lineage_command.rs @@ -31,7 +31,7 @@ pub enum LineageOutputFormat { //////////////////////////////////////////////////////////////////////////////////////////////////////////////////////// -pub struct LineageCommand { +pub struct InspectLineageCommand { dataset_repo: Arc, provenance_svc: Arc, workspace_layout: Arc, @@ -41,7 +41,7 @@ pub struct LineageCommand { output_config: Arc, } -impl LineageCommand { +impl InspectLineageCommand { pub fn new( dataset_repo: Arc, provenance_svc: Arc, @@ -94,7 +94,7 @@ impl LineageCommand { // TODO: Support temporality and evolution #[async_trait::async_trait(?Send)] -impl Command for LineageCommand { +impl Command for InspectLineageCommand { async fn run(&mut self) -> Result<(), CLIError> { use futures::{StreamExt, TryStreamExt}; let mut dataset_handles: Vec<_> = if self.dataset_refs.is_empty() { diff --git a/src/app/cli/src/commands/log_command.rs b/src/app/cli/src/commands/log_command.rs index 87561fe55..ca5852b20 100644 --- a/src/app/cli/src/commands/log_command.rs +++ b/src/app/cli/src/commands/log_command.rs @@ -29,6 +29,8 @@ use crate::output::OutputConfig; pub enum MetadataLogOutputFormat { Shell, Yaml, + // TODO: `kamu log`: support `--output-format json` + // https://github.com/kamu-data/kamu-cli/issues/887 } //////////////////////////////////////////////////////////////////////////////////////////////////////////////////////// diff --git a/src/app/cli/src/commands/login_silent_command.rs b/src/app/cli/src/commands/login_silent_command.rs index f67160ef3..daef6049f 100644 --- a/src/app/cli/src/commands/login_silent_command.rs +++ b/src/app/cli/src/commands/login_silent_command.rs @@ -16,16 +16,19 @@ use crate::{odf_server, CLIError, Command}; //////////////////////////////////////////////////////////////////////////////////////////////////////////////////////// +#[derive(Debug)] pub enum LoginSilentMode { OAuth(LoginSilentModeOAuth), Password(LoginSilentModePassword), } +#[derive(Debug)] pub struct LoginSilentModeOAuth { pub provider: String, pub access_token: String, } +#[derive(Debug)] pub struct LoginSilentModePassword { pub login: String, pub password: String, diff --git a/src/app/cli/src/commands/mod.rs b/src/app/cli/src/commands/mod.rs index 5525e8f3d..385ad0a10 100644 --- a/src/app/cli/src/commands/mod.rs +++ b/src/app/cli/src/commands/mod.rs @@ -20,9 +20,9 @@ mod delete_command; mod gc_command; mod ingest_command; mod init_command; +mod inspect_lineage_command; mod inspect_query_command; mod inspect_schema_command; -mod lineage_command; mod list_command; mod 
log_command; mod login_command; @@ -71,9 +71,9 @@ pub use delete_command::*; pub use gc_command::*; pub use ingest_command::*; pub use init_command::*; +pub use inspect_lineage_command::*; pub use inspect_query_command::*; pub use inspect_schema_command::*; -pub use lineage_command::*; pub use list_command::*; pub use log_command::*; pub use login_command::*; diff --git a/src/app/cli/src/commands/reset_command.rs b/src/app/cli/src/commands/reset_command.rs index 8a4ba34d5..32f69a4df 100644 --- a/src/app/cli/src/commands/reset_command.rs +++ b/src/app/cli/src/commands/reset_command.rs @@ -15,6 +15,8 @@ use opendatafabric::*; use super::{CLIError, Command}; use crate::Interact; +//////////////////////////////////////////////////////////////////////////////////////////////////////////////////////// + pub struct ResetCommand { interact: Arc, dataset_repo: Arc, @@ -66,3 +68,5 @@ impl Command for ResetCommand { Ok(()) } } + +//////////////////////////////////////////////////////////////////////////////////////////////////////////////////////// diff --git a/src/app/cli/src/commands/tail_command.rs b/src/app/cli/src/commands/tail_command.rs index e715e71ca..b025ceeb3 100644 --- a/src/app/cli/src/commands/tail_command.rs +++ b/src/app/cli/src/commands/tail_command.rs @@ -17,6 +17,8 @@ use opendatafabric::*; use super::{CLIError, Command}; use crate::output::*; +//////////////////////////////////////////////////////////////////////////////////////////////////////////////////////// + pub struct TailCommand { query_svc: Arc, dataset_ref: DatasetRef, @@ -93,3 +95,5 @@ impl Command for TailCommand { Ok(()) } } + +//////////////////////////////////////////////////////////////////////////////////////////////////////////////////////// diff --git a/src/app/cli/src/commands/verify_command.rs b/src/app/cli/src/commands/verify_command.rs index 433dcc395..ac3d25778 100644 --- a/src/app/cli/src/commands/verify_command.rs +++ b/src/app/cli/src/commands/verify_command.rs @@ -152,7 +152,6 @@ impl VerifyCommand { Ok(self .verification_svc - .clone() .verify_multi(filtered_requests, options, listener) .await) } @@ -193,14 +192,14 @@ impl VerifyCommand { let mut current_missed_dependencies = vec![]; - for dependecy in summary.dependencies { + for dependency in summary.dependencies { if self .dataset_repo - .resolve_dataset_ref(&DatasetRef::ID(dependecy.clone())) + .resolve_dataset_ref(&DatasetRef::ID(dependency.clone())) .await .is_err() { - current_missed_dependencies.push(dependecy.to_string()); + current_missed_dependencies.push(dependency.to_string()); } } if !current_missed_dependencies.is_empty() { diff --git a/src/app/cli/src/explore/api_server.rs b/src/app/cli/src/explore/api_server.rs index 3d238c5fc..510d81298 100644 --- a/src/app/cli/src/explore/api_server.rs +++ b/src/app/cli/src/explore/api_server.rs @@ -14,7 +14,7 @@ use std::path::PathBuf; use std::pin::Pin; use std::sync::Arc; -use axum::Extension; +use axum::{middleware, Extension}; use database_common_macros::transactional_handler; use dill::{Catalog, CatalogBuilder}; use http_common::ApiError; @@ -94,6 +94,8 @@ impl APIServer { let mut app = axum::Router::new() .route("/", axum::routing::get(root)) .route( + // IMPORTANT: The same name is used inside e2e_middleware_fn(). + // If there is a need to change, please update there too. 
"/graphql", axum::routing::get(graphql_playground_handler).post(graphql_handler), ) @@ -135,7 +137,17 @@ impl APIServer { .nest("/", kamu_adapter_http::data::dataset_router()), multi_tenant_workspace, ), - ) + ); + + let is_e2e_testing = e2e_output_data_path.is_some(); + + if is_e2e_testing { + app = app.layer(middleware::from_fn( + kamu_adapter_http::e2e::e2e_middleware_fn, + )); + } + + app = app .layer(kamu_adapter_http::AuthenticationLayer::new()) .layer( tower_http::cors::CorsLayer::new() @@ -157,7 +169,6 @@ impl APIServer { .layer(Extension(gql_schema)) .layer(Extension(api_server_catalog)); - let is_e2e_testing = e2e_output_data_path.is_some(); let maybe_shutdown_notify = if is_e2e_testing { let shutdown_notify = Arc::new(Notify::new()); diff --git a/src/app/cli/src/services/config/config_service.rs b/src/app/cli/src/services/config/config_service.rs index 1317c19f5..2baaf9842 100644 --- a/src/app/cli/src/services/config/config_service.rs +++ b/src/app/cli/src/services/config/config_service.rs @@ -256,6 +256,8 @@ impl ConfigService { fn path_for_scope(&self, scope: ConfigScope) -> PathBuf { match scope { + // TODO: Respect `XDG_CONFIG_HOME` when working with configs + // https://github.com/kamu-data/kamu-cli/issues/848 ConfigScope::User => dirs::home_dir() .expect("Cannot determine user home directory") .join(CONFIG_FILENAME), diff --git a/src/app/cli/src/services/gc_service.rs b/src/app/cli/src/services/gc_service.rs index 29d0e4e98..9c91e7638 100644 --- a/src/app/cli/src/services/gc_service.rs +++ b/src/app/cli/src/services/gc_service.rs @@ -9,11 +9,17 @@ use std::sync::Arc; -use chrono::{DateTime, Duration, Utc}; +use chrono::{DateTime, Duration, TimeDelta, Utc}; use internal_error::{InternalError, ResultIntoInternal}; use crate::WorkspaceLayout; +//////////////////////////////////////////////////////////////////////////////////////////////////////////////////////// + +const EVICTION_THRESHOLD: TimeDelta = Duration::hours(24); + +//////////////////////////////////////////////////////////////////////////////////////////////////////////////////////// + pub struct GcService { workspace_layout: Arc, } @@ -56,9 +62,6 @@ impl GcService { /// Evict stale entries to manage cache size #[tracing::instrument(level = "debug", skip_all)] pub fn evict_cache(&self) -> Result { - // TODO: Make const after https://github.com/chronotope/chrono/issues/309 - // Or make into a config option - let eviction_threshold: Duration = Duration::hours(24); let now = Utc::now(); let mut entries_freed = 0; let mut bytes_freed = 0; @@ -69,7 +72,7 @@ impl GcService { let mtime: DateTime = chrono::DateTime::from(entry.metadata().int_err()?.modified().int_err()?); - if (now - mtime) > eviction_threshold { + if (now - mtime) > EVICTION_THRESHOLD { if entry.path().is_dir() { bytes_freed += fs_extra::dir::get_size(entry.path()).int_err()?; std::fs::remove_dir_all(entry.path()).int_err()?; @@ -93,7 +96,11 @@ impl GcService { } } +//////////////////////////////////////////////////////////////////////////////////////////////////////////////////////// + pub struct GcResult { pub entries_freed: usize, pub bytes_freed: u64, } + +//////////////////////////////////////////////////////////////////////////////////////////////////////////////////////// diff --git a/src/app/cli/tests/tests/test_workspace_svc.rs b/src/app/cli/tests/tests/test_workspace_svc.rs index ae37a6ce1..417cd5b60 100644 --- a/src/app/cli/tests/tests/test_workspace_svc.rs +++ b/src/app/cli/tests/tests/test_workspace_svc.rs @@ -14,6 +14,7 @@ use 
kamu::domain::*;
 use kamu::testing::{MetadataFactory, ParquetWriterHelper};
 use kamu::*;
 use kamu_cli::*;
+use kamu_cli_puppet::extensions::KamuCliPuppetExt;
 use kamu_cli_puppet::KamuCliPuppet;
 use opendatafabric::serde::yaml::Manifest;
 use opendatafabric::*;
@@ -46,16 +47,15 @@ async fn test_workspace_upgrade() {
 
     let kamu = KamuCliPuppet::new(temp_dir.path());
 
-    let assert = kamu.execute(["list"]).await.failure();
-    let stderr = std::str::from_utf8(&assert.get_output().stderr).unwrap();
-
-    assert!(
-        stderr.contains(
+    kamu.assert_failure_command_execution(
+        ["list"],
+        None,
+        Some([
             "Error: Workspace needs to be upgraded before continuing - please run `kamu system \
-             upgrade-workspace`"
-        ),
-        "Unexpected output:\n{stderr}",
-    );
+             upgrade-workspace`",
+        ]),
+    )
+    .await;
 
     // TODO: Restore this test upon the first upgrade post V5 breaking changes
     /*
diff --git a/src/domain/core/src/services/reset_service.rs b/src/domain/core/src/services/reset_service.rs
index 17240e704..51b3738d2 100644
--- a/src/domain/core/src/services/reset_service.rs
+++ b/src/domain/core/src/services/reset_service.rs
@@ -14,6 +14,8 @@ use thiserror::Error;
 use crate::entities::SetRefError;
 use crate::*;
 
+////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
+
 #[async_trait::async_trait]
 pub trait ResetService: Send + Sync {
     async fn reset_dataset(
@@ -103,3 +105,5 @@ pub struct OldHeadMismatchError {
     pub current_head: Multihash,
     pub old_head: Multihash,
 }
+
+////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
diff --git a/src/e2e/app/cli/common/Cargo.toml b/src/e2e/app/cli/common/Cargo.toml
index 0714bf122..8f31999fb 100644
--- a/src/e2e/app/cli/common/Cargo.toml
+++ b/src/e2e/app/cli/common/Cargo.toml
@@ -30,6 +30,7 @@ opendatafabric = { workspace = true }
 async-trait = "0.1"
 chrono = { version = "0.4", default-features = false, features = ["now"] }
 indoc = "2"
+lazy_static = { version = "1" }
 pretty_assertions = "1"
 regex = "1"
 reqwest = { version = "0.12", default-features = false, features = ["json"] }
diff --git a/src/e2e/app/cli/common/src/e2e_harness.rs b/src/e2e/app/cli/common/src/e2e_harness.rs
index dadb71910..40d4992d2 100644
--- a/src/e2e/app/cli/common/src/e2e_harness.rs
+++ b/src/e2e/app/cli/common/src/e2e_harness.rs
@@ -9,7 +9,7 @@
 
 use std::future::Future;
 
-use chrono::{DateTime, NaiveTime, Utc};
+use chrono::{DateTime, NaiveTime, TimeZone, Utc};
 use kamu_cli_puppet::extensions::KamuCliPuppetExt;
 use kamu_cli_puppet::{KamuCliPuppet, NewWorkspaceOptions};
 use regex::Regex;
@@ -34,6 +34,7 @@ pub struct KamuCliApiServerHarnessOptions {
     potential_workspace: PotentialWorkspace,
     env_vars: Vec<(String, String)>,
     frozen_system_time: Option<DateTime<Utc>>,
+    kamu_config: Option<String>,
 }
 
 impl KamuCliApiServerHarnessOptions {
@@ -54,12 +55,18 @@ impl KamuCliApiServerHarnessOptions {
         self
     }
 
-    pub fn with_frozen_system_time(mut self, value: DateTime<Utc>) -> Self {
+    pub fn with_custom_frozen_system_time(mut self, value: DateTime<Utc>) -> Self {
         self.frozen_system_time = Some(value);
 
         self
     }
 
+    pub fn with_frozen_system_time(self) -> Self {
+        let t = Utc.with_ymd_and_hms(2050, 1, 2, 3, 4, 5).unwrap();
+
+        self.with_custom_frozen_system_time(t)
+    }
+
     pub fn with_today_as_frozen_system_time(self) -> Self {
         let today = {
             let now = Utc::now();
@@ -68,7 +75,13 @@ impl KamuCliApiServerHarnessOptions {
                 .unwrap()
         };
 
-        self.with_frozen_system_time(today)
+        self.with_custom_frozen_system_time(today)
+    }
+
+    pub fn
with_kamu_config(mut self, content: &str) -> Self { + self.kamu_config = Some(content.into()); + + self } } @@ -76,7 +89,6 @@ impl KamuCliApiServerHarnessOptions { pub struct KamuCliApiServerHarness { options: KamuCliApiServerHarnessOptions, - kamu_config: Option, } impl KamuCliApiServerHarness { @@ -174,11 +186,21 @@ impl KamuCliApiServerHarness { Self::new(options, Some(kamu_config)) } - fn new(options: KamuCliApiServerHarnessOptions, kamu_config: Option) -> Self { - Self { - options, - kamu_config, + fn new( + mut options: KamuCliApiServerHarnessOptions, + generated_kamu_config: Option, + ) -> Self { + assert!( + !(options.kamu_config.is_some() && generated_kamu_config.is_some()), + "There can be only one configuration file: either preset from the test options or \ + generated based on the storage type" + ); + + if options.kamu_config.is_none() { + options.kamu_config = generated_kamu_config; } + + Self { options } } pub async fn run_api_server(self, fixture: Fixture) @@ -208,7 +230,8 @@ impl KamuCliApiServerHarness { let KamuCliApiServerHarnessOptions { potential_workspace, env_vars, - frozen_system_time: freeze_system_time, + frozen_system_time, + kamu_config, } = self.options; let mut kamu = match potential_workspace { @@ -218,14 +241,14 @@ impl KamuCliApiServerHarness { KamuCliPuppet::new_workspace_tmp_with(NewWorkspaceOptions { is_multi_tenant, - kamu_config: self.kamu_config, + kamu_config, env_vars, }) .await } }; - kamu.set_system_time(freeze_system_time); + kamu.set_system_time(frozen_system_time); kamu } diff --git a/src/e2e/app/cli/common/src/kamu_api_server_client.rs b/src/e2e/app/cli/common/src/kamu_api_server_client.rs index 5cdfc97a0..dfee2940b 100644 --- a/src/e2e/app/cli/common/src/kamu_api_server_client.rs +++ b/src/e2e/app/cli/common/src/kamu_api_server_client.rs @@ -8,6 +8,7 @@ // by the Apache License, Version 2.0. use internal_error::{InternalError, ResultIntoInternal}; +use opendatafabric::DatasetAlias; use reqwest::{Method, Response, StatusCode, Url}; use serde::Deserialize; use tokio_retry::strategy::FixedInterval; @@ -79,6 +80,22 @@ impl KamuApiServerClient { &self.server_base_url } + pub fn get_node_url(&self) -> Url { + let mut node_url = Url::parse("odf+http://host").unwrap(); + let base_url = self.get_base_url(); + + node_url.set_host(base_url.host_str()).unwrap(); + node_url.set_port(base_url.port()).unwrap(); + + node_url + } + + pub fn get_dataset_endpoint(&self, dataset_alias: &DatasetAlias) -> Url { + let node_url = self.get_node_url(); + + node_url.join(format!("{dataset_alias}").as_str()).unwrap() + } + pub async fn rest_api_call_assert( &self, token: Option, diff --git a/src/e2e/app/cli/common/src/kamu_api_server_client_ext.rs b/src/e2e/app/cli/common/src/kamu_api_server_client_ext.rs index a79385dbd..e9033fbe3 100644 --- a/src/e2e/app/cli/common/src/kamu_api_server_client_ext.rs +++ b/src/e2e/app/cli/common/src/kamu_api_server_client_ext.rs @@ -8,12 +8,130 @@ // by the Apache License, Version 2.0. 
use async_trait::async_trait; +use lazy_static::lazy_static; +use opendatafabric::{AccountName, DatasetAlias, DatasetName}; use reqwest::{Method, StatusCode}; use crate::{KamuApiServerClient, RequestBody}; //////////////////////////////////////////////////////////////////////////////////////////////////////////////////////// +/// +pub const DATASET_ROOT_PLAYER_SCORES_SNAPSHOT_STR: &str = indoc::indoc!( + r#" + kind: DatasetSnapshot + version: 1 + content: + name: player-scores + kind: Root + metadata: + - kind: AddPushSource + sourceName: default + read: + kind: NdJson + schema: + - "match_time TIMESTAMP" + - "match_id BIGINT" + - "player_id STRING" + - "score BIGINT" + merge: + kind: Ledger + primaryKey: + - match_id + - player_id + - kind: SetVocab + eventTimeColumn: match_time + "# +); + +/// +pub const DATASET_DERIVATIVE_LEADERBOARD_SNAPSHOT_STR: &str = indoc::indoc!( + r#" + kind: DatasetSnapshot + version: 1 + content: + name: leaderboard + kind: Derivative + metadata: + - kind: SetTransform + inputs: + - datasetRef: player-scores + alias: player_scores + transform: + kind: Sql + engine: risingwave + queries: + - alias: leaderboard + # Note we are using explicit `crate materialized view` statement below + # because RW does not currently support Top-N queries directly on sinks. + # + # Note `partition by 1` is currently required by RW engine + # See: https://docs.risingwave.com/docs/current/window-functions/#syntax + query: | + create materialized view leaderboard as + select + * + from ( + select + row_number() over (partition by 1 order by score desc) as place, + match_time, + match_id, + player_id, + score + from player_scores + ) + where place <= 2 + - query: | + select * from leaderboard + - kind: SetVocab + eventTimeColumn: match_time + "# +); + +lazy_static! 
{ + /// + pub static ref DATASET_ROOT_PLAYER_SCORES_SNAPSHOT: String = { + DATASET_ROOT_PLAYER_SCORES_SNAPSHOT_STR + .escape_default() + .to_string() + }; + + /// + pub static ref DATASET_DERIVATIVE_LEADERBOARD_SNAPSHOT: String = { + DATASET_DERIVATIVE_LEADERBOARD_SNAPSHOT_STR + .escape_default() + .to_string() + }; +} + +/// +pub const DATASET_ROOT_PLAYER_SCORES_INGEST_DATA_NDJSON_CHUNK_1: &str = indoc::indoc!( + r#" + {"match_time": "2000-01-01", "match_id": 1, "player_id": "Alice", "score": 100} + {"match_time": "2000-01-01", "match_id": 1, "player_id": "Bob", "score": 80} + "# +); + +/// +pub const DATASET_ROOT_PLAYER_SCORES_INGEST_DATA_NDJSON_CHUNK_2: &str = indoc::indoc!( + r#" + {"match_time": "2000-01-02", "match_id": 2, "player_id": "Alice", "score": 70} + {"match_time": "2000-01-02", "match_id": 2, "player_id": "Charlie", "score": 90} + "# +); + +/// +pub const DATASET_ROOT_PLAYER_SCORES_INGEST_DATA_NDJSON_CHUNK_3: &str = indoc::indoc!( + r#" + {"match_time": "2000-01-03", "match_id": 3, "player_id": "Bob", "score": 60} + {"match_time": "2000-01-03", "match_id": 3, "player_id": "Charlie", "score": 110} + "# +); + +pub const E2E_USER_ACCOUNT_NAME_STR: &str = "e2e-user"; + +//////////////////////////////////////////////////////////////////////////////////////////////////////////////////////// + pub type AccessToken = String; pub type DatasetId = String; @@ -22,11 +140,29 @@ pub type DatasetId = String; #[async_trait] pub trait KamuApiServerClientExt { async fn login_as_kamu(&self) -> AccessToken; + async fn login_as_e2e_user(&self) -> AccessToken; + + // TODO: also return alias, after solving this bug: + // https://github.com/kamu-data/kamu-cli/issues/891 async fn create_dataset(&self, dataset_snapshot_yaml: &str, token: &AccessToken) -> DatasetId; + async fn create_player_scores_dataset(&self, token: &AccessToken) -> DatasetId; - async fn create_player_scores_dataset_with_data(&self, token: &AccessToken) -> DatasetId; + + async fn create_player_scores_dataset_with_data( + &self, + token: &AccessToken, + account_name_maybe: Option, + ) -> DatasetId; + async fn create_leaderboard(&self, token: &AccessToken) -> DatasetId; + + async fn ingest_data( + &self, + dataset_alias: &DatasetAlias, + data: RequestBody, + token: &AccessToken, + ); } //////////////////////////////////////////////////////////////////////////////////////////////////////////////////////// @@ -108,57 +244,28 @@ impl KamuApiServerClientExt for KamuApiServerClient { } async fn create_player_scores_dataset(&self, token: &AccessToken) -> DatasetId { - // https://github.com/kamu-data/kamu-cli/blob/master/examples/leaderboard/player-scores.yaml - let snapshot = indoc::indoc!( - r#" - kind: DatasetSnapshot - version: 1 - content: - name: player-scores - kind: Root - metadata: - - kind: AddPushSource - sourceName: default - read: - kind: NdJson - schema: - - "match_time TIMESTAMP" - - "match_id BIGINT" - - "player_id STRING" - - "score BIGINT" - merge: - kind: Ledger - primaryKey: - - match_id - - player_id - - kind: SetVocab - eventTimeColumn: match_time - "# - ) - .escape_default() - .to_string(); - - self.create_dataset(&snapshot, token).await + self.create_dataset(&DATASET_ROOT_PLAYER_SCORES_SNAPSHOT, token) + .await } - async fn create_player_scores_dataset_with_data(&self, token: &AccessToken) -> DatasetId { + async fn create_player_scores_dataset_with_data( + &self, + token: &AccessToken, + account_name_maybe: Option, + ) -> DatasetId { let dataset_id = self.create_player_scores_dataset(token).await; - 
self.rest_api_call_assert( - Some(token.clone()), - Method::POST, - "player-scores/ingest", - Some(RequestBody::NdJson( - indoc::indoc!( - r#" - {"match_time": "2000-01-01", "match_id": 1, "player_id": "Alice", "score": 100} - {"match_time": "2000-01-01", "match_id": 1, "player_id": "Bob", "score": 80} - "#, - ) - .into(), - )), - StatusCode::OK, - None, + // TODO: Use the alias from the reply, after fixing the bug: + // https://github.com/kamu-data/kamu-cli/issues/891 + let dataset_alias = DatasetAlias::new( + account_name_maybe, + DatasetName::new_unchecked("player-scores"), + ); + + self.ingest_data( + &dataset_alias, + RequestBody::NdJson(DATASET_ROOT_PLAYER_SCORES_INGEST_DATA_NDJSON_CHUNK_1.into()), + token, ) .await; @@ -166,53 +273,27 @@ impl KamuApiServerClientExt for KamuApiServerClient { } async fn create_leaderboard(&self, token: &AccessToken) -> DatasetId { - // https://github.com/kamu-data/kamu-cli/blob/master/examples/leaderboard/leaderboard.yaml - let snapshot = indoc::indoc!( - r#" - kind: DatasetSnapshot - version: 1 - content: - name: leaderboard - kind: Derivative - metadata: - - kind: SetTransform - inputs: - - datasetRef: player-scores - alias: player_scores - transform: - kind: Sql - engine: risingwave - queries: - - alias: leaderboard - # Note we are using explicit `crate materialized view` statement below - # because RW does not currently support Top-N queries directly on sinks. - # - # Note `partition by 1` is currently required by RW engine - # See: https://docs.risingwave.com/docs/current/window-functions/#syntax - query: | - create materialized view leaderboard as - select - * - from ( - select - row_number() over (partition by 1 order by score desc) as place, - match_time, - match_id, - player_id, - score - from player_scores - ) - where place <= 2 - - query: | - select * from leaderboard - - kind: SetVocab - eventTimeColumn: match_time - "# - ) - .escape_default() - .to_string(); + self.create_dataset(&DATASET_DERIVATIVE_LEADERBOARD_SNAPSHOT, token) + .await + } + + async fn ingest_data( + &self, + dataset_alias: &DatasetAlias, + data: RequestBody, + token: &AccessToken, + ) { + let endpoint = format!("{dataset_alias}/ingest"); - self.create_dataset(&snapshot, token).await + self.rest_api_call_assert( + Some(token.clone()), + Method::POST, + endpoint.as_str(), + Some(data), + StatusCode::OK, + None, + ) + .await; } } diff --git a/src/e2e/app/cli/inmem/tests/tests/commands/mod.rs b/src/e2e/app/cli/inmem/tests/tests/commands/mod.rs index 77dccbd11..406d408d3 100644 --- a/src/e2e/app/cli/inmem/tests/tests/commands/mod.rs +++ b/src/e2e/app/cli/inmem/tests/tests/commands/mod.rs @@ -8,10 +8,25 @@ // by the Apache License, Version 2.0. 
mod test_add_command; +mod test_compact_command; mod test_complete_command; +mod test_config_command; +mod test_delete_command; mod test_ingest_command; mod test_init_command; -mod test_repo_alias_command; +mod test_inspect_command; +mod test_log_command; +mod test_login_command; +mod test_new_command; +mod test_rename_command; +mod test_repo_command; +mod test_reset_command; +mod test_search_command; mod test_sql_command; mod test_system_api_server_gql_query; +mod test_system_diagnose_command; +mod test_system_gc_command; mod test_system_generate_token_command; +mod test_system_info_command; +mod test_tail_command; +mod test_verify_command; diff --git a/src/e2e/app/cli/inmem/tests/tests/commands/test_add_command.rs b/src/e2e/app/cli/inmem/tests/tests/commands/test_add_command.rs index 2964dc298..b7b39e306 100644 --- a/src/e2e/app/cli/inmem/tests/tests/commands/test_add_command.rs +++ b/src/e2e/app/cli/inmem/tests/tests/commands/test_add_command.rs @@ -11,6 +11,27 @@ use kamu_cli_e2e_common::prelude::*; //////////////////////////////////////////////////////////////////////////////////////////////////////////////////////// +kamu_cli_execute_command_e2e_test!( + storage = inmem, + fixture = kamu_cli_e2e_repo_tests::test_add_dataset_from_stdin +); + +//////////////////////////////////////////////////////////////////////////////////////////////////////////////////////// + +kamu_cli_execute_command_e2e_test!( + storage = inmem, + fixture = kamu_cli_e2e_repo_tests::test_add_dataset_with_name +); + +//////////////////////////////////////////////////////////////////////////////////////////////////////////////////////// + +kamu_cli_execute_command_e2e_test!( + storage = inmem, + fixture = kamu_cli_e2e_repo_tests::test_add_dataset_with_replace +); + +//////////////////////////////////////////////////////////////////////////////////////////////////////////////////////// + kamu_cli_execute_command_e2e_test!( storage = inmem, fixture = kamu_cli_e2e_repo_tests::test_add_recursive diff --git a/src/e2e/app/cli/inmem/tests/tests/commands/test_compact_command.rs b/src/e2e/app/cli/inmem/tests/tests/commands/test_compact_command.rs new file mode 100644 index 000000000..6ff90ee7b --- /dev/null +++ b/src/e2e/app/cli/inmem/tests/tests/commands/test_compact_command.rs @@ -0,0 +1,36 @@ +// Copyright Kamu Data, Inc. and contributors. All rights reserved. +// +// Use of this software is governed by the Business Source License +// included in the LICENSE file. +// +// As of the Change Date specified in that file, in accordance with +// the Business Source License, use of this software will be governed +// by the Apache License, Version 2.0. 
+ +use kamu_cli_e2e_common::prelude::*; + +//////////////////////////////////////////////////////////////////////////////////////////////////////////////////////// + +kamu_cli_execute_command_e2e_test!( + storage = inmem, + fixture = kamu_cli_e2e_repo_tests::test_compact_hard + extra_test_groups = "engine, ingest, datafusion" +); + +//////////////////////////////////////////////////////////////////////////////////////////////////////////////////////// + +kamu_cli_execute_command_e2e_test!( + storage = inmem, + fixture = kamu_cli_e2e_repo_tests::test_compact_keep_metadata_only + extra_test_groups = "engine, ingest, datafusion" +); + +//////////////////////////////////////////////////////////////////////////////////////////////////////////////////////// + +kamu_cli_execute_command_e2e_test!( + storage = inmem, + fixture = kamu_cli_e2e_repo_tests::test_compact_verify + extra_test_groups = "engine, ingest, datafusion" +); + +//////////////////////////////////////////////////////////////////////////////////////////////////////////////////////// diff --git a/src/e2e/app/cli/inmem/tests/tests/commands/test_config_command.rs b/src/e2e/app/cli/inmem/tests/tests/commands/test_config_command.rs new file mode 100644 index 000000000..9fb7d78c3 --- /dev/null +++ b/src/e2e/app/cli/inmem/tests/tests/commands/test_config_command.rs @@ -0,0 +1,53 @@ +// Copyright Kamu Data, Inc. and contributors. All rights reserved. +// +// Use of this software is governed by the Business Source License +// included in the LICENSE file. +// +// As of the Change Date specified in that file, in accordance with +// the Business Source License, use of this software will be governed +// by the Apache License, Version 2.0. + +use kamu_cli_e2e_common::prelude::*; + +//////////////////////////////////////////////////////////////////////////////////////////////////////////////////////// + +kamu_cli_execute_command_e2e_test!( + storage = inmem, + fixture = kamu_cli_e2e_repo_tests::test_config_set_value +); + +//////////////////////////////////////////////////////////////////////////////////////////////////////////////////////// + +kamu_cli_execute_command_e2e_test!( + storage = inmem, + fixture = kamu_cli_e2e_repo_tests::test_config_reset_key +); + +//////////////////////////////////////////////////////////////////////////////////////////////////////////////////////// + +kamu_cli_execute_command_e2e_test!( + storage = inmem, + fixture = kamu_cli_e2e_repo_tests::test_config_get_with_default +); + +//////////////////////////////////////////////////////////////////////////////////////////////////////////////////////// + +kamu_cli_execute_command_e2e_test!( + storage = inmem, + fixture = kamu_cli_e2e_repo_tests::test_config_get_from_config + options = Options::default().with_kamu_config( + indoc::indoc!( + r#" + kind: CLIConfig + version: 1 + content: + engine: + runtime: podman + uploads: + maxFileSizeInMb: 42 + "# + ) + ) +); + +//////////////////////////////////////////////////////////////////////////////////////////////////////////////////////// diff --git a/src/e2e/app/cli/inmem/tests/tests/commands/test_delete_command.rs b/src/e2e/app/cli/inmem/tests/tests/commands/test_delete_command.rs new file mode 100644 index 000000000..0906123b3 --- /dev/null +++ b/src/e2e/app/cli/inmem/tests/tests/commands/test_delete_command.rs @@ -0,0 +1,33 @@ +// Copyright Kamu Data, Inc. and contributors. All rights reserved. +// +// Use of this software is governed by the Business Source License +// included in the LICENSE file. 
+// +// As of the Change Date specified in that file, in accordance with +// the Business Source License, use of this software will be governed +// by the Apache License, Version 2.0. + +use kamu_cli_e2e_common::prelude::*; + +//////////////////////////////////////////////////////////////////////////////////////////////////////////////////////// + +kamu_cli_execute_command_e2e_test!( + storage = inmem, + fixture = kamu_cli_e2e_repo_tests::test_delete_dataset +); + +//////////////////////////////////////////////////////////////////////////////////////////////////////////////////////// + +kamu_cli_execute_command_e2e_test!( + storage = inmem, + fixture = kamu_cli_e2e_repo_tests::test_delete_dataset_recursive +); + +//////////////////////////////////////////////////////////////////////////////////////////////////////////////////////// + +kamu_cli_execute_command_e2e_test!( + storage = inmem, + fixture = kamu_cli_e2e_repo_tests::test_delete_dataset_all +); + +//////////////////////////////////////////////////////////////////////////////////////////////////////////////////////// diff --git a/src/e2e/app/cli/inmem/tests/tests/commands/test_ingest_command.rs b/src/e2e/app/cli/inmem/tests/tests/commands/test_ingest_command.rs index 7974dae1c..3687ca7d5 100644 --- a/src/e2e/app/cli/inmem/tests/tests/commands/test_ingest_command.rs +++ b/src/e2e/app/cli/inmem/tests/tests/commands/test_ingest_command.rs @@ -14,6 +14,7 @@ use kamu_cli_e2e_common::prelude::*; kamu_cli_execute_command_e2e_test!( storage = inmem, fixture = kamu_cli_e2e_repo_tests::test_push_ingest_from_file_ledger, + options = Options::default().with_frozen_system_time(), extra_test_groups = "engine, ingest, datafusion" ); @@ -22,6 +23,34 @@ kamu_cli_execute_command_e2e_test!( kamu_cli_execute_command_e2e_test!( storage = inmem, fixture = kamu_cli_e2e_repo_tests::test_push_ingest_from_file_snapshot_with_event_time, + options = Options::default().with_frozen_system_time(), + extra_test_groups = "engine, ingest, datafusion" +); + +//////////////////////////////////////////////////////////////////////////////////////////////////////////////////////// + +kamu_cli_execute_command_e2e_test!( + storage = inmem, + fixture = kamu_cli_e2e_repo_tests::test_ingest_from_stdin, + options = Options::default().with_frozen_system_time(), + extra_test_groups = "engine, ingest, datafusion" +); + +//////////////////////////////////////////////////////////////////////////////////////////////////////////////////////// + +kamu_cli_execute_command_e2e_test!( + storage = inmem, + fixture = kamu_cli_e2e_repo_tests::test_ingest_recursive, + options = Options::default().with_frozen_system_time(), + extra_test_groups = "engine, ingest, datafusion" +); + +//////////////////////////////////////////////////////////////////////////////////////////////////////////////////////// + +kamu_cli_execute_command_e2e_test!( + storage = inmem, + fixture = kamu_cli_e2e_repo_tests::test_ingest_with_source_name, + options = Options::default().with_frozen_system_time(), extra_test_groups = "engine, ingest, datafusion" ); diff --git a/src/e2e/app/cli/inmem/tests/tests/commands/test_init_command.rs b/src/e2e/app/cli/inmem/tests/tests/commands/test_init_command.rs index 1d937dd5f..7f27502c5 100644 --- a/src/e2e/app/cli/inmem/tests/tests/commands/test_init_command.rs +++ b/src/e2e/app/cli/inmem/tests/tests/commands/test_init_command.rs @@ -43,3 +43,11 @@ kamu_cli_execute_command_e2e_test!( ); 
//////////////////////////////////////////////////////////////////////////////////////////////////////////////////////// + +kamu_cli_execute_command_e2e_test!( + storage = inmem, + fixture = kamu_cli_e2e_repo_tests::test_init_in_an_existing_workspace, + options = Options::default().with_no_workspace() +); + +//////////////////////////////////////////////////////////////////////////////////////////////////////////////////////// diff --git a/src/e2e/app/cli/inmem/tests/tests/commands/test_inspect_command.rs b/src/e2e/app/cli/inmem/tests/tests/commands/test_inspect_command.rs new file mode 100644 index 000000000..0d22b67d5 --- /dev/null +++ b/src/e2e/app/cli/inmem/tests/tests/commands/test_inspect_command.rs @@ -0,0 +1,35 @@ +// Copyright Kamu Data, Inc. and contributors. All rights reserved. +// +// Use of this software is governed by the Business Source License +// included in the LICENSE file. +// +// As of the Change Date specified in that file, in accordance with +// the Business Source License, use of this software will be governed +// by the Apache License, Version 2.0. + +use kamu_cli_e2e_common::prelude::*; + +//////////////////////////////////////////////////////////////////////////////////////////////////////////////////////// + +kamu_cli_execute_command_e2e_test!( + storage = inmem, + fixture = kamu_cli_e2e_repo_tests::test_inspect_lineage, +); + +//////////////////////////////////////////////////////////////////////////////////////////////////////////////////////// + +kamu_cli_execute_command_e2e_test!( + storage = inmem, + fixture = kamu_cli_e2e_repo_tests::test_inspect_query, + options = Options::default().with_frozen_system_time() +); + +//////////////////////////////////////////////////////////////////////////////////////////////////////////////////////// + +kamu_cli_execute_command_e2e_test!( + storage = inmem, + fixture = kamu_cli_e2e_repo_tests::test_inspect_schema, + extra_test_groups = "engine, ingest, datafusion" +); + +//////////////////////////////////////////////////////////////////////////////////////////////////////////////////////// diff --git a/src/e2e/app/cli/inmem/tests/tests/commands/test_log_command.rs b/src/e2e/app/cli/inmem/tests/tests/commands/test_log_command.rs new file mode 100644 index 000000000..d83b3b951 --- /dev/null +++ b/src/e2e/app/cli/inmem/tests/tests/commands/test_log_command.rs @@ -0,0 +1,21 @@ +// Copyright Kamu Data, Inc. and contributors. All rights reserved. +// +// Use of this software is governed by the Business Source License +// included in the LICENSE file. +// +// As of the Change Date specified in that file, in accordance with +// the Business Source License, use of this software will be governed +// by the Apache License, Version 2.0. 
+ +use kamu_cli_e2e_common::prelude::*; + +//////////////////////////////////////////////////////////////////////////////////////////////////////////////////////// + +kamu_cli_execute_command_e2e_test!( + storage = inmem, + fixture = kamu_cli_e2e_repo_tests::test_log, + options = Options::default().with_frozen_system_time(), + extra_test_groups = "engine, ingest, datafusion" +); + +//////////////////////////////////////////////////////////////////////////////////////////////////////////////////////// diff --git a/src/e2e/app/cli/inmem/tests/tests/commands/test_login_command.rs b/src/e2e/app/cli/inmem/tests/tests/commands/test_login_command.rs new file mode 100644 index 000000000..d47541b57 --- /dev/null +++ b/src/e2e/app/cli/inmem/tests/tests/commands/test_login_command.rs @@ -0,0 +1,27 @@ +// Copyright Kamu Data, Inc. and contributors. All rights reserved. +// +// Use of this software is governed by the Business Source License +// included in the LICENSE file. +// +// As of the Change Date specified in that file, in accordance with +// the Business Source License, use of this software will be governed +// by the Apache License, Version 2.0. + +use kamu_cli_e2e_common::prelude::*; + +//////////////////////////////////////////////////////////////////////////////////////////////////////////////////////// + +kamu_cli_run_api_server_e2e_test!( + storage = inmem, + fixture = kamu_cli_e2e_repo_tests::test_login_logout_password, +); + +//////////////////////////////////////////////////////////////////////////////////////////////////////////////////////// + +kamu_cli_run_api_server_e2e_test!( + storage = inmem, + fixture = kamu_cli_e2e_repo_tests::test_login_logout_oauth, + options = Options::default().with_multi_tenant() +); + +//////////////////////////////////////////////////////////////////////////////////////////////////////////////////////// diff --git a/src/e2e/app/cli/inmem/tests/tests/commands/test_new_command.rs b/src/e2e/app/cli/inmem/tests/tests/commands/test_new_command.rs new file mode 100644 index 000000000..fd38ab3d6 --- /dev/null +++ b/src/e2e/app/cli/inmem/tests/tests/commands/test_new_command.rs @@ -0,0 +1,26 @@ +// Copyright Kamu Data, Inc. and contributors. All rights reserved. +// +// Use of this software is governed by the Business Source License +// included in the LICENSE file. +// +// As of the Change Date specified in that file, in accordance with +// the Business Source License, use of this software will be governed +// by the Apache License, Version 2.0. + +use kamu_cli_e2e_common::prelude::*; + +//////////////////////////////////////////////////////////////////////////////////////////////////////////////////////// + +kamu_cli_execute_command_e2e_test!( + storage = inmem, + fixture = kamu_cli_e2e_repo_tests::test_new_root, +); + +//////////////////////////////////////////////////////////////////////////////////////////////////////////////////////// + +kamu_cli_execute_command_e2e_test!( + storage = inmem, + fixture = kamu_cli_e2e_repo_tests::test_new_derivative, +); + +//////////////////////////////////////////////////////////////////////////////////////////////////////////////////////// diff --git a/src/e2e/app/cli/inmem/tests/tests/commands/test_rename_command.rs b/src/e2e/app/cli/inmem/tests/tests/commands/test_rename_command.rs new file mode 100644 index 000000000..a8666b29b --- /dev/null +++ b/src/e2e/app/cli/inmem/tests/tests/commands/test_rename_command.rs @@ -0,0 +1,19 @@ +// Copyright Kamu Data, Inc. and contributors. All rights reserved. 
+// +// Use of this software is governed by the Business Source License +// included in the LICENSE file. +// +// As of the Change Date specified in that file, in accordance with +// the Business Source License, use of this software will be governed +// by the Apache License, Version 2.0. + +use kamu_cli_e2e_common::prelude::*; + +//////////////////////////////////////////////////////////////////////////////////////////////////////////////////////// + +kamu_cli_execute_command_e2e_test!( + storage = inmem, + fixture = kamu_cli_e2e_repo_tests::test_rename_dataset +); + +//////////////////////////////////////////////////////////////////////////////////////////////////////////////////////// diff --git a/src/e2e/app/cli/inmem/tests/tests/commands/test_repo_alias_command.rs b/src/e2e/app/cli/inmem/tests/tests/commands/test_repo_command.rs similarity index 100% rename from src/e2e/app/cli/inmem/tests/tests/commands/test_repo_alias_command.rs rename to src/e2e/app/cli/inmem/tests/tests/commands/test_repo_command.rs diff --git a/src/e2e/app/cli/inmem/tests/tests/commands/test_reset_command.rs b/src/e2e/app/cli/inmem/tests/tests/commands/test_reset_command.rs new file mode 100644 index 000000000..17c46fee1 --- /dev/null +++ b/src/e2e/app/cli/inmem/tests/tests/commands/test_reset_command.rs @@ -0,0 +1,20 @@ +// Copyright Kamu Data, Inc. and contributors. All rights reserved. +// +// Use of this software is governed by the Business Source License +// included in the LICENSE file. +// +// As of the Change Date specified in that file, in accordance with +// the Business Source License, use of this software will be governed +// by the Apache License, Version 2.0. + +use kamu_cli_e2e_common::prelude::*; + +//////////////////////////////////////////////////////////////////////////////////////////////////////////////////////// + +kamu_cli_execute_command_e2e_test!( + storage = inmem, + fixture = kamu_cli_e2e_repo_tests::test_reset, + extra_test_groups = "engine, ingest, datafusion" +); + +//////////////////////////////////////////////////////////////////////////////////////////////////////////////////////// diff --git a/src/e2e/app/cli/inmem/tests/tests/commands/test_search_command.rs b/src/e2e/app/cli/inmem/tests/tests/commands/test_search_command.rs new file mode 100644 index 000000000..3d9b2c90d --- /dev/null +++ b/src/e2e/app/cli/inmem/tests/tests/commands/test_search_command.rs @@ -0,0 +1,66 @@ +// Copyright Kamu Data, Inc. and contributors. All rights reserved. +// +// Use of this software is governed by the Business Source License +// included in the LICENSE file. +// +// As of the Change Date specified in that file, in accordance with +// the Business Source License, use of this software will be governed +// by the Apache License, Version 2.0. + +use kamu_cli_e2e_common::prelude::*; + +//////////////////////////////////////////////////////////////////////////////////////////////////////////////////////// + +kamu_cli_run_api_server_e2e_test!( + storage = inmem, + fixture = kamu_cli_e2e_repo_tests::test_search_multi_user + // We need synthetic time for the tests, but the third-party JWT code + // uses the current time. Assuming that the token lifetime is 24 hours, we will + // use the projected date (the current day) as a workaround. 
+    options = Options::default()
+        .with_multi_tenant()
+        .with_today_as_frozen_system_time()
+        .with_kamu_config(
+            indoc::indoc!(
+                r#"
+                kind: CLIConfig
+                version: 1
+                content:
+                  users:
+                    predefined:
+                      - accountName: kamu
+                "#
+            )
+        ),
+    extra_test_groups = "engine, ingest, datafusion"
+);
+
+////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
+
+kamu_cli_run_api_server_e2e_test!(
+    storage = inmem,
+    fixture = kamu_cli_e2e_repo_tests::test_search_by_name,
+    // We need synthetic time for the tests, but the third-party JWT code
+    // uses the current time. Assuming that the token lifetime is 24 hours, we will
+    // use the projected date (the current day) as a workaround.
+    options = Options::default()
+        .with_multi_tenant()
+        .with_today_as_frozen_system_time(),
+    extra_test_groups = "engine, ingest, datafusion"
+);
+
+////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
+
+kamu_cli_run_api_server_e2e_test!(
+    storage = inmem,
+    fixture = kamu_cli_e2e_repo_tests::test_search_by_repo,
+    // We need synthetic time for the tests, but the third-party JWT code
+    // uses the current time. Assuming that the token lifetime is 24 hours, we will
+    // use the projected date (the current day) as a workaround.
+    options = Options::default()
+        .with_multi_tenant()
+        .with_today_as_frozen_system_time(),
+    extra_test_groups = "engine, ingest, datafusion"
+);
+
+////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
diff --git a/src/e2e/app/cli/inmem/tests/tests/commands/test_sql_command.rs b/src/e2e/app/cli/inmem/tests/tests/commands/test_sql_command.rs
index a48fd8e50..7315367ca 100644
--- a/src/e2e/app/cli/inmem/tests/tests/commands/test_sql_command.rs
+++ b/src/e2e/app/cli/inmem/tests/tests/commands/test_sql_command.rs
@@ -27,3 +27,12 @@ kamu_cli_execute_command_e2e_test!(
 );
 
 ////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
+
+kamu_cli_execute_command_e2e_test!(
+    storage = inmem,
+    fixture = kamu_cli_e2e_repo_tests::test_sql_command,
+    options = Options::default().with_frozen_system_time(),
+    extra_test_groups = "engine, datafusion"
+);
+
+////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
diff --git a/src/e2e/app/cli/inmem/tests/tests/commands/test_system_diagnose_command.rs b/src/e2e/app/cli/inmem/tests/tests/commands/test_system_diagnose_command.rs
new file mode 100644
index 000000000..44d851352
--- /dev/null
+++ b/src/e2e/app/cli/inmem/tests/tests/commands/test_system_diagnose_command.rs
@@ -0,0 +1,19 @@
+// Copyright Kamu Data, Inc. and contributors. All rights reserved.
+//
+// Use of this software is governed by the Business Source License
+// included in the LICENSE file.
+//
+// As of the Change Date specified in that file, in accordance with
+// the Business Source License, use of this software will be governed
+// by the Apache License, Version 2.0.
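The `Options` builder combined in the search tests above (`with_multi_tenant`, `with_today_as_frozen_system_time`, `with_kamu_config`) comes from `kamu-cli-e2e-common`. The following is only a rough sketch of how such a builder could be shaped, and of why "today" works as a frozen system time when third-party JWT validation runs against the real clock; field names, defaults, and the chosen instants are assumptions rather than the crate's actual implementation.

```rust
// A rough sketch only: field names, defaults, and instants are assumptions;
// the real `Options` type is defined in kamu-cli-e2e-common.
use chrono::{DateTime, Duration, DurationRound, TimeZone, Utc};

#[derive(Default)]
pub struct OptionsSketch {
    pub multi_tenant: bool,
    pub frozen_system_time: Option<DateTime<Utc>>,
    pub kamu_config: Option<String>,
}

impl OptionsSketch {
    pub fn with_multi_tenant(mut self) -> Self {
        self.multi_tenant = true;
        self
    }

    // Any fixed instant makes command output (timestamps in blocks, tables,
    // etc.) deterministic.
    pub fn with_frozen_system_time(mut self) -> Self {
        self.frozen_system_time = Utc.with_ymd_and_hms(2050, 1, 2, 3, 4, 5).single();
        self
    }

    // Truncating "now" to midnight UTC keeps the clock deterministic for the
    // duration of a run while staying inside a freshly issued JWT's 24-hour
    // lifetime, which the third-party code checks against the real clock.
    pub fn with_today_as_frozen_system_time(mut self) -> Self {
        self.frozen_system_time =
            Some(Utc::now().duration_trunc(Duration::days(1)).unwrap());
        self
    }

    pub fn with_kamu_config(mut self, config: &str) -> Self {
        self.kamu_config = Some(config.to_owned());
        self
    }
}
```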
+ +use kamu_cli_e2e_common::prelude::*; + +//////////////////////////////////////////////////////////////////////////////////////////////////////////////////////// + +kamu_cli_execute_command_e2e_test!( + storage = inmem, + fixture = kamu_cli_e2e_repo_tests::test_system_diagnose +); + +//////////////////////////////////////////////////////////////////////////////////////////////////////////////////////// diff --git a/src/e2e/app/cli/inmem/tests/tests/commands/test_system_gc_command.rs b/src/e2e/app/cli/inmem/tests/tests/commands/test_system_gc_command.rs new file mode 100644 index 000000000..b36419994 --- /dev/null +++ b/src/e2e/app/cli/inmem/tests/tests/commands/test_system_gc_command.rs @@ -0,0 +1,16 @@ +// Copyright Kamu Data, Inc. and contributors. All rights reserved. +// +// Use of this software is governed by the Business Source License +// included in the LICENSE file. +// +// As of the Change Date specified in that file, in accordance with +// the Business Source License, use of this software will be governed +// by the Apache License, Version 2.0. + +use kamu_cli_e2e_common::prelude::*; + +//////////////////////////////////////////////////////////////////////////////////////////////////////////////////////// + +kamu_cli_execute_command_e2e_test!(storage = inmem, fixture = kamu_cli_e2e_repo_tests::test_gc); + +//////////////////////////////////////////////////////////////////////////////////////////////////////////////////////// diff --git a/src/e2e/app/cli/inmem/tests/tests/commands/test_system_info_command.rs b/src/e2e/app/cli/inmem/tests/tests/commands/test_system_info_command.rs new file mode 100644 index 000000000..00c824968 --- /dev/null +++ b/src/e2e/app/cli/inmem/tests/tests/commands/test_system_info_command.rs @@ -0,0 +1,19 @@ +// Copyright Kamu Data, Inc. and contributors. All rights reserved. +// +// Use of this software is governed by the Business Source License +// included in the LICENSE file. +// +// As of the Change Date specified in that file, in accordance with +// the Business Source License, use of this software will be governed +// by the Apache License, Version 2.0. + +use kamu_cli_e2e_common::prelude::*; + +//////////////////////////////////////////////////////////////////////////////////////////////////////////////////////// + +kamu_cli_execute_command_e2e_test!( + storage = inmem, + fixture = kamu_cli_e2e_repo_tests::test_system_info +); + +//////////////////////////////////////////////////////////////////////////////////////////////////////////////////////// diff --git a/src/e2e/app/cli/inmem/tests/tests/commands/test_tail_command.rs b/src/e2e/app/cli/inmem/tests/tests/commands/test_tail_command.rs new file mode 100644 index 000000000..ad25c37f6 --- /dev/null +++ b/src/e2e/app/cli/inmem/tests/tests/commands/test_tail_command.rs @@ -0,0 +1,21 @@ +// Copyright Kamu Data, Inc. and contributors. All rights reserved. +// +// Use of this software is governed by the Business Source License +// included in the LICENSE file. +// +// As of the Change Date specified in that file, in accordance with +// the Business Source License, use of this software will be governed +// by the Apache License, Version 2.0. 
+ +use kamu_cli_e2e_common::prelude::*; + +//////////////////////////////////////////////////////////////////////////////////////////////////////////////////////// + +kamu_cli_execute_command_e2e_test!( + storage = inmem, + fixture = kamu_cli_e2e_repo_tests::test_tail, + options = Options::default().with_frozen_system_time(), + extra_test_groups = "engine, ingest, datafusion" +); + +//////////////////////////////////////////////////////////////////////////////////////////////////////////////////////// diff --git a/src/e2e/app/cli/inmem/tests/tests/commands/test_verify_command.rs b/src/e2e/app/cli/inmem/tests/tests/commands/test_verify_command.rs new file mode 100644 index 000000000..cd27f148b --- /dev/null +++ b/src/e2e/app/cli/inmem/tests/tests/commands/test_verify_command.rs @@ -0,0 +1,36 @@ +// Copyright Kamu Data, Inc. and contributors. All rights reserved. +// +// Use of this software is governed by the Business Source License +// included in the LICENSE file. +// +// As of the Change Date specified in that file, in accordance with +// the Business Source License, use of this software will be governed +// by the Apache License, Version 2.0. + +use kamu_cli_e2e_common::prelude::*; + +//////////////////////////////////////////////////////////////////////////////////////////////////////////////////////// + +kamu_cli_execute_command_e2e_test!( + storage = inmem, + fixture = kamu_cli_e2e_repo_tests::test_verify_regular_dataset, + extra_test_groups = "engine, ingest, datafusion" +); + +//////////////////////////////////////////////////////////////////////////////////////////////////////////////////////// + +kamu_cli_execute_command_e2e_test!( + storage = inmem, + fixture = kamu_cli_e2e_repo_tests::test_verify_recursive, + extra_test_groups = "containerized, engine, ingest, datafusion, risingwave" +); + +//////////////////////////////////////////////////////////////////////////////////////////////////////////////////////// + +kamu_cli_execute_command_e2e_test!( + storage = inmem, + fixture = kamu_cli_e2e_repo_tests::test_verify_integrity, + extra_test_groups = "engine, ingest, datafusion" +); + +//////////////////////////////////////////////////////////////////////////////////////////////////////////////////////// diff --git a/src/e2e/app/cli/inmem/tests/tests/test_flow.rs b/src/e2e/app/cli/inmem/tests/tests/test_flow.rs index 665fa0e8e..e1684e70a 100644 --- a/src/e2e/app/cli/inmem/tests/tests/test_flow.rs +++ b/src/e2e/app/cli/inmem/tests/tests/test_flow.rs @@ -38,7 +38,7 @@ kamu_cli_run_api_server_e2e_test!( kamu_cli_run_api_server_e2e_test!( storage = inmem, fixture = kamu_cli_e2e_repo_tests::test_dataset_trigger_flow, - extra_test_groups = "containerized, engine, transform, datafusion" + extra_test_groups = "containerized, engine, transform, datafusion, risingwave" ); //////////////////////////////////////////////////////////////////////////////////////////////////////////////////////// diff --git a/src/e2e/app/cli/inmem/tests/tests/test_smart_transfer_protocol.rs b/src/e2e/app/cli/inmem/tests/tests/test_smart_transfer_protocol.rs index dfa4094c2..4dd8081a0 100644 --- a/src/e2e/app/cli/inmem/tests/tests/test_smart_transfer_protocol.rs +++ b/src/e2e/app/cli/inmem/tests/tests/test_smart_transfer_protocol.rs @@ -24,3 +24,104 @@ kamu_cli_run_api_server_e2e_test!( ); //////////////////////////////////////////////////////////////////////////////////////////////////////////////////////// + +kamu_cli_run_api_server_e2e_test!( + storage = inmem, + fixture = 
kamu_cli_e2e_repo_tests::test_smart_force_push_pull, + options = Options::default() + .with_multi_tenant() + .with_today_as_frozen_system_time(), + extra_test_groups = "engine, ingest, datafusion" +); + +//////////////////////////////////////////////////////////////////////////////////////////////////////////////////////// + +kamu_cli_run_api_server_e2e_test!( + storage = inmem, + fixture = kamu_cli_e2e_repo_tests::test_smart_push_pull_add_alias, + options = Options::default() + .with_multi_tenant() + .with_today_as_frozen_system_time(), + extra_test_groups = "engine, ingest, datafusion" +); + +//////////////////////////////////////////////////////////////////////////////////////////////////////////////////////// + +kamu_cli_run_api_server_e2e_test!( + storage = inmem, + fixture = kamu_cli_e2e_repo_tests::test_smart_pull_as, + options = Options::default() + .with_multi_tenant() + .with_today_as_frozen_system_time(), + extra_test_groups = "engine, ingest, datafusion" +); + +//////////////////////////////////////////////////////////////////////////////////////////////////////////////////////// + +kamu_cli_run_api_server_e2e_test!( + storage = inmem, + fixture = kamu_cli_e2e_repo_tests::test_smart_push_pull_all, + options = Options::default() + .with_multi_tenant() + .with_today_as_frozen_system_time(), + extra_test_groups = "containerized, engine, ingest, transform, datafusion, risingwave" +); + +//////////////////////////////////////////////////////////////////////////////////////////////////////////////////////// + +kamu_cli_run_api_server_e2e_test!( + storage = inmem, + fixture = kamu_cli_e2e_repo_tests::test_smart_push_pull_recursive, + options = Options::default() + .with_multi_tenant() + .with_today_as_frozen_system_time(), + extra_test_groups = "containerized, engine, ingest, transform, datafusion, risingwave" +); + +//////////////////////////////////////////////////////////////////////////////////////////////////////////////////////// + +kamu_cli_execute_command_e2e_test!( + storage = inmem, + fixture = kamu_cli_e2e_repo_tests::test_smart_pull_set_watermark, + options = Options::default().with_frozen_system_time(), +); + +//////////////////////////////////////////////////////////////////////////////////////////////////////////////////////// + +kamu_cli_execute_command_e2e_test!( + storage = inmem, + fixture = kamu_cli_e2e_repo_tests::test_smart_pull_reset_derivative, + options = Options::default().with_frozen_system_time(), + extra_test_groups = "containerized, engine, ingest, transform, datafusion, risingwave" +); + +//////////////////////////////////////////////////////////////////////////////////////////////////////////////////////// + +kamu_cli_run_api_server_e2e_test!( + storage = inmem, + fixture = kamu_cli_e2e_repo_tests::test_smart_push_visibility, + options = Options::default() + .with_multi_tenant() + .with_today_as_frozen_system_time(), + extra_test_groups = "engine, ingest, datafusion" +); + +//////////////////////////////////////////////////////////////////////////////////////////////////////////////////////// + +kamu_cli_execute_command_e2e_test!( + storage = inmem, + fixture = kamu_cli_e2e_repo_tests::test_smart_push_pull_s3, + options = Options::default().with_frozen_system_time(), + extra_test_groups = "containerized, engine, ingest, datafusion" +); + +//////////////////////////////////////////////////////////////////////////////////////////////////////////////////////// + +kamu_cli_execute_command_e2e_test!( + storage = inmem, + fixture = 
kamu_cli_e2e_repo_tests::test_smart_pull_derivative, + options = Options::default().with_frozen_system_time(), + extra_test_groups = "containerized, engine, ingest, transform, datafusion, risingwave" +); + +//////////////////////////////////////////////////////////////////////////////////////////////////////////////////////// diff --git a/src/e2e/app/cli/mysql/tests/tests/test_flow.rs b/src/e2e/app/cli/mysql/tests/tests/test_flow.rs index 50ebbb6e5..b7d1fe4c5 100644 --- a/src/e2e/app/cli/mysql/tests/tests/test_flow.rs +++ b/src/e2e/app/cli/mysql/tests/tests/test_flow.rs @@ -38,7 +38,7 @@ kamu_cli_run_api_server_e2e_test!( kamu_cli_run_api_server_e2e_test!( storage = mysql, fixture = kamu_cli_e2e_repo_tests::test_dataset_trigger_flow, - extra_test_groups = "containerized, engine, transform, datafusion" + extra_test_groups = "containerized, engine, transform, datafusion, risingwave" ); //////////////////////////////////////////////////////////////////////////////////////////////////////////////////////// diff --git a/src/e2e/app/cli/postgres/tests/tests/commands/mod.rs b/src/e2e/app/cli/postgres/tests/tests/commands/mod.rs index b58f493b3..6c06ec61a 100644 --- a/src/e2e/app/cli/postgres/tests/tests/commands/mod.rs +++ b/src/e2e/app/cli/postgres/tests/tests/commands/mod.rs @@ -7,5 +7,24 @@ // the Business Source License, use of this software will be governed // by the Apache License, Version 2.0. +mod test_add_command; +mod test_compact_command; +mod test_delete_command; +mod test_ingest_command; +mod test_init_command; +mod test_inspect_command; +mod test_log_command; +mod test_login_command; +mod test_new_command; +mod test_rename_command; +mod test_repo_command; +mod test_reset_command; +mod test_search_command; +mod test_sql_command; mod test_system_api_server_gql_query; +mod test_system_diagnose_command; +mod test_system_gc_command; mod test_system_generate_token_command; +mod test_system_info_command; +mod test_tail_command; +mod test_verify_command; diff --git a/src/e2e/app/cli/postgres/tests/tests/commands/test_add_command.rs b/src/e2e/app/cli/postgres/tests/tests/commands/test_add_command.rs new file mode 100644 index 000000000..407eb9cbc --- /dev/null +++ b/src/e2e/app/cli/postgres/tests/tests/commands/test_add_command.rs @@ -0,0 +1,40 @@ +// Copyright Kamu Data, Inc. and contributors. All rights reserved. +// +// Use of this software is governed by the Business Source License +// included in the LICENSE file. +// +// As of the Change Date specified in that file, in accordance with +// the Business Source License, use of this software will be governed +// by the Apache License, Version 2.0. 
+
+use kamu_cli_e2e_common::prelude::*;
+
+////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
+
+kamu_cli_execute_command_e2e_test!(
+    storage = postgres,
+    fixture = kamu_cli_e2e_repo_tests::test_add_dataset_from_stdin
+);
+
+////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
+
+kamu_cli_execute_command_e2e_test!(
+    storage = postgres,
+    fixture = kamu_cli_e2e_repo_tests::test_add_dataset_with_name
+);
+
+////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
+
+kamu_cli_execute_command_e2e_test!(
+    storage = postgres,
+    fixture = kamu_cli_e2e_repo_tests::test_add_dataset_with_replace
+);
+
+////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
+
+kamu_cli_execute_command_e2e_test!(
+    storage = postgres,
+    fixture = kamu_cli_e2e_repo_tests::test_add_recursive
+);
+
+////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
diff --git a/src/e2e/app/cli/postgres/tests/tests/commands/test_compact_command.rs b/src/e2e/app/cli/postgres/tests/tests/commands/test_compact_command.rs
new file mode 100644
index 000000000..90d11dab3
--- /dev/null
+++ b/src/e2e/app/cli/postgres/tests/tests/commands/test_compact_command.rs
@@ -0,0 +1,36 @@
+// Copyright Kamu Data, Inc. and contributors. All rights reserved.
+//
+// Use of this software is governed by the Business Source License
+// included in the LICENSE file.
+//
+// As of the Change Date specified in that file, in accordance with
+// the Business Source License, use of this software will be governed
+// by the Apache License, Version 2.0.
+
+use kamu_cli_e2e_common::prelude::*;
+
+////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
+
+kamu_cli_execute_command_e2e_test!(
+    storage = postgres,
+    fixture = kamu_cli_e2e_repo_tests::test_compact_hard,
+    extra_test_groups = "engine, ingest, datafusion"
+);
+
+////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
+
+kamu_cli_execute_command_e2e_test!(
+    storage = postgres,
+    fixture = kamu_cli_e2e_repo_tests::test_compact_keep_metadata_only,
+    extra_test_groups = "engine, ingest, datafusion"
+);
+
+////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
+
+kamu_cli_execute_command_e2e_test!(
+    storage = postgres,
+    fixture = kamu_cli_e2e_repo_tests::test_compact_verify,
+    extra_test_groups = "engine, ingest, datafusion"
+);
+
+////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
diff --git a/src/e2e/app/cli/postgres/tests/tests/commands/test_delete_command.rs b/src/e2e/app/cli/postgres/tests/tests/commands/test_delete_command.rs
new file mode 100644
index 000000000..faa7a6a99
--- /dev/null
+++ b/src/e2e/app/cli/postgres/tests/tests/commands/test_delete_command.rs
@@ -0,0 +1,33 @@
+// Copyright Kamu Data, Inc. and contributors. All rights reserved.
+//
+// Use of this software is governed by the Business Source License
+// included in the LICENSE file.
+// +// As of the Change Date specified in that file, in accordance with +// the Business Source License, use of this software will be governed +// by the Apache License, Version 2.0. + +use kamu_cli_e2e_common::prelude::*; + +//////////////////////////////////////////////////////////////////////////////////////////////////////////////////////// + +kamu_cli_execute_command_e2e_test!( + storage = postgres, + fixture = kamu_cli_e2e_repo_tests::test_delete_dataset +); + +//////////////////////////////////////////////////////////////////////////////////////////////////////////////////////// + +kamu_cli_execute_command_e2e_test!( + storage = postgres, + fixture = kamu_cli_e2e_repo_tests::test_delete_dataset_recursive +); + +//////////////////////////////////////////////////////////////////////////////////////////////////////////////////////// + +kamu_cli_execute_command_e2e_test!( + storage = postgres, + fixture = kamu_cli_e2e_repo_tests::test_delete_dataset_all +); + +//////////////////////////////////////////////////////////////////////////////////////////////////////////////////////// diff --git a/src/e2e/app/cli/postgres/tests/tests/commands/test_ingest_command.rs b/src/e2e/app/cli/postgres/tests/tests/commands/test_ingest_command.rs new file mode 100644 index 000000000..a9fd603d7 --- /dev/null +++ b/src/e2e/app/cli/postgres/tests/tests/commands/test_ingest_command.rs @@ -0,0 +1,57 @@ +// Copyright Kamu Data, Inc. and contributors. All rights reserved. +// +// Use of this software is governed by the Business Source License +// included in the LICENSE file. +// +// As of the Change Date specified in that file, in accordance with +// the Business Source License, use of this software will be governed +// by the Apache License, Version 2.0. + +use kamu_cli_e2e_common::prelude::*; + +//////////////////////////////////////////////////////////////////////////////////////////////////////////////////////// + +kamu_cli_execute_command_e2e_test!( + storage = postgres, + fixture = kamu_cli_e2e_repo_tests::test_push_ingest_from_file_ledger, + options = Options::default().with_frozen_system_time(), + extra_test_groups = "engine, ingest, datafusion" +); + +//////////////////////////////////////////////////////////////////////////////////////////////////////////////////////// + +kamu_cli_execute_command_e2e_test!( + storage = postgres, + fixture = kamu_cli_e2e_repo_tests::test_push_ingest_from_file_snapshot_with_event_time, + options = Options::default().with_frozen_system_time(), + extra_test_groups = "engine, ingest, datafusion" +); + +//////////////////////////////////////////////////////////////////////////////////////////////////////////////////////// + +kamu_cli_execute_command_e2e_test!( + storage = postgres, + fixture = kamu_cli_e2e_repo_tests::test_ingest_from_stdin, + options = Options::default().with_frozen_system_time(), + extra_test_groups = "engine, ingest, datafusion" +); + +//////////////////////////////////////////////////////////////////////////////////////////////////////////////////////// + +kamu_cli_execute_command_e2e_test!( + storage = postgres, + fixture = kamu_cli_e2e_repo_tests::test_ingest_recursive, + options = Options::default().with_frozen_system_time(), + extra_test_groups = "engine, ingest, datafusion" +); + +//////////////////////////////////////////////////////////////////////////////////////////////////////////////////////// + +kamu_cli_execute_command_e2e_test!( + storage = postgres, + fixture = kamu_cli_e2e_repo_tests::test_ingest_with_source_name, + options = 
Options::default().with_frozen_system_time(), + extra_test_groups = "engine, ingest, datafusion" +); + +//////////////////////////////////////////////////////////////////////////////////////////////////////////////////////// diff --git a/src/e2e/app/cli/postgres/tests/tests/commands/test_init_command.rs b/src/e2e/app/cli/postgres/tests/tests/commands/test_init_command.rs new file mode 100644 index 000000000..0617e08c6 --- /dev/null +++ b/src/e2e/app/cli/postgres/tests/tests/commands/test_init_command.rs @@ -0,0 +1,36 @@ +// Copyright Kamu Data, Inc. and contributors. All rights reserved. +// +// Use of this software is governed by the Business Source License +// included in the LICENSE file. +// +// As of the Change Date specified in that file, in accordance with +// the Business Source License, use of this software will be governed +// by the Apache License, Version 2.0. + +use kamu_cli_e2e_common::prelude::*; + +//////////////////////////////////////////////////////////////////////////////////////////////////////////////////////// + +kamu_cli_execute_command_e2e_test!( + storage = sqlite, + fixture = kamu_cli_e2e_repo_tests::test_init_exist_ok_st, + options = Options::default().with_no_workspace() +); + +//////////////////////////////////////////////////////////////////////////////////////////////////////////////////////// + +kamu_cli_execute_command_e2e_test!( + storage = sqlite, + fixture = kamu_cli_e2e_repo_tests::test_init_exist_ok_mt, + options = Options::default().with_no_workspace() +); + +//////////////////////////////////////////////////////////////////////////////////////////////////////////////////////// + +kamu_cli_execute_command_e2e_test!( + storage = sqlite, + fixture = kamu_cli_e2e_repo_tests::test_init_in_an_existing_workspace, + options = Options::default().with_no_workspace() +); + +//////////////////////////////////////////////////////////////////////////////////////////////////////////////////////// diff --git a/src/e2e/app/cli/postgres/tests/tests/commands/test_inspect_command.rs b/src/e2e/app/cli/postgres/tests/tests/commands/test_inspect_command.rs new file mode 100644 index 000000000..96239fd6c --- /dev/null +++ b/src/e2e/app/cli/postgres/tests/tests/commands/test_inspect_command.rs @@ -0,0 +1,35 @@ +// Copyright Kamu Data, Inc. and contributors. All rights reserved. +// +// Use of this software is governed by the Business Source License +// included in the LICENSE file. +// +// As of the Change Date specified in that file, in accordance with +// the Business Source License, use of this software will be governed +// by the Apache License, Version 2.0. 
+ +use kamu_cli_e2e_common::prelude::*; + +//////////////////////////////////////////////////////////////////////////////////////////////////////////////////////// + +kamu_cli_execute_command_e2e_test!( + storage = postgres, + fixture = kamu_cli_e2e_repo_tests::test_inspect_lineage, +); + +//////////////////////////////////////////////////////////////////////////////////////////////////////////////////////// + +kamu_cli_execute_command_e2e_test!( + storage = postgres, + fixture = kamu_cli_e2e_repo_tests::test_inspect_query, + options = Options::default().with_frozen_system_time() +); + +//////////////////////////////////////////////////////////////////////////////////////////////////////////////////////// + +kamu_cli_execute_command_e2e_test!( + storage = postgres, + fixture = kamu_cli_e2e_repo_tests::test_inspect_schema, + extra_test_groups = "engine, ingest, datafusion" +); + +//////////////////////////////////////////////////////////////////////////////////////////////////////////////////////// diff --git a/src/e2e/app/cli/postgres/tests/tests/commands/test_log_command.rs b/src/e2e/app/cli/postgres/tests/tests/commands/test_log_command.rs new file mode 100644 index 000000000..c2449941d --- /dev/null +++ b/src/e2e/app/cli/postgres/tests/tests/commands/test_log_command.rs @@ -0,0 +1,21 @@ +// Copyright Kamu Data, Inc. and contributors. All rights reserved. +// +// Use of this software is governed by the Business Source License +// included in the LICENSE file. +// +// As of the Change Date specified in that file, in accordance with +// the Business Source License, use of this software will be governed +// by the Apache License, Version 2.0. + +use kamu_cli_e2e_common::prelude::*; + +//////////////////////////////////////////////////////////////////////////////////////////////////////////////////////// + +kamu_cli_execute_command_e2e_test!( + storage = postgres, + fixture = kamu_cli_e2e_repo_tests::test_log, + options = Options::default().with_frozen_system_time(), + extra_test_groups = "engine, ingest, datafusion" +); + +//////////////////////////////////////////////////////////////////////////////////////////////////////////////////////// diff --git a/src/e2e/app/cli/postgres/tests/tests/commands/test_login_command.rs b/src/e2e/app/cli/postgres/tests/tests/commands/test_login_command.rs new file mode 100644 index 000000000..98b7aecd3 --- /dev/null +++ b/src/e2e/app/cli/postgres/tests/tests/commands/test_login_command.rs @@ -0,0 +1,27 @@ +// Copyright Kamu Data, Inc. and contributors. All rights reserved. +// +// Use of this software is governed by the Business Source License +// included in the LICENSE file. +// +// As of the Change Date specified in that file, in accordance with +// the Business Source License, use of this software will be governed +// by the Apache License, Version 2.0. 
+ +use kamu_cli_e2e_common::prelude::*; + +//////////////////////////////////////////////////////////////////////////////////////////////////////////////////////// + +kamu_cli_run_api_server_e2e_test!( + storage = postgres, + fixture = kamu_cli_e2e_repo_tests::test_login_logout_password, +); + +//////////////////////////////////////////////////////////////////////////////////////////////////////////////////////// + +kamu_cli_run_api_server_e2e_test!( + storage = postgres, + fixture = kamu_cli_e2e_repo_tests::test_login_logout_oauth, + options = Options::default().with_multi_tenant() +); + +//////////////////////////////////////////////////////////////////////////////////////////////////////////////////////// diff --git a/src/e2e/app/cli/postgres/tests/tests/commands/test_new_command.rs b/src/e2e/app/cli/postgres/tests/tests/commands/test_new_command.rs new file mode 100644 index 000000000..3c65ebef4 --- /dev/null +++ b/src/e2e/app/cli/postgres/tests/tests/commands/test_new_command.rs @@ -0,0 +1,26 @@ +// Copyright Kamu Data, Inc. and contributors. All rights reserved. +// +// Use of this software is governed by the Business Source License +// included in the LICENSE file. +// +// As of the Change Date specified in that file, in accordance with +// the Business Source License, use of this software will be governed +// by the Apache License, Version 2.0. + +use kamu_cli_e2e_common::prelude::*; + +//////////////////////////////////////////////////////////////////////////////////////////////////////////////////////// + +kamu_cli_execute_command_e2e_test!( + storage = postgres, + fixture = kamu_cli_e2e_repo_tests::test_new_root, +); + +//////////////////////////////////////////////////////////////////////////////////////////////////////////////////////// + +kamu_cli_execute_command_e2e_test!( + storage = postgres, + fixture = kamu_cli_e2e_repo_tests::test_new_derivative, +); + +//////////////////////////////////////////////////////////////////////////////////////////////////////////////////////// diff --git a/src/e2e/app/cli/postgres/tests/tests/commands/test_rename_command.rs b/src/e2e/app/cli/postgres/tests/tests/commands/test_rename_command.rs new file mode 100644 index 000000000..64a5565da --- /dev/null +++ b/src/e2e/app/cli/postgres/tests/tests/commands/test_rename_command.rs @@ -0,0 +1,19 @@ +// Copyright Kamu Data, Inc. and contributors. All rights reserved. +// +// Use of this software is governed by the Business Source License +// included in the LICENSE file. +// +// As of the Change Date specified in that file, in accordance with +// the Business Source License, use of this software will be governed +// by the Apache License, Version 2.0. + +use kamu_cli_e2e_common::prelude::*; + +//////////////////////////////////////////////////////////////////////////////////////////////////////////////////////// + +kamu_cli_execute_command_e2e_test!( + storage = postgres, + fixture = kamu_cli_e2e_repo_tests::test_rename_dataset +); + +//////////////////////////////////////////////////////////////////////////////////////////////////////////////////////// diff --git a/src/e2e/app/cli/postgres/tests/tests/commands/test_repo_command.rs b/src/e2e/app/cli/postgres/tests/tests/commands/test_repo_command.rs new file mode 100644 index 000000000..eeec2744e --- /dev/null +++ b/src/e2e/app/cli/postgres/tests/tests/commands/test_repo_command.rs @@ -0,0 +1,26 @@ +// Copyright Kamu Data, Inc. and contributors. All rights reserved. 
+// +// Use of this software is governed by the Business Source License +// included in the LICENSE file. +// +// As of the Change Date specified in that file, in accordance with +// the Business Source License, use of this software will be governed +// by the Apache License, Version 2.0. + +use kamu_cli_e2e_common::prelude::*; + +//////////////////////////////////////////////////////////////////////////////////////////////////////////////////////// + +kamu_cli_execute_command_e2e_test!( + storage = postgres, + fixture = kamu_cli_e2e_repo_tests::test_repository_pull_aliases_commands +); + +//////////////////////////////////////////////////////////////////////////////////////////////////////////////////////// + +kamu_cli_execute_command_e2e_test!( + storage = postgres, + fixture = kamu_cli_e2e_repo_tests::test_repository_push_aliases_commands +); + +//////////////////////////////////////////////////////////////////////////////////////////////////////////////////////// diff --git a/src/e2e/app/cli/postgres/tests/tests/commands/test_reset_command.rs b/src/e2e/app/cli/postgres/tests/tests/commands/test_reset_command.rs new file mode 100644 index 000000000..1161e8059 --- /dev/null +++ b/src/e2e/app/cli/postgres/tests/tests/commands/test_reset_command.rs @@ -0,0 +1,20 @@ +// Copyright Kamu Data, Inc. and contributors. All rights reserved. +// +// Use of this software is governed by the Business Source License +// included in the LICENSE file. +// +// As of the Change Date specified in that file, in accordance with +// the Business Source License, use of this software will be governed +// by the Apache License, Version 2.0. + +use kamu_cli_e2e_common::prelude::*; + +//////////////////////////////////////////////////////////////////////////////////////////////////////////////////////// + +kamu_cli_execute_command_e2e_test!( + storage = postgres, + fixture = kamu_cli_e2e_repo_tests::test_reset, + extra_test_groups = "engine, ingest, datafusion" +); + +//////////////////////////////////////////////////////////////////////////////////////////////////////////////////////// diff --git a/src/e2e/app/cli/postgres/tests/tests/commands/test_search_command.rs b/src/e2e/app/cli/postgres/tests/tests/commands/test_search_command.rs new file mode 100644 index 000000000..135de4131 --- /dev/null +++ b/src/e2e/app/cli/postgres/tests/tests/commands/test_search_command.rs @@ -0,0 +1,40 @@ +// Copyright Kamu Data, Inc. and contributors. All rights reserved. +// +// Use of this software is governed by the Business Source License +// included in the LICENSE file. +// +// As of the Change Date specified in that file, in accordance with +// the Business Source License, use of this software will be governed +// by the Apache License, Version 2.0. + +use kamu_cli_e2e_common::prelude::*; + +//////////////////////////////////////////////////////////////////////////////////////////////////////////////////////// + +kamu_cli_run_api_server_e2e_test!( + storage = postgres, + fixture = kamu_cli_e2e_repo_tests::test_search_by_name + // We need synthetic time for the tests, but the third-party JWT code + // uses the current time. Assuming that the token lifetime is 24 hours, we will + // use the projected date (the current day) as a workaround. 
+ options = Options::default() + .with_multi_tenant() + .with_today_as_frozen_system_time(), + extra_test_groups = "engine, ingest, datafusion" +); + +//////////////////////////////////////////////////////////////////////////////////////////////////////////////////////// + +kamu_cli_run_api_server_e2e_test!( + storage = postgres, + fixture = kamu_cli_e2e_repo_tests::test_search_by_repo + // We need synthetic time for the tests, but the third-party JWT code + // uses the current time. Assuming that the token lifetime is 24 hours, we will + // use the projected date (the current day) as a workaround. + options = Options::default() + .with_multi_tenant() + .with_today_as_frozen_system_time(), + extra_test_groups = "engine, ingest, datafusion" +); + +//////////////////////////////////////////////////////////////////////////////////////////////////////////////////////// diff --git a/src/e2e/app/cli/postgres/tests/tests/commands/test_sql_command.rs b/src/e2e/app/cli/postgres/tests/tests/commands/test_sql_command.rs new file mode 100644 index 000000000..339bc6586 --- /dev/null +++ b/src/e2e/app/cli/postgres/tests/tests/commands/test_sql_command.rs @@ -0,0 +1,38 @@ +// Copyright Kamu Data, Inc. and contributors. All rights reserved. +// +// Use of this software is governed by the Business Source License +// included in the LICENSE file. +// +// As of the Change Date specified in that file, in accordance with +// the Business Source License, use of this software will be governed +// by the Apache License, Version 2.0. + +use kamu_cli_e2e_common::prelude::*; + +//////////////////////////////////////////////////////////////////////////////////////////////////////////////////////// + +kamu_cli_execute_command_e2e_test!( + storage = postgres, + fixture = kamu_cli_e2e_repo_tests::test_datafusion_cli, + extra_test_groups = "engine, datafusion" +); + +//////////////////////////////////////////////////////////////////////////////////////////////////////////////////////// + +kamu_cli_execute_command_e2e_test!( + storage = postgres, + fixture = kamu_cli_e2e_repo_tests::test_datafusion_cli_not_launched_in_root_ws, + options = Options::default().with_no_workspace(), + extra_test_groups = "engine, datafusion" +); + +//////////////////////////////////////////////////////////////////////////////////////////////////////////////////////// + +kamu_cli_execute_command_e2e_test!( + storage = postgres, + fixture = kamu_cli_e2e_repo_tests::test_sql_command, + options = Options::default().with_frozen_system_time(), + extra_test_groups = "engine, datafusion" +); + +//////////////////////////////////////////////////////////////////////////////////////////////////////////////////////// diff --git a/src/e2e/app/cli/postgres/tests/tests/commands/test_system_diagnose_command.rs b/src/e2e/app/cli/postgres/tests/tests/commands/test_system_diagnose_command.rs new file mode 100644 index 000000000..9d1968db8 --- /dev/null +++ b/src/e2e/app/cli/postgres/tests/tests/commands/test_system_diagnose_command.rs @@ -0,0 +1,19 @@ +// Copyright Kamu Data, Inc. and contributors. All rights reserved. +// +// Use of this software is governed by the Business Source License +// included in the LICENSE file. +// +// As of the Change Date specified in that file, in accordance with +// the Business Source License, use of this software will be governed +// by the Apache License, Version 2.0. 
+ +use kamu_cli_e2e_common::prelude::*; + +//////////////////////////////////////////////////////////////////////////////////////////////////////////////////////// + +kamu_cli_execute_command_e2e_test!( + storage = postgres, + fixture = kamu_cli_e2e_repo_tests::test_system_diagnose +); + +//////////////////////////////////////////////////////////////////////////////////////////////////////////////////////// diff --git a/src/e2e/app/cli/postgres/tests/tests/commands/test_system_gc_command.rs b/src/e2e/app/cli/postgres/tests/tests/commands/test_system_gc_command.rs new file mode 100644 index 000000000..0a95452b5 --- /dev/null +++ b/src/e2e/app/cli/postgres/tests/tests/commands/test_system_gc_command.rs @@ -0,0 +1,19 @@ +// Copyright Kamu Data, Inc. and contributors. All rights reserved. +// +// Use of this software is governed by the Business Source License +// included in the LICENSE file. +// +// As of the Change Date specified in that file, in accordance with +// the Business Source License, use of this software will be governed +// by the Apache License, Version 2.0. + +use kamu_cli_e2e_common::prelude::*; + +//////////////////////////////////////////////////////////////////////////////////////////////////////////////////////// + +kamu_cli_execute_command_e2e_test!( + storage = postgres, + fixture = kamu_cli_e2e_repo_tests::test_gc +); + +//////////////////////////////////////////////////////////////////////////////////////////////////////////////////////// diff --git a/src/e2e/app/cli/postgres/tests/tests/commands/test_system_info_command.rs b/src/e2e/app/cli/postgres/tests/tests/commands/test_system_info_command.rs new file mode 100644 index 000000000..63a713ca8 --- /dev/null +++ b/src/e2e/app/cli/postgres/tests/tests/commands/test_system_info_command.rs @@ -0,0 +1,19 @@ +// Copyright Kamu Data, Inc. and contributors. All rights reserved. +// +// Use of this software is governed by the Business Source License +// included in the LICENSE file. +// +// As of the Change Date specified in that file, in accordance with +// the Business Source License, use of this software will be governed +// by the Apache License, Version 2.0. + +use kamu_cli_e2e_common::prelude::*; + +//////////////////////////////////////////////////////////////////////////////////////////////////////////////////////// + +kamu_cli_execute_command_e2e_test!( + storage = postgres, + fixture = kamu_cli_e2e_repo_tests::test_system_info +); + +//////////////////////////////////////////////////////////////////////////////////////////////////////////////////////// diff --git a/src/e2e/app/cli/postgres/tests/tests/commands/test_tail_command.rs b/src/e2e/app/cli/postgres/tests/tests/commands/test_tail_command.rs new file mode 100644 index 000000000..9950b16af --- /dev/null +++ b/src/e2e/app/cli/postgres/tests/tests/commands/test_tail_command.rs @@ -0,0 +1,21 @@ +// Copyright Kamu Data, Inc. and contributors. All rights reserved. +// +// Use of this software is governed by the Business Source License +// included in the LICENSE file. +// +// As of the Change Date specified in that file, in accordance with +// the Business Source License, use of this software will be governed +// by the Apache License, Version 2.0. 
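The `kamu_cli_execute_command_e2e_test!` and `kamu_cli_run_api_server_e2e_test!` macros invoked throughout these files come from `kamu-cli-e2e-common-macros`. As a rough mental model (an assumption, not the real expansion), an invocation such as the `test_system_info` one above generates an async test that prepares the selected storage backend, builds a puppet workspace, and hands it to the shared fixture:

```rust
// Illustrative only -- the real macro expansion is defined in
// kamu-cli-e2e-common-macros and will differ in detail.
#[tokio::test]
async fn test_system_info() {
    // 1. Prepare the storage backend selected via `storage = ...`
    //    (a database for postgres/mysql/sqlite, nothing extra for inmem).
    // 2. Create a temporary workspace, applying any `Options` (tenancy,
    //    frozen system time, predefined config).
    let kamu = KamuCliPuppet::new_workspace_tmp().await; // assumed constructor
    // 3. Run the shared fixture from kamu-cli-e2e-repo-tests against it.
    kamu_cli_e2e_repo_tests::test_system_info(kamu).await;
}
```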
+ +use kamu_cli_e2e_common::prelude::*; + +//////////////////////////////////////////////////////////////////////////////////////////////////////////////////////// + +kamu_cli_execute_command_e2e_test!( + storage = postgres, + fixture = kamu_cli_e2e_repo_tests::test_tail, + options = Options::default().with_frozen_system_time(), + extra_test_groups = "engine, ingest, datafusion" +); + +//////////////////////////////////////////////////////////////////////////////////////////////////////////////////////// diff --git a/src/e2e/app/cli/postgres/tests/tests/commands/test_verify_command.rs b/src/e2e/app/cli/postgres/tests/tests/commands/test_verify_command.rs new file mode 100644 index 000000000..ac98ac359 --- /dev/null +++ b/src/e2e/app/cli/postgres/tests/tests/commands/test_verify_command.rs @@ -0,0 +1,36 @@ +// Copyright Kamu Data, Inc. and contributors. All rights reserved. +// +// Use of this software is governed by the Business Source License +// included in the LICENSE file. +// +// As of the Change Date specified in that file, in accordance with +// the Business Source License, use of this software will be governed +// by the Apache License, Version 2.0. + +use kamu_cli_e2e_common::prelude::*; + +//////////////////////////////////////////////////////////////////////////////////////////////////////////////////////// + +kamu_cli_execute_command_e2e_test!( + storage = postgres, + fixture = kamu_cli_e2e_repo_tests::test_verify_regular_dataset, + extra_test_groups = "engine, ingest, datafusion" +); + +//////////////////////////////////////////////////////////////////////////////////////////////////////////////////////// + +kamu_cli_execute_command_e2e_test!( + storage = postgres, + fixture = kamu_cli_e2e_repo_tests::test_verify_recursive, + extra_test_groups = "containerized, engine, ingest, datafusion, risingwave" +); + +//////////////////////////////////////////////////////////////////////////////////////////////////////////////////////// + +kamu_cli_execute_command_e2e_test!( + storage = postgres, + fixture = kamu_cli_e2e_repo_tests::test_verify_integrity, + extra_test_groups = "engine, ingest, datafusion" +); + +//////////////////////////////////////////////////////////////////////////////////////////////////////////////////////// diff --git a/src/e2e/app/cli/postgres/tests/tests/test_flow.rs b/src/e2e/app/cli/postgres/tests/tests/test_flow.rs index d0872f75d..f81d9769d 100644 --- a/src/e2e/app/cli/postgres/tests/tests/test_flow.rs +++ b/src/e2e/app/cli/postgres/tests/tests/test_flow.rs @@ -38,7 +38,7 @@ kamu_cli_run_api_server_e2e_test!( kamu_cli_run_api_server_e2e_test!( storage = postgres, fixture = kamu_cli_e2e_repo_tests::test_dataset_trigger_flow, - extra_test_groups = "containerized, engine, transform, datafusion" + extra_test_groups = "containerized, engine, transform, datafusion, risingwave" ); //////////////////////////////////////////////////////////////////////////////////////////////////////////////////////// diff --git a/src/e2e/app/cli/postgres/tests/tests/test_smart_transfer_protocol.rs b/src/e2e/app/cli/postgres/tests/tests/test_smart_transfer_protocol.rs index fd35d189c..b943affd1 100644 --- a/src/e2e/app/cli/postgres/tests/tests/test_smart_transfer_protocol.rs +++ b/src/e2e/app/cli/postgres/tests/tests/test_smart_transfer_protocol.rs @@ -24,3 +24,104 @@ kamu_cli_run_api_server_e2e_test!( ); //////////////////////////////////////////////////////////////////////////////////////////////////////////////////////// + +kamu_cli_run_api_server_e2e_test!( + storage = postgres, 
+ fixture = kamu_cli_e2e_repo_tests::test_smart_force_push_pull, + options = Options::default() + .with_multi_tenant() + .with_today_as_frozen_system_time(), + extra_test_groups = "engine, ingest, datafusion" +); + +//////////////////////////////////////////////////////////////////////////////////////////////////////////////////////// + +kamu_cli_run_api_server_e2e_test!( + storage = postgres, + fixture = kamu_cli_e2e_repo_tests::test_smart_push_pull_add_alias, + options = Options::default() + .with_multi_tenant() + .with_today_as_frozen_system_time(), + extra_test_groups = "engine, ingest, datafusion" +); + +//////////////////////////////////////////////////////////////////////////////////////////////////////////////////////// + +kamu_cli_run_api_server_e2e_test!( + storage = postgres, + fixture = kamu_cli_e2e_repo_tests::test_smart_pull_as, + options = Options::default() + .with_multi_tenant() + .with_today_as_frozen_system_time(), + extra_test_groups = "engine, ingest, datafusion" +); + +//////////////////////////////////////////////////////////////////////////////////////////////////////////////////////// + +kamu_cli_run_api_server_e2e_test!( + storage = postgres, + fixture = kamu_cli_e2e_repo_tests::test_smart_push_pull_all, + options = Options::default() + .with_multi_tenant() + .with_today_as_frozen_system_time(), + extra_test_groups = "containerized, engine, ingest, transform, datafusion, risingwave" +); + +//////////////////////////////////////////////////////////////////////////////////////////////////////////////////////// + +kamu_cli_run_api_server_e2e_test!( + storage = postgres, + fixture = kamu_cli_e2e_repo_tests::test_smart_push_pull_recursive, + options = Options::default() + .with_multi_tenant() + .with_today_as_frozen_system_time(), + extra_test_groups = "containerized, engine, ingest, transform, datafusion, risingwave" +); + +//////////////////////////////////////////////////////////////////////////////////////////////////////////////////////// + +kamu_cli_execute_command_e2e_test!( + storage = postgres, + fixture = kamu_cli_e2e_repo_tests::test_smart_pull_set_watermark, + options = Options::default().with_frozen_system_time(), +); + +//////////////////////////////////////////////////////////////////////////////////////////////////////////////////////// + +kamu_cli_execute_command_e2e_test!( + storage = postgres, + fixture = kamu_cli_e2e_repo_tests::test_smart_pull_reset_derivative, + options = Options::default().with_frozen_system_time(), + extra_test_groups = "containerized, engine, ingest, transform, datafusion, risingwave" +); + +//////////////////////////////////////////////////////////////////////////////////////////////////////////////////////// + +kamu_cli_run_api_server_e2e_test!( + storage = postgres, + fixture = kamu_cli_e2e_repo_tests::test_smart_push_visibility, + options = Options::default() + .with_multi_tenant() + .with_today_as_frozen_system_time(), + extra_test_groups = "engine, ingest, datafusion" +); + +//////////////////////////////////////////////////////////////////////////////////////////////////////////////////////// + +kamu_cli_execute_command_e2e_test!( + storage = postgres, + fixture = kamu_cli_e2e_repo_tests::test_smart_push_pull_s3, + options = Options::default().with_frozen_system_time(), + extra_test_groups = "containerized, engine, ingest, datafusion" +); + +//////////////////////////////////////////////////////////////////////////////////////////////////////////////////////// + +kamu_cli_execute_command_e2e_test!( + storage = postgres, 
+ fixture = kamu_cli_e2e_repo_tests::test_smart_pull_derivative, + options = Options::default().with_frozen_system_time(), + extra_test_groups = "containerized, engine, ingest, transform, datafusion, risingwave" +); + +//////////////////////////////////////////////////////////////////////////////////////////////////////////////////////// diff --git a/src/e2e/app/cli/repo-tests/Cargo.toml b/src/e2e/app/cli/repo-tests/Cargo.toml index 7b44ffd70..8ba810ed7 100644 --- a/src/e2e/app/cli/repo-tests/Cargo.toml +++ b/src/e2e/app/cli/repo-tests/Cargo.toml @@ -36,6 +36,7 @@ opendatafabric = { workspace = true } chrono = { version = "0.4", default-features = false } indoc = "2" +pretty_assertions = { version = "1" } reqwest = { version = "0.12", default-features = false, features = [] } tokio = { version = "1", default-features = false, features = [] } tokio-retry = "0.3" diff --git a/src/e2e/app/cli/repo-tests/src/commands/mod.rs b/src/e2e/app/cli/repo-tests/src/commands/mod.rs index 19a8906ae..ef14ffc19 100644 --- a/src/e2e/app/cli/repo-tests/src/commands/mod.rs +++ b/src/e2e/app/cli/repo-tests/src/commands/mod.rs @@ -8,19 +8,49 @@ // by the Apache License, Version 2.0. mod test_add_command; +mod test_compact_command; mod test_complete_command; +mod test_config_command; +mod test_delete_command; mod test_ingest_command; mod test_init_command; -mod test_repo_alias_command; +mod test_inspect_command; +mod test_log_command; +mod test_login_command; +mod test_new_command; +mod test_rename_command; +mod test_repo_command; +mod test_reset_command; +mod test_search_command; mod test_sql_command; mod test_system_api_server_gql_query; +mod test_system_gc_command; mod test_system_generate_token_command; +mod test_system_info_command; +mod test_system_info_diagnose; +mod test_tail_command; +mod test_verify_command; pub use test_add_command::*; +pub use test_compact_command::*; pub use test_complete_command::*; +pub use test_config_command::*; +pub use test_delete_command::*; pub use test_ingest_command::*; pub use test_init_command::*; -pub use test_repo_alias_command::*; +pub use test_inspect_command::*; +pub use test_log_command::*; +pub use test_login_command::*; +pub use test_new_command::*; +pub use test_rename_command::*; +pub use test_repo_command::*; +pub use test_reset_command::*; +pub use test_search_command::*; pub use test_sql_command::*; pub use test_system_api_server_gql_query::*; +pub use test_system_gc_command::*; pub use test_system_generate_token_command::*; +pub use test_system_info_command::*; +pub use test_system_info_diagnose::*; +pub use test_tail_command::*; +pub use test_verify_command::*; diff --git a/src/e2e/app/cli/repo-tests/src/commands/test_add_command.rs b/src/e2e/app/cli/repo-tests/src/commands/test_add_command.rs index 4a10c33ad..022fd1ba5 100644 --- a/src/e2e/app/cli/repo-tests/src/commands/test_add_command.rs +++ b/src/e2e/app/cli/repo-tests/src/commands/test_add_command.rs @@ -8,12 +8,143 @@ // by the Apache License, Version 2.0. 
use kamu::testing::MetadataFactory; +use kamu_cli_e2e_common::DATASET_ROOT_PLAYER_SCORES_SNAPSHOT_STR; use kamu_cli_puppet::extensions::KamuCliPuppetExt; use kamu_cli_puppet::KamuCliPuppet; use opendatafabric as odf; //////////////////////////////////////////////////////////////////////////////////////////////////////////////////////// +pub async fn test_add_dataset_from_stdin(kamu: KamuCliPuppet) { + kamu.assert_success_command_execution_with_input( + ["add", "--stdin"], + DATASET_ROOT_PLAYER_SCORES_SNAPSHOT_STR, + None, + Some([indoc::indoc!( + r#" + Added: player-scores + Added 1 dataset(s) + "# + )]), + ) + .await; + + let dataset_names = kamu + .list_datasets() + .await + .into_iter() + .map(|dataset| dataset.name) + .collect::>(); + + assert_eq!(dataset_names, ["player-scores"]); +} + +//////////////////////////////////////////////////////////////////////////////////////////////////////////////////////// + +pub async fn test_add_dataset_with_name(kamu: KamuCliPuppet) { + // Add from stdio + kamu.assert_success_command_execution_with_input( + ["add", "--stdin", "--name", "player-scores-1"], + DATASET_ROOT_PLAYER_SCORES_SNAPSHOT_STR, + None, + Some([indoc::indoc!( + r#" + Added: player-scores-1 + Added 1 dataset(s) + "# + )]), + ) + .await; + + // Add from a file + let snapshot_path = kamu.workspace_path().join("player-scores.yaml"); + + std::fs::write( + snapshot_path.clone(), + DATASET_ROOT_PLAYER_SCORES_SNAPSHOT_STR, + ) + .unwrap(); + + kamu.assert_success_command_execution( + [ + "add", + "--name", + "player-scores-2", + snapshot_path.to_str().unwrap(), + ], + None, + Some([indoc::indoc!( + r#" + Added: player-scores-2 + Added 1 dataset(s) + "# + )]), + ) + .await; + + let dataset_names = kamu + .list_datasets() + .await + .into_iter() + .map(|dataset| dataset.name) + .collect::>(); + + assert_eq!(dataset_names, ["player-scores-1", "player-scores-2"]); +} + +//////////////////////////////////////////////////////////////////////////////////////////////////////////////////////// + +pub async fn test_add_dataset_with_replace(kamu: KamuCliPuppet) { + kamu.assert_success_command_execution_with_input( + ["add", "--stdin"], + DATASET_ROOT_PLAYER_SCORES_SNAPSHOT_STR, + None, + Some([indoc::indoc!( + r#" + Added: player-scores + "# + )]), + ) + .await; + + kamu.assert_success_command_execution_with_input( + ["add", "--stdin"], + DATASET_ROOT_PLAYER_SCORES_SNAPSHOT_STR, + None, + Some([indoc::indoc!( + r#" + Skipped: player-scores: Already exists + Added 0 dataset(s) + "# + )]), + ) + .await; + + kamu.assert_success_command_execution_with_input( + ["--yes", "add", "--stdin", "--replace"], + DATASET_ROOT_PLAYER_SCORES_SNAPSHOT_STR, + None, + Some([indoc::indoc!( + r#" + Added: player-scores + Added 1 dataset(s) + "# + )]), + ) + .await; + + let dataset_names = kamu + .list_datasets() + .await + .into_iter() + .map(|dataset| dataset.name) + .collect::>(); + + assert_eq!(dataset_names, ["player-scores"]); +} + +//////////////////////////////////////////////////////////////////////////////////////////////////////////////////////// + pub async fn test_add_recursive(kamu: KamuCliPuppet) { // Plain manifest let snapshot = MetadataFactory::dataset_snapshot().name("plain").build(); @@ -60,14 +191,23 @@ pub async fn test_add_recursive(kamu: KamuCliPuppet) { ) .unwrap(); - kamu.execute([ - "-v", - "add", - "--recursive", - kamu.workspace_path().as_os_str().to_str().unwrap(), - ]) - .await - .success(); + kamu.assert_success_command_execution( + [ + "-v", + "add", + "--recursive", + 
kamu.workspace_path().as_os_str().to_str().unwrap(), + ], + None, + Some([indoc::indoc!( + r#" + Added: commented + Added: plain + Added 2 dataset(s) + "# + )]), + ) + .await; let dataset_names = kamu .list_datasets() diff --git a/src/e2e/app/cli/repo-tests/src/commands/test_compact_command.rs b/src/e2e/app/cli/repo-tests/src/commands/test_compact_command.rs new file mode 100644 index 000000000..3747b77b9 --- /dev/null +++ b/src/e2e/app/cli/repo-tests/src/commands/test_compact_command.rs @@ -0,0 +1,164 @@ +// Copyright Kamu Data, Inc. and contributors. All rights reserved. +// +// Use of this software is governed by the Business Source License +// included in the LICENSE file. +// +// As of the Change Date specified in that file, in accordance with +// the Business Source License, use of this software will be governed +// by the Apache License, Version 2.0. + +use std::assert_matches::assert_matches; + +use kamu_cli_e2e_common::{ + DATASET_ROOT_PLAYER_SCORES_INGEST_DATA_NDJSON_CHUNK_1, + DATASET_ROOT_PLAYER_SCORES_INGEST_DATA_NDJSON_CHUNK_2, + DATASET_ROOT_PLAYER_SCORES_SNAPSHOT_STR, +}; +use kamu_cli_puppet::extensions::KamuCliPuppetExt; +use kamu_cli_puppet::KamuCliPuppet; +use opendatafabric::{DatasetName, MetadataEvent}; + +//////////////////////////////////////////////////////////////////////////////////////////////////////////////////////// + +pub async fn test_compact_hard(kamu: KamuCliPuppet) { + let dataset_name = DatasetName::new_unchecked("player-scores"); + + kamu.execute_with_input(["add", "--stdin"], DATASET_ROOT_PLAYER_SCORES_SNAPSHOT_STR) + .await + .success(); + + kamu.ingest_data( + &dataset_name, + DATASET_ROOT_PLAYER_SCORES_INGEST_DATA_NDJSON_CHUNK_1, + ) + .await; + kamu.ingest_data( + &dataset_name, + DATASET_ROOT_PLAYER_SCORES_INGEST_DATA_NDJSON_CHUNK_2, + ) + .await; + + let blocks_before_compacting = kamu.list_blocks(&dataset_name).await; + + kamu.assert_success_command_execution( + [ + "--yes", + "system", + "compact", + dataset_name.as_str(), + "--hard", + ], + None, + Some(["1 dataset(s) were compacted"]), + ) + .await; + + let blocks_after_compacting = kamu.list_blocks(&dataset_name).await; + assert_eq!( + blocks_before_compacting.len() - 1, + blocks_after_compacting.len() + ); + assert_matches!( + blocks_after_compacting.first().unwrap().block.event, + MetadataEvent::AddData(_) + ); +} + +//////////////////////////////////////////////////////////////////////////////////////////////////////////////////////// + +pub async fn test_compact_keep_metadata_only(kamu: KamuCliPuppet) { + let dataset_name = DatasetName::new_unchecked("player-scores"); + + kamu.execute_with_input(["add", "--stdin"], DATASET_ROOT_PLAYER_SCORES_SNAPSHOT_STR) + .await + .success(); + + kamu.ingest_data( + &dataset_name, + DATASET_ROOT_PLAYER_SCORES_INGEST_DATA_NDJSON_CHUNK_1, + ) + .await; + kamu.ingest_data( + &dataset_name, + DATASET_ROOT_PLAYER_SCORES_INGEST_DATA_NDJSON_CHUNK_2, + ) + .await; + + let blocks_before_compacting = kamu.list_blocks(&dataset_name).await; + + kamu.assert_success_command_execution( + [ + "--yes", + "system", + "compact", + dataset_name.as_str(), + "--hard", + "--keep-metadata-only", + ], + None, + Some(["1 dataset(s) were compacted"]), + ) + .await; + + let blocks_after_compacting = kamu.list_blocks(&dataset_name).await; + assert_eq!( + blocks_before_compacting.len() - 2, + blocks_after_compacting.len() + ); + assert_matches!( + blocks_after_compacting.first().unwrap().block.event, + MetadataEvent::SetDataSchema(_) + ); +} + 
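For context on the `len() - 1` and `len() - 2` assertions in the two compaction fixtures above: the fixture ingests two NDJSON chunks, so, assuming one `AddData` block per ingested chunk, a plain `--hard` compaction merges the data blocks into a single one, while adding `--keep-metadata-only` drops them entirely and leaves `SetDataSchema` as the newest block. A toy illustration of that arithmetic follows; the per-chunk block layout is an assumption of this sketch.

```rust
// Toy illustration of the block-count deltas asserted above; assumes one
// AddData block per ingested chunk.
fn expected_len_after_compaction(len_before: usize, keep_metadata_only: bool) -> usize {
    const INGESTED_CHUNKS: usize = 2;
    if keep_metadata_only {
        // every AddData block is removed, metadata blocks remain
        len_before - INGESTED_CHUNKS
    } else {
        // the AddData blocks are merged into a single one
        len_before - (INGESTED_CHUNKS - 1)
    }
}

#[test]
fn compaction_block_count_examples() {
    assert_eq!(expected_len_after_compaction(5, false), 4); // `len() - 1`
    assert_eq!(expected_len_after_compaction(5, true), 3); // `len() - 2`
}
```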
+//////////////////////////////////////////////////////////////////////////////////////////////////////////////////////// + +pub async fn test_compact_verify(kamu: KamuCliPuppet) { + let dataset_name = DatasetName::new_unchecked("player-scores"); + + kamu.execute_with_input(["add", "--stdin"], DATASET_ROOT_PLAYER_SCORES_SNAPSHOT_STR) + .await + .success(); + + kamu.ingest_data( + &dataset_name, + DATASET_ROOT_PLAYER_SCORES_INGEST_DATA_NDJSON_CHUNK_1, + ) + .await; + kamu.ingest_data( + &dataset_name, + DATASET_ROOT_PLAYER_SCORES_INGEST_DATA_NDJSON_CHUNK_2, + ) + .await; + + let blocks_before_compacting = kamu.list_blocks(&dataset_name).await; + + kamu.assert_success_command_execution( + [ + "--yes", + "system", + "compact", + dataset_name.as_str(), + "--hard", + "--verify", + ], + None, + Some([ + "verify with dataset_ref: player-scores", + "1 dataset(s) were compacted", + ]), + ) + .await; + + let blocks_after_compacting = kamu.list_blocks(&dataset_name).await; + assert_eq!( + blocks_before_compacting.len() - 1, + blocks_after_compacting.len() + ); + assert_matches!( + blocks_after_compacting.first().unwrap().block.event, + MetadataEvent::AddData(_) + ); +} + +//////////////////////////////////////////////////////////////////////////////////////////////////////////////////////// diff --git a/src/e2e/app/cli/repo-tests/src/commands/test_config_command.rs b/src/e2e/app/cli/repo-tests/src/commands/test_config_command.rs new file mode 100644 index 000000000..8f5df7ddb --- /dev/null +++ b/src/e2e/app/cli/repo-tests/src/commands/test_config_command.rs @@ -0,0 +1,176 @@ +// Copyright Kamu Data, Inc. and contributors. All rights reserved. +// +// Use of this software is governed by the Business Source License +// included in the LICENSE file. +// +// As of the Change Date specified in that file, in accordance with +// the Business Source License, use of this software will be governed +// by the Apache License, Version 2.0. + +use kamu_cli_puppet::extensions::KamuCliPuppetExt; +use kamu_cli_puppet::KamuCliPuppet; + +//////////////////////////////////////////////////////////////////////////////////////////////////////////////////////// + +pub async fn test_config_set_value(kamu: KamuCliPuppet) { + // 0. CI sets container runtime to podman for some targets, so we simulate this + // behavior for all others. + kamu.assert_success_command_execution( + ["config", "set", "engine.runtime", "podman"], + None, + Some(["Set engine.runtime to podman in workspace scope"]), + ) + .await; + + // 1. Set flow for the "engine.networkNs" key + kamu.assert_success_command_execution( + ["config", "list"], + Some(indoc::indoc!( + r#" + engine: + runtime: podman + + "# + )), + None::>, + ) + .await; + + kamu.assert_success_command_execution( + ["config", "set", "engine.networkNs", "host"], + None, + Some(["Set engine.networkNs to host in workspace scope"]), + ) + .await; + + kamu.assert_success_command_execution( + ["config", "get", "engine.networkNs"], + Some(indoc::indoc!( + r#" + host + + "# + )), + None::>, + ) + .await; + + kamu.assert_success_command_execution( + ["config", "list"], + Some(indoc::indoc!( + r#" + engine: + runtime: podman + networkNs: host + + "# + )), + None::>, + ) + .await; + + // 2. 
Set flow for the "uploads.maxFileSizeInMb" key + kamu.assert_success_command_execution( + ["config", "set", "uploads.maxFileSizeInMb", "42"], + None, + Some(["Set uploads.maxFileSizeInMb to 42 in workspace scope"]), + ) + .await; + + kamu.assert_success_command_execution( + ["config", "get", "uploads.maxFileSizeInMb"], + Some(indoc::indoc!( + r#" + 42 + + "# + )), + None::>, + ) + .await; + + kamu.assert_success_command_execution( + ["config", "list"], + Some(indoc::indoc!( + r#" + engine: + runtime: podman + networkNs: host + uploads: + maxFileSizeInMb: 42 + + "# + )), + None::>, + ) + .await; +} + +//////////////////////////////////////////////////////////////////////////////////////////////////////////////////////// + +pub async fn test_config_reset_key(kamu: KamuCliPuppet) { + kamu.assert_success_command_execution( + ["config", "set", "engine.networkNs", "host"], + None, + Some(["Set engine.networkNs to host in workspace scope"]), + ) + .await; + + kamu.assert_success_command_execution( + ["config", "set", "engine.networkNs"], + None, + Some(["Removed engine.networkNs from workspace scope"]), + ) + .await; + + kamu.assert_failure_command_execution( + ["config", "get", "engine.networkNs"], + None, + Some(["Error: Key engine.networkNs not found"]), + ) + .await; +} + +//////////////////////////////////////////////////////////////////////////////////////////////////////////////////////// + +pub async fn test_config_get_with_default(kamu: KamuCliPuppet) { + kamu.assert_failure_command_execution( + ["config", "get", "engine.networkNs"], + None, + Some(["Error: Key engine.networkNs not found"]), + ) + .await; + + kamu.assert_success_command_execution( + ["config", "get", "engine.networkNs", "--with-defaults"], + Some(indoc::indoc!( + r#" + private + + "# + )), + None::>, + ) + .await; +} + +//////////////////////////////////////////////////////////////////////////////////////////////////////////////////////// + +pub async fn test_config_get_from_config(kamu: KamuCliPuppet) { + kamu.assert_success_command_execution( + ["config", "list"], + Some(indoc::indoc!( + r#" + engine: + runtime: podman + uploads: + maxFileSizeInMb: 42 + + "# + )), + None::>, + ) + .await; +} + +//////////////////////////////////////////////////////////////////////////////////////////////////////////////////////// diff --git a/src/e2e/app/cli/repo-tests/src/commands/test_delete_command.rs b/src/e2e/app/cli/repo-tests/src/commands/test_delete_command.rs new file mode 100644 index 000000000..0d6cae2e0 --- /dev/null +++ b/src/e2e/app/cli/repo-tests/src/commands/test_delete_command.rs @@ -0,0 +1,168 @@ +// Copyright Kamu Data, Inc. and contributors. All rights reserved. +// +// Use of this software is governed by the Business Source License +// included in the LICENSE file. +// +// As of the Change Date specified in that file, in accordance with +// the Business Source License, use of this software will be governed +// by the Apache License, Version 2.0. 
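The `test_config_get_from_config` case above expects `engine.runtime` and `uploads.maxFileSizeInMb` to already be present before the test runs, which implies the harness seeds the workspace configuration up front. A minimal sketch of such a seeding step follows; the `.kamu/config` location and the idea that the file stores exactly the YAML that `kamu config list` prints back are assumptions on my part (the real file may wrap this in a manifest envelope).

```rust
use std::path::Path;

/// Hypothetical setup for `test_config_get_from_config`: pre-populate the
/// workspace config so that `kamu config list` reports these values.
/// The `.kamu/config` path and the bare-YAML layout are assumptions.
fn seed_workspace_config(workspace_path: &Path) -> std::io::Result<()> {
    let config_path = workspace_path.join(".kamu").join("config");
    std::fs::write(
        config_path,
        indoc::indoc!(
            r#"
            engine:
              runtime: podman
            uploads:
              maxFileSizeInMb: 42
            "#
        ),
    )
}
```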
+ +use kamu_cli_e2e_common::{ + DATASET_DERIVATIVE_LEADERBOARD_SNAPSHOT_STR, + DATASET_ROOT_PLAYER_SCORES_SNAPSHOT_STR, +}; +use kamu_cli_puppet::extensions::KamuCliPuppetExt; +use kamu_cli_puppet::KamuCliPuppet; + +//////////////////////////////////////////////////////////////////////////////////////////////////////////////////////// + +pub async fn test_delete_dataset(kamu: KamuCliPuppet) { + kamu.assert_failure_command_execution( + ["delete", "player-scores"], + None, + Some(["Error: Dataset not found: player-scores"]), + ) + .await; + + kamu.execute_with_input(["add", "--stdin"], DATASET_ROOT_PLAYER_SCORES_SNAPSHOT_STR) + .await + .success(); + + kamu.assert_success_command_execution( + ["--yes", "delete", "player-scores"], + None, + Some(["Deleted 1 dataset(s)"]), + ) + .await; + + let dataset_names = kamu + .list_datasets() + .await + .into_iter() + .map(|dataset| dataset.name) + .collect::>(); + + assert!( + dataset_names.is_empty(), + "Unexpected dataset names: {dataset_names:?}" + ); +} + +//////////////////////////////////////////////////////////////////////////////////////////////////////////////////////// + +pub async fn test_delete_dataset_recursive(kamu: KamuCliPuppet) { + // 1. Root + kamu.execute_with_input(["add", "--stdin"], DATASET_ROOT_PLAYER_SCORES_SNAPSHOT_STR) + .await + .success(); + + // 2. Derivative (from 1.) + kamu.execute_with_input( + ["add", "--stdin"], + DATASET_DERIVATIVE_LEADERBOARD_SNAPSHOT_STR, + ) + .await + .success(); + + // 3. One more root + kamu.execute_with_input( + ["add", "--stdin", "--name", "another-root"], + DATASET_ROOT_PLAYER_SCORES_SNAPSHOT_STR, + ) + .await + .success(); + + { + let dataset_names = kamu + .list_datasets() + .await + .into_iter() + .map(|dataset| dataset.name) + .collect::>(); + + assert_eq!( + dataset_names, + ["another-root", "leaderboard", "player-scores"] + ); + } + + kamu.assert_success_command_execution( + ["--yes", "delete", "player-scores", "--recursive"], + None, + Some(["Deleted 2 dataset(s)"]), + ) + .await; + + { + let dataset_names = kamu + .list_datasets() + .await + .into_iter() + .map(|dataset| dataset.name) + .collect::>(); + + assert_eq!(dataset_names, ["another-root"]); + } +} + +//////////////////////////////////////////////////////////////////////////////////////////////////////////////////////// + +pub async fn test_delete_dataset_all(kamu: KamuCliPuppet) { + // 1. Root + kamu.execute_with_input(["add", "--stdin"], DATASET_ROOT_PLAYER_SCORES_SNAPSHOT_STR) + .await + .success(); + + // 2. Derivative (from 1.) + kamu.execute_with_input( + ["add", "--stdin"], + DATASET_DERIVATIVE_LEADERBOARD_SNAPSHOT_STR, + ) + .await + .success(); + + // 3. 
One more root + kamu.execute_with_input( + ["add", "--stdin", "--name", "another-root"], + DATASET_ROOT_PLAYER_SCORES_SNAPSHOT_STR, + ) + .await + .success(); + + { + let dataset_names = kamu + .list_datasets() + .await + .into_iter() + .map(|dataset| dataset.name) + .collect::>(); + + assert_eq!( + dataset_names, + ["another-root", "leaderboard", "player-scores"] + ); + } + + kamu.assert_success_command_execution( + ["--yes", "delete", "--all"], + None, + Some(["Deleted 3 dataset(s)"]), + ) + .await; + + { + let dataset_names = kamu + .list_datasets() + .await + .into_iter() + .map(|dataset| dataset.name) + .collect::>(); + + assert!( + dataset_names.is_empty(), + "Unexpected dataset names: {dataset_names:?}" + ); + } +} + +//////////////////////////////////////////////////////////////////////////////////////////////////////////////////////// diff --git a/src/e2e/app/cli/repo-tests/src/commands/test_ingest_command.rs b/src/e2e/app/cli/repo-tests/src/commands/test_ingest_command.rs index 1181b36de..7f20b585f 100644 --- a/src/e2e/app/cli/repo-tests/src/commands/test_ingest_command.rs +++ b/src/e2e/app/cli/repo-tests/src/commands/test_ingest_command.rs @@ -9,17 +9,21 @@ use std::path::Path; -use chrono::{TimeZone, Utc}; use indoc::indoc; +use kamu_cli_e2e_common::{ + DATASET_DERIVATIVE_LEADERBOARD_SNAPSHOT_STR, + DATASET_ROOT_PLAYER_SCORES_INGEST_DATA_NDJSON_CHUNK_1, + DATASET_ROOT_PLAYER_SCORES_INGEST_DATA_NDJSON_CHUNK_2, + DATASET_ROOT_PLAYER_SCORES_INGEST_DATA_NDJSON_CHUNK_3, + DATASET_ROOT_PLAYER_SCORES_SNAPSHOT_STR, +}; use kamu_cli_puppet::extensions::KamuCliPuppetExt; use kamu_cli_puppet::KamuCliPuppet; use opendatafabric::*; //////////////////////////////////////////////////////////////////////////////////////////////////////////////////////// -pub async fn test_push_ingest_from_file_ledger(mut kamu: KamuCliPuppet) { - kamu.set_system_time(Some(Utc.with_ymd_and_hms(2000, 1, 1, 0, 0, 0).unwrap())); - +pub async fn test_push_ingest_from_file_ledger(kamu: KamuCliPuppet) { kamu.add_dataset(DatasetSnapshot { name: "population".try_into().unwrap(), kind: DatasetKind::Root, @@ -86,9 +90,9 @@ pub async fn test_push_ingest_from_file_ledger(mut kamu: KamuCliPuppet) { +--------+----+----------------------+----------------------+------+------------+ | offset | op | system_time | event_time | city | population | +--------+----+----------------------+----------------------+------+------------+ - | 0 | 0 | 2000-01-01T00:00:00Z | 2020-01-01T00:00:00Z | A | 1000 | - | 1 | 0 | 2000-01-01T00:00:00Z | 2020-01-01T00:00:00Z | B | 2000 | - | 2 | 0 | 2000-01-01T00:00:00Z | 2020-01-01T00:00:00Z | C | 3000 | + | 0 | 0 | 2050-01-02T03:04:05Z | 2020-01-01T00:00:00Z | A | 1000 | + | 1 | 0 | 2050-01-02T03:04:05Z | 2020-01-01T00:00:00Z | B | 2000 | + | 2 | 0 | 2050-01-02T03:04:05Z | 2020-01-01T00:00:00Z | C | 3000 | +--------+----+----------------------+----------------------+------+------------+ "# ), @@ -96,9 +100,9 @@ pub async fn test_push_ingest_from_file_ledger(mut kamu: KamuCliPuppet) { .await; } -pub async fn test_push_ingest_from_file_snapshot_with_event_time(mut kamu: KamuCliPuppet) { - kamu.set_system_time(Some(Utc.with_ymd_and_hms(2000, 1, 1, 0, 0, 0).unwrap())); +//////////////////////////////////////////////////////////////////////////////////////////////////////////////////////// +pub async fn test_push_ingest_from_file_snapshot_with_event_time(kamu: KamuCliPuppet) { kamu.add_dataset(DatasetSnapshot { name: "population".try_into().unwrap(), kind: DatasetKind::Root, @@ -168,9 +172,9 @@ pub async fn 
test_push_ingest_from_file_snapshot_with_event_time(mut kamu: KamuC +--------+----+----------------------+----------------------+------+------------+ | offset | op | system_time | event_time | city | population | +--------+----+----------------------+----------------------+------+------------+ - | 0 | 0 | 2000-01-01T00:00:00Z | 2050-01-01T00:00:00Z | A | 1000 | - | 1 | 0 | 2000-01-01T00:00:00Z | 2050-01-01T00:00:00Z | B | 2000 | - | 2 | 0 | 2000-01-01T00:00:00Z | 2050-01-01T00:00:00Z | C | 3000 | + | 0 | 0 | 2050-01-02T03:04:05Z | 2050-01-01T00:00:00Z | A | 1000 | + | 1 | 0 | 2050-01-02T03:04:05Z | 2050-01-01T00:00:00Z | B | 2000 | + | 2 | 0 | 2050-01-02T03:04:05Z | 2050-01-01T00:00:00Z | C | 3000 | +--------+----+----------------------+----------------------+------+------------+ "# ), @@ -180,8 +184,243 @@ pub async fn test_push_ingest_from_file_snapshot_with_event_time(mut kamu: KamuC //////////////////////////////////////////////////////////////////////////////////////////////////////////////////////// +pub async fn test_ingest_from_stdin(kamu: KamuCliPuppet) { + kamu.execute_with_input(["add", "--stdin"], DATASET_ROOT_PLAYER_SCORES_SNAPSHOT_STR) + .await + .success(); + + assert_ingest_data_to_player_scores_from_stdio( + &kamu, + ["ingest", "player-scores", "--stdin"], + DATASET_ROOT_PLAYER_SCORES_INGEST_DATA_NDJSON_CHUNK_1, + indoc::indoc!( + r#" + ┌────┬──────────────────────┬──────────────────────┬──────────┬───────────┬───────┐ + │ op │ system_time │ match_time │ match_id │ player_id │ score │ + ├────┼──────────────────────┼──────────────────────┼──────────┼───────────┼───────┤ + │ 0 │ 2050-01-02T03:04:05Z │ 2000-01-01T00:00:00Z │ 1 │ Bob │ 80 │ + │ 0 │ 2050-01-02T03:04:05Z │ 2000-01-01T00:00:00Z │ 1 │ Alice │ 100 │ + └────┴──────────────────────┴──────────────────────┴──────────┴───────────┴───────┘ + "# + ), + ) + .await; + + assert_ingest_data_to_player_scores_from_stdio( + &kamu, + ["ingest", "player-scores", "--stdin"], + DATASET_ROOT_PLAYER_SCORES_INGEST_DATA_NDJSON_CHUNK_2, + indoc::indoc!( + r#" + ┌────┬──────────────────────┬──────────────────────┬──────────┬───────────┬───────┐ + │ op │ system_time │ match_time │ match_id │ player_id │ score │ + ├────┼──────────────────────┼──────────────────────┼──────────┼───────────┼───────┤ + │ 0 │ 2050-01-02T03:04:05Z │ 2000-01-01T00:00:00Z │ 1 │ Bob │ 80 │ + │ 0 │ 2050-01-02T03:04:05Z │ 2000-01-01T00:00:00Z │ 1 │ Alice │ 100 │ + │ 0 │ 2050-01-02T03:04:05Z │ 2000-01-02T00:00:00Z │ 2 │ Alice │ 70 │ + │ 0 │ 2050-01-02T03:04:05Z │ 2000-01-02T00:00:00Z │ 2 │ Charlie │ 90 │ + └────┴──────────────────────┴──────────────────────┴──────────┴───────────┴───────┘ + "# + ), + ) + .await; + + assert_ingest_data_to_player_scores_from_stdio( + &kamu, + ["ingest", "player-scores", "--stdin"], + DATASET_ROOT_PLAYER_SCORES_INGEST_DATA_NDJSON_CHUNK_3, + indoc::indoc!( + r#" + ┌────┬──────────────────────┬──────────────────────┬──────────┬───────────┬───────┐ + │ op │ system_time │ match_time │ match_id │ player_id │ score │ + ├────┼──────────────────────┼──────────────────────┼──────────┼───────────┼───────┤ + │ 0 │ 2050-01-02T03:04:05Z │ 2000-01-01T00:00:00Z │ 1 │ Bob │ 80 │ + │ 0 │ 2050-01-02T03:04:05Z │ 2000-01-01T00:00:00Z │ 1 │ Alice │ 100 │ + │ 0 │ 2050-01-02T03:04:05Z │ 2000-01-02T00:00:00Z │ 2 │ Alice │ 70 │ + │ 0 │ 2050-01-02T03:04:05Z │ 2000-01-02T00:00:00Z │ 2 │ Charlie │ 90 │ + │ 0 │ 2050-01-02T03:04:05Z │ 2000-01-03T00:00:00Z │ 3 │ Bob │ 60 │ + │ 0 │ 2050-01-02T03:04:05Z │ 2000-01-03T00:00:00Z │ 3 │ Charlie │ 110 │ + 
└────┴──────────────────────┴──────────────────────┴──────────┴───────────┴───────┘ + "# + ), + ) + .await; +} + +//////////////////////////////////////////////////////////////////////////////////////////////////////////////////////// + +pub async fn test_ingest_recursive(kamu: KamuCliPuppet) { + // 0. Add datasets: the root dataset and its derived dataset + kamu.execute_with_input(["add", "--stdin"], DATASET_ROOT_PLAYER_SCORES_SNAPSHOT_STR) + .await + .success(); + + kamu.execute_with_input( + ["add", "--stdin"], + DATASET_DERIVATIVE_LEADERBOARD_SNAPSHOT_STR, + ) + .await + .success(); + + kamu.assert_failure_command_execution( + ["tail", "leaderboard", "--output-format", "table"], + None, + Some(["Error: Dataset schema is not yet available: leaderboard"]), + ) + .await; + + // TODO: `kamu ingest`: implement `--recursive` mode + // https://github.com/kamu-data/kamu-cli/issues/886 + + // 1. Ingest data: the first chunk + // { + // let assert = kamu + // .execute_with_input( + // ["ingest", "player-scores", "--stdin", "--recursive"], + // DATASET_ROOT_PLAYER_SCORES_INGEST_DATA_NDJSON_CHUNK_1, + // ) + // .await + // .success(); + // + // let stderr = + // std::str::from_utf8(&assert.get_output().stderr).unwrap(); + // + // assert!( + // stderr.contains("Dataset updated"), + // "Unexpected output:\n{stderr}", + // ); + // } + + // TODO: check via the tail command added data in the derived dataset + // (leaderboard) + + // TODO: do the same for 2nd & 3rd chunks +} + +//////////////////////////////////////////////////////////////////////////////////////////////////////////////////////// + +pub async fn test_ingest_with_source_name(kamu: KamuCliPuppet) { + kamu.execute_with_input(["add", "--stdin"], DATASET_ROOT_PLAYER_SCORES_SNAPSHOT_STR) + .await + .success(); + + assert_ingest_data_to_player_scores_from_stdio( + &kamu, + [ + "ingest", + "player-scores", + "--stdin", + "--source-name", + "default", + ], + DATASET_ROOT_PLAYER_SCORES_INGEST_DATA_NDJSON_CHUNK_1, + indoc::indoc!( + r#" + ┌────┬──────────────────────┬──────────────────────┬──────────┬───────────┬───────┐ + │ op │ system_time │ match_time │ match_id │ player_id │ score │ + ├────┼──────────────────────┼──────────────────────┼──────────┼───────────┼───────┤ + │ 0 │ 2050-01-02T03:04:05Z │ 2000-01-01T00:00:00Z │ 1 │ Bob │ 80 │ + │ 0 │ 2050-01-02T03:04:05Z │ 2000-01-01T00:00:00Z │ 1 │ Alice │ 100 │ + └────┴──────────────────────┴──────────────────────┴──────────┴───────────┴───────┘ + "# + ), + ) + .await; + + assert_ingest_data_to_player_scores_from_stdio( + &kamu, + [ + "ingest", + "player-scores", + "--stdin", + "--source-name", + "default", + ], + DATASET_ROOT_PLAYER_SCORES_INGEST_DATA_NDJSON_CHUNK_2, + indoc::indoc!( + r#" + ┌────┬──────────────────────┬──────────────────────┬──────────┬───────────┬───────┐ + │ op │ system_time │ match_time │ match_id │ player_id │ score │ + ├────┼──────────────────────┼──────────────────────┼──────────┼───────────┼───────┤ + │ 0 │ 2050-01-02T03:04:05Z │ 2000-01-01T00:00:00Z │ 1 │ Bob │ 80 │ + │ 0 │ 2050-01-02T03:04:05Z │ 2000-01-01T00:00:00Z │ 1 │ Alice │ 100 │ + │ 0 │ 2050-01-02T03:04:05Z │ 2000-01-02T00:00:00Z │ 2 │ Alice │ 70 │ + │ 0 │ 2050-01-02T03:04:05Z │ 2000-01-02T00:00:00Z │ 2 │ Charlie │ 90 │ + └────┴──────────────────────┴──────────────────────┴──────────┴───────────┴───────┘ + "# + ), + ) + .await; + + assert_ingest_data_to_player_scores_from_stdio( + &kamu, + [ + "ingest", + "player-scores", + "--stdin", + "--source-name", + "default", + ], + 
DATASET_ROOT_PLAYER_SCORES_INGEST_DATA_NDJSON_CHUNK_3,
+        indoc::indoc!(
+            r#"
+            ┌────┬──────────────────────┬──────────────────────┬──────────┬───────────┬───────┐
+            │ op │ system_time          │ match_time           │ match_id │ player_id │ score │
+            ├────┼──────────────────────┼──────────────────────┼──────────┼───────────┼───────┤
+            │ 0  │ 2050-01-02T03:04:05Z │ 2000-01-01T00:00:00Z │ 1        │ Bob       │ 80    │
+            │ 0  │ 2050-01-02T03:04:05Z │ 2000-01-01T00:00:00Z │ 1        │ Alice     │ 100   │
+            │ 0  │ 2050-01-02T03:04:05Z │ 2000-01-02T00:00:00Z │ 2        │ Alice     │ 70    │
+            │ 0  │ 2050-01-02T03:04:05Z │ 2000-01-02T00:00:00Z │ 2        │ Charlie   │ 90    │
+            │ 0  │ 2050-01-02T03:04:05Z │ 2000-01-03T00:00:00Z │ 3        │ Bob       │ 60    │
+            │ 0  │ 2050-01-02T03:04:05Z │ 2000-01-03T00:00:00Z │ 3        │ Charlie   │ 110   │
+            └────┴──────────────────────┴──────────────────────┴──────────┴───────────┴───────┘
+            "#
+        ),
+    )
+    .await;
+}
+
+////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
+// Helpers
+////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
+
 fn path(p: &Path) -> &str {
     p.as_os_str().to_str().unwrap()
 }
 
 ////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
+
+async fn assert_ingest_data_to_player_scores_from_stdio<I, S, T>(
+    kamu: &KamuCliPuppet,
+    ingest_cmd: I,
+    ingest_data: T,
+    expected_tail_table: &str,
+) where
+    I: IntoIterator<Item = S> + Send + Clone,
+    S: AsRef<std::ffi::OsStr>,
+    T: Into<Vec<u8>> + Send + Clone,
+{
+    // Ingest
+    kamu.assert_success_command_execution_with_input(
+        ingest_cmd.clone(),
+        ingest_data.clone(),
+        None,
+        Some(["Dataset updated"]),
+    )
+    .await;
+
+    // Trying to ingest the same data
+    kamu.assert_success_command_execution_with_input(
+        ingest_cmd,
+        ingest_data,
+        None,
+        Some(["Dataset up-to-date"]),
+    )
+    .await;
+
+    // Assert ingested data
+    kamu.assert_player_scores_dataset_data(expected_tail_table)
+        .await;
+}
+
+////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
diff --git a/src/e2e/app/cli/repo-tests/src/commands/test_init_command.rs b/src/e2e/app/cli/repo-tests/src/commands/test_init_command.rs
index 06573431b..8f8c48319 100644
--- a/src/e2e/app/cli/repo-tests/src/commands/test_init_command.rs
+++ b/src/e2e/app/cli/repo-tests/src/commands/test_init_command.rs
@@ -8,6 +8,7 @@
 // by the Apache License, Version 2.0.
 
use kamu_cli::{DEFAULT_MULTI_TENANT_SQLITE_DATABASE_NAME, KAMU_WORKSPACE_DIR_NAME}; +use kamu_cli_puppet::extensions::KamuCliPuppetExt; use kamu_cli_puppet::KamuCliPuppet; //////////////////////////////////////////////////////////////////////////////////////////////////////////////////////// @@ -86,3 +87,19 @@ pub async fn test_init_exist_ok_mt(mut kamu: KamuCliPuppet) { } //////////////////////////////////////////////////////////////////////////////////////////////////////////////////////// + +pub async fn test_init_in_an_existing_workspace(mut kamu: KamuCliPuppet) { + kamu.set_workspace_path_in_tmp_dir(); + + kamu.assert_success_command_execution(["init"], None, Some(["Initialized an empty workspace"])) + .await; + + kamu.assert_failure_command_execution( + ["init"], + None, + Some(["Error: Directory is already a kamu workspace"]), + ) + .await; +} + +//////////////////////////////////////////////////////////////////////////////////////////////////////////////////////// diff --git a/src/e2e/app/cli/repo-tests/src/commands/test_inspect_command.rs b/src/e2e/app/cli/repo-tests/src/commands/test_inspect_command.rs new file mode 100644 index 000000000..d2c12670e --- /dev/null +++ b/src/e2e/app/cli/repo-tests/src/commands/test_inspect_command.rs @@ -0,0 +1,209 @@ +// Copyright Kamu Data, Inc. and contributors. All rights reserved. +// +// Use of this software is governed by the Business Source License +// included in the LICENSE file. +// +// As of the Change Date specified in that file, in accordance with +// the Business Source License, use of this software will be governed +// by the Apache License, Version 2.0. + +use kamu_cli_e2e_common::{ + DATASET_DERIVATIVE_LEADERBOARD_SNAPSHOT_STR, + DATASET_ROOT_PLAYER_SCORES_INGEST_DATA_NDJSON_CHUNK_1, + DATASET_ROOT_PLAYER_SCORES_SNAPSHOT_STR, +}; +use kamu_cli_puppet::extensions::KamuCliPuppetExt; +use kamu_cli_puppet::KamuCliPuppet; +use opendatafabric::{DatasetName, EnumWithVariants, SetTransform}; + +//////////////////////////////////////////////////////////////////////////////////////////////////////////////////////// + +pub async fn test_inspect_lineage(kamu: KamuCliPuppet) { + kamu.execute_with_input(["add", "--stdin"], DATASET_ROOT_PLAYER_SCORES_SNAPSHOT_STR) + .await + .success(); + + kamu.execute_with_input( + ["add", "--stdin"], + DATASET_DERIVATIVE_LEADERBOARD_SNAPSHOT_STR, + ) + .await + .success(); + + kamu.assert_success_command_execution( + ["inspect", "lineage", "--output-format", "shell"], + Some(indoc::indoc!( + r#" + leaderboard: Derivative + └── player-scores: Root + player-scores: Root + "# + )), + None::>, + ) + .await; +} + +//////////////////////////////////////////////////////////////////////////////////////////////////////////////////////// + +pub async fn test_inspect_query(kamu: KamuCliPuppet) { + kamu.execute_with_input(["add", "--stdin"], DATASET_ROOT_PLAYER_SCORES_SNAPSHOT_STR) + .await + .success(); + + let player_scores_dataset_id = kamu + .list_datasets() + .await + .into_iter() + .find_map(|dataset| { + if dataset.name == DatasetName::new_unchecked("player-scores") { + Some(dataset.id) + } else { + None + } + }) + .unwrap(); + + kamu.execute_with_input( + ["add", "--stdin"], + DATASET_DERIVATIVE_LEADERBOARD_SNAPSHOT_STR, + ) + .await + .success(); + + let leaderboard_transform_block_hash = kamu + .list_blocks(&DatasetName::new_unchecked("leaderboard")) + .await + .into_iter() + .find_map(|block| { + if block.block.event.as_variant::().is_some() { + Some(block.block_hash) + } else { + None + } + }) + .unwrap(); + 
+ kamu.assert_success_command_execution( + ["inspect", "query", "player-scores"], + Some(""), + None::>, + ) + .await; + + kamu.assert_success_command_execution( + ["inspect", "query", "leaderboard"], + Some( + indoc::formatdoc!( + r#" + Transform: {leaderboard_transform_block_hash} + As Of: 2050-01-02T03:04:05Z + Inputs: + player_scores {player_scores_dataset_id} + Engine: risingwave (None) + Query: leaderboard + create materialized view leaderboard as + select + * + from ( + select + row_number() over (partition by 1 order by score desc) as place, + match_time, + match_id, + player_id, + score + from player_scores + ) + where place <= 2 + Query: leaderboard + select * from leaderboard + "# + ) + .as_str(), + ), + None::>, + ) + .await; +} + +//////////////////////////////////////////////////////////////////////////////////////////////////////////////////////// + +pub async fn test_inspect_schema(kamu: KamuCliPuppet) { + kamu.execute_with_input(["add", "--stdin"], DATASET_ROOT_PLAYER_SCORES_SNAPSHOT_STR) + .await + .success(); + + kamu.assert_success_command_execution( + ["inspect", "schema", "player-scores"], + None, + Some(["Warning: Dataset schema is not yet available: player-scores"]), + ) + .await; + + kamu.execute_with_input( + ["add", "--stdin"], + DATASET_DERIVATIVE_LEADERBOARD_SNAPSHOT_STR, + ) + .await + .success(); + + kamu.assert_success_command_execution( + [ + "inspect", + "schema", + "leaderboard", + "--output-format", + "parquet", + ], + None, + Some(["Warning: Dataset schema is not yet available: leaderboard"]), + ) + .await; + + kamu.execute_with_input( + ["ingest", "player-scores", "--stdin"], + DATASET_ROOT_PLAYER_SCORES_INGEST_DATA_NDJSON_CHUNK_1, + ) + .await + .success(); + + kamu.assert_success_command_execution( + [ + "inspect", + "schema", + "player-scores", + "--output-format", + "parquet", + ], + Some(indoc::indoc!( + r#" + message arrow_schema { + REQUIRED INT64 offset; + REQUIRED INT32 op; + REQUIRED INT64 system_time (TIMESTAMP(MILLIS,true)); + OPTIONAL INT64 match_time (TIMESTAMP(MILLIS,true)); + OPTIONAL INT64 match_id; + OPTIONAL BYTE_ARRAY player_id (STRING); + OPTIONAL INT64 score; + } + "# + )), + None::>, + ) + .await; + + kamu.assert_success_command_execution( + [ + "inspect", + "schema", + "leaderboard", + "--output-format", + "parquet", + ], + None, + Some(["Warning: Dataset schema is not yet available: leaderboard"]), + ) + .await; +} + +//////////////////////////////////////////////////////////////////////////////////////////////////////////////////////// diff --git a/src/e2e/app/cli/repo-tests/src/commands/test_log_command.rs b/src/e2e/app/cli/repo-tests/src/commands/test_log_command.rs new file mode 100644 index 000000000..8ae5550a2 --- /dev/null +++ b/src/e2e/app/cli/repo-tests/src/commands/test_log_command.rs @@ -0,0 +1,306 @@ +// Copyright Kamu Data, Inc. and contributors. All rights reserved. +// +// Use of this software is governed by the Business Source License +// included in the LICENSE file. +// +// As of the Change Date specified in that file, in accordance with +// the Business Source License, use of this software will be governed +// by the Apache License, Version 2.0. 
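The `message arrow_schema { ... }` block expected from `kamu inspect schema --output-format parquet` above is the standard Parquet textual schema dump. If you want to cross-check it against a data file directly, the `parquet` crate can print the same form; locating the dataset's Parquet files inside the workspace is left out here because their layout is not shown in this diff.

```rust
use std::fs::File;
use std::path::Path;

use parquet::file::reader::{FileReader, SerializedFileReader};
use parquet::schema::printer::print_schema;

/// Print a Parquet file schema in the same `message ... { ... }` form that
/// `kamu inspect schema --output-format parquet` produces.
fn dump_parquet_schema(data_file: &Path) -> parquet::errors::Result<()> {
    let file = File::open(data_file).expect("cannot open parquet file");
    let reader = SerializedFileReader::new(file)?;
    print_schema(
        &mut std::io::stdout(),
        reader.metadata().file_metadata().schema(),
    );
    Ok(())
}
```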
+ +use std::assert_matches::assert_matches; + +use chrono::{TimeZone, Utc}; +use kamu_cli_e2e_common::{ + DATASET_DERIVATIVE_LEADERBOARD_SNAPSHOT_STR, + DATASET_ROOT_PLAYER_SCORES_INGEST_DATA_NDJSON_CHUNK_1, + DATASET_ROOT_PLAYER_SCORES_INGEST_DATA_NDJSON_CHUNK_2, + DATASET_ROOT_PLAYER_SCORES_SNAPSHOT_STR, +}; +use kamu_cli_puppet::extensions::KamuCliPuppetExt; +use kamu_cli_puppet::KamuCliPuppet; +use opendatafabric::{ + AddData, + AddPushSource, + DatasetKind, + DatasetName, + EnumWithVariants, + MergeStrategy, + MergeStrategyLedger, + MetadataEvent, + OffsetInterval, + ReadStep, + ReadStepNdJson, + SetDataSchema, + SetTransform, + SetVocab, + SqlQueryStep, + Transform, + TransformSql, +}; + +//////////////////////////////////////////////////////////////////////////////////////////////////////////////////////// + +pub async fn test_log(kamu: KamuCliPuppet) { + kamu.execute_with_input(["add", "--stdin"], DATASET_ROOT_PLAYER_SCORES_SNAPSHOT_STR) + .await + .success(); + + kamu.execute_with_input( + ["ingest", "player-scores", "--stdin"], + DATASET_ROOT_PLAYER_SCORES_INGEST_DATA_NDJSON_CHUNK_1, + ) + .await + .success(); + + kamu.execute_with_input( + ["ingest", "player-scores", "--stdin"], + DATASET_ROOT_PLAYER_SCORES_INGEST_DATA_NDJSON_CHUNK_2, + ) + .await + .success(); + + { + let mut metadata_blocks = kamu + .list_blocks(&DatasetName::new_unchecked("player-scores")) + .await + .into_iter() + .map(|br| br.block) + .collect::>(); + + pretty_assertions::assert_eq!(6, metadata_blocks.len()); + + { + let block = metadata_blocks.pop().unwrap(); + + pretty_assertions::assert_eq!(0, block.sequence_number); + + assert_matches!( + block.event, + MetadataEvent::Seed(event) + if event.dataset_kind == DatasetKind::Root + ); + } + { + let block = metadata_blocks.pop().unwrap(); + + pretty_assertions::assert_eq!(1, block.sequence_number); + + let actual_push_source = block.event.as_variant::().unwrap(); + let expected_push_source = AddPushSource { + source_name: "default".to_string(), + read: ReadStep::NdJson(ReadStepNdJson { + schema: Some(vec![ + "match_time TIMESTAMP".into(), + "match_id BIGINT".into(), + "player_id STRING".into(), + "score BIGINT".into(), + ]), + date_format: None, + encoding: None, + timestamp_format: None, + }), + preprocess: None, + merge: MergeStrategy::Ledger(MergeStrategyLedger { + primary_key: vec!["match_id".into(), "player_id".into()], + }), + }; + + pretty_assertions::assert_eq!(&expected_push_source, actual_push_source); + } + { + let block = metadata_blocks.pop().unwrap(); + + pretty_assertions::assert_eq!(2, block.sequence_number); + + let actual_set_vocab = block.event.as_variant::().unwrap(); + let expected_set_vocab = SetVocab { + offset_column: None, + operation_type_column: None, + system_time_column: None, + event_time_column: Some("match_time".into()), + }; + + pretty_assertions::assert_eq!(&expected_set_vocab, actual_set_vocab); + } + { + let block = metadata_blocks.pop().unwrap(); + + pretty_assertions::assert_eq!(3, block.sequence_number); + + let actual_set_data_schema = block.event.as_variant::().unwrap(); + let expected_set_data_schema = SetDataSchema { + schema: vec![ + 12, 0, 0, 0, 8, 0, 8, 0, 0, 0, 4, 0, 8, 0, 0, 0, 4, 0, 0, 0, 7, 0, 0, 0, 124, + 1, 0, 0, 60, 1, 0, 0, 244, 0, 0, 0, 180, 0, 0, 0, 108, 0, 0, 0, 56, 0, 0, 0, 4, + 0, 0, 0, 108, 255, 255, 255, 16, 0, 0, 0, 24, 0, 0, 0, 0, 0, 1, 2, 20, 0, 0, 0, + 160, 254, 255, 255, 64, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 5, 0, 0, 0, 115, 99, + 111, 114, 101, 0, 0, 0, 156, 255, 255, 255, 24, 0, 0, 0, 12, 0, 0, 
0, 0, 0, 1, + 5, 16, 0, 0, 0, 0, 0, 0, 0, 4, 0, 4, 0, 4, 0, 0, 0, 9, 0, 0, 0, 112, 108, 97, + 121, 101, 114, 95, 105, 100, 0, 0, 0, 204, 255, 255, 255, 16, 0, 0, 0, 24, 0, + 0, 0, 0, 0, 1, 2, 20, 0, 0, 0, 0, 255, 255, 255, 64, 0, 0, 0, 0, 0, 0, 1, 0, 0, + 0, 0, 8, 0, 0, 0, 109, 97, 116, 99, 104, 95, 105, 100, 0, 0, 0, 0, 16, 0, 20, + 0, 16, 0, 14, 0, 15, 0, 4, 0, 0, 0, 8, 0, 16, 0, 0, 0, 20, 0, 0, 0, 12, 0, 0, + 0, 0, 0, 1, 10, 28, 0, 0, 0, 0, 0, 0, 0, 196, 255, 255, 255, 8, 0, 0, 0, 0, 0, + 1, 0, 3, 0, 0, 0, 85, 84, 67, 0, 10, 0, 0, 0, 109, 97, 116, 99, 104, 95, 116, + 105, 109, 101, 0, 0, 144, 255, 255, 255, 28, 0, 0, 0, 12, 0, 0, 0, 0, 0, 0, 10, + 36, 0, 0, 0, 0, 0, 0, 0, 8, 0, 12, 0, 10, 0, 4, 0, 8, 0, 0, 0, 8, 0, 0, 0, 0, + 0, 1, 0, 3, 0, 0, 0, 85, 84, 67, 0, 11, 0, 0, 0, 115, 121, 115, 116, 101, 109, + 95, 116, 105, 109, 101, 0, 212, 255, 255, 255, 16, 0, 0, 0, 24, 0, 0, 0, 0, 0, + 0, 2, 20, 0, 0, 0, 196, 255, 255, 255, 32, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 2, + 0, 0, 0, 111, 112, 0, 0, 16, 0, 20, 0, 16, 0, 0, 0, 15, 0, 4, 0, 0, 0, 8, 0, + 16, 0, 0, 0, 24, 0, 0, 0, 32, 0, 0, 0, 0, 0, 0, 2, 28, 0, 0, 0, 8, 0, 12, 0, 4, + 0, 11, 0, 8, 0, 0, 0, 64, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 6, 0, 0, 0, 111, + 102, 102, 115, 101, 116, 0, 0, + ], + }; + + pretty_assertions::assert_eq!(&expected_set_data_schema, actual_set_data_schema); + } + { + let block = metadata_blocks.pop().unwrap(); + + pretty_assertions::assert_eq!(4, block.sequence_number); + + let actual_add_data = block.event.as_variant::().unwrap(); + + pretty_assertions::assert_eq!(None, actual_add_data.prev_checkpoint); + pretty_assertions::assert_eq!(None, actual_add_data.prev_offset); + + let actual_new_data = actual_add_data.new_data.as_ref().unwrap(); + + pretty_assertions::assert_eq!( + OffsetInterval { start: 0, end: 1 }, + actual_new_data.offset_interval + ); + pretty_assertions::assert_eq!(1665, actual_new_data.size); + + pretty_assertions::assert_eq!(None, actual_add_data.new_checkpoint); + pretty_assertions::assert_eq!( + Some(Utc.with_ymd_and_hms(2000, 1, 1, 0, 0, 0).unwrap()), + actual_add_data.new_watermark + ); + pretty_assertions::assert_eq!(None, actual_add_data.new_source_state); + } + { + let block = metadata_blocks.pop().unwrap(); + + pretty_assertions::assert_eq!(5, block.sequence_number); + + let actual_add_data = block.event.as_variant::().unwrap(); + + pretty_assertions::assert_eq!(None, actual_add_data.prev_checkpoint); + pretty_assertions::assert_eq!(Some(1), actual_add_data.prev_offset); + + let actual_new_data = actual_add_data.new_data.as_ref().unwrap(); + + pretty_assertions::assert_eq!( + OffsetInterval { start: 2, end: 3 }, + actual_new_data.offset_interval + ); + pretty_assertions::assert_eq!(1681, actual_new_data.size); + + pretty_assertions::assert_eq!(None, actual_add_data.new_checkpoint); + pretty_assertions::assert_eq!( + Some(Utc.with_ymd_and_hms(2000, 1, 2, 0, 0, 0).unwrap()), + actual_add_data.new_watermark + ); + pretty_assertions::assert_eq!(None, actual_add_data.new_source_state); + } + } + + kamu.execute_with_input( + ["add", "--stdin"], + DATASET_DERIVATIVE_LEADERBOARD_SNAPSHOT_STR, + ) + .await + .success(); + + { + let mut metadata_blocks = kamu + .list_blocks(&DatasetName::new_unchecked("leaderboard")) + .await + .into_iter() + .map(|br| br.block) + .collect::>(); + + pretty_assertions::assert_eq!(3, metadata_blocks.len()); + + { + let block = metadata_blocks.pop().unwrap(); + + pretty_assertions::assert_eq!(0, block.sequence_number); + + assert_matches!( + block.event, + 
MetadataEvent::Seed(event) + if event.dataset_kind == DatasetKind::Derivative + ); + } + { + let block = metadata_blocks.pop().unwrap(); + + pretty_assertions::assert_eq!(1, block.sequence_number); + + let actual_set_transform = block.event.as_variant::().unwrap(); + + pretty_assertions::assert_eq!(1, actual_set_transform.inputs.len()); + pretty_assertions::assert_eq!( + Some("player_scores".into()), + actual_set_transform.inputs[0].alias + ); + + let expected_transform = Transform::Sql(TransformSql { + engine: "risingwave".into(), + version: None, + query: None, + queries: Some(vec![ + SqlQueryStep { + alias: Some("leaderboard".into()), + query: indoc::indoc!( + r#" + create materialized view leaderboard as + select + * + from ( + select + row_number() over (partition by 1 order by score desc) as place, + match_time, + match_id, + player_id, + score + from player_scores + ) + where place <= 2 + "# + ) + .into(), + }, + SqlQueryStep { + alias: None, + query: "select * from leaderboard".into(), + }, + ]), + temporal_tables: None, + }); + + pretty_assertions::assert_eq!(expected_transform, actual_set_transform.transform); + } + { + let block = metadata_blocks.pop().unwrap(); + + pretty_assertions::assert_eq!(2, block.sequence_number); + + let actual_set_vocab = block.event.as_variant::().unwrap(); + let expected_set_vocab = SetVocab { + offset_column: None, + operation_type_column: None, + system_time_column: None, + event_time_column: Some("match_time".into()), + }; + + pretty_assertions::assert_eq!(&expected_set_vocab, actual_set_vocab); + } + } +} + +//////////////////////////////////////////////////////////////////////////////////////////////////////////////////////// diff --git a/src/e2e/app/cli/repo-tests/src/commands/test_login_command.rs b/src/e2e/app/cli/repo-tests/src/commands/test_login_command.rs new file mode 100644 index 000000000..d0f2a80dc --- /dev/null +++ b/src/e2e/app/cli/repo-tests/src/commands/test_login_command.rs @@ -0,0 +1,126 @@ +// Copyright Kamu Data, Inc. and contributors. All rights reserved. +// +// Use of this software is governed by the Business Source License +// included in the LICENSE file. +// +// As of the Change Date specified in that file, in accordance with +// the Business Source License, use of this software will be governed +// by the Apache License, Version 2.0. 
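The long `SetDataSchema` byte array asserted in `test_log` above is not arbitrary: to the best of my understanding it is the dataset's Arrow schema serialized in flatbuffers IPC form. If that assumption holds, the `arrow` crate can turn it back into a readable schema, which makes the expected column list much easier to verify by eye. Treat the sketch below as illustrative only; the exact encoding kamu uses is not spelled out in this diff.

```rust
use arrow::datatypes::Schema;
use arrow::error::ArrowError;

/// Decode SetDataSchema bytes, assuming they are a flatbuffers-encoded Arrow schema.
fn decode_set_data_schema(raw: &[u8]) -> Result<Schema, ArrowError> {
    let schema = arrow::ipc::convert::try_schema_from_flatbuffer_bytes(raw)?;
    for field in schema.fields() {
        // Expected: offset, op, system_time, match_time, match_id, player_id, score
        println!("{}: {:?}", field.name(), field.data_type());
    }
    Ok(schema)
}
```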
+ +use kamu_cli_e2e_common::{ + KamuApiServerClient, + KamuApiServerClientExt, + DATASET_ROOT_PLAYER_SCORES_SNAPSHOT_STR, +}; +use kamu_cli_puppet::extensions::KamuCliPuppetExt; +use kamu_cli_puppet::KamuCliPuppet; + +//////////////////////////////////////////////////////////////////////////////////////////////////////////////////////// + +pub async fn test_login_logout_password(kamu_node_api_client: KamuApiServerClient) { + let kamu_node_url = kamu_node_api_client.get_base_url().as_str(); + let kamu = KamuCliPuppet::new_workspace_tmp().await; + + kamu.assert_success_command_execution( + ["logout", kamu_node_url], + None, + Some([format!("Not logged in to {kamu_node_url}").as_str()]), + ) + .await; + + kamu.assert_failure_command_execution( + ["login", kamu_node_url, "--check"], + None, + Some([format!("Error: No access token found for: {kamu_node_url}").as_str()]), + ) + .await; + + kamu.assert_success_command_execution( + ["login", "password", "kamu", "kamu", kamu_node_url], + None, + Some([format!("Login successful: {kamu_node_url}").as_str()]), + ) + .await; + + kamu.assert_success_command_execution( + ["login", kamu_node_url, "--check"], + None, + Some([format!("Access token valid: {kamu_node_url}").as_str()]), + ) + .await; + + kamu.execute_with_input(["add", "--stdin"], DATASET_ROOT_PLAYER_SCORES_SNAPSHOT_STR) + .await + .success(); + + // Token validation, via an API call that requires authorization + kamu.assert_success_command_execution( + [ + "push", + "player-scores", + "--to", + &format!("odf+{kamu_node_url}player-scores"), + ], + None, + Some(["1 dataset(s) pushed"]), + ) + .await; + + kamu.assert_success_command_execution( + ["logout", kamu_node_url], + None, + Some([format!("Logged out of {kamu_node_url}").as_str()]), + ) + .await; +} + +//////////////////////////////////////////////////////////////////////////////////////////////////////////////////////// + +pub async fn test_login_logout_oauth(kamu_node_api_client: KamuApiServerClient) { + let kamu_node_url = kamu_node_api_client.get_base_url().as_str(); + let kamu = KamuCliPuppet::new_workspace_tmp().await; + + kamu.assert_success_command_execution( + ["logout", kamu_node_url], + None, + Some([format!("Not logged in to {kamu_node_url}").as_str()]), + ) + .await; + + kamu.assert_failure_command_execution( + ["login", kamu_node_url, "--check"], + None, + Some([format!("Error: No access token found for: {kamu_node_url}").as_str()]), + ) + .await; + + let oauth_token = kamu_node_api_client.login_as_e2e_user().await; + + kamu.assert_success_command_execution( + ["login", "oauth", "github", &oauth_token, kamu_node_url], + None, + Some([format!("Login successful: {kamu_node_url}").as_str()]), + ) + .await; + + kamu.assert_success_command_execution( + ["login", kamu_node_url, "--check"], + None, + Some([format!("Access token valid: {kamu_node_url}").as_str()]), + ) + .await; + + // Token validation, via an API call that requires authorization + kamu_node_api_client + .create_player_scores_dataset(&oauth_token) + .await; + + kamu.assert_success_command_execution( + ["logout", kamu_node_url], + None, + Some([format!("Logged out of {kamu_node_url}").as_str()]), + ) + .await; +} + +//////////////////////////////////////////////////////////////////////////////////////////////////////////////////////// diff --git a/src/e2e/app/cli/repo-tests/src/commands/test_new_command.rs b/src/e2e/app/cli/repo-tests/src/commands/test_new_command.rs new file mode 100644 index 000000000..82bd99080 --- /dev/null +++ 
b/src/e2e/app/cli/repo-tests/src/commands/test_new_command.rs @@ -0,0 +1,59 @@ +// Copyright Kamu Data, Inc. and contributors. All rights reserved. +// +// Use of this software is governed by the Business Source License +// included in the LICENSE file. +// +// As of the Change Date specified in that file, in accordance with +// the Business Source License, use of this software will be governed +// by the Apache License, Version 2.0. + +use kamu_cli_puppet::extensions::KamuCliPuppetExt; +use kamu_cli_puppet::KamuCliPuppet; + +//////////////////////////////////////////////////////////////////////////////////////////////////////////////////////// + +pub async fn test_new_root(kamu: KamuCliPuppet) { + kamu.assert_success_command_execution( + ["new", "--root", "test-dataset"], + None, + Some([indoc::indoc!( + r#" + Written new manifest template to: test-dataset.yaml + Follow directions in the file's comments and use `kamu add test-dataset.yaml` when ready. + "# + )]), + ) + .await; + + // TODO: After solving this issue, add `kamu add` calls and populate with + // data + // + // `kamu new`: generate snapshots that will be immediately ready to be + // added/worked on + // https://github.com/kamu-data/kamu-cli/issues/888 +} + +//////////////////////////////////////////////////////////////////////////////////////////////////////////////////////// + +pub async fn test_new_derivative(kamu: KamuCliPuppet) { + kamu.assert_success_command_execution( + ["new", "--derivative", "test-dataset"], + None, + Some([indoc::indoc!( + r#" + Written new manifest template to: test-dataset.yaml + Follow directions in the file's comments and use `kamu add test-dataset.yaml` when ready. + "# + )]), + ) + .await; + + // TODO: After solving this issue, add `kamu add` calls and populate with + // data + // + // `kamu new`: generate snapshots that will be immediately ready to be + // added/worked on + // https://github.com/kamu-data/kamu-cli/issues/888 +} + +//////////////////////////////////////////////////////////////////////////////////////////////////////////////////////// diff --git a/src/e2e/app/cli/repo-tests/src/commands/test_rename_command.rs b/src/e2e/app/cli/repo-tests/src/commands/test_rename_command.rs new file mode 100644 index 000000000..e05386b2c --- /dev/null +++ b/src/e2e/app/cli/repo-tests/src/commands/test_rename_command.rs @@ -0,0 +1,45 @@ +// Copyright Kamu Data, Inc. and contributors. All rights reserved. +// +// Use of this software is governed by the Business Source License +// included in the LICENSE file. +// +// As of the Change Date specified in that file, in accordance with +// the Business Source License, use of this software will be governed +// by the Apache License, Version 2.0. 
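Most of these suites feed `DATASET_ROOT_PLAYER_SCORES_SNAPSHOT_STR` to `kamu add --stdin`; the constant itself lives in `kamu-cli-e2e-common` and is not part of this diff. Purely as an orientation aid, a snapshot consistent with the events asserted in `test_log` (an NdJson `AddPushSource` with a Ledger merge on `match_id`/`player_id` and a `match_time` event-time vocab) would look roughly like the sketch below; the real fixture may differ in details.

```rust
/// Rough approximation of the player-scores root snapshot used by these tests,
/// reconstructed from the metadata events asserted in `test_log`; it is not a
/// copy of the real DATASET_ROOT_PLAYER_SCORES_SNAPSHOT_STR constant.
pub const PLAYER_SCORES_SNAPSHOT_APPROXIMATION: &str = indoc::indoc!(
    r#"
    kind: DatasetSnapshot
    version: 1
    content:
      name: player-scores
      kind: Root
      metadata:
        - kind: AddPushSource
          sourceName: default
          read:
            kind: NdJson
            schema:
              - match_time TIMESTAMP
              - match_id BIGINT
              - player_id STRING
              - score BIGINT
          merge:
            kind: Ledger
            primaryKey:
              - match_id
              - player_id
        - kind: SetVocab
          eventTimeColumn: match_time
    "#
);
```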
+ +use kamu_cli_e2e_common::DATASET_ROOT_PLAYER_SCORES_SNAPSHOT_STR; +use kamu_cli_puppet::extensions::KamuCliPuppetExt; +use kamu_cli_puppet::KamuCliPuppet; + +//////////////////////////////////////////////////////////////////////////////////////////////////////////////////////// + +pub async fn test_rename_dataset(kamu: KamuCliPuppet) { + kamu.assert_failure_command_execution( + ["rename", "player-scores", "top-player-scores"], + None, + Some(["Error: Dataset not found: player-scores"]), + ) + .await; + + kamu.execute_with_input(["add", "--stdin"], DATASET_ROOT_PLAYER_SCORES_SNAPSHOT_STR) + .await + .success(); + + kamu.assert_success_command_execution( + ["rename", "player-scores", "top-player-scores"], + None, + Some(["Dataset renamed"]), + ) + .await; + + let dataset_names = kamu + .list_datasets() + .await + .into_iter() + .map(|dataset| dataset.name) + .collect::>(); + + assert_eq!(dataset_names, ["top-player-scores"]); +} + +//////////////////////////////////////////////////////////////////////////////////////////////////////////////////////// diff --git a/src/e2e/app/cli/repo-tests/src/commands/test_repo_alias_command.rs b/src/e2e/app/cli/repo-tests/src/commands/test_repo_command.rs similarity index 97% rename from src/e2e/app/cli/repo-tests/src/commands/test_repo_alias_command.rs rename to src/e2e/app/cli/repo-tests/src/commands/test_repo_command.rs index 154dc993e..bda9155e3 100644 --- a/src/e2e/app/cli/repo-tests/src/commands/test_repo_alias_command.rs +++ b/src/e2e/app/cli/repo-tests/src/commands/test_repo_command.rs @@ -87,6 +87,8 @@ pub async fn test_repository_pull_aliases_commands(kamu: KamuCliPuppet) { assert!(aliases.is_empty()); } +//////////////////////////////////////////////////////////////////////////////////////////////////////////////////////// + pub async fn test_repository_push_aliases_commands(kamu: KamuCliPuppet) { kamu.add_dataset(DatasetSnapshot { name: "foo".try_into().unwrap(), diff --git a/src/e2e/app/cli/repo-tests/src/commands/test_reset_command.rs b/src/e2e/app/cli/repo-tests/src/commands/test_reset_command.rs new file mode 100644 index 000000000..9ae74eeab --- /dev/null +++ b/src/e2e/app/cli/repo-tests/src/commands/test_reset_command.rs @@ -0,0 +1,80 @@ +// Copyright Kamu Data, Inc. and contributors. All rights reserved. +// +// Use of this software is governed by the Business Source License +// included in the LICENSE file. +// +// As of the Change Date specified in that file, in accordance with +// the Business Source License, use of this software will be governed +// by the Apache License, Version 2.0. 
+ +use std::assert_matches::assert_matches; + +use kamu_cli_e2e_common::{ + DATASET_ROOT_PLAYER_SCORES_INGEST_DATA_NDJSON_CHUNK_1, + DATASET_ROOT_PLAYER_SCORES_SNAPSHOT_STR, +}; +use kamu_cli_puppet::extensions::KamuCliPuppetExt; +use kamu_cli_puppet::KamuCliPuppet; +use opendatafabric::{DatasetName, MetadataEvent}; + +//////////////////////////////////////////////////////////////////////////////////////////////////////////////////////// + +pub async fn test_reset(kamu: KamuCliPuppet) { + kamu.execute_with_input(["add", "--stdin"], DATASET_ROOT_PLAYER_SCORES_SNAPSHOT_STR) + .await + .success(); + + let block_records_after_ingesting = kamu + .list_blocks(&DatasetName::new_unchecked("player-scores")) + .await; + + pretty_assertions::assert_eq!(3, block_records_after_ingesting.len()); + + let set_vocab_block_record = &block_records_after_ingesting[0]; + + assert_matches!( + &set_vocab_block_record.block.event, + MetadataEvent::SetVocab(_), + ); + + pretty_assertions::assert_eq!(3, block_records_after_ingesting.len()); + + kamu.execute_with_input( + ["ingest", "player-scores", "--stdin"], + DATASET_ROOT_PLAYER_SCORES_INGEST_DATA_NDJSON_CHUNK_1, + ) + .await + .success(); + + pretty_assertions::assert_eq!( + 5, + kamu.list_blocks(&DatasetName::new_unchecked("player-scores")) + .await + .len() + ); + + let set_vocab_block_hash = set_vocab_block_record + .block_hash + .as_multibase() + .to_stack_string(); + + kamu.assert_success_command_execution( + [ + "--yes", + "reset", + "player-scores", + set_vocab_block_hash.as_str(), + ], + None, + Some(["Dataset was reset"]), + ) + .await; + + let block_records_after_resetting = kamu + .list_blocks(&DatasetName::new_unchecked("player-scores")) + .await; + + pretty_assertions::assert_eq!(block_records_after_ingesting, block_records_after_resetting); +} + +//////////////////////////////////////////////////////////////////////////////////////////////////////////////////////// diff --git a/src/e2e/app/cli/repo-tests/src/commands/test_search_command.rs b/src/e2e/app/cli/repo-tests/src/commands/test_search_command.rs new file mode 100644 index 000000000..0f78b7211 --- /dev/null +++ b/src/e2e/app/cli/repo-tests/src/commands/test_search_command.rs @@ -0,0 +1,360 @@ +// Copyright Kamu Data, Inc. and contributors. All rights reserved. +// +// Use of this software is governed by the Business Source License +// included in the LICENSE file. +// +// As of the Change Date specified in that file, in accordance with +// the Business Source License, use of this software will be governed +// by the Apache License, Version 2.0. 
+ +use kamu_cli_e2e_common::{ + KamuApiServerClient, + KamuApiServerClientExt, + RequestBody, + DATASET_ROOT_PLAYER_SCORES_INGEST_DATA_NDJSON_CHUNK_1, +}; +use kamu_cli_puppet::extensions::KamuCliPuppetExt; +use kamu_cli_puppet::KamuCliPuppet; +use opendatafabric::*; + +//////////////////////////////////////////////////////////////////////////////////////////////////////////////////////// + +pub async fn test_search_multi_user(kamu_node_api_client: KamuApiServerClient) { + let kamu = KamuCliPuppet::new_workspace_tmp().await; + + add_repo_to_workspace(&kamu_node_api_client, &kamu, "kamu-node").await; + + kamu.assert_success_command_execution( + ["search", "player", "--output-format", "table"], + Some(indoc::indoc!( + r#" + ┌───────┬──────┬─────────────┬────────┬─────────┬──────┐ + │ Alias │ Kind │ Description │ Blocks │ Records │ Size │ + ├───────┼──────┼─────────────┼────────┼─────────┼──────┤ + │ │ │ │ │ │ │ + └───────┴──────┴─────────────┴────────┴─────────┴──────┘ + "# + )), + None::>, + ) + .await; + + let e2e_user_token = kamu_node_api_client.login_as_e2e_user().await; + + kamu_node_api_client + .create_player_scores_dataset(&e2e_user_token) + .await; + + kamu.assert_success_command_execution( + ["search", "player", "--output-format", "table"], + Some(indoc::indoc!( + r#" + ┌──────────────────────────────────┬──────┬─────────────┬────────┬─────────┬──────┐ + │ Alias │ Kind │ Description │ Blocks │ Records │ Size │ + ├──────────────────────────────────┼──────┼─────────────┼────────┼─────────┼──────┤ + │ kamu-node/e2e-user/player-scores │ Root │ - │ 3 │ - │ - │ + └──────────────────────────────────┴──────┴─────────────┴────────┴─────────┴──────┘ + "# + )), + None::>, + ) + .await; + + let player_scores_alias = DatasetAlias::new( + Some(AccountName::new_unchecked("e2e-user")), + DatasetName::new_unchecked("player-scores"), + ); + + kamu_node_api_client + .ingest_data( + &player_scores_alias, + RequestBody::NdJson(DATASET_ROOT_PLAYER_SCORES_INGEST_DATA_NDJSON_CHUNK_1.into()), + &e2e_user_token, + ) + .await; + + kamu.assert_success_command_execution( + ["search", "player", "--output-format", "table"], + Some(indoc::indoc!( + r#" + ┌──────────────────────────────────┬──────┬─────────────┬────────┬─────────┬──────────┐ + │ Alias │ Kind │ Description │ Blocks │ Records │ Size │ + ├──────────────────────────────────┼──────┼─────────────┼────────┼─────────┼──────────┤ + │ kamu-node/e2e-user/player-scores │ Root │ - │ 5 │ 2 │ 1.63 KiB │ + └──────────────────────────────────┴──────┴─────────────┴────────┴─────────┴──────────┘ + "# + )), + None::>, + ) + .await; + + // The same as DATASET_DERIVATIVE_LEADERBOARD_SNAPSHOT, but contains the word + // "player" in the name so that it can be found together with "player-scores" + let dataset_derivative_player_leaderboard_snapshot = indoc::indoc!( + r#" + kind: DatasetSnapshot + version: 1 + content: + name: player-leaderboard + kind: Derivative + metadata: + - kind: SetTransform + inputs: + - datasetRef: player-scores + alias: player_scores + transform: + kind: Sql + engine: risingwave + queries: + - alias: leaderboard + query: | + create materialized view leaderboard as + select + * + from ( + select + row_number() over (partition by 1 order by score desc) as place, + match_time, + match_id, + player_id, + score + from player_scores + ) + where place <= 2 + - query: | + select * from leaderboard + - kind: SetVocab + eventTimeColumn: match_time + "# + ) + .escape_default() + .to_string(); + + kamu_node_api_client + .create_dataset( + 
&dataset_derivative_player_leaderboard_snapshot, + &e2e_user_token, + ) + .await; + + kamu.assert_success_command_execution( + ["search", "player", "--output-format", "table"], + Some(indoc::indoc!( + r#" + ┌───────────────────────────────────────┬────────────┬─────────────┬────────┬─────────┬──────────┐ + │ Alias │ Kind │ Description │ Blocks │ Records │ Size │ + ├───────────────────────────────────────┼────────────┼─────────────┼────────┼─────────┼──────────┤ + │ kamu-node/e2e-user/player-leaderboard │ Derivative │ - │ 3 │ - │ - │ + │ kamu-node/e2e-user/player-scores │ Root │ - │ 5 │ 2 │ 1.63 KiB │ + └───────────────────────────────────────┴────────────┴─────────────┴────────┴─────────┴──────────┘ + "# + )), + None::>, + ) + .await; + + let kamu_token = kamu_node_api_client.login_as_kamu().await; + + kamu_node_api_client + .create_player_scores_dataset(&kamu_token) + .await; + + kamu.assert_success_command_execution( + ["search", "player", "--output-format", "table"], + Some(indoc::indoc!( + r#" + ┌───────────────────────────────────────┬────────────┬─────────────┬────────┬─────────┬──────────┐ + │ Alias │ Kind │ Description │ Blocks │ Records │ Size │ + ├───────────────────────────────────────┼────────────┼─────────────┼────────┼─────────┼──────────┤ + │ kamu-node/e2e-user/player-leaderboard │ Derivative │ - │ 3 │ - │ - │ + │ kamu-node/e2e-user/player-scores │ Root │ - │ 5 │ 2 │ 1.63 KiB │ + │ kamu-node/kamu/player-scores │ Root │ - │ 3 │ - │ - │ + └───────────────────────────────────────┴────────────┴─────────────┴────────┴─────────┴──────────┘ + "# + )), + None::>, + ) + .await; +} + +//////////////////////////////////////////////////////////////////////////////////////////////////////////////////////// + +pub async fn test_search_by_name(kamu_node_api_client: KamuApiServerClient) { + let kamu = KamuCliPuppet::new_workspace_tmp().await; + + add_repo_to_workspace(&kamu_node_api_client, &kamu, "kamu-node").await; + + let e2e_user_token = kamu_node_api_client.login_as_e2e_user().await; + + kamu_node_api_client + .create_player_scores_dataset(&e2e_user_token) + .await; + + kamu_node_api_client + .create_leaderboard(&e2e_user_token) + .await; + + kamu.assert_success_command_execution( + ["search", "player", "--output-format", "table"], + Some(indoc::indoc!( + r#" + ┌──────────────────────────────────┬──────┬─────────────┬────────┬─────────┬──────┐ + │ Alias │ Kind │ Description │ Blocks │ Records │ Size │ + ├──────────────────────────────────┼──────┼─────────────┼────────┼─────────┼──────┤ + │ kamu-node/e2e-user/player-scores │ Root │ - │ 3 │ - │ - │ + └──────────────────────────────────┴──────┴─────────────┴────────┴─────────┴──────┘ + "# + )), + None::>, + ) + .await; + + kamu.assert_success_command_execution( + ["search", "scores", "--output-format", "table"], + Some(indoc::indoc!( + r#" + ┌──────────────────────────────────┬──────┬─────────────┬────────┬─────────┬──────┐ + │ Alias │ Kind │ Description │ Blocks │ Records │ Size │ + ├──────────────────────────────────┼──────┼─────────────┼────────┼─────────┼──────┤ + │ kamu-node/e2e-user/player-scores │ Root │ - │ 3 │ - │ - │ + └──────────────────────────────────┴──────┴─────────────┴────────┴─────────┴──────┘ + "# + )), + None::>, + ) + .await; + + kamu.assert_success_command_execution( + ["search", "not-relevant-query", "--output-format", "table"], + Some(indoc::indoc!( + r#" + ┌───────┬──────┬─────────────┬────────┬─────────┬──────┐ + │ Alias │ Kind │ Description │ Blocks │ Records │ Size │ + 
├───────┼──────┼─────────────┼────────┼─────────┼──────┤ + │ │ │ │ │ │ │ + └───────┴──────┴─────────────┴────────┴─────────┴──────┘ + "# + )), + None::>, + ) + .await; + + kamu.assert_success_command_execution( + ["search", "lead", "--output-format", "table"], + Some(indoc::indoc!( + r#" + ┌────────────────────────────────┬────────────┬─────────────┬────────┬─────────┬──────┐ + │ Alias │ Kind │ Description │ Blocks │ Records │ Size │ + ├────────────────────────────────┼────────────┼─────────────┼────────┼─────────┼──────┤ + │ kamu-node/e2e-user/leaderboard │ Derivative │ - │ 3 │ - │ - │ + └────────────────────────────────┴────────────┴─────────────┴────────┴─────────┴──────┘ + "# + )), + None::>, + ) + .await; +} + +//////////////////////////////////////////////////////////////////////////////////////////////////////////////////////// + +pub async fn test_search_by_repo(kamu_node_api_client: KamuApiServerClient) { + let kamu = KamuCliPuppet::new_workspace_tmp().await; + + // As a test, add two repos pointing to the same node + add_repo_to_workspace(&kamu_node_api_client, &kamu, "kamu-node").await; + add_repo_to_workspace(&kamu_node_api_client, &kamu, "acme-org-node").await; + + let e2e_user_token = kamu_node_api_client.login_as_e2e_user().await; + + kamu_node_api_client + .create_player_scores_dataset(&e2e_user_token) + .await; + + kamu_node_api_client + .create_leaderboard(&e2e_user_token) + .await; + + kamu.assert_success_command_execution( + ["search", "player", "--output-format", "table"], + Some(indoc::indoc!( + r#" + ┌──────────────────────────────────────┬──────┬─────────────┬────────┬─────────┬──────┐ + │ Alias │ Kind │ Description │ Blocks │ Records │ Size │ + ├──────────────────────────────────────┼──────┼─────────────┼────────┼─────────┼──────┤ + │ acme-org-node/e2e-user/player-scores │ Root │ - │ 3 │ - │ - │ + │ kamu-node/e2e-user/player-scores │ Root │ - │ 3 │ - │ - │ + └──────────────────────────────────────┴──────┴─────────────┴────────┴─────────┴──────┘ + "# + )), + None::>, + ) + .await; + + kamu.assert_success_command_execution( + [ + "search", + "player", + "--repo", + "acme-org-node", + "--output-format", + "table", + ], + Some(indoc::indoc!( + r#" + ┌──────────────────────────────────────┬──────┬─────────────┬────────┬─────────┬──────┐ + │ Alias │ Kind │ Description │ Blocks │ Records │ Size │ + ├──────────────────────────────────────┼──────┼─────────────┼────────┼─────────┼──────┤ + │ acme-org-node/e2e-user/player-scores │ Root │ - │ 3 │ - │ - │ + └──────────────────────────────────────┴──────┴─────────────┴────────┴─────────┴──────┘ + "# + )), + None::>, + ) + .await; + + kamu.assert_success_command_execution( + [ + "search", + "player", + "--repo", + "kamu-node", + "--output-format", + "table", + ], + Some(indoc::indoc!( + r#" + ┌──────────────────────────────────┬──────┬─────────────┬────────┬─────────┬──────┐ + │ Alias │ Kind │ Description │ Blocks │ Records │ Size │ + ├──────────────────────────────────┼──────┼─────────────┼────────┼─────────┼──────┤ + │ kamu-node/e2e-user/player-scores │ Root │ - │ 3 │ - │ - │ + └──────────────────────────────────┴──────┴─────────────┴────────┴─────────┴──────┘ + "# + )), + None::>, + ) + .await; +} + +//////////////////////////////////////////////////////////////////////////////////////////////////////////////////////// +// Helpers +//////////////////////////////////////////////////////////////////////////////////////////////////////////////////////// + +async fn add_repo_to_workspace( + kamu_node_api_client: 
&KamuApiServerClient, + kamu: &KamuCliPuppet, + repo_name: &str, +) { + kamu.assert_success_command_execution( + [ + "repo", + "add", + repo_name, + kamu_node_api_client.get_node_url().as_str(), + ], + None, + Some([format!("Added: {repo_name}").as_str()]), + ) + .await; +} + +//////////////////////////////////////////////////////////////////////////////////////////////////////////////////////// diff --git a/src/e2e/app/cli/repo-tests/src/commands/test_sql_command.rs b/src/e2e/app/cli/repo-tests/src/commands/test_sql_command.rs index abe16c63d..5718a97f0 100644 --- a/src/e2e/app/cli/repo-tests/src/commands/test_sql_command.rs +++ b/src/e2e/app/cli/repo-tests/src/commands/test_sql_command.rs @@ -7,7 +7,11 @@ // the Business Source License, use of this software will be governed // by the Apache License, Version 2.0. -use indoc::indoc; +use kamu_cli_e2e_common::{ + DATASET_ROOT_PLAYER_SCORES_INGEST_DATA_NDJSON_CHUNK_1, + DATASET_ROOT_PLAYER_SCORES_SNAPSHOT_STR, +}; +use kamu_cli_puppet::extensions::KamuCliPuppetExt; use kamu_cli_puppet::KamuCliPuppet; //////////////////////////////////////////////////////////////////////////////////////////////////////////////////////// @@ -22,7 +26,7 @@ pub async fn test_datafusion_cli(kamu: KamuCliPuppet) { assert!( stdout.contains( - indoc!( + indoc::indoc!( r#" +----------+ | Int64(1) | @@ -45,7 +49,92 @@ pub async fn test_datafusion_cli_not_launched_in_root_ws(kamu: KamuCliPuppet) { // The workspace search functionality checks for parent folders, // so there is no problem that the process working directory is one of the // subdirectories (kamu-cli/src/e2e/app/cli/inmem) - kamu.execute(["list"]).await.failure(); + + kamu.assert_failure_command_execution( + ["list"], + None, + Some(["Error: Directory is not a kamu workspace"]), + ) + .await; +} + +//////////////////////////////////////////////////////////////////////////////////////////////////////////////////////// + +pub async fn test_sql_command(kamu: KamuCliPuppet) { + kamu.assert_success_command_execution( + [ + "sql", + "--command", + "SELECT 42 as answer;", + "--output-format", + "table", + ], + Some(indoc::indoc!( + r#" + ┌────────┐ + │ answer │ + ├────────┤ + │ 42 │ + └────────┘ + "# + )), + None::>, + ) + .await; + + kamu.execute_with_input(["add", "--stdin"], DATASET_ROOT_PLAYER_SCORES_SNAPSHOT_STR) + .await + .success(); + + kamu.assert_success_command_execution( + [ + "sql", + "--command", + "SELECT * FROM \"player-scores\";", + "--output-format", + "table", + ], + Some(indoc::indoc!( + r#" + ┌┐ + ││ + ├┤ + ││ + └┘ + "# + )), + None::>, + ) + .await; + + kamu.execute_with_input( + ["ingest", "player-scores", "--stdin"], + DATASET_ROOT_PLAYER_SCORES_INGEST_DATA_NDJSON_CHUNK_1, + ) + .await + .success(); + + kamu.assert_success_command_execution( + [ + "sql", + "--command", + "SELECT * FROM \"player-scores\" ORDER BY offset;", + "--output-format", + "table", + ], + Some(indoc::indoc!( + r#" + ┌────────┬────┬──────────────────────┬──────────────────────┬──────────┬───────────┬───────┐ + │ offset │ op │ system_time │ match_time │ match_id │ player_id │ score │ + ├────────┼────┼──────────────────────┼──────────────────────┼──────────┼───────────┼───────┤ + │ 0 │ 0 │ 2050-01-02T03:04:05Z │ 2000-01-01T00:00:00Z │ 1 │ Alice │ 100 │ + │ 1 │ 0 │ 2050-01-02T03:04:05Z │ 2000-01-01T00:00:00Z │ 1 │ Bob │ 80 │ + └────────┴────┴──────────────────────┴──────────────────────┴──────────┴───────────┴───────┘ + "# + )), + None::>, + ) + .await; } 
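// For orientation: the `assert_success_command_execution` / `assert_failure_command_execution`
// helpers used throughout these tests come from `KamuCliPuppetExt`, whose definition is not part
// of this diff. Below is a minimal sketch of the pattern, assuming the helper simply wraps the
// existing `execute()` (assert_cmd) call, compares stdout in full when an expected rendering is
// given, and checks stderr for the given fragments. Names, generics, and error reporting here are
// illustrative, not the actual trait API.
use std::ffi::OsStr;

use kamu_cli_puppet::KamuCliPuppet;

pub async fn assert_success_command_execution_sketch<I, S>(
    kamu: &KamuCliPuppet,
    args: I,
    maybe_expected_stdout: Option<&str>,
    maybe_expected_stderr: Option<Vec<&str>>,
) where
    I: IntoIterator<Item = S>,
    S: AsRef<OsStr>,
{
    // Run the CLI and require a zero exit code.
    let assert = kamu.execute(args).await.success();
    let output = assert.get_output();

    // Exact comparison of the rendered stdout (e.g. the tables above).
    if let Some(expected_stdout) = maybe_expected_stdout {
        let actual_stdout = std::str::from_utf8(&output.stdout).unwrap();
        pretty_assertions::assert_eq!(expected_stdout, actual_stdout);
    }

    // Substring checks for stderr fragments such as "Added: kamu-node".
    if let Some(expected_fragments) = maybe_expected_stderr {
        let actual_stderr = std::str::from_utf8(&output.stderr).unwrap();
        for fragment in expected_fragments {
            assert!(
                actual_stderr.contains(fragment),
                "stderr does not contain {fragment:?}:\n{actual_stderr}"
            );
        }
    }
}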
//////////////////////////////////////////////////////////////////////////////////////////////////////////////////////// diff --git a/src/e2e/app/cli/repo-tests/src/commands/test_system_api_server_gql_query.rs b/src/e2e/app/cli/repo-tests/src/commands/test_system_api_server_gql_query.rs index 7b9a77c28..5556d7f64 100644 --- a/src/e2e/app/cli/repo-tests/src/commands/test_system_api_server_gql_query.rs +++ b/src/e2e/app/cli/repo-tests/src/commands/test_system_api_server_gql_query.rs @@ -7,32 +7,29 @@ // the Business Source License, use of this software will be governed // by the Apache License, Version 2.0. +use kamu_cli_puppet::extensions::KamuCliPuppetExt; use kamu_cli_puppet::KamuCliPuppet; //////////////////////////////////////////////////////////////////////////////////////////////////////////////////////// pub async fn test_gql_query_api_version(kamu: KamuCliPuppet) { - let assert = kamu - .execute([ + kamu.assert_success_command_execution( + [ "system", "api-server", "gql-query", "{apiVersion}".escape_default().to_string().as_str(), - ]) - .await - .success(); - let stdout = std::str::from_utf8(&assert.get_output().stdout).unwrap(); - - assert_eq!( - stdout, - indoc::indoc!( + ], + Some(indoc::indoc!( r#" { "apiVersion": "0.1" } "# - ) - ); + )), + None::>, + ) + .await; } //////////////////////////////////////////////////////////////////////////////////////////////////////////////////////// diff --git a/src/e2e/app/cli/repo-tests/src/commands/test_system_gc_command.rs b/src/e2e/app/cli/repo-tests/src/commands/test_system_gc_command.rs new file mode 100644 index 000000000..3c82b5b6a --- /dev/null +++ b/src/e2e/app/cli/repo-tests/src/commands/test_system_gc_command.rs @@ -0,0 +1,24 @@ +// Copyright Kamu Data, Inc. and contributors. All rights reserved. +// +// Use of this software is governed by the Business Source License +// included in the LICENSE file. +// +// As of the Change Date specified in that file, in accordance with +// the Business Source License, use of this software will be governed +// by the Apache License, Version 2.0. + +use kamu_cli_puppet::extensions::KamuCliPuppetExt; +use kamu_cli_puppet::KamuCliPuppet; + +//////////////////////////////////////////////////////////////////////////////////////////////////////////////////////// + +pub async fn test_gc(kamu: KamuCliPuppet) { + kamu.assert_success_command_execution( + ["system", "gc"], + None, + Some(["Cleaning cache...", "Workspace is already clean"]), + ) + .await; +} + +//////////////////////////////////////////////////////////////////////////////////////////////////////////////////////// diff --git a/src/e2e/app/cli/repo-tests/src/commands/test_system_info_command.rs b/src/e2e/app/cli/repo-tests/src/commands/test_system_info_command.rs new file mode 100644 index 000000000..d0f8d206e --- /dev/null +++ b/src/e2e/app/cli/repo-tests/src/commands/test_system_info_command.rs @@ -0,0 +1,18 @@ +// Copyright Kamu Data, Inc. and contributors. All rights reserved. +// +// Use of this software is governed by the Business Source License +// included in the LICENSE file. +// +// As of the Change Date specified in that file, in accordance with +// the Business Source License, use of this software will be governed +// by the Apache License, Version 2.0. 
+ +use kamu_cli_puppet::KamuCliPuppet; + +//////////////////////////////////////////////////////////////////////////////////////////////////////////////////////// + +pub async fn test_system_info(kamu: KamuCliPuppet) { + kamu.execute(["system", "info"]).await.success(); +} + +//////////////////////////////////////////////////////////////////////////////////////////////////////////////////////// diff --git a/src/e2e/app/cli/repo-tests/src/commands/test_system_info_diagnose.rs b/src/e2e/app/cli/repo-tests/src/commands/test_system_info_diagnose.rs new file mode 100644 index 000000000..9ba38606d --- /dev/null +++ b/src/e2e/app/cli/repo-tests/src/commands/test_system_info_diagnose.rs @@ -0,0 +1,18 @@ +// Copyright Kamu Data, Inc. and contributors. All rights reserved. +// +// Use of this software is governed by the Business Source License +// included in the LICENSE file. +// +// As of the Change Date specified in that file, in accordance with +// the Business Source License, use of this software will be governed +// by the Apache License, Version 2.0. + +use kamu_cli_puppet::KamuCliPuppet; + +//////////////////////////////////////////////////////////////////////////////////////////////////////////////////////// + +pub async fn test_system_diagnose(kamu: KamuCliPuppet) { + kamu.execute(["system", "diagnose"]).await.success(); +} + +//////////////////////////////////////////////////////////////////////////////////////////////////////////////////////// diff --git a/src/e2e/app/cli/repo-tests/src/commands/test_tail_command.rs b/src/e2e/app/cli/repo-tests/src/commands/test_tail_command.rs new file mode 100644 index 000000000..1f84696db --- /dev/null +++ b/src/e2e/app/cli/repo-tests/src/commands/test_tail_command.rs @@ -0,0 +1,55 @@ +// Copyright Kamu Data, Inc. and contributors. All rights reserved. +// +// Use of this software is governed by the Business Source License +// included in the LICENSE file. +// +// As of the Change Date specified in that file, in accordance with +// the Business Source License, use of this software will be governed +// by the Apache License, Version 2.0. 
+ +use kamu_cli_e2e_common::{ + DATASET_ROOT_PLAYER_SCORES_INGEST_DATA_NDJSON_CHUNK_1, + DATASET_ROOT_PLAYER_SCORES_SNAPSHOT_STR, +}; +use kamu_cli_puppet::extensions::KamuCliPuppetExt; +use kamu_cli_puppet::KamuCliPuppet; + +//////////////////////////////////////////////////////////////////////////////////////////////////////////////////////// + +pub async fn test_tail(kamu: KamuCliPuppet) { + kamu.execute_with_input(["add", "--stdin"], DATASET_ROOT_PLAYER_SCORES_SNAPSHOT_STR) + .await + .success(); + + kamu.assert_failure_command_execution( + ["tail", "player-scores", "--output-format", "table"], + None, + Some(["Error: Dataset schema is not yet available: player-scores"]), + ) + .await; + + kamu.execute_with_input( + ["ingest", "player-scores", "--stdin"], + DATASET_ROOT_PLAYER_SCORES_INGEST_DATA_NDJSON_CHUNK_1, + ) + .await + .success(); + + kamu.assert_success_command_execution( + ["tail", "player-scores", "--output-format", "table"], + Some(indoc::indoc!( + r#" + ┌────────┬────┬──────────────────────┬──────────────────────┬──────────┬───────────┬───────┐ + │ offset │ op │ system_time │ match_time │ match_id │ player_id │ score │ + ├────────┼────┼──────────────────────┼──────────────────────┼──────────┼───────────┼───────┤ + │ 0 │ +A │ 2050-01-02T03:04:05Z │ 2000-01-01T00:00:00Z │ 1 │ Alice │ 100 │ + │ 1 │ +A │ 2050-01-02T03:04:05Z │ 2000-01-01T00:00:00Z │ 1 │ Bob │ 80 │ + └────────┴────┴──────────────────────┴──────────────────────┴──────────┴───────────┴───────┘ + "# + )), + None::>, + ) + .await; +} + +//////////////////////////////////////////////////////////////////////////////////////////////////////////////////////// diff --git a/src/e2e/app/cli/repo-tests/src/commands/test_verify_command.rs b/src/e2e/app/cli/repo-tests/src/commands/test_verify_command.rs new file mode 100644 index 000000000..eaac2950e --- /dev/null +++ b/src/e2e/app/cli/repo-tests/src/commands/test_verify_command.rs @@ -0,0 +1,109 @@ +// Copyright Kamu Data, Inc. and contributors. All rights reserved. +// +// Use of this software is governed by the Business Source License +// included in the LICENSE file. +// +// As of the Change Date specified in that file, in accordance with +// the Business Source License, use of this software will be governed +// by the Apache License, Version 2.0. 
+ +use kamu_cli_e2e_common::{ + DATASET_DERIVATIVE_LEADERBOARD_SNAPSHOT_STR, + DATASET_ROOT_PLAYER_SCORES_INGEST_DATA_NDJSON_CHUNK_1, + DATASET_ROOT_PLAYER_SCORES_SNAPSHOT_STR, +}; +use kamu_cli_puppet::extensions::KamuCliPuppetExt; +use kamu_cli_puppet::KamuCliPuppet; +use opendatafabric::DatasetName; + +//////////////////////////////////////////////////////////////////////////////////////////////////////////////////////// + +pub async fn test_verify_regular_dataset(kamu: KamuCliPuppet) { + let dataset_name = DatasetName::new_unchecked("player-scores"); + + kamu.execute_with_input(["add", "--stdin"], DATASET_ROOT_PLAYER_SCORES_SNAPSHOT_STR) + .await + .success(); + + kamu.ingest_data( + &dataset_name, + DATASET_ROOT_PLAYER_SCORES_INGEST_DATA_NDJSON_CHUNK_1, + ) + .await; + + kamu.assert_success_command_execution( + ["verify", dataset_name.as_str()], + None, + Some(["1 dataset(s) are valid"]), + ) + .await; +} + +//////////////////////////////////////////////////////////////////////////////////////////////////////////////////////// + +pub async fn test_verify_recursive(kamu: KamuCliPuppet) { + let dataset_name = DatasetName::new_unchecked("player-scores"); + let dataset_derivative_name = DatasetName::new_unchecked("leaderboard"); + + kamu.execute_with_input(["add", "--stdin"], DATASET_ROOT_PLAYER_SCORES_SNAPSHOT_STR) + .await + .success(); + + kamu.execute_with_input( + ["add", "--stdin"], + DATASET_DERIVATIVE_LEADERBOARD_SNAPSHOT_STR, + ) + .await + .success(); + + kamu.ingest_data( + &dataset_name, + DATASET_ROOT_PLAYER_SCORES_INGEST_DATA_NDJSON_CHUNK_1, + ) + .await; + + kamu.execute(["pull", dataset_derivative_name.as_str()]) + .await + .success(); + + // Call verify without recursive flag + kamu.assert_success_command_execution( + ["verify", dataset_derivative_name.as_str()], + None, + Some(["1 dataset(s) are valid"]), + ) + .await; + + // Call verify wit recursive flag + kamu.assert_success_command_execution( + ["verify", dataset_derivative_name.as_str(), "--recursive"], + None, + Some(["2 dataset(s) are valid"]), + ) + .await; +} + +//////////////////////////////////////////////////////////////////////////////////////////////////////////////////////// + +pub async fn test_verify_integrity(kamu: KamuCliPuppet) { + let dataset_name = DatasetName::new_unchecked("player-scores"); + + kamu.execute_with_input(["add", "--stdin"], DATASET_ROOT_PLAYER_SCORES_SNAPSHOT_STR) + .await + .success(); + + kamu.ingest_data( + &dataset_name, + DATASET_ROOT_PLAYER_SCORES_INGEST_DATA_NDJSON_CHUNK_1, + ) + .await; + + kamu.assert_success_command_execution( + ["verify", dataset_name.as_str(), "--integrity"], + None, + Some(["1 dataset(s) are valid"]), + ) + .await; +} + +//////////////////////////////////////////////////////////////////////////////////////////////////////////////////////// diff --git a/src/e2e/app/cli/repo-tests/src/lib.rs b/src/e2e/app/cli/repo-tests/src/lib.rs index 2bd7d97f9..51d5ab6b2 100644 --- a/src/e2e/app/cli/repo-tests/src/lib.rs +++ b/src/e2e/app/cli/repo-tests/src/lib.rs @@ -7,6 +7,8 @@ // the Business Source License, use of this software will be governed // by the Apache License, Version 2.0. 
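// For orientation: the verify and tail tests above rely on `kamu.ingest_data(...)`, which is also
// not defined in this diff. Assuming it is a thin wrapper over the `ingest --stdin` invocation
// already spelled out in the sql tests, an equivalent would look roughly like this (sketch only):
use kamu_cli_puppet::KamuCliPuppet;
use opendatafabric::DatasetName;

// Pipe an NDJSON chunk into `kamu ingest <dataset> --stdin` and require success.
pub async fn ingest_data_sketch(kamu: &KamuCliPuppet, dataset_name: &DatasetName, data: &str) {
    kamu.execute_with_input(["ingest", dataset_name.as_str(), "--stdin"], data)
        .await
        .success();
}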
+#![feature(assert_matches)] + mod commands; mod test_auth; mod test_flow; diff --git a/src/e2e/app/cli/repo-tests/src/test_flow.rs b/src/e2e/app/cli/repo-tests/src/test_flow.rs index 855724fc5..d1ff857fb 100644 --- a/src/e2e/app/cli/repo-tests/src/test_flow.rs +++ b/src/e2e/app/cli/repo-tests/src/test_flow.rs @@ -17,7 +17,7 @@ pub async fn test_get_dataset_list_flows(kamu_api_server_client: KamuApiServerCl let token = kamu_api_server_client.login_as_kamu().await; let dataset_id = kamu_api_server_client - .create_player_scores_dataset_with_data(&token) + .create_player_scores_dataset_with_data(&token, None) .await; // The query is almost identical to kamu-web-ui, for ease of later edits. @@ -88,7 +88,7 @@ pub async fn test_dataset_all_flows_paused(kamu_api_server_client: KamuApiServer let token = kamu_api_server_client.login_as_kamu().await; let dataset_id = kamu_api_server_client - .create_player_scores_dataset_with_data(&token) + .create_player_scores_dataset_with_data(&token, None) .await; // The query is almost identical to kamu-web-ui, for ease of later edits. @@ -147,7 +147,7 @@ pub async fn test_dataset_flows_initiators(kamu_api_server_client: KamuApiServer let token = kamu_api_server_client.login_as_kamu().await; let dataset_id = kamu_api_server_client - .create_player_scores_dataset_with_data(&token) + .create_player_scores_dataset_with_data(&token, None) .await; // The query is almost identical to kamu-web-ui, for ease of later edits. @@ -228,7 +228,7 @@ pub async fn test_dataset_trigger_flow(kamu_api_server_client: KamuApiServerClie let token = kamu_api_server_client.login_as_kamu().await; let _root_dataset_id = kamu_api_server_client - .create_player_scores_dataset_with_data(&token) + .create_player_scores_dataset_with_data(&token, None) .await; let derivative_dataset_id = kamu_api_server_client.create_leaderboard(&token).await; @@ -1169,7 +1169,7 @@ async fn wait_for_flows_to_finish( dataset_id: &str, token: AccessToken, ) { - let retry_strategy = FixedInterval::from_millis(5_000).take(10); + let retry_strategy = FixedInterval::from_millis(5_000).take(18); // 1m 30s Retry::spawn(retry_strategy, || async { let response = kamu_api_server_client diff --git a/src/e2e/app/cli/repo-tests/src/test_rest_api.rs b/src/e2e/app/cli/repo-tests/src/test_rest_api.rs index 2cbf04a80..d023596f1 100644 --- a/src/e2e/app/cli/repo-tests/src/test_rest_api.rs +++ b/src/e2e/app/cli/repo-tests/src/test_rest_api.rs @@ -13,6 +13,7 @@ use kamu_cli_e2e_common::{ KamuApiServerClient, KamuApiServerClientExt, RequestBody, + DATASET_ROOT_PLAYER_SCORES_INGEST_DATA_NDJSON_CHUNK_1, }; use reqwest::{Method, StatusCode}; @@ -46,13 +47,7 @@ pub async fn test_rest_api_request_dataset_tail(kamu_api_server_client: KamuApiS Method::POST, "player-scores/ingest", Some(RequestBody::NdJson( - indoc::indoc!( - r#" - {"match_time": "2000-01-01", "match_id": 1, "player_id": "Alice", "score": 100} - {"match_time": "2000-01-01", "match_id": 1, "player_id": "Bob", "score": 80} - "#, - ) - .into(), + DATASET_ROOT_PLAYER_SCORES_INGEST_DATA_NDJSON_CHUNK_1.into(), )), StatusCode::OK, None, diff --git a/src/e2e/app/cli/repo-tests/src/test_smart_transfer_protocol.rs b/src/e2e/app/cli/repo-tests/src/test_smart_transfer_protocol.rs index 4de9a1663..c8d47cab9 100644 --- a/src/e2e/app/cli/repo-tests/src/test_smart_transfer_protocol.rs +++ b/src/e2e/app/cli/repo-tests/src/test_smart_transfer_protocol.rs @@ -7,104 +7,753 @@ // the Business Source License, use of this software will be governed // by the Apache License, Version 2.0. 
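// For orientation: the smart-transfer-protocol tests below switch from assembling the `odf+http`
// dataset URL by hand (the removed block further down in this file) to the new
// `kamu_api_server_client.get_dataset_endpoint(&dataset_alias)` helper. Reconstructed from the
// removed code, the helper presumably does the equivalent of the following; using the
// `DatasetAlias` string form for the path segment is an assumption.
use opendatafabric::DatasetAlias;
use reqwest::Url;

pub fn get_dataset_endpoint_sketch(base_url: &Url, dataset_alias: &DatasetAlias) -> Url {
    // Start from the ODF smart-transfer scheme and graft on the API server's host and port.
    let mut dataset_endpoint = Url::parse("odf+http://host").unwrap();

    dataset_endpoint.set_host(base_url.host_str()).unwrap();
    dataset_endpoint.set_port(base_url.port()).unwrap();

    // Append "<account>/<dataset-name>", e.g. "e2e-user/player-scores".
    dataset_endpoint.join(&dataset_alias.to_string()).unwrap()
}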
-use kamu_cli_e2e_common::{KamuApiServerClient, KamuApiServerClientExt}; +use std::str::FromStr; + +use chrono::DateTime; +use kamu::testing::LocalS3Server; +use kamu_cli_e2e_common::{ + KamuApiServerClient, + KamuApiServerClientExt, + DATASET_DERIVATIVE_LEADERBOARD_SNAPSHOT_STR, + DATASET_ROOT_PLAYER_SCORES_INGEST_DATA_NDJSON_CHUNK_1, + DATASET_ROOT_PLAYER_SCORES_INGEST_DATA_NDJSON_CHUNK_2, + DATASET_ROOT_PLAYER_SCORES_SNAPSHOT_STR, + E2E_USER_ACCOUNT_NAME_STR, +}; +use kamu_cli_puppet::extensions::{KamuCliPuppetExt, RepoAlias}; use kamu_cli_puppet::KamuCliPuppet; -use reqwest::Url; +use opendatafabric::{AccountName, DatasetAlias, DatasetName}; //////////////////////////////////////////////////////////////////////////////////////////////////////////////////////// pub async fn test_smart_push_pull_sequence(kamu_api_server_client: KamuApiServerClient) { + let dataset_alias = DatasetAlias::new( + Some(AccountName::new_unchecked("e2e-user")), + DatasetName::new_unchecked("player-scores"), + ); + let kamu_api_server_dataset_endpoint = + kamu_api_server_client.get_dataset_endpoint(&dataset_alias); + // 1. Grub a token let token = kamu_api_server_client.login_as_e2e_user().await; - let kamu_api_server_dataset_endpoint = { - let base_url = kamu_api_server_client.get_base_url(); + // 2. Pushing the dataset to the API server + { + let kamu_in_push_workspace = KamuCliPuppet::new_workspace_tmp().await; + + // 2.1. Add the dataset + { + kamu_in_push_workspace + .execute_with_input(["add", "--stdin"], DATASET_ROOT_PLAYER_SCORES_SNAPSHOT_STR) + .await + .success(); + } + + // 2.1. Ingest data to the dataset + { + kamu_in_push_workspace + .ingest_data( + &dataset_alias.dataset_name, + DATASET_ROOT_PLAYER_SCORES_INGEST_DATA_NDJSON_CHUNK_1, + ) + .await; + } + + // 2.2. Login to the API server + kamu_in_push_workspace + .execute([ + "login", + kamu_api_server_client.get_base_url().as_str(), + "--access-token", + token.as_str(), + ]) + .await + .success(); + + // 2.3. Push the dataset to the API server + kamu_in_push_workspace + .assert_success_command_execution( + [ + "push", + dataset_alias.dataset_name.as_str(), + "--to", + kamu_api_server_dataset_endpoint.as_str(), + ], + None, + Some(["1 dataset(s) pushed"]), + ) + .await; + } + + // 3. Pulling the dataset from the API server + { + let kamu_in_pull_workspace = KamuCliPuppet::new_workspace_tmp().await; + + kamu_in_pull_workspace + .assert_success_command_execution( + ["pull", kamu_api_server_dataset_endpoint.as_str()], + None, + Some(["1 dataset(s) updated"]), + ) + .await; + } +} - let mut dataset_endpoint = Url::parse("odf+http://host").unwrap(); +//////////////////////////////////////////////////////////////////////////////////////////////////////////////////////// - dataset_endpoint.set_host(base_url.host_str()).unwrap(); - dataset_endpoint.set_port(base_url.port()).unwrap(); +pub async fn test_smart_force_push_pull(kamu_api_server_client: KamuApiServerClient) { + let dataset_alias = DatasetAlias::new( + Some(AccountName::new_unchecked("e2e-user")), + DatasetName::new_unchecked("player-scores"), + ); + let kamu_api_server_dataset_endpoint = + kamu_api_server_client.get_dataset_endpoint(&dataset_alias); - dataset_endpoint - .join("e2e-user/player-scores") - .unwrap() - .to_string() - }; + // 1. Grub a token + let token = kamu_api_server_client.login_as_e2e_user().await; // 2. Pushing the dataset to the API server { let kamu_in_push_workspace = KamuCliPuppet::new_workspace_tmp().await; // 2.1. 
Add the dataset + kamu_in_push_workspace + .execute_with_input(["add", "--stdin"], DATASET_ROOT_PLAYER_SCORES_SNAPSHOT_STR) + .await + .success(); + + // 2.1. Ingest data to the dataset + kamu_in_push_workspace + .ingest_data( + &dataset_alias.dataset_name, + DATASET_ROOT_PLAYER_SCORES_INGEST_DATA_NDJSON_CHUNK_1, + ) + .await; + + // 2.2. Login to the API server + kamu_in_push_workspace + .execute([ + "login", + kamu_api_server_client.get_base_url().as_str(), + "--access-token", + token.as_str(), + ]) + .await + .success(); + + // Initial dataset push + kamu_in_push_workspace + .assert_success_command_execution( + [ + "push", + dataset_alias.dataset_name.as_str(), + "--to", + kamu_api_server_dataset_endpoint.as_str(), + ], + None, + Some(["1 dataset(s) pushed"]), + ) + .await; + + // Hard compact dataset + kamu_in_push_workspace + .execute([ + "--yes", + "system", + "compact", + dataset_alias.dataset_name.as_str(), + "--hard", + "--keep-metadata-only", + ]) + .await + .success(); + + // Should fail without force flag + kamu_in_push_workspace + .assert_failure_command_execution( + [ + "push", + dataset_alias.dataset_name.as_str(), + "--to", + kamu_api_server_dataset_endpoint.as_str(), + ], + None, + Some(["Failed to push 1 dataset(s)"]), + ) + .await; + + // Should successfully push with force flag + kamu_in_push_workspace + .assert_success_command_execution( + [ + "push", + dataset_alias.dataset_name.as_str(), + "--to", + kamu_api_server_dataset_endpoint.as_str(), + "--force", + ], + None, + Some(["1 dataset(s) pushed"]), + ) + .await; + } + + // 3. Pulling the dataset from the API server + { + let kamu_in_pull_workspace = KamuCliPuppet::new_workspace_tmp().await; + + // Call with no-alias flag to avoid remote ingest checking in next step + kamu_in_pull_workspace + .assert_success_command_execution( + [ + "pull", + kamu_api_server_dataset_endpoint.as_str(), + "--no-alias", + ], + None, + Some(["1 dataset(s) updated"]), + ) + .await; + + // Ingest data in pulled dataset + + kamu_in_pull_workspace + .ingest_data( + &dataset_alias.dataset_name, + DATASET_ROOT_PLAYER_SCORES_INGEST_DATA_NDJSON_CHUNK_2, + ) + .await; + + // Should fail without force flag + kamu_in_pull_workspace + .assert_failure_command_execution( + ["pull", kamu_api_server_dataset_endpoint.as_str()], + None, + Some(["Failed to update 1 dataset(s)"]), + ) + .await; + + // Should successfully pull with force flag + kamu_in_pull_workspace + .assert_success_command_execution( + ["pull", kamu_api_server_dataset_endpoint.as_str(), "--force"], + None, + Some(["1 dataset(s) updated"]), + ) + .await; + } +} + +//////////////////////////////////////////////////////////////////////////////////////////////////////////////////////// + +pub async fn test_smart_push_pull_add_alias(kamu_api_server_client: KamuApiServerClient) { + let dataset_alias = DatasetAlias::new( + Some(AccountName::new_unchecked("e2e-user")), + DatasetName::new_unchecked("player-scores"), + ); + let kamu_api_server_dataset_endpoint = + kamu_api_server_client.get_dataset_endpoint(&dataset_alias); + + // 1. Grub a token + let token = kamu_api_server_client.login_as_e2e_user().await; + + // 2. 
Push command + { + let kamu_in_push_workspace = KamuCliPuppet::new_workspace_tmp().await; + + // Add the dataset + kamu_in_push_workspace + .execute_with_input(["add", "--stdin"], DATASET_ROOT_PLAYER_SCORES_SNAPSHOT_STR) + .await + .success(); + + // Ingest data to the dataset + kamu_in_push_workspace + .ingest_data( + &dataset_alias.dataset_name, + DATASET_ROOT_PLAYER_SCORES_INGEST_DATA_NDJSON_CHUNK_1, + ) + .await; + + // Login to the API server + kamu_in_push_workspace + .execute([ + "login", + kamu_api_server_client.get_base_url().as_str(), + "--access-token", + token.as_str(), + ]) + .await + .success(); + + // Dataset push without storing alias + kamu_in_push_workspace + .assert_success_command_execution( + [ + "push", + dataset_alias.dataset_name.as_str(), + "--to", + kamu_api_server_dataset_endpoint.as_str(), + "--no-alias", + ], + None, + Some(["1 dataset(s) pushed"]), + ) + .await; + + // Check alias should be empty + let aliases = kamu_in_push_workspace + .get_list_of_repo_aliases(&dataset_alias.dataset_name.clone().into()) + .await; + assert!(aliases.is_empty()); + + // Dataset push with storing alias + kamu_in_push_workspace + .assert_success_command_execution( + [ + "push", + dataset_alias.dataset_name.as_str(), + "--to", + kamu_api_server_dataset_endpoint.as_str(), + ], + None, + Some(["1 dataset(s) up-to-date"]), + ) + .await; + + let aliases = kamu_in_push_workspace + .get_list_of_repo_aliases(&dataset_alias.dataset_name.clone().into()) + .await; + let expected_aliases = vec![RepoAlias { + dataset: dataset_alias.dataset_name.clone(), + kind: "Push".to_string(), + alias: kamu_api_server_dataset_endpoint.to_string(), + }]; + pretty_assertions::assert_eq!(aliases, expected_aliases); + } + + // 3. Pull command + { + let kamu_in_pull_workspace = KamuCliPuppet::new_workspace_tmp().await; + + // Dataset pull without storing alias + kamu_in_pull_workspace + .assert_success_command_execution( + [ + "pull", + kamu_api_server_dataset_endpoint.as_str(), + "--no-alias", + ], + None, + Some(["1 dataset(s) updated"]), + ) + .await; + + // Check alias should be empty + let aliases = kamu_in_pull_workspace + .get_list_of_repo_aliases(&dataset_alias.dataset_name.clone().into()) + .await; + assert!(aliases.is_empty()); + + // Delete local dataset + kamu_in_pull_workspace + .execute(["--yes", "delete", dataset_alias.dataset_name.as_str()]) + .await + .success(); + + // Dataset pull with storing alias + kamu_in_pull_workspace + .assert_success_command_execution( + ["pull", kamu_api_server_dataset_endpoint.as_str()], + None, + Some(["1 dataset(s) updated"]), + ) + .await; + + let aliases = kamu_in_pull_workspace + .get_list_of_repo_aliases(&dataset_alias.dataset_name.clone().into()) + .await; + let expected_aliases = vec![RepoAlias { + dataset: dataset_alias.dataset_name.clone(), + kind: "Pull".to_string(), + alias: kamu_api_server_dataset_endpoint.to_string(), + }]; + pretty_assertions::assert_eq!(aliases, expected_aliases); + } +} + +//////////////////////////////////////////////////////////////////////////////////////////////////////////////////////// + +pub async fn test_smart_pull_as(kamu_api_server_client: KamuApiServerClient) { + let dataset_alias = DatasetAlias::new( + Some(AccountName::new_unchecked("e2e-user")), + DatasetName::new_unchecked("player-scores"), + ); + let kamu_api_server_dataset_endpoint = + kamu_api_server_client.get_dataset_endpoint(&dataset_alias); + + // 1. 
Grub a token + let token = kamu_api_server_client.login_as_e2e_user().await; + + kamu_api_server_client + .create_player_scores_dataset_with_data( + &token, + Some(AccountName::new_unchecked(E2E_USER_ACCOUNT_NAME_STR)), + ) + .await; + + { + let kamu_in_pull_workspace = KamuCliPuppet::new_workspace_tmp().await; + let new_dataset_name = DatasetName::new_unchecked("foo"); + + kamu_in_pull_workspace + .assert_success_command_execution( + [ + "pull", + kamu_api_server_dataset_endpoint.as_str(), + "--as", + new_dataset_name.as_str(), + ], + None, + Some(["1 dataset(s) updated"]), + ) + .await; + + let expected_dataset_list = kamu_in_pull_workspace + .list_datasets() + .await + .into_iter() + .map(|dataset| dataset.name) + .collect::>(); + + pretty_assertions::assert_eq!(vec![new_dataset_name], expected_dataset_list); + } +} + +//////////////////////////////////////////////////////////////////////////////////////////////////////////////////////// + +pub async fn test_smart_push_pull_all(kamu_api_server_client: KamuApiServerClient) { + let root_dataset_alias = DatasetAlias::new( + Some(AccountName::new_unchecked("e2e-user")), + DatasetName::new_unchecked("player-scores"), + ); + let kamu_api_server_root_dataset_endpoint = + kamu_api_server_client.get_dataset_endpoint(&root_dataset_alias); + + let derivative_dataset_alias = DatasetAlias::new( + Some(AccountName::new_unchecked("e2e-user")), + DatasetName::new_unchecked("leaderboard"), + ); + let kamu_api_server_derivative_dataset_endpoint = + kamu_api_server_client.get_dataset_endpoint(&derivative_dataset_alias); + + // 1. Grub a token + let token = kamu_api_server_client.login_as_e2e_user().await; + + let mut kamu_in_push_workspace = KamuCliPuppet::new_workspace_tmp().await; + + // 2. Pushing datasets to the API server + { + kamu_in_push_workspace + .set_system_time(Some(DateTime::from_str("2050-01-02T03:04:05Z").unwrap())); + + // 2.1. Add datasets { - let dataset_path = kamu_in_push_workspace - .workspace_path() - .join("player-scores.yaml"); - - std::fs::write( - dataset_path.clone(), - indoc::indoc!( - r#" - kind: DatasetSnapshot - version: 1 - content: - name: player-scores - kind: Root - metadata: - - kind: AddPushSource - sourceName: default - read: - kind: NdJson - schema: - - "match_time TIMESTAMP" - - "match_id BIGINT" - - "player_id STRING" - - "score BIGINT" - merge: - kind: Ledger - primaryKey: - - match_id - - player_id - - kind: SetVocab - eventTimeColumn: match_time - "# - ), - ) - .unwrap(); + kamu_in_push_workspace + .execute_with_input(["add", "--stdin"], DATASET_ROOT_PLAYER_SCORES_SNAPSHOT_STR) + .await + .success(); kamu_in_push_workspace - .execute(["add", dataset_path.to_str().unwrap()]) + .execute_with_input( + ["add", "--stdin"], + DATASET_DERIVATIVE_LEADERBOARD_SNAPSHOT_STR, + ) .await .success(); } // 2.1. Ingest data to the dataset { - let dataset_data_path = kamu_in_push_workspace - .workspace_path() - .join("player-scores.data.ndjson"); + kamu_in_push_workspace + .ingest_data( + &root_dataset_alias.dataset_name, + DATASET_ROOT_PLAYER_SCORES_INGEST_DATA_NDJSON_CHUNK_1, + ) + .await; + } - std::fs::write( - dataset_data_path.clone(), - indoc::indoc!( - r#" - {"match_time": "2000-01-01", "match_id": 1, "player_id": "Alice", "score": 100} - {"match_time": "2000-01-01", "match_id": 1, "player_id": "Bob", "score": 80} - "#, - ), + // 2.2. 
Login to the API server + kamu_in_push_workspace + .execute([ + "login", + kamu_api_server_client.get_base_url().as_str(), + "--access-token", + token.as_str(), + ]) + .await + .success(); + + // Push all datasets should fail + kamu_in_push_workspace + .assert_failure_command_execution( + ["push", "--all"], + None, + Some(["Pushing all datasets is not yet supported"]), + ) + .await; + + // Push datasets one by one + kamu_in_push_workspace + .assert_success_command_execution( + [ + "push", + root_dataset_alias.dataset_name.as_str(), + "--to", + kamu_api_server_root_dataset_endpoint.as_str(), + ], + None, + Some(["1 dataset(s) pushed"]), + ) + .await; + + kamu_in_push_workspace + .execute(["pull", derivative_dataset_alias.dataset_name.as_str()]) + .await + .success(); + + kamu_in_push_workspace + .assert_success_command_execution( + [ + "push", + derivative_dataset_alias.dataset_name.as_str(), + "--to", + kamu_api_server_derivative_dataset_endpoint.as_str(), + ], + None, + Some(["1 dataset(s) pushed"]), + ) + .await; + } + + // 3. Pulling datasets from the API server + { + let kamu_in_pull_workspace = KamuCliPuppet::new_workspace_tmp().await; + + // Pull datasets one by one and check data + kamu_in_pull_workspace + .assert_success_command_execution( + ["pull", kamu_api_server_root_dataset_endpoint.as_str()], + None, + Some(["1 dataset(s) updated"]), + ) + .await; + + kamu_in_pull_workspace + .assert_success_command_execution( + ["pull", kamu_api_server_derivative_dataset_endpoint.as_str()], + None, + Some(["1 dataset(s) updated"]), + ) + .await; + + let expected_schema = indoc::indoc!( + r#" + message arrow_schema { + REQUIRED INT64 offset; + REQUIRED INT32 op; + REQUIRED INT64 system_time (TIMESTAMP(MILLIS,true)); + OPTIONAL INT64 match_time (TIMESTAMP(MILLIS,true)); + OPTIONAL INT64 match_id; + OPTIONAL BYTE_ARRAY player_id (STRING); + OPTIONAL INT64 score; + } + "# + ); + let expected_data = indoc::indoc!( + r#" + +--------+----+----------------------+----------------------+----------+-----------+-------+ + | offset | op | system_time | match_time | match_id | player_id | score | + +--------+----+----------------------+----------------------+----------+-----------+-------+ + | 0 | 0 | 2050-01-02T03:04:05Z | 2000-01-01T00:00:00Z | 1 | Alice | 100 | + | 1 | 0 | 2050-01-02T03:04:05Z | 2000-01-01T00:00:00Z | 1 | Bob | 80 | + +--------+----+----------------------+----------------------+----------+-----------+-------+ + "# + ); + let expected_derivative_schema = indoc::indoc!( + r#" + message arrow_schema { + OPTIONAL INT64 offset; + REQUIRED INT32 op; + REQUIRED INT64 system_time (TIMESTAMP(MILLIS,true)); + OPTIONAL INT64 match_time (TIMESTAMP(MILLIS,true)); + OPTIONAL INT64 place; + OPTIONAL INT64 match_id; + OPTIONAL BYTE_ARRAY player_id (STRING); + OPTIONAL INT64 score; + } + "# + ); + let expected_derivative_data = indoc::indoc!( + r#" + +--------+----+----------------------+----------------------+-------+----------+-----------+-------+ + | offset | op | system_time | match_time | place | match_id | player_id | score | + +--------+----+----------------------+----------------------+-------+----------+-----------+-------+ + | 0 | 0 | 2050-01-02T03:04:05Z | 2000-01-01T00:00:00Z | 1 | 1 | Alice | 100 | + | 1 | 0 | 2050-01-02T03:04:05Z | 2000-01-01T00:00:00Z | 2 | 1 | Bob | 80 | + +--------+----+----------------------+----------------------+-------+----------+-----------+-------+ + "# + ); + + kamu_in_pull_workspace + .assert_last_data_slice( + &root_dataset_alias.dataset_name, + 
expected_schema, + expected_data, + ) + .await; + kamu_in_pull_workspace + .assert_last_data_slice( + &derivative_dataset_alias.dataset_name, + expected_derivative_schema, + expected_derivative_data, + ) + .await; + + // Update remote datasets + + kamu_in_push_workspace + .ingest_data( + &root_dataset_alias.dataset_name, + DATASET_ROOT_PLAYER_SCORES_INGEST_DATA_NDJSON_CHUNK_2, + ) + .await; + + kamu_in_push_workspace + .assert_success_command_execution( + ["pull", derivative_dataset_alias.dataset_name.as_str()], + None, + Some(["1 dataset(s) updated"]), + ) + .await; + + kamu_in_push_workspace + .assert_success_command_execution( + [ + "push", + root_dataset_alias.dataset_name.as_str(), + "--to", + kamu_api_server_root_dataset_endpoint.as_str(), + ], + None, + Some(["1 dataset(s) pushed"]), + ) + .await; + + kamu_in_push_workspace + .assert_success_command_execution( + [ + "push", + derivative_dataset_alias.dataset_name.as_str(), + "--to", + kamu_api_server_derivative_dataset_endpoint.as_str(), + ], + None, + Some(["1 dataset(s) pushed"]), + ) + .await; + + // Pull all datasets + kamu_in_pull_workspace + .assert_success_command_execution( + ["pull", "--all"], + None, + Some(["2 dataset(s) updated"]), + ) + .await; + + // Perform dataslices checks + let expected_data = indoc::indoc!( + r#" + +--------+----+----------------------+----------------------+----------+-----------+-------+ + | offset | op | system_time | match_time | match_id | player_id | score | + +--------+----+----------------------+----------------------+----------+-----------+-------+ + | 2 | 0 | 2050-01-02T03:04:05Z | 2000-01-02T00:00:00Z | 2 | Charlie | 90 | + | 3 | 0 | 2050-01-02T03:04:05Z | 2000-01-02T00:00:00Z | 2 | Alice | 70 | + +--------+----+----------------------+----------------------+----------+-----------+-------+ + "# + ); + let expected_derivative_data = indoc::indoc!( + r#" + +--------+----+----------------------+----------------------+-------+----------+-----------+-------+ + | offset | op | system_time | match_time | place | match_id | player_id | score | + +--------+----+----------------------+----------------------+-------+----------+-----------+-------+ + | 2 | 1 | 2050-01-02T03:04:05Z | 2000-01-01T00:00:00Z | 2 | 1 | Bob | 80 | + | 3 | 0 | 2050-01-02T03:04:05Z | 2000-01-02T00:00:00Z | 2 | 2 | Charlie | 90 | + +--------+----+----------------------+----------------------+-------+----------+-----------+-------+ + "# + ); + + kamu_in_pull_workspace + .assert_last_data_slice( + &root_dataset_alias.dataset_name, + expected_schema, + expected_data, ) - .unwrap(); + .await; + kamu_in_pull_workspace + .assert_last_data_slice( + &derivative_dataset_alias.dataset_name, + expected_derivative_schema, + expected_derivative_data, + ) + .await; + } +} + +//////////////////////////////////////////////////////////////////////////////////////////////////////////////////////// + +pub async fn test_smart_push_pull_recursive(kamu_api_server_client: KamuApiServerClient) { + let root_dataset_alias = DatasetAlias::new( + Some(AccountName::new_unchecked("e2e-user")), + DatasetName::new_unchecked("player-scores"), + ); + let kamu_api_server_root_dataset_endpoint = + kamu_api_server_client.get_dataset_endpoint(&root_dataset_alias); + + let derivative_dataset_alias = DatasetAlias::new( + Some(AccountName::new_unchecked("e2e-user")), + DatasetName::new_unchecked("leaderboard"), + ); + + // 1. 
Grub a token + let token = kamu_api_server_client.login_as_e2e_user().await; + + let mut kamu_in_push_workspace = KamuCliPuppet::new_workspace_tmp().await; + + // 2. Pushing datasets to the API server + { + kamu_in_push_workspace + .set_system_time(Some(DateTime::from_str("2050-01-02T03:04:05Z").unwrap())); + // 2.1. Add datasets + { kamu_in_push_workspace - .execute([ - "ingest", - "player-scores", - dataset_data_path.to_str().unwrap(), - ]) + .execute_with_input(["add", "--stdin"], DATASET_ROOT_PLAYER_SCORES_SNAPSHOT_STR) .await .success(); } + // 2.1. Ingest data to the dataset + { + kamu_in_push_workspace + .ingest_data( + &root_dataset_alias.dataset_name, + DATASET_ROOT_PLAYER_SCORES_INGEST_DATA_NDJSON_CHUNK_1, + ) + .await; + } + // 2.2. Login to the API server kamu_in_push_workspace .execute([ @@ -116,27 +765,521 @@ pub async fn test_smart_push_pull_sequence(kamu_api_server_client: KamuApiServer .await .success(); - // 2.3. Push the dataset to the API server + // Push all datasets should fail + kamu_in_push_workspace + .assert_failure_command_execution( + [ + "push", + root_dataset_alias.dataset_name.as_str(), + "--recursive", + ], + None, + Some(["Recursive push is not yet supported"]), + ) + .await; + + // Push dataset + kamu_in_push_workspace + .assert_success_command_execution( + [ + "push", + root_dataset_alias.dataset_name.as_str(), + "--to", + kamu_api_server_root_dataset_endpoint.as_str(), + ], + None, + Some(["1 dataset(s) pushed"]), + ) + .await; + } + + // 3. Pulling datasets from the API server + { + let mut kamu_in_pull_workspace = KamuCliPuppet::new_workspace_tmp().await; + + kamu_in_pull_workspace + .set_system_time(Some(DateTime::from_str("2050-01-02T03:04:05Z").unwrap())); + + // Pull datasets one by one and check data + kamu_in_pull_workspace + .assert_success_command_execution( + ["pull", kamu_api_server_root_dataset_endpoint.as_str()], + None, + Some(["1 dataset(s) updated"]), + ) + .await; + + kamu_in_pull_workspace + .execute_with_input( + ["add", "--stdin"], + DATASET_DERIVATIVE_LEADERBOARD_SNAPSHOT_STR, + ) + .await + .success(); + + kamu_in_pull_workspace + .assert_success_command_execution( + ["pull", derivative_dataset_alias.dataset_name.as_str()], + None, + Some(["1 dataset(s) updated"]), + ) + .await; + + let expected_schema = indoc::indoc!( + r#" + message arrow_schema { + REQUIRED INT64 offset; + REQUIRED INT32 op; + REQUIRED INT64 system_time (TIMESTAMP(MILLIS,true)); + OPTIONAL INT64 match_time (TIMESTAMP(MILLIS,true)); + OPTIONAL INT64 match_id; + OPTIONAL BYTE_ARRAY player_id (STRING); + OPTIONAL INT64 score; + } + "# + ); + let expected_data = indoc::indoc!( + r#" + +--------+----+----------------------+----------------------+----------+-----------+-------+ + | offset | op | system_time | match_time | match_id | player_id | score | + +--------+----+----------------------+----------------------+----------+-----------+-------+ + | 0 | 0 | 2050-01-02T03:04:05Z | 2000-01-01T00:00:00Z | 1 | Alice | 100 | + | 1 | 0 | 2050-01-02T03:04:05Z | 2000-01-01T00:00:00Z | 1 | Bob | 80 | + +--------+----+----------------------+----------------------+----------+-----------+-------+ + "# + ); + let expected_derivative_schema = indoc::indoc!( + r#" + message arrow_schema { + OPTIONAL INT64 offset; + REQUIRED INT32 op; + REQUIRED INT64 system_time (TIMESTAMP(MILLIS,true)); + OPTIONAL INT64 match_time (TIMESTAMP(MILLIS,true)); + OPTIONAL INT64 place; + OPTIONAL INT64 match_id; + OPTIONAL BYTE_ARRAY player_id (STRING); + OPTIONAL INT64 score; + } + "# + ); + let 
expected_derivative_data = indoc::indoc!( + r#" + +--------+----+----------------------+----------------------+-------+----------+-----------+-------+ + | offset | op | system_time | match_time | place | match_id | player_id | score | + +--------+----+----------------------+----------------------+-------+----------+-----------+-------+ + | 0 | 0 | 2050-01-02T03:04:05Z | 2000-01-01T00:00:00Z | 1 | 1 | Alice | 100 | + | 1 | 0 | 2050-01-02T03:04:05Z | 2000-01-01T00:00:00Z | 2 | 1 | Bob | 80 | + +--------+----+----------------------+----------------------+-------+----------+-----------+-------+ + "# + ); + + kamu_in_pull_workspace + .assert_last_data_slice( + &root_dataset_alias.dataset_name, + expected_schema, + expected_data, + ) + .await; + kamu_in_pull_workspace + .assert_last_data_slice( + &derivative_dataset_alias.dataset_name, + expected_derivative_schema, + expected_derivative_data, + ) + .await; + + // Update remote datasets + + kamu_in_push_workspace + .ingest_data( + &root_dataset_alias.dataset_name, + DATASET_ROOT_PLAYER_SCORES_INGEST_DATA_NDJSON_CHUNK_2, + ) + .await; + + kamu_in_push_workspace + .assert_success_command_execution( + [ + "push", + root_dataset_alias.dataset_name.as_str(), + "--to", + kamu_api_server_root_dataset_endpoint.as_str(), + ], + None, + Some(["1 dataset(s) pushed"]), + ) + .await; + + // Pull all datasets + kamu_in_pull_workspace + .assert_success_command_execution( + [ + "pull", + derivative_dataset_alias.dataset_name.as_str(), + "--recursive", + ], + None, + Some(["2 dataset(s) updated"]), + ) + .await; + + // Perform dataslices checks + let expected_data = indoc::indoc!( + r#" + +--------+----+----------------------+----------------------+----------+-----------+-------+ + | offset | op | system_time | match_time | match_id | player_id | score | + +--------+----+----------------------+----------------------+----------+-----------+-------+ + | 2 | 0 | 2050-01-02T03:04:05Z | 2000-01-02T00:00:00Z | 2 | Charlie | 90 | + | 3 | 0 | 2050-01-02T03:04:05Z | 2000-01-02T00:00:00Z | 2 | Alice | 70 | + +--------+----+----------------------+----------------------+----------+-----------+-------+ + "# + ); + let expected_derivative_data = indoc::indoc!( + r#" + +--------+----+----------------------+----------------------+-------+----------+-----------+-------+ + | offset | op | system_time | match_time | place | match_id | player_id | score | + +--------+----+----------------------+----------------------+-------+----------+-----------+-------+ + | 2 | 1 | 2050-01-02T03:04:05Z | 2000-01-01T00:00:00Z | 2 | 1 | Bob | 80 | + | 3 | 0 | 2050-01-02T03:04:05Z | 2000-01-02T00:00:00Z | 2 | 2 | Charlie | 90 | + +--------+----+----------------------+----------------------+-------+----------+-----------+-------+ + "# + ); + + kamu_in_pull_workspace + .assert_last_data_slice( + &root_dataset_alias.dataset_name, + expected_schema, + expected_data, + ) + .await; + kamu_in_pull_workspace + .assert_last_data_slice( + &derivative_dataset_alias.dataset_name, + expected_derivative_schema, + expected_derivative_data, + ) + .await; + } +} + +//////////////////////////////////////////////////////////////////////////////////////////////////////////////////////// + +pub async fn test_smart_pull_set_watermark(kamu: KamuCliPuppet) { + let dataset_name = DatasetName::new_unchecked("player-scores"); + + kamu.execute_with_input(["add", "--stdin"], DATASET_ROOT_PLAYER_SCORES_SNAPSHOT_STR) + .await + .success(); + + kamu.assert_success_command_execution( + [ + "pull", + dataset_name.as_str(), + 
"--set-watermark", + "2051-01-02T03:04:05Z", + ], + None, + Some(["Committed new block"]), + ) + .await; +} + +//////////////////////////////////////////////////////////////////////////////////////////////////////////////////////// + +pub async fn test_smart_pull_reset_derivative(kamu: KamuCliPuppet) { + let dataset_name = DatasetName::new_unchecked("player-scores"); + let dataset_derivative_name = DatasetName::new_unchecked("leaderboard"); + + kamu.execute_with_input(["add", "--stdin"], DATASET_ROOT_PLAYER_SCORES_SNAPSHOT_STR) + .await + .success(); + + kamu.execute_with_input( + ["add", "--stdin"], + DATASET_DERIVATIVE_LEADERBOARD_SNAPSHOT_STR, + ) + .await + .success(); + + kamu.ingest_data( + &dataset_name, + DATASET_ROOT_PLAYER_SCORES_INGEST_DATA_NDJSON_CHUNK_1, + ) + .await; + + kamu.assert_success_command_execution( + ["pull", dataset_derivative_name.as_str()], + None, + Some(["1 dataset(s) updated"]), + ) + .await; + + let expected_derivative_schema = indoc::indoc!( + r#" + message arrow_schema { + OPTIONAL INT64 offset; + REQUIRED INT32 op; + REQUIRED INT64 system_time (TIMESTAMP(MILLIS,true)); + OPTIONAL INT64 match_time (TIMESTAMP(MILLIS,true)); + OPTIONAL INT64 place; + OPTIONAL INT64 match_id; + OPTIONAL BYTE_ARRAY player_id (STRING); + OPTIONAL INT64 score; + } + "# + ); + let expected_derivative_data = indoc::indoc!( + r#" + +--------+----+----------------------+----------------------+-------+----------+-----------+-------+ + | offset | op | system_time | match_time | place | match_id | player_id | score | + +--------+----+----------------------+----------------------+-------+----------+-----------+-------+ + | 0 | 0 | 2050-01-02T03:04:05Z | 2000-01-01T00:00:00Z | 1 | 1 | Alice | 100 | + | 1 | 0 | 2050-01-02T03:04:05Z | 2000-01-01T00:00:00Z | 2 | 1 | Bob | 80 | + +--------+----+----------------------+----------------------+-------+----------+-----------+-------+ + "# + ); + kamu.assert_last_data_slice( + &dataset_derivative_name, + expected_derivative_schema, + expected_derivative_data, + ) + .await; + + // Compact root dataset + kamu.execute([ + "--yes", + "system", + "compact", + dataset_name.as_str(), + "--hard", + "--keep-metadata-only", + ]) + .await + .success(); + + // Pull derivative should fail + kamu.assert_failure_command_execution( + ["pull", dataset_derivative_name.as_str()], + None, + Some(["Failed to update 1 dataset(s)"]), + ) + .await; + + // Add new data to root dataset + kamu.ingest_data( + &dataset_name, + DATASET_ROOT_PLAYER_SCORES_INGEST_DATA_NDJSON_CHUNK_2, + ) + .await; + + kamu.assert_success_command_execution( + [ + "pull", + dataset_derivative_name.as_str(), + "--reset-derivatives-on-diverged-input", + ], + None, + Some(["1 dataset(s) updated"]), + ) + .await; + + let expected_derivative_data = indoc::indoc!( + r#" + +--------+----+----------------------+----------------------+-------+----------+-----------+-------+ + | offset | op | system_time | match_time | place | match_id | player_id | score | + +--------+----+----------------------+----------------------+-------+----------+-----------+-------+ + | 0 | 0 | 2050-01-02T03:04:05Z | 2000-01-02T00:00:00Z | 2 | 2 | Alice | 70 | + | 1 | 0 | 2050-01-02T03:04:05Z | 2000-01-02T00:00:00Z | 1 | 2 | Charlie | 90 | + +--------+----+----------------------+----------------------+-------+----------+-----------+-------+ + "# + ); + kamu.assert_last_data_slice( + &dataset_derivative_name, + expected_derivative_schema, + expected_derivative_data, + ) + .await; +} + 
+//////////////////////////////////////////////////////////////////////////////////////////////////////////////////////// + +pub async fn test_smart_push_visibility(kamu_api_server_client: KamuApiServerClient) { + let dataset_alias = DatasetAlias::new( + Some(AccountName::new_unchecked("e2e-user")), + DatasetName::new_unchecked("player-scores"), + ); + let kamu_api_server_dataset_endpoint = + kamu_api_server_client.get_dataset_endpoint(&dataset_alias); + + // 1. Grub a token + let token = kamu_api_server_client.login_as_e2e_user().await; + + // 2. Pushing the dataset to the API server + { + let kamu_in_push_workspace = KamuCliPuppet::new_workspace_tmp().await; + + // 2.1. Add the dataset + kamu_in_push_workspace + .execute_with_input(["add", "--stdin"], DATASET_ROOT_PLAYER_SCORES_SNAPSHOT_STR) + .await + .success(); + + // 2.1. Ingest data to the dataset + kamu_in_push_workspace + .ingest_data( + &dataset_alias.dataset_name, + DATASET_ROOT_PLAYER_SCORES_INGEST_DATA_NDJSON_CHUNK_1, + ) + .await; + + // 2.2. Login to the API server kamu_in_push_workspace .execute([ - "push", - "player-scores", - "--to", - kamu_api_server_dataset_endpoint.as_str(), + "login", + kamu_api_server_client.get_base_url().as_str(), + "--access-token", + token.as_str(), ]) .await .success(); + + kamu_in_push_workspace + .assert_success_command_execution( + [ + "push", + dataset_alias.dataset_name.as_str(), + "--to", + kamu_api_server_dataset_endpoint.as_str(), + "--visibility", + "private", + ], + None, + Some(["1 dataset(s) pushed"]), + ) + .await; + + // ToDo add visibility check } +} + +//////////////////////////////////////////////////////////////////////////////////////////////////////////////////////// + +pub async fn test_smart_push_pull_s3(kamu: KamuCliPuppet) { + let dataset_name = DatasetName::new_unchecked("player-scores"); + + kamu.execute_with_input(["add", "--stdin"], DATASET_ROOT_PLAYER_SCORES_SNAPSHOT_STR) + .await + .success(); + + kamu.ingest_data( + &dataset_name, + DATASET_ROOT_PLAYER_SCORES_INGEST_DATA_NDJSON_CHUNK_1, + ) + .await; + + let s3_server = LocalS3Server::new().await; + let dataset_url = format!("{}/e2e-user/{dataset_name}", s3_server.url); + + // Push dataset + kamu.assert_success_command_execution( + ["push", dataset_name.as_str(), "--to", dataset_url.as_str()], + None, + Some(["1 dataset(s) pushed"]), + ) + .await; - // 3. 
Pulling the dataset from the API server { let kamu_in_pull_workspace = KamuCliPuppet::new_workspace_tmp().await; kamu_in_pull_workspace - .execute(["pull", kamu_api_server_dataset_endpoint.as_str()]) - .await - .success(); + .assert_success_command_execution( + ["pull", dataset_url.as_str()], + None, + Some(["1 dataset(s) updated"]), + ) + .await; + + let expected_schema = indoc::indoc!( + r#" + message arrow_schema { + REQUIRED INT64 offset; + REQUIRED INT32 op; + REQUIRED INT64 system_time (TIMESTAMP(MILLIS,true)); + OPTIONAL INT64 match_time (TIMESTAMP(MILLIS,true)); + OPTIONAL INT64 match_id; + OPTIONAL BYTE_ARRAY player_id (STRING); + OPTIONAL INT64 score; + } + "# + ); + let expected_data = indoc::indoc!( + r#" + +--------+----+----------------------+----------------------+----------+-----------+-------+ + | offset | op | system_time | match_time | match_id | player_id | score | + +--------+----+----------------------+----------------------+----------+-----------+-------+ + | 0 | 0 | 2050-01-02T03:04:05Z | 2000-01-01T00:00:00Z | 1 | Alice | 100 | + | 1 | 0 | 2050-01-02T03:04:05Z | 2000-01-01T00:00:00Z | 1 | Bob | 80 | + +--------+----+----------------------+----------------------+----------+-----------+-------+ + "# + ); + kamu.assert_last_data_slice(&dataset_name, expected_schema, expected_data) + .await; } } //////////////////////////////////////////////////////////////////////////////////////////////////////////////////////// + +pub async fn test_smart_pull_derivative(kamu: KamuCliPuppet) { + let dataset_name = DatasetName::new_unchecked("player-scores"); + let dataset_derivative_name = DatasetName::new_unchecked("leaderboard"); + + kamu.execute_with_input(["add", "--stdin"], DATASET_ROOT_PLAYER_SCORES_SNAPSHOT_STR) + .await + .success(); + + kamu.execute_with_input( + ["add", "--stdin"], + DATASET_DERIVATIVE_LEADERBOARD_SNAPSHOT_STR, + ) + .await + .success(); + + kamu.ingest_data( + &dataset_name, + DATASET_ROOT_PLAYER_SCORES_INGEST_DATA_NDJSON_CHUNK_1, + ) + .await; + + kamu.assert_failure_command_execution( + [ + "tail", + dataset_derivative_name.as_str(), + "--output-format", + "table", + ], + None, + Some(["Error: Dataset schema is not yet available: leaderboard"]), + ) + .await; + + kamu.assert_success_command_execution( + ["pull", dataset_derivative_name.as_str()], + None, + Some(["1 dataset(s) updated"]), + ) + .await; + + kamu.assert_player_scores_dataset_data(indoc::indoc!( + r#" + ┌────┬──────────────────────┬──────────────────────┬──────────┬───────────┬───────┐ + │ op │ system_time │ match_time │ match_id │ player_id │ score │ + ├────┼──────────────────────┼──────────────────────┼──────────┼───────────┼───────┤ + │ 0 │ 2050-01-02T03:04:05Z │ 2000-01-01T00:00:00Z │ 1 │ Bob │ 80 │ + │ 0 │ 2050-01-02T03:04:05Z │ 2000-01-01T00:00:00Z │ 1 │ Alice │ 100 │ + └────┴──────────────────────┴──────────────────────┴──────────┴───────────┴───────┘ + "# + )) + .await; +} + +//////////////////////////////////////////////////////////////////////////////////////////////////////////////////////// diff --git a/src/e2e/app/cli/sqlite/tests/tests/commands/mod.rs b/src/e2e/app/cli/sqlite/tests/tests/commands/mod.rs index b58f493b3..6c06ec61a 100644 --- a/src/e2e/app/cli/sqlite/tests/tests/commands/mod.rs +++ b/src/e2e/app/cli/sqlite/tests/tests/commands/mod.rs @@ -7,5 +7,24 @@ // the Business Source License, use of this software will be governed // by the Apache License, Version 2.0. 
+mod test_add_command; +mod test_compact_command; +mod test_delete_command; +mod test_ingest_command; +mod test_init_command; +mod test_inspect_command; +mod test_log_command; +mod test_login_command; +mod test_new_command; +mod test_rename_command; +mod test_repo_command; +mod test_reset_command; +mod test_search_command; +mod test_sql_command; mod test_system_api_server_gql_query; +mod test_system_diagnose_command; +mod test_system_gc_command; mod test_system_generate_token_command; +mod test_system_info_command; +mod test_tail_command; +mod test_verify_command; diff --git a/src/e2e/app/cli/sqlite/tests/tests/commands/test_add_command.rs b/src/e2e/app/cli/sqlite/tests/tests/commands/test_add_command.rs new file mode 100644 index 000000000..6ba93dace --- /dev/null +++ b/src/e2e/app/cli/sqlite/tests/tests/commands/test_add_command.rs @@ -0,0 +1,40 @@ +// Copyright Kamu Data, Inc. and contributors. All rights reserved. +// +// Use of this software is governed by the Business Source License +// included in the LICENSE file. +// +// As of the Change Date specified in that file, in accordance with +// the Business Source License, use of this software will be governed +// by the Apache License, Version 2.0. + +use kamu_cli_e2e_common::prelude::*; + +//////////////////////////////////////////////////////////////////////////////////////////////////////////////////////// + +kamu_cli_execute_command_e2e_test!( + storage = sqlite, + fixture = kamu_cli_e2e_repo_tests::test_add_dataset_from_stdin +); + +//////////////////////////////////////////////////////////////////////////////////////////////////////////////////////// + +kamu_cli_execute_command_e2e_test!( + storage = sqlite, + fixture = kamu_cli_e2e_repo_tests::test_add_dataset_with_name +); + +//////////////////////////////////////////////////////////////////////////////////////////////////////////////////////// + +kamu_cli_execute_command_e2e_test!( + storage = sqlite, + fixture = kamu_cli_e2e_repo_tests::test_add_dataset_with_replace +); + +//////////////////////////////////////////////////////////////////////////////////////////////////////////////////////// + +kamu_cli_execute_command_e2e_test!( + storage = sqlite, + fixture = kamu_cli_e2e_repo_tests::test_add_recursive +); + +//////////////////////////////////////////////////////////////////////////////////////////////////////////////////////// diff --git a/src/e2e/app/cli/sqlite/tests/tests/commands/test_compact_command.rs b/src/e2e/app/cli/sqlite/tests/tests/commands/test_compact_command.rs new file mode 100644 index 000000000..fb2c2851f --- /dev/null +++ b/src/e2e/app/cli/sqlite/tests/tests/commands/test_compact_command.rs @@ -0,0 +1,36 @@ +// Copyright Kamu Data, Inc. and contributors. All rights reserved. +// +// Use of this software is governed by the Business Source License +// included in the LICENSE file. +// +// As of the Change Date specified in that file, in accordance with +// the Business Source License, use of this software will be governed +// by the Apache License, Version 2.0. 
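// For orientation: each `kamu_cli_execute_command_e2e_test!` invocation in the files that follow
// registers one test case that provisions the requested storage backend (sqlite here), builds a
// temporary `KamuCliPuppet` workspace with the given `Options`, and hands it to the named fixture
// from `kamu_cli_e2e_repo_tests`. The macro itself is defined elsewhere; in spirit, a single
// invocation behaves like a hand-written test of roughly this shape (illustrative only, not the
// macro's real expansion):
#[tokio::test]
async fn test_add_dataset_from_stdin_illustration() {
    let kamu = kamu_cli_puppet::KamuCliPuppet::new_workspace_tmp().await;
    kamu_cli_e2e_repo_tests::test_add_dataset_from_stdin(kamu).await;
}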
+ +use kamu_cli_e2e_common::prelude::*; + +//////////////////////////////////////////////////////////////////////////////////////////////////////////////////////// + +kamu_cli_execute_command_e2e_test!( + storage = sqlite, + fixture = kamu_cli_e2e_repo_tests::test_compact_hard + extra_test_groups = "engine, ingest, datafusion" +); + +//////////////////////////////////////////////////////////////////////////////////////////////////////////////////////// + +kamu_cli_execute_command_e2e_test!( + storage = sqlite, + fixture = kamu_cli_e2e_repo_tests::test_compact_keep_metadata_only + extra_test_groups = "engine, ingest, datafusion" +); + +//////////////////////////////////////////////////////////////////////////////////////////////////////////////////////// + +kamu_cli_execute_command_e2e_test!( + storage = sqlite, + fixture = kamu_cli_e2e_repo_tests::test_compact_verify + extra_test_groups = "engine, ingest, datafusion" +); + +//////////////////////////////////////////////////////////////////////////////////////////////////////////////////////// diff --git a/src/e2e/app/cli/sqlite/tests/tests/commands/test_delete_command.rs b/src/e2e/app/cli/sqlite/tests/tests/commands/test_delete_command.rs new file mode 100644 index 000000000..4166202a2 --- /dev/null +++ b/src/e2e/app/cli/sqlite/tests/tests/commands/test_delete_command.rs @@ -0,0 +1,33 @@ +// Copyright Kamu Data, Inc. and contributors. All rights reserved. +// +// Use of this software is governed by the Business Source License +// included in the LICENSE file. +// +// As of the Change Date specified in that file, in accordance with +// the Business Source License, use of this software will be governed +// by the Apache License, Version 2.0. + +use kamu_cli_e2e_common::prelude::*; + +//////////////////////////////////////////////////////////////////////////////////////////////////////////////////////// + +kamu_cli_execute_command_e2e_test!( + storage = sqlite, + fixture = kamu_cli_e2e_repo_tests::test_delete_dataset +); + +//////////////////////////////////////////////////////////////////////////////////////////////////////////////////////// + +kamu_cli_execute_command_e2e_test!( + storage = sqlite, + fixture = kamu_cli_e2e_repo_tests::test_delete_dataset_recursive +); + +//////////////////////////////////////////////////////////////////////////////////////////////////////////////////////// + +kamu_cli_execute_command_e2e_test!( + storage = sqlite, + fixture = kamu_cli_e2e_repo_tests::test_delete_dataset_all +); + +//////////////////////////////////////////////////////////////////////////////////////////////////////////////////////// diff --git a/src/e2e/app/cli/sqlite/tests/tests/commands/test_ingest_command.rs b/src/e2e/app/cli/sqlite/tests/tests/commands/test_ingest_command.rs new file mode 100644 index 000000000..2c74c7d44 --- /dev/null +++ b/src/e2e/app/cli/sqlite/tests/tests/commands/test_ingest_command.rs @@ -0,0 +1,57 @@ +// Copyright Kamu Data, Inc. and contributors. All rights reserved. +// +// Use of this software is governed by the Business Source License +// included in the LICENSE file. +// +// As of the Change Date specified in that file, in accordance with +// the Business Source License, use of this software will be governed +// by the Apache License, Version 2.0. 
+ +use kamu_cli_e2e_common::prelude::*; + +//////////////////////////////////////////////////////////////////////////////////////////////////////////////////////// + +kamu_cli_execute_command_e2e_test!( + storage = sqlite, + fixture = kamu_cli_e2e_repo_tests::test_push_ingest_from_file_ledger, + options = Options::default().with_frozen_system_time(), + extra_test_groups = "engine, ingest, datafusion" +); + +//////////////////////////////////////////////////////////////////////////////////////////////////////////////////////// + +kamu_cli_execute_command_e2e_test!( + storage = sqlite, + fixture = kamu_cli_e2e_repo_tests::test_push_ingest_from_file_snapshot_with_event_time, + options = Options::default().with_frozen_system_time(), + extra_test_groups = "engine, ingest, datafusion" +); + +//////////////////////////////////////////////////////////////////////////////////////////////////////////////////////// + +kamu_cli_execute_command_e2e_test!( + storage = sqlite, + fixture = kamu_cli_e2e_repo_tests::test_ingest_from_stdin, + options = Options::default().with_frozen_system_time(), + extra_test_groups = "engine, ingest, datafusion" +); + +//////////////////////////////////////////////////////////////////////////////////////////////////////////////////////// + +kamu_cli_execute_command_e2e_test!( + storage = sqlite, + fixture = kamu_cli_e2e_repo_tests::test_ingest_recursive, + options = Options::default().with_frozen_system_time(), + extra_test_groups = "engine, ingest, datafusion" +); + +//////////////////////////////////////////////////////////////////////////////////////////////////////////////////////// + +kamu_cli_execute_command_e2e_test!( + storage = sqlite, + fixture = kamu_cli_e2e_repo_tests::test_ingest_with_source_name, + options = Options::default().with_frozen_system_time(), + extra_test_groups = "engine, ingest, datafusion" +); + +//////////////////////////////////////////////////////////////////////////////////////////////////////////////////////// diff --git a/src/e2e/app/cli/sqlite/tests/tests/commands/test_init_command.rs b/src/e2e/app/cli/sqlite/tests/tests/commands/test_init_command.rs new file mode 100644 index 000000000..a07699b0b --- /dev/null +++ b/src/e2e/app/cli/sqlite/tests/tests/commands/test_init_command.rs @@ -0,0 +1,53 @@ +// Copyright Kamu Data, Inc. and contributors. All rights reserved. +// +// Use of this software is governed by the Business Source License +// included in the LICENSE file. +// +// As of the Change Date specified in that file, in accordance with +// the Business Source License, use of this software will be governed +// by the Apache License, Version 2.0. 
+ +use kamu_cli_e2e_common::prelude::*; + +//////////////////////////////////////////////////////////////////////////////////////////////////////////////////////// + +kamu_cli_execute_command_e2e_test!( + storage = sqlite, + fixture = kamu_cli_e2e_repo_tests::test_init_multi_tenant_creates_sqlite_database, + options = Options::default().with_no_workspace() +); + +//////////////////////////////////////////////////////////////////////////////////////////////////////////////////////// + +kamu_cli_execute_command_e2e_test!( + storage = sqlite, + fixture = + kamu_cli_e2e_repo_tests::test_init_multi_tenant_with_exists_ok_flag_creates_sqlite_database, + options = Options::default().with_no_workspace() +); + +//////////////////////////////////////////////////////////////////////////////////////////////////////////////////////// + +kamu_cli_execute_command_e2e_test!( + storage = sqlite, + fixture = kamu_cli_e2e_repo_tests::test_init_exist_ok_st, + options = Options::default().with_no_workspace() +); + +//////////////////////////////////////////////////////////////////////////////////////////////////////////////////////// + +kamu_cli_execute_command_e2e_test!( + storage = sqlite, + fixture = kamu_cli_e2e_repo_tests::test_init_exist_ok_mt, + options = Options::default().with_no_workspace() +); + +//////////////////////////////////////////////////////////////////////////////////////////////////////////////////////// + +kamu_cli_execute_command_e2e_test!( + storage = sqlite, + fixture = kamu_cli_e2e_repo_tests::test_init_in_an_existing_workspace, + options = Options::default().with_no_workspace() +); + +//////////////////////////////////////////////////////////////////////////////////////////////////////////////////////// diff --git a/src/e2e/app/cli/sqlite/tests/tests/commands/test_inspect_command.rs b/src/e2e/app/cli/sqlite/tests/tests/commands/test_inspect_command.rs new file mode 100644 index 000000000..9ddfcd3d5 --- /dev/null +++ b/src/e2e/app/cli/sqlite/tests/tests/commands/test_inspect_command.rs @@ -0,0 +1,35 @@ +// Copyright Kamu Data, Inc. and contributors. All rights reserved. +// +// Use of this software is governed by the Business Source License +// included in the LICENSE file. +// +// As of the Change Date specified in that file, in accordance with +// the Business Source License, use of this software will be governed +// by the Apache License, Version 2.0. 
+ +use kamu_cli_e2e_common::prelude::*; + +//////////////////////////////////////////////////////////////////////////////////////////////////////////////////////// + +kamu_cli_execute_command_e2e_test!( + storage = sqlite, + fixture = kamu_cli_e2e_repo_tests::test_inspect_lineage, +); + +//////////////////////////////////////////////////////////////////////////////////////////////////////////////////////// + +kamu_cli_execute_command_e2e_test!( + storage = sqlite, + fixture = kamu_cli_e2e_repo_tests::test_inspect_query, + options = Options::default().with_frozen_system_time() +); + +//////////////////////////////////////////////////////////////////////////////////////////////////////////////////////// + +kamu_cli_execute_command_e2e_test!( + storage = sqlite, + fixture = kamu_cli_e2e_repo_tests::test_inspect_schema, + extra_test_groups = "engine, ingest, datafusion" +); + +//////////////////////////////////////////////////////////////////////////////////////////////////////////////////////// diff --git a/src/e2e/app/cli/sqlite/tests/tests/commands/test_log_command.rs b/src/e2e/app/cli/sqlite/tests/tests/commands/test_log_command.rs new file mode 100644 index 000000000..54bc30e5a --- /dev/null +++ b/src/e2e/app/cli/sqlite/tests/tests/commands/test_log_command.rs @@ -0,0 +1,21 @@ +// Copyright Kamu Data, Inc. and contributors. All rights reserved. +// +// Use of this software is governed by the Business Source License +// included in the LICENSE file. +// +// As of the Change Date specified in that file, in accordance with +// the Business Source License, use of this software will be governed +// by the Apache License, Version 2.0. + +use kamu_cli_e2e_common::prelude::*; + +//////////////////////////////////////////////////////////////////////////////////////////////////////////////////////// + +kamu_cli_execute_command_e2e_test!( + storage = sqlite, + fixture = kamu_cli_e2e_repo_tests::test_log, + options = Options::default().with_frozen_system_time(), + extra_test_groups = "engine, ingest, datafusion" +); + +//////////////////////////////////////////////////////////////////////////////////////////////////////////////////////// diff --git a/src/e2e/app/cli/sqlite/tests/tests/commands/test_login_command.rs b/src/e2e/app/cli/sqlite/tests/tests/commands/test_login_command.rs new file mode 100644 index 000000000..eeeb6e2f0 --- /dev/null +++ b/src/e2e/app/cli/sqlite/tests/tests/commands/test_login_command.rs @@ -0,0 +1,27 @@ +// Copyright Kamu Data, Inc. and contributors. All rights reserved. +// +// Use of this software is governed by the Business Source License +// included in the LICENSE file. +// +// As of the Change Date specified in that file, in accordance with +// the Business Source License, use of this software will be governed +// by the Apache License, Version 2.0. 
+ +use kamu_cli_e2e_common::prelude::*; + +//////////////////////////////////////////////////////////////////////////////////////////////////////////////////////// + +kamu_cli_run_api_server_e2e_test!( + storage = sqlite, + fixture = kamu_cli_e2e_repo_tests::test_login_logout_password, +); + +//////////////////////////////////////////////////////////////////////////////////////////////////////////////////////// + +kamu_cli_run_api_server_e2e_test!( + storage = sqlite, + fixture = kamu_cli_e2e_repo_tests::test_login_logout_oauth, + options = Options::default().with_multi_tenant() +); + +//////////////////////////////////////////////////////////////////////////////////////////////////////////////////////// diff --git a/src/e2e/app/cli/sqlite/tests/tests/commands/test_new_command.rs b/src/e2e/app/cli/sqlite/tests/tests/commands/test_new_command.rs new file mode 100644 index 000000000..d299df3d9 --- /dev/null +++ b/src/e2e/app/cli/sqlite/tests/tests/commands/test_new_command.rs @@ -0,0 +1,26 @@ +// Copyright Kamu Data, Inc. and contributors. All rights reserved. +// +// Use of this software is governed by the Business Source License +// included in the LICENSE file. +// +// As of the Change Date specified in that file, in accordance with +// the Business Source License, use of this software will be governed +// by the Apache License, Version 2.0. + +use kamu_cli_e2e_common::prelude::*; + +//////////////////////////////////////////////////////////////////////////////////////////////////////////////////////// + +kamu_cli_execute_command_e2e_test!( + storage = sqlite, + fixture = kamu_cli_e2e_repo_tests::test_new_root, +); + +//////////////////////////////////////////////////////////////////////////////////////////////////////////////////////// + +kamu_cli_execute_command_e2e_test!( + storage = sqlite, + fixture = kamu_cli_e2e_repo_tests::test_new_derivative, +); + +//////////////////////////////////////////////////////////////////////////////////////////////////////////////////////// diff --git a/src/e2e/app/cli/sqlite/tests/tests/commands/test_rename_command.rs b/src/e2e/app/cli/sqlite/tests/tests/commands/test_rename_command.rs new file mode 100644 index 000000000..889c54c2a --- /dev/null +++ b/src/e2e/app/cli/sqlite/tests/tests/commands/test_rename_command.rs @@ -0,0 +1,19 @@ +// Copyright Kamu Data, Inc. and contributors. All rights reserved. +// +// Use of this software is governed by the Business Source License +// included in the LICENSE file. +// +// As of the Change Date specified in that file, in accordance with +// the Business Source License, use of this software will be governed +// by the Apache License, Version 2.0. + +use kamu_cli_e2e_common::prelude::*; + +//////////////////////////////////////////////////////////////////////////////////////////////////////////////////////// + +kamu_cli_execute_command_e2e_test!( + storage = sqlite, + fixture = kamu_cli_e2e_repo_tests::test_rename_dataset +); + +//////////////////////////////////////////////////////////////////////////////////////////////////////////////////////// diff --git a/src/e2e/app/cli/sqlite/tests/tests/commands/test_repo_command.rs b/src/e2e/app/cli/sqlite/tests/tests/commands/test_repo_command.rs new file mode 100644 index 000000000..29b16d730 --- /dev/null +++ b/src/e2e/app/cli/sqlite/tests/tests/commands/test_repo_command.rs @@ -0,0 +1,26 @@ +// Copyright Kamu Data, Inc. and contributors. All rights reserved. +// +// Use of this software is governed by the Business Source License +// included in the LICENSE file. 
+// +// As of the Change Date specified in that file, in accordance with +// the Business Source License, use of this software will be governed +// by the Apache License, Version 2.0. + +use kamu_cli_e2e_common::prelude::*; + +//////////////////////////////////////////////////////////////////////////////////////////////////////////////////////// + +kamu_cli_execute_command_e2e_test!( + storage = sqlite, + fixture = kamu_cli_e2e_repo_tests::test_repository_pull_aliases_commands +); + +//////////////////////////////////////////////////////////////////////////////////////////////////////////////////////// + +kamu_cli_execute_command_e2e_test!( + storage = sqlite, + fixture = kamu_cli_e2e_repo_tests::test_repository_push_aliases_commands +); + +//////////////////////////////////////////////////////////////////////////////////////////////////////////////////////// diff --git a/src/e2e/app/cli/sqlite/tests/tests/commands/test_reset_command.rs b/src/e2e/app/cli/sqlite/tests/tests/commands/test_reset_command.rs new file mode 100644 index 000000000..08fcb2bc3 --- /dev/null +++ b/src/e2e/app/cli/sqlite/tests/tests/commands/test_reset_command.rs @@ -0,0 +1,20 @@ +// Copyright Kamu Data, Inc. and contributors. All rights reserved. +// +// Use of this software is governed by the Business Source License +// included in the LICENSE file. +// +// As of the Change Date specified in that file, in accordance with +// the Business Source License, use of this software will be governed +// by the Apache License, Version 2.0. + +use kamu_cli_e2e_common::prelude::*; + +//////////////////////////////////////////////////////////////////////////////////////////////////////////////////////// + +kamu_cli_execute_command_e2e_test!( + storage = sqlite, + fixture = kamu_cli_e2e_repo_tests::test_reset, + extra_test_groups = "engine, ingest, datafusion" +); + +//////////////////////////////////////////////////////////////////////////////////////////////////////////////////////// diff --git a/src/e2e/app/cli/sqlite/tests/tests/commands/test_search_command.rs b/src/e2e/app/cli/sqlite/tests/tests/commands/test_search_command.rs new file mode 100644 index 000000000..dbdf1ec9a --- /dev/null +++ b/src/e2e/app/cli/sqlite/tests/tests/commands/test_search_command.rs @@ -0,0 +1,40 @@ +// Copyright Kamu Data, Inc. and contributors. All rights reserved. +// +// Use of this software is governed by the Business Source License +// included in the LICENSE file. +// +// As of the Change Date specified in that file, in accordance with +// the Business Source License, use of this software will be governed +// by the Apache License, Version 2.0. + +use kamu_cli_e2e_common::prelude::*; + +//////////////////////////////////////////////////////////////////////////////////////////////////////////////////////// + +kamu_cli_run_api_server_e2e_test!( + storage = sqlite, + fixture = kamu_cli_e2e_repo_tests::test_search_by_name + // We need synthetic time for the tests, but the third-party JWT code + // uses the current time. Assuming that the token lifetime is 24 hours, we will + // use the projected date (the current day) as a workaround. 
+ options = Options::default() + .with_multi_tenant() + .with_today_as_frozen_system_time(), + extra_test_groups = "engine, ingest, datafusion" +); + +//////////////////////////////////////////////////////////////////////////////////////////////////////////////////////// + +kamu_cli_run_api_server_e2e_test!( + storage = sqlite, + fixture = kamu_cli_e2e_repo_tests::test_search_by_repo + // We need synthetic time for the tests, but the third-party JWT code + // uses the current time. Assuming that the token lifetime is 24 hours, we will + // use the projected date (the current day) as a workaround. + options = Options::default() + .with_multi_tenant() + .with_today_as_frozen_system_time(), + extra_test_groups = "engine, ingest, datafusion" +); + +//////////////////////////////////////////////////////////////////////////////////////////////////////////////////////// diff --git a/src/e2e/app/cli/sqlite/tests/tests/commands/test_sql_command.rs b/src/e2e/app/cli/sqlite/tests/tests/commands/test_sql_command.rs new file mode 100644 index 000000000..8d21f99b9 --- /dev/null +++ b/src/e2e/app/cli/sqlite/tests/tests/commands/test_sql_command.rs @@ -0,0 +1,38 @@ +// Copyright Kamu Data, Inc. and contributors. All rights reserved. +// +// Use of this software is governed by the Business Source License +// included in the LICENSE file. +// +// As of the Change Date specified in that file, in accordance with +// the Business Source License, use of this software will be governed +// by the Apache License, Version 2.0. + +use kamu_cli_e2e_common::prelude::*; + +//////////////////////////////////////////////////////////////////////////////////////////////////////////////////////// + +kamu_cli_execute_command_e2e_test!( + storage = sqlite, + fixture = kamu_cli_e2e_repo_tests::test_datafusion_cli, + extra_test_groups = "engine, datafusion" +); + +//////////////////////////////////////////////////////////////////////////////////////////////////////////////////////// + +kamu_cli_execute_command_e2e_test!( + storage = sqlite, + fixture = kamu_cli_e2e_repo_tests::test_datafusion_cli_not_launched_in_root_ws, + options = Options::default().with_no_workspace(), + extra_test_groups = "engine, datafusion" +); + +//////////////////////////////////////////////////////////////////////////////////////////////////////////////////////// + +kamu_cli_execute_command_e2e_test!( + storage = sqlite, + fixture = kamu_cli_e2e_repo_tests::test_sql_command, + options = Options::default().with_frozen_system_time(), + extra_test_groups = "engine, datafusion" +); + +//////////////////////////////////////////////////////////////////////////////////////////////////////////////////////// diff --git a/src/e2e/app/cli/sqlite/tests/tests/commands/test_system_diagnose_command.rs b/src/e2e/app/cli/sqlite/tests/tests/commands/test_system_diagnose_command.rs new file mode 100644 index 000000000..df35fd6e5 --- /dev/null +++ b/src/e2e/app/cli/sqlite/tests/tests/commands/test_system_diagnose_command.rs @@ -0,0 +1,19 @@ +// Copyright Kamu Data, Inc. and contributors. All rights reserved. +// +// Use of this software is governed by the Business Source License +// included in the LICENSE file. +// +// As of the Change Date specified in that file, in accordance with +// the Business Source License, use of this software will be governed +// by the Apache License, Version 2.0. 
+ +use kamu_cli_e2e_common::prelude::*; + +//////////////////////////////////////////////////////////////////////////////////////////////////////////////////////// + +kamu_cli_execute_command_e2e_test!( + storage = sqlite, + fixture = kamu_cli_e2e_repo_tests::test_system_diagnose +); + +//////////////////////////////////////////////////////////////////////////////////////////////////////////////////////// diff --git a/src/e2e/app/cli/sqlite/tests/tests/commands/test_system_gc_command.rs b/src/e2e/app/cli/sqlite/tests/tests/commands/test_system_gc_command.rs new file mode 100644 index 000000000..7a3c36657 --- /dev/null +++ b/src/e2e/app/cli/sqlite/tests/tests/commands/test_system_gc_command.rs @@ -0,0 +1,16 @@ +// Copyright Kamu Data, Inc. and contributors. All rights reserved. +// +// Use of this software is governed by the Business Source License +// included in the LICENSE file. +// +// As of the Change Date specified in that file, in accordance with +// the Business Source License, use of this software will be governed +// by the Apache License, Version 2.0. + +use kamu_cli_e2e_common::prelude::*; + +//////////////////////////////////////////////////////////////////////////////////////////////////////////////////////// + +kamu_cli_execute_command_e2e_test!(storage = sqlite, fixture = kamu_cli_e2e_repo_tests::test_gc); + +//////////////////////////////////////////////////////////////////////////////////////////////////////////////////////// diff --git a/src/e2e/app/cli/sqlite/tests/tests/commands/test_system_info_command.rs b/src/e2e/app/cli/sqlite/tests/tests/commands/test_system_info_command.rs new file mode 100644 index 000000000..8f5781110 --- /dev/null +++ b/src/e2e/app/cli/sqlite/tests/tests/commands/test_system_info_command.rs @@ -0,0 +1,19 @@ +// Copyright Kamu Data, Inc. and contributors. All rights reserved. +// +// Use of this software is governed by the Business Source License +// included in the LICENSE file. +// +// As of the Change Date specified in that file, in accordance with +// the Business Source License, use of this software will be governed +// by the Apache License, Version 2.0. + +use kamu_cli_e2e_common::prelude::*; + +//////////////////////////////////////////////////////////////////////////////////////////////////////////////////////// + +kamu_cli_execute_command_e2e_test!( + storage = sqlite, + fixture = kamu_cli_e2e_repo_tests::test_system_info +); + +//////////////////////////////////////////////////////////////////////////////////////////////////////////////////////// diff --git a/src/e2e/app/cli/sqlite/tests/tests/commands/test_tail_command.rs b/src/e2e/app/cli/sqlite/tests/tests/commands/test_tail_command.rs new file mode 100644 index 000000000..ac92be7d6 --- /dev/null +++ b/src/e2e/app/cli/sqlite/tests/tests/commands/test_tail_command.rs @@ -0,0 +1,21 @@ +// Copyright Kamu Data, Inc. and contributors. All rights reserved. +// +// Use of this software is governed by the Business Source License +// included in the LICENSE file. +// +// As of the Change Date specified in that file, in accordance with +// the Business Source License, use of this software will be governed +// by the Apache License, Version 2.0. 
+ +use kamu_cli_e2e_common::prelude::*; + +//////////////////////////////////////////////////////////////////////////////////////////////////////////////////////// + +kamu_cli_execute_command_e2e_test!( + storage = sqlite, + fixture = kamu_cli_e2e_repo_tests::test_tail, + options = Options::default().with_frozen_system_time(), + extra_test_groups = "engine, ingest, datafusion" +); + +//////////////////////////////////////////////////////////////////////////////////////////////////////////////////////// diff --git a/src/e2e/app/cli/sqlite/tests/tests/commands/test_verify_command.rs b/src/e2e/app/cli/sqlite/tests/tests/commands/test_verify_command.rs new file mode 100644 index 000000000..e025ab033 --- /dev/null +++ b/src/e2e/app/cli/sqlite/tests/tests/commands/test_verify_command.rs @@ -0,0 +1,36 @@ +// Copyright Kamu Data, Inc. and contributors. All rights reserved. +// +// Use of this software is governed by the Business Source License +// included in the LICENSE file. +// +// As of the Change Date specified in that file, in accordance with +// the Business Source License, use of this software will be governed +// by the Apache License, Version 2.0. + +use kamu_cli_e2e_common::prelude::*; + +//////////////////////////////////////////////////////////////////////////////////////////////////////////////////////// + +kamu_cli_execute_command_e2e_test!( + storage = sqlite, + fixture = kamu_cli_e2e_repo_tests::test_verify_regular_dataset, + extra_test_groups = "engine, ingest, datafusion" +); + +//////////////////////////////////////////////////////////////////////////////////////////////////////////////////////// + +kamu_cli_execute_command_e2e_test!( + storage = sqlite, + fixture = kamu_cli_e2e_repo_tests::test_verify_recursive, + extra_test_groups = "containerized, engine, ingest, datafusion, risingwave" +); + +//////////////////////////////////////////////////////////////////////////////////////////////////////////////////////// + +kamu_cli_execute_command_e2e_test!( + storage = sqlite, + fixture = kamu_cli_e2e_repo_tests::test_verify_integrity, + extra_test_groups = "engine, ingest, datafusion" +); + +//////////////////////////////////////////////////////////////////////////////////////////////////////////////////////// diff --git a/src/e2e/app/cli/sqlite/tests/tests/test_flow.rs b/src/e2e/app/cli/sqlite/tests/tests/test_flow.rs index 9ec764030..a0a38ec16 100644 --- a/src/e2e/app/cli/sqlite/tests/tests/test_flow.rs +++ b/src/e2e/app/cli/sqlite/tests/tests/test_flow.rs @@ -38,7 +38,7 @@ kamu_cli_run_api_server_e2e_test!( kamu_cli_run_api_server_e2e_test!( storage = sqlite, fixture = kamu_cli_e2e_repo_tests::test_dataset_trigger_flow, - extra_test_groups = "containerized, engine, transform, datafusion" + extra_test_groups = "containerized, engine, transform, datafusion, risingwave" ); //////////////////////////////////////////////////////////////////////////////////////////////////////////////////////// diff --git a/src/e2e/app/cli/sqlite/tests/tests/test_smart_transfer_protocol.rs b/src/e2e/app/cli/sqlite/tests/tests/test_smart_transfer_protocol.rs index 326e42818..a77a86dd3 100644 --- a/src/e2e/app/cli/sqlite/tests/tests/test_smart_transfer_protocol.rs +++ b/src/e2e/app/cli/sqlite/tests/tests/test_smart_transfer_protocol.rs @@ -24,3 +24,104 @@ kamu_cli_run_api_server_e2e_test!( ); //////////////////////////////////////////////////////////////////////////////////////////////////////////////////////// + +kamu_cli_run_api_server_e2e_test!( + storage = sqlite, + fixture = 
kamu_cli_e2e_repo_tests::test_smart_force_push_pull, + options = Options::default() + .with_multi_tenant() + .with_today_as_frozen_system_time(), + extra_test_groups = "engine, ingest, datafusion" +); + +//////////////////////////////////////////////////////////////////////////////////////////////////////////////////////// + +kamu_cli_run_api_server_e2e_test!( + storage = sqlite, + fixture = kamu_cli_e2e_repo_tests::test_smart_push_pull_add_alias, + options = Options::default() + .with_multi_tenant() + .with_today_as_frozen_system_time(), + extra_test_groups = "engine, ingest, datafusion" +); + +//////////////////////////////////////////////////////////////////////////////////////////////////////////////////////// + +kamu_cli_run_api_server_e2e_test!( + storage = sqlite, + fixture = kamu_cli_e2e_repo_tests::test_smart_pull_as, + options = Options::default() + .with_multi_tenant() + .with_today_as_frozen_system_time(), + extra_test_groups = "engine, ingest, datafusion" +); + +//////////////////////////////////////////////////////////////////////////////////////////////////////////////////////// + +kamu_cli_run_api_server_e2e_test!( + storage = sqlite, + fixture = kamu_cli_e2e_repo_tests::test_smart_push_pull_all, + options = Options::default() + .with_multi_tenant() + .with_today_as_frozen_system_time(), + extra_test_groups = "containerized, engine, ingest, transform, datafusion, risingwave" +); + +//////////////////////////////////////////////////////////////////////////////////////////////////////////////////////// + +kamu_cli_run_api_server_e2e_test!( + storage = sqlite, + fixture = kamu_cli_e2e_repo_tests::test_smart_push_pull_recursive, + options = Options::default() + .with_multi_tenant() + .with_today_as_frozen_system_time(), + extra_test_groups = "containerized, engine, ingest, transform, datafusion, risingwave" +); + +//////////////////////////////////////////////////////////////////////////////////////////////////////////////////////// + +kamu_cli_execute_command_e2e_test!( + storage = sqlite, + fixture = kamu_cli_e2e_repo_tests::test_smart_pull_set_watermark, + options = Options::default().with_frozen_system_time(), +); + +//////////////////////////////////////////////////////////////////////////////////////////////////////////////////////// + +kamu_cli_execute_command_e2e_test!( + storage = sqlite, + fixture = kamu_cli_e2e_repo_tests::test_smart_pull_reset_derivative, + options = Options::default().with_frozen_system_time(), + extra_test_groups = "containerized, engine, ingest, transform, datafusion, risingwave" +); + +//////////////////////////////////////////////////////////////////////////////////////////////////////////////////////// + +kamu_cli_run_api_server_e2e_test!( + storage = sqlite, + fixture = kamu_cli_e2e_repo_tests::test_smart_push_visibility, + options = Options::default() + .with_multi_tenant() + .with_today_as_frozen_system_time(), + extra_test_groups = "engine, ingest, datafusion" +); + +//////////////////////////////////////////////////////////////////////////////////////////////////////////////////////// + +kamu_cli_execute_command_e2e_test!( + storage = sqlite, + fixture = kamu_cli_e2e_repo_tests::test_smart_push_pull_s3, + options = Options::default().with_frozen_system_time(), + extra_test_groups = "containerized, engine, ingest, datafusion" +); + +//////////////////////////////////////////////////////////////////////////////////////////////////////////////////////// + +kamu_cli_execute_command_e2e_test!( + storage = sqlite, + fixture = 
kamu_cli_e2e_repo_tests::test_smart_pull_derivative, + options = Options::default().with_frozen_system_time(), + extra_test_groups = "containerized, engine, ingest, transform, datafusion, risingwave" +); + +//////////////////////////////////////////////////////////////////////////////////////////////////////////////////////// diff --git a/src/infra/core/src/query_service_impl.rs b/src/infra/core/src/query_service_impl.rs index 352460f98..d38dd1f9f 100644 --- a/src/infra/core/src/query_service_impl.rs +++ b/src/infra/core/src/query_service_impl.rs @@ -56,7 +56,7 @@ impl QueryServiceImpl { .with_information_schema(true) .with_default_catalog_and_schema("kamu", "kamu"); - // Forcing cese-sensitive identifiers in case-insensitive language seems to + // Forcing case-sensitive identifiers in case-insensitive language seems to // be a lesser evil than following DataFusion's default behavior of forcing // identifiers to lowercase instead of case-insensitive matching. // diff --git a/src/infra/core/src/remote_repository_registry_impl.rs b/src/infra/core/src/remote_repository_registry_impl.rs index cd48a2063..f80402d1d 100644 --- a/src/infra/core/src/remote_repository_registry_impl.rs +++ b/src/infra/core/src/remote_repository_registry_impl.rs @@ -44,7 +44,7 @@ impl RemoteRepositoryRegistryImpl { let file_path = self.repos_dir.join(repo_name); if !file_path.exists() { - // run full scan to support case-insensetive matches + // run full scan to support case-insensitive matches let all_repositories_stream = self.get_all_repositories(); for repository_name in all_repositories_stream { if &repository_name == repo_name { @@ -157,6 +157,7 @@ impl RemoteRepositoryRegistry for RemoteRepositoryRegistryNull { //////////////////////////////////////////////////////////////////////////////////////////////////////////////////////// // Config //////////////////////////////////////////////////////////////////////////////////////////////////////////////////////// + pub struct RemoteReposDir(PathBuf); impl RemoteReposDir { diff --git a/src/utils/kamu-cli-puppet/Cargo.toml b/src/utils/kamu-cli-puppet/Cargo.toml index 43620908c..1285e6659 100644 --- a/src/utils/kamu-cli-puppet/Cargo.toml +++ b/src/utils/kamu-cli-puppet/Cargo.toml @@ -29,6 +29,8 @@ extensions = [ # External "dep:async-trait", "dep:datafusion", + "dep:indoc", + "dep:pretty_assertions", "dep:serde", "dep:serde_json", ] @@ -47,6 +49,8 @@ opendatafabric = { optional = true, workspace = true } async-trait = { optional = true, version = "0.1" } datafusion = { optional = true, version = "42", default-features = false } +indoc = { optional = true, version = "2" } +pretty_assertions = { optional = true, version = "1" } serde = { optional = true, version = "1", default-features = false, features = [ "derive", ] } diff --git a/src/utils/kamu-cli-puppet/src/kamu_cli_puppet.rs b/src/utils/kamu-cli-puppet/src/kamu_cli_puppet.rs index 422b4c1dd..915311fc2 100644 --- a/src/utils/kamu-cli-puppet/src/kamu_cli_puppet.rs +++ b/src/utils/kamu-cli-puppet/src/kamu_cli_puppet.rs @@ -14,6 +14,10 @@ use chrono::{DateTime, Utc}; //////////////////////////////////////////////////////////////////////////////////////////////////////////////////////// +pub type ExecuteCommandResult = assert_cmd::assert::Assert; + +//////////////////////////////////////////////////////////////////////////////////////////////////////////////////////// + pub struct KamuCliPuppet { workspace_path: PathBuf, system_time: Option<DateTime<Utc>>, @@ -88,7 +92,7 @@ impl KamuCliPuppet { temp_dir.join("e2e-output-data.txt") }
- pub async fn execute<I, S>(&self, cmd: I) -> assert_cmd::assert::Assert + pub async fn execute<I, S>(&self, cmd: I) -> ExecuteCommandResult where I: IntoIterator<Item = S>, S: AsRef<std::ffi::OsStr>, @@ -96,7 +100,7 @@ impl KamuCliPuppet { self.execute_impl(cmd, None::<Vec<u8>>).await } - pub async fn execute_with_input<I, S, T>(&self, cmd: I, input: T) -> assert_cmd::assert::Assert + pub async fn execute_with_input<I, S, T>(&self, cmd: I, input: T) -> ExecuteCommandResult where I: IntoIterator<Item = S>, S: AsRef<std::ffi::OsStr>, @@ -105,11 +109,7 @@ impl KamuCliPuppet { self.execute_impl(cmd, Some(input)).await } - async fn execute_impl<I, S, T>( - &self, - cmd: I, - maybe_input: Option<T>, - ) -> assert_cmd::assert::Assert + async fn execute_impl<I, S, T>(&self, cmd: I, maybe_input: Option<T>) -> ExecuteCommandResult where I: IntoIterator<Item = S>, S: AsRef<std::ffi::OsStr>, diff --git a/src/utils/kamu-cli-puppet/src/kamu_cli_puppet_ext.rs b/src/utils/kamu-cli-puppet/src/kamu_cli_puppet_ext.rs index 09ae75317..5ba68f52c 100644 --- a/src/utils/kamu-cli-puppet/src/kamu_cli_puppet_ext.rs +++ b/src/utils/kamu-cli-puppet/src/kamu_cli_puppet_ext.rs @@ -14,21 +14,72 @@ use std::path::PathBuf; use async_trait::async_trait; use chrono::{DateTime, Utc}; use datafusion::prelude::{ParquetReadOptions, SessionContext}; -use opendatafabric::serde::yaml::{DatasetKindDef, YamlDatasetSnapshotSerializer}; -use opendatafabric::serde::DatasetSnapshotSerializer; -use opendatafabric::{DatasetID, DatasetKind, DatasetName, DatasetRef, DatasetSnapshot, Multihash}; +use opendatafabric::serde::yaml::{YamlDatasetSnapshotSerializer, YamlMetadataBlockDeserializer}; +use opendatafabric::serde::{DatasetSnapshotSerializer, MetadataBlockDeserializer}; +use opendatafabric::{ + DatasetID, + DatasetName, + DatasetRef, + DatasetSnapshot, + MetadataBlock, + Multihash, +}; use serde::Deserialize; -use crate::KamuCliPuppet; +use crate::{ExecuteCommandResult, KamuCliPuppet}; //////////////////////////////////////////////////////////////////////////////////////////////////////////////////////// #[async_trait] pub trait KamuCliPuppetExt { + async fn assert_success_command_execution<I, S>( + &self, + cmd: I, + maybe_expected_stdout: Option<&str>, + maybe_expected_stderr: Option<impl IntoIterator<Item = &str> + Send>, + ) where + I: IntoIterator<Item = S> + Send, + S: AsRef<std::ffi::OsStr>; + + async fn assert_success_command_execution_with_input<I, S, T>( + &self, + cmd: I, + input: T, + maybe_expected_stdout: Option<&str>, + maybe_expected_stderr: Option<impl IntoIterator<Item = &str> + Send>, + ) where + I: IntoIterator<Item = S> + Send, + S: AsRef<std::ffi::OsStr>, + T: Into<Vec<u8>> + Send; + + async fn assert_failure_command_execution<I, S>( + &self, + cmd: I, + maybe_expected_stdout: Option<&str>, + maybe_expected_stderr: Option<impl IntoIterator<Item = &str> + Send>, + ) where + I: IntoIterator<Item = S> + Send, + S: AsRef<std::ffi::OsStr>; + + async fn assert_failure_command_execution_with_input<I, S, T>( + &self, + cmd: I, + input: T, + maybe_expected_stdout: Option<&str>, + maybe_expected_stderr: Option<impl IntoIterator<Item = &str> + Send>, + ) where + I: IntoIterator<Item = S> + Send, + S: AsRef<std::ffi::OsStr>, + T: Into<Vec<u8>> + Send; + async fn list_datasets(&self) -> Vec<DatasetRecord>; async fn add_dataset(&self, dataset_snapshot: DatasetSnapshot); + async fn list_blocks(&self, dataset_name: &DatasetName) -> Vec<BlockRecord>; + + async fn ingest_data(&self, dataset_name: &DatasetName, data: &str); + async fn get_list_of_repo_aliases(&self, dataset_ref: &DatasetRef) -> Vec<RepoAlias>; async fn complete<T>(&self, input: T, current: usize) -> Vec<String> @@ -37,6 +88,8 @@ pub trait KamuCliPuppetExt { async fn start_api_server(self, e2e_data_file_path: PathBuf) -> ServerOutput; + async fn assert_player_scores_dataset_data(&self, expected_player_scores_table: &str); + async fn assert_last_data_slice( &self, dataset_name: &DatasetName, @@ -111,6 +164,43 @@ impl
KamuCliPuppetExt for KamuCliPuppet { stdout.lines().map(ToString::to_string).collect() } + async fn list_blocks(&self, dataset_name: &DatasetName) -> Vec<BlockRecord> { + let assert = self + .execute(["log", dataset_name.as_str(), "--output-format", "yaml"]) + .await + .success(); + + let stdout = std::str::from_utf8(&assert.get_output().stdout).unwrap(); + + // TODO: Don't parse the output, after implementation: + // `kamu log`: support `--output-format json` + // https://github.com/kamu-data/kamu-cli/issues/887 + + stdout + .split("---") + .skip(1) + .map(str::trim) + .map(|block_data| { + let Some(pos) = block_data.find('\n') else { + unreachable!() + }; + let (first_line_with_block_hash, metadata_block_str) = block_data.split_at(pos); + + let block_hash = first_line_with_block_hash + .strip_prefix("# Block: ") + .unwrap(); + let block = YamlMetadataBlockDeserializer {} + .read_manifest(metadata_block_str.as_ref()) + .unwrap(); + + BlockRecord { + block_hash: Multihash::from_multibase(block_hash).unwrap(), + block, + } + }) + .collect() + } + async fn start_api_server(self, e2e_data_file_path: PathBuf) -> ServerOutput { let host = Ipv4Addr::LOCALHOST.to_string(); @@ -136,6 +226,36 @@ impl KamuCliPuppetExt for KamuCliPuppet { ServerOutput { stdout, stderr } } + async fn assert_player_scores_dataset_data(&self, expected_player_scores_table: &str) { + self.assert_success_command_execution( + [ + "sql", + "--engine", + "datafusion", + "--command", + // Without unstable "offset" column. + // For a beautiful output, cut to seconds + indoc::indoc!( + r#" + SELECT op, + system_time, + match_time, + match_id, + player_id, + score + FROM "player-scores" + ORDER BY match_id, score, player_id; + "# + ), + "--output-format", + "table", + ], + Some(expected_player_scores_table), + None::<Vec<&str>>, + ) + .await; + } + async fn assert_last_data_slice( &self, dataset_name: &DatasetName, @@ -176,6 +296,80 @@ impl KamuCliPuppetExt for KamuCliPuppet { kamu_data_utils::testing::assert_data_eq(df.clone(), expected_data).await; kamu_data_utils::testing::assert_schema_eq(df.schema(), expected_schema); } + + async fn ingest_data(&self, dataset_name: &DatasetName, data: &str) { + self.execute_with_input(["ingest", dataset_name.as_str(), "--stdin"], data) + .await + .success(); + } + + async fn assert_success_command_execution<I, S>( + &self, + cmd: I, + maybe_expected_stdout: Option<&str>, + maybe_expected_stderr: Option<impl IntoIterator<Item = &str> + Send>, + ) where + I: IntoIterator<Item = S> + Send, + S: AsRef<std::ffi::OsStr>, + { + assert_execute_command_result( + &self.execute(cmd).await.success(), + maybe_expected_stdout, + maybe_expected_stderr, + ); + } + + async fn assert_success_command_execution_with_input<I, S, T>( + &self, + cmd: I, + input: T, + maybe_expected_stdout: Option<&str>, + maybe_expected_stderr: Option<impl IntoIterator<Item = &str> + Send>, + ) where + I: IntoIterator<Item = S> + Send, + S: AsRef<std::ffi::OsStr>, + T: Into<Vec<u8>> + Send, + { + assert_execute_command_result( + &self.execute_with_input(cmd, input).await.success(), + maybe_expected_stdout, + maybe_expected_stderr, + ); + } + + async fn assert_failure_command_execution<I, S>( + &self, + cmd: I, + maybe_expected_stdout: Option<&str>, + maybe_expected_stderr: Option<impl IntoIterator<Item = &str> + Send>, + ) where + I: IntoIterator<Item = S> + Send, + S: AsRef<std::ffi::OsStr>, + { + assert_execute_command_result( + &self.execute(cmd).await.failure(), + maybe_expected_stdout, + maybe_expected_stderr, + ); + } + + async fn assert_failure_command_execution_with_input<I, S, T>( + &self, + cmd: I, + input: T, + maybe_expected_stdout: Option<&str>, + maybe_expected_stderr: Option<impl IntoIterator<Item = &str> + Send>, + ) where + I: IntoIterator<Item = S> + Send, + S: AsRef<std::ffi::OsStr>, + T: Into<Vec<u8>> + Send, + { +
assert_execute_command_result( + &self.execute_with_input(cmd, input).await.failure(), + maybe_expected_stdout, + maybe_expected_stderr, + ); + } } //////////////////////////////////////////////////////////////////////////////////////////////////////////////////////// @@ -193,8 +387,9 @@ pub struct DatasetRecord { #[serde(rename = "ID")] pub id: DatasetID, pub name: DatasetName, - #[serde(with = "DatasetKindDef")] - pub kind: DatasetKind, + // The CLI returns the regular DatasetKind enum (Root/Derivative) for local datasets, + // but for remote datasets it returns a Remote(DatasetKind) value + pub kind: String, pub head: Multihash, pub pulled: Option<DateTime<Utc>>, pub records: usize, @@ -211,4 +406,35 @@ pub struct RepoAlias { pub alias: String, } +#[derive(Debug, PartialEq, Eq)] +pub struct BlockRecord { + pub block_hash: Multihash, + pub block: MetadataBlock, +} + +//////////////////////////////////////////////////////////////////////////////////////////////////////////////////////// + +fn assert_execute_command_result<'a>( + command_result: &ExecuteCommandResult, + maybe_expected_stdout: Option<&str>, + maybe_expected_stderr: Option<impl IntoIterator<Item = &'a str>>, +) { + let actual_stdout = std::str::from_utf8(&command_result.get_output().stdout).unwrap(); + + if let Some(expected_stdout) = maybe_expected_stdout { + pretty_assertions::assert_eq!(expected_stdout, actual_stdout); + } + + if let Some(expected_stderr_items) = maybe_expected_stderr { + let stderr = std::str::from_utf8(&command_result.get_output().stderr).unwrap(); + + for expected_stderr_item in expected_stderr_items { + assert!( + stderr.contains(expected_stderr_item), + "Unexpected output:\n{stderr}", + ); + } + } +} + ////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////