new: automatic quotes for string values and messages
pamburus committed Apr 25, 2024
1 parent bf3ac3a commit 95c30f6
Showing 15 changed files with 1,536 additions and 28 deletions.
7 changes: 6 additions & 1 deletion Cargo.lock

Some generated files are not rendered by default.

28 changes: 23 additions & 5 deletions Cargo.toml
@@ -1,12 +1,24 @@
[package]
[workspace]
members = [".", "crate/encstr"]

[workspace.package]
repository = "https://github.com/pamburus/hl"
authors = ["Pavel Ivanov <mr.pavel.ivanov@gmail.com>"]
version = "0.28.0-alpha.3"
edition = "2021"
license = "MIT"

[package]
name = "hl"
description = "Utility for viewing json-formatted log files"
categories = ["command-line-utilities"]
description = "Utility for viewing json-formatted log files."
keywords = ["cli", "human", "log"]
name = "hl"
version = "0.28.0-alpha.2"
edition = "2021"
build = "build.rs"
repository.workspace = true
edition.workspace = true
authors.workspace = true
license.workspace = true
version.workspace = true

[build-dependencies]
capnpc = "0.19"
@@ -65,6 +77,7 @@ thiserror = "1"
wildflower = { git = "https://github.com/cassaundra/wildflower.git" }
winapi = { version = "0", features = ["handleapi"] }
wyhash = "0"
encstr = { path = "crate/encstr" }

[target.'cfg(target_os = "macos")'.dependencies]
kqueue = "1"
@@ -117,3 +130,8 @@ harness = false
[[bench]]
name = "json"
harness = false

[[bench]]
name = "encstr"
path = "benches/encstr/benches.rs"
harness = false
5 changes: 5 additions & 0 deletions benches/encstr/benches.rs
@@ -0,0 +1,5 @@
use criterion::criterion_main;

mod json;

criterion_main!(json::benches);
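
Note: because the new encstr bench target is registered with harness = false, this file supplies the benchmark binary's entry point itself; criterion_main! expands into a main function that runs the benches group defined by criterion_group! inside the json module (shown in the next file). A second encoding module could be benchmarked from the same binary by declaring it and listing its group; a hypothetical sketch, not part of this commit:

use criterion::criterion_main;

mod json;
// mod raw; // hypothetical second module defining its own criterion_group!(benches, ...)

criterion_main!(json::benches /*, raw::benches */);
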
78 changes: 78 additions & 0 deletions benches/encstr/json.rs
@@ -0,0 +1,78 @@
// std imports
use std::{hint::black_box, time::Duration};

// third-party imports
use criterion::*;
use serde_json::de::{Read, StrRead};

// local imports
use encstr::{json::JsonEncodedString, AnyEncodedString, Builder, Handler, Ignorer};

criterion_group!(benches, serde, decode, tokens);

fn serde(c: &mut Criterion) {
let mut group = c.benchmark_group("encstr/json/serde");
group.warm_up_time(Duration::from_millis(250));
group.measurement_time(Duration::from_secs(2));
group.bench_function("medium", |b| {
let mut buf = Vec::with_capacity(4096);
b.iter(|| {
buf.clear();
let mut reader = StrRead::new(&MEDIUM[1..]);
black_box(reader.parse_str_raw(black_box(&mut buf))).unwrap();
});
});
group.finish();
}

fn decode(c: &mut Criterion) {
let mut group = c.benchmark_group("encstr/json/decode");
group.warm_up_time(Duration::from_millis(250));
group.measurement_time(Duration::from_secs(2));
group.bench_function("ignore/medium", |b| {
let _ = serde_json::from_str::<String>(MEDIUM).unwrap();
let string = JsonEncodedString::new(MEDIUM);
let mut result = Ignorer;
b.iter(|| {
string.decode(black_box(&mut result)).unwrap();
});
});
group.bench_function("build/medium", |b| {
let string = JsonEncodedString::new(MEDIUM);
let mut result = Builder::with_capacity(4096);
b.iter(|| {
result.clear();
string.decode(black_box(&mut result)).unwrap();
});
assert_eq!(result.as_str(), serde_json::from_str::<String>(MEDIUM).unwrap());
});
group.finish();
}

fn tokens(c: &mut Criterion) {
let mut group = c.benchmark_group("encstr/json/tokens");
group.warm_up_time(Duration::from_millis(250));
group.measurement_time(Duration::from_secs(2));
group.bench_function("ignore/medium", |b| {
let string = JsonEncodedString::new(MEDIUM);
b.iter(|| {
for token in string.tokens() {
black_box(token).unwrap();
}
});
});
group.bench_function("build/medium", |b| {
let string = JsonEncodedString::new(MEDIUM);
let mut result = Builder::with_capacity(4096);
b.iter(|| {
result.clear();
for token in black_box(string.tokens()) {
black_box(result.handle(black_box(token).unwrap())).unwrap();
}
});
assert_eq!(result.as_str(), serde_json::from_str::<String>(MEDIUM).unwrap());
});
group.finish();
}

const MEDIUM: &str = r#""UPDATE \"apple\" SET \"seed\"='8c858361-5b73-442e-b84c-78482ed60ce1',\"planted_at\"=now() + timeout,\"importer\"='00d1cce2-c32e-4bb7-88da-474083fc2a1a',\"start_at\"=now() + repeat_interval,\"planted_at\"=now(),\"state\"='running',\"updated_at\"='2023-12-04 10:01:29.399' WHERE id IN (SELECT id FROM \"apple\" WHERE breed in ('red-delicious') AND distributor in ('magic-fruits','grand-provider') AND ((now() >= harvest_at AND (seed IS NULL OR (seed = 'b66134a4-c5c5-4adc-8c33-c8b7f780853b' AND importer != 'f86eb35d-33cd-499b-85cd-da175188e459'))) OR (now() >= planted_at)) ORDER BY \"updated_at\" LIMIT 4) AND ((now() >= harvest_at AND (seed IS NULL OR (seed = 'a3ecc839-0a32-4722-b4db-90c2ce8296a5' AND importer != '73a1fe4e-f4d1-4d09-99cb-9b07f2e32a96'))) OR (now() >= planted_at)) RETURNING *""#;
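
The benchmarks above drive the new encstr API in two ways: a one-shot decode into a Handler (Builder accumulates the result, Ignorer discards it), and manual iteration over the token stream. A minimal standalone sketch using only the calls that appear in the benchmark code; the input literal and assertions are illustrative assumptions, not part of this commit:

use encstr::{json::JsonEncodedString, AnyEncodedString, Builder, Handler};

fn main() {
    // A raw JSON string literal, surrounding quotes included (like MEDIUM above).
    let raw = r#""a \"quoted\" value""#;
    let encoded = JsonEncodedString::new(raw);

    // One-shot decode into a Builder, which accumulates the unescaped text.
    let mut out = Builder::with_capacity(raw.len());
    encoded.decode(&mut out).unwrap();
    assert_eq!(out.as_str(), r#"a "quoted" value"#);

    // The same result via the token iterator, feeding each token to the handler.
    out.clear();
    for token in encoded.tokens() {
        out.handle(token.unwrap()).unwrap();
    }
    assert_eq!(out.as_str(), r#"a "quoted" value"#);
}
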
2 changes: 1 addition & 1 deletion benches/parse-and-format.rs
@@ -20,7 +20,7 @@ use hl::{
fn benchmark(c: &mut Criterion) {
let mut c = c.benchmark_group("parse-and-format");
for (name, record) in [("kibana-record-01", KIBANA_RECORD_01)] {
for theme in ["classic", "one-dark-green-truecolor", "dmt"] {
for theme in ["universal", "classic"] {
c.bench_function(format!("{}/{}", name, theme), |b| {
let settings = Settings::default();
let parser = Parser::new(ParserSettings::new(&settings.fields.predefined, empty(), false, None));
9 changes: 9 additions & 0 deletions crate/encstr/Cargo.toml
@@ -0,0 +1,9 @@
[package]
description = "Encoded string"
name = "encstr"
workspace = "../.."
repository.workspace = true
edition.workspace = true
authors.workspace = true
license.workspace = true
version.workspace = true
