[tests] fix :export-session-to test

Also, fix some regex101 import bitrot
pull/1161/merge
Tim Stack 9 months ago
parent d13a6e8b2d
commit 3e49915ef2

@@ -82,6 +82,8 @@ Bug Fixes:
* A warning will now be issued if a timestamp in a log format's
sample message does not match completely. Warnings in the
configuration can be viewed by passing the `-W` flag.
* Importing from regex101.com broke due to some changes in the
API.
Interface changes:
* The breadcrumb bar hotkey is moving to backtick `` ` ``

@@ -31,6 +31,7 @@
#include <curl/curl.h>
#include "base/itertools.hh"
#include "config.h"
#include "curl_looper.hh"
#include "ghc/filesystem.hpp"
@@ -58,6 +59,7 @@ static const json_path_container UNIT_TEST_HANDLERS = {
};
static const typed_json_path_container<entry> ENTRY_HANDLERS = {
yajlpp::property_handler("dateCreated").for_field(&entry::e_date_created),
yajlpp::property_handler("regex").for_field(&entry::e_regex),
yajlpp::property_handler("testString").for_field(&entry::e_test_string),
yajlpp::property_handler("flags").for_field(&entry::e_flags),
@@ -124,6 +126,7 @@ upsert(entry& en)
auto parse_res
= RESPONSE_HANDLERS.parser_for(intern_string::lookup(cr.get_name()))
.with_ignore_unused(true)
.of(response);
if (parse_res.isOk()) {
return Ok(parse_res.unwrap());
@@ -137,14 +140,14 @@ upsert(entry& en)
struct retrieve_entity {
std::string re_permalink_fragment;
int32_t re_versions{1};
std::vector<int32_t> re_versions;
};
static const typed_json_path_container<retrieve_entity> RETRIEVE_ENTITY_HANDLERS
= {
yajlpp::property_handler("permalinkFragment")
.for_field(&retrieve_entity::re_permalink_fragment),
yajlpp::property_handler("versions")
yajlpp::property_handler("versions#")
.for_field(&retrieve_entity::re_versions),
};
@@ -187,6 +190,7 @@ retrieve(const std::string& permalink)
auto parse_res
= RETRIEVE_ENTITY_HANDLERS
.parser_for(intern_string::lookup(entry_req.get_name()))
.with_ignore_unused(true)
.of(response);
if (parse_res.isErr()) {
@@ -201,11 +205,12 @@ retrieve(const std::string& permalink)
auto entry_value = parse_res.unwrap();
if (entry_value.re_versions == 0) {
auto latest_version = entry_value.re_versions | lnav::itertools::max();
if (!latest_version) {
return no_entry{};
}
auto version_url = entry_url / fmt::to_string(entry_value.re_versions);
auto version_url = entry_url / fmt::to_string(latest_version.value());
curl_request version_req(version_url.string());
curl_easy_setopt(version_req, CURLOPT_URL, version_req.get_name().c_str());
@@ -224,6 +229,7 @@ retrieve(const std::string& permalink)
auto version_parse_res
= ENTRY_HANDLERS
.parser_for(intern_string::lookup(version_req.get_name()))
.with_ignore_unused(true)
.of(version_response);
if (version_parse_res.isErr()) {
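
The retrieve() changes above assume the regex101.com response now reports its versions as an array of numbers, with the newest version fetched in a follow-up request. Below is a minimal standalone sketch of just that selection step, using only the standard library; the real code parses the array via the "versions#" handler, picks the maximum with lnav::itertools::max(), and issues a libcurl request. The URL and version list here are illustrative placeholders, not the documented regex101.com API.

// Standalone sketch of the new version-selection logic (placeholders only).
#include <algorithm>
#include <cstdint>
#include <cstdio>
#include <string>
#include <vector>

int main()
{
    const std::string entry_url = "https://regex101.com/api/regex/zpEnjV";  // hypothetical endpoint
    const std::vector<int32_t> versions = {3, 1, 2};  // hypothetical "versions" array

    const auto latest = std::max_element(versions.begin(), versions.end());
    if (latest == versions.end()) {
        // mirrors the new "return no_entry{}" path when no versions are listed
        std::fprintf(stderr, "no versions found for entry\n");
        return 1;
    }

    const auto version_url = entry_url + "/" + std::to_string(*latest);
    std::printf("fetching %s\n", version_url.c_str());
    return 0;
}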

@@ -56,6 +56,7 @@ struct unit_test {
};
struct entry {
std::string e_date_created;
std::string e_regex;
std::string e_test_string;
std::string e_flags{"gs"};

@@ -395,6 +395,25 @@ struct json_path_handler : public json_path_handler_base {
static constexpr bool value = false;
};
template<typename T, typename... Args>
struct LastIsIntegerVector {
using value_type = typename LastIsIntegerVector<Args...>::value_type;
static constexpr bool value = LastIsIntegerVector<Args...>::value;
};
template<typename T, typename U>
struct LastIsIntegerVector<std::vector<U> T::*> {
using value_type = U;
static constexpr bool value
= std::is_integral<U>::value && !std::is_same<U, bool>::value;
};
template<typename T, typename U>
struct LastIsIntegerVector<U T::*> {
using value_type = void;
static constexpr bool value = false;
};
template<typename T, typename... Args>
struct LastIsMap {
using value_type = typename LastIsMap<Args...>::value_type;
@@ -489,11 +508,34 @@ struct json_path_handler : public json_path_handler_base {
return *this;
}
template<typename... Args,
std::enable_if_t<LastIsIntegerVector<Args...>::value, bool> = true>
json_path_handler& for_field(Args... args)
{
this->add_cb(int_field_cb);
this->jph_integer_cb
= [args...](yajlpp_parse_context* ypc, long long val) {
const auto* jph = ypc->ypc_current_handler;
auto* obj = ypc->ypc_obj_stack.top();
if (val < jph->jph_min_value) {
jph->report_min_value_error(ypc, val);
return 1;
}
json_path_handler::get_field(obj, args...).emplace_back(val);
return 1;
};
return *this;
}
template<typename... Args,
std::enable_if_t<LastIsVector<Args...>::value, bool> = true,
std::enable_if_t<
!std::is_same<typename LastIsVector<Args...>::value_type,
std::string>::value,
std::string>::value
&& !LastIsIntegerVector<Args...>::value,
bool>
= true>
json_path_handler& for_field(Args... args)
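
The LastIsIntegerVector trait and the new for_field() overload above dispatch on the last argument: a pointer to a std::vector of a non-bool integral member routes each parsed value into emplace_back(), which is what lets the "versions#" path (the trailing `#` appears to be yajlpp's array-element convention) fill retrieve_entity::re_versions. A minimal standalone sketch of that dispatch, with placeholder types rather than the lnav yajlpp implementation:

// Standalone sketch: only the last member pointer given to for_field() decides
// whether the integer-vector overload is selected.
#include <cstdint>
#include <string>
#include <type_traits>
#include <vector>

template<typename T, typename... Args>
struct LastIsIntegerVector {
    static constexpr bool value = LastIsIntegerVector<Args...>::value;
};

// Chosen when the last argument points to a std::vector of a non-bool integer.
template<typename T, typename U>
struct LastIsIntegerVector<std::vector<U> T::*> {
    static constexpr bool value
        = std::is_integral<U>::value && !std::is_same<U, bool>::value;
};

// Fallback for any other single member pointer.
template<typename T, typename U>
struct LastIsIntegerVector<U T::*> {
    static constexpr bool value = false;
};

struct retrieve_entity {
    std::string re_permalink_fragment;
    std::vector<int32_t> re_versions;
};

struct outer {  // hypothetical enclosing object, just to show nested access
    retrieve_entity o_entity;
};

static_assert(LastIsIntegerVector<std::vector<int32_t> retrieve_entity::*>::value,
              "a vector<int32_t> member selects the integer-vector overload");
static_assert(!LastIsIntegerVector<std::string retrieve_entity::*>::value,
              "a string member does not");
static_assert(LastIsIntegerVector<retrieve_entity outer::*,
                                  std::vector<int32_t> retrieve_entity::*>::value,
              "only the last member pointer in the argument list matters");

int main() { return 0; }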

@@ -362,6 +362,8 @@ EXPECTED_FILES = \
$(srcdir)/%reldir%/test_logfile.sh_6602faf7817c494c33e32da7ee95f13aa9210d01.out \
$(srcdir)/%reldir%/test_logfile.sh_7c2e11488bccc59458b5775db4b90de964858259.err \
$(srcdir)/%reldir%/test_logfile.sh_7c2e11488bccc59458b5775db4b90de964858259.out \
$(srcdir)/%reldir%/test_logfile.sh_82d65f4dac07b1623f8202444d5b9144096e8243.err \
$(srcdir)/%reldir%/test_logfile.sh_82d65f4dac07b1623f8202444d5b9144096e8243.out \
$(srcdir)/%reldir%/test_logfile.sh_8a5e754cd471e5fdcdaede49c9290903acd7aad6.err \
$(srcdir)/%reldir%/test_logfile.sh_8a5e754cd471e5fdcdaede49c9290903acd7aad6.out \
$(srcdir)/%reldir%/test_logfile.sh_a7037efd0c4bbf51940137a44e57d94e9307e83e.err \
@@ -436,6 +438,58 @@ EXPECTED_FILES = \
$(srcdir)/%reldir%/test_pretty_print.sh_cd361eeca7e91bfab942b75d6c3422c7a456a111.out \
$(srcdir)/%reldir%/test_pretty_print.sh_f8feb52a321026d9562b271eb37a2c56dfaed329.err \
$(srcdir)/%reldir%/test_pretty_print.sh_f8feb52a321026d9562b271eb37a2c56dfaed329.out \
$(srcdir)/%reldir%/test_regex101.sh_0fa3663a45aca6a328cb728872af7ed7ee896f1c.err \
$(srcdir)/%reldir%/test_regex101.sh_0fa3663a45aca6a328cb728872af7ed7ee896f1c.out \
$(srcdir)/%reldir%/test_regex101.sh_182ae9244db314a953af2bee969726e381bc5a32.err \
$(srcdir)/%reldir%/test_regex101.sh_182ae9244db314a953af2bee969726e381bc5a32.out \
$(srcdir)/%reldir%/test_regex101.sh_2158f1f011ba8e1b152396c072790c076fdb8ce8.err \
$(srcdir)/%reldir%/test_regex101.sh_2158f1f011ba8e1b152396c072790c076fdb8ce8.out \
$(srcdir)/%reldir%/test_regex101.sh_281af24141680330791db7f7c5fa70833ce08a6b.err \
$(srcdir)/%reldir%/test_regex101.sh_281af24141680330791db7f7c5fa70833ce08a6b.out \
$(srcdir)/%reldir%/test_regex101.sh_35703b13990785632cca82123fb3883797959c0b.err \
$(srcdir)/%reldir%/test_regex101.sh_35703b13990785632cca82123fb3883797959c0b.out \
$(srcdir)/%reldir%/test_regex101.sh_366730cac50b4a09b7de4b84641791470b1cb9a3.err \
$(srcdir)/%reldir%/test_regex101.sh_366730cac50b4a09b7de4b84641791470b1cb9a3.out \
$(srcdir)/%reldir%/test_regex101.sh_3d18474a3e472fff6e23e0c41337ec9188fee591.err \
$(srcdir)/%reldir%/test_regex101.sh_3d18474a3e472fff6e23e0c41337ec9188fee591.out \
$(srcdir)/%reldir%/test_regex101.sh_442cc58676590a3604d5c2183f5fe0a75c98351a.err \
$(srcdir)/%reldir%/test_regex101.sh_442cc58676590a3604d5c2183f5fe0a75c98351a.out \
$(srcdir)/%reldir%/test_regex101.sh_566fd88d216a44bc1c6e23f2d6f2d0caf99d42f9.err \
$(srcdir)/%reldir%/test_regex101.sh_566fd88d216a44bc1c6e23f2d6f2d0caf99d42f9.out \
$(srcdir)/%reldir%/test_regex101.sh_5f2f7ecb6ab9cbec4b41385b91bd038906b8a7b2.err \
$(srcdir)/%reldir%/test_regex101.sh_5f2f7ecb6ab9cbec4b41385b91bd038906b8a7b2.out \
$(srcdir)/%reldir%/test_regex101.sh_629bde30483e0a6461076e9058f3a5eb81ae0425.err \
$(srcdir)/%reldir%/test_regex101.sh_629bde30483e0a6461076e9058f3a5eb81ae0425.out \
$(srcdir)/%reldir%/test_regex101.sh_630db454054cf92ec9bd0f4e3e83300047f583ff.err \
$(srcdir)/%reldir%/test_regex101.sh_630db454054cf92ec9bd0f4e3e83300047f583ff.out \
$(srcdir)/%reldir%/test_regex101.sh_771af6f3d29b8350542d5c6e98bdbf4c223cd531.err \
$(srcdir)/%reldir%/test_regex101.sh_771af6f3d29b8350542d5c6e98bdbf4c223cd531.out \
$(srcdir)/%reldir%/test_regex101.sh_7991a5b617867cf37c9f7baa85ffa425f7d455a2.err \
$(srcdir)/%reldir%/test_regex101.sh_7991a5b617867cf37c9f7baa85ffa425f7d455a2.out \
$(srcdir)/%reldir%/test_regex101.sh_79ee3f5fe71ccec97b2619d8c1f74ca97ffd2243.err \
$(srcdir)/%reldir%/test_regex101.sh_79ee3f5fe71ccec97b2619d8c1f74ca97ffd2243.out \
$(srcdir)/%reldir%/test_regex101.sh_7de76c174c58d67bf93e8f01d6d55ebb6a023f10.err \
$(srcdir)/%reldir%/test_regex101.sh_7de76c174c58d67bf93e8f01d6d55ebb6a023f10.out \
$(srcdir)/%reldir%/test_regex101.sh_8a43e6657d4f60e68d31eb8302542ca28e80d077.err \
$(srcdir)/%reldir%/test_regex101.sh_8a43e6657d4f60e68d31eb8302542ca28e80d077.out \
$(srcdir)/%reldir%/test_regex101.sh_8e93a3b6b941847c71409a297779fbb0a6666a51.err \
$(srcdir)/%reldir%/test_regex101.sh_8e93a3b6b941847c71409a297779fbb0a6666a51.out \
$(srcdir)/%reldir%/test_regex101.sh_95c56a9d146ec9a7c2196559d316f928b2ae6ae9.err \
$(srcdir)/%reldir%/test_regex101.sh_95c56a9d146ec9a7c2196559d316f928b2ae6ae9.out \
$(srcdir)/%reldir%/test_regex101.sh_9d101ee29c45cdb8c0f117ad736c9a5dd5da5839.err \
$(srcdir)/%reldir%/test_regex101.sh_9d101ee29c45cdb8c0f117ad736c9a5dd5da5839.out \
$(srcdir)/%reldir%/test_regex101.sh_c43e07df9b3068696fdc8759c7561135db981b38.err \
$(srcdir)/%reldir%/test_regex101.sh_c43e07df9b3068696fdc8759c7561135db981b38.out \
$(srcdir)/%reldir%/test_regex101.sh_cbd859487e4ea011cd6e0f0f114d70158bfd8b43.err \
$(srcdir)/%reldir%/test_regex101.sh_cbd859487e4ea011cd6e0f0f114d70158bfd8b43.out \
$(srcdir)/%reldir%/test_regex101.sh_cf6c0a9f0f04e24ce1fae7a0a434830b14447f83.err \
$(srcdir)/%reldir%/test_regex101.sh_cf6c0a9f0f04e24ce1fae7a0a434830b14447f83.out \
$(srcdir)/%reldir%/test_regex101.sh_d84597760285c3964b258726341e018f6cd49954.err \
$(srcdir)/%reldir%/test_regex101.sh_d84597760285c3964b258726341e018f6cd49954.out \
$(srcdir)/%reldir%/test_regex101.sh_f23e393dbf23d0d8e276e9b7610c7b74d79980f8.err \
$(srcdir)/%reldir%/test_regex101.sh_f23e393dbf23d0d8e276e9b7610c7b74d79980f8.out \
$(srcdir)/%reldir%/test_regex101.sh_fc41b6ee90cbf038620151f16d164b361acf82dd.err \
$(srcdir)/%reldir%/test_regex101.sh_fc41b6ee90cbf038620151f16d164b361acf82dd.out \
$(srcdir)/%reldir%/test_sessions.sh_0300a1391c33b1c45ddfa90198a6bd0a5404a77f.err \
$(srcdir)/%reldir%/test_sessions.sh_0300a1391c33b1c45ddfa90198a6bd0a5404a77f.out \
$(srcdir)/%reldir%/test_sessions.sh_17b85654b929b2a8fc1705a170ced544783292fa.err \
@@ -466,10 +520,14 @@ EXPECTED_FILES = \
$(srcdir)/%reldir%/test_sessions.sh_b3d71a87fcb4e3487f71ccad8c6ce681db220572.out \
$(srcdir)/%reldir%/test_sessions.sh_b932b33dd087b94d4306dd179c5d4f9ddd394960.err \
$(srcdir)/%reldir%/test_sessions.sh_b932b33dd087b94d4306dd179c5d4f9ddd394960.out \
$(srcdir)/%reldir%/test_sessions.sh_ba1ded92531350668301431db64df2d2f4a2e9ee.err \
$(srcdir)/%reldir%/test_sessions.sh_ba1ded92531350668301431db64df2d2f4a2e9ee.out \
$(srcdir)/%reldir%/test_sessions.sh_ddf45811e9906de9f3930fe802ac7b2cc6e48106.err \
$(srcdir)/%reldir%/test_sessions.sh_ddf45811e9906de9f3930fe802ac7b2cc6e48106.out \
$(srcdir)/%reldir%/test_sessions.sh_e57697be4d81ac8e5b2b2fa84f919b2d494978f3.err \
$(srcdir)/%reldir%/test_sessions.sh_e57697be4d81ac8e5b2b2fa84f919b2d494978f3.out \
$(srcdir)/%reldir%/test_sessions.sh_e988439404f2e97604641c8d087855f3efe052e4.err \
$(srcdir)/%reldir%/test_sessions.sh_e988439404f2e97604641c8d087855f3efe052e4.out \
$(srcdir)/%reldir%/test_shlexer.sh_14dd967cb2af90899c9e5e45d00b676b5a3163aa.err \
$(srcdir)/%reldir%/test_shlexer.sh_14dd967cb2af90899c9e5e45d00b676b5a3163aa.out \
$(srcdir)/%reldir%/test_shlexer.sh_2781f5dd570580cbe746ad91b58a28b8371283b3.err \

@@ -1 +0,0 @@
/bin/bash: bad-command: command not found

@@ -1,7 +0,0 @@
✘ error: invalid timestamp: 2022-06-16Tabc
reason: the leading part of the timestamp was matched, however, the trailing text “Tabc” was not
 --> command-option:1
 | :goto 2022-06-16Tabc 
 |  ^--^ unrecognized input 
 = note: input matched time format “%Y-%m-%d”
 = help: fix the timestamp or remove the trailing text

@@ -1,20 +0,0 @@
✘ error: 'bad' is not a supported configuration $schema version
 --> {test_dir}/bad-config2/formats/invalid-config/config.bad-schema.json:2
 |  "$schema": "bad" 
 = note: expecting one of the following $schema values:
 https://lnav.org/schemas/config-v1.schema.json
 = help: Property Synopsis
/$schema <schema-uri>
Description
The URI that specifies the schema that describes this type of file
Example
https://lnav.org/schemas/config-v1.schema.json
✘ error: invalid JSON
 --> {test_dir}/bad-config2/formats/invalid-config/config.malformed.json:3
 | parse error: object key and value must be separated by a colon (':')
 |  "ui": "theme", "abc", "def": "" }
 |  (right here) ------^
 | 
✘ error: invalid JSON
reason: parse error: premature EOF
 --> {test_dir}/bad-config2/formats/invalid-config/config.truncated.json:3

@@ -1,171 +0,0 @@
✘ error: “invalid(abc” is not a valid regular expression
reason: missing closing parenthesis
 --> /invalid_props_log/tags/badtag3/pattern
 | invalid(abc 
 |  ^ missing closing parenthesis
 --> {test_dir}/bad-config/formats/invalid-properties/format.json:35
 |  "pattern": "invalid(abc"
 = help: Property Synopsis
/invalid_props_log/tags/badtag3/pattern <regex>
Description
The regular expression to match against the body of the log message
Example
\w+ is down
✘ error: “abc(def” is not a valid regular expression
reason: missing closing parenthesis
 --> /invalid_props_log/search-table/bad_table_regex/pattern
 | abc(def 
 |  ^ missing closing parenthesis 
 --> {test_dir}/bad-config/formats/invalid-properties/format.json:40
 |  "pattern": "abc(def" 
 = help: Property Synopsis
/invalid_props_log/search-table/bad_table_regex/pattern <regex>
Description
The regular expression for this search table.
✘ error: “^(?<timestamp>\d+: (?<body>.*)$” is not a valid regular expression
reason: missing closing parenthesis
 --> /bad_regex_log/regex/std/pattern
 | ^(?<timestamp>\d+: (?<body>.*)$ 
 |  ^ missing closing parenthesis
 --> {test_dir}/bad-config/formats/invalid-regex/format.json:6
 |  "pattern": "^(?<timestamp>\\d+: (?<body>.*)$"
 = help: Property Synopsis
/bad_regex_log/regex/std/pattern <message-regex>
Description
The regular expression to match a log message and capture fields.
✘ error: “(foo” is not a valid regular expression
reason: missing closing parenthesis
 --> pattern
 | (foo 
 |  ^ missing closing parenthesis 
 --> {test_dir}/bad-config/formats/invalid-regex/format.json:13
 |  "error": "(foo" 
 = help: Property Synopsis
/bad_regex_log/level/error <pattern|integer>
Description
The regular expression used to match the log text for this level. For JSON logs with numeric levels, this should be the number for the corresponding level.
✘ error: “abc(” is not a valid regular expression
reason: missing closing parenthesis
 --> /bad_regex_log/highlights/foobar/pattern
 | abc( 
 |  ^ missing closing parenthesis 
 --> {test_dir}/bad-config/formats/invalid-regex/format.json:25
 |  "pattern": "abc(" 
 = help: Property Synopsis
/bad_regex_log/highlights/foobar/pattern <regex>
Description
A regular expression to highlight in logs of this format.
✘ error: “foo” is not a valid value for option “/bad_sample_log/value/pid/kind”
 --> {test_dir}/bad-config/formats/invalid-sample/format.json:24
 |  "kind": "foo" 
 = help: Property Synopsis
/bad_sample_log/value/pid/kind <data-type>
Description
The type of data in the field
Allowed Values
string, integer, float, boolean, json, struct, quoted, xml
✘ error: 'bad' is not a supported log format $schema version
 --> {test_dir}/bad-config/formats/invalid-schema/format.json:2
 |  "$schema": "bad" 
 = note: expecting one of the following $schema values:
 https://lnav.org/schemas/format-v1.schema.json
 = help: Property Synopsis
/$schema The URI of the schema for this file
Description
Specifies the type of this file
✘ error: invalid pattern: “incomplete-match”
reason: pattern does not match entire message
 --> {test_dir}/bad-config/formats/invalid-regex/format.json:20
 | 1428634687123; foo 
 |  ^ matched up to here 
 = note: incomplete-match = ^(?<timestamp>\d+);
 = help: update the regular expression to fully capture the sample message
✘ error: invalid sample log message: "abc: foo"
reason: unrecognized timestamp -- abc
 --> {test_dir}/bad-config/formats/invalid-sample/format.json:30
 = note: the following custom formats were tried:
abc
^ “%i” matched up to here
 = help: If the timestamp format is not supported by default, you can add a custom format with the “timestamp-format” property
✘ error: invalid sample log message: "1428634687123| debug hello"
reason: “debug” does not match the expected level of “info”
 --> {test_dir}/bad-config/formats/invalid-sample/format.json:33
 = note: matched regex = with-level
captured level = “debug”
✘ error: invalid pattern: “with-level”
reason: pattern does not match entire multiline sample message
 --> {test_dir}/bad-config/formats/invalid-sample/format.json:37
 = note: with-level = ^(?<timestamp>\d+)\| (?<level>\w+) (?<body>\w+)$
 = help: use “.*” to match new-lines
✘ error: invalid sample log message: "1428634687123; foo bar"
reason: sample does not match any patterns
 --> {test_dir}/bad-config/formats/invalid-sample/format.json:41
 = note: the following shows how each pattern matched this sample:
1428634687123; foo bar
^ bad-time matched up to here
^ semi matched up to here
^ std matched up to here
^ with-level matched up to here
 = note: bad-time  = “^(?<timestamp>\w+): (?<body>\w+)$”
semi  = “^(?<timestamp>\d+); (?<body>\w+)$”
std  = “^(?<timestamp>\d+): (?<pid>\w+) (?<body>.*)$”
with-level = “^(?<timestamp>\d+)\| (?<level>\w+) (?<body>\w+)$”
⚠ warning: invalid pattern: “/bad_sample_log/regex/semi”
reason: pattern does not match any samples
 --> {test_dir}/bad-config/formats/invalid-sample/format.json:10
 = help: every pattern should have at least one sample that it matches
⚠ warning: invalid pattern: “/bad_sample_log/regex/std”
reason: pattern does not match any samples
 --> {test_dir}/bad-config/formats/invalid-sample/format.json:7
 = help: every pattern should have at least one sample that it matches
⚠ warning: invalid value “/invalid_props_log/value/non-existent”
reason: no patterns have a capture named “non-existent”
 --> {test_dir}/bad-config/formats/invalid-properties/format.json:4
 = note: the following captures are available:
body, pid, timestamp
 = help: values are populated from captures in patterns, so at least one pattern must have a capture with this value name
✘ error: invalid tag definition “/invalid_props_log/tags/badtag”
reason: tag definitions must have a non-empty pattern
 --> {test_dir}/bad-config/formats/invalid-properties/format.json:4
✘ error: invalid tag definition “/invalid_props_log/tags/badtag2”
reason: tag definitions must have a non-empty pattern
 --> {test_dir}/bad-config/formats/invalid-properties/format.json:4
✘ error: invalid tag definition “/invalid_props_log/tags/badtag3”
reason: tag definitions must have a non-empty pattern
 --> {test_dir}/bad-config/formats/invalid-properties/format.json:4
✘ error: invalid value for property “/invalid_props_log/timestamp-field”
reason: “ts” was not found in the pattern at /invalid_props_log/regex/std
 --> {test_dir}/bad-config/formats/invalid-properties/format.json:4
 = note: the following captures are available:
body, pid, timestamp
✘ error: “not a color” is not a valid color value for property “/invalid_props_log/highlights/hl1/color”
reason: Unknown color: 'not a color'. See https://jonasjacek.github.io/colors/ for a list of supported color names
 --> {test_dir}/bad-config/formats/invalid-properties/format.json:23
✘ error: “also not a color” is not a valid color value for property “/invalid_props_log/highlights/hl1/background-color”
reason: Unknown color: 'also not a color'. See https://jonasjacek.github.io/colors/ for a list of supported color names
 --> {test_dir}/bad-config/formats/invalid-properties/format.json:24
✘ error: “no_regexes_log” is not a valid log format
reason: no regexes specified
 --> {test_dir}/bad-config/formats/no-regexes/format.json:4
✘ error: “no_regexes_log” is not a valid log format
reason: log message samples must be included in a format definition
 --> {test_dir}/bad-config/formats/no-regexes/format.json:4
✘ error: “no_sample_log” is not a valid log format
reason: log message samples must be included in a format definition
 --> {test_dir}/bad-config/formats/no-samples/format.json:4
✘ error: failed to compile SQL statement
reason: near "TALE": syntax error
 --> {test_dir}/bad-config/formats/invalid-sql/init.sql:4
 | -- comment test 
 | CREATE TALE invalid (x y z); 
 |  ^ near "TALE": syntax error 
✘ error: failed to execute SQL statement
reason: ✘ error: “abc(” is not a valid regular expression
 |  reason: missing closing parenthesis
 |   --> arg
 |   | abc( 
 |   |  ^ missing closing parenthesis
 --> {test_dir}/bad-config/formats/invalid-sql/init2.sql
 | SELECT regexp_match('abc(', '123') 
 | FROM sqlite_master; 

@@ -1,61 +0,0 @@
✘ error: invalid JSON
 --> {test_dir}/bad-config-json/formats/invalid-json/format.json:4
 | parse error: object key and value must be separated by a colon (':')
 |  ar_log": { "abc" } }
 |  (right here) ------^
 | 
✘ error: “abc(” is not a valid regular expression
reason: missing closing parenthesis
 --> /invalid_key_log/level-pointer
 | abc( 
 |  ^ missing closing parenthesis 
 --> {test_dir}/bad-config-json/formats/invalid-key/format.json:4
 |  "level-pointer": "abc(", 
 = help: Property Synopsis
/invalid_key_log/level-pointer
Description
A regular-expression that matches the JSON-pointer of the level property
✘ error: “def[ghi” is not a valid regular expression
reason: missing terminating ] for character class
 --> /invalid_key_log/file-pattern
 | def[ghi 
 |  ^ missing terminating ] for character class
 --> {test_dir}/bad-config-json/formats/invalid-key/format.json:5
 |  "file-pattern": "def[ghi", 
 = help: Property Synopsis
/invalid_key_log/file-pattern
Description
A regular expression that restricts this format to log files with a matching name
⚠ warning: unexpected value for property “/invalid_key_log/value/test/identifiers”
 --> {test_dir}/bad-config-json/formats/invalid-key/format.json:14
 |  "identifiers": true 
 = help: Available Properties
kind <data-type>
collate <function>
unit/
identifier <bool>
foreign-key <bool>
hidden <bool>
action-list <string>
rewriter <command>
description <string>
✘ error: “-1.2” is not a valid value for “/invalid_key_log/timestamp-divisor”
reason: value cannot be less than or equal to zero
 --> {test_dir}/bad-config-json/formats/invalid-key/format.json:25
 |  "timestamp-divisor": -1.2 
 = help: Property Synopsis
/invalid_key_log/timestamp-divisor <number>
Description
The value to divide a numeric timestamp by in a JSON log.
✘ error: “foobar_log” is not a valid log format
reason: no regexes specified
 --> {test_dir}/bad-config-json/formats/invalid-json/format.json:3
✘ error: “foobar_log” is not a valid log format
reason: log message samples must be included in a format definition
 --> {test_dir}/bad-config-json/formats/invalid-json/format.json:3
✘ error: “invalid_key_log” is not a valid log format
reason: structured logs cannot have regexes
 --> {test_dir}/bad-config-json/formats/invalid-key/format.json:4
✘ error: invalid line format element “/invalid_key_log/line-format/0/field”
reason: “non-existent” is not a defined value
 --> {test_dir}/bad-config-json/formats/invalid-key/format.json:22

@@ -1,3 +0,0 @@
2017-03-24T20:06:26.240 1.1.1.1 GET 200 /example/uri/5
2017-03-24T20:12:47.764 1.1.1.1 GET 500 /example/uri/5
2017-03-24T20:15:31.694 1.1.1.1 GET 400 /example/uri/5

@@ -1,4 +0,0 @@
log_line,log_part,log_time,log_idle_msecs,log_level,log_mark,log_comment,log_tags,log_filters,client_ip,request/method,request/uri,request/size,response/status,details1,details2,details3
0,<NULL>,2017-03-24 20:06:26.240,0,info,0,<NULL>,<NULL>,<NULL>,1.1.1.1,GET,/example/uri/5,166,200,<NULL>,<NULL>,<NULL>
1,<NULL>,2017-03-24 20:12:47.764,381524,critical,0,<NULL>,<NULL>,<NULL>,1.1.1.1,GET,/example/uri/5,166,500,<NULL>,<NULL>,<NULL>
2,<NULL>,2017-03-24 20:15:31.694,163930,warning,0,<NULL>,<NULL>,<NULL>,1.1.1.1,GET,/example/uri/5,166,400,"{""foo"": ""bar""}","{""foo"": ""bar""}","{""foo"": ""bar""}"

@@ -1,2 +0,0 @@
 filepath  descriptor  mimetype  content 
{test_dir}/logfile_syslog.1.gz net.zlib.gzip.header application/json {"name":"logfile_syslog.1","mtime":"2007-11-03T16:23:00.000","comment":""} 

@@ -1,7 +0,0 @@
192.168.202.254 - - [20/Jul/2009:22:59:26 +0000] "GET /vmw/cgi/tramp HTTP/1.0" 200 134 "-" "gPXE/0.9.7"
 ├ org.lnav.test:
 ╰ Hello, World!
192.168.202.254 - - [20/Jul/2009:22:59:29 +0000] "GET /vmw/vSphere/default/vmkboot.gz HTTP/1.0" 404 46210 "-" "gPXE/0.9.7"
 📝 Annotations available, focus on this line and use :annotate to apply them
192.168.202.254 - - [20/Jul/2009:22:59:29 +0000] "GET /vmw/vSphere/default/vmkernel.gz HTTP/1.0" 200 78929 "-" "gPXE/0.9.7"
 📝 Annotations available, focus on this line and use :annotate to apply them

@@ -1,3 +0,0 @@
⚠ warning: not deleting regex101 entry “zpEnjV”
reason: delete code is not known for this entry
 = note: formats created by importing a regex101.com entry will not have a delete code

@@ -1,34 +1,7 @@
✘ error: invalid value “/unit_test_log/value/jobserver”
reason: no patterns have a capture named “jobserver”
 = note: the following captures are available:
 = help: values are populated from captures in patterns, so at least one pattern must have a capture with this value name
✘ error: invalid value “/unit_test_log/value/processid”
reason: no patterns have a capture named “processid”
 = note: the following captures are available:
 = help: values are populated from captures in patterns, so at least one pattern must have a capture with this value name
✘ error: invalid value “/unit_test_log/value/timestamp”
reason: no patterns have a capture named “timestamp”
 = note: the following captures are available:
 = help: values are populated from captures in patterns, so at least one pattern must have a capture with this value name
✘ error: invalid value “/unit_test_log/value/workqueue”
reason: no patterns have a capture named “workqueue”
 = note: the following captures are available:
 = help: values are populated from captures in patterns, so at least one pattern must have a capture with this value name
✘ error: invalid sample log message: "[03/22/2021 02:00:02 job1074.example.com db.db81.example_events 54026] {\"ELAPSED\":\"0.011\",\"LEVEL\":\"info\",\"MESSAGE\":\"finished in 0.011\\n\",\"PREFIX\":\"YFgyWQriCmsAAofJAAAAHg\",\"ROUTINGKEY\":\"EXAMPLE1366.Example.Events._Publish\"}"
reason: sample does not match any patterns
 --> regex101-home/.lnav/formats/installed/unit_test_log.json:26
 = note: the following shows how each pattern matched this sample:
[03/22/2021 02:00:02 job1074.example.com db.db81.example_events 54026] {"ELAPSED":"0.011","LEVEL":"info","MESSAGE":"finished in 0.011\n","PREFIX":"YFgyWQriCmsAAofJAAAAHg","ROUTINGKEY":"EXAMPLE1366.Example.Events._Publish"}
 = note: std = “”
✘ error: invalid sample log message: "[03/22/2021 02:00:02 job1074.example.com db.db81.example_events 54026] {\"ELAPSED\":\"0.011\",\"LEVEL\":\"info\",\"MESSAGE\":\"finished in 0.011\\n\",\"PREFIX\":\"YFgyWQriCmsAAofJAAAAHg\",\"ROUTINGKEY\":\"EXAMPLE1366.Example.Events._Publish\"}"
reason: sample does not match any patterns
 --> regex101-home/.lnav/formats/installed/unit_test_log.json:30
 = note: the following shows how each pattern matched this sample:
[03/22/2021 02:00:02 job1074.example.com db.db81.example_events 54026] {"ELAPSED":"0.011","LEVEL":"info","MESSAGE":"finished in 0.011\n","PREFIX":"YFgyWQriCmsAAofJAAAAHg","ROUTINGKEY":"EXAMPLE1366.Example.Events._Publish"}
 = note: std = “”
✘ error: invalid pattern: “/unit_test_log/regex/std”
reason: no timestamp capture found in the pattern
 = help: all log messages need a timestamp
✘ error: invalid value for property “/unit_test_log/timestamp-field”
reason: “timestamp” was not found in the pattern at /unit_test_log/regex/std
✘ error: invalid value for property “/unit_test_log/timestamp-field”
reason: “timestamp” was not found in the pattern at /unit_test_log/regex/std

@@ -1,5 +1,5 @@
✘ error: expecting an operation to perform on the std regex using regex101.com
 = help: the available subcommands are:
push: create/update an entry for this regex on regex101.com
pull: create a patch format file for this regular expression based on the entry in regex101.com
delete: delete the entry regex101.com that was created by a push operation
 push: create/update an entry for this regex on regex101.com
 pull: create a patch format file for this regular expression based on the entry in regex101.com
 delete: delete the entry regex101.com that was created by a push operation

@@ -1,3 +1,3 @@
✘ error: expecting an operation to perform on the std regular expression
 = help: the available subcommands are:
regex101: use regex101.com to edit this regular expression
 regex101: use regex101.com to edit this regular expression

@@ -2,6 +2,6 @@
 = note: this regex is currently associated with the following regex101.com entry:
https://regex101.com/r/zpEnjV
 = help: the available subcommands are:
push: create/update an entry for this regex on regex101.com
pull: create a patch format file for this regular expression based on the entry in regex101.com
delete: delete the entry regex101.com that was created by a push operation
 push: create/update an entry for this regex on regex101.com
 pull: create a patch format file for this regular expression based on the entry in regex101.com
 delete: delete the entry regex101.com that was created by a push operation

@@ -1,33 +0,0 @@
#!lnav -Nf
# This file is an export of an lnav session. You can type
# '|/path/to/this/file' in lnav to execute this file and
# restore the state of the session.
;SELECT raise_error('This session export was made with a newer version of lnav, please upgrade to ' || '0.11.0' || ' or later')
WHERE lnav_version() < '0.11.0' COLLATE naturalcase
# The files loaded into the session were:
;INSERT OR IGNORE INTO environ (name, value) VALUES ('LOG_DIR_0', '{top_srcdir_parent}')
:open $LOG_DIR_0/lnav/test/logfile_access_log.0
:rebuild
# The following SQL statements will restore the bookmarks,
# comments, and tags that were added in the session.
;SELECT total_changes() AS before_mark_changes
;UPDATE all_logs SET log_mark = 1, log_comment = NULL, log_tags = NULL WHERE log_time_msecs = 1248130769000 AND log_format = 'access_log' AND log_line_hash = 'v1:b05c1bdfe75cde41e151c89087e31951'
;SELECT 1 - (total_changes() - $before_mark_changes) AS failed_mark_changes
;SELECT echoln(printf('%sERROR%s: failed to restore %d bookmarks',
$ansi_red, $ansi_norm, $failed_mark_changes))
WHERE $failed_mark_changes != 0
# The following commands will restore the state of the LOG view.
:switch-to-view log
:goto 1

@@ -0,0 +1,7 @@
192.168.202.254 - - [20/Jul/2009:22:59:26 +0000] "GET /vmw/cgi/tramp HTTP/1.0" 200 134 "-" "gPXE/0.9.7"
├ org.lnav.test:
╰ Hello, World!
192.168.202.254 - - [20/Jul/2009:22:59:29 +0000] "GET /vmw/vSphere/default/vmkboot.gz HTTP/1.0" 404 46210 "-" "gPXE/0.9.7"
📝 Annotations available, focus on this line and use :annotate to apply them
192.168.202.254 - - [20/Jul/2009:22:59:29 +0000] "GET /vmw/vSphere/default/vmkernel.gz HTTP/1.0" 200 78929 "-" "gPXE/0.9.7"
📝 Annotations available, focus on this line and use :annotate to apply them

@@ -3,12 +3,13 @@
# '|/path/to/this/file' in lnav to execute this file and
# restore the state of the session.
;SELECT raise_error('This session export was made with a newer version of lnav, please upgrade to ' || '0.11.0' || ' or later')
WHERE lnav_version() < '0.11.0' COLLATE naturalcase
;SELECT raise_error('This session export was made with a newer version of lnav, please upgrade to ' || '0.11.2' || ' or later')
WHERE lnav_version() < '0.11.2' COLLATE naturalcase
# The files loaded into the session were:
# Set this environment variable to override this value or edit this script.
;INSERT OR IGNORE INTO environ (name, value) VALUES ('LOG_DIR_0', '{test_dir}')
:open $LOG_DIR_0/support-dump/logfile_access_log.0
@@ -19,7 +20,7 @@
# comments, and tags that were added in the session.
;SELECT total_changes() AS before_mark_changes
;UPDATE all_logs SET log_mark = 1, log_comment = NULL, log_tags = NULL WHERE log_time_msecs = 1248130769000 AND log_format = 'access_log' AND log_line_hash = 'v1:b05c1bdfe75cde41e151c89087e31951'
;UPDATE all_logs SET log_mark = 0, log_comment = NULL, log_tags = NULL, log_annotations = '{"org.lnav.test":"Hello, <span style=\"color: #f00\">World</span>!\n"}' WHERE log_time_msecs = 1248130766000 AND log_format = 'access_log' AND log_line_hash = 'v1:3f7e0f10f2473f83b2b4eacccfc9b4e2'
;SELECT 1 - (total_changes() - $before_mark_changes) AS failed_mark_changes
;SELECT echoln(printf('%sERROR%s: failed to restore %d bookmarks',
@@ -27,7 +28,13 @@
WHERE $failed_mark_changes != 0
# The following SQL statements will restore the filters that
# were added in the session.
;REPLACE INTO lnav_view_filters (view_name, enabled, type, language, pattern) VALUES ('log', 0, 'in', 'regex', 'credential status')
# The following commands will restore the state of the LOG view.
:switch-to-view log
:goto 1
:goto 0

@@ -1 +0,0 @@
error: sqlite3_exec failed -- lnav-error:{"level":"error","message":{"str":"invalid URL: “https://bad@[fe::”","attrs":[]},"reason":{"str":"Port number was not a decimal number between 0 and 65535","attrs":[]},"snippets":[],"help":{"str":"","attrs":[]}}

@@ -1,4 +0,0 @@
✘ error: failed to parse YAML content
reason: closing ] not found
 --> command-option:1
 | ;SELECT yaml_to_json('[abc') 

@@ -1,2 +0,0 @@
✘ error: unable to open file: non-existent:
reason: failed to ssh to host: ssh: Could not resolve hostname non-existent: nodename nor servname provided, or not known

@@ -1,111 +0,0 @@
/**
* Copyright (c) 2018, Timothy Stack
*
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
*
* * Redistributions of source code must retain the above copyright notice, this
* list of conditions and the following disclaimer.
* * Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
* * Neither the name of Timothy Stack nor the names of its contributors
* may be used to endorse or promote products derived from this software
* without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE REGENTS AND CONTRIBUTORS ''AS IS'' AND ANY
* EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
* WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
* DISCLAIMED. IN NO EVENT SHALL THE REGENTS OR CONTRIBUTORS BE LIABLE FOR ANY
* DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
* (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
* LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
* ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
* (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
* SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
#include "log_level.hh"
#include <ctype.h>
#include "config.h"
const char* level_names[LEVEL__MAX + 1] = {
"unknown",
"trace",
"debug5",
"debug4",
"debug3",
"debug2",
"debug",
"info",
"stats",
"notice",
"warning",
"error",
"critical",
"fatal",
"invalid",
nullptr,
};
log_level_t
abbrev2level(const char* levelstr, ssize_t len)
{
if (len == 0 || levelstr[0] == '\0') {
return LEVEL_UNKNOWN;
}
switch (toupper(levelstr[0])) {
case 'T':
return LEVEL_TRACE;
case 'D':
case 'V':
if (len > 1) {
switch (levelstr[len - 1]) {
case '2':
return LEVEL_DEBUG2;
case '3':
return LEVEL_DEBUG3;
case '4':
return LEVEL_DEBUG4;
case '5':
return LEVEL_DEBUG5;
}
}
return LEVEL_DEBUG;
case 'I':
if (len == 7 && toupper(levelstr[1]) == 'N'
&& toupper(levelstr[2]) == 'V' && toupper(levelstr[3]) == 'A'
&& toupper(levelstr[4]) == 'L' && toupper(levelstr[5]) == 'I'
&& toupper(levelstr[6]) == 'D')
{
return LEVEL_INVALID;
}
return LEVEL_INFO;
case 'S':
return LEVEL_STATS;
case 'N':
return LEVEL_NOTICE;
case 'W':
return LEVEL_WARNING;
case 'E':
return LEVEL_ERROR;
case 'C':
return LEVEL_CRITICAL;
case 'F':
return LEVEL_FATAL;
default:
return LEVEL_UNKNOWN;
}
}
int
levelcmp(const char* l1, ssize_t l1_len, const char* l2, ssize_t l2_len)
{
return abbrev2level(l1, l1_len) - abbrev2level(l2, l2_len);
}

@@ -1,159 +0,0 @@
Build[1][2] Docs[3][4] Coverage Status[5][6] lnav[7][8]
▌[1] - https://github.com/tstack/lnav/workflows/ci-build/badge.svg
▌[2] - https://github.com/tstack/lnav/actions?query=workflow%3Aci-build
▌[3] - https://readthedocs.org/projects/lnav/badge/?version=latest&style=plastic
▌[4] - https://docs.lnav.org
▌[5] - https://coveralls.io/repos/github/tstack/lnav/badge.svg?branch=master
▌[6] - https://coveralls.io/github/tstack/lnav?branch=master
▌[7] - https://snapcraft.io//lnav/badge.svg
▌[8] - https://snapcraft.io/lnav
This is the source repository for lnav, visit https://lnav.org[1] for
a high level overview.
▌[1] - https://lnav.org
LNAV The Logfile Navigator
The Log File Navigator, lnav for short, is an advanced log file viewer
for the small-scale. It is a terminal application that can understand
your log files and make it easy for you to find problems with little
to no setup.
Screenshot
The following screenshot shows a syslog file. Log lines are displayed
with highlights. Errors are red and warnings are yellow.
Screenshot[1][2]
▌[1] - file://{top_srcdir}/docs/assets/images/lnav-syslog-thumb.png
▌[2] - file://{top_srcdir}/docs/assets/images/lnav-syslog.png
Features
• Log messages from different files are collated together
into a single view
• Automatic detection of log format
• Automatic decompression of GZip and BZip2 files
• Filter log messages based on regular expressions
• Use SQL to analyze your logs
• And more...
Installation
Download a statically-linked binary for Linux/MacOS from the release
page[1]
▌[1] - https://github.com/tstack/lnav/releases/latest#release-artifacts
Usage
The only file installed is the executable,  lnav . You can execute it
with no arguments to view the default set of files:
$ lnav 
You can view all the syslog messages by running:
$ lnav /var/log/messages* 
Usage with  systemd-journald 
On systems running  systemd-journald , you can use  lnav  as the
pager:
$ journalctl | lnav 
or in follow mode:
$ journalctl -f | lnav 
Since  journalctl 's default output format omits the year, if you are
viewing logs which span multiple years you will need to change the
output format to include the year, otherwise  lnav  gets confused:
$ journalctl -o short-iso | lnav 
It is also possible to use  journalctl 's json output format and  lnav
will make use of additional fields such as PRIORITY and _SYSTEMD_UNIT:
$ journalctl -o json | lnav 
In case some MESSAGE fields contain special characters such as ANSI
color codes which are considered as unprintable by journalctl,
specifying  journalctl 's  -a  option might be preferable in order to
output those messages still in a non binary representation:
$ journalctl -a -o json | lnav 
If using systemd v236 or newer, the output fields can be limited to
the ones actually recognized by  lnav  for increased efficiency:
$ journalctl -o json --output-fields=MESSAGE,PRIORITY,_PID,SYSLOG_IDENTIFIER,_SYSTEMD_UNIT | lnav 
If your system has been running for a long time, for increased
efficiency you may want to limit the number of log lines fed into  lnav
, e.g. via  journalctl 's  -n  or  --since=...  options.
In case of a persistent journal, you may want to limit the number of
log lines fed into  lnav  via  journalctl 's  -b  option.
Links
• Main Site[1]
• Documentation[2] on Read the Docs
• Internal Architecture[3]
▌[1] - https://lnav.org
▌[2] - https://docs.lnav.org
▌[3] - file://{top_srcdir}/ARCHITECTURE.md
Contributing
• Become a Sponsor on GitHub[1]
▌[1] - https://github.com/sponsors/tstack
Building From Source
Prerequisites
The following software packages are required to build lnav:
• gcc/clang - A C++14-compatible compiler.
• libpcre - The Perl Compatible Regular Expression
(PCRE) library.
• sqlite - The SQLite database engine. Version 3.9.0
or higher is required.
• ncurses - The ncurses text UI library.
• readline - The readline line editing library.
• zlib - The zlib compression library.
• bz2 - The bzip2 compression library.
• libcurl - The cURL library for downloading files
from URLs. Version 7.23.0 or higher is required.
• libarchive - The libarchive library for opening archive
files, like zip/tgz.
• wireshark - The 'tshark' program is used to interpret
pcap files.
Build
Lnav follows the usual GNU style for configuring and installing
software:
Run  ./autogen.sh  if compiling from a cloned repository.
$ ./configure 
$ make 
$ sudo make install 
See Also
Angle-grinder[1] is a tool to slice and dice log files on the
command-line. If you're familiar with the SumoLogic query language,
you might find this tool more comfortable to work with.
▌[1] - https://github.com/rcoh/angle-grinder

@@ -134,14 +134,3 @@ run_cap_test ${lnav_test} -n \
-c ";UPDATE access_log SET log_annotations = '{\"abc\": \"def\"}' WHERE log_line = 0" \
-c ";SELECT log_line,log_annotations FROM access_log WHERE log_annotations IS NOT NULL" \
${test_dir}/logfile_access_log.0
export TEST_ANNO=1
run_cap_test ${lnav_test} -d /tmp/lnav.err -I ${test_dir} -n \
-c ':annotate' \
-c ':save-session' \
${test_dir}/logfile_access_log.0
run_cap_test ${lnav_test} -d /tmp/lnav.err -I ${test_dir} -n \
-c ':load-session' \
-c ':export-session-to -' \
${test_dir}/logfile_access_log.0

@@ -135,3 +135,14 @@ run_cap_test ${lnav_test} -n \
-c ":goto -10" \
-c ":test-comment restoring hidden fields" \
${test_dir}/logfile_bro_http.log.0
export TEST_ANNO=1
run_cap_test ${lnav_test} -d /tmp/lnav.err -I ${test_dir} -n \
-c ':annotate' \
-c ':save-session' \
support-dump/logfile_access_log.0
run_cap_test ${lnav_test} -d /tmp/lnav.err -I ${test_dir} -n \
-c ':load-session' \
-c ':export-session-to -' \
support-dump/logfile_access_log.0
