[build] fix cmake build and lexing of sqlite in docs

Defect Number:
    Reviewed By:
   Testing Done:
pull/757/head
Timothy Stack 4 years ago
parent e85fff0b88
commit 9c3f0eee89

@@ -26,6 +26,197 @@ format2csv.main(["",
                 os.path.join(this_dir, "format-table.csv"),
                 os.path.join(src_dir, "formats")])
import re

from pygments.lexer import RegexLexer, words
from pygments.token import Punctuation, Whitespace, Text, Comment, Operator, \
    Keyword, Name, String, Number, Generic
from sphinx.highlighting import lexers


# Custom lexer for the SQLite dialect used in the lnav docs, so that
# ``.. code-block:: custsqlite`` examples (with the leading ';' prompt and
# $VAR references) highlight cleanly.
class CustSqliteLexer(RegexLexer):
    name = 'custsqlite'

    flags = re.IGNORECASE
    tokens = {
        'root': [
            (r'\s+', Text),
            (r'--.*\n?', Comment.Single),
            (r'/\*', Comment.Multiline, 'multiline-comments'),
            (words((
                'ABORT',
                'ACTION',
                'ADD',
                'AFTER',
                'ALL',
                'ALTER',
                'ALWAYS',
                'ANALYZE',
                'AND',
                'AS',
                'ASC',
                'ATTACH',
                'AUTOINCREMENT',
                'BEFORE',
                'BEGIN',
                'BETWEEN',
                'BY',
                'CASCADE',
                'CASE',
                'CAST',
                'CHECK',
                'COLLATE',
                'COLUMN',
                'COMMIT',
                'CONFLICT',
                'CONSTRAINT',
                'CREATE',
                'CROSS',
                'CURRENT',
                'CURRENT_DATE',
                'CURRENT_TIME',
                'CURRENT_TIMESTAMP',
                'DATABASE',
                'DEFAULT',
                'DEFERRABLE',
                'DEFERRED',
                'DELETE',
                'DESC',
                'DETACH',
                'DISTINCT',
                'DO',
                'DROP',
                'EACH',
                'ELSE',
                'END',
                'ESCAPE',
                'EXCEPT',
                'EXCLUDE',
                'EXCLUSIVE',
                'EXISTS',
                'EXPLAIN',
                'FAIL',
                'FILTER',
                'FIRST',
                'FOLLOWING',
                'FOR',
                'FOREIGN',
                'FROM',
                'FULL',
                'GENERATED',
                'GLOB',
                'GROUP',
                'GROUPS',
                'HAVING',
                'IF',
                'IGNORE',
                'IMMEDIATE',
                'IN',
                'INDEX',
                'INDEXED',
                'INITIALLY',
                'INNER',
                'INSERT',
                'INSTEAD',
                'INTERSECT',
                'INTO',
                'IS',
                'ISNULL',
                'JOIN',
                'KEY',
                'LAST',
                'LEFT',
                'LIKE',
                'LIMIT',
                'MATCH',
                'NATURAL',
                'NO',
                'NOT',
                'NOTHING',
                'NOTNULL',
                'NULL',
                'NULLS',
                'OF',
                'OFFSET',
                'ON',
                'OR',
                'ORDER',
                'OTHERS',
                'OUTER',
                'OVER',
                'PARTITION',
                'PLAN',
                'PRAGMA',
                'PRECEDING',
                'PRIMARY',
                'QUERY',
                'RAISE',
                'RANGE',
                'RECURSIVE',
                'REFERENCES',
                'REGEXP',
                'REINDEX',
                'RELEASE',
                'RENAME',
                'REPLACE',
                'RESTRICT',
                'RIGHT',
                'ROLLBACK',
                'ROW',
                'ROWS',
                'SAVEPOINT',
                'SELECT',
                'SET',
                'TABLE',
                'TEMP',
                'TEMPORARY',
                'THEN',
                'TIES',
                'TO',
                'TRANSACTION',
                'TRIGGER',
                'UNBOUNDED',
                'UNION',
                'UNIQUE',
                'UPDATE',
                'USING',
                'VACUUM',
                'VALUES',
                'VIEW',
                'VIRTUAL',
                'WHEN',
                'WHERE',
                'WINDOW',
                'WITH',
                'WITHOUT'), suffix=r'\b'),
             Keyword),
            (words((
                'ARRAY', 'BIGINT', 'BINARY', 'BIT', 'BLOB', 'BOOLEAN', 'CHAR',
                'CHARACTER', 'DATE', 'DEC', 'DECIMAL', 'FLOAT', 'INT', 'INTEGER',
                'INTERVAL', 'NUMBER', 'NUMERIC', 'REAL', 'SERIAL', 'SMALLINT',
                'VARCHAR', 'VARYING', 'INT8', 'SERIAL8', 'TEXT'), suffix=r'\b'),
             Name.Builtin),
            (r'[+*/<>=~!@#%^&|`?-]', Operator),
            (r'[0-9]+', Number.Integer),
            # TODO: Backslash escapes?
            (r"'(''|[^'])*'", String.Single),
            (r'"(""|[^"])*"', String.Symbol),  # not a real string literal in ANSI SQL
            (r'[a-z_][\w$]*', Name),  # allow $s in strings for Oracle
            (r'\$[a-z_]+', Name),
            (r'[;:()\[\],.]', Punctuation)
        ],
        'multiline-comments': [
            (r'/\*', Comment.Multiline, 'multiline-comments'),
            (r'\*/', Comment.Multiline, '#pop'),
            (r'[^/*]+', Comment.Multiline),
            (r'[/*]', Comment.Multiline)
        ]
    }

    def analyse_text(text):
        return 0.01


# Register the lexer with Sphinx under the name used by the docs.
lexers['custsqlite'] = CustSqliteLexer(startinline=True)
# -- General configuration -----------------------------------------------------

# If your documentation needs a minimal Sphinx version, state it here.
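
Because the lexer above is registered with Pygments/Sphinx under the name custsqlite, it can be sanity-checked outside of a full docs build. The snippet below is an illustrative sketch only, not part of this commit; it assumes it runs where CustSqliteLexer from the conf.py changes above is in scope.

    # Illustrative only: confirm a prompt-prefixed lnav statement tokenizes.
    from pygments import lex
    from pygments.token import Keyword, Punctuation

    lexer = CustSqliteLexer(startinline=True)
    tokens = list(lex(";SELECT c_ip FROM access_log", lexer))

    assert tokens[0] == (Punctuation, ';')           # the leading ';' prompt character
    assert any(ttype in Keyword and value == 'SELECT'  # SELECT lexed as a keyword
               for ttype, value in tokens)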

@@ -33,8 +33,8 @@ The following high-level configuration options are available:

.. jsonschema:: ../../src/internals/config-v1.schema.json#/properties/ui/properties/keymap

-Themes
-------
+Theme Definitions
+-----------------

User interface themes are also defined through the JSON configuration files.
@@ -46,8 +46,8 @@ User interface themes are also defined through the JSON configuration files.

.. _keymaps:

-Keymaps
--------
+Keymap Definitions
+------------------

Keymaps in **lnav** map a key sequence to a command to execute.

@@ -47,7 +47,7 @@ the prompt to guide you in the usage of SQL keywords and functions.

A simple query to perform on an Apache access log might be to get the average
and maximum number of bytes returned by the server, grouped by IP address:

-.. code-block:: sql
+.. code-block:: custsqlite

    ;SELECT c_ip, avg(sc_bytes), max(sc_bytes) FROM access_log GROUP BY c_ip
@@ -132,7 +132,7 @@ Next, we will create a view over the :code:`dhclient_ip` table that returns

the log message line number, the IP address from the current row and the IP
address from the previous row:

-.. code-block:: sql
+.. code-block:: custsqlite

    ;CREATE VIEW IF NOT EXISTS dhclient_ip_changes AS SELECT log_line, ip, lag(ip) OVER (ORDER BY log_line) AS prev_ip FROM dhclient_ip
@@ -140,7 +140,7 @@ Finally, the following :code:`UPDATE` statement will concatenate the tag

"#ipchanged" onto the :code:`log_tags` column for any rows in the view where
the current IP is different from the previous IP:

-.. code-block:: sql
+.. code-block:: custsqlite

    ;UPDATE syslog_log SET log_tags = json_concat(log_tags, '#ipchanged') WHERE log_line IN (SELECT log_line FROM dhclient_ip_changes WHERE ip != prev_ip)
@@ -177,9 +177,9 @@ Environment variables can be accessed in queries using the usual syntax of

:code:`$VAR_NAME`. For example, to read the value of the "USER" variable, you
can write:

-.. code-block:: sql
+.. code-block:: custsqlite

-    SELECT $USER
+    ;SELECT $USER

.. _collators:

@@ -31,7 +31,7 @@ The **environ** table gives you access to the **lnav** process' environment

variables. You can SELECT, INSERT, and UPDATE environment variables, like
so:

-.. code-block:: sql
+.. code-block:: custsqlite

    ;SELECT * FROM environ WHERE name = 'SHELL'
        name  value
@@ -45,7 +45,7 @@ from **lnav**'s SQL environment to **lnav**'s commands. For example, the

named "FILENAME" and then open it in **lnav** by referencing it with
"$FILENAME":

-.. code-block:: sql
+.. code-block:: custsqlite

    ;INSERT INTO environ VALUES ('FILENAME', '/path/to/file')
    :open $FILENAME

@@ -98,7 +98,6 @@ endfunction(bin2c)

foreach(FILE_TO_LINK
        ansi-palette.json
        xterm-palette.json
-       default-log-formats.json
        help.txt
        init.sql)
    string(REPLACE "." "-" DST_FILE "${FILE_TO_LINK}")
@@ -114,6 +113,53 @@ foreach(FILE_TO_LINK
    )
endforeach(FILE_TO_LINK)
set(FORMAT_FILES
    formats/access_log.json
    formats/alb_log.json
    formats/autodeploy_log.json
    formats/block_log.json
    formats/candlepin_log.json
    formats/choose_repo_log.json
    formats/cups_log.json
    formats/dpkg_log.json
    formats/elb_log.json
    formats/engine_log.json
    formats/error_log.json
    formats/fsck_hfs_log.json
    formats/glog_log.json
    formats/haproxy_log.json
    formats/java_log.json
    formats/journald_json_log.json
    formats/katello_log.json
    formats/openam_log.json
    formats/openamdb_log.json
    formats/openstack_log.json
    formats/page_log.json
    formats/papertrail_log.json
    formats/snaplogic_log.json
    formats/sssd_log.json
    formats/strace_log.json
    formats/sudo_log.json
    formats/syslog_log.json
    formats/tcf_log.json
    formats/tcsh_history.json
    formats/uwsgi_log.json
    formats/vdsm_log.json
    formats/vmk_log.json
    formats/vmw_log.json
    formats/xmlrpc_log.json
    )

set(FORMAT_FILE_PATHS ${FORMAT_FILES})
list(TRANSFORM FORMAT_FILE_PATHS PREPEND "${CMAKE_CURRENT_SOURCE_DIR}/")

add_custom_command(
    OUTPUT default-formats.h default-formats.c
    COMMAND bin2c -n lnav_format_json default-formats ${FORMAT_FILE_PATHS}
    DEPENDS bin2c ${FORMAT_FILES}
    )
list(APPEND GEN_SRCS default-formats.h default-formats.c)
set(CONFIG_FILES
    root-config.json
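
The CMake block added above enumerates the bundled log format definitions and runs the bin2c helper (referenced in the surrounding CMakeLists context) to generate default-formats.h/.c, which are appended to GEN_SRCS so the formats get compiled into the binary. For readers unfamiliar with the "embed a file as a C byte array" step that a bin2c-style tool performs, here is a minimal illustrative sketch; it is not lnav's actual bin2c, and the script name and CLI here are made up.

    # Illustrative bin2c-style sketch: dump one file as a C byte array.
    # NOT lnav's bin2c; usage: python embed.py SYMBOL INPUT_FILE > output.c
    import sys

    def emit_c_array(symbol, data):
        # Produce: const unsigned char SYMBOL[] = { 0x7b, 0x0a, ... };
        hex_bytes = [f"0x{b:02x}" for b in data]
        rows = [", ".join(hex_bytes[i:i + 12]) for i in range(0, len(hex_bytes), 12)]
        body = ",\n    ".join(rows)
        return (f"const unsigned char {symbol}[] = {{\n    {body}\n}};\n"
                f"const unsigned long {symbol}_len = {len(data)};\n")

    if __name__ == "__main__":
        symbol, path = sys.argv[1], sys.argv[2]
        with open(path, "rb") as src:
            sys.stdout.write(emit_c_array(symbol, src.read()))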

@@ -565,7 +565,7 @@ void format_help_text_for_rst(const help_text &ht,

    for (auto &example: ht.ht_example) {
        fprintf(rst_file, " %s:\n\n", example.he_description);
        fprintf(rst_file, " .. code-block:: %s\n\n",
-               is_sql ? "sql" : "");
+               is_sql ? "custsqlite" : "");
        if (ht.ht_context == help_context_t::HC_COMMAND) {
            fprintf(rst_file, " %s%s %s\n", prefix, ht.ht_name,
                    example.he_cmd);
