[data-parser] fix some issues with parsing hierarchical data and improve the debugger

pull/318/head
Timothy Stack 8 years ago
parent 1477c2ae51
commit 3167302722

@ -6,6 +6,7 @@ set(diag_STAT_SRCS
command_executor.cc
curl_looper.cc
db_sub_source.cc
elem_to_json.cc
environ_vtab.cc
extension-functions.c
field_overlay_source.cc
@ -88,6 +89,7 @@ set(diag_STAT_SRCS
concise_index.hh
column_namer.hh
curl_looper.hh
elem_to_json.hh
field_overlay_source.hh
filter_observer.hh
format-text-files.hh

@ -123,6 +123,7 @@ noinst_HEADERS = \
data_parser.hh \
default-log-formats-json.hh \
db_sub_source.hh \
elem_to_json.hh \
environ_vtab.hh \
field_overlay_source.hh \
filter_observer.hh \
@ -223,6 +224,7 @@ libdiag_a_SOURCES = \
command_executor.cc \
curl_looper.cc \
db_sub_source.cc \
elem_to_json.cc \
environ_vtab.cc \
extension-functions.c \
field_overlay_source.cc \

@ -175,6 +175,10 @@ data_format_state_t dfs_comma_next(data_format_state_t state,
switch (next_token) {
case DT_COLON:
case DT_EQUALS:
case DT_LPAREN:
case DT_LCURLY:
case DT_LSQUARE:
case DT_LANGLE:
retval = DFS_VALUE;
break;

@ -47,12 +47,13 @@
#include "byte_array.hh"
#include "data_scanner.hh"
#define ELEMENT_LIST_T(var) var("" #var, __FILE__, __LINE__)
#define ELEMENT_LIST_T(var) var("" #var, __FILE__, __LINE__, group_depth)
#define PUSH_FRONT(elem) push_front(elem, __FILE__, __LINE__)
#define PUSH_BACK(elem) push_back(elem, __FILE__, __LINE__)
#define POP_FRONT(elem) pop_front(__FILE__, __LINE__)
#define POP_BACK(elem) pop_back(__FILE__, __LINE__)
#define CLEAR(elem) clear2(__FILE__, __LINE__)
#define SWAP(other) swap(other, __FILE__, __LINE__)
#define SPLICE(pos, other, first, last) splice(pos, other, first, last, \
__FILE__, __LINE__)
@ -102,16 +103,17 @@ data_format_state_t dfs_semi_next(data_format_state_t state,
data_format_state_t dfs_comma_next(data_format_state_t state,
data_token_t next_token);
#define LIST_INIT_TRACE \
do { \
if (TRACE_FILE != NULL) { \
fprintf(TRACE_FILE, \
"%p %s:%d %s %s\n", \
this, \
fn, line, \
__func__, \
varname); \
} \
#define LIST_INIT_TRACE \
do { \
if (TRACE_FILE != NULL) { \
fprintf(TRACE_FILE, \
"%p %s:%d %s %s %d\n", \
this, \
fn, line, \
__func__, \
varname, \
group_depth); \
} \
} while (false)
#define LIST_DEINIT_TRACE \
@ -165,6 +167,18 @@ data_format_state_t dfs_comma_next(data_format_state_t state,
} \
} while (false);
#define SWAP_TRACE(other) \
do { \
if (TRACE_FILE != NULL) { \
fprintf(TRACE_FILE, \
"%p %s:%d %s %p\n", \
this, \
fn, line, \
__func__, \
&other); \
} \
} while (false);
#define POINT_TRACE(name) \
do { \
if (TRACE_FILE) { \
@ -175,6 +189,33 @@ data_format_state_t dfs_comma_next(data_format_state_t state,
} \
} while (false);
#define FORMAT_TRACE(elist) \
do { \
if (TRACE_FILE) { \
const data_format &df = elist.el_format; \
fprintf(TRACE_FILE, \
"%p %s:%d format %d %s %s %s %s %s\n", \
&elist, \
__FILE__, __LINE__, \
group_depth, \
data_scanner::token2name(df.df_appender), \
data_scanner::token2name(df.df_terminator), \
data_scanner::token2name(df.df_qualifier), \
data_scanner::token2name(df.df_separator), \
data_scanner::token2name(df.df_prefix_terminator)); \
} \
} while (false);
#define CONSUMED_TRACE(elist) \
do { \
if (TRACE_FILE) { \
fprintf(TRACE_FILE, \
"%p %s:%d consumed\n", \
&elist, \
__FILE__, __LINE__); \
} \
} while (false);
class data_parser {
public:
static data_format FORMAT_SEMI;
@ -190,7 +231,7 @@ public:
class element_list_t : public std::list<element> {
public:
element_list_t(const char *varname, const char *fn, int line)
element_list_t(const char *varname, const char *fn, int line, int group_depth = -1)
{
LIST_INIT_TRACE;
}
@ -200,6 +241,7 @@ public:
const char *varname = "_anon2_";
const char *fn = __FILE__;
int line = __LINE__;
int group_depth = -1;
LIST_INIT_TRACE;
};
@ -251,6 +293,12 @@ public:
this->std::list<element>::clear();
};
void swap(element_list_t &other, const char *fn, int line) {
SWAP_TRACE(other);
this->std::list<element>::swap(other);
}
void splice(iterator pos,
element_list_t &other,
iterator first,
@ -310,14 +358,14 @@ public:
return *this;
};
void assign_elements(element_list_t &subs)
void assign_elements(element_list_t &subs)
{
if (this->e_sub_elements == NULL) {
this->e_sub_elements = new element_list_t("_sub_", __FILE__,
__LINE__);
this->e_sub_elements->el_format = subs.el_format;
}
this->e_sub_elements->swap(subs);
this->e_sub_elements->SWAP(subs);
this->update_capture();
};
@ -331,14 +379,14 @@ public:
}
};
const element & get_pair_value(void) const
const element &get_pair_value(void) const
{
require(this->e_token == DNT_PAIR);
return this->e_sub_elements->back();
};
data_token_t value_token(void) const
data_token_t value_token(void) const
{
data_token_t retval = DT_INVALID;
@ -357,6 +405,23 @@ public:
return retval;
};
const element &get_value_elem() const {
if (this->e_token == DNT_VALUE) {
if (this->e_sub_elements != NULL &&
this->e_sub_elements->size() == 1) {
return this->e_sub_elements->front();
}
}
return *this;
};
const element &get_pair_elem() const {
if (this->e_token == DNT_VALUE) {
return this->e_sub_elements->front();
}
return *this;
}
void print(FILE *out, pcre_input &pi, int offset =
0) const
{
@ -503,7 +568,7 @@ private:
};
void pairup(schema_id_t *schema, element_list_t &pairs_out,
element_list_t &in_list)
element_list_t &in_list, int group_depth = 0)
{
element_list_t ELEMENT_LIST_T(el_stack), ELEMENT_LIST_T(free_row),
ELEMENT_LIST_T(key_comps), ELEMENT_LIST_T(value),
@ -514,13 +579,15 @@ private:
POINT_TRACE("pairup_start");
FORMAT_TRACE(in_list);
for (element_list_t::iterator iter = in_list.begin();
iter != in_list.end();
++iter) {
if (iter->e_token == DNT_GROUP) {
element_list_t ELEMENT_LIST_T(group_pairs);
this->pairup(NULL, group_pairs, *iter->e_sub_elements);
this->pairup(NULL, group_pairs, *iter->e_sub_elements, group_depth + 1);
if (!group_pairs.empty()) {
iter->assign_elements(group_pairs);
}
@ -535,7 +602,7 @@ private:
}
}
else if (iter->e_token == in_list.el_format.df_terminator) {
this->end_of_value(el_stack, key_comps, value, in_list);
this->end_of_value(el_stack, key_comps, value, in_list, group_depth);
key_comps.PUSH_BACK(*iter);
}
@ -639,12 +706,16 @@ private:
POINT_TRACE("pairup_eol");
if (el_stack.empty()) {
CONSUMED_TRACE(in_list);
// Only perform the free-row logic at the top level, if we're in a group
// assume it is a list.
if (group_depth < 1 && el_stack.empty()) {
free_row.SPLICE(free_row.begin(),
key_comps, key_comps.begin(), key_comps.end());
}
else {
this->end_of_value(el_stack, key_comps, value, in_list);
this->end_of_value(el_stack, key_comps, value, in_list, group_depth);
}
POINT_TRACE("pairup_stack");
@ -764,6 +835,10 @@ private:
}
}
if (group_depth >= 1 && pairs_out.empty() && !free_row.empty()) {
pairs_out.SWAP(free_row);
}
if (pairs_out.empty() && !free_row.empty()) {
while (!free_row.empty()) {
switch (free_row.front().e_token) {
@ -957,7 +1032,8 @@ private:
void end_of_value(element_list_t &el_stack,
element_list_t &key_comps,
element_list_t &value,
const element_list_t &in_list) {
const element_list_t &in_list,
int group_depth) {
key_comps.remove_if(element_if(in_list.el_format.df_terminator));
key_comps.remove_if(element_if(DT_COMMA));
value.remove_if(element_if(in_list.el_format.df_terminator));

@ -0,0 +1,210 @@
/**
* Copyright (c) 2016, Timothy Stack
*
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
*
* * Redistributions of source code must retain the above copyright notice, this
* list of conditions and the following disclaimer.
* * Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
* * Neither the name of Timothy Stack nor the names of its contributors
* may be used to endorse or promote products derived from this software
* without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE REGENTS AND CONTRIBUTORS ''AS IS'' AND ANY
* EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
* WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
* DISCLAIMED. IN NO EVENT SHALL THE REGENTS OR CONTRIBUTORS BE LIABLE FOR ANY
* DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
* (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
* LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON
* ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
* (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
* SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
#include "config.h"
#include "elem_to_json.hh"
using namespace std;
/**
 * Convert a single parsed element to a JSON value.
 *
 * @param gen  The yajl generator that receives the JSON output.
 * @param dp   The parser that owns the element, used to retrieve the
 *             element's string value.
 * @param elem The element to convert.
 */
static
void element_to_json(yajl_gen gen, data_parser &dp, const data_parser::element &elem)
{
    size_t value_len;
    const char *value_str = dp.get_element_string(elem, value_len);

    switch (elem.value_token()) {
    case DT_NUMBER: {
        yajl_gen_number(gen, value_str, value_len);
        break;
    }
    case DNT_GROUP: {
        // Groups become nested containers; 'false' marks this as a
        // non-root conversion.
        elements_to_json(gen, dp, elem.get_value_elem().e_sub_elements, false);
        break;
    }
    case DNT_PAIR: {
        const data_parser::element &pair_elem = elem.get_pair_elem();
        string key_str = dp.get_element_string(pair_elem.e_sub_elements->front());

        if (!key_str.empty()) {
            // Named pairs become a singleton object: { "key": value }.
            yajlpp_map singleton_map(gen);

            singleton_map.gen(key_str);
            element_to_json(gen, dp, pair_elem.get_pair_value());
        }
        else {
            // Anonymous pairs emit the bare value.
            element_to_json(gen, dp, pair_elem.get_pair_value());
        }
        break;
    }
    case DT_CONSTANT: {
        // Require an exact length match so that a prefix (e.g. "tru") or
        // a longer token is not mistaken for a boolean constant.
        if (value_len == 4 &&
            strncasecmp("true", value_str, value_len) == 0) {
            yajl_gen_bool(gen, true);
        }
        else if (value_len == 5 &&
                 strncasecmp("false", value_str, value_len) == 0) {
            yajl_gen_bool(gen, false);
        }
        else {
            yajl_gen_null(gen);
        }
        break;
    }
    default:
        // Everything else is emitted as a (possibly escaped) string.
        yajl_gen_pstring(gen, value_str, value_len);
        break;
    }
}
/**
 * Generate a JSON object where each pair in the list becomes a member.
 * Pairs whose key is empty are assigned a synthetic "col_N" name, with N
 * incremented per anonymous pair.
 *
 * @param gen The yajl generator that receives the JSON output.
 * @param dp  The parser that owns the elements.
 * @param el  The list of DNT_PAIR elements to convert.
 */
static
void map_elements_to_json2(yajl_gen gen, data_parser &dp, data_parser::element_list_t *el)
{
    yajlpp_map root_map(gen);
    int anon_col_index = 0;

    for (data_parser::element_list_t::iterator pair_iter = el->begin();
         pair_iter != el->end();
         ++pair_iter) {
        const data_parser::element &pair_value = pair_iter->get_pair_value();

        if (pair_value.value_token() == DT_INVALID) {
            log_debug("invalid!!");
            // continue;
        }

        std::string key_str = dp.get_element_string(
            pair_iter->e_sub_elements->front());

        if (key_str.empty()) {
            char anon_name[32];

            snprintf(anon_name, sizeof(anon_name), "col_%d", anon_col_index);
            key_str = anon_name;
            anon_col_index += 1;
        }
        root_map.gen(key_str);
        element_to_json(gen, dp, pair_value);
    }
}
/**
 * Emit every element in the list as a JSON value without opening a
 * surrounding container; the caller supplies the array/object scope.
 *
 * @param gen The yajl generator that receives the JSON output.
 * @param dp  The parser that owns the elements.
 * @param el  The list of elements to convert.
 */
static
void list_body_elements_to_json(yajl_gen gen, data_parser &dp, data_parser::element_list_t *el)
{
    data_parser::element_list_t::iterator elem_iter;

    for (elem_iter = el->begin(); elem_iter != el->end(); ++elem_iter) {
        element_to_json(gen, dp, *elem_iter);
    }
}
/**
 * Emit the elements in the list as a JSON array.
 *
 * @param gen The yajl generator that receives the JSON output.
 * @param dp  The parser that owns the elements.
 * @param el  The list of elements to convert.
 */
static
void list_elements_to_json(yajl_gen gen, data_parser &dp, data_parser::element_list_t *el)
{
    // The array is opened here and closed by root_array's destructor.
    yajlpp_array root_array(gen);

    list_body_elements_to_json(gen, dp, el);
}
/**
 * Generate JSON for a list of pairs.  If there is more than one pair and
 * every non-empty key is distinct, the pairs become members of a JSON
 * object; otherwise the list is rendered as a JSON array.
 *
 * @param gen The yajl generator that receives the JSON output.
 * @param dp  The parser that owns the elements.
 * @param el  The list of DNT_PAIR elements to convert.
 */
static
void map_elements_to_json(yajl_gen gen, data_parser &dp, data_parser::element_list_t *el)
{
    bool keys_are_unique = el->size() > 1;
    vector<string> seen_keys;

    for (data_parser::element_list_t::iterator pair_iter = el->begin();
         pair_iter != el->end();
         ++pair_iter) {
        const data_parser::element &pair_value = pair_iter->get_pair_value();

        if (pair_value.value_token() == DT_INVALID) {
            log_debug("invalid!!");
            // continue;
        }

        std::string key_str = dp.get_element_string(
            pair_iter->e_sub_elements->front());

        if (key_str.empty()) {
            // Anonymous keys get synthetic names later, so they cannot
            // collide here.
            continue;
        }
        if (find(seen_keys.begin(), seen_keys.end(), key_str) !=
            seen_keys.end()) {
            keys_are_unique = false;
            break;
        }
        seen_keys.push_back(key_str);
    }
    seen_keys.clear();

    if (keys_are_unique) {
        map_elements_to_json2(gen, dp, el);
    }
    else {
        list_elements_to_json(gen, dp, el);
    }
}
// Top-level entry point: convert a parsed element list to JSON.
// The 'root' flag enables a special case for a single top-level pair so
// that "key=group" input renders as the group itself rather than being
// wrapped in an extra object.
void elements_to_json(yajl_gen gen, data_parser &dp, data_parser::element_list_t *el, bool root)
{
if (el->empty()) {
// No elements at all -> JSON null.
yajl_gen_null(gen);
}
else {
switch (el->front().e_token) {
case DNT_PAIR: {
if (root && el->size() == 1) {
// A lone pair at the root is unwrapped instead of being
// emitted through map_elements_to_json().
const data_parser::element &pair_elem = el->front().get_pair_elem();
std::string key_str = dp.get_element_string(
pair_elem.e_sub_elements->front());
if (key_str.empty() &&
el->front().get_pair_value().value_token() == DNT_GROUP) {
// Anonymous pair holding a group: emit the group directly.
element_to_json(gen, dp, el->front().get_pair_value());
}
else {
// Named (or non-group) pair: emit a singleton object,
// synthesizing "col_0" when the key is empty.
yajlpp_map singleton_map(gen);
if (key_str.empty()) {
key_str = "col_0";
}
singleton_map.gen(key_str);
element_to_json(gen, dp, pair_elem.get_pair_value());
}
}
else {
// Multiple pairs (or nested call): decide object vs. array
// based on key uniqueness.
map_elements_to_json(gen, dp, el);
}
break;
}
default:
// Non-pair front element: render the whole list as an array.
list_elements_to_json(gen, dp, el);
break;
}
}
}

@ -0,0 +1,38 @@
/**
* Copyright (c) 2016, Timothy Stack
*
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
*
* * Redistributions of source code must retain the above copyright notice, this
* list of conditions and the following disclaimer.
* * Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
* * Neither the name of Timothy Stack nor the names of its contributors
* may be used to endorse or promote products derived from this software
* without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE REGENTS AND CONTRIBUTORS ''AS IS'' AND ANY
* EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
* WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
* DISCLAIMED. IN NO EVENT SHALL THE REGENTS OR CONTRIBUTORS BE LIABLE FOR ANY
* DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
* (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
* LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON
* ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
* (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
* SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
// Identifiers beginning with a double underscore are reserved for the
// implementation ([lex.name]), so the include guard avoids them.
#ifndef elem_to_json_hh
#define elem_to_json_hh

#include "yajl/api/yajl_gen.h"

#include "data_parser.hh"

/**
 * Convert a list of parsed elements to JSON.
 *
 * @param gen  The yajl generator that receives the JSON output.
 * @param dp   The parser that owns the elements.
 * @param el   The list of elements to convert.
 * @param root True when called on the top-level element list.
 */
void elements_to_json(yajl_gen gen, data_parser &dp, data_parser::element_list_t *el, bool root = true);

#endif

@ -175,7 +175,7 @@ public:
}
this->ldt_pairs.clear();
this->ldt_pairs.swap(dp.dp_pairs);
this->ldt_pairs.swap(dp.dp_pairs, __FILE__, __LINE__);
this->ldt_instance += 1;
return true;

@ -23,6 +23,7 @@
#include "sqlite-extension-func.h"
#include "data_scanner.hh"
#include "data_parser.hh"
#include "elem_to_json.hh"
typedef struct {
char * s;
@ -36,6 +37,8 @@ typedef struct {
#define JSON_SUBTYPE 74 /* Ascii for "J" */
using namespace std;
static
cache_entry *find_re(sqlite3_context *ctx, const char *re)
{
@ -244,105 +247,6 @@ void regexp_match(sqlite3_context *ctx, int argc, sqlite3_value **argv)
#endif
}
static
void elements_to_json(yajl_gen gen, data_parser &dp, data_parser::element_list_t *el);
static
void element_to_json(yajl_gen gen, data_parser &dp, const data_parser::element &elem)
{
size_t value_len;
const char *value_str = dp.get_element_string(elem, value_len);
switch (elem.value_token()) {
case DT_NUMBER: {
yajl_gen_number(gen, value_str, value_len);
break;
}
case DNT_GROUP: {
elements_to_json(gen, dp, elem.e_sub_elements);
break;
}
case DNT_PAIR: {
const data_parser::element &pair_elem = elem.e_sub_elements->front();
yajlpp_map singleton_map(gen);
singleton_map.gen(dp.get_element_string(pair_elem.e_sub_elements->front()));
element_to_json(gen, dp, pair_elem.get_pair_value());
break;
}
case DT_CONSTANT: {
if (strncasecmp("true", value_str, value_len) == 0) {
yajl_gen_bool(gen, true);
}
else if (strncasecmp("false", value_str, value_len) == 0) {
yajl_gen_bool(gen, false);
}
else {
yajl_gen_null(gen);
}
break;
}
default:
yajl_gen_pstring(gen, value_str, value_len);
break;
}
}
static
void map_elements_to_json(yajl_gen gen, data_parser &dp, data_parser::element_list_t *el)
{
yajlpp_map root_map(gen);
column_namer cn;
for (data_parser::element_list_t::iterator iter = el->begin();
iter != el->end();
++iter) {
const data_parser::element &pvalue = iter->get_pair_value();
if (pvalue.value_token() == DT_INVALID) {
log_debug("invalid!!");
// continue;
}
std::string key_str = dp.get_element_string(
iter->e_sub_elements->front());
string colname = cn.add_column(key_str);
root_map.gen(colname);
element_to_json(gen, dp, pvalue);
}
}
static
void list_elements_to_json(yajl_gen gen, data_parser &dp, data_parser::element_list_t *el)
{
yajlpp_array root_array(gen);
for (data_parser::element_list_t::iterator iter = el->begin();
iter != el->end();
++iter) {
element_to_json(gen, dp, *iter);
}
}
static
void elements_to_json(yajl_gen gen, data_parser &dp, data_parser::element_list_t *el)
{
if (el->empty()) {
yajl_gen_null(gen);
}
else {
switch (el->front().e_token) {
case DNT_PAIR:
map_elements_to_json(gen, dp, el);
break;
default:
list_elements_to_json(gen, dp, el);
break;
}
}
}
static
void extract(sqlite3_context *ctx, int argc, sqlite3_value **argv)
{

@ -237,6 +237,13 @@ dist_noinst_DATA = \
datafile_simple.11 \
datafile_simple.12 \
datafile_simple.13 \
datafile_simple.14 \
datafile_simple.15 \
datafile_simple.16 \
datafile_simple.17 \
datafile_simple.18 \
datafile_simple.19 \
datafile_simple.20 \
datafile_xml.0 \
listview_output.0 \
listview_output.1 \

@ -17,3 +17,9 @@ pair 8:11 ^-^ c=3
pair 12:13 ^ 4
msg :a=1 b=2 c=3,4
format :a=# b=# c=#,#
{
"a": 1,
"b": 2,
"c": 3,
"col_0": 4
}

@ -7,3 +7,7 @@ pair 0:13 ^-----------^ current speed
pair 15:17 ^^ 38
msg :current speed: 38 mph
format :#: # mph
{
"col_0": "current speed",
"col_1": 38
}

@ -4,3 +4,6 @@ quot 16:26 ^--------^ pm/runtime
pair 16:26 ^--------^ pm/runtime
msg :quoted string u'pm/runtime'
format :quoted string #
{
"col_0": "pm/runtime"
}

@ -7,3 +7,7 @@ vers 25:31 ^----^ 1.2-a1
pair 25:31 ^----^ 1.2-a1
msg :version numbers 0.6.16 1.2-a1
format :version numbers # #
{
"col_0": "0.6.16",
"col_1": "1.2-a1"
}

@ -5,3 +5,6 @@
pair 0:34 ^--------------------------------^ kickoff_duration=4.41074371338e-05
msg :kickoff_duration=4.41074371338e-05
format :kickoff_duration=#
{
"kickoff_duration": 4.41074371338e-05
}

@ -5,3 +5,6 @@
pair 0:34 ^--------------------------------^ kickoff_duration=4.41074371338E-05
msg :kickoff_duration=4.41074371338E-05
format :kickoff_duration=#
{
"kickoff_duration": 4.41074371338E-05
}

@ -18,10 +18,7 @@ wspc 55:56 ^
val 49:60 ^---------^ Fanime 2015
pair 44:60 ^--------------^ name=Fanime 2015
key 62:67 ^---^ route
key 69:69 ^
sym 69:79 ^--------^ CloudEntry
pair 69:79 ^--------^ CloudEntry
key 80:80 ^
key 69:79 ^--------^ CloudEntry
key 80:86 ^----^ doc_id
sym 87:131 ^------------------------------------------^ 1g5Yho6JmysVGRO-Xmfurra_cQRFb0nTIfZRhGompweg
val 87:131 ^------------------------------------------^ 1g5Yho6JmysVGRO-Xmfurra_cQRFb0nTIfZRhGompweg
@ -33,7 +30,8 @@ word 146:151
val 141:151 ^--------^ Baby Names
pair 132:151 ^-----------------^ filename=Baby Names
grp 80:151 ^---------------------------------------------------------------------^ doc_id=1g5Yho6JmysVGRO-Xmfurra_cQRFb0nTIfZRhGompweg,filename=Baby Names
pair 80:151 ^---------------------------------------------------------------------^ doc_id=1g5Yho6JmysVGRO-Xmfurra_cQRFb0nTIfZRhGompweg,filename=Baby Names
val 80:151 ^---------------------------------------------------------------------^ doc_id=1g5Yho6JmysVGRO-Xmfurra_cQRFb0nTIfZRhGompweg,filename=Baby Names
pair 69:151 ^--------------------------------------------------------------------------------^ CloudEntry(doc_id=1g5Yho6JmysVGRO-Xmfurra_cQRFb0nTIfZRhGompweg,filename=Baby Names
grp 69:151 ^--------------------------------------------------------------------------------^ CloudEntry(doc_id=1g5Yho6JmysVGRO-Xmfurra_cQRFb0nTIfZRhGompweg,filename=Baby Names
val 69:151 ^--------------------------------------------------------------------------------^ CloudEntry(doc_id=1g5Yho6JmysVGRO-Xmfurra_cQRFb0nTIfZRhGompweg,filename=Baby Names
pair 62:151 ^---------------------------------------------------------------------------------------^ route=[CloudEntry(doc_id=1g5Yho6JmysVGRO-Xmfurra_cQRFb0nTIfZRhGompweg,filename=Baby Names
@ -41,3 +39,19 @@ pair 62:151 ^----
pair 9:151 ^--------------------------------------------------------------------------------------------------------------------------------------------^ Direction.DOWNLOAD, Action.CREATE, name=Fanime 2015, route=[CloudEntry(doc_id=1g5Yho6JmysVGRO-Xmfurra_cQRFb0nTIfZRhGompweg,filename=Baby Names
msg :FSChange(Direction.DOWNLOAD, Action.CREATE, name=Fanime 2015, route=[CloudEntry(doc_id=1g5Yho6JmysVGRO-Xmfurra_cQRFb0nTIfZRhGompweg,filename=Baby Names)])
format :#(#)])
{
"col_0": "FSChange",
"col_1": {
"col_0": "Direction.DOWNLOAD",
"col_1": "Action.CREATE",
"name": "Fanime 2015",
"route": [
{
"CloudEntry": {
"doc_id": "1g5Yho6JmysVGRO-Xmfurra_cQRFb0nTIfZRhGompweg",
"filename": "Baby Names"
}
}
]
}
}

@ -1,9 +1,6 @@
Worker successfully completed [ImmutableChange(Direction.UPLOAD, Action.CREATE, ino=LocalID(inode=5567236), path=u'/Users/stack/Google Drive', name=u'pyjsonpath1.patch', parent_ino=LocalID(inode=46166734), is_folder=False)]
key 31:31 ^
key 31:31 ^
sym 31:46 ^-------------^ ImmutableChange
pair 31:46 ^-------------^ ImmutableChange
key 47:47 ^
key 31:46 ^-------------^ ImmutableChange
key 47:47 ^
sym 47:63 ^--------------^ Direction.UPLOAD
val 47:63 ^--------------^ Direction.UPLOAD
@ -45,8 +42,34 @@ cnst 216:221
val 216:221 ^---^ False
pair 206:221 ^-------------^ is_folder=False
grp 47:221 ^----------------------------------------------------------------------------------------------------------------------------------------------------------------------------^ Direction.UPLOAD, Action.CREATE, ino=LocalID(inode=5567236), path=u'/Users/stack/Google Drive', name=u'pyjsonpath1.patch', parent_ino=LocalID(inode=46166734), is_folder=False
pair 47:221 ^----------------------------------------------------------------------------------------------------------------------------------------------------------------------------^ Direction.UPLOAD, Action.CREATE, ino=LocalID(inode=5567236), path=u'/Users/stack/Google Drive', name=u'pyjsonpath1.patch', parent_ino=LocalID(inode=46166734), is_folder=False
val 47:221 ^----------------------------------------------------------------------------------------------------------------------------------------------------------------------------^ Direction.UPLOAD, Action.CREATE, ino=LocalID(inode=5567236), path=u'/Users/stack/Google Drive', name=u'pyjsonpath1.patch', parent_ino=LocalID(inode=46166734), is_folder=False
pair 31:221 ^--------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------^ ImmutableChange(Direction.UPLOAD, Action.CREATE, ino=LocalID(inode=5567236), path=u'/Users/stack/Google Drive', name=u'pyjsonpath1.patch', parent_ino=LocalID(inode=46166734), is_folder=False
grp 31:221 ^--------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------^ ImmutableChange(Direction.UPLOAD, Action.CREATE, ino=LocalID(inode=5567236), path=u'/Users/stack/Google Drive', name=u'pyjsonpath1.patch', parent_ino=LocalID(inode=46166734), is_folder=False
pair 31:221 ^--------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------^ ImmutableChange(Direction.UPLOAD, Action.CREATE, ino=LocalID(inode=5567236), path=u'/Users/stack/Google Drive', name=u'pyjsonpath1.patch', parent_ino=LocalID(inode=46166734), is_folder=False
msg :Worker successfully completed [ImmutableChange(Direction.UPLOAD, Action.CREATE, ino=LocalID(inode=5567236), path=u'/Users/stack/Google Drive', name=u'pyjsonpath1.patch', parent_ino=LocalID(inode=46166734), is_folder=False)]
format :Worker successfully completed [#)]
[
{
"ImmutableChange": {
"col_0": "Direction.UPLOAD",
"col_1": "Action.CREATE",
"ino": {
"LocalID": [
{
"inode": 5567236
}
]
},
"path": "/Users/stack/Google Drive",
"name": "pyjsonpath1.patch",
"parent_ino": {
"LocalID": [
{
"inode": 46166734
}
]
},
"is_folder": false
}
}
]

@ -4,3 +4,6 @@
pair 0:4 ^--^ test
msg :test: c'est la vie charlie's
format :#: c'est la vie charlie's
{
"col_0": "test"
}

@ -4,16 +4,19 @@
val 5:5 ^
pair 0:5 ^---^ foo=[
key 8:11 ^-^ bar
key 14:14 ^
quot 14:15 ^ a
val 14:15 ^ a
pair 14:15 ^ a
key 19:19 ^
quot 19:20 ^ b
val 19:20 ^ b
pair 19:20 ^ b
grp 14:20 ^----^ a', 'b
val 14:20 ^----^ a', 'b
pair 8:20 ^----------^ bar=['a', 'b
msg :foo=[]; bar=['a', 'b']
format :foo=[#]; bar=[#]
{
"foo": null,
"bar": [
"a",
"b"
]
}

@ -0,0 +1,23 @@
list [foo(bar=1)]
key 6:6 ^
key 6:9 ^-^ foo
key 10:13 ^-^ bar
num 14:15 ^ 1
val 14:15 ^ 1
pair 10:15 ^---^ bar=1
grp 10:15 ^---^ bar=1
val 10:15 ^---^ bar=1
pair 6:15 ^-------^ foo(bar=1
grp 6:15 ^-------^ foo(bar=1
pair 6:15 ^-------^ foo(bar=1
msg :list [foo(bar=1)]
format :list [#)]
[
{
"foo": [
{
"bar": 1
}
]
}
]

@ -0,0 +1,53 @@
list [foo(bar=1), foo(bar=2), foo(bar=3)]
key 6:6 ^
key 6:9 ^-^ foo
key 10:13 ^-^ bar
num 14:15 ^ 1
val 14:15 ^ 1
pair 10:15 ^---^ bar=1
grp 10:15 ^---^ bar=1
val 10:15 ^---^ bar=1
pair 6:15 ^-------^ foo(bar=1
key 18:21 ^-^ foo
key 22:25 ^-^ bar
num 26:27 ^ 2
val 26:27 ^ 2
pair 22:27 ^---^ bar=2
grp 22:27 ^---^ bar=2
val 22:27 ^---^ bar=2
pair 18:27 ^-------^ foo(bar=2
key 30:33 ^-^ foo
key 34:37 ^-^ bar
num 38:39 ^ 3
val 38:39 ^ 3
pair 34:39 ^---^ bar=3
grp 34:39 ^---^ bar=3
val 34:39 ^---^ bar=3
pair 30:39 ^-------^ foo(bar=3
grp 6:39 ^-------------------------------^ foo(bar=1), foo(bar=2), foo(bar=3
pair 6:39 ^-------------------------------^ foo(bar=1), foo(bar=2), foo(bar=3
msg :list [foo(bar=1), foo(bar=2), foo(bar=3)]
format :list [#)]
[
{
"foo": [
{
"bar": 1
}
]
},
{
"foo": [
{
"bar": 2
}
]
},
{
"foo": [
{
"bar": 3
}
]
}
]

@ -29,3 +29,12 @@ pair 13:16 ^-^ six
pair 17:18 ^ 7
msg :1,2,3,4,five,six,7
format :#,#,#,#,#,#,#
{
"col_0": 1,
"col_1": 2,
"col_2": 3,
"col_3": 4,
"col_4": "five",
"col_5": "six",
"col_6": 7
}

@ -0,0 +1,17 @@
list ["abc", "def", "ghi"]
key 7:7 ^
quot 7:10 ^-^ abc
val 7:10 ^-^ abc
quot 14:17 ^-^ def
val 14:17 ^-^ def
quot 21:24 ^-^ ghi
val 21:24 ^-^ ghi
grp 7:24 ^---------------^ abc", "def", "ghi
pair 7:24 ^---------------^ abc", "def", "ghi
msg :list ["abc", "def", "ghi"]
format :list [#]
[
"abc",
"def",
"ghi"
]

@ -16,3 +16,10 @@ pair 6:7 ^ 4
pair 17:18 ^ 7
msg :1 2 3 4 five six 7
format :# # # # five six #
{
"col_0": 1,
"col_1": 2,
"col_2": 3,
"col_3": 4,
"col_4": 7
}

@ -5,3 +5,6 @@ quot 12:25 ^-----------^ Hello, World!
pair 0:25 ^-----------------------^ the-value: "Hello, World!
msg :the-value: "Hello, World!"
format :the-value: #
{
"the-value": "Hello, World!"
}

@ -5,3 +5,6 @@
pair 0:45 ^-------------------------------------------^ this is a url: http://www.example.com/foo-bar
msg :this is a url: http://www.example.com/foo-bar
format :this is a url: #
{
"this is a url": "http://www.example.com/foo-bar"
}

@ -17,3 +17,9 @@ pair 16:21 ^---^ foo=1
pair 22:27 ^---^ bar=2
msg :qualified:name: foo=1 bar=2
format :#:#: foo=# bar=#
{
"col_0": "qualified",
"col_1": "name",
"foo": 1,
"bar": 2
}

@ -12,3 +12,7 @@ pair 15:22 ^-----^ arg2="b
pair 5:22 ^---------------^ arg1="a", arg2="b
msg :func(arg1="a", arg2="b")
format :func(#)
{
"arg1": "a",
"arg2": "b"
}

@ -5,31 +5,40 @@ pair 29:60 ^-----------------------------^
key 73:73 ^
quot 73:95 ^--------------------^ /usr/libexec/taskgated
pair 73:95 ^--------------------^ /usr/libexec/taskgated
key 98:98 ^
key 98:98 ^
num 98:103 ^---^ 76339
pair 98:103 ^---^ 76339
val 98:103 ^---^ 76339
grp 98:103 ^---^ 76339
pair 98:103 ^---^ 76339
key 135:135 ^
quot 135:157 ^--------------------^ /usr/libexec/taskgated
pair 135:157 ^--------------------^ /usr/libexec/taskgated
key 160:160 ^
key 160:160 ^
num 160:165 ^---^ 77395
pair 160:165 ^---^ 77395
val 160:165 ^---^ 77395
grp 160:165 ^---^ 77395
pair 160:165 ^---^ 77395
key 168:168 ^
key 168:168 ^
num 168:174 ^----^ 100003
val 168:174 ^----^ 100003
pair 168:174 ^----^ 100003
key 175:175 ^
num 175:176 ^ 1
val 175:176 ^ 1
pair 175:176 ^ 1
grp 168:176 ^------^ 100003,1
pair 168:176 ^------^ 100003,1
msg :Succeeded authorizing right 'system.privilege.taskport.debug' by client '/usr/libexec/taskgated' [76339] for authorization created by '/usr/libexec/taskgated' [77395] (100003,1)
format :Succeeded authorizing right # by client # [#] for authorization created by # [#] (#)
{
"col_0": "system.privilege.taskport.debug",
"col_1": "/usr/libexec/taskgated",
"col_2": [
76339
],
"col_3": "/usr/libexec/taskgated",
"col_4": [
77395
],
"col_5": [
100003,
1
]
}

@ -10,3 +10,8 @@ time 17:29 ^----------^ 12:01:22.123
pair 17:29 ^----------^ 12:01:22.123
msg :12:01 12:01:22 12:01:22.123
format :# # #
{
"col_0": "12:01",
"col_1": "12:01:22",
"col_2": "12:01:22.123"
}

@ -18,3 +18,9 @@ format :# # # #
<ns1:foo>
<elem attr1=xyz attr2="123"> </elem>
<closed />
{
"col_0": "<ns1:foo>",
"col_1": "<elem attr1=xyz attr2=\"123\">",
"col_2": "</elem>",
"col_3": "<closed />"
}

@ -43,6 +43,7 @@
#include "log_format_loader.hh"
#include "pretty_printer.hh"
#include "shared_buffer.hh"
#include "elem_to_json.hh"
#include "../src/data_parser.hh"
#include "../src/view_curses.hh"
@ -178,6 +179,20 @@ int main(int argc, char *argv[])
string pretty_out = pp.print();
fprintf(out, "\n--\n%s", pretty_out.c_str());
}
auto_mem<yajl_gen_t> gen(yajl_gen_free);
gen = yajl_gen_alloc(NULL);
yajl_gen_config(gen.in(), yajl_gen_beautify, true);
elements_to_json(gen, dp, &dp.dp_pairs);
const unsigned char *buf;
size_t len;
yajl_gen_get_buf(gen, &buf, &len);
fwrite(buf, 1, len, out);
fclose(out);
sprintf(cmd, "diff -u %s %s", argv[lpc], TMP_NAME);
@ -186,6 +201,7 @@ int main(int argc, char *argv[])
if (prompt) {
char resp[4];
printf("\nOriginal line:\n%s\n", sub_line.c_str() + body.lr_start);
printf("Would you like to update the original file? (y/N) ");
fflush(stdout);
log_perror(scanf("%3s", resp));

@ -7,3 +7,7 @@ pair 29:40 ^---------^ com
pair 58:62 ^--^ 8099
msg ::com.twisted:Site starting on 8099
format ::#:Site starting on #
{
"col_0": "com.twisted",
"col_1": 8099
}

@ -2,9 +2,8 @@
key 40:46 ^----^ Device
path 48:56 ^------^ /dev/sda
wspc 56:57 ^
key 58:58 ^
sym 58:61 ^-^ SAT
pair 58:61 ^-^ SAT
val 58:61 ^-^ SAT
grp 58:61 ^-^ SAT
val 48:61 ^-----------^ /dev/sda [SAT
pair 40:61 ^-------------------^ Device: /dev/sda [SAT
@ -32,3 +31,10 @@ wspc 116:117
pair 112:119 ^-----^ 17.1 GB
msg : Device: /dev/sda [SAT], VBOX HARDDISK, S/N:VBc8882b62-a0263a39, FW:1.0, 17.1 GB
format : Device: #], #, #, FW:#, #
{
"Device": "/dev/sda [SAT",
"col_0": "VBOX HARDDISK",
"col_1": "S/N:VBc8882b62-a0263a39",
"FW": 1.0,
"col_2": "17.1 GB"
}

@ -10,3 +10,8 @@ pair 61:63 ^^
pair 75:80 ^---^ 55327
msg : bound to 10.1.10.62 -- renewal in 55327 seconds.
format : bound to # # renewal in # seconds
{
"col_0": "10.1.10.62",
"col_1": "--",
"col_2": 55327
}

@ -21,3 +21,10 @@ path 103:110
pair 95:110 ^-------------^ COMMAND=/bin/ls
msg : stack : TTY=ttys002 ; PWD=/ ; USER=root ; COMMAND=/bin/ls
format : # : TTY=# ; PWD=# ; USER=# ; COMMAND=#
{
"col_0": "stack",
"TTY": "ttys002",
"PWD": "/",
"USER": "root",
"COMMAND": "/bin/ls"
}

@ -10,3 +10,8 @@ ipv4 116:126
pair 116:126 ^--------^ 10.1.10.62
msg : Leaving mDNS multicast group on interface eth0.IPv4 with address 10.1.10.62.
format : Leaving # multicast group on interface # with address #
{
"col_0": "mDNS",
"col_1": "eth0.IPv4",
"col_2": "10.1.10.62"
}

@ -29,3 +29,12 @@ pair 162:177
pair 179:199 ^------------------^ compressor_pages 144
msg : hibernate_teardown: wired_pages 518290, free_pages 5699523, active_pages 40010, inactive_pages 0, speculative_pages 0, cleaned_pages 0, compressor_pages 144
format : hibernate_teardown: wired_pages #, free_pages #, active_pages #, inactive_pages #, speculative_pages #, cleaned_pages #, compressor_pages #
{
"wired_pages": 518290,
"free_pages": 5699523,
"active_pages": 40010,
"inactive_pages": 0,
"speculative_pages": 0,
"cleaned_pages": 0,
"compressor_pages": 144
}

@ -7,3 +7,7 @@ pair 74:83
pair 88:92 ^--^ mDNS
msg : New relevant interface eth0.IPv4 for mDNS.
format : New relevant interface # for #
{
"col_0": "eth0.IPv4",
"col_1": "mDNS"
}

@ -9,3 +9,7 @@ pair 51:90 ^---------------
pair 92:106 ^------------^ mux-device:509
msg : Entered:__thr_AMMuxedDeviceDisconnected, mux-device:509
format : Entered:#, mux-device:#
{
"Entered": "__thr_AMMuxedDeviceDisconnected",
"mux-device": 509
}

@ -7,3 +7,7 @@ pair 82:92
pair 96:100 ^--^ eth0
msg : Withdrawing address record for 10.1.10.62 on eth0.
format : Withdrawing address record for # on #
{
"col_0": "10.1.10.62",
"col_1": "eth0"
}

@ -10,3 +10,8 @@ ipv4 116:126
pair 116:126 ^--------^ 10.1.10.62
msg : Joining mDNS multicast group on interface eth0.IPv4 with address 10.1.10.62.
format : Joining # multicast group on interface # with address #
{
"col_0": "mDNS",
"col_1": "eth0.IPv4",
"col_2": "10.1.10.62"
}

@ -26,3 +26,10 @@ word 125:127
pair 95:127 ^------------------------------^ COMMAND=/usr/bin/env VAR1=foo ls
msg : stack : TTY=ttys002 ; PWD=/ ; USER=root ; COMMAND=/usr/bin/env VAR1=foo ls
format : # : TTY=# ; PWD=# ; USER=# ; COMMAND=#
{
"col_0": "stack",
"TTY": "ttys002",
"PWD": "/",
"USER": "root",
"COMMAND": "/usr/bin/env VAR1=foo ls"
}

@ -7,3 +7,7 @@ quot 169:199
pair 169:199 ^----------------------------^ com.json.components.JSONReader
msg : Doing prepare for resource name "Json_Reader", component "com.json.components.JSONReader"
format : Doing prepare for resource name #, component #
{
"col_0": "Json_Reader",
"col_1": "com.json.components.JSONReader"
}

@ -7,3 +7,7 @@ pair 61:70 ^-----
pair 94:98 ^--^ mDNS
msg : Interface eth0.IPv4 no longer relevant for mDNS.
format : Interface # no longer relevant for #
{
"col_0": "eth0.IPv4",
"col_1": "mDNS"
}

@ -9,3 +9,7 @@ path 75:91
pair 67:91 ^----------------------^ updated /etc/resolv.conf
msg : /sbin/dhclient-script : updated /etc/resolv.conf
format : # : updated #
{
"col_0": "/sbin/dhclient-script",
"updated": "/etc/resolv.conf"
}

@ -15,3 +15,7 @@ wspc 86:87
pair 50:105 ^-----------------------------------------------------^ VNetUserIf_Create: created userIf at 0xffffff802644f400
msg : vmnet: VNetUserIf_Create: created userIf at 0xffffff802644f400.
format : vmnet:# VNetUserIf_Create: #
{
"vmnet": "",
"VNetUserIf_Create": "created userIf at 0xffffff802644f400"
}

@ -14,3 +14,12 @@ pair 65:79 ^-
pair 65:79 ^------------^ xid=0x4e17f141
msg : DHCPNAK from 10.1.10.1 (xid=0x4e17f141)
format : # from # (#)
{
"col_0": "DHCPNAK",
"col_1": "10.1.10.1",
"col_2": [
{
"xid": "0x4e17f141"
}
]
}

@ -67,3 +67,21 @@ pair 246:258
pair 259:265 ^----^ URGP=0
msg : [31809412.513897] [UFW BLOCK] IN=eth0 OUT= MAC=40:40:2e:9a:ad:92:c4:71:fe:f1:b9:7f:08:00 SRC=69.60.116.202 DST=173.203.237.224 LEN=44 TOS=0x00 PREC=0x00 TTL=29 ID=15852 PROTO=TCP SPT=43998 DPT=3389 WINDOW=3072 RES=0x00 SYN URGP=0
format : [31809412.513897] [UFW BLOCK] IN=# OUT=# MAC=# SRC=# DST=# LEN=# TOS=# PREC=# TTL=# ID=# PROTO=# SPT=# DPT=# WINDOW=# RES=# URGP=#
{
"31809412.513897] [UFW BLOCK] IN": "eth0",
"OUT": "",
"MAC": "40:40:2e:9a:ad:92:c4:71:fe:f1:b9:7f:08:00",
"SRC": "69.60.116.202",
"DST": "173.203.237.224",
"LEN": 44,
"TOS": "0x00",
"PREC": "0x00",
"TTL": 29,
"ID": 15852,
"PROTO": "TCP",
"SPT": 43998,
"DPT": 3389,
"WINDOW": 3072,
"RES": "0x00 SYN",
"URGP": 0
}

@ -23,3 +23,15 @@ pair 101:114
pair 101:114 ^-----------^ xid=0xd16b79d
msg : DHCPDISCOVER on eth0 to 255.255.255.255 port 67 interval 5 (xid=0xd16b79d)
format : # on # to # port # interval # (#)
{
"col_0": "DHCPDISCOVER",
"col_1": "eth0",
"col_2": "255.255.255.255",
"col_3": 67,
"col_4": 5,
"col_5": [
{
"xid": "0xd16b79d"
}
]
}

@ -20,3 +20,14 @@ pair 83:96
pair 83:96 ^-----------^ xid=0xd16b79d
msg : DHCPREQUEST on eth0 to 10.1.10.1 port 67 (xid=0xd16b79d)
format : # on # to # port # (#)
{
"col_0": "DHCPREQUEST",
"col_1": "eth0",
"col_2": "10.1.10.1",
"col_3": 67,
"col_4": [
{
"xid": "0xd16b79d"
}
]
}

@ -4,3 +4,6 @@ ipv6 85:110
pair 85:110 ^-----------------------^ fe80::22c9:d0ff:fe15:1b7c
msg : Invalid response packet from host fe80::22c9:d0ff:fe15:1b7c.
format : Invalid response packet from host #
{
"col_0": "fe80::22c9:d0ff:fe15:1b7c"
}

@ -4,3 +4,6 @@ ipv4 85:95
pair 85:95 ^--------^ 10.1.10.10
msg : Invalid response packet from host 10.1.10.10.
format : Invalid response packet from host #
{
"col_0": "10.1.10.10"
}

@ -2,14 +2,22 @@
key 29:29 ^
key 29:40 ^---------^ com.twisted
pair 29:40 ^---------^ com.twisted
key 59:59 ^
key 59:59 ^
sym 59:82 ^---------------------^ twisted.web.server.Site
pair 59:82 ^---------------------^ twisted.web.server.Site
key 95:95 ^
wspc 82:83 ^
word 83:91 ^------^ instance
wspc 91:92 ^
word 92:94 ^^ at
wspc 94:95 ^
hex 95:104 ^-------^ 0x1de9290
pair 95:104 ^-------^ 0x1de9290
val 59:104 ^-------------------------------------------^ twisted.web.server.Site instance at 0x1de9290
grp 59:104 ^-------------------------------------------^ twisted.web.server.Site instance at 0x1de9290
pair 59:104 ^-------------------------------------------^ twisted.web.server.Site instance at 0x1de9290
msg ::com.twisted:Starting factory <twisted.web.server.Site instance at 0x1de9290>
format ::#:Starting factory <#>
{
"col_0": "com.twisted",
"col_1": [
"twisted.web.server.Site instance at 0x1de9290"
]
}

@ -14,3 +14,12 @@ pair 65:78 ^-
pair 65:78 ^-----------^ xid=0xd16b79d
msg : DHCPACK from 10.1.10.1 (xid=0xd16b79d)
format : # from # (#)
{
"col_0": "DHCPACK",
"col_1": "10.1.10.1",
"col_2": [
{
"xid": "0xd16b79d"
}
]
}

@ -7,3 +7,7 @@ ipv4 56:65 ^-------^
pair 56:65 ^-------^ 10.1.10.1
msg : DHCPOFFER from 10.1.10.1
format : # from #
{
"col_0": "DHCPOFFER",
"col_1": "10.1.10.1"
}

@ -7,3 +7,7 @@ pair 86:96
pair 100:104 ^--^ eth0
msg : Registering new address record for 10.1.10.62 on eth0.
format : Registering new address record for # on #
{
"col_0": "10.1.10.62",
"col_1": "eth0"
}

@ -41,6 +41,8 @@ SRC_DIR = os.path.join(ROOT_DIR, "src")
addr_to_name = {}
name_to_addr = {}
element_lists = collections.defaultdict(list)
list_depth = {}
list_format = {}
breakpoints = set()
def completer(text, state):
@ -74,13 +76,20 @@ def getstr(capture):
def printlist(name_or_addr):
if name_or_addr in name_to_addr:
print "(%s) %s" % (name_or_addr, element_lists[name_to_addr[name_or_addr]])
addr = name_to_addr[name_or_addr]
print "% 3d (%s:%s) %s" % (list_depth.get(addr, -1), name_or_addr, addr, element_lists[addr])
elif name_or_addr in element_lists:
print "(%s) %s" % (addr_to_name.get(name_or_addr, name_or_addr),
element_lists[name_or_addr])
addr = name_or_addr
print "% 3d (%s:%s) %s" % (list_depth.get(name_or_addr, -1),
addr_to_name.get(name_or_addr, name_or_addr),
name_or_addr,
element_lists[name_or_addr])
else:
print "error: unknown list --", name_or_addr
if addr in list_format:
print " format -- appender(%s) term(%s) qual(%s) sep(%s) prefix_term(%s)" % tuple(list_format[addr])
def handleop(fields):
addr = fields[0]
loc = fields[1].split(':')
@ -95,8 +104,14 @@ def handleop(fields):
if method_name == 'element_list_t':
addr_to_name[addr] = method_args[0]
name_to_addr[method_args[0]] = addr
list_depth[addr] = int(method_args[1])
elif method_name == '~element_list_t':
pass
del element_lists[addr]
elif method_name == 'format':
list_depth[addr] = int(method_args[0])
list_format[addr] = method_args[1:]
elif method_name == 'consumed':
list_depth[addr] = -1
elif method_name == 'push_back':
el.append((method_args[0], getstr(method_args[1])))
elif method_name == 'pop_front':
@ -113,6 +128,10 @@ def handleop(fields):
sub_list = other[start:end]
del other[start:end]
el[pos:pos] = sub_list
elif method_name == 'swap':
other = element_lists[method_args[0]]
element_lists[method_args[0]] = el
element_lists[addr] = other
elif method_name == 'point':
breakpoints.add(method_args[0])
else:
@ -122,6 +141,7 @@ def playupto(length):
addr_to_name.clear()
name_to_addr.clear()
element_lists.clear()
list_depth.clear()
for index in range(length):
handleop(ops[index])
@ -147,6 +167,13 @@ def find_next_point(start, name):
return start + 1
return orig_start + 1
def printall():
    # Dump the full debugger state: the line being parsed, then every
    # tracked element list.  Lists are ordered by their recorded group
    # depth (shallowest first) so the output reads top-down through the
    # parse hierarchy; lists with no recorded depth sort first (-1).
    print input_line
    sorted_lists = [(list_depth.get(addr, -1), addr) for addr in element_lists]
    sorted_lists.sort()
    for _depth, addr in sorted_lists:
        printlist(addr)
index = len(ops)
last_cmd = ['']
watch_list = set()
@ -209,9 +236,7 @@ while True:
if len(cmd) > 1:
printlist(cmd[1])
else:
print input_line
for addr in element_lists:
printlist(addr)
printall()
elif cmd[0] == 'w':
watch_list.add(cmd[1])
elif cmd[0] == 'u':
@ -220,4 +245,6 @@ while True:
else:
print "error: unknown command --", cmd
printall()
last_cmd = cmd

#!/usr/bin/env bash

# Run the data-scanner driver in "prompt/check" mode (-p) against every
# simple data file and every captured log sample next to this script, so
# the parser's actual output can be compared with the expected output.

# $(...) instead of legacy backticks; quote "$0" so a path containing
# spaces still resolves to the right directory.
test_dir=$(dirname "$0")

for fn in "${test_dir}"/datafile_simple.*; do
    echo "Checking $fn"
    ./drive_data_scanner -p "$fn"
done

# Log samples additionally need -l so the driver strips the log prefix
# before handing the body to the data scanner.
for fn in "${test_dir}"/log-samples/sample-*; do
    echo "Checking $fn"
    ./drive_data_scanner -p -l "$fn"
done
Loading…
Cancel
Save