from hashlib import md5
from typing import Any, Dict, List, Optional

from langchain_core.utils import get_from_dict_or_env

from langchain_community.graphs.graph_document import GraphDocument
from langchain_community.graphs.graph_store import GraphStore

# Secondary label applied to every imported entity when `baseEntityLabel` is enabled.
BASE_ENTITY_LABEL = "__Entity__"
# Internal Neo4j Bloom labels and relationship types that are hidden from the schema.
EXCLUDED_LABELS = ["_Bloom_Perspective_", "_Bloom_Scene_"]
EXCLUDED_RELS = ["_Bloom_HAS_SCENE_"]

node_properties_query = """
CALL apoc.meta.data()
YIELD label, other, elementType, type, property
WHERE NOT type = "RELATIONSHIP" AND elementType = "node"
  AND NOT label IN $EXCLUDED_LABELS
WITH label AS nodeLabels, collect({property:property, type:type}) AS properties
RETURN {labels: nodeLabels, properties: properties} AS output
"""

rel_properties_query = """
CALL apoc.meta.data()
YIELD label, other, elementType, type, property
WHERE NOT type = "RELATIONSHIP" AND elementType = "relationship"
  AND NOT label IN $EXCLUDED_LABELS
WITH label AS nodeLabels, collect({property:property, type:type}) AS properties
RETURN {type: nodeLabels, properties: properties} AS output
"""

rel_query = """
CALL apoc.meta.data()
YIELD label, other, elementType, type, property
WHERE type = "RELATIONSHIP" AND elementType = "node"
UNWIND other AS other_node
WITH * WHERE NOT label IN $EXCLUDED_LABELS
    AND NOT other_node IN $EXCLUDED_LABELS
RETURN {start: label, type: property, end: toString(other_node)} AS output
"""

# Cypher fragment prepended to the node import query when `include_source` is True:
# it merges the source Document node and keeps it bound as `d` for the MENTIONS link.
include_docs_query = (
    "MERGE (d:Document {id:$document.metadata.id}) "
    "SET d.text = $document.page_content "
    "SET d += $document.metadata "
    "WITH d "
)


def value_sanitize(d: Any) -> Any:
    """Sanitize the input dictionary or list.

    Sanitizes the input by removing embedding-like values, i.e. lists with
    128 or more elements, which are mostly irrelevant for generating answers
    in an LLM context. If left in the results, these properties can occupy
    significant context space and degrade the LLM's performance by
    introducing unnecessary noise and cost.
    """
    LIST_LIMIT = 128
    if isinstance(d, dict):
        new_dict = {}
        for key, value in d.items():
            if isinstance(value, dict):
                sanitized_value = value_sanitize(value)
                # Keep the key only if the sanitized value is not None
                if sanitized_value is not None:
                    new_dict[key] = sanitized_value
            elif isinstance(value, list):
                if len(value) < LIST_LIMIT:
                    sanitized_value = value_sanitize(value)
                    if sanitized_value is not None:
                        new_dict[key] = sanitized_value
                # Drop the key entirely if the list is oversized
            else:
                new_dict[key] = value
        return new_dict
    elif isinstance(d, list):
        if len(d) < LIST_LIMIT:
            return [
                value_sanitize(item) for item in d if value_sanitize(item) is not None
            ]
        else:
            return None
    else:
        return d
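

# Illustrative sketch (not part of the original module): how value_sanitize
# behaves on a record containing an embedding-like property. The keys used
# here ("title", "embedding", "tags") are hypothetical.
#
#     record = {"title": "Dune", "embedding": [0.1] * 256, "tags": ["sci-fi"]}
#     value_sanitize(record)
#     # -> {"title": "Dune", "tags": ["sci-fi"]}  (the 256-element list is removed)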


def _get_node_import_query(baseEntityLabel: bool, include_source: bool) -> str:
    # Build the Cypher used to import nodes, optionally linking them to the
    # source Document and optionally merging on the shared __Entity__ label.
    if baseEntityLabel:
        return (
            f"{include_docs_query if include_source else ''}"
            "UNWIND $data AS row "
            f"MERGE (source:`{BASE_ENTITY_LABEL}` {{id: row.id}}) "
            "SET source += row.properties "
            f"{'MERGE (d)-[:MENTIONS]->(source) ' if include_source else ''}"
            "WITH source, row "
            "CALL apoc.create.addLabels( source, [row.type] ) YIELD node "
            "RETURN distinct 'done' AS result"
        )
    else:
        return (
            f"{include_docs_query if include_source else ''}"
            "UNWIND $data AS row "
            "CALL apoc.merge.node([row.type], {id: row.id}, "
            "row.properties, {}) YIELD node "
            f"{'MERGE (d)-[:MENTIONS]->(node) ' if include_source else ''}"
            "RETURN distinct 'done' AS result"
        )
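

# Illustrative sketch (not part of the original module): with
# baseEntityLabel=True and include_source=False, _get_node_import_query
# produces Cypher along these lines (concatenated on a single line at runtime):
#
#     UNWIND $data AS row
#     MERGE (source:`__Entity__` {id: row.id})
#     SET source += row.properties
#     WITH source, row
#     CALL apoc.create.addLabels( source, [row.type] ) YIELD node
#     RETURN distinct 'done' AS result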


def _get_rel_import_query(baseEntityLabel: bool) -> str:
    if baseEntityLabel:
        return (
            "UNWIND $data AS row "
            f"MERGE (source:`{BASE_ENTITY_LABEL}` {{id: row.source}}) "
            f"MERGE (target:`{BASE_ENTITY_LABEL}` {{id: row.target}}) "
            "WITH source, target, row "
            "CALL apoc.merge.relationship(source, row.type, "
            "{}, row.properties, target) YIELD rel "
            "RETURN distinct 'done'"
        )
    else:
        return (
            "UNWIND $data AS row "
            "CALL apoc.merge.node([row.source_label], {id: row.source},"
            "{}, {}) YIELD node as source "
            "CALL apoc.merge.node([row.target_label], {id: row.target},"
            "{}, {}) YIELD node as target "
            "CALL apoc.merge.relationship(source, row.type, "
            "{}, row.properties, target) YIELD rel "
            "RETURN distinct 'done'"
        )


class Neo4jGraph(GraphStore):
    """Neo4j database wrapper for various graph operations.

    Parameters:
        url (Optional[str]): The URL of the Neo4j database server.
        username (Optional[str]): The username for database authentication.
        password (Optional[str]): The password for database authentication.
        database (str): The name of the database to connect to. Default is 'neo4j'.
        timeout (Optional[float]): The timeout for transactions in seconds.
            Useful for terminating long-running queries.
            By default, there is no timeout set.
        sanitize (bool): A flag to indicate whether to remove lists with
            128 or more elements from results. Useful for removing
            embedding-like properties from database responses. Default is False.
        refresh_schema (bool): Whether to refresh schema information
            at initialization. Default is True.

    *Security note*: Make sure that the database connection uses credentials
        that are narrowly-scoped to only include necessary permissions.
        Failure to do so may result in data corruption or loss, since the
        calling code may attempt commands that delete or mutate data, or read
        sensitive data, if appropriately prompted.
        The best way to guard against such negative outcomes is to (as appropriate)
        limit the permissions granted to the credentials used with this tool.

        See https://python.langchain.com/docs/security for more information.
    """

    def __init__(
        self,
        url: Optional[str] = None,
        username: Optional[str] = None,
        password: Optional[str] = None,
        database: Optional[str] = None,
        timeout: Optional[float] = None,
        sanitize: bool = False,
        refresh_schema: bool = True,
    ) -> None:
        """Create a new Neo4j graph wrapper instance."""
        try:
            import neo4j
        except ImportError:
            raise ValueError(
                "Could not import neo4j python package. "
                "Please install it with `pip install neo4j`."
            )

        url = get_from_dict_or_env({"url": url}, "url", "NEO4J_URI")
        username = get_from_dict_or_env(
            {"username": username}, "username", "NEO4J_USERNAME"
        )
        password = get_from_dict_or_env(
            {"password": password}, "password", "NEO4J_PASSWORD"
        )
        database = get_from_dict_or_env(
            {"database": database}, "database", "NEO4J_DATABASE", "neo4j"
        )

        self._driver = neo4j.GraphDatabase.driver(url, auth=(username, password))
        self._database = database
        self.timeout = timeout
        self.sanitize = sanitize
        self.schema: str = ""
        self.structured_schema: Dict[str, Any] = {}
        # Verify connection
        try:
            self._driver.verify_connectivity()
        except neo4j.exceptions.ServiceUnavailable:
            raise ValueError(
                "Could not connect to Neo4j database. "
                "Please ensure that the url is correct"
            )
        except neo4j.exceptions.AuthError:
            raise ValueError(
                "Could not connect to Neo4j database. "
                "Please ensure that the username and password are correct"
            )
        # Set schema
        if refresh_schema:
            try:
                self.refresh_schema()
            except neo4j.exceptions.ClientError as e:
                if e.code == "Neo.ClientError.Procedure.ProcedureNotFound":
                    raise ValueError(
                        "Could not use APOC procedures. "
                        "Please ensure the APOC plugin is installed in Neo4j and that "
                        "'apoc.meta.data()' is allowed in Neo4j configuration "
                    )
                raise e

    @property
    def get_schema(self) -> str:
        """Returns the schema of the Graph"""
        return self.schema

    @property
    def get_structured_schema(self) -> Dict[str, Any]:
        """Returns the structured schema of the Graph"""
        return self.structured_schema

    def query(self, query: str, params: dict = {}) -> List[Dict[str, Any]]:
        """Query Neo4j database."""
        from neo4j import Query
        from neo4j.exceptions import CypherSyntaxError

        with self._driver.session(database=self._database) as session:
            try:
                data = session.run(Query(text=query, timeout=self.timeout), params)
                json_data = [r.data() for r in data]
                if self.sanitize:
                    json_data = [value_sanitize(el) for el in json_data]
                return json_data
            except CypherSyntaxError as e:
                raise ValueError(f"Generated Cypher Statement is not valid\n{e}")
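
    # Illustrative sketch (not part of the original module): typical usage of
    # the wrapper. The connection details shown here are placeholders.
    #
    #     graph = Neo4jGraph(
    #         url="bolt://localhost:7687",
    #         username="neo4j",
    #         password="password",
    #         sanitize=True,
    #     )
    #     rows = graph.query("MATCH (n) RETURN count(n) AS node_count")
    #     print(rows)  # e.g. [{'node_count': 42}]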

    def refresh_schema(self) -> None:
        """
        Refreshes the Neo4j graph schema information.
        """
        from neo4j.exceptions import ClientError

        node_properties = [
            el["output"]
            for el in self.query(
                node_properties_query,
                params={"EXCLUDED_LABELS": EXCLUDED_LABELS + [BASE_ENTITY_LABEL]},
            )
        ]
        rel_properties = [
            el["output"]
            for el in self.query(
                rel_properties_query, params={"EXCLUDED_LABELS": EXCLUDED_RELS}
            )
        ]
        relationships = [
            el["output"]
            for el in self.query(
                rel_query,
                params={"EXCLUDED_LABELS": EXCLUDED_LABELS + [BASE_ENTITY_LABEL]},
            )
        ]

        # Get constraints & indexes
        try:
            constraint = self.query("SHOW CONSTRAINTS")
            index = self.query("SHOW INDEXES YIELD *")
        except ClientError:
            # Read-only user might not have access to schema information
            constraint = []
            index = []

        self.structured_schema = {
            "node_props": {el["labels"]: el["properties"] for el in node_properties},
            "rel_props": {el["type"]: el["properties"] for el in rel_properties},
            "relationships": relationships,
            "metadata": {"constraint": constraint, "index": index},
        }

        # Format node properties
        formatted_node_props = []
        for el in node_properties:
            props_str = ", ".join(
                [f"{prop['property']}: {prop['type']}" for prop in el["properties"]]
            )
            formatted_node_props.append(f"{el['labels']} {{{props_str}}}")

        # Format relationship properties
        formatted_rel_props = []
        for el in rel_properties:
            props_str = ", ".join(
                [f"{prop['property']}: {prop['type']}" for prop in el["properties"]]
            )
            formatted_rel_props.append(f"{el['type']} {{{props_str}}}")

        # Format relationships
        formatted_rels = [
            f"(:{el['start']})-[:{el['type']}]->(:{el['end']})" for el in relationships
        ]

        self.schema = "\n".join(
            [
                "Node properties are the following:",
                ",".join(formatted_node_props),
                "Relationship properties are the following:",
                ",".join(formatted_rel_props),
                "The relationships are the following:",
                ",".join(formatted_rels),
            ]
        )
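
    # Illustrative sketch (not part of the original module): for a small graph
    # with Person and Movie nodes, the generated self.schema string looks
    # roughly like:
    #
    #     Node properties are the following:
    #     Person {name: STRING},Movie {title: STRING}
    #     Relationship properties are the following:
    #     ACTED_IN {role: STRING}
    #     The relationships are the following:
    #     (:Person)-[:ACTED_IN]->(:Movie)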

    def add_graph_documents(
        self,
        graph_documents: List[GraphDocument],
        include_source: bool = False,
        baseEntityLabel: bool = False,
    ) -> None:
        """
        This method constructs nodes and relationships in the graph based on the
        provided GraphDocument objects.

        Parameters:
        - graph_documents (List[GraphDocument]): A list of GraphDocument objects
          that contain the nodes and relationships to be added to the graph. Each
          GraphDocument should encapsulate the structure of part of the graph,
          including nodes, relationships, and the source document information.
        - include_source (bool, optional): If True, stores the source document
          and links it to nodes in the graph using the MENTIONS relationship.
          This is useful for tracing back the origin of data. Source documents
          are merged on the `id` property from the source document metadata
          if available; otherwise the MD5 hash of `page_content` is used as the
          merge key. Defaults to False.
        - baseEntityLabel (bool, optional): If True, each newly created node
          gets a secondary __Entity__ label, which is indexed and improves import
          speed and performance. Defaults to False.
        """
        if baseEntityLabel:  # Check if constraint already exists
            constraint_exists = any(
                [
                    el["labelsOrTypes"] == [BASE_ENTITY_LABEL]
                    and el["properties"] == ["id"]
                    for el in self.structured_schema.get("metadata", {}).get(
                        "constraint"
                    )
                ]
            )
            if not constraint_exists:
                # Create constraint
                self.query(
                    f"CREATE CONSTRAINT IF NOT EXISTS FOR (b:{BASE_ENTITY_LABEL}) "
                    "REQUIRE b.id IS UNIQUE;"
                )
                self.refresh_schema()  # Refresh constraint information

        node_import_query = _get_node_import_query(baseEntityLabel, include_source)
        rel_import_query = _get_rel_import_query(baseEntityLabel)
        for document in graph_documents:
            if not document.source.metadata.get("id"):
                # Fall back to an MD5 hash of the page content as the document id
                document.source.metadata["id"] = md5(
                    document.source.page_content.encode("utf-8")
                ).hexdigest()

            # Import nodes
            self.query(
                node_import_query,
                {
                    "data": [el.__dict__ for el in document.nodes],
                    "document": document.source.__dict__,
                },
            )
            # Import relationships
            self.query(
                rel_import_query,
                {
                    "data": [
                        {
                            "source": el.source.id,
                            "source_label": el.source.type,
                            "target": el.target.id,
                            "target_label": el.target.type,
                            "type": el.type.replace(" ", "_").upper(),
                            "properties": el.properties,
                        }
                        for el in document.relationships
                    ]
                },
            )
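

# Illustrative sketch (not part of the original module): adding a tiny
# GraphDocument. It assumes the Node/Relationship/GraphDocument models from
# langchain_community.graphs.graph_document and Document from
# langchain_core.documents; names and values are placeholders.
#
#     from langchain_core.documents import Document
#     from langchain_community.graphs.graph_document import (
#         GraphDocument, Node, Relationship,
#     )
#
#     doc = Document(page_content="Marie Curie won the Nobel Prize.")
#     marie = Node(id="Marie Curie", type="Person")
#     prize = Node(id="Nobel Prize", type="Award")
#     graph_doc = GraphDocument(
#         nodes=[marie, prize],
#         relationships=[Relationship(source=marie, target=prize, type="WON")],
#         source=doc,
#     )
#     graph.add_graph_documents([graph_doc], include_source=True, baseEntityLabel=True)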