diff --git a/backend/score.py b/backend/score.py
index e2a6c4e51..7edb541ad 100644
--- a/backend/score.py
+++ b/backend/score.py
@@ -307,7 +307,8 @@ async def extract_knowledge_graph_from_file(
         node_detail = graphDb_data_Access.get_current_status_document_node(file_name)
         # Set the status "Completed" in logging because we treat these errors, already handled by the application, as custom errors.
         json_obj = {'api_name':'extract','message':error_message,'file_created_at':formatted_time(node_detail[0]['created_time']),'error_message':error_message, 'file_name': file_name,'status':'Completed',
-                    'db_url':uri, 'userName':userName, 'database':database,'success_count':1, 'source_type': source_type, 'source_url':source_url, 'wiki_query':wiki_query, 'logging_time': formatted_time(datetime.now(timezone.utc)),'email':email}
+                    'db_url':uri, 'userName':userName, 'database':database,'success_count':1, 'source_type': source_type, 'source_url':source_url, 'wiki_query':wiki_query, 'logging_time': formatted_time(datetime.now(timezone.utc)),'email':email,
+                    'allowedNodes': allowedNodes, 'allowedRelationship': allowedRelationship}
         logger.log_struct(json_obj, "INFO")
         logging.exception(f'File Failed in extraction: {e}')
         return create_api_response("Failed", message = error_message, error=error_message, file_name=file_name)
@@ -322,7 +323,8 @@ async def extract_knowledge_graph_from_file(
         node_detail = graphDb_data_Access.get_current_status_document_node(file_name)
 
         json_obj = {'api_name':'extract','message':message,'file_created_at':formatted_time(node_detail[0]['created_time']),'error_message':error_message, 'file_name': file_name,'status':'Failed',
-                    'db_url':uri, 'userName':userName, 'database':database,'failed_count':1, 'source_type': source_type, 'source_url':source_url, 'wiki_query':wiki_query, 'logging_time': formatted_time(datetime.now(timezone.utc)),'email':email}
+                    'db_url':uri, 'userName':userName, 'database':database,'failed_count':1, 'source_type': source_type, 'source_url':source_url, 'wiki_query':wiki_query, 'logging_time': formatted_time(datetime.now(timezone.utc)),'email':email,
+                    'allowedNodes': allowedNodes, 'allowedRelationship': allowedRelationship}
         logger.log_struct(json_obj, "ERROR")
         logging.exception(f'File Failed in extraction: {e}')
         return create_api_response('Failed', message=message + error_message[:100], error=error_message, file_name = file_name)
@@ -341,14 +343,6 @@ async def get_source_list(
     """
     try:
         start = time.time()
-        # if password is not None and password != "null":
-        #     decoded_password = decode_password(password)
-        # else:
-        #     decoded_password = None
-        #     userName = None
-        #     database = None
-        # if " " in uri:
-        #     uri = uri.replace(" ","+")
         result = await asyncio.to_thread(get_source_list_from_graph,uri,userName,password,database)
         end = time.time()
         elapsed_time = end - start
diff --git a/backend/src/graphDB_dataAccess.py b/backend/src/graphDB_dataAccess.py
index e3142a10b..77cc9e592 100644
--- a/backend/src/graphDB_dataAccess.py
+++ b/backend/src/graphDB_dataAccess.py
@@ -29,10 +29,10 @@ def update_exception_db(self, file_name, exp_msg, retry_condition=None):
             if retry_condition is not None:
                 retry_condition = None
             self.graph.query("""MERGE(d:Document {fileName :$fName}) SET d.status = $status, d.errorMessage = $error_msg, d.retry_condition = $retry_condition""",
-                             {"fName":file_name, "status":job_status, "error_msg":exp_msg, "retry_condition":retry_condition})
+                             {"fName":file_name, "status":job_status, "error_msg":exp_msg, "retry_condition":retry_condition},session_params={"database":self.graph._database})
"retry_condition":retry_condition},session_params={"database":self.graph._database}) else : self.graph.query("""MERGE(d:Document {fileName :$fName}) SET d.status = $status, d.errorMessage = $error_msg""", - {"fName":file_name, "status":job_status, "error_msg":exp_msg}) + {"fName":file_name, "status":job_status, "error_msg":exp_msg},session_params={"database":self.graph._database}) except Exception as e: error_message = str(e) logging.error(f"Error in updating document node status as failed: {error_message}") @@ -66,7 +66,7 @@ def create_source_node(self, obj_source_node:sourceNode): "entityEntityRelCount":obj_source_node.entityEntityRelCount, "communityNodeCount":obj_source_node.communityNodeCount, "communityRelCount":obj_source_node.communityRelCount - }) + },session_params={"database":self.graph._database}) except Exception as e: error_message = str(e) logging.info(f"error_message = {error_message}") @@ -118,7 +118,7 @@ def update_source_node(self, obj_source_node:sourceNode): logging.info(f'Base Param value 1 : {param}') query = "MERGE(d:Document {fileName :$props.fileName}) SET d += $props" logging.info("Update source node properties") - self.graph.query(query,param) + self.graph.query(query,param,session_params={"database":self.graph._database}) except Exception as e: error_message = str(e) self.update_exception_db(self,self.file_name,error_message) @@ -139,7 +139,7 @@ def get_source_list(self): """ logging.info("Get existing files list from graph") query = "MATCH(d:Document) WHERE d.fileName IS NOT NULL RETURN d ORDER BY d.updatedAt DESC" - result = self.graph.query(query) + result = self.graph.query(query,session_params={"database":self.graph._database}) list_of_json_objects = [entry['d'] for entry in result] return list_of_json_objects @@ -147,7 +147,7 @@ def update_KNN_graph(self): """ Update the graph node with SIMILAR relationship where embedding scrore match """ - index = self.graph.query("""show indexes yield * where type = 'VECTOR' and name = 'vector'""") + index = self.graph.query("""show indexes yield * where type = 'VECTOR' and name = 'vector'""",session_params={"database":self.graph._database}) # logging.info(f'show index vector: {index}') knn_min_score = os.environ.get('KNN_MIN_SCORE') if len(index) > 0: @@ -158,14 +158,14 @@ def update_KNN_graph(self): WHERE node <> c and score >= $score MERGE (c)-[rel:SIMILAR]-(node) SET rel.score = score """, {"score":float(knn_min_score)} - ) + ,session_params={"database":self.graph._database}) else: logging.info("Vector index does not exist, So KNN graph not update") def check_account_access(self, database): try: query_dbms_componenet = "call dbms.components() yield edition" - result_dbms_componenet = self.graph.query(query_dbms_componenet) + result_dbms_componenet = self.graph.query(query_dbms_componenet,session_params={"database":self.graph._database}) if result_dbms_componenet[0]["edition"] == "enterprise": query = """ @@ -177,7 +177,7 @@ def check_account_access(self, database): logging.info(f"Checking access for database: {database}") - result = self.graph.query(query, params={"database": database}) + result = self.graph.query(query, params={"database": database},session_params={"database":self.graph._database}) read_access_count = result[0]["readAccessCount"] if result else 0 logging.info(f"Read access count: {read_access_count}") @@ -202,7 +202,7 @@ def check_gds_version(self): gds_procedure_count = """ SHOW FUNCTIONS YIELD name WHERE name STARTS WITH 'gds.version' RETURN COUNT(*) AS totalGdsProcedures """ - result = 
+            result = self.graph.query(gds_procedure_count,session_params={"database":self.graph._database})
             total_gds_procedures = result[0]['totalGdsProcedures'] if result else 0
 
             if total_gds_procedures > 0:
@@ -231,11 +231,11 @@ def connection_check_and_get_vector_dimensions(self,database):
         db_vector_dimension = self.graph.query("""SHOW INDEXES YIELD *
                                             WHERE type = 'VECTOR' AND name = 'vector'
                                             RETURN options.indexConfig['vector.dimensions'] AS vector_dimensions
-                                            """)
+                                            """,session_params={"database":self.graph._database})
 
         result_chunks = self.graph.query("""match (c:Chunk) return size(c.embedding) as embeddingSize, count(*) as chunks,
                                             count(c.embedding) as hasEmbedding
-                                            """)
+                                            """,session_params={"database":self.graph._database})
 
         embedding_model = os.getenv('EMBEDDING_MODEL')
         embeddings, application_dimension = load_embedding_model(embedding_model)
@@ -260,7 +260,7 @@ def execute_query(self, query, param=None,max_retries=3, delay=2):
         retries = 0
         while retries < max_retries:
             try:
-                return self.graph.query(query, param)
+                return self.graph.query(query, param,session_params={"database":self.graph._database})
             except TransientError as e:
                 if "DeadlockDetected" in str(e):
                     retries += 1
@@ -473,8 +473,8 @@ def drop_create_vector_index(self, isVectorIndexExist):
         embeddings, dimension = load_embedding_model(embedding_model)
 
         if isVectorIndexExist == 'true':
-            self.graph.query("""drop index vector""")
-            # self.graph.query("""drop index vector""")
+            self.graph.query("""drop index vector""",session_params={"database":self.graph._database})
+
         self.graph.query("""CREATE VECTOR INDEX `vector` if not exists for (c:Chunk) on (c.embedding)
                         OPTIONS {indexConfig: {
                         `vector.dimensions`: $dimensions,
@@ -483,7 +483,7 @@ def drop_create_vector_index(self, isVectorIndexExist):
                         """,
                         {
                             "dimensions" : dimension
-                        }
+                        },session_params={"database":self.graph._database}
                         )
         return "Drop and Re-Create vector index successfully"
diff --git a/backend/src/llm.py b/backend/src/llm.py
index ef835b560..a36c7cc33 100644
--- a/backend/src/llm.py
+++ b/backend/src/llm.py
@@ -127,6 +127,14 @@ def get_llm(model: str):
     logging.info(f"Model created - Model Version: {model}")
     return llm, model_name
 
+def get_llm_model_name(llm):
+    """Extract the model name from an LLM client object."""
+    for attr in ["model_name", "model", "model_id"]:
+        model_name = getattr(llm, attr, None)
+        if model_name:
+            return model_name.lower()
+    logging.warning("Could not determine model name; defaulting to empty string")
+    return ""
 
 def get_combined_chunks(chunkId_chunkDoc_list, chunks_to_combine):
     combined_chunk_document_list = []
@@ -181,8 +189,9 @@ async def get_graph_document_list(
         node_properties = ["description"]
         relationship_properties = ["description"]
         TOOL_SUPPORTED_MODELS = {"qwen3", "deepseek"}
-        model_name = llm.model_name.lower()
+        model_name = get_llm_model_name(llm)
         ignore_tool_usage = not any(pattern in model_name for pattern in TOOL_SUPPORTED_MODELS)
+        logging.info(f"Keeping ignore tool usage parameter as {ignore_tool_usage}")
     llm_transformer = LLMGraphTransformer(
         llm=llm,
         node_properties=node_properties,
diff --git a/frontend/src/components/ChatBot/ChunkInfo.tsx b/frontend/src/components/ChatBot/ChunkInfo.tsx
index 8a2341f06..b56057503 100644
--- a/frontend/src/components/ChatBot/ChunkInfo.tsx
+++ b/frontend/src/components/ChatBot/ChunkInfo.tsx
@@ -15,7 +15,6 @@ import { handleGraphNodeClick } from './chatInfo';
 import { IconButtonWithToolTip } from '../UI/IconButtonToolTip';
 import remarkGfm from 'remark-gfm';
 import rehypeRaw from 'rehype-raw';
-
 const ChunkInfo: FC<ChunkProps> = ({ loading, chunks, mode }) => {
   const themeUtils = useContext(ThemeWrapperContext);
   const [neoNodes, setNeoNodes] = useState([]);
diff --git a/frontend/src/components/Content.tsx b/frontend/src/components/Content.tsx
index fd840f892..d2a5e14ac 100644
--- a/frontend/src/components/Content.tsx
+++ b/frontend/src/components/Content.tsx
@@ -929,7 +929,12 @@ const Content: React.FC<ContentProps> = ({
             Neo4j connection {isReadOnlyUser ? '(Read only Mode)' : ''}
-            <DatabaseStatusIcon isConnected={connectionStatus} isGdsActive={isGdsActive} uri={userCredentials?.uri} />
+            <DatabaseStatusIcon
+              isConnected={connectionStatus}
+              isGdsActive={isGdsActive}
+              uri={userCredentials?.uri}
+              database={userCredentials?.database}
+            />
             <div>{!hasSelections ? <StatusIndicator type='danger' /> : <StatusIndicator type='success' />}</div>
diff --git a/frontend/src/components/DataSources/Local/DropZone.tsx b/frontend/src/components/DataSources/Local/DropZone.tsx
index 88d200267..6b2e29f24 100644
--- a/frontend/src/components/DataSources/Local/DropZone.tsx
+++ b/frontend/src/components/DataSources/Local/DropZone.tsx
@@ -126,7 +126,7 @@ const DropZone: FunctionComponent = () => {
             if (curfile.name == file.name) {
               return {
                 ...curfile,
-                uploadProgress: chunkNumber * chunkProgressIncrement,
+                uploadProgress: Math.ceil(chunkNumber * chunkProgressIncrement),
               };
             }
             return curfile;
@@ -138,7 +138,7 @@
             if (curfile.name == file.name) {
               return {
                 ...curfile,
-                uploadProgress: chunkNumber * chunkProgressIncrement,
+                uploadProgress: Math.ceil(chunkNumber * chunkProgressIncrement),
               };
             }
             return curfile;
diff --git a/frontend/src/components/FileTable.tsx b/frontend/src/components/FileTable.tsx
index e49ae7ba5..afef7d542 100644
--- a/frontend/src/components/FileTable.tsx
+++ b/frontend/src/components/FileTable.tsx
@@ -105,7 +105,7 @@ const FileTable: ForwardRefRenderFunction = (props, re
     setCopyRow(true);
     setTimeout(() => {
       setCopyRow(false);
-    }, 5000);
+    }, 3000);
   };
   const columns = useMemo(
     () => [
@@ -593,7 +593,7 @@ const FileTable: ForwardRefRenderFunction = (props, re
                   handleCopy(copied);
                 }}
               >
-
+
@@ -616,7 +616,16 @@ const FileTable: ForwardRefRenderFunction = (props, re
         footer: (info) => info.column.id,
       }),
     ],
-    [filesData.length, statusFilter, filetypeFilter, llmtypeFilter, fileSourceFilter, isReadOnlyUser, colorMode]
+    [
+      filesData.length,
+      statusFilter,
+      filetypeFilter,
+      llmtypeFilter,
+      fileSourceFilter,
+      isReadOnlyUser,
+      colorMode,
+      copyRow,
+    ]
   );
 
   const table = useReactTable({
diff --git a/frontend/src/components/UI/DatabaseStatusIcon.tsx b/frontend/src/components/UI/DatabaseStatusIcon.tsx
index 7b159607e..31b0b54e1 100644
--- a/frontend/src/components/UI/DatabaseStatusIcon.tsx
+++ b/frontend/src/components/UI/DatabaseStatusIcon.tsx
@@ -4,7 +4,7 @@
 import { DatabaseStatusProps } from '../../types';
 import { connectionLabels } from '../../utils/Constants';
 import ScienceMoleculeIcon from '../UI/ScienceMolecule';
-const DatabaseStatusIcon: React.FC<DatabaseStatusProps> = ({ isConnected, isGdsActive, uri }) => {
+const DatabaseStatusIcon: React.FC<DatabaseStatusProps> = ({ isConnected, isGdsActive, uri, database = 'neo4j' }) => {
   const strokeColour = isConnected ? connectionLabels.greenStroke : connectionLabels.redStroke;
   const text = isGdsActive ? connectionLabels.graphDataScience : connectionLabels.graphDatabase;
   return (
@@ -18,7 +18,7 @@ const DatabaseStatusIcon: React.FC<DatabaseStatusProps> = ({ isConnected, isGdsA
       )}
-      {isConnected ? uri : connectionLabels.notConnected}
+      {isConnected ? `${uri} / ${database}` : connectionLabels.notConnected}
   );
 };
diff --git a/frontend/src/types.ts b/frontend/src/types.ts
index 922e47d3f..d544ed46c 100644
--- a/frontend/src/types.ts
+++ b/frontend/src/types.ts
@@ -799,6 +799,7 @@ export interface DatabaseStatusProps {
   isConnected: boolean;
   isGdsActive: boolean;
   uri?: string;
+  database?: string;
 }
 
 export type SourcesProps = {
diff --git a/frontend/src/utils/Utils.ts b/frontend/src/utils/Utils.ts
index 7c8c7dae3..b623da9ad 100644
--- a/frontend/src/utils/Utils.ts
+++ b/frontend/src/utils/Utils.ts
@@ -791,7 +791,6 @@ export const generateGraphFromNodeAndRelVals = (
       type,
     });
   });
-
   return {
     nodes: transformedNodes,
    relationships: transformedRelationships,
diff --git a/frontend/yarn.lock b/frontend/yarn.lock
index bdd5653bc..fbb42264d 100644
--- a/frontend/yarn.lock
+++ b/frontend/yarn.lock
@@ -6722,7 +6722,6 @@ sonner@1.7.1:
   integrity sha512-b6LHBfH32SoVasRFECrdY8p8s7hXPDn3OHUFbZZbiB1ctLS9Gdh6rpX2dVrpQA0kiL5jcRzDDldwwLkSKk3+QQ==
-
 
 source-map@0.5.6:
   version "0.5.6"
   resolved "https://registry.yarnpkg.com/source-map/-/source-map-0.5.6.tgz#75ce38f52bf0733c5a7f0c118d81334a2bb5f412"
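
For context on the backend changes: every Cypher call in `graphDB_dataAccess.py` now pins its session to the database the user connected to, instead of falling back to the driver's default. A minimal sketch of the pattern, assuming the `Neo4jGraph`-style `query(query, params, session_params=...)` signature this diff relies on (the `GraphQueryRunner` wrapper below is hypothetical, not part of the PR):

```python
# Hypothetical wrapper illustrating the session-pinning pattern applied
# throughout graphDB_dataAccess.py. Assumes a graph object whose query()
# accepts session_params and which stores the selected database on
# _database, as the diff does.
class GraphQueryRunner:
    def __init__(self, graph):
        self.graph = graph

    def run(self, cypher, params=None):
        # Pin the session to the user-selected database so multi-database
        # servers never execute against the driver default.
        return self.graph.query(
            cypher,
            params or {},
            session_params={"database": self.graph._database},
        )
```

Centralizing the keyword argument this way would avoid repeating `session_params={"database": self.graph._database}` at every call site; the diff keeps each call site explicit instead.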
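The `execute_query` method touched above already wraps the pinned query in a deadlock-retry loop; a rough sketch of the surrounding logic, reconstructed from the context lines (the behaviour after retries are exhausted is not shown in the hunk and is assumed here):

```python
import time
import logging
from neo4j.exceptions import TransientError

def execute_query(graph, query, param=None, max_retries=3, delay=2):
    retries = 0
    while retries < max_retries:
        try:
            # Same session pinning as the rest of the data-access layer.
            return graph.query(query, param,
                               session_params={"database": graph._database})
        except TransientError as e:
            if "DeadlockDetected" in str(e):
                retries += 1
                logging.info("Deadlock detected, retrying (%d/%d)", retries, max_retries)
                time.sleep(delay)  # fixed pause between attempts
            else:
                raise
    # Assumed: surface a hard failure once retries run out.
    raise RuntimeError(f"Query failed after {max_retries} deadlock retries")
```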
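The new `get_llm_model_name` helper exists because different LangChain chat-model classes expose their model identifier under different attribute names (`model_name` on `ChatOpenAI`, `model` on `ChatOllama`, `model_id` on `ChatBedrock`, for example), so `llm.model_name` alone raised `AttributeError` for some providers. An illustrative usage sketch (the import path follows this repo's backend layout; the `Dummy` class is a made-up stand-in):

```python
from langchain_openai import ChatOpenAI
from src.llm import get_llm_model_name  # backend/src/llm.py in this repo

llm = ChatOpenAI(model="gpt-4o")
# ChatOpenAI stores the identifier on .model_name, so the first probe hits.
assert get_llm_model_name(llm) == "gpt-4o"

# An object exposing none of the probed attributes falls back to "".
class Dummy:
    pass

assert get_llm_model_name(Dummy()) == ""
```

The lowercased name is then substring-matched against `TOOL_SUPPORTED_MODELS` to set `ignore_tool_usage` for `LLMGraphTransformer`.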
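On the frontend side: `DropZone` now rounds chunked-upload progress up to a whole number (`Math.ceil(chunkNumber * chunkProgressIncrement)`), so the progress bar no longer shows fractional percentages; `FileTable` shortens the copied-row indicator reset from 5s to 3s and adds `copyRow` to the `useMemo` dependency array so the column definitions pick up changes to that state; and the connection status line now reports both URI and database (`uri / database`) via the new optional `database` prop on `DatabaseStatusProps`, defaulting to `neo4j`.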