# Document search pipeline demo:
# bad-query detection -> query transformation -> retrieval -> response generation.
from bad_query_detector import BadQueryDetector
from query_transformer import QueryTransformer
from document_retriver import DocumentRetriever
from senamtic_response_generator import SemanticResponseGenerator
class DocumentSearchSystem:
    """Query pipeline: screen bad queries, rewrite, retrieve, then answer."""

    def __init__(self):
        # Wire up the four pipeline stages (all project-local components).
        self.detector = BadQueryDetector()
        self.transformer = QueryTransformer()
        self.retriever = DocumentRetriever()
        self.response_generator = SemanticResponseGenerator()

    def process_query(self, query):
        """Run *query* through the pipeline and return a status dict.

        Returns one of three shapes: ``rejected`` (flagged as malicious),
        ``no_results`` (retrieval came back empty), or ``success`` with a
        generated ``response``.
        """
        # Guard clause: refuse anything the detector flags as malicious.
        if self.detector.is_bad_query(query):
            return {
                "status": "rejected",
                "message": "Query blocked due to detected malicious intent.",
            }

        rewritten = self.transformer.transform_query(query)
        documents = self.retriever.retrieve(rewritten)
        if not documents:
            return {
                "status": "no_results",
                "message": "No relevant documents found for your query.",
            }

        return {
            "status": "success",
            "response": self.response_generator.generate_response(documents),
        }
def test_system():
    """Smoke-test the pipeline with one benign and one malicious query."""
    system = DocumentSearchSystem()
    system.retriever.load_documents("/path/to/documents")

    # Normal query
    print("\nNormal Query Result:")
    print(system.process_query("Tell me about great acting performances."))

    # Malicious query
    print("\nMalicious Query Result:")
    print(system.process_query("DROP TABLE users; SELECT * FROM sensitive_data;"))
if __name__ == "__main__":
    # Run the demo only when executed as a script, not on import.
    test_system()