299 lines
9.7 KiB
Python
299 lines
9.7 KiB
Python
#!/usr/bin/env python3
|
|
"""
|
|
Data access layer for NEFT inward file processing.
|
|
Handles CRUD operations and transaction management.
|
|
"""
|
|
|
|
from typing import List, Optional
|
|
from logging_config import get_logger
|
|
from .oracle_connector import get_connector
|
|
from .models import NEFTInwardRecord, ProcessedFile
|
|
|
|
logger = get_logger(__name__)
|
|
|
|
|
|
class Repository:
    """Data access layer for NEFT inward processing.

    Every public method checks out a fresh connection from the shared
    connector, performs its work, and always releases the cursor and the
    connection in ``finally`` so no DB resources leak between calls.
    Write methods commit on success and roll back on error.
    """

    def __init__(self):
        """Initialize repository with the shared Oracle connector."""
        self.connector = get_connector()

    def bulk_insert_transactions(self, transactions: "List[NEFTInwardRecord]") -> int:
        """
        Bulk insert NEFT transactions into inward_neft_api_log.

        Args:
            transactions: List of NEFTInwardRecord objects

        Returns:
            Number of inserted rows (0 when the input list is empty)

        Raises:
            Exception: any database error is re-raised after rollback.
        """
        if not transactions:
            logger.warning("No transactions to insert")
            return 0

        conn = self.connector.get_connection()
        cursor = None
        try:
            cursor = conn.cursor()

            # Convert models to DB-ready dicts (column-name keyed)
            batch_data = [txn.to_dict() for txn in transactions]
            # SECURITY/PII: never log the payload itself -- it carries
            # account numbers, names and remitter details. Log only the size.
            logger.debug("Prepared %d rows for bulk insert", len(batch_data))

            # NOTE: RECIEVER_IFSC is intentionally misspelled -- it must
            # match the actual column name in inward_neft_api_log.
            insert_sql = """
                INSERT INTO inward_neft_api_log (
                    TXNIND,
                    JRNL_ID,
                    REF_NO,
                    TXN_DATE,
                    TXN_AMT,
                    SENDER_IFSC,
                    RECIEVER_IFSC,
                    SENDER_ACCT_NO,
                    SENDER_ACCT_NAME,
                    REMITTER_DETAIL,
                    REMITTER_INFO,
                    RECVR_ACCT_NO,
                    RECVR_ACCT_NAME,
                    STATUS,
                    REJECT_CODE,
                    REJECT_REASON,
                    BENEF_ADDRESS,
                    MSG_TYPE
                ) VALUES (
                    :TXNIND,
                    :JRNL_ID,
                    :REF_NO,
                    :TXN_DATE,
                    :TXN_AMT,
                    :SENDER_IFSC,
                    :RECIEVER_IFSC,
                    :SENDER_ACCT_NO,
                    :SENDER_ACCT_NAME,
                    :REMITTER_DETAIL,
                    :REMITTER_INFO,
                    :RECVR_ACCT_NO,
                    :RECVR_ACCT_NAME,
                    :STATUS,
                    :REJECT_CODE,
                    :REJECT_REASON,
                    :BENEF_ADDRESS,
                    :MSG_TYPE
                )
            """

            # Single round-trip batch insert; commit only after all rows bind.
            cursor.executemany(insert_sql, batch_data)
            conn.commit()

            inserted_count = len(transactions)
            logger.info(f"Inserted {inserted_count} NEFT transactions into inward_neft_api_log")
            return inserted_count

        except Exception as e:
            if conn:
                conn.rollback()
            logger.error(f"Error inserting NEFT transactions: {e}", exc_info=True)
            raise
        finally:
            if cursor:
                cursor.close()
            conn.close()

    def is_file_processed(self, filename: str, bankcode: str) -> bool:
        """
        Check if file has already been processed for a specific bank.

        Args:
            filename: Name of the file to check
            bankcode: Bank code to check

        Returns:
            True if file is in processed list for this bank, False otherwise.
            On a database error this deliberately returns False (best-effort:
            a lookup failure must not block processing) after logging it.
        """
        conn = self.connector.get_connection()
        cursor = None
        try:
            cursor = conn.cursor()
            cursor.execute(
                """
                SELECT COUNT(*)
                FROM neft_inward_processed_files
                WHERE filename = :filename
                AND bankcode = :bankcode
                """,
                {'filename': filename, 'bankcode': bankcode}
            )
            count = cursor.fetchone()[0]
            return count > 0
        except Exception as e:
            logger.error(f"Error checking processed file: {e}", exc_info=True)
            return False
        finally:
            if cursor:
                cursor.close()
            conn.close()

    def mark_file_processed(self, processed_file: "ProcessedFile") -> bool:
        """
        Insert record into neft_inward_processed_files to mark file as processed.

        Args:
            processed_file: ProcessedFile object with file metadata

        Returns:
            True if successful, False otherwise (the error is logged and
            the transaction rolled back; no exception escapes).
        """
        conn = self.connector.get_connection()
        cursor = None
        try:
            cursor = conn.cursor()

            file_data = processed_file.to_dict()
            insert_sql = """
                INSERT INTO neft_inward_processed_files (
                    filename, bankcode, file_path, transaction_count,
                    status, error_message, processed_at
                ) VALUES (
                    :filename, :bankcode, :file_path, :transaction_count,
                    :status, :error_message, :processed_at
                )
            """

            cursor.execute(insert_sql, file_data)
            conn.commit()

            logger.info(f"Marked file as processed: {processed_file.filename}")
            return True

        except Exception as e:
            if conn:
                conn.rollback()
            logger.error(f"Error marking file as processed: {e}", exc_info=True)
            return False
        finally:
            if cursor:
                cursor.close()
            conn.close()

    def get_processed_files(self, bankcode: Optional[str] = None) -> List[str]:
        """
        Get list of processed filenames, most recently processed first.

        Args:
            bankcode: Optional bankcode filter

        Returns:
            List of filenames that have been processed. On a database
            error an empty list is returned after logging.
        """
        conn = self.connector.get_connection()
        cursor = None
        try:
            cursor = conn.cursor()

            if bankcode:
                cursor.execute(
                    """
                    SELECT filename
                    FROM neft_inward_processed_files
                    WHERE bankcode = :bankcode
                    ORDER BY processed_at DESC
                    """,
                    {'bankcode': bankcode}
                )
            else:
                cursor.execute(
                    """
                    SELECT filename
                    FROM neft_inward_processed_files
                    ORDER BY processed_at DESC
                    """
                )

            filenames = [row[0] for row in cursor.fetchall()]
            return filenames

        except Exception as e:
            logger.error(f"Error retrieving processed files: {e}", exc_info=True)
            return []
        finally:
            if cursor:
                cursor.close()
            conn.close()

    def call_neft_api_txn_post(self) -> bool:
        """
        Call the neft_api_txn_post procedure to process inserted transactions.
        Should be called once per processing cycle after all files are inserted.

        Returns:
            True if procedure executed successfully, False otherwise
        """
        conn = self.connector.get_connection()
        cursor = None
        try:
            cursor = conn.cursor()
            logger.info("Calling neft_api_txn_post procedure to process all inserted transactions...")

            # Prefer callproc if available.
            try:
                cursor.callproc('neft_api_txn_post')
            except AttributeError:
                # Fallback ONLY for drivers that don't expose callproc.
                # (Previously this caught Exception, which re-executed the
                # procedure via SQL after it had already run and failed.)
                cursor.execute("BEGIN neft_api_txn_post; END;")

            conn.commit()
            logger.info("neft_api_txn_post procedure executed successfully")
            return True
        except Exception as e:
            logger.error(f"Error calling neft_api_txn_post procedure: {e}", exc_info=True)
            return False
        finally:
            if cursor:
                cursor.close()
            conn.close()

    @staticmethod
    def _require_table(cursor, table: str) -> None:
        """Probe *table* with a cheap SELECT; exit the program if missing.

        Raises:
            SystemExit: when the probe query fails (table absent/inaccessible).
        """
        try:
            # Table name comes only from the hardcoded calls below -- never
            # from user input -- so interpolation is safe here.
            cursor.execute(f"SELECT COUNT(*) FROM {table} WHERE ROWNUM = 1")
            logger.info(f"✓ {table} table exists")
        except Exception as e:
            logger.error(f"✗ {table} table not found: {e}")
            raise SystemExit(
                f"FATAL: {table} table must be created manually before running this application"
            )

    def verify_tables_exist(self):
        """
        Verify that required database tables exist.
        If tables are missing, terminate the program (SystemExit).
        """
        conn = self.connector.get_connection()
        cursor = None
        try:
            cursor = conn.cursor()

            self._require_table(cursor, "inward_neft_api_log")
            self._require_table(cursor, "neft_inward_processed_files")

            logger.info("Database tables verified successfully")

        except SystemExit:
            raise
        except Exception as e:
            logger.error(f"Error verifying tables: {e}", exc_info=True)
            raise SystemExit(f"FATAL: Error verifying database tables: {e}")
        finally:
            if cursor:
                cursor.close()
            conn.close()