Initial implementation
110
.gitignore
vendored
Normal file
@@ -0,0 +1,110 @@
# Byte-compiled / optimized / DLL files
__pycache__/
*.py[cod]
*$py.class

# C extensions
*.so

# Distribution / packaging
.Python
build/
develop-eggs/
dist/
downloads/
eggs/
.eggs/
lib/
lib64/
parts/
sdist/
var/
wheels/
*.egg-info/
.installed.cfg
*.egg

# PyInstaller
*.manifest
*.spec

# Installer logs
pip-log.txt
pip-delete-this-directory.txt

# Unit test / coverage reports
htmlcov/
.tox/
.coverage
.coverage.*
.cache
nosetests.xml
coverage.xml
*.cover
.hypothesis/
.pytest_cache/

# Translations
*.mo
*.pot

# Django
*.log
local_settings.py
db.sqlite3

# Flask
instance/
.webassets-cache

# Scrapy
.scrapy

# Sphinx documentation
docs/_build/

# PyBuilder
target/

# Jupyter Notebook
.ipynb_checkpoints

# pyenv
.python-version

# celery beat schedule file
celerybeat-schedule

# SageMath parsed files
*.sage.py

# Environments
.env
.venv
env/
venv/
ENV/
env.bak/
venv.bak/

# Spyder project settings
.spyderproject
.spyproject

# Rope project settings
.ropeproject

# mkdocs documentation
/site

# mypy
.mypy_cache/

# IDE files
.idea/
.vscode/
*.swp
*.swo

# Application logs
logs/
169
DEPLOYMENT.md
Normal file
@@ -0,0 +1,169 @@
# Deployment Guide

This guide explains how to deploy the SSHFileToCbs Python application in a production environment.

## Prerequisites

- Python 3.6 or higher
- pip for package installation
- Access to a Linux server (Ubuntu/Debian or CentOS/RHEL instructions provided)

## Installation Steps

### 1. Clone or Copy the Application

Clone the repository or copy the application files to the deployment server:

```bash
# Optional: create a directory for the application
mkdir -p /opt/ssh-file-to-cbs

# Copy application files
cp -R /path/to/SSHFileToCbs_PYTHON/* /opt/ssh-file-to-cbs/
cd /opt/ssh-file-to-cbs
```

### 2. Install Required Packages

Install the required Python packages:

```bash
pip3 install -r requirements.txt
# Or install as a package
pip3 install -e .
```
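
Note: installing as a package (`pip3 install -e .`) also registers the `ssh-file-to-cbs` console script declared in `setup.py`, so the application can typically be launched by name instead of via `main.py` (shown here only as an illustration):

```bash
ssh-file-to-cbs --log-level INFO
```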

### 3. Configure the Application

Edit the configuration file to match your environment:

```bash
cp config/config.ini config/config.production.ini
# Edit the configuration file
nano config/config.production.ini
```

Update the following settings:
- Server connection details
- Local and remote file paths
- Transfer protocol and other application settings

### 4. Set Up as a System Service

For reliable operation, set up the application as a system service:

#### For systemd-based systems (Ubuntu, Debian, CentOS 7+, RHEL 7+):

```bash
# Copy the service file
sudo cp ssh-file-to-cbs.service /etc/systemd/system/

# Edit the service file if necessary to update paths
sudo nano /etc/systemd/system/ssh-file-to-cbs.service

# Reload systemd to recognize the new service
sudo systemctl daemon-reload

# Enable the service to start on boot
sudo systemctl enable ssh-file-to-cbs.service

# Start the service
sudo systemctl start ssh-file-to-cbs.service

# Check the status
sudo systemctl status ssh-file-to-cbs.service
```
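
The `ssh-file-to-cbs.service` file is referenced above but not reproduced in this guide. If you need to write one, the unit below is a minimal sketch; the installation path, interpreter, config file, `User`, and `Group` are assumptions to adjust for your environment:

```
[Unit]
Description=SSH File to CBS transfer service
After=network-online.target
Wants=network-online.target

[Service]
Type=simple
User=sshfiletocbs
Group=sshfiletocbs
WorkingDirectory=/opt/ssh-file-to-cbs
ExecStart=/usr/bin/python3 /opt/ssh-file-to-cbs/main.py --config /opt/ssh-file-to-cbs/config/config.production.ini
Restart=on-failure
RestartSec=30

[Install]
WantedBy=multi-user.target
```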

### 5. Set Up Log Rotation

Although the application has built-in log rotation, it's a good practice to set up system-level log rotation as well:

```bash
sudo nano /etc/logrotate.d/ssh-file-to-cbs
```

Add the following content:

```
/opt/ssh-file-to-cbs/logs/*.log {
    daily
    missingok
    rotate 14
    compress
    delaycompress
    notifempty
    create 0640 user group
    sharedscripts
    postrotate
        systemctl restart ssh-file-to-cbs.service >/dev/null 2>&1 || true
    endscript
}
```

Replace `user` and `group` with the appropriate values for your system.

### 6. Verify Installation

Check if the application is running correctly:

```bash
# Check service status
sudo systemctl status ssh-file-to-cbs.service

# Check logs
tail -f /opt/ssh-file-to-cbs/logs/SSHFileToCbs_YYYYMMDD.log
```

## Troubleshooting

### Service Won't Start

Check the logs for errors:

```bash
sudo journalctl -u ssh-file-to-cbs.service -n 50
```

### Connection Issues

Verify network connectivity and credentials:

```bash
# Test SSH connectivity
ssh -p <port> <username>@<host>

# Test network connectivity
ping <host>
telnet <host> <port>
```

### Permission Issues

Ensure the application has proper permissions to read/write files:

```bash
# Check directory permissions
ls -la /path/to/local/files/
ls -la /path/to/archive/

# Change ownership if needed
sudo chown -R user:group /path/to/directory/
```

## Monitoring

The application has built-in monitoring, but you can also set up external monitoring:

### Check Application Logs

```bash
tail -f /opt/ssh-file-to-cbs/logs/SSHFileToCbs_*.log
```

### Set Up Monitoring Alerts

Consider setting up monitoring with tools like Prometheus, Nagios, or a simple cron job that checks the logs for errors:

```bash
# Example cron job to check logs for errors
*/10 * * * * grep -i "error" /opt/ssh-file-to-cbs/logs/SSHFileToCbs_$(date +\%Y\%m\%d).log >/dev/null && echo "Errors found in SSH File to CBS logs" | mail -s "SSH File to CBS Error" admin@example.com
```
86
README.md
Normal file
@@ -0,0 +1,86 @@
# SSHFileToCbs - File Transfer Application

A modern Python implementation of a file transfer application between local and remote servers using SSH/SFTP or FTP.

## Features

- Support for both SSH/SFTP and FTP protocols
- Robust logging with rotation
- System monitoring for performance and resource usage
- Error handling and recovery
- Configurable file paths and patterns
- Clean code architecture following modern Python practices

## Requirements

- Python 3.6+
- Dependencies listed in requirements.txt

## Installation

1. Clone the repository
2. Install dependencies:

```bash
pip install -r requirements.txt
```

## Configuration

Create or edit the `config/config.ini` file with your server details and file paths:

```ini
[server]
REMOTE_HOST=your_host
REMOTE_USER=your_username
REMOTE_PASS=your_password
REMOTE_PORT=22

[app]
SLEEP_TIME_MINS=30
TRANSFER_PROTOCOL=SSH

[paths]
LOCAL_FOLDER_PATH=/path/to/local/files/
ARCHIVE_FOLDER_PATH=/path/to/archive/
LOCAL_REPORT_PATH=/path/to/report/files/
LOCAL_FAILED_PATH=/path/to/failed/files/

[remote_paths]
REMOTE_REPORT_PATTERN=BLK_*
REMOTE_INPUT_FILE_PATH=path/to/input/
REMOTE_OUTPUT_FILE_PATH=path/to/output/
REMOTE_FAILURE_FILE_PATH=path/to/failure/
```

## Usage

Run the application with:

```bash
python main.py
```

### Command Line Options

- `--config`: Path to configuration file (default: config/config.ini)
- `--log-level`: Set logging level (DEBUG, INFO, WARNING, ERROR, CRITICAL)
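
For example, to run against a copy of the configuration (here `config/config.production.ini`, an illustrative path) with verbose logging:

```bash
python main.py --config config/config.production.ini --log-level DEBUG
```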

## How It Works

The application:
1. Sends local files to the remote server
2. Archives sent files locally
3. Fetches report files from the remote server
4. Fetches failed files from the remote server
5. Sleeps for a configured period
6. Repeats the cycle

## Architecture

- `protocols/`: Protocol interfaces and implementations (SSH, FTP)
- `services/`: Core services for file sending and fetching
- `file_operations/`: Local file management utilities
- `utils/`: Logging, configuration, and monitoring utilities
- `app.py`: Main application class
- `main.py`: Entry point and command-line interface
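
For illustration, the sketch below mirrors what `src/app.py` does on each cycle using these modules directly; the connection details and paths are placeholders taken from the configuration example above:

```python
from src.protocols.protocol_factory import ProtocolFactory
from src.services.file_sender import FileSender
from src.services.file_fetcher import FileFetcher

# Build a protocol instance ("SSH" or "FTP"); returns None for an unknown type.
protocol = ProtocolFactory.create_protocol("SSH", "your_host", "your_username", "your_password", 22)

if protocol:
    # Upload pending files, archiving each one locally after a successful transfer.
    sender = FileSender(protocol, "/path/to/local/files/", "path/to/input/", "/path/to/archive/")
    files_sent = sender.send_files()

    # Download report files matching the configured pattern.
    fetcher = FileFetcher(protocol, "path/to/output/", "/path/to/report/files/", "BLK_*")
    downloaded = fetcher.fetch_files()
```

In normal use these classes are not called directly; `main.py` wires them together from `config/config.ini`.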
21
config/config.ini
Normal file
@@ -0,0 +1,21 @@
[server]
REMOTE_HOST=180.179.110.185
REMOTE_USER=ipks
REMOTE_PASS=ipks
REMOTE_PORT=22

[app]
SLEEP_TIME_MINS=30
TRANSFER_PROTOCOL=SSH

[paths]
LOCAL_FOLDER_PATH=/home/ec2-user/PRODFILES/
ARCHIVE_FOLDER_PATH=/home/ec2-user/PRODFILES/archive/
LOCAL_REPORT_PATH=/home/ec2-user/PRODFILES/reportFiles/
LOCAL_FAILED_PATH=/home/ec2-user/PRODFILES/failedFiles/

[remote_paths]
REMOTE_REPORT_PATTERN=BLK_*
REMOTE_INPUT_FILE_PATH=IPKS_FILES/FROMIPKS/
REMOTE_OUTPUT_FILE_PATH=IPKS_FILES/TOIPKS
REMOTE_FAILURE_FILE_PATH=IPKS_FILES/FAILURE
62
main.py
Executable file
@@ -0,0 +1,62 @@
#!/usr/bin/env python3
"""
SSHFileToCbs - File Transfer Application

This application transfers files between local and remote servers using SSH/SFTP or FTP.
It's a modernized Python replacement for the legacy Java application.
"""

import argparse
import logging
import os
import sys
from pathlib import Path

from src.app import Application

def parse_arguments():
    """Parse command line arguments"""
    parser = argparse.ArgumentParser(description="SSH/FTP File Transfer Application")

    parser.add_argument(
        "--config",
        type=str,
        help="Path to configuration file (default: config/config.ini)"
    )

    parser.add_argument(
        "--log-level",
        type=str,
        choices=["DEBUG", "INFO", "WARNING", "ERROR", "CRITICAL"],
        default="INFO",
        help="Set logging level (default: INFO)"
    )

    return parser.parse_args()

def main():
    """Application entry point"""
    # Parse command line arguments
    args = parse_arguments()

    # Determine log level
    log_level = getattr(logging, args.log_level)

    # Set up base logger
    logging.basicConfig(
        level=log_level,
        format="%(asctime)s - %(name)s - %(levelname)s - %(message)s",
        datefmt="%Y-%m-%d %H:%M:%S"
    )

    logger = logging.getLogger("SSHFileToCbs")
    logger.info("Starting SSHFileToCbs application")

    # Initialize and run application
    app = Application(args.config)
    app.run()

    logger.info("SSHFileToCbs application finished")

if __name__ == "__main__":
    main()
4
requirements.txt
Normal file
@@ -0,0 +1,4 @@
paramiko>=2.7.2
psutil>=5.8.0
python-dotenv>=0.19.1
pyftpdlib>=1.5.6
30
setup.py
Normal file
@@ -0,0 +1,30 @@
from setuptools import setup, find_packages

setup(
    name="ssh-file-to-cbs",
    version="1.0.0",
    packages=find_packages(),
    install_requires=[
        "paramiko>=2.7.2",
        "psutil>=5.8.0",
        "python-dotenv>=0.19.1",
        "pyftpdlib>=1.5.6",
    ],
    entry_points={
        "console_scripts": [
            "ssh-file-to-cbs=main:main",
        ],
    },
    python_requires=">=3.6",
    author="Converted from Java by Claude",
    description="A file transfer application using SSH/SFTP or FTP",
    classifiers=[
        "Programming Language :: Python :: 3",
        "Programming Language :: Python :: 3.6",
        "Programming Language :: Python :: 3.7",
        "Programming Language :: Python :: 3.8",
        "Programming Language :: Python :: 3.9",
        "License :: OSI Approved :: MIT License",
        "Operating System :: OS Independent",
    ],
)
0
src/__init__.py
Normal file
226
src/app.py
Normal file
@@ -0,0 +1,226 @@
import logging
import time
from typing import Dict, Any, Optional
import signal
import sys
import os

from .utils.logger import Logger
from .utils.config import Config
from .utils.monitoring import Monitoring
from .protocols.protocol_factory import ProtocolFactory
from .services.file_sender import FileSender
from .services.file_fetcher import FileFetcher


class Application:
    """
    Main application class for SSH/FTP file transfer
    """

    def __init__(self, config_path: Optional[str] = None):
        # Initialize logging
        self.logger_util = Logger("ssh_file_to_cbs")
        self.logger = self.logger_util.get_logger()

        # Load configuration
        self.config = Config(config_path)

        # Initialize monitoring
        self.monitoring = Monitoring("SSHFileToCbs")

        # Set up signal handlers
        signal.signal(signal.SIGINT, self._signal_handler)
        signal.signal(signal.SIGTERM, self._signal_handler)

        self.running = False
        self.logger.info("Application initialized")

    def _signal_handler(self, sig, frame):
        """Handle termination signals"""
        self.logger.info(f"Received signal {sig}, shutting down...")
        self.running = False

    def _create_protocol(self) -> Any:
        """Create protocol instance from configuration"""
        try:
            protocol_type = self.config.get("app", "TRANSFER_PROTOCOL")
            host = self.config.get("server", "REMOTE_HOST")
            username = self.config.get("server", "REMOTE_USER")
            password = self.config.get("server", "REMOTE_PASS")
            port = self.config.get_int("server", "REMOTE_PORT")

            if not all([protocol_type, host, username, password, port]):
                self.logger.error("Missing required configuration for protocol")
                return None

            protocol = ProtocolFactory.create_protocol(
                protocol_type, host, username, password, port
            )

            if not protocol:
                self.logger.error(f"Failed to create protocol: {protocol_type}")
                return None

            return protocol

        except Exception as e:
            self.logger.error(f"Error creating protocol: {str(e)}")
            return None

    def send_files(self) -> bool:
        """Send files from local to remote server"""
        try:
            protocol = self._create_protocol()
            if not protocol:
                return False

            local_folder_path = self.config.get("paths", "LOCAL_FOLDER_PATH")
            archive_folder_path = self.config.get("paths", "ARCHIVE_FOLDER_PATH")
            remote_input_path = self.config.get(
                "remote_paths", "REMOTE_INPUT_FILE_PATH"
            )

            if not all([local_folder_path, archive_folder_path, remote_input_path]):
                self.logger.error("Missing required configuration for sending files")
                return False

            sender = FileSender(
                protocol, local_folder_path, remote_input_path, archive_folder_path
            )

            files_sent = sender.send_files()

            self.monitoring.record_operation(
                "send_files",
                "success" if files_sent > 0 else "no_files",
                {"files_sent": files_sent},
            )

            return True

        except Exception as e:
            self.logger.error(f"Error in send_files: {str(e)}")
            self.monitoring.record_operation("send_files", "failure", {"error": str(e)})
            return False

    def fetch_report_files(self) -> bool:
        """Fetch report files from remote server"""
        try:
            protocol = self._create_protocol()
            if not protocol:
                return False

            local_report_path = self.config.get("paths", "LOCAL_REPORT_PATH")
            remote_output_path = self.config.get(
                "remote_paths", "REMOTE_OUTPUT_FILE_PATH"
            )
            report_pattern = self.config.get("remote_paths", "REMOTE_REPORT_PATTERN")

            if not all([local_report_path, remote_output_path]):
                self.logger.error(
                    "Missing required configuration for fetching report files"
                )
                return False

            fetcher = FileFetcher(
                protocol, remote_output_path, local_report_path, report_pattern
            )

            files_fetched = fetcher.fetch_files()

            self.monitoring.record_operation(
                "fetch_report_files",
                "success" if files_fetched else "no_files",
                {"files_fetched": len(files_fetched)},
            )

            return True

        except Exception as e:
            self.logger.error(f"Error in fetch_report_files: {str(e)}")
            self.monitoring.record_operation(
                "fetch_report_files", "failure", {"error": str(e)}
            )
            return False

    def fetch_failed_files(self) -> bool:
        """Fetch failed files from remote server"""
        try:
            protocol = self._create_protocol()
            if not protocol:
                return False

            local_failed_path = self.config.get("paths", "LOCAL_FAILED_PATH")
            remote_failure_path = self.config.get(
                "remote_paths", "REMOTE_FAILURE_FILE_PATH"
            )
            report_pattern = self.config.get("remote_paths", "REMOTE_REPORT_PATTERN")

            if not all([local_failed_path, remote_failure_path]):
                self.logger.error(
                    "Missing required configuration for fetching failed files"
                )
                return False

            fetcher = FileFetcher(
                protocol, remote_failure_path, local_failed_path, report_pattern
            )

            files_fetched = fetcher.fetch_files()

            self.monitoring.record_operation(
                "fetch_failed_files",
                "success" if files_fetched else "no_files",
                {"files_fetched": len(files_fetched)},
            )

            return True

        except Exception as e:
            self.logger.error(f"Error in fetch_failed_files: {str(e)}")
            self.monitoring.record_operation(
                "fetch_failed_files", "failure", {"error": str(e)}
            )
            return False

    def run(self) -> None:
        """Run the application main loop"""
        self.logger.info("Starting application main loop")
        self.running = True

        # Start background monitoring
        self.monitoring.start_background_monitoring()

        sleep_time = self.config.get_int("app", "SLEEP_TIME_MINS", fallback=30)
        self.logger.info(f"Sleep time between cycles: {sleep_time} minutes")

        try:
            while self.running:
                self.logger.info("Starting file transfer cycle")

                # Send files to remote server
                self.send_files()

                # Fetch report files
                self.fetch_report_files()

                # Fetch failed files
                self.fetch_failed_files()

                self.logger.info(
                    f"File transfer cycle completed, sleeping for {sleep_time} minutes"
                )

                # Sleep but check periodically if we should continue running
                for _ in range(sleep_time * 60 // 10):
                    if not self.running:
                        break
                    time.sleep(10)

        except Exception as e:
            self.logger.error(f"Error in main loop: {str(e)}")
        finally:
            self.monitoring.stop_background_monitoring()
            self.logger.info("Application shutdown complete")
0
src/file_operations/__init__.py
Normal file
75
src/file_operations/file_manager.py
Normal file
@@ -0,0 +1,75 @@
import os
import shutil
import logging
from typing import List, Optional
import glob

class FileManager:
    """
    Handles local file operations like moving files to archive
    """
    def __init__(self):
        self.logger = logging.getLogger("SSHFileToCbs.FileManager")

    def move_file(self, source_path: str, target_dir: str) -> bool:
        """
        Move a file from source path to target directory

        Args:
            source_path: Path to source file
            target_dir: Directory to move file to

        Returns:
            True if successful, False otherwise
        """
        try:
            if not os.path.isfile(source_path):
                self.logger.error(f"Source file not found: {source_path}")
                return False

            # Create target directory if it doesn't exist
            os.makedirs(target_dir, exist_ok=True)

            # Get filename
            filename = os.path.basename(source_path)
            target_path = os.path.join(target_dir, filename)

            # Move the file
            shutil.move(source_path, target_path)
            self.logger.info(f"Moved file from {source_path} to {target_path}")
            return True

        except Exception as e:
            self.logger.error(f"Failed to move file {source_path} to {target_dir}: {str(e)}")
            return False

    def get_files_in_directory(self, directory: str, pattern: Optional[str] = None) -> List[str]:
        """
        Get list of files in directory, optionally filtered by pattern

        Args:
            directory: Directory to list files from
            pattern: Optional glob pattern to filter files

        Returns:
            List of file paths
        """
        try:
            if not os.path.isdir(directory):
                self.logger.error(f"Directory not found: {directory}")
                return []

            # Get all files in directory
            if pattern:
                file_pattern = os.path.join(directory, pattern)
                files = glob.glob(file_pattern)
            else:
                files = [os.path.join(directory, f) for f in os.listdir(directory)
                         if os.path.isfile(os.path.join(directory, f))]

            self.logger.debug(f"Found {len(files)} files in {directory}")
            return files

        except Exception as e:
            self.logger.error(f"Error listing files in {directory}: {str(e)}")
            return []
0
src/protocols/__init__.py
Normal file
221
src/protocols/ftp_protocol.py
Normal file
@@ -0,0 +1,221 @@
import os
import ftplib
import fnmatch
from typing import List, Optional
import logging

from .protocol_interface import FileTransferProtocol

class FTPProtocol(FileTransferProtocol):
    """
    Implementation of FTP file transfer protocol
    """
    def __init__(self, host: str, username: str, password: str, port: int = 21):
        super().__init__(host, username, password, port)
        self.client = None
        self.connected = False

    def connect(self) -> bool:
        """
        Establish FTP connection
        """
        try:
            self.logger.info(f"Connecting to FTP server {self.username}@{self.host}:{self.port}")
            self.client = ftplib.FTP()
            self.client.connect(self.host, self.port, timeout=30)
            self.client.login(self.username, self.password)
            self.client.set_pasv(True)  # Use passive mode
            self.connected = True
            self.logger.info("FTP connection established successfully")
            return True

        except Exception as e:
            self.logger.error(f"Failed to establish FTP connection: {str(e)}")
            self.disconnect()
            return False

    def disconnect(self) -> None:
        """
        Close FTP connection
        """
        try:
            if self.client:
                self.client.quit()
                self.client = None

            self.connected = False
            self.logger.info("FTP connection closed")

        except Exception as e:
            self.logger.error(f"Error disconnecting FTP: {str(e)}")

    def _ensure_connection(self) -> bool:
        """
        Ensure connection is established
        """
        if not self.connected or not self.client:
            return self.connect()
        return True

    def _ensure_remote_dir(self, remote_dir_path: str) -> bool:
        """
        Ensure remote directory exists, create if it doesn't
        """
        if not self._ensure_connection():
            return False

        try:
            # Get current directory to return to later
            original_dir = self.client.pwd()

            # Split path and create each directory component
            dirs = remote_dir_path.strip('/').split('/')

            for i, directory in enumerate(dirs):
                if not directory:
                    continue

                try:
                    # Try to change to this directory
                    self.client.cwd(directory)
                except ftplib.error_perm:
                    # If it doesn't exist, create it
                    try:
                        self.logger.info(f"Creating remote directory: {directory}")
                        self.client.mkd(directory)
                        self.client.cwd(directory)
                    except Exception as e:
                        self.logger.error(f"Failed to create directory {directory}: {str(e)}")
                        # Return to original directory
                        self.client.cwd(original_dir)
                        return False

            # Return to original directory
            self.client.cwd(original_dir)
            return True

        except Exception as e:
            self.logger.error(f"Failed to ensure remote directory {remote_dir_path}: {str(e)}")
            return False

    def send_file(self, local_file_path: str, remote_dir_path: str) -> bool:
        """
        Send file to remote server

        Args:
            local_file_path: Path to local file
            remote_dir_path: Directory on remote server

        Returns:
            True if successful, False otherwise
        """
        if not self._ensure_connection():
            return False

        try:
            # Ensure local file exists
            if not os.path.isfile(local_file_path):
                self.logger.error(f"Local file not found: {local_file_path}")
                return False

            # Get filename and create branchwise directory
            filename = os.path.basename(local_file_path)
            file_tokens = filename.split('-', 1)

            if len(file_tokens) < 2:
                self.logger.warning(f"Filename {filename} doesn't match expected format (branch-filename)")
                branch_dir = "default"
                remote_filename = filename
            else:
                branch_dir = file_tokens[0]
                remote_filename = file_tokens[1]

            # Ensure remote directory structure exists
            branch_path = f"{remote_dir_path.rstrip('/')}/{branch_dir}"
            if not self._ensure_remote_dir(branch_path):
                return False

            # Change to target directory
            self.client.cwd(branch_path)

            # Upload the file
            self.logger.info(f"Uploading {local_file_path} to {branch_path}/{remote_filename}")
            with open(local_file_path, 'rb') as file:
                self.client.storbinary(f'STOR {remote_filename}', file)

            self.logger.info(f"File {filename} uploaded successfully")
            return True

        except Exception as e:
            self.logger.error(f"Failed to upload file {local_file_path}: {str(e)}")
            return False

    def fetch_files(self, remote_dir_path: str, local_dir_path: str, pattern: Optional[str] = None) -> List[str]:
        """
        Fetch files from remote server

        Args:
            remote_dir_path: Directory on remote server
            local_dir_path: Directory to store downloaded files
            pattern: Optional file pattern to match

        Returns:
            List of successfully downloaded files
        """
        if not self._ensure_connection():
            return []

        downloaded_files = []

        try:
            # Ensure local directory exists
            os.makedirs(local_dir_path, exist_ok=True)

            # Try to change to remote directory
            try:
                self.client.cwd(remote_dir_path)
            except ftplib.error_perm as e:
                self.logger.warning(f"Remote directory not found or permission denied: {remote_dir_path}")
                return []

            # Get list of files
            file_list = []
            self.client.retrlines('LIST', lambda x: file_list.append(x))

            # Process files
            for file_info in file_list:
                # Skip directories
                if file_info.startswith('d'):
                    continue

                # Extract filename (last part of the listing)
                parts = file_info.split()
                if len(parts) < 9:
                    continue

                filename = ' '.join(parts[8:])

                # Skip if pattern is specified and doesn't match
                if pattern and not fnmatch.fnmatch(filename, pattern):
                    continue

                local_file_path = os.path.join(local_dir_path, filename)

                # Only download if file doesn't exist locally
                if not os.path.exists(local_file_path):
                    self.logger.info(f"Downloading {filename} to {local_file_path}")

                    try:
                        with open(local_file_path, 'wb') as file:
                            self.client.retrbinary(f'RETR {filename}', file.write)
                        downloaded_files.append(local_file_path)
                    except Exception as e:
                        self.logger.error(f"Failed to download {filename}: {str(e)}")
                else:
                    self.logger.debug(f"File already exists locally: {local_file_path}")

            return downloaded_files

        except Exception as e:
            self.logger.error(f"Failed to fetch files from {remote_dir_path}: {str(e)}")
            return downloaded_files
41
src/protocols/protocol_factory.py
Normal file
@@ -0,0 +1,41 @@
import logging
from typing import Optional

from .protocol_interface import FileTransferProtocol
from .ssh_protocol import SSHProtocol
from .ftp_protocol import FTPProtocol

class ProtocolFactory:
    """
    Factory class to create appropriate protocol instances
    """
    @staticmethod
    def create_protocol(protocol_type: str, host: str, username: str, password: str, port: int) -> Optional[FileTransferProtocol]:
        """
        Create and return a protocol instance based on protocol type

        Args:
            protocol_type: Type of protocol ('SSH' or 'FTP')
            host: Remote host address
            username: Username for authentication
            password: Password for authentication
            port: Port number

        Returns:
            FileTransferProtocol instance or None if invalid protocol type
        """
        logger = logging.getLogger("SSHFileToCbs.ProtocolFactory")

        protocol_type = protocol_type.upper()

        if protocol_type == "SSH":
            logger.info(f"Creating SSH protocol for {username}@{host}:{port}")
            return SSHProtocol(host, username, password, port)

        elif protocol_type == "FTP":
            logger.info(f"Creating FTP protocol for {username}@{host}:{port}")
            return FTPProtocol(host, username, password, port)

        else:
            logger.error(f"Invalid protocol type: {protocol_type}")
            return None
58
src/protocols/protocol_interface.py
Normal file
@@ -0,0 +1,58 @@
from abc import ABC, abstractmethod
from typing import List, Optional
import logging

class FileTransferProtocol(ABC):
    """
    Abstract base class that defines the interface for file transfer protocols
    """
    def __init__(self, host: str, username: str, password: str, port: int):
        self.host = host
        self.username = username
        self.password = password
        self.port = port
        self.logger = logging.getLogger(f"SSHFileToCbs.{self.__class__.__name__}")

    @abstractmethod
    def connect(self) -> bool:
        """
        Establish connection to remote server
        Returns True if connection successful, False otherwise
        """
        pass

    @abstractmethod
    def disconnect(self) -> None:
        """
        Close connection to remote server
        """
        pass

    @abstractmethod
    def send_file(self, local_file_path: str, remote_dir_path: str) -> bool:
        """
        Send file to remote server

        Args:
            local_file_path: Path to local file
            remote_dir_path: Directory on remote server

        Returns:
            True if successful, False otherwise
        """
        pass

    @abstractmethod
    def fetch_files(self, remote_dir_path: str, local_dir_path: str, pattern: Optional[str] = None) -> List[str]:
        """
        Fetch files from remote server

        Args:
            remote_dir_path: Directory on remote server
            local_dir_path: Directory to store downloaded files
            pattern: Optional file pattern to match

        Returns:
            List of successfully downloaded files
        """
        pass
254
src/protocols/ssh_protocol.py
Normal file
@@ -0,0 +1,254 @@
import os
import paramiko
import fnmatch
import stat
from typing import List, Optional, Tuple
import logging
from pathlib import Path

from .protocol_interface import FileTransferProtocol

class SSHProtocol(FileTransferProtocol):
    """
    Implementation of SSH/SFTP file transfer protocol
    """
    def __init__(self, host: str, username: str, password: str, port: int = 22):
        super().__init__(host, username, password, port)
        self.client = None
        self.sftp = None
        self.connected = False

    def connect(self) -> bool:
        """
        Establish SSH connection and open SFTP channel
        """
        try:
            self.logger.info(f"Connecting to {self.username}@{self.host}:{self.port}")
            self.client = paramiko.SSHClient()
            self.client.set_missing_host_key_policy(paramiko.AutoAddPolicy())
            self.client.connect(
                hostname=self.host,
                port=self.port,
                username=self.username,
                password=self.password,
                timeout=30
            )

            self.sftp = self.client.open_sftp()
            self.connected = True
            self.logger.info("SSH connection established successfully")
            return True

        except Exception as e:
            self.logger.error(f"Failed to establish SSH connection: {str(e)}")
            self.disconnect()
            return False

    def disconnect(self) -> None:
        """
        Close SFTP channel and SSH connection
        """
        try:
            if self.sftp:
                self.sftp.close()
                self.sftp = None

            if self.client:
                self.client.close()
                self.client = None

            self.connected = False
            self.logger.info("SSH connection closed")

        except Exception as e:
            self.logger.error(f"Error disconnecting SSH: {str(e)}")

    def _ensure_connection(self) -> bool:
        """
        Ensure connection is established
        """
        if not self.connected or not self.client or not self.sftp:
            return self.connect()
        return True

    def _ensure_remote_dir(self, remote_dir_path: str) -> bool:
        """
        Ensure remote directory exists, create if it doesn't
        """
        if not self._ensure_connection():
            return False

        try:
            dirs_to_create = []
            path_parts = remote_dir_path.rstrip('/').split('/')
            current_path = ""

            # Build the path incrementally and check each component
            for part in path_parts:
                if not part:
                    continue

                if current_path:
                    current_path = f"{current_path}/{part}"
                else:
                    current_path = part

                try:
                    self.sftp.stat(current_path)
                except FileNotFoundError:
                    dirs_to_create.append(current_path)

            # Create directories that don't exist
            for dir_path in dirs_to_create:
                self.logger.info(f"Creating remote directory: {dir_path}")
                self.sftp.mkdir(dir_path)

            return True

        except Exception as e:
            self.logger.error(f"Failed to ensure remote directory {remote_dir_path}: {str(e)}")
            return False

    def send_file(self, local_file_path: str, remote_dir_path: str) -> bool:
        """
        Send file to remote server

        Args:
            local_file_path: Path to local file
            remote_dir_path: Directory on remote server

        Returns:
            True if successful, False otherwise
        """
        if not self._ensure_connection():
            return False

        try:
            # Ensure local file exists
            if not os.path.isfile(local_file_path):
                self.logger.error(f"Local file not found: {local_file_path}")
                return False

            # Get filename and create branchwise directory
            filename = os.path.basename(local_file_path)
            file_tokens = filename.split('-', 1)
            if len(file_tokens) < 2:
                self.logger.warning(f"Filename {filename} doesn't match expected format (branch-filename)")
                branch_dir = "default"
                remote_filename = filename
            else:
                branch_dir = file_tokens[0]
                remote_filename = file_tokens[1]

            # Ensure remote directory structure exists
            branch_path = f"{remote_dir_path.rstrip('/')}/{branch_dir}"
            if not self._ensure_remote_dir(branch_path):
                return False

            # Upload the file
            remote_file_path = f"{branch_path}/{remote_filename}"
            self.logger.info(f"Uploading {local_file_path} to {remote_file_path}")
            self.sftp.put(local_file_path, remote_file_path)

            self.logger.info(f"File {filename} uploaded successfully to {remote_file_path}")
            return True

        except Exception as e:
            self.logger.error(f"Failed to upload file {local_file_path}: {str(e)}")
            return False

    def fetch_files(self, remote_dir_path: str, local_dir_path: str, pattern: Optional[str] = None) -> List[str]:
        """
        Fetch files from remote server

        Args:
            remote_dir_path: Directory on remote server
            local_dir_path: Directory to store downloaded files
            pattern: Optional file pattern to match

        Returns:
            List of successfully downloaded files
        """
        if not self._ensure_connection():
            return []

        downloaded_files = []

        try:
            # Ensure local directory exists
            os.makedirs(local_dir_path, exist_ok=True)

            # Check if remote directory exists
            try:
                self.sftp.chdir(remote_dir_path)
            except FileNotFoundError:
                self.logger.warning(f"Remote directory not found: {remote_dir_path}")
                return []

            # Get current path to return to after operations
            current_path = self.sftp.getcwd()

            # Process subdirectories recursively
            self._process_remote_dir(current_path, local_dir_path, pattern, downloaded_files)

            return downloaded_files

        except Exception as e:
            self.logger.error(f"Failed to fetch files from {remote_dir_path}: {str(e)}")
            return downloaded_files

    def _process_remote_dir(self, remote_dir: str, local_dir: str, pattern: Optional[str], downloaded_files: List[str]) -> None:
        """
        Process remote directory recursively

        Args:
            remote_dir: Current remote directory path
            local_dir: Local directory to save files to
            pattern: File pattern to match
            downloaded_files: List to append downloaded file paths to
        """
        try:
            # List files in current directory
            items = self.sftp.listdir_attr(remote_dir)

            for item in items:
                remote_path = f"{remote_dir}/{item.filename}"

                # Skip . and .. directories
                if item.filename in ('.', '..'):
                    continue

                # Handle directories
                if stat.S_ISDIR(item.st_mode):
                    self.logger.debug(f"Found directory: {item.filename}")

                    # Create corresponding local directory
                    local_subdir = os.path.join(local_dir, item.filename)
                    os.makedirs(local_subdir, exist_ok=True)

                    # Process subdirectory recursively
                    self._process_remote_dir(remote_path, local_subdir, pattern, downloaded_files)

                # Handle files
                elif stat.S_ISREG(item.st_mode):
                    # Skip if pattern is specified and doesn't match
                    if pattern and not fnmatch.fnmatch(item.filename, pattern):
                        continue

                    local_file_path = os.path.join(local_dir, item.filename)

                    # Only download if file doesn't exist locally
                    if not os.path.exists(local_file_path):
                        self.logger.info(f"Downloading {remote_path} to {local_file_path}")

                        try:
                            self.sftp.get(remote_path, local_file_path)
                            downloaded_files.append(local_file_path)
                        except Exception as e:
                            self.logger.error(f"Failed to download {remote_path}: {str(e)}")
                    else:
                        self.logger.debug(f"File already exists locally: {local_file_path}")

        except Exception as e:
            self.logger.error(f"Error processing remote directory {remote_dir}: {str(e)}")
0
src/services/__init__.py
Normal file
54
src/services/file_fetcher.py
Normal file
@@ -0,0 +1,54 @@
import logging
from typing import List, Optional

from ..protocols.protocol_interface import FileTransferProtocol

class FileFetcher:
    """
    Service for fetching files from remote server
    """
    def __init__(self, protocol: FileTransferProtocol, remote_dir_path: str,
                 local_dir_path: str, pattern: Optional[str] = None):
        self.protocol = protocol
        self.remote_dir_path = remote_dir_path
        self.local_dir_path = local_dir_path
        self.pattern = pattern
        self.logger = logging.getLogger("SSHFileToCbs.FileFetcher")

    def fetch_files(self) -> List[str]:
        """
        Fetch files from remote server

        Returns:
            List of downloaded files
        """
        self.logger.info(f"Starting to fetch files from {self.remote_dir_path} to {self.local_dir_path}")

        pattern_info = f" with pattern '{self.pattern}'" if self.pattern else ""
        self.logger.info(f"Fetching files from {self.remote_dir_path}{pattern_info}")

        # Connect to remote server
        if not self.protocol.connect():
            self.logger.error("Failed to connect to remote server")
            return []

        # Fetch files
        downloaded_files = []

        try:
            downloaded_files = self.protocol.fetch_files(
                self.remote_dir_path,
                self.local_dir_path,
                self.pattern
            )

            self.logger.info(f"Downloaded {len(downloaded_files)} files")

        except Exception as e:
            self.logger.error(f"Error fetching files: {str(e)}")

        finally:
            # Always disconnect
            self.protocol.disconnect()

        return downloaded_files
65
src/services/file_sender.py
Normal file
@@ -0,0 +1,65 @@
import logging
from typing import List

from ..protocols.protocol_interface import FileTransferProtocol
from ..file_operations.file_manager import FileManager

class FileSender:
    """
    Service for sending local files to remote server
    """
    def __init__(self, protocol: FileTransferProtocol, local_folder_path: str,
                 remote_file_path: str, archive_path: str):
        self.protocol = protocol
        self.local_folder_path = local_folder_path
        self.remote_file_path = remote_file_path
        self.archive_path = archive_path
        self.file_manager = FileManager()
        self.logger = logging.getLogger("SSHFileToCbs.FileSender")

    def send_files(self) -> int:
        """
        Send all files from local folder to remote server and archive them

        Returns:
            Number of files sent successfully
        """
        self.logger.info(f"Starting to send files from {self.local_folder_path} to {self.remote_file_path}")

        # Get list of files in local folder
        files = self.file_manager.get_files_in_directory(self.local_folder_path)

        if not files:
            self.logger.info(f"No files found in {self.local_folder_path}")
            return 0

        self.logger.info(f"Found {len(files)} files to send")

        # Connect to remote server
        if not self.protocol.connect():
            self.logger.error("Failed to connect to remote server")
            return 0

        # Send each file
        files_sent = 0

        try:
            for file_path in files:
                self.logger.info(f"Sending file: {file_path}")

                # Send file to remote server
                if self.protocol.send_file(file_path, self.remote_file_path):
                    # Archive file after successful transfer
                    if self.file_manager.move_file(file_path, self.archive_path):
                        files_sent += 1
                    else:
                        self.logger.error(f"Failed to archive file {file_path}")
                else:
                    self.logger.error(f"Failed to send file {file_path}")

        finally:
            # Always disconnect
            self.protocol.disconnect()

        self.logger.info(f"Completed sending files. {files_sent} of {len(files)} files sent successfully.")
        return files_sent
0
src/utils/__init__.py
Normal file
60
src/utils/config.py
Normal file
@@ -0,0 +1,60 @@
import configparser
import os
from typing import Dict, Any, Optional
import logging

class Config:
    """
    Configuration utility for loading and accessing application settings
    """
    def __init__(self, config_path: Optional[str] = None):
        self.logger = logging.getLogger("SSHFileToCbs.Config")
        self.config = configparser.ConfigParser()

        if config_path is None:
            # Default config path relative to this file
            self.config_path = os.path.join(
                os.path.dirname(os.path.dirname(os.path.dirname(__file__))),
                'config',
                'config.ini'
            )
        else:
            self.config_path = config_path

        self.load_config()

    def load_config(self) -> None:
        """Load configuration from file"""
        try:
            if not os.path.exists(self.config_path):
                self.logger.error(f"Config file not found: {self.config_path}")
                raise FileNotFoundError(f"Config file not found: {self.config_path}")

            self.config.read(self.config_path)
            self.logger.info(f"Configuration loaded from {self.config_path}")
        except Exception as e:
            self.logger.error(f"Failed to load configuration: {str(e)}")
            raise

    def get(self, section: str, key: str, fallback: Any = None) -> Any:
        """Get config value by section and key"""
        try:
            return self.config.get(section, key)
        except (configparser.NoSectionError, configparser.NoOptionError) as e:
            self.logger.warning(f"Config value not found for {section}.{key}, using fallback: {fallback}")
            return fallback

    def get_int(self, section: str, key: str, fallback: Optional[int] = None) -> Optional[int]:
        """Get integer config value"""
        try:
            return self.config.getint(section, key)
        except (configparser.NoSectionError, configparser.NoOptionError, ValueError) as e:
            self.logger.warning(f"Failed to get integer config for {section}.{key}, using fallback: {fallback}")
            return fallback

    def get_all(self) -> Dict[str, Dict[str, str]]:
        """Get entire configuration as nested dict"""
        result = {}
        for section in self.config.sections():
            result[section] = dict(self.config[section])
        return result
51
src/utils/logger.py
Normal file
@@ -0,0 +1,51 @@
import logging
import logging.handlers
import os
from datetime import datetime
import sys


class Logger:
    """
    Logger utility class for application-wide logging with rotation
    """

    def __init__(self, name="SSHFileToCbs", log_level=logging.INFO):
        self.logger = logging.getLogger(name)
        self.logger.setLevel(log_level)

        # Create logs directory if it doesn't exist
        logs_dir = os.path.join(
            os.path.dirname(os.path.dirname(os.path.dirname(__file__))), "logs"
        )
        os.makedirs(logs_dir, exist_ok=True)

        # Set up file handler with rotation
        log_file_path = os.path.join(
            logs_dir, f"{name}_{datetime.now().strftime('%Y%m%d')}.log"
        )
        file_handler = logging.handlers.RotatingFileHandler(
            log_file_path, maxBytes=10485760, backupCount=10  # 10MB
        )

        # Set up console handler
        console_handler = logging.StreamHandler(sys.stdout)

        # Create formatter
        formatter = logging.Formatter(
            "%(asctime)s - %(name)s - %(levelname)s - %(message)s",
            datefmt="%Y-%m-%d %H:%M:%S",
        )

        # Set formatter for handlers
        file_handler.setFormatter(formatter)
        console_handler.setFormatter(formatter)

        # Add handlers to logger
        self.logger.addHandler(file_handler)
        self.logger.addHandler(console_handler)

    def get_logger(self):
        """Returns the logger instance"""
        return self.logger
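A minimal usage sketch for the Logger wrapper above; the log message is illustrative:

```python
# Hypothetical usage of the Logger wrapper; the message text is illustrative.
from src.utils.logger import Logger

logger = Logger(name="SSHFileToCbs").get_logger()
logger.info("Application started")  # goes to stdout and logs/SSHFileToCbs_YYYYMMDD.log
```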
145
src/utils/monitoring.py
Normal file
@@ -0,0 +1,145 @@
import logging
import time
import psutil
import platform
import os
from typing import Dict, Any, List, Optional
import socket
import threading
from datetime import datetime


class Monitoring:
    """
    Monitoring class for tracking application health and performance
    """

    def __init__(self, app_name: str = "SSHFileToCbs"):
        self.app_name = app_name
        self.logger = logging.getLogger(f"{app_name}.Monitoring")
        self.start_time = time.time()
        self.metrics: Dict[str, Any] = {}
        # Re-entrant lock: get_metrics() and log_metrics() acquire it while
        # calling helpers that also acquire it, so a plain Lock would deadlock.
        self._lock = threading.RLock()
        self._background_thread: Optional[threading.Thread] = None
        self._stop_thread = threading.Event()

        # Initialize metrics
        self._collect_system_info()

    def _collect_system_info(self) -> None:
        """Collect system information for monitoring"""
        with self._lock:
            self.metrics["hostname"] = socket.gethostname()
            self.metrics["os"] = platform.system()
            self.metrics["os_version"] = platform.version()
            self.metrics["python_version"] = platform.python_version()
            self.metrics["cpu_count"] = psutil.cpu_count(logical=True)
            self.metrics["start_time"] = datetime.fromtimestamp(self.start_time).isoformat()

            try:
                self.metrics["ip_address"] = socket.gethostbyname(socket.gethostname())
            except Exception:
                self.metrics["ip_address"] = "Unknown"

    def _collect_resource_usage(self) -> None:
        """Collect current resource usage"""
        with self._lock:
            process = psutil.Process(os.getpid())

            self.metrics["memory_usage_mb"] = process.memory_info().rss / (1024 * 1024)
            self.metrics["cpu_percent"] = process.cpu_percent(interval=0.1)
            self.metrics["threads_count"] = threading.active_count()
            self.metrics["uptime_seconds"] = time.time() - self.start_time
            self.metrics["last_updated"] = datetime.now().isoformat()

    def start_background_monitoring(self, interval: int = 60) -> None:
        """
        Start background monitoring in a separate thread

        Args:
            interval: Monitoring interval in seconds
        """
        if self._background_thread and self._background_thread.is_alive():
            self.logger.warning("Background monitoring is already running")
            return

        def _monitoring_thread() -> None:
            self.logger.info(f"Starting background monitoring with interval {interval} seconds")

            while not self._stop_thread.is_set():
                try:
                    self._collect_resource_usage()
                    self.log_metrics()
                except Exception as e:
                    self.logger.error(f"Error in monitoring thread: {str(e)}")

                # Sleep for the interval, but check stop flag periodically
                for _ in range(interval):
                    if self._stop_thread.is_set():
                        break
                    time.sleep(1)

        self._stop_thread.clear()
        self._background_thread = threading.Thread(target=_monitoring_thread, daemon=True)
        self._background_thread.start()

    def stop_background_monitoring(self) -> None:
        """Stop background monitoring thread"""
        if self._background_thread and self._background_thread.is_alive():
            self._stop_thread.set()
            self._background_thread.join(timeout=5)
            self.logger.info("Background monitoring stopped")

    def get_metrics(self) -> Dict[str, Any]:
        """
        Get current metrics

        Returns:
            Dictionary of metrics
        """
        with self._lock:
            # Update resource usage before returning
            self._collect_resource_usage()
            return self.metrics.copy()

    def log_metrics(self) -> None:
        """Log current metrics"""
        with self._lock:
            metrics = self.get_metrics()

        self.logger.info(
            f"System metrics - "
            f"Memory: {metrics.get('memory_usage_mb', 0):.2f} MB, "
            f"CPU: {metrics.get('cpu_percent', 0):.1f}%, "
            f"Threads: {metrics.get('threads_count', 0)}, "
            f"Uptime: {metrics.get('uptime_seconds', 0):.1f} seconds"
        )

    def record_operation(self, operation: str, status: str, details: Dict[str, Any]) -> None:
        """
        Record an operation for monitoring

        Args:
            operation: Name of the operation
            status: Status of the operation (success/failure)
            details: Details about the operation
        """
        timestamp = datetime.now().isoformat()

        with self._lock:
            if "operations" not in self.metrics:
                self.metrics["operations"] = []

            operation_record = {
                "timestamp": timestamp,
                "operation": operation,
                "status": status,
                **details
            }

            self.metrics["operations"].append(operation_record)

            # Keep only the latest 100 operations
            if len(self.metrics["operations"]) > 100:
                self.metrics["operations"] = self.metrics["operations"][-100:]

        log_level = logging.INFO if status == "success" else logging.ERROR
        self.logger.log(log_level, f"Operation '{operation}' {status}: {details}")
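A minimal usage sketch of the Monitoring class above; the operation name, details, and interval are illustrative, not dictated by the application:

```python
# Hypothetical usage of Monitoring; operation names and details are illustrative.
from src.utils.monitoring import Monitoring

monitoring = Monitoring(app_name="SSHFileToCbs")
monitoring.start_background_monitoring(interval=30)  # log metrics every ~30 seconds

monitoring.record_operation(
    operation="file_transfer",
    status="success",
    details={"file": "example.txt", "bytes": 1024},
)

print(monitoring.get_metrics()["memory_usage_mb"])
monitoring.stop_background_monitoring()
```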
18
ssh-file-to-cbs.service
Normal file
@@ -0,0 +1,18 @@
[Unit]
Description=SSH File to CBS Transfer Service
After=network.target

[Service]
User=ec2-user
Group=ec2-user
WorkingDirectory=/home/ec2-user/SSHFileToCbs_PYTHON
ExecStart=/usr/bin/python3 /home/ec2-user/SSHFileToCbs_PYTHON/main.py
Restart=always
RestartSec=30
StandardOutput=journal
StandardError=journal
SyslogIdentifier=ssh-file-to-cbs
Environment=PYTHONUNBUFFERED=1

[Install]
WantedBy=multi-user.target
0
tests/__init__.py
Normal file
60
tests/test_app.py
Normal file
@@ -0,0 +1,60 @@
import unittest
import os
import sys
from unittest.mock import patch, MagicMock

# Add parent directory to path so we can import the application
sys.path.insert(0, os.path.abspath(os.path.join(os.path.dirname(__file__), '..')))

from src.app import Application
from src.protocols.protocol_interface import FileTransferProtocol
from src.utils.config import Config


class TestApplication(unittest.TestCase):
    """Test cases for the main application"""

    @patch('src.utils.config.Config')
    def test_application_init(self, mock_config):
        """Test application initialization"""
        # Set up mock configuration
        mock_config_instance = MagicMock()
        mock_config.return_value = mock_config_instance

        # Initialize application
        app = Application()

        # Verify application initialized correctly
        self.assertIsNotNone(app.logger)
        self.assertIsNotNone(app.monitoring)
        self.assertFalse(app.running)

    @patch('src.utils.config.Config')
    @patch('src.protocols.protocol_factory.ProtocolFactory.create_protocol')
    def test_create_protocol(self, mock_create_protocol, mock_config):
        """Test protocol creation"""
        # Set up mock configuration
        mock_config_instance = MagicMock()
        mock_config_instance.get.side_effect = lambda section, key: {
            ('app', 'TRANSFER_PROTOCOL'): 'SSH',
            ('server', 'REMOTE_HOST'): 'testhost',
            ('server', 'REMOTE_USER'): 'testuser',
            ('server', 'REMOTE_PASS'): 'testpass',
        }.get((section, key))

        mock_config_instance.get_int.return_value = 22
        mock_config.return_value = mock_config_instance

        # Set up mock protocol
        mock_protocol = MagicMock(spec=FileTransferProtocol)
        mock_create_protocol.return_value = mock_protocol

        # Initialize application and create protocol
        app = Application()
        protocol = app._create_protocol()

        # Verify protocol was created correctly
        self.assertEqual(protocol, mock_protocol)
        mock_create_protocol.assert_called_once_with('SSH', 'testhost', 'testuser', 'testpass', 22)


if __name__ == '__main__':
    unittest.main()
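A short sketch of running the suite programmatically from the repository root, relying on the sys.path insertion the test module performs; running `python -m unittest` achieves the same thing:

```python
# Hypothetical: discover and run the tests from the repository root.
import unittest

suite = unittest.defaultTestLoader.discover("tests")
unittest.TextTestRunner(verbosity=2).run(suite)
```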